update stable to r2893

git-svn-id: http://webrtc.googlecode.com/svn/stable/src@2894 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/common_video/plane.cc b/common_video/plane.cc
index e47208f..dc20c62 100644
--- a/common_video/plane.cc
+++ b/common_video/plane.cc
@@ -15,6 +15,9 @@
 
 namespace webrtc {
 
+// Align the buffer to 64 bytes for better performance, e.g., when using SIMD.
+static const int kBufferAlignment = 64;
+
 Plane::Plane()
     : buffer_(NULL),
       allocated_size_(0),
@@ -37,12 +40,12 @@
     return -1;
   if (new_size <= allocated_size_)
     return 0;
-  uint8_t* new_buffer = new uint8_t[new_size];
+  Allocator<uint8_t>::scoped_ptr_aligned new_buffer(
+    AlignedMalloc<uint8_t>(new_size, kBufferAlignment));
   if (buffer_.get()) {
-    memcpy(new_buffer, buffer_.get(), plane_size_);
-    buffer_.reset();
+    memcpy(new_buffer.get(), buffer_.get(), plane_size_);
   }
-  buffer_.reset(new_buffer);
+  buffer_.reset(new_buffer.release());
   allocated_size_ = new_size;
   return 0;
 }
diff --git a/common_video/plane.h b/common_video/plane.h
index 795f800..c6d08ce 100644
--- a/common_video/plane.h
+++ b/common_video/plane.h
@@ -11,7 +11,7 @@
 #ifndef COMMON_VIDEO_PLANE_H
 #define COMMON_VIDEO_PLANE_H
 
-#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/aligned_malloc.h"
 #include "typedefs.h"  //NOLINT
 
 namespace webrtc {
@@ -57,7 +57,7 @@
   // Return value: 0 on success ,-1 on error.
   int MaybeResize(int new_size);
 
-  scoped_array<uint8_t> buffer_;
+  Allocator<uint8_t>::scoped_ptr_aligned buffer_;
   int allocated_size_;
   int plane_size_;
   int stride_;
diff --git a/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
index 7399dc1..6dbf52c 100644
--- a/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
+++ b/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -225,6 +225,8 @@
   virtual void OnReceivedRPSI(uint32_t ssrc,
                               uint64_t picture_id) = 0;
 
+  virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) = 0;
+
   virtual ~RtcpIntraFrameObserver() {}
 };
 
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc
index 631cc1b..c734e0c 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -133,9 +133,19 @@
 }
 
 
-void RTCPReceiver::SetSSRC( const WebRtc_UWord32 ssrc) {
+void RTCPReceiver::SetSSRC(const WebRtc_UWord32 ssrc) {
+  WebRtc_UWord32 old_ssrc = 0;
+  {
     CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    old_ssrc = _SSRC;
     _SSRC = ssrc;
+  }
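+  // We don't want to hold _criticalSectionRTCPReceiver while triggering the
+  // callback below, hence the separate scopes.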
+  {
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    if (_cbRtcpIntraFrameObserver && old_ssrc != ssrc) {
+      _cbRtcpIntraFrameObserver->OnLocalSsrcChanged(old_ssrc, ssrc);
+    }
+  }
 }
 
 WebRtc_Word32 RTCPReceiver::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
@@ -1196,6 +1206,12 @@
     // Might trigger a OnReceivedBandwidthEstimateUpdate.
     UpdateTMMBR();
   }
+  unsigned int local_ssrc = 0;
+  {
+    // We don't want to hold this critsect when triggering the callbacks below.
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    local_ssrc = _SSRC;
+  }
   if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSrReq) {
     _rtpRtcp.OnRequestSendReport();
   }
@@ -1228,18 +1244,15 @@
                        "SIG [RTCP] Incoming FIR from SSRC:0x%x",
                        rtcpPacketInformation.remoteSSRC);
         }
-        _cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(
-            rtcpPacketInformation.remoteSSRC);
+        _cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(local_ssrc);
       }
       if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli) {
         _cbRtcpIntraFrameObserver->OnReceivedSLI(
-            rtcpPacketInformation.remoteSSRC,
-            rtcpPacketInformation.sliPictureId);
+            local_ssrc, rtcpPacketInformation.sliPictureId);
       }
       if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi) {
         _cbRtcpIntraFrameObserver->OnReceivedRPSI(
-            rtcpPacketInformation.remoteSSRC,
-            rtcpPacketInformation.rpsiPictureId);
+            local_ssrc, rtcpPacketInformation.rpsiPictureId);
       }
     }
     if (_cbRtcpBandwidthObserver) {
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 6002979..9716f8a 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -106,6 +106,7 @@
   // make sure that RTCP objects are aware of our SSRC
   WebRtc_UWord32 SSRC = _rtpSender.SSRC();
   _rtcpSender.SetSSRC(SSRC);
+  _rtcpReceiver.SetSSRC(SSRC);
 
   WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s created", __FUNCTION__);
 }
diff --git a/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
index ce77953..833f867 100644
--- a/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
+++ b/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -71,6 +71,7 @@
                               uint64_t pictureId) {
     EXPECT_EQ(kTestPictureId, pictureId);
   };
+  virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {};
  private:
   RtpRtcp* _rtpRtcpModule;
 };
diff --git a/modules/video_coding/codecs/i420/main/interface/i420.h b/modules/video_coding/codecs/i420/main/interface/i420.h
index ea740c5..6699c22 100644
--- a/modules/video_coding/codecs/i420/main/interface/i420.h
+++ b/modules/video_coding/codecs/i420/main/interface/i420.h
@@ -49,7 +49,7 @@
 //                                <0 - Error
   virtual int Encode(const VideoFrame& inputImage,
                      const CodecSpecificInfo* /*codecSpecificInfo*/,
-                     const VideoFrameType /*frameTypes*/);
+                     const std::vector<VideoFrameType>* /*frame_types*/);
 
 // Register an encode complete callback object.
 //
diff --git a/modules/video_coding/codecs/i420/main/source/i420.cc b/modules/video_coding/codecs/i420/main/source/i420.cc
index 38cbbb8..75d85c1 100644
--- a/modules/video_coding/codecs/i420/main/source/i420.cc
+++ b/modules/video_coding/codecs/i420/main/source/i420.cc
@@ -78,7 +78,7 @@
 
 int I420Encoder::Encode(const VideoFrame& inputImage,
                     const CodecSpecificInfo* /*codecSpecificInfo*/,
-                    const VideoFrameType /*frameType*/) {
+                    const std::vector<VideoFrameType>* /*frame_types*/) {
   if (!_inited) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
diff --git a/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
index 7b60cf5..f1123c9 100644
--- a/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
+++ b/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -39,7 +39,7 @@
   MOCK_METHOD3(Encode,
                WebRtc_Word32(const VideoFrame& inputImage,
                              const CodecSpecificInfo* codecSpecificInfo,
-                             const VideoFrameType frameType));
+                             const std::vector<VideoFrameType>* frame_types));
   MOCK_METHOD1(RegisterEncodeCompleteCallback,
                WebRtc_Word32(EncodedImageCallback* callback));
   MOCK_METHOD0(Release, WebRtc_Word32());
diff --git a/modules/video_coding/codecs/interface/video_codec_interface.h b/modules/video_coding/codecs/interface/video_codec_interface.h
index c107b18..7ae5253 100644
--- a/modules/video_coding/codecs/interface/video_codec_interface.h
+++ b/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -11,6 +11,8 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
 
+#include <vector>
+
 #include "common_types.h"
 #include "modules/interface/module_common_types.h"
 #include "modules/video_coding/codecs/interface/video_error_codes.h"
@@ -95,12 +97,14 @@
     // Input:
     //          - inputImage        : Image to be encoded
     //          - codecSpecificInfo : Pointer to codec specific data
-    //          - frameType         : The frame type to encode
+    //          - frame_types       : The frame types to encode
     //
-    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
-    virtual WebRtc_Word32 Encode(const VideoFrame& inputImage,
-                                 const CodecSpecificInfo* codecSpecificInfo,
-                                 const VideoFrameType frameType) = 0;
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0
+    //                                otherwise.
+    virtual WebRtc_Word32 Encode(
+        const VideoFrame& inputImage,
+        const CodecSpecificInfo* codecSpecificInfo,
+        const std::vector<VideoFrameType>* frame_types) = 0;
 
     // Register an encode complete callback object.
     //
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index fadc1e8..53bd112 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -180,17 +180,17 @@
     source_frame_.SetTimeStamp(frame_number);
 
     // Decide if we're going to force a keyframe:
-    VideoFrameType frame_type = kDeltaFrame;
+    std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
     if (config_.keyframe_interval > 0 &&
         frame_number % config_.keyframe_interval == 0) {
-      frame_type = kKeyFrame;
+      frame_types[0] = kKeyFrame;
     }
 
     // For dropped frames, we regard them as zero size encoded frames.
     encoded_frame_size_ = 0;
 
     WebRtc_Word32 encode_result = encoder_->Encode(source_frame_, NULL,
-                                                   frame_type);
+                                                   &frame_types);
 
     if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
       fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
diff --git a/modules/video_coding/codecs/test_framework/normal_async_test.cc b/modules/video_coding/codecs/test_framework/normal_async_test.cc
index c9081d9..4a35d1a 100644
--- a/modules/video_coding/codecs/test_framework/normal_async_test.cc
+++ b/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -12,8 +12,9 @@
 
 #include <assert.h>
 #include <string.h>
-#include <sstream>
 #include <queue>
+#include <sstream>
+#include <vector>
 
 #include "gtest/gtest.h"
 #include "tick_util.h"
@@ -422,7 +423,7 @@
     }
     _encodeCompleteTime = 0;
     _encodeTimes[rawImage.TimeStamp()] = tGetTime();
-    VideoFrameType frameType = kDeltaFrame;
+    std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
 
     // check SLI queue
     _hasReceivedSLI = false;
@@ -458,13 +459,13 @@
     if (_hasReceivedPLI)
     {
         // respond to PLI by encoding a key frame
-        frameType = kKeyFrame;
+        frame_types[0] = kKeyFrame;
         _hasReceivedPLI = false;
         _hasReceivedSLI = false; // don't trigger both at once
     }
 
     webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
-    int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frame_types);
     EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
     if (codecSpecificInfo != NULL)
     {
diff --git a/modules/video_coding/codecs/test_framework/performance_test.cc b/modules/video_coding/codecs/test_framework/performance_test.cc
index 18c6ad9..d235928 100644
--- a/modules/video_coding/codecs/test_framework/performance_test.cc
+++ b/modules/video_coding/codecs/test_framework/performance_test.cc
@@ -267,13 +267,13 @@
 {
     VideoFrame rawImage;
     VideoBufferToRawImage(_inputVideoBuffer, rawImage);
-    VideoFrameType frameType = kDeltaFrame;
+    std::vector<VideoFrameType> frameTypes(1, kDeltaFrame);
     if (_requestKeyFrame && !(_encFrameCnt%50))
     {
-        frameType = kKeyFrame;
+        frameTypes[0] = kKeyFrame;
     }
     webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
-    int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameTypes);
     EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
     if (codecSpecificInfo != NULL)
     {
diff --git a/modules/video_coding/codecs/test_framework/unit_test.cc b/modules/video_coding/codecs/test_framework/unit_test.cc
index dbbbcf0..4863f0e 100644
--- a/modules/video_coding/codecs/test_framework/unit_test.cc
+++ b/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -240,8 +240,7 @@
 
     // Ensures our initial parameters are valid.
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    VideoFrameType videoFrameType = kDeltaFrame;
-    _encoder->Encode(image, NULL, videoFrameType);
+    _encoder->Encode(image, NULL, NULL);
     _refEncFrameLength = WaitForEncodedFrame();
     ASSERT_TRUE(_refEncFrameLength > 0);
     _refEncFrame = new unsigned char[_refEncFrameLength];
@@ -266,7 +265,7 @@
             _inputVideoBuffer.SetWidth(_source->GetWidth());
             _inputVideoBuffer.SetHeight(_source->GetHeight());
             VideoBufferToRawImage(_inputVideoBuffer, image);
-            _encoder->Encode(image, NULL, videoFrameType);
+            _encoder->Encode(image, NULL, NULL);
             ASSERT_TRUE(WaitForEncodedFrame() > 0);
         }
         EncodedImage encodedImage;
@@ -352,7 +351,6 @@
     int frameLength;
     VideoFrame inputImage;
     EncodedImage encodedImage;
-    VideoFrameType videoFrameType = kDeltaFrame;
 
     //----- Encoder parameter tests -----
 
@@ -360,7 +358,7 @@
     // We want to revert the initialization done in Setup().
     EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
     VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType)
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL)
                == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
 
     //-- InitEncode() errors --
@@ -423,7 +421,7 @@
     // inputVideoBuffer unallocated.
     _inputVideoBuffer.Free();
     inputImage.Free();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
     _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
     _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
@@ -436,8 +434,9 @@
     VideoBufferToRawImage(_inputVideoBuffer, inputImage);
     for (int i = 1; i <= 60; i++)
     {
-        VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame;
-        EXPECT_TRUE(_encoder->Encode(inputImage, NULL, frameType) ==
+        VideoFrameType frame_type = !(i % 2) ? kKeyFrame : kDeltaFrame;
+        std::vector<VideoFrameType> frame_types(1, frame_type);
+        EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &frame_types) ==
             WEBRTC_VIDEO_CODEC_OK);
         EXPECT_TRUE(WaitForEncodedFrame() > 0);
     }
@@ -445,12 +444,12 @@
     // Init then encode.
     _encodedVideoBuffer.UpdateLength(0);
     _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     EXPECT_TRUE(WaitForEncodedFrame() > 0);
 
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, videoFrameType);
+    _encoder->Encode(inputImage, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -459,11 +458,11 @@
     // Reset then encode.
     _encodedVideoBuffer.UpdateLength(0);
     _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     WaitForEncodedFrame();
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, videoFrameType);
+    _encoder->Encode(inputImage, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -472,12 +471,12 @@
     // Release then encode.
     _encodedVideoBuffer.UpdateLength(0);
     _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     WaitForEncodedFrame();
     EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, videoFrameType);
+    _encoder->Encode(inputImage, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -588,8 +587,7 @@
         tempInput.CopyFrame(tmpLength, inputImage.Buffer());
         tempInput.SetWidth(tempInst.width);
         tempInput.SetHeight(tempInst.height);
-        VideoFrameType videoFrameType = kDeltaFrame;
-        _encoder->Encode(tempInput, NULL, videoFrameType);
+        _encoder->Encode(tempInput, NULL, NULL);
         frameLength = WaitForEncodedFrame();
         EXPECT_TRUE(frameLength > 0);
         tempInput.Free();
@@ -607,7 +605,7 @@
         EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
         EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
             WEBRTC_VIDEO_CODEC_OK);
-        _encoder->Encode(inputImage, NULL, videoFrameType);
+        _encoder->Encode(inputImage, NULL, NULL);
         frameLength = WaitForEncodedFrame();
         EXPECT_TRUE(frameLength > 0);
 
@@ -666,8 +664,7 @@
         _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
         _inputVideoBuffer.SetTimeStamp(frames);
         VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-        VideoFrameType videoFrameType = kDeltaFrame;
-        ASSERT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+        ASSERT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
             WEBRTC_VIDEO_CODEC_OK);
         frameLength = WaitForEncodedFrame();
         //ASSERT_TRUE(frameLength);
@@ -745,8 +742,7 @@
                 static_cast<WebRtc_UWord32>(9e4 /
                     static_cast<float>(_inst.maxFramerate)));
             VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-            VideoFrameType videoFrameType = kDeltaFrame;
-            ASSERT_EQ(_encoder->Encode(inputImage, NULL, videoFrameType),
+            ASSERT_EQ(_encoder->Encode(inputImage, NULL, NULL),
                       WEBRTC_VIDEO_CODEC_OK);
             frameLength = WaitForEncodedFrame();
             ASSERT_GE(frameLength, 0u);
diff --git a/modules/video_coding/codecs/vp8/test/rps_test.cc b/modules/video_coding/codecs/vp8/test/rps_test.cc
index 82b63db..dd27f4c 100644
--- a/modules/video_coding/codecs/vp8/test/rps_test.cc
+++ b/modules/video_coding/codecs/vp8/test/rps_test.cc
@@ -149,7 +149,6 @@
   }
   _encodeCompleteTime = 0;
   _encodeTimes[rawImage.TimeStamp()] = tGetTime();
-  webrtc::VideoFrameType frameType = webrtc::kDeltaFrame;
 
   webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
   codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
@@ -162,7 +161,7 @@
     sli_ = false;
   }
   printf("Encoding: %u\n", _framecnt);
-  int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+  int ret = _encoder->Encode(rawImage, codecSpecificInfo, NULL);
   if (ret < 0)
     printf("Failed to encode: %u\n", _framecnt);
 
diff --git a/modules/video_coding/codecs/vp8/vp8.gyp b/modules/video_coding/codecs/vp8/vp8.gyp
index c828d6a..373a864 100644
--- a/modules/video_coding/codecs/vp8/vp8.gyp
+++ b/modules/video_coding/codecs/vp8/vp8.gyp
@@ -103,6 +103,7 @@
           'type': 'executable',
           'dependencies': [
             'webrtc_vp8',
+            '<(DEPTH)/testing/gmock.gyp:gmock',
             '<(DEPTH)/testing/gtest.gyp:gtest',
             '<(webrtc_root)/test/test.gyp:test_support_main',
           ],
diff --git a/modules/video_coding/codecs/vp8/vp8_impl.cc b/modules/video_coding/codecs/vp8/vp8_impl.cc
index 4f08f46..5f5a6bc 100644
--- a/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -326,7 +326,7 @@
 
 int VP8EncoderImpl::Encode(const VideoFrame& input_image,
                            const CodecSpecificInfo* codec_specific_info,
-                           const VideoFrameType frame_type) {
+                           const std::vector<VideoFrameType>* frame_types) {
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -337,6 +337,12 @@
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
 
+  VideoFrameType frame_type = kDeltaFrame;
+  // We only support one stream at the moment.
+  if (frame_types && frame_types->size() > 0) {
+    frame_type = (*frame_types)[0];
+  }
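+  // Entries for any additional simulcast streams in |frame_types| are ignored.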
+
   // Check for change in frame size.
   if (input_image.Width() != codec_.width ||
       input_image.Height() != codec_.height) {
diff --git a/modules/video_coding/codecs/vp8/vp8_impl.h b/modules/video_coding/codecs/vp8/vp8_impl.h
index 82c85ea..e1843a6 100644
--- a/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -74,7 +74,7 @@
 
   virtual int Encode(const VideoFrame& input_image,
                      const CodecSpecificInfo* codec_specific_info,
-                     const VideoFrameType frame_type);
+                     const std::vector<VideoFrameType>* frame_types);
 
   // Register an encode complete callback object.
   //
diff --git a/modules/video_coding/main/interface/video_coding.h b/modules/video_coding/main/interface/video_coding.h
index af05491..bea0107 100644
--- a/modules/video_coding/main/interface/video_coding.h
+++ b/modules/video_coding/main/interface/video_coding.h
@@ -260,7 +260,7 @@
     //
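+    // Input:
+    //          - stream_index    : Index of the simulcast stream for which a
+    //                              key frame is requested.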
     // Return value      : VCM_OK, on success.
     //                     < 0,         on error.
-    virtual WebRtc_Word32 IntraFrameRequest() = 0;
+    virtual WebRtc_Word32 IntraFrameRequest(int stream_index) = 0;
 
     // Frame Dropper enable. Can be used to disable the frame dropping when the encoder
     // over-uses its bit rate. This API is designed to be used when the encoded frames
diff --git a/modules/video_coding/main/source/encoded_frame.cc b/modules/video_coding/main/source/encoded_frame.cc
index dff9df3..a7e9bb7 100644
--- a/modules/video_coding/main/source/encoded_frame.cc
+++ b/modules/video_coding/main/source/encoded_frame.cc
@@ -227,31 +227,30 @@
     }
 }
 
-VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frameType)
-{
-    switch (frameType)
-    {
+VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) {
+  switch (frame_type) {
     case kVideoFrameKey:
-        {
-            return kKeyFrame;
-        }
+      return kKeyFrame;
     case kVideoFrameDelta:
-        {
-            return kDeltaFrame;
-        }
+      return kDeltaFrame;
     case kVideoFrameGolden:
-        {
-            return kGoldenFrame;
-        }
+      return kGoldenFrame;
     case kVideoFrameAltRef:
-        {
-            return kAltRefFrame;
-        }
+      return kAltRefFrame;
     default:
-        {
-            return kDeltaFrame;
-        }
-    }
+      assert(false);
+      return kDeltaFrame;
+  }
+}
+
+void VCMEncodedFrame::ConvertFrameTypes(
+    const std::vector<webrtc::FrameType>& frame_types,
+    std::vector<VideoFrameType>* video_frame_types) {
+  assert(video_frame_types);
+  video_frame_types->resize(frame_types.size());
+  for (size_t i = 0; i < frame_types.size(); ++i) {
+    (*video_frame_types)[i] = ConvertFrameType(frame_types[i]);
+  }
 }
 
 }
diff --git a/modules/video_coding/main/source/encoded_frame.h b/modules/video_coding/main/source/encoded_frame.h
index 6289e9e..932e98b 100644
--- a/modules/video_coding/main/source/encoded_frame.h
+++ b/modules/video_coding/main/source/encoded_frame.h
@@ -11,6 +11,8 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
 #define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
 
+#include <vector>
+
 #include "common_types.h"
 #include "common_video/interface/video_image.h"
 #include "modules/interface/module_common_types.h"
@@ -93,6 +95,9 @@
 
     static webrtc::FrameType ConvertFrameType(VideoFrameType frameType);
     static VideoFrameType ConvertFrameType(webrtc::FrameType frameType);
+    static void ConvertFrameTypes(
+        const std::vector<webrtc::FrameType>& frame_types,
+        std::vector<VideoFrameType>* video_frame_types);
 
 protected:
     /**
diff --git a/modules/video_coding/main/source/frame_dropper.cc b/modules/video_coding/main/source/frame_dropper.cc
index 065e452..47aa819 100644
--- a/modules/video_coding/main/source/frame_dropper.cc
+++ b/modules/video_coding/main/source/frame_dropper.cc
@@ -35,8 +35,8 @@
     _accumulator = 0.0f;
     _accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
     _targetBitRate = 300.0f;
-    _userFrameRate = 30;
-    _keyFrameSpreadFrames = 0.5f * _userFrameRate;
+    _incoming_frame_rate = 30;
+    _keyFrameSpreadFrames = 0.5f * _incoming_frame_rate;
     _dropNext = false;
     _dropRatio.Reset(0.9f);
     _dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
@@ -45,6 +45,10 @@
     _wasBelowMax = true;
     _enabled = true;
     _fastMode = false; // start with normal (non-aggressive) mode
+    // Cap for the encoder buffer level/accumulator, in secs.
+    _cap_buffer_size = 3.0f;
+    // Cap on the maximum time of dropped frames between kept frames, in secs.
+    _max_time_drops = 4.0f;
 }
 
 void
@@ -98,6 +102,7 @@
     }
     // Change the level of the accumulator (bucket)
     _accumulator += frameSizeKbits;
+    CapAccumulator();
 }
 
 void
@@ -135,7 +140,6 @@
     }
     _accumulator -= T;
     UpdateRatio();
-
 }
 
 void
@@ -222,6 +226,13 @@
             denom = (float)1e-5;
         }
         WebRtc_Word32 limit = static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
+        // Bound the maximum number of dropped frames between kept frames,
+        // based on the incoming frame rate and the max-drop time (in secs).
+        int max_limit = static_cast<int>(_incoming_frame_rate *
+                                         _max_time_drops);
+        if (limit > max_limit) {
+          limit = max_limit;
+        }
         if (_dropCount < 0)
         {
             // Reset the _dropCount since it was negative and should be positive.
@@ -302,7 +313,7 @@
 }
 
 void
-VCMFrameDropper::SetRates(float bitRate, float userFrameRate)
+VCMFrameDropper::SetRates(float bitRate, float incoming_frame_rate)
 {
     // Bit rate of -1 means infinite bandwidth.
     _accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
@@ -312,10 +323,8 @@
         _accumulator = bitRate / _targetBitRate * _accumulator;
     }
     _targetBitRate = bitRate;
-    if (userFrameRate > 0.0f)
-    {
-        _userFrameRate = userFrameRate;
-    }
+    CapAccumulator();
+    _incoming_frame_rate = incoming_frame_rate;
 }
 
 float
@@ -328,4 +337,14 @@
     return inputFrameRate * (1.0f - _dropRatio.Value());
 }
 
+// Put a cap on the accumulator, i.e., don't let it grow beyond some level.
+// This is a temporary fix for screencasting, where very large encoded frames
+// would otherwise cause a very slow response (too many frame drops).
+void VCMFrameDropper::CapAccumulator() {
+  float max_accumulator = _targetBitRate * _cap_buffer_size;
+  if (_accumulator > max_accumulator) {
+    _accumulator = max_accumulator;
+  }
+}
+
 }
diff --git a/modules/video_coding/main/source/frame_dropper.h b/modules/video_coding/main/source/frame_dropper.h
index 5e7e8a1..fdf024c 100644
--- a/modules/video_coding/main/source/frame_dropper.h
+++ b/modules/video_coding/main/source/frame_dropper.h
@@ -60,16 +60,18 @@
     //
     // Input:
     //          - bitRate       : The target bit rate
-    void SetRates(float bitRate, float userFrameRate);
+    void SetRates(float bitRate, float incoming_frame_rate);
 
     // Return value     : The current average frame rate produced
     //                    if the DropFrame() function is used as
     //                    instruction of when to drop frames.
     float ActualFrameRate(WebRtc_UWord32 inputFrameRate) const;
 
+
 private:
     void FillBucket(float inKbits, float outKbits);
     void UpdateRatio();
+    void CapAccumulator();
 
     WebRtc_Word32     _vcmId;
     VCMExpFilter       _keyFrameSizeAvgKbits;
@@ -83,10 +85,12 @@
     VCMExpFilter       _dropRatio;
     WebRtc_Word32     _dropCount;
     float           _windowSize;
-    float           _userFrameRate;
+    float           _incoming_frame_rate;
     bool            _wasBelowMax;
     bool            _enabled;
     bool            _fastMode;
+    float           _cap_buffer_size;
+    float           _max_time_drops;
 }; // end of VCMFrameDropper class
 
 } // namespace webrtc
diff --git a/modules/video_coding/main/source/generic_encoder.cc b/modules/video_coding/main/source/generic_encoder.cc
index 8ead0e5..1bf8fbe 100644
--- a/modules/video_coding/main/source/generic_encoder.cc
+++ b/modules/video_coding/main/source/generic_encoder.cc
@@ -59,9 +59,13 @@
 WebRtc_Word32
 VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
                           const CodecSpecificInfo* codecSpecificInfo,
-                          const FrameType frameType) {
-  VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
-  return _encoder.Encode(inputFrame, codecSpecificInfo, videoFrameType);
+                          const std::vector<FrameType>* frameTypes) {
+  std::vector<VideoFrameType> video_frame_types(
+      frameTypes ? frameTypes->size() : 1, kDeltaFrame);
+  if (frameTypes) {
+    VCMEncodedFrame::ConvertFrameTypes(*frameTypes, &video_frame_types);
+  }
+  return _encoder.Encode(inputFrame, codecSpecificInfo, &video_frame_types);
 }
 
 WebRtc_Word32
@@ -110,10 +114,17 @@
     return _encoder.SetPeriodicKeyFrames(enable);
 }
 
-WebRtc_Word32 VCMGenericEncoder::RequestFrame(const FrameType frameType) {
+WebRtc_Word32 VCMGenericEncoder::RequestFrame(
+    const std::vector<FrameType>* frame_types) {
+  if (!frame_types) {
+    return 0;
+  }
   VideoFrame image;
-  VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
-  return _encoder.Encode(image, NULL,  videoFrameType);
+  // |frame_types| is non-NULL here; size the vector to one entry per stream.
+  std::vector<VideoFrameType> video_frame_types(frame_types->size(),
+                                                kDeltaFrame);
+  VCMEncodedFrame::ConvertFrameTypes(*frame_types, &video_frame_types);
+  return _encoder.Encode(image, NULL, &video_frame_types);
 }
 
 WebRtc_Word32
diff --git a/modules/video_coding/main/source/generic_encoder.h b/modules/video_coding/main/source/generic_encoder.h
index c75339b..9e8ae16 100644
--- a/modules/video_coding/main/source/generic_encoder.h
+++ b/modules/video_coding/main/source/generic_encoder.h
@@ -101,7 +101,7 @@
     */
     WebRtc_Word32 Encode(const VideoFrame& inputFrame,
                          const CodecSpecificInfo* codecSpecificInfo,
-                         const FrameType frameType);
+                         const std::vector<FrameType>* frameTypes);
     /**
     *	Set new target bit rate and frame rate
     * Return Value: new bit rate if OK, otherwise <0s
@@ -127,7 +127,7 @@
 
     WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
 
-    WebRtc_Word32 RequestFrame(const FrameType frameType);
+    WebRtc_Word32 RequestFrame(const std::vector<FrameType>* frame_types);
 
     bool InternalSource() const;
 
diff --git a/modules/video_coding/main/source/media_optimization.cc b/modules/video_coding/main/source/media_optimization.cc
index b2ed54c..48b7b47 100644
--- a/modules/video_coding/main/source/media_optimization.cc
+++ b/modules/video_coding/main/source/media_optimization.cc
@@ -176,7 +176,8 @@
     _targetBitRate = bitRate - protection_overhead_kbps;
 
     // Update encoding rates following protection settings
-    _frameDropper->SetRates(static_cast<float>(_targetBitRate), 0);
+    _frameDropper->SetRates(static_cast<float>(_targetBitRate),
+                            _incomingFrameRate);
 
     if (_enableQm)
     {
diff --git a/modules/video_coding/main/source/video_coding_impl.cc b/modules/video_coding/main/source/video_coding_impl.cc
index a0c853f..ded4fb7 100644
--- a/modules/video_coding/main/source/video_coding_impl.cc
+++ b/modules/video_coding/main/source/video_coding_impl.cc
@@ -73,7 +73,7 @@
 _sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
 _encoder(),
 _encodedFrameCallback(),
-_nextFrameType(kVideoFrameDelta),
+_nextFrameTypes(1, kVideoFrameDelta),
 _mediaOpt(id, clock_),
 _sendCodecType(kVideoCodecUnknown),
 _sendStatsCallback(NULL),
@@ -334,6 +334,9 @@
     _sendCodecType = sendCodec->codecType;
     int numLayers = (_sendCodecType != kVideoCodecVP8) ? 1 :
                         sendCodec->codecSpecific.VP8.numberOfTemporalLayers;
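+    // Keep one pending frame type per simulcast stream (at least one entry).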
+    _nextFrameTypes.clear();
+    _nextFrameTypes.resize(VCM_MAX(sendCodec->numberOfSimulcastStreams, 1),
+                           kVideoFrameDelta);
 
     _mediaOpt.SetEncodingData(_sendCodecType,
                               sendCodec->maxBitrate,
@@ -661,7 +664,9 @@
     {
         return VCM_UNINITIALIZED;
     }
-    if (_nextFrameType == kFrameEmpty)
+    // TODO(holmer): Add support for dropping frames per stream. Currently we
+    // only have one frame dropper for all streams.
+    if (_nextFrameTypes[0] == kFrameEmpty)
     {
         return VCM_OK;
     }
@@ -679,7 +684,7 @@
         _mediaOpt.updateContentData(contentMetrics);
         WebRtc_Word32 ret = _encoder->Encode(videoFrame,
                                              codecSpecificInfo,
-                                             _nextFrameType);
+                                             &_nextFrameTypes);
         if (_encoderInputFile != NULL)
         {
           if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
@@ -695,19 +700,23 @@
                          "Encode error: %d", ret);
             return ret;
         }
-        _nextFrameType = kVideoFrameDelta; // default frame type
+        for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
+          _nextFrameTypes[i] = kVideoFrameDelta;  // Default frame type.
+        }
     }
     return VCM_OK;
 }
 
-WebRtc_Word32 VideoCodingModuleImpl::IntraFrameRequest() {
+WebRtc_Word32 VideoCodingModuleImpl::IntraFrameRequest(int stream_index) {
+  assert(stream_index >= 0);
   CriticalSectionScoped cs(_sendCritSect);
-  _nextFrameType = kVideoFrameKey;
+  assert(stream_index < static_cast<int>(_nextFrameTypes.size()));
+  _nextFrameTypes[stream_index] = kVideoFrameKey;
   if (_encoder != NULL && _encoder->InternalSource()) {
     // Try to request the frame if we have an external encoder with
     // internal source since AddVideoFrame never will be called.
-    if (_encoder->RequestFrame(_nextFrameType) == WEBRTC_VIDEO_CODEC_OK) {
-      _nextFrameType = kVideoFrameDelta;
+    if (_encoder->RequestFrame(&_nextFrameTypes) ==
+        WEBRTC_VIDEO_CODEC_OK) {
+      _nextFrameTypes[stream_index] = kVideoFrameDelta;
     }
   }
   return VCM_OK;
diff --git a/modules/video_coding/main/source/video_coding_impl.h b/modules/video_coding/main/source/video_coding_impl.h
index 1e39cbb..ed42ced 100644
--- a/modules/video_coding/main/source/video_coding_impl.h
+++ b/modules/video_coding/main/source/video_coding_impl.h
@@ -11,19 +11,20 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
 #define WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
 
-#include "video_coding.h"
-#include "critical_section_wrapper.h"
-#include "frame_buffer.h"
-#include "receiver.h"
-#include "timing.h"
-#include "jitter_buffer.h"
-#include "codec_database.h"
-#include "generic_decoder.h"
-#include "generic_encoder.h"
-#include "media_optimization.h"
-#include "modules/video_coding/main/source/tick_time_base.h"
+#include "modules/video_coding/main/interface/video_coding.h"
 
-#include <stdio.h>
+#include <vector>
+
+#include "modules/video_coding/main/source/codec_database.h"
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "modules/video_coding/main/source/generic_decoder.h"
+#include "modules/video_coding/main/source/generic_encoder.h"
+#include "modules/video_coding/main/source/jitter_buffer.h"
+#include "modules/video_coding/main/source/media_optimization.h"
+#include "modules/video_coding/main/source/receiver.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "modules/video_coding/main/source/timing.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
 
 namespace webrtc
 {
@@ -147,7 +148,7 @@
         const VideoContentMetrics* _contentMetrics = NULL,
         const CodecSpecificInfo* codecSpecificInfo = NULL);
 
-    virtual WebRtc_Word32 IntraFrameRequest();
+    virtual WebRtc_Word32 IntraFrameRequest(int stream_index);
 
     //Enable frame dropper
     virtual WebRtc_Word32 EnableFrameDropper(bool enable);
@@ -300,7 +301,7 @@
     CriticalSectionWrapper*             _sendCritSect; // Critical section for send side
     VCMGenericEncoder*                  _encoder;
     VCMEncodedFrameCallback             _encodedFrameCallback;
-    FrameType                           _nextFrameType;
+    std::vector<FrameType>              _nextFrameTypes;
     VCMMediaOptimization                _mediaOpt;
     VideoCodecType                      _sendCodecType;
     VCMSendStatisticsCallback*          _sendStatsCallback;
diff --git a/modules/video_coding/main/source/video_coding_impl_unittest.cc b/modules/video_coding/main/source/video_coding_impl_unittest.cc
new file mode 100644
index 0000000..bfb8227
--- /dev/null
+++ b/modules/video_coding/main/source/video_coding_impl_unittest.cc
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+#include "gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AllOf;
+using ::testing::ElementsAre;
+using ::testing::ElementsAreArray;
+using ::testing::Field;
+using ::testing::NiceMock;
+using ::testing::Pointee;
+using ::testing::Return;
+
+namespace webrtc {
+
+class TestVideoCodingModule : public ::testing::Test {
+ protected:
+  static const int kDefaultWidth = 1280;
+  static const int kDefaultHeight = 720;
+  static const int kNumberOfStreams = 3;
+  static const int kNumberOfLayers = 3;
+  static const int kUnusedPayloadType = 10;
+
+  virtual void SetUp() {
+    vcm_ = VideoCodingModule::Create(0);
+    EXPECT_EQ(0, vcm_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType,
+                                               false));
+    memset(&settings_, 0, sizeof(settings_));
+    EXPECT_EQ(0, vcm_->Codec(kVideoCodecVP8, &settings_));
+    settings_.numberOfSimulcastStreams = kNumberOfStreams;
+    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, 100,
+                    &settings_.simulcastStream[0]);
+    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, 500,
+                    &settings_.simulcastStream[1]);
+    ConfigureStream(kDefaultWidth, kDefaultHeight, 1200,
+                    &settings_.simulcastStream[2]);
+    settings_.plType = kUnusedPayloadType;  // Use the mocked encoder.
+    EXPECT_EQ(0, vcm_->RegisterSendCodec(&settings_, 1, 1200));
+  }
+
+  virtual void TearDown() {
+    VideoCodingModule::Destroy(vcm_);
+    input_frame_.Free();
+  }
+
+  void ExpectIntraRequest(int stream) {
+    if (stream == -1) {
+      // No intra request expected.
+      EXPECT_CALL(encoder_, Encode(
+          _, _, Pointee(ElementsAre(kDeltaFrame, kDeltaFrame, kDeltaFrame))))
+          .Times(1)
+          .WillRepeatedly(Return(0));
+      return;
+    }
+    assert(stream >= 0);
+    assert(stream < kNumberOfStreams);
+    std::vector<VideoFrameType> frame_types(kNumberOfStreams, kDeltaFrame);
+    frame_types[stream] = kKeyFrame;
+    EXPECT_CALL(encoder_, Encode(
+        _, _, Pointee(ElementsAreArray(&frame_types[0], frame_types.size()))))
+        .Times(1)
+        .WillRepeatedly(Return(0));
+  }
+
+  static void ConfigureStream(int width, int height, int max_bitrate,
+                              SimulcastStream* stream) {
+    assert(stream);
+    stream->width = width;
+    stream->height = height;
+    stream->maxBitrate = max_bitrate;
+    stream->numberOfTemporalLayers = kNumberOfLayers;
+    stream->qpMax = 45;
+  }
+
+  VideoCodingModule* vcm_;
+  NiceMock<MockVideoEncoder> encoder_;
+  VideoFrame input_frame_;
+  VideoCodec settings_;
+};
+
+TEST_F(TestVideoCodingModule, TestIntraRequests) {
+  EXPECT_EQ(0, vcm_->IntraFrameRequest(0));
+  ExpectIntraRequest(0);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+  ExpectIntraRequest(-1);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+
+  EXPECT_EQ(0, vcm_->IntraFrameRequest(1));
+  ExpectIntraRequest(1);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+  ExpectIntraRequest(-1);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+
+  EXPECT_EQ(0, vcm_->IntraFrameRequest(2));
+  ExpectIntraRequest(2);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+  ExpectIntraRequest(-1);
+  EXPECT_EQ(0, vcm_->AddVideoFrame(input_frame_, NULL, NULL));
+}
+
+}  // namespace webrtc
diff --git a/modules/video_coding/main/source/video_coding_test.gypi b/modules/video_coding/main/source/video_coding_test.gypi
index 72e9ad3..249f7a4 100644
--- a/modules/video_coding/main/source/video_coding_test.gypi
+++ b/modules/video_coding/main/source/video_coding_test.gypi
@@ -83,6 +83,7 @@
         'jitter_buffer_unittest.cc',
         'session_info_unittest.cc',
         'video_coding_robustness_unittest.cc',
+        'video_coding_impl_unittest.cc',
         'qm_select_unittest.cc',
       ],
     },
diff --git a/modules/video_coding/main/test/codec_database_test.cc b/modules/video_coding/main/test/codec_database_test.cc
index 10487bd..7696300 100644
--- a/modules/video_coding/main/test/codec_database_test.cc
+++ b/modules/video_coding/main/test/codec_database_test.cc
@@ -238,7 +238,7 @@
     // Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting
     // and because no frame type request callback has been registered.
     TEST(_vcm->Decode() == VCM_MISSING_CALLBACK);
-    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
     _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
     sourceFrame.SetTimeStamp(_timeStamp);
     TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
@@ -250,7 +250,7 @@
     sendCodec.width = _width;
     sendCodec.height = _height;
     TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
-    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
     waitEvent->Wait(33);
     _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
     sourceFrame.SetTimeStamp(_timeStamp);
@@ -260,7 +260,7 @@
     waitEvent->Wait(33);
     _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
     sourceFrame.SetTimeStamp(_timeStamp);
-    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
     TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
     TEST(_vcm->Decode() == VCM_OK);
     TEST(_vcm->ResetDecoder() == VCM_OK);
diff --git a/modules/video_coding/main/test/receiver_tests.h b/modules/video_coding/main/test/receiver_tests.h
index cb45ca1..403b22a 100644
--- a/modules/video_coding/main/test/receiver_tests.h
+++ b/modules/video_coding/main/test/receiver_tests.h
@@ -41,16 +41,25 @@
     FrameReceiveCallback(std::string outFilename) :
         _outFilename(outFilename),
         _outFile(NULL),
-        _timingFile(NULL) {}
+        _timingFile(NULL),
+        width_(0),
+        height_(0) {}
 
     virtual ~FrameReceiveCallback();
 
     WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
 
 private:
+    static void SplitFilename(std::string filename, std::string* basename,
+                              std::string* ending);
+    static std::string AppendWidthAndHeight(std::string basename,
+                                            unsigned int width,
+                                            unsigned int height);
     std::string     _outFilename;
     FILE*           _outFile;
     FILE*           _timingFile;
+    unsigned int width_;
+    unsigned int height_;
 };
 
 class SharedState
diff --git a/modules/video_coding/main/test/video_rtp_play.cc b/modules/video_coding/main/test/video_rtp_play.cc
index d07711b..2b7f800 100644
--- a/modules/video_coding/main/test/video_rtp_play.cc
+++ b/modules/video_coding/main/test/video_rtp_play.cc
@@ -20,6 +20,7 @@
 
 #include <stdio.h>
 #include <string.h>
+#include <sstream>
 
 using namespace webrtc;
 
@@ -55,9 +56,18 @@
             return -1;
         }
     }
-    if (_outFile == NULL)
+    if (_outFile == NULL || videoFrame.Width() != width_ ||
+        videoFrame.Height() != height_)
     {
-        _outFile = fopen(_outFilename.c_str(), "wb");
+        if (_outFile) {
+          fclose(_outFile);
+        }
+        printf("New size: %ux%u\n", videoFrame.Width(), videoFrame.Height());
+        width_ = videoFrame.Width();
+        height_ = videoFrame.Height();
+        std::string filename_with_width_height = AppendWidthAndHeight(
+            _outFilename, width_, height_);
+        _outFile = fopen(filename_with_width_height.c_str(), "wb");
         if (_outFile == NULL)
         {
             return -1;
@@ -73,6 +83,30 @@
     return 0;
 }
 
+void FrameReceiveCallback::SplitFilename(std::string filename,
+                                         std::string* basename,
+                                         std::string* ending) {
+  std::string::size_type idx;
+  idx = filename.rfind('.');
+
+  if (idx != std::string::npos) {
+    *ending = filename.substr(idx + 1);
+    *basename = filename.substr(0, idx);
+  } else {
+    *basename = filename;
+    *ending = "";
+  }
+}
+
+std::string FrameReceiveCallback::AppendWidthAndHeight(
+    std::string filename, unsigned int width, unsigned int height) {
+  std::string basename;
+  std::string ending;
+  SplitFilename(filename, &basename, &ending);
+  std::stringstream ss;
+  ss << basename << "." << width << "_" << height << "." << ending;
+  return ss.str();
+}
+
 int RtpPlay(CmdArgs& args)
 {
     // Make sure this test isn't executed without simulated events.
@@ -81,7 +115,7 @@
 #endif
     // BEGIN Settings
 
-    bool protectionEnabled = false;
+    bool protectionEnabled = true;
     VCMVideoProtection protectionMethod = kProtectionNack;
     WebRtc_UWord32 rttMS = 0;
     float lossRate = 0.0f;
@@ -102,6 +136,10 @@
     PayloadTypeList payloadTypes;
     payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
                                                   kVideoCodecVP8));
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_RED_PAYLOAD_TYPE, "RED",
+                                                  kVideoCodecRED));
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_ULPFEC_PAYLOAD_TYPE,
+                                                  "ULPFEC", kVideoCodecULPFEC));
 
     Trace::CreateTrace();
     Trace::SetTraceFile((test::OutputPath() + "receiverTestTrace.txt").c_str());
@@ -125,14 +163,17 @@
         if (payloadType != NULL)
         {
             VideoCodec codec;
-            if (VideoCodingModule::Codec(payloadType->codecType, &codec) < 0)
-            {
-                return -1;
-            }
-            codec.plType = payloadType->payloadType;
-            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
-            {
-                return -1;
+            if (payloadType->codecType != kVideoCodecULPFEC &&
+                payloadType->codecType != kVideoCodecRED) {
+              if (VideoCodingModule::Codec(payloadType->codecType, &codec) < 0)
+              {
+                  return -1;
+              }
+              codec.plType = payloadType->payloadType;
+              if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+              {
+                  return -1;
+              }
             }
         }
     }
diff --git a/system_wrappers/source/aligned_malloc.cc b/system_wrappers/source/aligned_malloc.cc
index c8efb42..c2f8f70 100644
--- a/system_wrappers/source/aligned_malloc.cc
+++ b/system_wrappers/source/aligned_malloc.cc
@@ -24,12 +24,6 @@
 // Reference on memory alignment:
 // http://stackoverflow.com/questions/227897/solve-the-memory-alignment-in-c-interview-question-that-stumped-me
 namespace webrtc {
-// TODO(henrike): better to create just one memory block and interpret the
-//                first sizeof(AlignedMemory) bytes as an AlignedMemory struct.
-struct AlignedMemory {
-  void* aligned_buffer;
-  void* memory_pointer;
-};
 
 uintptr_t GetRightAlign(uintptr_t start_pos, size_t alignment) {
   // The pointer should be aligned with |alignment| bytes. The - 1 guarantees
@@ -45,14 +39,14 @@
   return (alignment & (alignment - 1)) == 0;
 }
 
-void* GetRightAlign(const void* ptr, size_t alignment) {
-  if (!ptr) {
+void* GetRightAlign(const void* pointer, size_t alignment) {
+  if (!pointer) {
     return NULL;
   }
   if (!ValidAlignment(alignment)) {
     return NULL;
   }
-  uintptr_t start_pos = reinterpret_cast<uintptr_t>(ptr);
+  uintptr_t start_pos = reinterpret_cast<uintptr_t>(pointer);
   return reinterpret_cast<void*>(GetRightAlign(start_pos, alignment));
 }
 
@@ -64,38 +58,30 @@
     return NULL;
   }
 
-  AlignedMemory* return_value = new AlignedMemory();
-  if (return_value == NULL) {
-    return NULL;
-  }
-
   // The memory is aligned towards the lowest address that so only
   // alignment - 1 bytes needs to be allocated.
-  // A pointer to AlignedMemory must be stored so that it can be retreived for
-  // deletion, ergo the sizeof(uintptr_t).
-  return_value->memory_pointer = malloc(size + sizeof(uintptr_t) +
-                                        alignment - 1);
-  if (return_value->memory_pointer == NULL) {
-    delete return_value;
+  // A pointer to the start of the memory must be stored so that it can be
+  // retrieved for deletion, ergo the sizeof(uintptr_t).
+  void* memory_pointer = malloc(size + sizeof(uintptr_t) + alignment - 1);
+  if (memory_pointer == NULL) {
     return NULL;
   }
 
-  // Aligning after the sizeof(header) bytes will leave room for the header
+  // Aligning after the sizeof(uintptr_t) bytes will leave room for the header
   // in the same memory block.
-  uintptr_t align_start_pos =
-      reinterpret_cast<uintptr_t>(return_value->memory_pointer);
+  uintptr_t align_start_pos = reinterpret_cast<uintptr_t>(memory_pointer);
   align_start_pos += sizeof(uintptr_t);
   uintptr_t aligned_pos = GetRightAlign(align_start_pos, alignment);
-  return_value->aligned_buffer = reinterpret_cast<void*>(aligned_pos);
+  void* aligned_pointer = reinterpret_cast<void*>(aligned_pos);
 
-  // Store the address to the AlignedMemory struct in the header so that a
-  // it's possible to reclaim all memory.
-  uintptr_t header_pos = aligned_pos;
-  header_pos -= sizeof(uintptr_t);
-  void* header_ptr = reinterpret_cast<void*>(header_pos);
-  uintptr_t header_value = reinterpret_cast<uintptr_t>(return_value);
-  memcpy(header_ptr, &header_value, sizeof(uintptr_t));
-  return return_value->aligned_buffer;
+  // Store the address to the beginning of the memory just before the aligned
+  // memory.
+  uintptr_t header_pos = aligned_pos - sizeof(uintptr_t);
+  void* header_pointer = reinterpret_cast<void*>(header_pos);
+  uintptr_t memory_start = reinterpret_cast<uintptr_t>(memory_pointer);
+  memcpy(header_pointer, &memory_start, sizeof(uintptr_t));
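+  // Memory layout: [memory_pointer ... header (start address)][aligned buffer].
+  // AlignedFree() reads the header stored sizeof(uintptr_t) bytes before the
+  // aligned buffer to recover the original malloc() pointer.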
+
+  return aligned_pointer;
 }
 
 void AlignedFree(void* mem_block) {
@@ -106,13 +92,9 @@
   uintptr_t header_pos = aligned_pos - sizeof(uintptr_t);
 
-  // Read out the address of the AlignedMemory struct from the header.
+  // Read out the address of the start of the allocation from the header.
-  uintptr_t* header_ptr = reinterpret_cast<uintptr_t*>(header_pos);
-  AlignedMemory* delete_memory = reinterpret_cast<AlignedMemory*>(*header_ptr);
-
-  if (delete_memory->memory_pointer != NULL) {
-    free(delete_memory->memory_pointer);
-  }
-  delete delete_memory;
+  uintptr_t memory_start_pos = *reinterpret_cast<uintptr_t*>(header_pos);
+  void* memory_start = reinterpret_cast<void*>(memory_start_pos);
+  free(memory_start);
 }
 
 }  // namespace webrtc
diff --git a/video_engine/encoder_state_feedback.cc b/video_engine/encoder_state_feedback.cc
index cd155a6..cfd0984 100644
--- a/video_engine/encoder_state_feedback.cc
+++ b/video_engine/encoder_state_feedback.cc
@@ -37,6 +37,10 @@
     owner_->OnReceivedRPSI(ssrc, picture_id);
   }
 
+  virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+    owner_->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+  }
+
  private:
   EncoderStateFeedback* owner_;
 };
@@ -51,20 +55,25 @@
 
 bool EncoderStateFeedback::AddEncoder(uint32_t ssrc, ViEEncoder* encoder)  {
   CriticalSectionScoped lock(crit_.get());
-  if (encoders_.find(ssrc) != encoders_.end())
+  if (encoders_.find(ssrc) != encoders_.end()) {
+    // Two encoders must not have the same ssrc.
     return false;
+  }
 
   encoders_[ssrc] = encoder;
   return true;
 }
 
-void EncoderStateFeedback::RemoveEncoder(uint32_t ssrc)  {
+void EncoderStateFeedback::RemoveEncoder(const ViEEncoder* encoder)  {
   CriticalSectionScoped lock(crit_.get());
-  SsrcEncoderMap::iterator it = encoders_.find(ssrc);
-  if (it == encoders_.end())
-    return;
-
-  encoders_.erase(it);
+  SsrcEncoderMap::iterator it = encoders_.begin();
+  while (it != encoders_.end()) {
+    if (it->second == encoder) {
+      encoders_.erase(it++);
+    } else {
+      ++it;
+    }
+  }
 }
 
 RtcpIntraFrameObserver* EncoderStateFeedback::GetRtcpIntraFrameObserver() {
@@ -98,4 +107,18 @@
   it->second->OnReceivedRPSI(ssrc, picture_id);
 }
 
+void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
+                                              uint32_t new_ssrc) {
+  CriticalSectionScoped lock(crit_.get());
+  SsrcEncoderMap::iterator it = encoders_.find(old_ssrc);
+  if (it == encoders_.end() || encoders_.find(new_ssrc) != encoders_.end()) {
+    return;
+  }
+
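+  // Re-key the map entry so future feedback addressed to new_ssrc reaches the
+  // encoder that previously handled old_ssrc.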
+  ViEEncoder* encoder = it->second;
+  encoders_.erase(it);
+  encoders_[new_ssrc] = encoder;
+  encoder->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+}
+
 }  // namespace webrtc
diff --git a/video_engine/encoder_state_feedback.h b/video_engine/encoder_state_feedback.h
index 7d063d4..04d8205 100644
--- a/video_engine/encoder_state_feedback.h
+++ b/video_engine/encoder_state_feedback.h
@@ -38,7 +38,7 @@
   bool AddEncoder(uint32_t ssrc, ViEEncoder* encoder);
 
   // Removes a registered ViEEncoder.
-  void RemoveEncoder(uint32_t ssrc);
+  void RemoveEncoder(const ViEEncoder* encoder);
 
   // Returns an observer to register at the requesting class. The observer has
   // the same lifetime as the EncoderStateFeedback instance.
@@ -49,6 +49,7 @@
   void OnReceivedIntraFrameRequest(uint32_t ssrc);
   void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
   void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
+  void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
 
  private:
   typedef std::map<uint32_t,  ViEEncoder*> SsrcEncoderMap;
diff --git a/video_engine/encoder_state_feedback_unittest.cc b/video_engine/encoder_state_feedback_unittest.cc
index 3059526..4133a71 100644
--- a/video_engine/encoder_state_feedback_unittest.cc
+++ b/video_engine/encoder_state_feedback_unittest.cc
@@ -45,6 +45,8 @@
                void(uint32_t ssrc, uint8_t picture_id));
   MOCK_METHOD2(OnReceivedRPSI,
                void(uint32_t ssrc, uint64_t picture_id));
+  MOCK_METHOD2(OnLocalSsrcChanged,
+               void(uint32_t old_ssrc, uint32_t new_ssrc));
 };
 
 class VieKeyRequestTest : public ::testing::Test {
@@ -79,7 +81,7 @@
   encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
       ssrc, rpsi_picture_id);
 
-  encoder_state_feedback_->RemoveEncoder(ssrc);
+  encoder_state_feedback_->RemoveEncoder(&encoder);
 }
 
 // Register multiple encoders and make sure the request is relayed to correct
@@ -123,12 +125,12 @@
   encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
       ssrc_2, rpsi_pid_2);
 
-  encoder_state_feedback_->RemoveEncoder(ssrc_1);
+  encoder_state_feedback_->RemoveEncoder(&encoder_1);
   EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
       .Times(1);
   encoder_state_feedback_->GetRtcpIntraFrameObserver()->
       OnReceivedIntraFrameRequest(ssrc_2);
-  encoder_state_feedback_->RemoveEncoder(ssrc_2);
+  encoder_state_feedback_->RemoveEncoder(&encoder_2);
 }
 
 TEST_F(VieKeyRequestTest, AddTwiceError) {
@@ -136,7 +138,7 @@
   MockVieEncoder encoder(process_thread_.get());
   EXPECT_TRUE(encoder_state_feedback_->AddEncoder(ssrc, &encoder));
   EXPECT_FALSE(encoder_state_feedback_->AddEncoder(ssrc, &encoder));
-  encoder_state_feedback_->RemoveEncoder(ssrc);
+  encoder_state_feedback_->RemoveEncoder(&encoder);
 }
 
 }  // namespace webrtc
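A hedged sketch (not part of the patch) of a test that could exercise the new OnLocalSsrcChanged relay, reusing the VieKeyRequestTest fixture and MockVieEncoder defined in this file; the test name and SSRC values are arbitrary.

TEST_F(VieKeyRequestTest, SsrcChangeMovesRegistration) {
  const int old_ssrc = 1234;
  const int new_ssrc = 5678;
  MockVieEncoder encoder(process_thread_.get());
  EXPECT_TRUE(encoder_state_feedback_->AddEncoder(old_ssrc, &encoder));

  // The SSRC change is relayed to the registered encoder...
  EXPECT_CALL(encoder, OnLocalSsrcChanged(old_ssrc, new_ssrc)).Times(1);
  encoder_state_feedback_->GetRtcpIntraFrameObserver()->
      OnLocalSsrcChanged(old_ssrc, new_ssrc);

  // ...and the encoder is afterwards reachable under the new SSRC.
  EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(new_ssrc)).Times(1);
  encoder_state_feedback_->GetRtcpIntraFrameObserver()->
      OnReceivedIntraFrameRequest(new_ssrc);

  encoder_state_feedback_->RemoveEncoder(&encoder);
}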
diff --git a/video_engine/test/android/AndroidManifest.xml b/video_engine/test/android/AndroidManifest.xml
index d660310..8b44477 100644
--- a/video_engine/test/android/AndroidManifest.xml
+++ b/video_engine/test/android/AndroidManifest.xml
@@ -13,16 +13,11 @@
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
                 <category android:name="android.intent.category.LAUNCHER" />
+                <action android:name="android.intent.action.HEADSET_PLUG"/>
             </intent-filter>
         </activity>
     </application>
 
-    <receiver android:name="com.juno.brheadset.HeadsetStateReceiver">
-      <intent-filter>
-        <action android:name="android.intent.action.HEADSET_PLUG"/>
-      </intent-filter>
-    </receiver>
-
     <uses-sdk android:minSdkVersion="10" />
     <uses-permission android:name="android.permission.CAMERA"></uses-permission>
     <uses-feature android:name="android.hardware.camera" />
diff --git a/video_engine/test/android/jni/vie_android_java_api.cc b/video_engine/test/android/jni/vie_android_java_api.cc
index 6d6f55a..3443a3e 100644
--- a/video_engine/test/android/jni/vie_android_java_api.cc
+++ b/video_engine/test/android/jni/vie_android_java_api.cc
@@ -447,7 +447,6 @@
       return -1;
     }
     memset(&vieData, 0, sizeof(vieData));
-    VideoEngine::SetAndroidObjects(NULL, NULL);
     return 0;
   }
   else {
diff --git a/video_engine/test/android/res/layout/main.xml b/video_engine/test/android/res/layout/main.xml
index aa6bb88..a816800 100644
--- a/video_engine/test/android/res/layout/main.xml
+++ b/video_engine/test/android/res/layout/main.xml
@@ -61,6 +61,12 @@
                       android:id="@+id/LinearLayout03"
 		      android:layout_height="wrap_content"
 		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbCPULoad"
+		  android:text="@string/cpuload">
+	</CheckBox>
+
         <RadioGroup
                   android:layout_width="fill_parent"
                   android:layout_height="wrap_content"
diff --git a/video_engine/test/android/res/values/strings.xml b/video_engine/test/android/res/values/strings.xml
index 82760b0..3ace3d3 100644
--- a/video_engine/test/android/res/values/strings.xml
+++ b/video_engine/test/android/res/values/strings.xml
@@ -10,6 +10,7 @@
 <string name="remoteIp">Remote IP address</string>
 <string name="loopback">Loopback</string>
 <string name="stats">Stats</string>
+<string name="cpuload">CPULoad</string>
 <string name="startListen">Start Listen</string>
 <string name="startSend">Start Send</string>
 <string name="startBoth">Start Both</string>
diff --git a/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java b/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
index 680dcd8..9d94726 100644
--- a/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
+++ b/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
@@ -11,6 +11,7 @@
 package org.webrtc.videoengineapp;
 
 import java.io.File;
+import java.io.IOException;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
 import java.net.SocketException;
@@ -37,11 +38,12 @@
 import android.graphics.Rect;
 import android.hardware.SensorManager;
 import android.media.AudioManager;
+import android.media.MediaPlayer;
+import android.net.Uri;
 import android.os.Bundle;
 import android.os.Environment;
 import android.os.PowerManager;
 import android.os.PowerManager.WakeLock;
-
 import android.util.Log;
 import android.view.Gravity;
 import android.view.KeyEvent;
@@ -51,15 +53,14 @@
 import android.view.View;
 import android.view.ViewGroup;
 import android.view.Display;
+import android.view.OrientationEventListener;
 import android.view.Window;
 import android.view.WindowManager;
 import android.view.WindowManager.LayoutParams;
-
 import android.widget.AdapterView;
 import android.widget.ArrayAdapter;
 import android.widget.Button;
 import android.widget.CheckBox;
-
 import android.widget.EditText;
 import android.widget.LinearLayout;
 import android.widget.RadioGroup;
@@ -68,7 +69,6 @@
 import android.widget.TextView;
 import android.widget.AdapterView.OnItemSelectedListener;
 import android.widget.TabHost.TabSpec;
-import android.view.OrientationEventListener;
 
 public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
                                                 View.OnClickListener,
@@ -99,6 +99,7 @@
     private static final int SEND_CODEC_FRAMERATE = 15;
     private static final int INIT_BITRATE = 500;
     private static final String LOOPBACK_IP = "127.0.0.1";
+    private static final String RINGTONE_URL = "content://settings/system/ringtone";
 
     private int volumeLevel = 204;
 
@@ -127,6 +128,8 @@
     private boolean loopbackMode = true;
     private CheckBox cbStats;
     private boolean isStatsOn = true;
+    private CheckBox cbCPULoad;
+    private boolean isCPULoadOn = true;
     private boolean useOpenGLRender = true;
 
     // Video settings
@@ -181,6 +184,9 @@
                                                  "352x288", "640x480" };
     private String[] mVoiceCodecsStrings = null;
 
+    private Thread mBackgroundLoad = null;
+    private boolean mIsBackgroudLoadRunning = false;
+
     private OrientationEventListener orientationListener;
     int currentOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
     int currentCameraOrientation = 0;
@@ -241,14 +247,21 @@
         receiver = new BroadcastReceiver() {
                 @Override
                 public void onReceive(Context context, Intent intent) {
-                    AudioManager am = (AudioManager)getSystemService(AUDIO_SERVICE);
-                    if (am.isWiredHeadsetOn()) {
-                        enableSpeaker = false;
+                    if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG)
+                            == 0) {
+                        int state = intent.getIntExtra("state", 0);
+                        Log.v(TAG, "Intent.ACTION_HEADSET_PLUG state: " + state +
+                                " microphone: " + intent.getIntExtra("microphone", 0));
+                        if (voERunning) {
+                            if (state == 1) {
+                                enableSpeaker = true;
+                            }
+                            else {
+                                enableSpeaker = false;
+                            }
+                            RouteAudio(enableSpeaker);
+                        }
                     }
-                    else {
-                        enableSpeaker = true;
-                    }
-                    RouteAudio(enableSpeaker);
                 }
             };
         registerReceiver(receiver, receiverFilter );
@@ -309,6 +322,14 @@
         return;
     }
 
+    // Called before the activity is destroyed.
+    @Override
+    public void onDestroy() {
+        Log.d(TAG, "onDestroy");
+        super.onDestroy();
+        unregisterReceiver(receiver);
+    }
+
     private class StatsView extends View{
         public StatsView(Context context){
             super(context);
@@ -378,6 +399,9 @@
         Log.d(TAG, "StopAll");
 
         if (ViEAndroidAPI != null) {
+
+            StopCPULoad();
+
             if (voERunning) {
                 voERunning = false;
                 StopVoiceEngine();
@@ -506,6 +530,9 @@
         cbStats = (CheckBox) findViewById(R.id.cbStats);
         cbStats.setChecked(isStatsOn);
 
+        cbCPULoad = (CheckBox) findViewById(R.id.cbCPULoad);
+        cbCPULoad.setChecked(isCPULoadOn);
+
         cbVoice = (CheckBox) findViewById(R.id.cbVoice);
         cbVoice.setChecked(enableVoice);
 
@@ -550,6 +577,7 @@
         etRemoteIp.setOnClickListener(this);
         cbLoopback.setOnClickListener(this);
         cbStats.setOnClickListener(this);
+        cbCPULoad.setOnClickListener(this);
         cbEnableNack.setOnClickListener(this);
         cbEnableSpeaker.setOnClickListener(this);
         cbEnableAECM.setOnClickListener(this);
@@ -576,9 +604,23 @@
         return etRemoteIp.getText().toString();
     }
 
+    private void StartPlayingRingtone() {
+        MediaPlayer mMediaPlayer = new MediaPlayer();
+        try {
+            mMediaPlayer.setDataSource(this, Uri.parse(RINGTONE_URL));
+            mMediaPlayer.prepare();
+            mMediaPlayer.seekTo(0);
+            mMediaPlayer.start();
+        } catch (IOException e) {
+            Log.v(TAG, "MediaPlayer Failed: " + e);
+        }
+    }
+
     private void StartCall() {
         int ret = 0;
 
+        StartPlayingRingtone();
+
         if (enableVoice) {
             StartVoiceEngine();
         }
@@ -660,6 +702,14 @@
                 RemoveSatsView();
             }
 
+            isCPULoadOn = cbCPULoad.isChecked();
+            if (isCPULoadOn) {
+                StartCPULoad();
+            }
+            else {
+                StopCPULoad();
+            }
+
             viERunning = true;
         }
     }
@@ -837,6 +887,15 @@
                     RemoveSatsView();
                 }
                 break;
+            case R.id.cbCPULoad:
+                isCPULoadOn = cbCPULoad.isChecked();
+                if (isCPULoadOn) {
+                    StartCPULoad();
+                }
+                else {
+                    StopCPULoad();
+                }
+                break;
             case R.id.radio_surface:
                 useOpenGLRender = false;
                 break;
@@ -1008,4 +1067,42 @@
         mTabHost.removeView(statsView);
         statsView = null;
     }
+
+    private void StartCPULoad() {
+        if (null == mBackgroundLoad) {
+            mBackgroundLoad = new Thread(new Runnable() {
+                    public void run() {
+                        Log.v(TAG, "Background load started");
+                        mIsBackgroudLoadRunning = true;
+                        try{
+                            while (mIsBackgroudLoadRunning) {
+                                // This busy loop simulates CPU load.
+                                // Log.v(TAG, "Runnable!!!");
+                            }
+                        }
+                        catch(Throwable t) {
+                            Log.v(TAG, "StartCPULoad failed");
+                        }
+                    }
+                });
+            mBackgroundLoad.start();
+        }
+        else {
+            if (mBackgroundLoad.getState() == Thread.State.TERMINATED) {
+                mBackgroundLoad.start();
+            }
+        }
+    }
+
+    private void StopCPULoad() {
+        if (null != mBackgroundLoad) {
+            mIsBackgroudLoadRunning = false;
+            try{
+                mBackgroundLoad.join();
+            }
+            catch(Throwable t) {
+                Log.v(TAG, "StopCPULoad failed");
+            }
+        }
+    }
 }
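A C++ analogue (not part of the patch) of the StartCPULoad/StopCPULoad pattern above, shown only to illustrate the start/stop contract: a busy-looping worker plus a stop flag that the worker re-reads on every iteration. std::atomic makes the flag update visible to the worker thread; the Java code above relies on the same visibility, which a volatile flag would make explicit.

#include <atomic>
#include <thread>

class CpuLoad {
 public:
  CpuLoad() : running_(false) {}

  void Start() {
    if (worker_.joinable())
      return;  // Already running.
    running_ = true;
    worker_ = std::thread([this] {
      while (running_) {
        // Busy loop: burns one core to simulate CPU load.
      }
    });
  }

  void Stop() {
    running_ = false;
    if (worker_.joinable())
      worker_.join();  // Wait for the busy loop to observe the flag and exit.
  }

 private:
  std::atomic<bool> running_;
  std::thread worker_;
};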
diff --git a/video_engine/test/auto_test/source/vie_autotest_base.cc b/video_engine/test/auto_test/source/vie_autotest_base.cc
index 0225cb6..e9f270b 100644
--- a/video_engine/test/auto_test/source/vie_autotest_base.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_base.cc
@@ -136,6 +136,10 @@
       webrtc::ViENetwork::GetInterface(video_engine);
   EXPECT_TRUE(vie_network != NULL);
 
+  webrtc::ViERTP_RTCP* vie_rtp =
+      webrtc::ViERTP_RTCP::GetInterface(video_engine);
+  EXPECT_TRUE(vie_rtp != NULL);
+
   // ***************************************************************
   // Engine ready. Begin testing class
   // ***************************************************************
@@ -169,12 +173,15 @@
 
   const char* ip_address = "127.0.0.1\0";
   const int send_port = 1234;
+  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 1));
   EXPECT_EQ(0, vie_network->SetSendDestination(video_channel, ip_address,
-                                                   send_port));
+                                               send_port));
+  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 2));
   EXPECT_EQ(0, vie_network->SetSendDestination(video_channel2, ip_address,
-                                                   send_port + 2));
+                                               send_port + 2));
+  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 3));
   EXPECT_EQ(0, vie_network->SetSendDestination(video_channel3, ip_address,
-                                                   send_port + 4));
+                                               send_port + 4));
 
   EXPECT_EQ(0, vie_base->StartSend(video_channel));
   EXPECT_EQ(-1, vie_base->StartSend(video_channel2));
@@ -214,6 +221,7 @@
   EXPECT_EQ(0, vie_base->DisconnectAudioChannel(video_channel));
 
   // Clean up voice engine
+  EXPECT_EQ(0, vie_rtp->Release());
   EXPECT_EQ(0, vie_network->Release());
   EXPECT_EQ(0, vie_base->SetVoiceEngine(NULL));
   // VoiceEngine reference counting is per object, not per interface, so
diff --git a/video_engine/test/auto_test/source/vie_autotest_codec.cc b/video_engine/test/auto_test/source/vie_autotest_codec.cc
index b9a6480..0812e03 100644
--- a/video_engine/test/auto_test/source/vie_autotest_codec.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_codec.cc
@@ -343,10 +343,12 @@
         video_channel_1, rtp_port_1));
     EXPECT_EQ(0, video_engine.network->SetSendDestination(
         video_channel_1, "127.0.0.1", rtp_port_1));
+    EXPECT_EQ(0, video_engine.rtp_rtcp->SetLocalSSRC(video_channel_1, 1));
     EXPECT_EQ(0, video_engine.network->SetLocalReceiver(
         video_channel_2, rtp_port_2));
     EXPECT_EQ(0, video_engine.network->SetSendDestination(
         video_channel_2, "127.0.0.1", rtp_port_2));
+    EXPECT_EQ(0, video_engine.rtp_rtcp->SetLocalSSRC(video_channel_2, 2));
     tb_capture.ConnectTo(video_channel_1);
     tb_capture.ConnectTo(video_channel_2);
     EXPECT_EQ(0, video_engine.rtp_rtcp->SetKeyFrameRequestMethod(
diff --git a/video_engine/test/libvietest/include/tb_I420_codec.h b/video_engine/test/libvietest/include/tb_I420_codec.h
index e1c9b79..0d15212 100644
--- a/video_engine/test/libvietest/include/tb_I420_codec.h
+++ b/video_engine/test/libvietest/include/tb_I420_codec.h
@@ -36,7 +36,7 @@
     virtual WebRtc_Word32 Encode(
         const webrtc::VideoFrame& inputImage,
         const webrtc::CodecSpecificInfo* codecSpecificInfo,
-        const webrtc::VideoFrameType frameType);
+        const std::vector<webrtc::VideoFrameType>* frameTypes);
 
     virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
         webrtc::EncodedImageCallback* callback);
diff --git a/video_engine/test/libvietest/testbed/tb_I420_codec.cc b/video_engine/test/libvietest/testbed/tb_I420_codec.cc
index af30307..2782f93 100644
--- a/video_engine/test/libvietest/testbed/tb_I420_codec.cc
+++ b/video_engine/test/libvietest/testbed/tb_I420_codec.cc
@@ -120,7 +120,7 @@
 WebRtc_Word32 TbI420Encoder::Encode(
     const webrtc::VideoFrame& inputImage,
     const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
-    const webrtc::VideoFrameType /*frameType*/)
+    const std::vector<webrtc::VideoFrameType>* /*frameTypes*/)
 {
     _functionCalls.Encode++;
     if (!_inited)
diff --git a/video_engine/vie_base_impl.cc b/video_engine/vie_base_impl.cc
index 107b176..a87c7c0 100644
--- a/video_engine/vie_base_impl.cc
+++ b/video_engine/vie_base_impl.cc
@@ -397,7 +397,7 @@
 
   // Add WebRTC Version.
   std::stringstream version_stream;
-  version_stream << "VideoEngine 3.13.0" << std::endl;
+  version_stream << "VideoEngine 3.14.0" << std::endl;
 
   // Add build info.
   version_stream << "Build: svn:" << WEBRTC_SVNREVISION << " " << BUILDINFO
diff --git a/video_engine/vie_capturer.cc b/video_engine/vie_capturer.cc
index 5804475..09c09db 100644
--- a/video_engine/vie_capturer.cc
+++ b/video_engine/vie_capturer.cc
@@ -749,17 +749,19 @@
   return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
 }
 
-WebRtc_Word32 ViECapturer::Encode(const VideoFrame& input_image,
-                                  const CodecSpecificInfo* codec_specific_info,
-                                  const VideoFrameType frame_type) {
+WebRtc_Word32 ViECapturer::Encode(
+    const VideoFrame& input_image,
+    const CodecSpecificInfo* codec_specific_info,
+    const std::vector<VideoFrameType>* frame_types) {
   CriticalSectionScoped cs(encoding_cs_.get());
   if (!capture_encoder_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (frame_type == kKeyFrame) {
+  if (frame_types == NULL) {
+    return capture_encoder_->EncodeFrameType(kVideoFrameDelta);
+  } else if ((*frame_types)[0] == kKeyFrame) {
     return capture_encoder_->EncodeFrameType(kVideoFrameKey);
-  }
-  if (frame_type == kSkipFrame) {
+  } else if ((*frame_types)[0] == kSkipFrame) {
     return capture_encoder_->EncodeFrameType(kFrameEmpty);
   }
   return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
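Encode() now receives one VideoFrameType per simulcast stream instead of a single frame type. A hedged sketch (not part of the patch) of how a caller might build that vector when a key frame is wanted on a single stream; the enum here is simplified and the helper name is hypothetical.

#include <vector>

enum VideoFrameType { kDeltaFrame, kKeyFrame, kSkipFrame };  // Simplified.

// One entry per simulcast stream; request a key frame only on
// |key_frame_stream_idx| (pass a negative value for none).
std::vector<VideoFrameType> BuildFrameTypes(int num_streams,
                                            int key_frame_stream_idx) {
  std::vector<VideoFrameType> frame_types(num_streams, kDeltaFrame);
  if (key_frame_stream_idx >= 0 && key_frame_stream_idx < num_streams)
    frame_types[key_frame_stream_idx] = kKeyFrame;
  return frame_types;
}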
diff --git a/video_engine/vie_capturer.h b/video_engine/vie_capturer.h
index 6b054fe..5fc0dad 100644
--- a/video_engine/vie_capturer.h
+++ b/video_engine/vie_capturer.h
@@ -11,6 +11,8 @@
 #ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
 #define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
 
+#include <vector>
+
 #include "common_types.h"  // NOLINT
 #include "engine_configurations.h"  // NOLINT
 #include "modules/video_capture/main/interface/video_capture.h"
@@ -142,7 +144,7 @@
                                    WebRtc_UWord32 max_payload_size);
   virtual WebRtc_Word32 Encode(const VideoFrame& input_image,
                                const CodecSpecificInfo* codec_specific_info,
-                               const VideoFrameType frame_type);
+                               const std::vector<VideoFrameType>* frame_types);
   virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
       EncodedImageCallback* callback);
   virtual WebRtc_Word32 Release();
diff --git a/video_engine/vie_channel.cc b/video_engine/vie_channel.cc
index 21cf32d..faa3896 100644
--- a/video_engine/vie_channel.cc
+++ b/video_engine/vie_channel.cc
@@ -804,10 +804,26 @@
   return rtp_rtcp_->SetRTXReceiveStatus(true, SSRC);
 }
 
-WebRtc_Word32 ViEChannel::GetLocalSSRC(uint32_t* ssrc) {
+// TODO(mflodman) Add kViEStreamTypeRtx.
+WebRtc_Word32 ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
                "%s", __FUNCTION__);
-  *ssrc = rtp_rtcp_->SSRC();
+
+  if (idx == 0) {
+    *ssrc = rtp_rtcp_->SSRC();
+    return 0;
+  }
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  if (idx > simulcast_rtp_rtcp_.size()) {
+    return -1;
+  }
+  std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+  for (int i = 1; i < idx; ++i, ++it) {
+    if (it == simulcast_rtp_rtcp_.end()) {
+      return -1;
+    }
+  }
+  *ssrc = (*it)->SSRC();
   return 0;
 }
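The updated GetLocalSSRC() maps index 0 to the default RTP module and indices 1..N to the simulcast modules held in a std::list. A standalone sketch (not part of the patch) of that lookup using std::advance; the types are hypothetical stand-ins.

#include <iterator>
#include <list>

struct RtpModule { unsigned int ssrc; };  // Stand-in for RtpRtcp.

// Writes the SSRC for stream |idx| and returns 0, or -1 if out of range.
int GetLocalSsrc(const RtpModule& default_module,
                 const std::list<RtpModule>& simulcast_modules,
                 unsigned int idx, unsigned int* ssrc) {
  if (idx == 0) {
    *ssrc = default_module.ssrc;
    return 0;
  }
  if (idx > simulcast_modules.size())
    return -1;
  std::list<RtpModule>::const_iterator it = simulcast_modules.begin();
  std::advance(it, idx - 1);  // Index 1 is the first simulcast module.
  *ssrc = it->ssrc;
  return 0;
}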
 
diff --git a/video_engine/vie_channel.h b/video_engine/vie_channel.h
index 062eb01..bab5a82 100644
--- a/video_engine/vie_channel.h
+++ b/video_engine/vie_channel.h
@@ -118,8 +118,8 @@
                         const StreamType usage,
                         const unsigned char simulcast_idx);
 
-  // Gets SSRC for outgoing stream.
-  WebRtc_Word32 GetLocalSSRC(uint32_t* ssrc);
+  // Gets SSRC for outgoing stream number |idx|.
+  WebRtc_Word32 GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
 
   // Gets SSRC for the incoming stream.
   WebRtc_Word32 GetRemoteSSRC(uint32_t* ssrc);
diff --git a/video_engine/vie_channel_group.cc b/video_engine/vie_channel_group.cc
index 6254c21..d9830de 100644
--- a/video_engine/vie_channel_group.cc
+++ b/video_engine/vie_channel_group.cc
@@ -13,6 +13,7 @@
 #include "modules/bitrate_controller/include/bitrate_controller.h"
 #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
 #include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "video_engine/encoder_state_feedback.h"
 #include "video_engine/vie_channel.h"
 #include "video_engine/vie_encoder.h"
 #include "video_engine/vie_remb.h"
@@ -25,7 +26,8 @@
     : remb_(new VieRemb(process_thread)),
       bitrate_controller_(BitrateController::CreateBitrateController()),
       remote_bitrate_estimator_(RemoteBitrateEstimator::Create(remb_.get(),
-                                                               options, mode)) {
+                                                               options, mode)),
+      encoder_state_feedback_(new EncoderStateFeedback()) {
 }
 
 ChannelGroup::~ChannelGroup() {
@@ -57,6 +59,10 @@
   return remote_bitrate_estimator_.get();
 }
 
+EncoderStateFeedback* ChannelGroup::GetEncoderStateFeedback() {
+  return encoder_state_feedback_.get();
+}
+
 bool ChannelGroup::SetChannelRembStatus(int channel_id,
                                         bool sender,
                                         bool receiver,
diff --git a/video_engine/vie_channel_group.h b/video_engine/vie_channel_group.h
index 14b55db..4b6e927 100644
--- a/video_engine/vie_channel_group.h
+++ b/video_engine/vie_channel_group.h
@@ -19,6 +19,7 @@
 namespace webrtc {
 
 class BitrateController;
+class EncoderStateFeedback;
 struct OverUseDetectorOptions;
 class ProcessThread;
 class ViEChannel;
@@ -47,6 +48,7 @@
 
   BitrateController* GetBitrateController();
   RemoteBitrateEstimator* GetRemoteBitrateEstimator();
+  EncoderStateFeedback* GetEncoderStateFeedback();
 
  private:
   typedef std::set<int> ChannelSet;
@@ -54,6 +56,7 @@
   scoped_ptr<VieRemb> remb_;
   scoped_ptr<BitrateController> bitrate_controller_;
   scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+  scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
   ChannelSet channels_;
 };
 
diff --git a/video_engine/vie_channel_manager.cc b/video_engine/vie_channel_manager.cc
index bb4d09f..ad18138 100644
--- a/video_engine/vie_channel_manager.cc
+++ b/video_engine/vie_channel_manager.cc
@@ -16,6 +16,7 @@
 #include "system_wrappers/interface/critical_section_wrapper.h"
 #include "system_wrappers/interface/map_wrapper.h"
 #include "system_wrappers/interface/trace.h"
+#include "video_engine/encoder_state_feedback.h"
 #include "video_engine/vie_channel.h"
 #include "video_engine/vie_defines.h"
 #include "video_engine/vie_encoder.h"
@@ -103,10 +104,14 @@
       bitrate_controller->CreateRtcpBandwidthObserver();
   RemoteBitrateEstimator* remote_bitrate_estimator =
       group->GetRemoteBitrateEstimator();
+  EncoderStateFeedback* encoder_state_feedback =
+      group->GetEncoderStateFeedback();
 
   if (!(vie_encoder->Init() &&
         CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer,
-                            remote_bitrate_estimator, true))) {
+                            remote_bitrate_estimator,
+                            encoder_state_feedback->GetRtcpIntraFrameObserver(),
+                            true))) {
     delete vie_encoder;
     vie_encoder = NULL;
     ReturnChannelId(new_channel_id);
@@ -114,6 +119,15 @@
     return -1;
   }
 
+  // Register the ViEEncoder with the EncoderStateFeedback observer.
+  unsigned int ssrc = 0;
+  int idx = 0;
+  channel_map_[new_channel_id]->GetLocalSSRC(idx, &ssrc);
+  encoder_state_feedback->AddEncoder(ssrc, vie_encoder);
+  std::list<unsigned int> ssrcs;
+  ssrcs.push_back(ssrc);
+  vie_encoder->SetSsrcs(ssrcs);
+
   *channel_id = new_channel_id;
   group->AddChannel(*channel_id);
   channel_groups_.push_back(group);
@@ -141,6 +155,8 @@
       bitrate_controller->CreateRtcpBandwidthObserver();
   RemoteBitrateEstimator* remote_bitrate_estimator =
       channel_group->GetRemoteBitrateEstimator();
+  EncoderStateFeedback* encoder_state_feedback =
+      channel_group->GetEncoderStateFeedback();
 
   ViEEncoder* vie_encoder = NULL;
   if (sender) {
@@ -149,18 +165,25 @@
                                  *module_process_thread_,
                                  bitrate_controller);
     if (!(vie_encoder->Init() &&
-          CreateChannelObject(new_channel_id, vie_encoder,
-                              bandwidth_observer,
-                              remote_bitrate_estimator,
-                              sender))) {
+          CreateChannelObject(
+              new_channel_id, vie_encoder, bandwidth_observer,
+              remote_bitrate_estimator,
+              encoder_state_feedback->GetRtcpIntraFrameObserver(), sender))) {
       delete vie_encoder;
       vie_encoder = NULL;
     }
+    // Register the ViEEncoder to get key frame requests for this channel.
+    unsigned int ssrc = 0;
+    int stream_idx = 0;
+    channel_map_[new_channel_id]->GetLocalSSRC(stream_idx, &ssrc);
+    encoder_state_feedback->AddEncoder(ssrc, vie_encoder);
   } else {
     vie_encoder = ViEEncoderPtr(original_channel);
     assert(vie_encoder);
-    if (!CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer,
-                             remote_bitrate_estimator, sender)) {
+    if (!CreateChannelObject(
+        new_channel_id, vie_encoder, bandwidth_observer,
+        remote_bitrate_estimator,
+        encoder_state_feedback->GetRtcpIntraFrameObserver(), sender)) {
       vie_encoder = NULL;
     }
   }
@@ -206,9 +229,11 @@
     group = FindGroup(channel_id);
     group->SetChannelRembStatus(channel_id, false, false, vie_channel,
                                 vie_encoder);
-    unsigned int ssrc = 0;
-    vie_channel->GetRemoteSSRC(&ssrc);
-    group->RemoveChannel(channel_id, ssrc);
+    group->GetEncoderStateFeedback()->RemoveEncoder(vie_encoder);
+
+    unsigned int remote_ssrc = 0;
+    vie_channel->GetRemoteSSRC(&remote_ssrc);
+    group->RemoveChannel(channel_id, remote_ssrc);
 
     // Check if other channels are using the same encoder.
     if (ChannelUsingViEEncoder(channel_id)) {
@@ -348,11 +373,30 @@
   return true;
 }
 
+void ViEChannelManager::UpdateSsrcs(int channel_id,
+                                    const std::list<unsigned int>& ssrcs) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  ChannelGroup* channel_group = FindGroup(channel_id);
+  if (channel_group == NULL) {
+    return;
+  }
+  ViEEncoder* encoder = ViEEncoderPtr(channel_id);
+  assert(encoder);
+
+  EncoderStateFeedback* encoder_state_feedback =
+      channel_group->GetEncoderStateFeedback();
+  for (std::list<unsigned int>::const_iterator it = ssrcs.begin();
+       it != ssrcs.end(); ++it) {
+    encoder_state_feedback->AddEncoder(*it, encoder);
+  }
+}
+
 bool ViEChannelManager::CreateChannelObject(
     int channel_id,
     ViEEncoder* vie_encoder,
     RtcpBandwidthObserver* bandwidth_observer,
     RemoteBitrateEstimator* remote_bitrate_estimator,
+    RtcpIntraFrameObserver* intra_frame_observer,
     bool sender) {
   // Register the channel at the encoder.
   RtpRtcp* send_rtp_rtcp_module = vie_encoder->SendRtpRtcpModule();
@@ -360,7 +404,7 @@
   ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
                                            number_of_cores_,
                                            *module_process_thread_,
-                                           vie_encoder,
+                                           intra_frame_observer,
                                            bandwidth_observer,
                                            remote_bitrate_estimator,
                                            send_rtp_rtcp_module,
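Both channel-creation paths now register the new ViEEncoder with the group's EncoderStateFeedback under the channel's default SSRC, and UpdateSsrcs() later re-registers it under every simulcast SSRC. A compact sketch (not part of the patch) of that registration loop, using the same map shape as encoder_state_feedback; names are hypothetical.

#include <list>
#include <map>

class Encoder;  // Stand-in for ViEEncoder.
typedef std::map<unsigned int, Encoder*> SsrcEncoderMap;

// Register |encoder| under every SSRC it sends on. Mirrors
// EncoderStateFeedback::AddEncoder(): a duplicate SSRC is silently ignored.
void RegisterEncoderSsrcs(SsrcEncoderMap* encoders, Encoder* encoder,
                          const std::list<unsigned int>& ssrcs) {
  for (std::list<unsigned int>::const_iterator it = ssrcs.begin();
       it != ssrcs.end(); ++it) {
    if (encoders->find(*it) == encoders->end())
      (*encoders)[*it] = encoder;
  }
}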
diff --git a/video_engine/vie_channel_manager.h b/video_engine/vie_channel_manager.h
index 758cdd3..04bd37a 100644
--- a/video_engine/vie_channel_manager.h
+++ b/video_engine/vie_channel_manager.h
@@ -79,12 +79,17 @@
   // adding a channel.
   bool SetBandwidthEstimationMode(BandwidthEstimationMode mode);
 
+  // Updates the SSRCs for a channel. If one of the SSRCs is already registered,
+  // it will simply be ignored and no error is returned.
+  void UpdateSsrcs(int channel_id, const std::list<unsigned int>& ssrcs);
+
  private:
   // Creates a channel object connected to |vie_encoder|. Assumed to be called
   // protected.
   bool CreateChannelObject(int channel_id, ViEEncoder* vie_encoder,
                            RtcpBandwidthObserver* bandwidth_observer,
                            RemoteBitrateEstimator* remote_bitrate_estimator,
+                           RtcpIntraFrameObserver* intra_frame_observer,
                            bool sender);
 
   // Used by ViEChannelScoped, forcing a manager user to use scoped.
diff --git a/video_engine/vie_codec_impl.cc b/video_engine/vie_codec_impl.cc
index dd4d37d..7518d17 100644
--- a/video_engine/vie_codec_impl.cc
+++ b/video_engine/vie_codec_impl.cc
@@ -10,6 +10,8 @@
 
 #include "video_engine/vie_codec_impl.h"
 
+#include <list>
+
 #include "engine_configurations.h"  // NOLINT
 #include "modules/video_coding/main/interface/video_coding.h"
 #include "system_wrappers/interface/trace.h"
@@ -238,6 +240,32 @@
     }
   }
 
+  // TODO(mflodman) Break out this part into GetLocalSsrcList().
+  // Update all SSRCs to ViEEncoder.
+  std::list<unsigned int> ssrcs;
+  if (video_codec_internal.numberOfSimulcastStreams == 0) {
+    unsigned int ssrc = 0;
+    if (vie_channel->GetLocalSSRC(0, &ssrc) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Could not get ssrc", __FUNCTION__);
+    }
+    ssrcs.push_back(ssrc);
+  } else {
+    for (int idx = 0; idx < video_codec_internal.numberOfSimulcastStreams;
+         ++idx) {
+      unsigned int ssrc = 0;
+      if (vie_channel->GetLocalSSRC(idx, &ssrc) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo,
+                     ViEId(shared_data_->instance_id(), video_channel),
+                     "%s: Could not get ssrc for idx %d", __FUNCTION__, idx);
+      }
+      ssrcs.push_back(ssrc);
+    }
+  }
+  vie_encoder->SetSsrcs(ssrcs);
+  shared_data_->channel_manager()->UpdateSsrcs(video_channel, ssrcs);
+
   // Update the protection mode, we might be switching NACK/FEC.
   vie_encoder->UpdateProtectionMethod();
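A hedged sketch (not part of the patch) of the GetLocalSsrcList() helper the TODO above refers to: one SSRC per configured stream, with a single stream when simulcast is disabled. The interface is a hypothetical stand-in for the ViEChannel call used in the diff; error tracing is omitted.

#include <list>

// Minimal stand-in for the part of ViEChannel used here.
class ChannelLike {
 public:
  virtual ~ChannelLike() {}
  virtual int GetLocalSSRC(int idx, unsigned int* ssrc) = 0;
};

std::list<unsigned int> GetLocalSsrcList(ChannelLike* channel,
                                         int number_of_simulcast_streams) {
  std::list<unsigned int> ssrcs;
  const int num_streams =
      number_of_simulcast_streams == 0 ? 1 : number_of_simulcast_streams;
  for (int idx = 0; idx < num_streams; ++idx) {
    unsigned int ssrc = 0;
    if (channel->GetLocalSSRC(idx, &ssrc) != 0) {
      // Tracing of the error is omitted in this sketch.
    }
    ssrcs.push_back(ssrc);
  }
  return ssrcs;
}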
 
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index e56b6b3..1df51e5 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -72,7 +72,6 @@
     data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
     bitrate_controller_(bitrate_controller),
     paused_(false),
-    time_last_intra_request_ms_(0),
     channels_dropping_delta_frames_(0),
     drop_next_frame_(false),
     fec_enabled_(false),
@@ -585,7 +584,7 @@
 int ViEEncoder::SendKeyFrame() {
   WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
                ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-  return vcm_.IntraFrameRequest();
+  return vcm_.IntraFrameRequest(0);
 }
 
 WebRtc_Word32 ViEEncoder::SendCodecStatistics(
@@ -806,20 +805,79 @@
   has_received_rpsi_ = true;
 }
 
-void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t /*ssrc*/) {
+void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
   // Key frame request from remote side, signal to VCM.
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
                ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
 
-  WebRtc_Word64 now = TickTime::MillisecondTimestamp();
-  if (time_last_intra_request_ms_ + kViEMinKeyRequestIntervalMs > now) {
-    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
-                 ViEId(engine_id_, channel_id_),
-                 "%s: Not not encoding new intra due to timing", __FUNCTION__);
+  int idx = 0;
+  {
+    CriticalSectionScoped cs(data_cs_.get());
+    std::map<unsigned int, int>::iterator stream_it = ssrc_streams_.find(ssrc);
+    if (stream_it == ssrc_streams_.end()) {
+      assert(false);
+      return;
+    }
+    std::map<unsigned int, WebRtc_Word64>::iterator time_it =
+        time_last_intra_request_ms_.find(ssrc);
+    if (time_it == time_last_intra_request_ms_.end()) {
+      time_last_intra_request_ms_[ssrc] = 0;
+    }
+
+    WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+    if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
+      WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Not encoding new intra due to timing", __FUNCTION__);
+      return;
+    }
+    time_last_intra_request_ms_[ssrc] = now;
+    idx = stream_it->second;
+  }
+  // Release the critsect before triggering key frame.
+  vcm_.IntraFrameRequest(idx);
+}
+
+void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+  CriticalSectionScoped cs(data_cs_.get());
+  std::map<unsigned int, int>::iterator it = ssrc_streams_.find(old_ssrc);
+  if (it == ssrc_streams_.end()) {
     return;
   }
-  vcm_.IntraFrameRequest();
-  time_last_intra_request_ms_ = now;
+
+  ssrc_streams_[new_ssrc] = it->second;
+  ssrc_streams_.erase(it);
+
+  std::map<unsigned int, int64_t>::iterator time_it =
+      time_last_intra_request_ms_.find(old_ssrc);
+  int64_t last_intra_request_ms = 0;
+  if (time_it != time_last_intra_request_ms_.end()) {
+    last_intra_request_ms = time_it->second;
+    time_last_intra_request_ms_.erase(time_it);
+  }
+  time_last_intra_request_ms_[new_ssrc] = last_intra_request_ms;
+}
+
+bool ViEEncoder::SetSsrcs(const std::list<unsigned int>& ssrcs) {
+  VideoCodec codec;
+  if (vcm_.SendCodec(&codec) != 0)
+    return false;
+
+  if (codec.numberOfSimulcastStreams > 0 &&
+      ssrcs.size() != codec.numberOfSimulcastStreams) {
+    return false;
+  }
+
+  CriticalSectionScoped cs(data_cs_.get());
+  ssrc_streams_.clear();
+  time_last_intra_request_ms_.clear();
+  int idx = 0;
+  for (std::list<unsigned int>::const_iterator it = ssrcs.begin();
+       it != ssrcs.end(); ++it, ++idx) {
+    unsigned int ssrc = *it;
+    ssrc_streams_[ssrc] = idx;
+  }
+  return true;
 }
 
 // Called from ViEBitrateObserver.
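The single time_last_intra_request_ms_ value is replaced by one timestamp per SSRC, so a key frame request on one simulcast stream no longer throttles the others. A standalone sketch (not part of the patch) of that per-SSRC rate limiting; the class name and the interval constant's value are hypothetical.

#include <stdint.h>
#include <map>

static const int64_t kMinKeyRequestIntervalMs = 300;  // Hypothetical value.

class KeyFrameRequestLimiter {
 public:
  // Returns true if a key frame request for |ssrc| may be forwarded at
  // |now_ms|, and records the request time if so.
  bool AllowRequest(unsigned int ssrc, int64_t now_ms) {
    std::map<unsigned int, int64_t>::iterator it =
        last_request_ms_.find(ssrc);
    if (it != last_request_ms_.end() &&
        it->second + kMinKeyRequestIntervalMs > now_ms) {
      return false;  // Too soon after the last request for this stream.
    }
    last_request_ms_[ssrc] = now_ms;
    return true;
  }

 private:
  std::map<unsigned int, int64_t> last_request_ms_;
};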
diff --git a/video_engine/vie_encoder.h b/video_engine/vie_encoder.h
index 475c232..dc74f33 100644
--- a/video_engine/vie_encoder.h
+++ b/video_engine/vie_encoder.h
@@ -11,6 +11,9 @@
 #ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
 #define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
 
+#include <list>
+#include <map>
+
 #include "common_types.h"  // NOLINT
 #include "typedefs.h"  //NOLINT
 #include "modules/bitrate_controller/include/bitrate_controller.h"
@@ -130,12 +133,12 @@
 
   // Implements RtcpIntraFrameObserver.
   virtual void OnReceivedIntraFrameRequest(uint32_t ssrc);
+  virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
+  virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
+  virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
 
-  virtual void OnReceivedSLI(uint32_t ssrc,
-                             uint8_t picture_id);
-
-  virtual void OnReceivedRPSI(uint32_t ssrc,
-                              uint64_t picture_id);
+  // Sets SSRCs for all streams.
+  bool SetSsrcs(const std::list<unsigned int>& ssrcs);
 
   // Effect filter.
   WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
@@ -170,7 +173,7 @@
   BitrateController* bitrate_controller_;
 
   bool paused_;
-  WebRtc_Word64 time_last_intra_request_ms_;
+  std::map<unsigned int, int64_t> time_last_intra_request_ms_;
   WebRtc_Word32 channels_dropping_delta_frames_;
   bool drop_next_frame_;
 
@@ -185,6 +188,7 @@
   WebRtc_UWord8 picture_id_sli_;
   bool has_received_rpsi_;
   WebRtc_UWord64 picture_id_rpsi_;
+  std::map<unsigned int, int> ssrc_streams_;
 
   ViEFileRecorder file_recorder_;
 
diff --git a/video_engine/vie_rtp_rtcp_impl.cc b/video_engine/vie_rtp_rtcp_impl.cc
index c716e77..0c047d7 100644
--- a/video_engine/vie_rtp_rtcp_impl.cc
+++ b/video_engine/vie_rtp_rtcp_impl.cc
@@ -183,7 +183,8 @@
     shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
     return -1;
   }
-  if (vie_channel->GetLocalSSRC(&SSRC) != 0) {
+  uint8_t idx = 0;
+  if (vie_channel->GetLocalSSRC(idx, &SSRC) != 0) {
     shared_data_->SetLastError(kViERtpRtcpUnknownError);
     return -1;
   }