Updated stable to r2960.

git-svn-id: http://webrtc.googlecode.com/svn/stable/src@2961 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/build/common.gypi b/build/common.gypi
index d57b265..6f9b7e4 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -32,10 +32,12 @@
       'webrtc_root%': '<(webrtc_root)',
 
       'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
+      'include_opus%': 0,
     },
     'build_with_chromium%': '<(build_with_chromium)',
     'webrtc_root%': '<(webrtc_root)',
     'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
+    'include_opus%': '<(include_opus)',
 
     # The Chromium common.gypi we use treats all gyp files without
     # chromium_code==1 as third party code. This disables many of the
@@ -62,6 +64,9 @@
     'build_libyuv%': 1,
     'build_libvpx%': 1,
 
+    # Enable to use the Mozilla internal Opus version.
+    'build_with_mozilla%': 0,
+
     'libyuv_dir%': '<(DEPTH)/third_party/libyuv',
 
     'conditions': [
@@ -86,9 +91,6 @@
         # Disable the use of protocol buffers in production code.
         'enable_protobuf%': 0,
 
-        # Disable Mozilla internal Opus version
-        'build_with_mozilla%': 0,
-
       }, {  # Settings for the standalone (not-in-Chromium) build.
         'include_pulse_audio%': 1,
         'include_internal_audio_device%': 1,
diff --git a/common_types.h b/common_types.h
index 0d71f3f..83c55a3 100644
--- a/common_types.h
+++ b/common_types.h
@@ -131,7 +131,8 @@
     kPlaybackPerChannel = 0,
     kPlaybackAllChannelsMixed,
     kRecordingPerChannel,
-    kRecordingAllChannelsMixed
+    kRecordingAllChannelsMixed,
+    kRecordingPreprocessing
 };
 
 // Encryption enums
diff --git a/modules/audio_coding/main/source/acm_codec_database.cc b/modules/audio_coding/main/source/acm_codec_database.cc
index 2e3db19..14e6137 100644
--- a/modules/audio_coding/main/source/acm_codec_database.cc
+++ b/modules/audio_coding/main/source/acm_codec_database.cc
@@ -88,7 +88,7 @@
 #endif
 #ifdef WEBRTC_CODEC_OPUS
     #include "acm_opus.h"
-    #include "opus_interface.h"
+    #include "modules/audio_coding/codecs/opus/interface/opus_interface.h"
 #endif
 #ifdef WEBRTC_CODEC_SPEEX
     #include "acm_speex.h"
diff --git a/modules/audio_coding/main/source/acm_opus.cc b/modules/audio_coding/main/source/acm_opus.cc
index 034e57d..e7dcfe8 100644
--- a/modules/audio_coding/main/source/acm_opus.cc
+++ b/modules/audio_coding/main/source/acm_opus.cc
@@ -18,7 +18,7 @@
 #include "webrtc_neteq_help_macros.h"
 
 #ifdef WEBRTC_CODEC_OPUS
-#include "opus_interface.h"
+#include "modules/audio_coding/codecs/opus/interface/opus_interface.h"
 #endif
 
 namespace webrtc {
diff --git a/modules/audio_coding/main/source/acm_opus.h b/modules/audio_coding/main/source/acm_opus.h
index d8baa30..72794c4 100644
--- a/modules/audio_coding/main/source/acm_opus.h
+++ b/modules/audio_coding/main/source/acm_opus.h
@@ -12,9 +12,11 @@
 #define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
 
 #include "acm_generic_codec.h"
-#include "opus_interface.h"
 #include "resampler.h"
 
+struct WebRtcOpusEncInst;
+struct WebRtcOpusDecInst;
+
 namespace webrtc {
 
 class ACMOpus : public ACMGenericCodec {
@@ -48,8 +50,8 @@
 
   int16_t SetBitRateSafe(const int32_t rate);
 
-  OpusEncInst* _encoderInstPtr;
-  OpusDecInst* _decoderInstPtr;
+  WebRtcOpusEncInst* _encoderInstPtr;
+  WebRtcOpusDecInst* _decoderInstPtr;
   uint16_t _sampleFreq;
   uint16_t _bitrate;
 };
diff --git a/modules/audio_coding/main/source/audio_coding_module.gypi b/modules/audio_coding/main/source/audio_coding_module.gypi
index f62ba36..36c3cb7 100644
--- a/modules/audio_coding/main/source/audio_coding_module.gypi
+++ b/modules/audio_coding/main/source/audio_coding_module.gypi
@@ -15,7 +15,6 @@
       'iLBC',
       'iSAC',
       'iSACFix',
-      'webrtc_opus',
       'PCM16B',
       'NetEq',
       '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
@@ -24,6 +23,12 @@
       '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
     ],
     'audio_coding_defines': [],
+    'conditions': [
+      ['include_opus==1', {
+        'audio_coding_dependencies': ['webrtc_opus',],
+        'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+      }],
+    ],
   },
   'targets': [
     {
@@ -38,13 +43,11 @@
       'include_dirs': [
         '../interface',
         '../../../interface',
-        '../../codecs/opus/interface',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-        '../interface',
-        '../../../interface',
-        '../../codecs/opus/interface',
+          '../interface',
+          '../../../interface',
         ],
       },
       'sources': [
diff --git a/modules/modules.gyp b/modules/modules.gyp
index a5b31c6..9cce27c 100644
--- a/modules/modules.gyp
+++ b/modules/modules.gyp
@@ -15,7 +15,6 @@
     'audio_coding/codecs/ilbc/ilbc.gypi',
     'audio_coding/codecs/isac/main/source/isac.gypi',
     'audio_coding/codecs/isac/fix/source/isacfix.gypi',
-    'audio_coding/codecs/opus/opus.gypi',
     'audio_coding/codecs/pcm16b/pcm16b.gypi',
     'audio_coding/main/source/audio_coding_module.gypi',
     'audio_coding/neteq/neteq.gypi',
@@ -34,8 +33,10 @@
     'video_render/main/source/video_render.gypi',
     'rtp_rtcp/source/rtp_rtcp.gypi',
   ],
-
   'conditions': [
+    ['include_opus==1', {
+      'includes': ['audio_coding/codecs/opus/opus.gypi',],
+    }],
     ['include_tests==1', {
       'includes': [
         'audio_coding/codecs/isac/isac_test.gypi',
diff --git a/modules/video_capture/main/interface/video_capture.h b/modules/video_capture/main/interface/video_capture.h
index 43380ec..38e0e07 100644
--- a/modules/video_capture/main/interface/video_capture.h
+++ b/modules/video_capture/main/interface/video_capture.h
@@ -122,12 +122,6 @@
 
   virtual WebRtc_Word32 StopCapture() = 0;
 
-  // Send an image when the capture device is not running.
-  virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
-                                       WebRtc_Word32 frameRate = 1) = 0;
-
-  virtual WebRtc_Word32 StopSendImage() = 0;
-
   // Returns the name of the device used by this module.
   virtual const char* CurrentDeviceName() const = 0;
 
diff --git a/modules/video_capture/main/source/video_capture_impl.cc b/modules/video_capture/main/source/video_capture_impl.cc
index 1084ff4..c326f5c 100644
--- a/modules/video_capture/main/source/video_capture_impl.cc
+++ b/modules/video_capture/main/source/video_capture_impl.cc
@@ -49,22 +49,11 @@
 WebRtc_Word32 VideoCaptureImpl::TimeUntilNextProcess()
 {
     CriticalSectionScoped cs(&_callBackCs);
-    TickTime timeNow = TickTime::Now();
 
     WebRtc_Word32 timeToNormalProcess = kProcessInterval
         - (WebRtc_Word32)((TickTime::Now() - _lastProcessTime).Milliseconds());
-    WebRtc_Word32 timeToStartImage = timeToNormalProcess;
-    if (_startImageFrameIntervall)
-    {
-        timeToStartImage = _startImageFrameIntervall
-            - (WebRtc_Word32)((timeNow - _lastSentStartImageTime).Milliseconds());
-        if (timeToStartImage < 0)
-        {
-            timeToStartImage = 0;
-        }
-    }
-    return (timeToStartImage < timeToNormalProcess)
-            ? timeToStartImage : timeToNormalProcess;
+
+    return timeToNormalProcess;
 }
 
 // Process any pending tasks such as timeouts
@@ -112,19 +101,6 @@
 
     _lastProcessFrameCount = _incomingFrameTimes[0];
 
-    // Handle start image frame rates.
-    if (_startImageFrameIntervall
-        && (now - _lastSentStartImageTime).Milliseconds() >= _startImageFrameIntervall)
-    {
-        _lastSentStartImageTime = now;
-        if (_dataCallBack)
-        {
-            _captureFrame.CopyFrame(_startImage);
-            _captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
-            _dataCallBack->OnIncomingCapturedFrame(_id, _captureFrame,
-                                                   kVideoCodecUnknown);
-        }
-    }
     return 0;
 }
 
@@ -136,8 +112,6 @@
       _lastFrameRateCallbackTime(TickTime::Now()), _frameRateCallBack(false),
       _noPictureAlarmCallBack(false), _captureAlarm(Cleared), _setCaptureDelay(0),
       _dataCallBack(NULL), _captureCallBack(NULL),
-      _startImage(), _startImageFrameIntervall(0),
-      _lastSentStartImageTime(TickTime::Now()),
       _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kRotateNone),
       last_capture_time_(TickTime::MillisecondTimestamp())
 
@@ -210,7 +184,6 @@
 WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
     WebRtc_Word64 capture_time, VideoCodecType codec_type) {
   UpdateFrameCount();  // frame count used for local frame rate callback.
-  _startImageFrameIntervall = 0;  // prevent the start image to be displayed.
 
   const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
   // Capture delay changed
@@ -246,7 +219,6 @@
     VideoFrame& captureFrame, WebRtc_Word64 capture_time,
     VideoCodecType codec_type) {
   UpdateFrameCount();  // frame count used for local frame rate callback.
-  _startImageFrameIntervall = 0;  // prevent the start image to be displayed.
 
   const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
   // Capture delay changed
@@ -446,31 +418,6 @@
     return 0;
 }
 
-WebRtc_Word32 VideoCaptureImpl::StartSendImage(const VideoFrame& videoFrame,
-                                                     WebRtc_Word32 frameRate)
-{
-    CriticalSectionScoped cs(&_apiCs);
-    CriticalSectionScoped cs2(&_callBackCs);
-    if (frameRate < 1 || frameRate > kMaxFrameRate)
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "StartSendImage Invalid parameter. frameRate %d", (int) frameRate);
-        return -1;;
-    }
-    _startImage.CopyFrame(videoFrame);
-    _startImageFrameIntervall = 1000 / frameRate;
-    _lastSentStartImageTime = TickTime::Now();
-    return 0;
-
-}
-WebRtc_Word32 VideoCaptureImpl::StopSendImage()
-{
-    CriticalSectionScoped cs(&_apiCs);
-    CriticalSectionScoped cs2(&_callBackCs);
-    _startImageFrameIntervall = 0;
-    return 0;
-}
-
 WebRtc_Word32 VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
 {
     CriticalSectionScoped cs(&_apiCs);
diff --git a/modules/video_capture/main/source/video_capture_impl.h b/modules/video_capture/main/source/video_capture_impl.h
index 12c822f..5d9021f 100644
--- a/modules/video_capture/main/source/video_capture_impl.h
+++ b/modules/video_capture/main/source/video_capture_impl.h
@@ -59,10 +59,6 @@
     virtual WebRtc_Word32 RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
     virtual WebRtc_Word32 DeRegisterCaptureCallback();
 
-    virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
-                                         WebRtc_Word32 frameRate = 1);
-    virtual WebRtc_Word32 StopSendImage();
-
     virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS);
     virtual WebRtc_Word32 CaptureDelay();
     virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
@@ -129,9 +125,6 @@
     VideoCaptureDataCallback* _dataCallBack;
     VideoCaptureFeedBack* _captureCallBack;
 
-    VideoFrame _startImage;
-    WebRtc_Word32 _startImageFrameIntervall;
-    TickTime _lastSentStartImageTime; // last time the start image was sent
     TickTime _lastProcessFrameCount;
     TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
     VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
diff --git a/modules/video_capture/main/test/video_capture_unittest.cc b/modules/video_capture/main/test/video_capture_unittest.cc
index 1a96abe..62a0069 100644
--- a/modules/video_capture/main/test/video_capture_unittest.cc
+++ b/modules/video_capture/main/test/video_capture_unittest.cc
@@ -557,19 +557,3 @@
   EXPECT_TRUE(capture_feedback_.frame_rate() >= 25 &&
               capture_feedback_.frame_rate() <= 33);
 }
-
-// Test start image
-TEST_F(VideoCaptureExternalTest , StartImage) {
-  EXPECT_EQ(0, capture_module_->StartSendImage(
-      test_frame_, 10));
-
-  EXPECT_TRUE_WAIT(capture_callback_.incoming_frames() == 5, kTimeOut);
-  EXPECT_EQ(0, capture_module_->StopSendImage());
-
-  SleepMs(200);
-  // Test that no more start images have arrived.
-  EXPECT_TRUE(capture_callback_.incoming_frames() >= 4 &&
-              capture_callback_.incoming_frames() <= 5);
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
-}
-
diff --git a/modules/video_coding/codecs/test_framework/benchmark.cc b/modules/video_coding/codecs/test_framework/benchmark.cc
index e3048ae..8b5fe0a 100644
--- a/modules/video_coding/codecs/test_framework/benchmark.cc
+++ b/modules/video_coding/codecs/test_framework/benchmark.cc
@@ -283,7 +283,6 @@
     }
 
     _inputVideoBuffer.Free();
-    //_encodedVideoBuffer.Reset(); ?
     _encodedVideoBuffer.Free();
     _decodedVideoBuffer.Free();
 
diff --git a/modules/video_coding/codecs/test_framework/normal_async_test.cc b/modules/video_coding/codecs/test_framework/normal_async_test.cc
index 4a35d1a..6a53bc8 100644
--- a/modules/video_coding/codecs/test_framework/normal_async_test.cc
+++ b/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -197,7 +197,7 @@
     }
 }
 
-void FrameQueue::PushFrame(TestVideoEncodedBuffer *frame,
+void FrameQueue::PushFrame(VideoFrame *frame,
                            webrtc::CodecSpecificInfo* codecSpecificInfo)
 {
     WriteLockScoped cs(_queueRWLock);
@@ -234,7 +234,7 @@
                                      fragmentation)
 {
     _test.Encoded(encodedImage);
-    TestVideoEncodedBuffer *newBuffer = new TestVideoEncodedBuffer();
+    VideoFrame *newBuffer = new VideoFrame();
     //newBuffer->VerifyAndAllocate(encodedImage._length);
     newBuffer->VerifyAndAllocate(encodedImage._size);
     _encodedBytes += encodedImage._length;
@@ -247,8 +247,8 @@
     _test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
     if (_encodedFile != NULL)
     {
-      if (fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(),
-                 _encodedFile) !=  newBuffer->GetLength()) {
+      if (fwrite(newBuffer->Buffer(), 1, newBuffer->Length(),
+                 _encodedFile) !=  newBuffer->Length()) {
         return -1;
       }
     }
@@ -410,19 +410,17 @@
 {
     _lengthEncFrame = 0;
     EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
-    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
     _inputVideoBuffer.SetTimeStamp((unsigned int)
         (_encFrameCnt * 9e4 / _inst.maxFramerate));
     _inputVideoBuffer.SetWidth(_inst.width);
     _inputVideoBuffer.SetHeight(_inst.height);
-    VideoFrame rawImage;
-    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
     if (feof(_sourceFile) != 0)
     {
         return true;
     }
     _encodeCompleteTime = 0;
-    _encodeTimes[rawImage.TimeStamp()] = tGetTime();
+    _encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
     std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
 
     // check SLI queue
@@ -465,7 +463,8 @@
     }
 
     webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
-    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frame_types);
+    int ret = _encoder->Encode(_inputVideoBuffer,
+                               codecSpecificInfo, &frame_types);
     EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
     if (codecSpecificInfo != NULL)
     {
@@ -475,11 +474,12 @@
     if (_encodeCompleteTime > 0)
     {
         _totalEncodeTime += _encodeCompleteTime -
-            _encodeTimes[rawImage.TimeStamp()];
+            _encodeTimes[_inputVideoBuffer.TimeStamp()];
     }
     else
     {
-        _totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
+        _totalEncodeTime += tGetTime() -
+            _encodeTimes[_inputVideoBuffer.TimeStamp()];
     }
     assert(ret >= 0);
     return false;
@@ -488,7 +488,7 @@
 int
 NormalAsyncTest::Decode(int lossValue)
 {
-    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    _sumEncBytes += _frameToDecode->_frame->Length();
     EncodedImage encodedImage;
     VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
     encodedImage._completeFrame = !lossValue;
@@ -570,14 +570,14 @@
     }
 }
 
-void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
+void NormalAsyncTest::CopyEncodedImage(VideoFrame& dest,
                                        EncodedImage& src,
                                        void* /*codecSpecificInfo*/) const
 {
-    dest.CopyBuffer(src._length, src._buffer);
-    dest.SetFrameType(src._frameType);
-    dest.SetCaptureWidth((WebRtc_UWord16)src._encodedWidth);
-    dest.SetCaptureHeight((WebRtc_UWord16)src._encodedHeight);
+    dest.CopyFrame(src._length, src._buffer);
+    //dest.SetFrameType(src._frameType);
+    dest.SetWidth((WebRtc_UWord16)src._encodedWidth);
+    dest.SetHeight((WebRtc_UWord16)src._encodedHeight);
     dest.SetTimeStamp(src._timeStamp);
 }
 
diff --git a/modules/video_coding/codecs/test_framework/normal_async_test.h b/modules/video_coding/codecs/test_framework/normal_async_test.h
index c866217..e03f7bf 100644
--- a/modules/video_coding/codecs/test_framework/normal_async_test.h
+++ b/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -22,14 +22,14 @@
 class FrameQueueTuple
 {
 public:
-    FrameQueueTuple(TestVideoEncodedBuffer *frame,
+    FrameQueueTuple(webrtc::VideoFrame *frame,
                     const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
     :
         _frame(frame),
         _codecSpecificInfo(codecSpecificInfo)
     {};
     ~FrameQueueTuple();
-    TestVideoEncodedBuffer*          _frame;
+    webrtc::VideoFrame*          _frame;
     const webrtc::CodecSpecificInfo* _codecSpecificInfo;
 };
 
@@ -47,7 +47,7 @@
         delete &_queueRWLock;
     }
 
-    void PushFrame(TestVideoEncodedBuffer *frame,
+    void PushFrame(webrtc::VideoFrame *frame,
                    webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
     FrameQueueTuple* PopFrame();
     bool Empty();
@@ -84,7 +84,7 @@
     virtual webrtc::CodecSpecificInfo*
     CopyCodecSpecificInfo(
         const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
-    virtual void CopyEncodedImage(TestVideoEncodedBuffer& dest,
+    virtual void CopyEncodedImage(webrtc::VideoFrame& dest,
                                   webrtc::EncodedImage& src,
                                   void* /*codecSpecificInfo*/) const;
     virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
diff --git a/modules/video_coding/codecs/test_framework/normal_test.cc b/modules/video_coding/codecs/test_framework/normal_test.cc
index b5dc961..97f3f88 100644
--- a/modules/video_coding/codecs/test_framework/normal_test.cc
+++ b/modules/video_coding/codecs/test_framework/normal_test.cc
@@ -128,10 +128,10 @@
     while (!Encode())
     {
         DoPacketLoss();
-        _encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
-        if (fwrite(_encodedVideoBuffer.GetBuffer(), 1,
-                   _encodedVideoBuffer.GetLength(),
-                   _encodedFile) !=  _encodedVideoBuffer.GetLength()) {
+        _encodedVideoBuffer.SetLength(_encodedVideoBuffer.Length());
+        if (fwrite(_encodedVideoBuffer.Buffer(), 1,
+                   _encodedVideoBuffer.Length(),
+                   _encodedFile) !=  _encodedVideoBuffer.Length()) {
           return;
         }
         decodeLength = Decode();
@@ -140,7 +140,7 @@
             fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
             exit(EXIT_FAILURE);
         }
-        if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+        if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
                    _decodedFile) != static_cast<unsigned int>(decodeLength)) {
           return;
         }
@@ -157,7 +157,7 @@
             fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
             exit(EXIT_FAILURE);
         }
-        if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+        if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
                    _decodedFile) != static_cast<unsigned int>(decodeLength)) {
           return;
         }
@@ -174,8 +174,6 @@
     (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
 
     _inputVideoBuffer.Free();
-    _encodedVideoBuffer.Reset();
-    _decodedVideoBuffer.Free();
 
     _encoder->Release();
     _decoder->Release();
@@ -192,7 +190,7 @@
     {
         return true;
     }
-    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
     _inputVideoBuffer.SetTimeStamp(_framecnt);
 
     // This multiple attempt ridiculousness is to accomodate VP7:
@@ -213,8 +211,8 @@
 
         endtime = clock()/(double)CLOCKS_PER_SEC;
 
-        _encodedVideoBuffer.SetCaptureHeight(_inst.height);
-        _encodedVideoBuffer.SetCaptureWidth(_inst.width);
+        _encodedVideoBuffer.SetHeight(_inst.height);
+        _encodedVideoBuffer.SetWidth(_inst.width);
         if (_lengthEncFrame < 0)
         {
             (*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
@@ -256,7 +254,6 @@
     {
         return lengthDecFrame;
     }
-    _encodedVideoBuffer.Reset();
-    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.SetLength(0);
     return lengthDecFrame;
 }
diff --git a/modules/video_coding/codecs/test_framework/packet_loss_test.cc b/modules/video_coding/codecs/test_framework/packet_loss_test.cc
index aa51fc5..a9d4b37 100644
--- a/modules/video_coding/codecs/test_framework/packet_loss_test.cc
+++ b/modules/video_coding/codecs/test_framework/packet_loss_test.cc
@@ -172,13 +172,15 @@
 int PacketLossTest::DoPacketLoss()
 {
     // Only packet loss for delta frames
-    if (_frameToDecode->_frame->GetLength() == 0 || _frameToDecode->_frame->GetFrameType() != kDeltaFrame)
+    // TODO(mikhal): Identify delta frames
+    // First frame so never a delta frame.
+    if (_frameToDecode->_frame->Length() == 0 || _sumChannelBytes == 0)
     {
-        _sumChannelBytes += _frameToDecode->_frame->GetLength();
+        _sumChannelBytes += _frameToDecode->_frame->Length();
         return 0;
     }
     unsigned char *packet = NULL;
-    TestVideoEncodedBuffer newEncBuf;
+    VideoFrame newEncBuf;
     newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
     _inBufIdx = 0;
     _outBufIdx = 0;
@@ -210,24 +212,24 @@
     {
         lossResult += (kept==0);	// 2 = all lost = full frame
     }
-    _frameToDecode->_frame->CopyBuffer(newEncBuf.GetLength(), newEncBuf.GetBuffer());
-    _sumChannelBytes += newEncBuf.GetLength();
+    _frameToDecode->_frame->CopyFrame(newEncBuf.Length(), newEncBuf.Buffer());
+    _sumChannelBytes += newEncBuf.Length();
     _totalKept += kept;
     _totalThrown += thrown;
 
     return lossResult;
     //printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
-    //printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());
+    //printf("Encoded left: %d bytes\n", _encodedVideoBuffer.Length());
 }
 
 int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
 {
-    unsigned char *buf = _frameToDecode->_frame->GetBuffer();
+    unsigned char *buf = _frameToDecode->_frame->Buffer();
     *pkg = buf + _inBufIdx;
-    if (static_cast<long>(_frameToDecode->_frame->GetLength()) - _inBufIdx <= mtu)
+    if (static_cast<long>(_frameToDecode->_frame->Length()) - _inBufIdx <= mtu)
     {
-        int size = _frameToDecode->_frame->GetLength() - _inBufIdx;
-        _inBufIdx = _frameToDecode->_frame->GetLength();
+        int size = _frameToDecode->_frame->Length() - _inBufIdx;
+        _inBufIdx = _frameToDecode->_frame->Length();
         return size;
     }
     _inBufIdx += mtu;
@@ -239,14 +241,14 @@
     return size;
 }
 
-void PacketLossTest::InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size)
+void PacketLossTest::InsertPacket(VideoFrame *buf, unsigned char *pkg, int size)
 {
-    if (static_cast<long>(buf->GetSize()) - _outBufIdx < size)
+    if (static_cast<long>(buf->Size()) - _outBufIdx < size)
     {
         printf("InsertPacket error!\n");
         return;
     }
-    memcpy(buf->GetBuffer() + _outBufIdx, pkg, size);
-    buf->UpdateLength(buf->GetLength() + size);
+    memcpy(buf->Buffer() + _outBufIdx, pkg, size);
+    buf->SetLength(buf->Length() + size);
     _outBufIdx += size;
 }
diff --git a/modules/video_coding/codecs/test_framework/packet_loss_test.h b/modules/video_coding/codecs/test_framework/packet_loss_test.h
index 1051ce5..1702f50 100644
--- a/modules/video_coding/codecs/test_framework/packet_loss_test.h
+++ b/modules/video_coding/codecs/test_framework/packet_loss_test.h
@@ -36,7 +36,8 @@
     virtual int DoPacketLoss();
     virtual int NextPacket(int size, unsigned char **pkg);
     virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
-    virtual void InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size);
+    virtual void InsertPacket(webrtc::VideoFrame *buf, unsigned char *pkg,
+                              int size);
     int _inBufIdx;
     int _outBufIdx;
 
diff --git a/modules/video_coding/codecs/test_framework/performance_test.cc b/modules/video_coding/codecs/test_framework/performance_test.cc
deleted file mode 100644
index d235928..0000000
--- a/modules/video_coding/codecs/test_framework/performance_test.cc
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "performance_test.h"
-
-#include <assert.h>
-
-#include "gtest/gtest.h"
-#include "testsupport/fileutils.h"
-#include "tick_util.h"
-
-using namespace webrtc;
-
-#define NUM_FRAMES 300
-
-PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate)
-:
-NormalAsyncTest(bitRate),
-_numCodecs(0),
-_tests(NULL),
-_encoders(NULL),
-_decoders(NULL),
-_threads(NULL),
-_rawImageLock(NULL),
-_encodeEvents(new EventWrapper*[1]),
-_stopped(true),
-_encodeCompleteCallback(NULL),
-_decodeCompleteCallback(NULL)
-{
-}
-
-PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs)
-:
-NormalAsyncTest(bitRate),
-_numCodecs(numCodecs),
-_tests(new PerformanceTest*[_numCodecs]),
-_encoders(new VideoEncoder*[_numCodecs]),
-_decoders(new VideoDecoder*[_numCodecs]),
-_threads(new ThreadWrapper*[_numCodecs]),
-_rawImageLock(RWLockWrapper::CreateRWLock()),
-_encodeEvents(new EventWrapper*[_numCodecs]),
-_stopped(true),
-_encodeCompleteCallback(NULL),
-_decodeCompleteCallback(NULL)
-{
-    for (int i=0; i < _numCodecs; i++)
-    {
-        _tests[i] = new PerformanceTest(bitRate);
-        _encodeEvents[i] = EventWrapper::Create();
-    }
-}
-
-PerformanceTest::~PerformanceTest()
-{
-    if (_encoders != NULL)
-    {
-        delete [] _encoders;
-    }
-    if (_decoders != NULL)
-    {
-        delete [] _decoders;
-    }
-    if (_tests != NULL)
-    {
-        delete [] _tests;
-    }
-    if (_threads != NULL)
-    {
-        delete [] _threads;
-    }
-    if (_rawImageLock != NULL)
-    {
-        delete _rawImageLock;
-    }
-    if (_encodeEvents != NULL)
-    {
-        delete [] _encodeEvents;
-    }
-}
-
-void
-PerformanceTest::Setup()
-{
-    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
-    NormalAsyncTest::Setup(); // Setup input and output files
-    CodecSettings(352, 288, 30, _bitRate); // common to all codecs
-    for (int i=0; i < _numCodecs; i++)
-    {
-        _encoders[i] = CreateEncoder();
-        _decoders[i] = CreateDecoder();
-        if (_encoders[i] == NULL)
-        {
-            printf("Must create a codec specific test!\n");
-            exit(EXIT_FAILURE);
-        }
-        if(_encoders[i]->InitEncode(&_inst, 4, 1440) < 0)
-        {
-            exit(EXIT_FAILURE);
-        }
-        if (_decoders[i]->InitDecode(&_inst, 1))
-        {
-            exit(EXIT_FAILURE);
-        }
-        _tests[i]->SetEncoder(_encoders[i]);
-        _tests[i]->SetDecoder(_decoders[i]);
-        _tests[i]->_rawImageLock = _rawImageLock;
-        _encodeEvents[i]->Reset();
-        _tests[i]->_encodeEvents[0] = _encodeEvents[i];
-        _tests[i]->_inst = _inst;
-        _threads[i] = ThreadWrapper::CreateThread(PerformanceTest::RunThread, _tests[i]);
-        unsigned int id = 0;
-        _tests[i]->_stopped = false;
-        _threads[i]->Start(id);
-    }
-}
-
-void
-PerformanceTest::Perform()
-{
-    Setup();
-    EventWrapper& sleepEvent = *EventWrapper::Create();
-    const WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
-    for (int i=0; i < NUM_FRAMES; i++)
-    {
-        {
-            // Read a new frame from file
-            WriteLockScoped imageLock(*_rawImageLock);
-            _lengthEncFrame = 0;
-            EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile),
-                      0u);
-            if (feof(_sourceFile) != 0)
-            {
-                rewind(_sourceFile);
-            }
-            _inputVideoBuffer.VerifyAndAllocate(_inst.width*_inst.height*3/2);
-            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
-            _inputVideoBuffer.SetTimeStamp((unsigned int) (_encFrameCnt * 9e4 / static_cast<float>(_inst.maxFramerate)));
-            _inputVideoBuffer.SetWidth(_inst.width);
-            _inputVideoBuffer.SetHeight(_inst.height);
-            for (int i=0; i < _numCodecs; i++)
-            {
-                _tests[i]->_inputVideoBuffer.CopyPointer(_inputVideoBuffer);
-                _encodeEvents[i]->Set();
-            }
-        }
-        if (i < NUM_FRAMES - 1)
-        {
-            sleepEvent.Wait(33);
-        }
-    }
-    for (int i=0; i < _numCodecs; i++)
-    {
-        _tests[i]->_stopped = true;
-        _encodeEvents[i]->Set();
-        _threads[i]->Stop();
-    }
-    const WebRtc_UWord32 totalTime =
-            static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp() - startTime);
-    printf("Total time: %u\n", totalTime);
-    delete &sleepEvent;
-    Teardown();
-}
-
-void PerformanceTest::Teardown()
-{
-    if (_encodeCompleteCallback != NULL)
-    {
-        delete _encodeCompleteCallback;
-    }
-    if (_decodeCompleteCallback != NULL)
-    {
-        delete _decodeCompleteCallback;
-    }
-    // main test only, all others have numCodecs = 0:
-    if (_numCodecs > 0)
-    {
-        WriteLockScoped imageLock(*_rawImageLock);
-        _inputVideoBuffer.Free();
-        NormalAsyncTest::Teardown();
-    }
-    for (int i=0; i < _numCodecs; i++)
-    {
-        _encoders[i]->Release();
-        delete _encoders[i];
-        _decoders[i]->Release();
-        delete _decoders[i];
-        _tests[i]->_inputVideoBuffer.ClearPointer();
-        _tests[i]->_rawImageLock = NULL;
-        _tests[i]->Teardown();
-        delete _tests[i];
-        delete _encodeEvents[i];
-        delete _threads[i];
-    }
-}
-
-bool
-PerformanceTest::RunThread(void* obj)
-{
-    PerformanceTest& test = *static_cast<PerformanceTest*>(obj);
-    return test.PerformSingleTest();
-}
-
-bool
-PerformanceTest::PerformSingleTest()
-{
-    if (_encodeCompleteCallback == NULL)
-    {
-        _encodeCompleteCallback = new VideoEncodeCompleteCallback(NULL, &_frameQueue, *this);
-        _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
-    }
-    if (_decodeCompleteCallback == NULL)
-    {
-        _decodeCompleteCallback = new VideoDecodeCompleteCallback(NULL, *this);
-        _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
-    }
-    (*_encodeEvents)->Wait(WEBRTC_EVENT_INFINITE); // The first event is used for every single test
-    CodecSpecific_InitBitrate();
-    bool complete = false;
-    {
-        ReadLockScoped imageLock(*_rawImageLock);
-        complete = Encode();
-    }
-    if (!_frameQueue.Empty() || complete)
-    {
-        while (!_frameQueue.Empty())
-        {
-            _frameToDecode = static_cast<FrameQueueTuple *>(_frameQueue.PopFrame());
-            int lost = DoPacketLoss();
-            if (lost == 2)
-            {
-                // Lost the whole frame, continue
-                _missingFrames = true;
-                delete _frameToDecode;
-                _frameToDecode = NULL;
-                continue;
-            }
-            int ret = Decode(lost);
-            delete _frameToDecode;
-            _frameToDecode = NULL;
-            if (ret < 0)
-            {
-                fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
-                return false;
-            }
-            else if (ret < 0)
-            {
-                fprintf(stderr, "\n\nPositive return value from decode!\n\n");
-                return false;
-            }
-        }
-    }
-    if (_stopped)
-    {
-        return false;
-    }
-    return true;
-}
-
-bool PerformanceTest::Encode()
-{
-    VideoFrame rawImage;
-    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
-    std::vector<VideoFrameType> frameTypes(1, kDeltaFrame);
-    if (_requestKeyFrame && !(_encFrameCnt%50))
-    {
-        frameTypes[0] = kKeyFrame;
-    }
-    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
-    int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameTypes);
-    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
-    if (codecSpecificInfo != NULL)
-    {
-        delete codecSpecificInfo;
-        codecSpecificInfo = NULL;
-    }
-    assert(ret >= 0);
-    return false;
-}
-
-int PerformanceTest::Decode(int lossValue)
-{
-    EncodedImage encodedImage;
-    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
-    encodedImage._completeFrame = !lossValue;
-    int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
-                               _frameToDecode->_codecSpecificInfo);
-    _missingFrames = false;
-    return ret;
-}
diff --git a/modules/video_coding/codecs/test_framework/performance_test.h b/modules/video_coding/codecs/test_framework/performance_test.h
deleted file mode 100644
index d060832..0000000
--- a/modules/video_coding/codecs/test_framework/performance_test.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
-
-#include "normal_async_test.h"
-#include "thread_wrapper.h"
-#include "rw_lock_wrapper.h"
-#include "event_wrapper.h"
-
-class PerformanceTest : public NormalAsyncTest
-{
-public:
-    PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs);
-    virtual ~PerformanceTest();
-
-    virtual void Perform();
-    virtual void Print() {};
-
-protected:
-    PerformanceTest(WebRtc_UWord32 bitRate);
-    virtual void Setup();
-    virtual bool Encode();
-    virtual int Decode(int lossValue = 0);
-    virtual void Teardown();
-    static bool RunThread(void* obj);
-    bool PerformSingleTest();
-
-    virtual webrtc::VideoEncoder* CreateEncoder() const { return NULL; };
-    virtual webrtc::VideoDecoder* CreateDecoder() const { return NULL; };
-
-    WebRtc_UWord8                 _numCodecs;
-    PerformanceTest**             _tests;
-    webrtc::VideoEncoder**        _encoders;
-    webrtc::VideoDecoder**        _decoders;
-    webrtc::ThreadWrapper**       _threads;
-    webrtc::RWLockWrapper*        _rawImageLock;
-    webrtc::EventWrapper**        _encodeEvents;
-    FrameQueue                    _frameQueue;
-    bool                          _stopped;
-    webrtc::EncodedImageCallback* _encodeCompleteCallback;
-    webrtc::DecodedImageCallback* _decodeCompleteCallback;
-    FILE*                         _outFile;
-};
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
diff --git a/modules/video_coding/codecs/test_framework/test.cc b/modules/video_coding/codecs/test_framework/test.cc
index de04213..71d8ff0 100644
--- a/modules/video_coding/codecs/test_framework/test.cc
+++ b/modules/video_coding/codecs/test_framework/test.cc
@@ -135,26 +135,17 @@
 }
 
 void
-CodecTest::VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
-                                 VideoFrame &image)
-{
-  // TODO(mikhal): Use videoBuffer in lieu of TestVideoBuffer.
-  image.CopyFrame(videoBuffer.GetLength(), videoBuffer.GetBuffer());
-  image.SetWidth(videoBuffer.GetWidth());
-  image.SetHeight(videoBuffer.GetHeight());
-  image.SetTimeStamp(videoBuffer.GetTimeStamp());
-}
-void
-CodecTest::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,
+CodecTest::VideoEncodedBufferToEncodedImage(VideoFrame& videoBuffer,
                                             EncodedImage &image)
 {
-    image._buffer = videoBuffer.GetBuffer();
-    image._length = videoBuffer.GetLength();
-    image._size = videoBuffer.GetSize();
-    image._frameType = static_cast<VideoFrameType>(videoBuffer.GetFrameType());
-    image._timeStamp = videoBuffer.GetTimeStamp();
-    image._encodedWidth = videoBuffer.GetCaptureWidth();
-    image._encodedHeight = videoBuffer.GetCaptureHeight();
+    image._buffer = videoBuffer.Buffer();
+    image._length = videoBuffer.Length();
+    image._size = videoBuffer.Size();
+    //image._frameType = static_cast<VideoFrameType>
+    //  (videoBuffer.GetFrameType());
+    image._timeStamp = videoBuffer.TimeStamp();
+    image._encodedWidth = videoBuffer.Width();
+    image._encodedHeight = videoBuffer.Height();
     image._completeFrame = true;
 }
 
diff --git a/modules/video_coding/codecs/test_framework/test.h b/modules/video_coding/codecs/test_framework/test.h
index 0df6a0a..27207e0 100644
--- a/modules/video_coding/codecs/test_framework/test.h
+++ b/modules/video_coding/codecs/test_framework/test.h
@@ -11,8 +11,8 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
 
+#include "modules/interface/module_common_types.h"
 #include "video_codec_interface.h"
-#include "video_buffer.h"
 #include <string>
 #include <fstream>
 #include <cstdlib>
@@ -40,10 +40,8 @@
     double ActualBitRate(int nFrames);
     virtual bool PacketLoss(double lossRate, int /*thrown*/);
     static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
-    static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
-                                      webrtc::VideoFrame &image);
     static void VideoEncodedBufferToEncodedImage(
-        TestVideoEncodedBuffer& videoBuffer,
+        webrtc::VideoFrame& videoBuffer,
         webrtc::EncodedImage &image);
 
     webrtc::VideoEncoder*   _encoder;
@@ -51,9 +49,11 @@
     WebRtc_UWord32          _bitRate;
     unsigned int            _lengthSourceFrame;
     unsigned char*          _sourceBuffer;
-    TestVideoBuffer         _inputVideoBuffer;
-    TestVideoEncodedBuffer  _encodedVideoBuffer;
-    TestVideoBuffer         _decodedVideoBuffer;
+    webrtc::VideoFrame      _inputVideoBuffer;
+    // TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a
+    // designated class.
+    webrtc::VideoFrame      _encodedVideoBuffer;
+    webrtc::VideoFrame      _decodedVideoBuffer;
     webrtc::VideoCodec      _inst;
     std::fstream*           _log;
     std::string             _inname;
diff --git a/modules/video_coding/codecs/test_framework/test_framework.gypi b/modules/video_coding/codecs/test_framework/test_framework.gypi
index fed82b4..438dd2d 100644
--- a/modules/video_coding/codecs/test_framework/test_framework.gypi
+++ b/modules/video_coding/codecs/test_framework/test_framework.gypi
@@ -40,10 +40,8 @@
             'normal_async_test.h',
             'normal_test.h',
             'packet_loss_test.h',
-            'performance_test.h',
             'test.h',
             'unit_test.h',
-            'video_buffer.h',
             'video_source.h',
 
             # source files
@@ -51,10 +49,8 @@
             'normal_async_test.cc',
             'normal_test.cc',
             'packet_loss_test.cc',
-            'performance_test.cc',
             'test.cc',
             'unit_test.cc',
-            'video_buffer.cc',
             'video_source.cc',
           ],
         },
diff --git a/modules/video_coding/codecs/test_framework/unit_test.cc b/modules/video_coding/codecs/test_framework/unit_test.cc
index 4863f0e..8d38593 100644
--- a/modules/video_coding/codecs/test_framework/unit_test.cc
+++ b/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -96,12 +96,12 @@
                                         fragmentation)
 {
     _encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
-    _encodedVideoBuffer->CopyBuffer(encodedImage._size, encodedImage._buffer);
-    _encodedVideoBuffer->UpdateLength(encodedImage._length);
-    _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
-    _encodedVideoBuffer->SetCaptureWidth(
+    _encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer);
+    _encodedVideoBuffer->SetLength(encodedImage._length);
+//    _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
+    _encodedVideoBuffer->SetWidth(
         (WebRtc_UWord16)encodedImage._encodedWidth);
-    _encodedVideoBuffer->SetCaptureHeight(
+    _encodedVideoBuffer->SetHeight(
         (WebRtc_UWord16)encodedImage._encodedHeight);
     _encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
     _encodeComplete = true;
@@ -111,7 +111,7 @@
 
 WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
 {
-    _decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
+    _decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
     _decodedVideoBuffer->SetWidth(image.Width());
     _decodedVideoBuffer->SetHeight(image.Height());
     _decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
@@ -155,7 +155,7 @@
     {
         if (_encodeCompleteCallback->EncodeComplete())
         {
-            return _encodedVideoBuffer.GetLength();
+            return _encodedVideoBuffer.Length();
         }
     }
     return 0;
@@ -169,7 +169,7 @@
     {
         if (_decodeCompleteCallback->DecodeComplete())
         {
-            return _decodedVideoBuffer.GetLength();
+            return _decodedVideoBuffer.Length();
         }
     }
     return 0;
@@ -227,7 +227,7 @@
     _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
     ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
                            == _lengthSourceFrame);
-    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
     _inputVideoBuffer.SetWidth(_source->GetWidth());
     _inputVideoBuffer.SetHeight(_source->GetHeight());
     rewind(_sourceFile);
@@ -235,16 +235,13 @@
     // Get a reference encoded frame.
     _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
 
-    VideoFrame image;
-    VideoBufferToRawImage(_inputVideoBuffer, image);
-
     // Ensures our initial parameters are valid.
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(image, NULL, NULL);
+    _encoder->Encode(_inputVideoBuffer, NULL, NULL);
     _refEncFrameLength = WaitForEncodedFrame();
     ASSERT_TRUE(_refEncFrameLength > 0);
     _refEncFrame = new unsigned char[_refEncFrameLength];
-    memcpy(_refEncFrame, _encodedVideoBuffer.GetBuffer(), _refEncFrameLength);
+    memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength);
 
     // Get a reference decoded frame.
     _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
@@ -261,11 +258,10 @@
             _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
             ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
                 _sourceFile) == _lengthSourceFrame);
-            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+            _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
             _inputVideoBuffer.SetWidth(_source->GetWidth());
             _inputVideoBuffer.SetHeight(_source->GetHeight());
-            VideoBufferToRawImage(_inputVideoBuffer, image);
-            _encoder->Encode(image, NULL, NULL);
+            _encoder->Encode(_inputVideoBuffer, NULL, NULL);
             ASSERT_TRUE(WaitForEncodedFrame() > 0);
         }
         EncodedImage encodedImage;
@@ -273,13 +269,12 @@
         ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
                                == WEBRTC_VIDEO_CODEC_OK);
         frameLength = WaitForDecodedFrame();
-        _encodedVideoBuffer.Reset();
-        _encodedVideoBuffer.UpdateLength(0);
+        _encodedVideoBuffer.SetLength(0);
         i++;
     }
     rewind(_sourceFile);
     EXPECT_TRUE(frameLength == _lengthSourceFrame);
-    memcpy(_refDecFrame, _decodedVideoBuffer.GetBuffer(), _lengthSourceFrame);
+    memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame);
 }
 
 void
@@ -318,8 +313,7 @@
     VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
     int ret = _decoder->Decode(encodedImage, 0, NULL);
     int frameLength = WaitForDecodedFrame();
-    _encodedVideoBuffer.Reset();
-    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.SetLength(0);
     return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
 }
 
@@ -338,8 +332,7 @@
         == _lengthSourceFrame));
     EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
         == _lengthSourceFrame));
-    _encodedVideoBuffer.Reset();
-    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.SetLength(0);
     return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
 }
 
@@ -357,8 +350,7 @@
     //-- Calls before InitEncode() --
     // We want to revert the initialization done in Setup().
     EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
-    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL)
+    EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL)
                == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
 
     //-- InitEncode() errors --
@@ -424,63 +416,59 @@
     EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
     _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
-    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
     _inputVideoBuffer.SetWidth(_source->GetWidth());
     _inputVideoBuffer.SetHeight(_source->GetHeight());
 
     //----- Encoder stress tests -----
 
     // Vary frame rate and I-frame request.
-    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
     for (int i = 1; i <= 60; i++)
     {
         VideoFrameType frame_type = !(i % 2) ? kKeyFrame : kDeltaFrame;
         std::vector<VideoFrameType> frame_types(1, frame_type);
-        EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &frame_types) ==
+        EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, &frame_types) ==
             WEBRTC_VIDEO_CODEC_OK);
         EXPECT_TRUE(WaitForEncodedFrame() > 0);
     }
 
     // Init then encode.
-    _encodedVideoBuffer.UpdateLength(0);
-    _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
+    _encodedVideoBuffer.SetLength(0);
+    EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     EXPECT_TRUE(WaitForEncodedFrame() > 0);
 
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, NULL);
+    _encoder->Encode(_inputVideoBuffer, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
-            _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+            _encodedVideoBuffer.Buffer(), frameLength) == true);
 
     // Reset then encode.
-    _encodedVideoBuffer.UpdateLength(0);
-    _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
+    _encodedVideoBuffer.SetLength(0);
+    EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     WaitForEncodedFrame();
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, NULL);
+    _encoder->Encode(_inputVideoBuffer, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
-        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+        _encodedVideoBuffer.Buffer(), frameLength) == true);
 
     // Release then encode.
-    _encodedVideoBuffer.UpdateLength(0);
-    _encodedVideoBuffer.Reset();
-    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
+    _encodedVideoBuffer.SetLength(0);
+    EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
         WEBRTC_VIDEO_CODEC_OK);
     WaitForEncodedFrame();
     EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
     EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
-    _encoder->Encode(inputImage, NULL, NULL);
+    _encoder->Encode(_inputVideoBuffer, NULL, NULL);
     frameLength = WaitForEncodedFrame();
     EXPECT_TRUE(frameLength > 0);
     EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
-        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+        _encodedVideoBuffer.Buffer(), frameLength) == true);
 
     //----- Decoder parameter tests -----
 
@@ -517,7 +505,7 @@
     {
         ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
             == _refEncFrameLength);
-        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        _encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
         VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
         int ret = _decoder->Decode(encodedImage, false, NULL);
         EXPECT_TRUE(ret <= 0);
@@ -527,7 +515,7 @@
         }
 
         memset(tmpBuf, 0, _refEncFrameLength);
-        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        _encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
         VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
         ret = _decoder->Decode(encodedImage, false, NULL);
         EXPECT_TRUE(ret <= 0);
@@ -538,8 +526,8 @@
     }
     rewind(_sourceFile);
 
-    _encodedVideoBuffer.UpdateLength(_refEncFrameLength);
-    _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, _refEncFrame);
+    _encodedVideoBuffer.SetLength(_refEncFrameLength);
+    _encodedVideoBuffer.CopyFrame(_refEncFrameLength, _refEncFrame);
 
     // Init then decode.
     EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
@@ -551,7 +539,7 @@
         _decoder->Decode(encodedImage, false, NULL);
         frameLength = WaitForDecodedFrame();
     }
-    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
         _refDecFrame, _lengthSourceFrame) == true);
 
     // Reset then decode.
@@ -563,7 +551,7 @@
         _decoder->Decode(encodedImage, false, NULL);
         frameLength = WaitForDecodedFrame();
     }
-    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
         _refDecFrame, _lengthSourceFrame) == true);
 
     // Decode with other size, reset, then decode with original size again
@@ -583,8 +571,8 @@
         EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
             WEBRTC_VIDEO_CODEC_OK);
         VideoFrame tempInput;
-        unsigned int tmpLength = inputImage.Length() / 4;
-        tempInput.CopyFrame(tmpLength, inputImage.Buffer());
+        unsigned int tmpLength = _inputVideoBuffer.Length() / 4;
+        tempInput.CopyFrame(tmpLength, _inputVideoBuffer.Buffer());
         tempInput.SetWidth(tempInst.width);
         tempInput.SetHeight(tempInst.height);
         _encoder->Encode(tempInput, NULL, NULL);
@@ -605,7 +593,7 @@
         EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
         EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
             WEBRTC_VIDEO_CODEC_OK);
-        _encoder->Encode(inputImage, NULL, NULL);
+        _encoder->Encode(_inputVideoBuffer, NULL, NULL);
         frameLength = WaitForEncodedFrame();
         EXPECT_TRUE(frameLength > 0);
 
@@ -620,7 +608,7 @@
         }
 
         // check that decoded frame matches with reference
-        EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
             _refDecFrame, _lengthSourceFrame) == true);
 
     }
@@ -636,10 +624,9 @@
         _decoder->Decode(encodedImage, false, NULL);
         frameLength = WaitForDecodedFrame();
     }
-    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
         _refDecFrame, _lengthSourceFrame) == true);
-    _encodedVideoBuffer.UpdateLength(0);
-    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.SetLength(0);
 
     delete [] tmpBuf;
 
@@ -661,16 +648,15 @@
     while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
         _lengthSourceFrame)
     {
-        _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+        _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
         _inputVideoBuffer.SetTimeStamp(frames);
-        VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-        ASSERT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
+        ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
             WEBRTC_VIDEO_CODEC_OK);
         frameLength = WaitForEncodedFrame();
         //ASSERT_TRUE(frameLength);
         EXPECT_TRUE(frameLength > 0);
-        encTimeStamp = _encodedVideoBuffer.GetTimeStamp();
-        EXPECT_TRUE(_inputVideoBuffer.GetTimeStamp() ==
+        encTimeStamp = _encodedVideoBuffer.TimeStamp();
+        EXPECT_TRUE(_inputVideoBuffer.TimeStamp() ==
                 static_cast<unsigned>(encTimeStamp));
 
         frameLength = Decode();
@@ -684,7 +670,7 @@
         {
             encTimeStamp = 0;
         }
-        EXPECT_TRUE(_decodedVideoBuffer.GetTimeStamp() ==
+        EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() ==
                 static_cast<unsigned>(encTimeStamp));
         frames++;
     }
@@ -737,20 +723,18 @@
         while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
             _lengthSourceFrame)
         {
-            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
-            _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.GetTimeStamp() +
+            _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
+            _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() +
                 static_cast<WebRtc_UWord32>(9e4 /
                     static_cast<float>(_inst.maxFramerate)));
-            VideoBufferToRawImage(_inputVideoBuffer, inputImage);
-            ASSERT_EQ(_encoder->Encode(inputImage, NULL, NULL),
+            ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL),
                       WEBRTC_VIDEO_CODEC_OK);
             frameLength = WaitForEncodedFrame();
             ASSERT_GE(frameLength, 0u);
             totalBytes += frameLength;
             frames++;
 
-            _encodedVideoBuffer.UpdateLength(0);
-            _encodedVideoBuffer.Reset();
+            _encodedVideoBuffer.SetLength(0);
         }
         WebRtc_UWord32 actualBitrate =
             (totalBytes  / frames * _inst.maxFramerate * 8)/1000;
@@ -765,7 +749,6 @@
         ASSERT_TRUE(feof(_sourceFile) != 0);
         rewind(_sourceFile);
     }
-    inputImage.Free();
 }
 
 bool
diff --git a/modules/video_coding/codecs/test_framework/unit_test.h b/modules/video_coding/codecs/test_framework/unit_test.h
index 0a4fee1..2ff8959 100644
--- a/modules/video_coding/codecs/test_framework/unit_test.h
+++ b/modules/video_coding/codecs/test_framework/unit_test.h
@@ -73,7 +73,7 @@
 class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
 {
 public:
-    UnitTestEncodeCompleteCallback(TestVideoEncodedBuffer* buffer,
+    UnitTestEncodeCompleteCallback(webrtc::VideoFrame* buffer,
                                    WebRtc_UWord32 decoderSpecificSize = 0,
                                    void* decoderSpecificInfo = NULL) :
       _encodedVideoBuffer(buffer),
@@ -86,7 +86,7 @@
     // Note that this only makes sense if an encode has been completed
     webrtc::VideoFrameType EncodedFrameType() const;
 private:
-    TestVideoEncodedBuffer* _encodedVideoBuffer;
+    webrtc::VideoFrame* _encodedVideoBuffer;
     bool _encodeComplete;
     webrtc::VideoFrameType _encodedFrameType;
 };
@@ -94,12 +94,12 @@
 class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
 {
 public:
-    UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
+    UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) :
         _decodedVideoBuffer(buffer), _decodeComplete(false) {}
     WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
     bool DecodeComplete();
 private:
-    TestVideoBuffer* _decodedVideoBuffer;
+    webrtc::VideoFrame* _decodedVideoBuffer;
     bool _decodeComplete;
 };
 
diff --git a/modules/video_coding/codecs/test_framework/video_buffer.cc b/modules/video_coding/codecs/test_framework/video_buffer.cc
deleted file mode 100644
index 3958e90..0000000
--- a/modules/video_coding/codecs/test_framework/video_buffer.cc
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-#include <string.h>
-#include "video_buffer.h"
-
-using namespace webrtc;
-
-TestVideoBuffer::TestVideoBuffer():
-_buffer(0),
-_bufferSize(0),
-_bufferLength(0),
-_startOffset(0),
-_timeStamp(0),
-_width(0),
-_height(0)
-{
-   //
-}
-
-
-TestVideoBuffer::~TestVideoBuffer()
-{
-    _timeStamp = 0;
-    _startOffset = 0;
-    _bufferLength = 0;
-    _bufferSize = 0;
-
-    if(_buffer)
-    {
-        delete [] _buffer;
-        _buffer = 0;
-    }
-}
-
-TestVideoBuffer::TestVideoBuffer(const TestVideoBuffer& rhs)
-:
-_buffer(0),
-_bufferSize(rhs._bufferSize),
-_bufferLength(rhs._bufferLength),
-_startOffset(rhs._startOffset),
-_timeStamp(rhs._timeStamp),
-_width(rhs._width),
-_height(rhs._height)
-{
-    // make sure that our buffer is big enough
-    _buffer = new unsigned char[_bufferSize];
-
-    // only copy required length
-    memcpy(_buffer + _startOffset, rhs._buffer, _bufferLength);  // GetBuffer() includes _startOffset
-}
-
-void TestVideoBuffer::SetTimeStamp(unsigned int timeStamp)
-{
-    _timeStamp = timeStamp;
-}
-
-unsigned int
-TestVideoBuffer::GetWidth() const
-{
-    return _width;
-}
-
-unsigned int
-TestVideoBuffer::GetHeight() const
-{
-    return _height;
-}
-
-void
-TestVideoBuffer::SetWidth(unsigned int width)
-{
-    _width = width;
-}
-
-void
-TestVideoBuffer::SetHeight(unsigned int height)
-{
-    _height = height;
-}
-
-
-void TestVideoBuffer::Free()
-{
-    _timeStamp = 0;
-    _startOffset = 0;
-    _bufferLength = 0;
-    _bufferSize = 0;
-    _height = 0;
-    _width = 0;
-
-    if(_buffer)
-    {
-        delete [] _buffer;
-        _buffer = 0;
-    }
-}
-
-void TestVideoBuffer::VerifyAndAllocate(unsigned int minimumSize)
-{
-    if(minimumSize > _bufferSize)
-    {
-        // make sure that our buffer is big enough
-        unsigned char * newBufferBuffer = new unsigned char[minimumSize];
-        if(_buffer)
-        {
-            // copy the old data
-            memcpy(newBufferBuffer, _buffer, _bufferSize);
-            delete [] _buffer;
-        }
-        _buffer = newBufferBuffer;
-        _bufferSize = minimumSize;
-    }
-}
-
-int TestVideoBuffer::SetOffset(unsigned int length)
-{
-    if (length > _bufferSize ||
-        length > _bufferLength)
-    {
-        return -1;
-    }
-
-    unsigned int oldOffset = _startOffset;
-
-    if(oldOffset > length)
-    {
-        unsigned int newLength = _bufferLength + (oldOffset-length);// increase by the diff
-        assert(newLength <= _bufferSize);
-        _bufferLength = newLength;
-    }
-    if(oldOffset < length)
-    {
-        if(_bufferLength > (length-oldOffset))
-        {
-            _bufferLength -= (length-oldOffset); // decrease by the diff
-        }
-    }
-    _startOffset = length; // update
-
-    return 0;
-}
-
-void TestVideoBuffer::UpdateLength(unsigned int newLength)
-{
-    assert(newLength +_startOffset <= _bufferSize);
-    _bufferLength = newLength;
-}
-
-void TestVideoBuffer::CopyBuffer(unsigned int length, const unsigned char* buffer)
-{
-    assert(length+_startOffset <= _bufferSize);
-    memcpy(_buffer+_startOffset, buffer, length);
-    _bufferLength = length;
-}
-
-void TestVideoBuffer::CopyBuffer(TestVideoBuffer& fromVideoBuffer)
-{
-    assert(fromVideoBuffer.GetLength() + fromVideoBuffer.GetStartOffset() <= _bufferSize);
-    assert(fromVideoBuffer.GetSize() <= _bufferSize);
-
-    _bufferLength = fromVideoBuffer.GetLength();
-    _startOffset = fromVideoBuffer.GetStartOffset();
-    _timeStamp = fromVideoBuffer.GetTimeStamp();
-    _height = fromVideoBuffer.GetHeight();
-    _width = fromVideoBuffer.GetWidth();
-
-    // only copy required length
-    memcpy(_buffer+_startOffset, fromVideoBuffer.GetBuffer(), fromVideoBuffer.GetLength());  // GetBuffer() includes _startOffset
-
-}
-
-void TestVideoBuffer::CopyPointer(const TestVideoBuffer& fromVideoBuffer)
-{
-    _bufferSize = fromVideoBuffer.GetSize();
-    _bufferLength = fromVideoBuffer.GetLength();
-    _startOffset = fromVideoBuffer.GetStartOffset();
-    _timeStamp = fromVideoBuffer.GetTimeStamp();
-    _height = fromVideoBuffer.GetHeight();
-    _width = fromVideoBuffer.GetWidth();
-
-    _buffer = fromVideoBuffer.GetBuffer();
-}
-
-void TestVideoBuffer::ClearPointer()
-{
-    _buffer = NULL;
-}
-
-void TestVideoBuffer::SwapBuffers(TestVideoBuffer& videoBuffer)
-{
-    unsigned char*  tempBuffer = _buffer;
-    unsigned int    tempSize = _bufferSize;
-    unsigned int    tempLength =_bufferLength;
-    unsigned int    tempOffset = _startOffset;
-    unsigned int    tempTime = _timeStamp;
-    unsigned int    tempHeight = _height;
-    unsigned int    tempWidth = _width;
-
-    _buffer = videoBuffer.GetBuffer();
-    _bufferSize = videoBuffer.GetSize();
-    _bufferLength = videoBuffer.GetLength();
-    _startOffset = videoBuffer.GetStartOffset();
-    _timeStamp =  videoBuffer.GetTimeStamp();
-    _height = videoBuffer.GetHeight();
-    _width = videoBuffer.GetWidth();
-
-
-    videoBuffer.Set(tempBuffer, tempSize, tempLength, tempOffset, tempTime);
-    videoBuffer.SetHeight(tempHeight);
-    videoBuffer.SetWidth(tempWidth);
-}
-
-void TestVideoBuffer::Set(unsigned char* tempBuffer,unsigned int tempSize,unsigned int tempLength, unsigned int tempOffset,unsigned int timeStamp)
-{
-    _buffer = tempBuffer;
-    _bufferSize = tempSize;
-    _bufferLength = tempLength;
-    _startOffset = tempOffset;
-    _timeStamp = timeStamp;
-}
-
-unsigned char* TestVideoBuffer::GetBuffer() const
-{
-    return _buffer+_startOffset;
-}
-
-unsigned int TestVideoBuffer::GetStartOffset() const
-{
-    return _startOffset;
-}
-
-unsigned int TestVideoBuffer::GetSize() const
-{
-    return _bufferSize;
-}
-
-unsigned int TestVideoBuffer::GetLength() const
-{
-    return _bufferLength;
-}
-
-unsigned int TestVideoBuffer::GetTimeStamp() const
-{
-    return _timeStamp;
-}
-
-/**
-*   TestVideoEncodedBuffer
-*
-*/
-
-TestVideoEncodedBuffer::TestVideoEncodedBuffer() :
-    _captureWidth(0),
-    _captureHeight(0),
-    _frameRate(-1)
-{
-    _frameType = kDeltaFrame;
-}
-
-TestVideoEncodedBuffer::~TestVideoEncodedBuffer()
-{
-}
-
-void TestVideoEncodedBuffer::SetCaptureWidth(unsigned short width)
-{
-    _captureWidth = width;
-}
-
-void TestVideoEncodedBuffer::SetCaptureHeight(unsigned short height)
-{
-    _captureHeight = height;
-}
-
-unsigned short TestVideoEncodedBuffer::GetCaptureWidth()
-{
-    return _captureWidth;
-}
-
-unsigned short TestVideoEncodedBuffer::GetCaptureHeight()
-{
-    return _captureHeight;
-}
-
-VideoFrameType TestVideoEncodedBuffer::GetFrameType()
-{
-    return _frameType;
-}
-
-void TestVideoEncodedBuffer::SetFrameType(VideoFrameType frametype)
-{
-    _frameType = frametype;
-}
-
-void TestVideoEncodedBuffer::Reset()
-{
-    _captureWidth = 0;
-    _captureHeight = 0;
-    _frameRate = -1;
-    _frameType = kDeltaFrame;
-}
-
-void  TestVideoEncodedBuffer::SetFrameRate(float frameRate)
-{
-    _frameRate = frameRate;
-}
-
-float  TestVideoEncodedBuffer::GetFrameRate()
-{
-    return _frameRate;
-}
diff --git a/modules/video_coding/codecs/test_framework/video_buffer.h b/modules/video_coding/codecs/test_framework/video_buffer.h
deleted file mode 100644
index 824440e..0000000
--- a/modules/video_coding/codecs/test_framework/video_buffer.h
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
-
-#include "typedefs.h"
-#include "video_image.h"
-
-class TestVideoBuffer
-{
-public:
-    TestVideoBuffer();
-
-    virtual ~TestVideoBuffer();
-
-    TestVideoBuffer(const TestVideoBuffer& rhs);
-
-    /**
-    * Verifies that current allocated buffer size is larger than or equal to the input size.
-    * If the current buffer size is smaller, a new allocation is made and the old buffer data is copied to the new buffer.
-    */
-    void VerifyAndAllocate(unsigned int minimumSize);
-
-    void UpdateLength(unsigned int newLength);
-
-    void SwapBuffers(TestVideoBuffer& videoBuffer);
-
-    void CopyBuffer(unsigned int length, const unsigned char* fromBuffer);
-
-    void CopyBuffer(TestVideoBuffer& fromVideoBuffer);
-
-    // Use with care, and remember to call ClearPointer() when done.
-    void CopyPointer(const TestVideoBuffer& fromVideoBuffer);
-
-    void ClearPointer();
-
-    int  SetOffset(unsigned int length);            // Sets offset to beginning of frame in buffer
-
-    void Free();                                    // Deletes frame buffer and resets members to zero
-
-    void SetTimeStamp(unsigned int timeStamp);      // Sets timestamp of frame (90kHz)
-
-    /**
-    *   Gets pointer to frame buffer
-    */
-    unsigned char* GetBuffer() const;
-
-    /**
-    *   Gets allocated buffer size
-    */
-    unsigned int	GetSize() const;
-
-    /**
-    *   Gets length of frame
-    */
-    unsigned int	GetLength() const;
-
-    /**
-    *   Gets timestamp of frame (90kHz)
-    */
-    unsigned int	GetTimeStamp() const;
-
-    unsigned int	GetWidth() const;
-    unsigned int	GetHeight() const;
-
-    void            SetWidth(unsigned int width);
-    void            SetHeight(unsigned int height);
-
-private:
-    TestVideoBuffer& operator=(const TestVideoBuffer& inBuffer);
-
-private:
-    void Set(unsigned char* buffer,unsigned int size,unsigned int length,unsigned int offset, unsigned int timeStamp);
-    unsigned int GetStartOffset() const;
-
-    unsigned char*		  _buffer;          // Pointer to frame buffer
-    unsigned int		  _bufferSize;      // Allocated buffer size
-    unsigned int		  _bufferLength;    // Length (in bytes) of frame
-    unsigned int		  _startOffset;     // Offset (in bytes) to beginning of frame in buffer
-    unsigned int		  _timeStamp;       // Timestamp of frame (90kHz)
-    unsigned int          _width;
-    unsigned int          _height;
-};
-
-class TestVideoEncodedBuffer: public TestVideoBuffer
-{
-public:
-    TestVideoEncodedBuffer();
-    ~TestVideoEncodedBuffer();
-
-    void SetCaptureWidth(unsigned short width);
-    void SetCaptureHeight(unsigned short height);
-    unsigned short GetCaptureWidth();
-    unsigned short GetCaptureHeight();
-
-    webrtc::VideoFrameType GetFrameType();
-    void SetFrameType(webrtc::VideoFrameType frametype);
-
-    void Reset();
-
-    void SetFrameRate(float frameRate);
-    float GetFrameRate();
-
-private:
-    TestVideoEncodedBuffer& operator=(const TestVideoEncodedBuffer& inBuffer);
-
-private:
-    unsigned short			   _captureWidth;
-    unsigned short			   _captureHeight;
-    webrtc::VideoFrameType     _frameType;
-    float                      _frameRate;
-};
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
diff --git a/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc b/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
index 60cd97a..7924759 100644
--- a/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
+++ b/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
@@ -149,7 +149,7 @@
 int
 VP8DualDecoderTest::Decode(int lossValue)
 {
-    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    _sumEncBytes += _frameToDecode->_frame->Length();
     webrtc::EncodedImage encodedImage;
     VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
     encodedImage._completeFrame = !lossValue;
@@ -171,9 +171,9 @@
         }
 
         // compare decoded images
-        if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
-            _decodedVideoBuffer.GetLength(),
-            _decodedVideoBuffer2.GetBuffer(), _decodedVideoBuffer.GetLength()))
+        if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
+            _decodedVideoBuffer.Length(),
+            _decodedVideoBuffer2.Buffer(), _decodedVideoBuffer.Length()))
         {
             fprintf(stderr,"\n\nClone output different from master.\n\n");
             exit(EXIT_FAILURE);
@@ -201,7 +201,7 @@
 WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
 {
     _decodedVideoBuffer->VerifyAndAllocate(image.Length());
-    _decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
+    _decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
     _decodedVideoBuffer->SetWidth(image.Width());
     _decodedVideoBuffer->SetHeight(image.Height());
     _decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
diff --git a/modules/video_coding/codecs/vp8/test/dual_decoder_test.h b/modules/video_coding/codecs/vp8/test/dual_decoder_test.h
index b1d84a7..e4e17e5 100644
--- a/modules/video_coding/codecs/vp8/test/dual_decoder_test.h
+++ b/modules/video_coding/codecs/vp8/test/dual_decoder_test.h
@@ -30,7 +30,7 @@
     virtual int Decode(int lossValue = 0);
 
     webrtc::VP8Decoder*     _decoder2;
-    TestVideoBuffer         _decodedVideoBuffer2;
+    webrtc::VideoFrame      _decodedVideoBuffer2;
     static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes, 
         const void *ptrB, unsigned int bLengthBytes);
 private:
@@ -39,12 +39,12 @@
 class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
 {
 public:
-    DualDecoderCompleteCallback(TestVideoBuffer* buffer)
+    DualDecoderCompleteCallback(webrtc::VideoFrame* buffer)
     : _decodedVideoBuffer(buffer), _decodeComplete(false) {}
     WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
     bool DecodeComplete();
 private:
-    TestVideoBuffer* _decodedVideoBuffer;
+    webrtc::VideoFrame* _decodedVideoBuffer;
     bool _decodeComplete;
 };
 
diff --git a/modules/video_coding/codecs/vp8/test/rps_test.cc b/modules/video_coding/codecs/vp8/test/rps_test.cc
index dd27f4c..0ca02b7 100644
--- a/modules/video_coding/codecs/vp8/test/rps_test.cc
+++ b/modules/video_coding/codecs/vp8/test/rps_test.cc
@@ -137,18 +137,16 @@
   size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
   if (bytes_read < _lengthSourceFrame)
     return true;
-  _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+  _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
   _inputVideoBuffer.SetTimeStamp((unsigned int)
       (_encFrameCnt * 9e4 / _inst.maxFramerate));
   _inputVideoBuffer.SetWidth(_inst.width);
   _inputVideoBuffer.SetHeight(_inst.height);
-  webrtc::VideoFrame rawImage;
-  VideoBufferToRawImage(_inputVideoBuffer, rawImage);
   if (feof(_sourceFile) != 0) {
       return true;
   }
   _encodeCompleteTime = 0;
-  _encodeTimes[rawImage.TimeStamp()] = tGetTime();
+  _encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
 
   webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
   codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
@@ -161,7 +159,7 @@
     sli_ = false;
   }
   printf("Encoding: %u\n", _framecnt);
-  int ret = _encoder->Encode(rawImage, codecSpecificInfo, NULL);
+  int ret = _encoder->Encode(_inputVideoBuffer, codecSpecificInfo, NULL);
   if (ret < 0)
     printf("Failed to encode: %u\n", _framecnt);
 
@@ -171,10 +169,11 @@
   }
   if (_encodeCompleteTime > 0) {
       _totalEncodeTime += _encodeCompleteTime -
-          _encodeTimes[rawImage.TimeStamp()];
+          _encodeTimes[_inputVideoBuffer.TimeStamp()];
   }
   else {
-      _totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
+      _totalEncodeTime += tGetTime() -
+          _encodeTimes[_inputVideoBuffer.TimeStamp()];
   }
   return false;
 }
@@ -182,7 +181,7 @@
 //#define FRAME_LOSS 1
 
 int VP8RpsTest::Decode(int lossValue) {
-  _sumEncBytes += _frameToDecode->_frame->GetLength();
+  _sumEncBytes += _frameToDecode->_frame->Length();
   webrtc::EncodedImage encodedImage;
   VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
   encodedImage._completeFrame = !lossValue;
@@ -230,9 +229,9 @@
     }
 #else
     if (_framecnt > 0 && _framecnt % 10 != 0) {
-      if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
-        _decodedVideoBuffer.GetLength(),
-        decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
+      if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
+        _decodedVideoBuffer.Length(),
+        decoded_frame2_.Buffer(), _decodedVideoBuffer.Length())) {
         fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
                 _framecnt);
         return -1;
@@ -257,7 +256,7 @@
   return memcmp(ptrA, ptrB, aLengthBytes) == 0;
 }
 
-RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
+RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer)
     : decoded_frame_(buffer),
       decode_complete_(false),
       last_decoded_picture_id_(0),
@@ -266,13 +265,8 @@
 }
 
 WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
-  decoded_frame_->VerifyAndAllocate(image.Length());
-  decoded_frame_->CopyBuffer(image.Length(), image.Buffer());
-  decoded_frame_->SetWidth(image.Width());
-  decoded_frame_->SetHeight(image.Height());
-  decoded_frame_->SetTimeStamp(image.TimeStamp());
-  decode_complete_ = true;
-  return 0;
+  decode_complete_ = true;
+  return decoded_frame_->CopyFrame(image);
 }
 
 bool RpsDecodeCompleteCallback::DecodeComplete() {
diff --git a/modules/video_coding/codecs/vp8/test/rps_test.h b/modules/video_coding/codecs/vp8/test/rps_test.h
index f5cdcc6..b00e773 100644
--- a/modules/video_coding/codecs/vp8/test/rps_test.h
+++ b/modules/video_coding/codecs/vp8/test/rps_test.h
@@ -32,13 +32,13 @@
       const void *ptrB, unsigned int bLengthBytes);
 
   webrtc::VP8Decoder* decoder2_;
-  TestVideoBuffer decoded_frame2_;
+  webrtc::VideoFrame decoded_frame2_;
   bool sli_;
 };
 
 class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
  public:
-  RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
+  RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer);
   WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
   bool DecodeComplete();
   WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
@@ -47,7 +47,7 @@
   WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
 
  private:
-  TestVideoBuffer* decoded_frame_;
+  webrtc::VideoFrame* decoded_frame_;
   bool decode_complete_;
   WebRtc_UWord64 last_decoded_picture_id_;
   WebRtc_UWord64 last_decoded_ref_picture_id_;
diff --git a/modules/video_coding/main/test/quality_modes_test.cc b/modules/video_coding/main/test/quality_modes_test.cc
index bc3d9dd..6a80476 100644
--- a/modules/video_coding/main/test/quality_modes_test.cc
+++ b/modules/video_coding/main/test/quality_modes_test.cc
@@ -251,7 +251,7 @@
         _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
         sourceFrame.SetTimeStamp(_timeStamp);
 
-        ret = _vpm->PreprocessFrame(&sourceFrame, &decimatedFrame);
+        ret = _vpm->PreprocessFrame(sourceFrame, &decimatedFrame);
         if (ret  == 1)
         {
             printf("VD: frame drop %d \n",_frameCnt);
diff --git a/modules/video_processing/main/interface/video_processing.h b/modules/video_processing/main/interface/video_processing.h
index 512cace..8008b03 100644
--- a/modules/video_processing/main/interface/video_processing.h
+++ b/modules/video_processing/main/interface/video_processing.h
@@ -29,8 +29,8 @@
    concurrently processed stream. Similarly, it is recommended to call Reset()
    before switching to a new stream, but this is not absolutely required.
    
-   The module provides basic thread safety by permitting only a single function to
-   execute concurrently.
+   The module provides basic thread safety by permitting only a single function
+   to execute concurrently.
 */
 
 namespace webrtc {
@@ -57,8 +57,10 @@
         WebRtc_UWord32 mean;           /**< Mean value of frame */
         WebRtc_UWord32 sum;            /**< Sum of frame */
         WebRtc_UWord32 numPixels;      /**< Number of pixels */
-        WebRtc_UWord8  subSamplWidth;  /**< Subsampling rate of width in powers of 2 */
-        WebRtc_UWord8  subSamplHeight; /**< Subsampling rate of height in powers of 2 */
+        WebRtc_UWord8  subSamplWidth;  /**< Subsampling rate of width in powers
+                                            of 2 */
+        WebRtc_UWord8  subSamplHeight; /**< Subsampling rate of height in powers
+                                            of 2 */
     };
 
     /**
@@ -113,26 +115,12 @@
            The frame statistics will be stored here on return.
       
        \param[in]  frame
-           Pointer to the video frame.
-      
-       \param[in]  width
-           Frame width in pixels.
-      
-       \param[in]  height
-           Frame height in pixels.
+           Reference to the video frame.
       
        \return 0 on success, -1 on failure.
     */
-    static WebRtc_Word32 GetFrameStats(FrameStats& stats,
-                                     const WebRtc_UWord8* frame,
-                                     WebRtc_UWord32 width,
-                                     WebRtc_UWord32 height);
-
-    /**
-       \overload
-    */
-     static WebRtc_Word32 GetFrameStats(FrameStats& stats,
-                                     const VideoFrame& frame);
+    static WebRtc_Word32 GetFrameStats(FrameStats* stats,
+                                       const VideoFrame& frame);
 
     /**
        Checks the validity of a FrameStats struct. Currently, valid implies only
@@ -151,7 +139,7 @@
        \param[in,out] stats
            Frame statistics.
     */
-    static void ClearFrameStats(FrameStats& stats);
+    static void ClearFrameStats(FrameStats* stats);
 
     /**
        Enhances the color of an image through a constant mapping. Only the 
@@ -159,35 +147,14 @@
       
        \param[in,out] frame
            Pointer to the video frame.
-      
-       \param[in]     width
-           Frame width in pixels.
-      
-       \param[in]     height
-           Frame height in pixels.
-      
-       \return 0 on success, -1 on failure.
     */
-    static WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
-                                        WebRtc_UWord32 width,
-                                        WebRtc_UWord32 height);
-
-    /**
-       \overload
-    */
-    static WebRtc_Word32 ColorEnhancement(VideoFrame& frame);
+    static WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
 
     /**
        Increases/decreases the luminance value.
 
        \param[in,out] frame
-           Pointer to the video frame buffer.
-
-       \param[in]     width
-           Frame width in pixels.
-
-       \param[in]     height
-           Frame height in pixels.
+           Pointer to the video frame.
 
       \param[in] delta
            The amount to change the chrominance value of every single pixel.
@@ -195,30 +162,16 @@
 
        \return 0 on success, -1 on failure.
     */
-    static WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                                  int width, int height, int delta);
-    /**
-       \overload
-    */
-    static WebRtc_Word32 Brighten(VideoFrame& frame, int delta);
+    static WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
 
     /**
-       Detects and removes camera flicker from a video stream. Every frame from the
-       stream must be passed in. A frame will only be altered if flicker has been
-       detected. Has a fixed-point implementation.
+       Detects and removes camera flicker from a video stream. Every frame from
+       the stream must be passed in. A frame will only be altered if flicker has
+       been detected. Has a fixed-point implementation.
       
        \param[in,out] frame
            Pointer to the video frame.
       
-       \param[in]     width
-           Frame width in pixels.
-      
-       \param[in]     height
-           Frame height in pixels.
-      
-       \param[in]     timestamp
-           Frame timestamp in 90 kHz format.
-      
        \param[in,out] stats
            Frame statistics provided by GetFrameStats(). On return the stats will
            be reset to zero if the frame was altered. Call GetFrameStats() again
@@ -226,76 +179,40 @@
       
        \return 0 on success, -1 on failure.
     */
-    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
-                                     WebRtc_UWord32 width,
-                                     WebRtc_UWord32 height,
-                                     WebRtc_UWord32 timestamp,
-                                     FrameStats& stats) = 0;
+    virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
+                                       FrameStats* stats) = 0;
     
     /**
-       \overload
-    */
-    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
-                                     FrameStats& stats) = 0;
-
-    /**
        Denoises a video frame. Every frame from the stream should be passed in.
        Has a fixed-point implementation.
       
        \param[in,out] frame
            Pointer to the video frame.
       
-       \param[in]     width
-           Frame width in pixels.
-      
-       \param[in]     height
-           Frame height in pixels.
-      
        \return The number of modified pixels on success, -1 on failure.
     */
-    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
-                                  WebRtc_UWord32 width,
-                                  WebRtc_UWord32 height) = 0;
+    virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0;
     
     /**
-       \overload
-    */
-    virtual WebRtc_Word32 Denoising(VideoFrame& frame) = 0;
-
-    /**
-       Detects if a video frame is excessively bright or dark. Returns a warning if
-       this is the case. Multiple frames should be passed in before expecting a 
-       warning. Has a floating-point implementation.
+       Detects if a video frame is excessively bright or dark. Returns a
+       warning if this is the case. Multiple frames should be passed in before
+       expecting a warning. Has a floating-point implementation.
       
        \param[in] frame
            Pointer to the video frame.
       
-       \param[in]     width
-           Frame width in pixels.
-      
-       \param[in]     height
-           Frame height in pixels.
-      
        \param[in] stats
            Frame statistics provided by GetFrameStats().
       
        \return A member of BrightnessWarning on success, -1 on error
     */
-    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
-                                            WebRtc_UWord32 width,
-                                            WebRtc_UWord32 height,
-                                            const FrameStats& stats) = 0;
-
-    /**
-       \overload
-    */
     virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
-                                            const FrameStats& stats) = 0;
-
+                                              const FrameStats& stats) = 0;
 
     /**
-    The following functions refer to the pre-processor unit within VPM. The pre-processor
-    perfoms spatial/temporal decimation and content analysis on the frames prior to encoding.
+    The following functions refer to the pre-processor unit within VPM. The
+    pre-processor performs spatial/temporal decimation and content analysis on
+    the frames prior to encoding.
     */
 	
     /**
@@ -320,7 +237,9 @@
     \return VPM_OK on success, a negative value on error (see error codes)
 
     */
-    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate) = 0;
+    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                              WebRtc_UWord32 height,
+                                              WebRtc_UWord32 frameRate) = 0;
     
     /**
     Set max frame rate
@@ -352,7 +271,8 @@
     \param[in] resamplingMode
     Set resampling mode (a member of VideoFrameResampling)
     */
-    virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0;
+    virtual void SetInputFrameResampleMode(VideoFrameResampling
+                                           resamplingMode) = 0;
   
     /**
     Get Processed (decimated) frame
@@ -363,7 +283,8 @@
     
     \return VPM_OK on success, a negative value on error (see error codes)
     */
-    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame) = 0;
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
+                                          VideoFrame** processedFrame) = 0;
 
     /**
     Return content metrics for the last processed frame
diff --git a/modules/video_processing/main/source/brighten.cc b/modules/video_processing/main/source/brighten.cc
index 51e4b6b..68b0c25 100644
--- a/modules/video_processing/main/source/brighten.cc
+++ b/modules/video_processing/main/source/brighten.cc
@@ -17,21 +17,21 @@
 namespace webrtc {
 namespace VideoProcessing {
 
-WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                       int width, int height, int delta) {
-  if (frame == NULL) {
+WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
+  assert(frame);
+  if (frame->Buffer() == NULL) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
                  "Null frame pointer");
     return VPM_PARAMETER_ERROR;
   }
 
-  if (width <= 0 || height <= 0) {
+  if (frame->Width() <= 0 || frame->Height() <= 0) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
                  "Invalid frame size");
     return VPM_PARAMETER_ERROR;
   }
 
-  int numPixels = width * height;
+  int numPixels = frame->Width() * frame->Height();
 
   int lookUp[256];
   for (int i = 0; i < 256; i++) {
@@ -39,7 +39,7 @@
     lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
   }
 
-  WebRtc_UWord8* tempPtr = frame;
+  WebRtc_UWord8* tempPtr = frame->Buffer();
 
   for (int i = 0; i < numPixels; i++) {
     *tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
diff --git a/modules/video_processing/main/source/brighten.h b/modules/video_processing/main/source/brighten.h
index b7e6fb7..319cc6f 100644
--- a/modules/video_processing/main/source/brighten.h
+++ b/modules/video_processing/main/source/brighten.h
@@ -17,8 +17,7 @@
 namespace webrtc {
 namespace VideoProcessing {
 
-WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                       int width, int height, int delta);
+WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
 
 }  // namespace VideoProcessing
 }  // namespace webrtc
diff --git a/modules/video_processing/main/source/brightness_detection.cc b/modules/video_processing/main/source/brightness_detection.cc
index 6840df2..07ca7e7 100644
--- a/modules/video_processing/main/source/brightness_detection.cc
+++ b/modules/video_processing/main/source/brightness_detection.cc
@@ -41,26 +41,30 @@
 }
 
 WebRtc_Word32
-VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
-                                     const WebRtc_UWord32 width,
-                                     const WebRtc_UWord32 height,
-                                     const VideoProcessingModule::FrameStats& stats)
+VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
+                                     const VideoProcessingModule::FrameStats&
+                                     stats)
 {
-    if (frame == NULL)
+    if (frame.Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_PARAMETER_ERROR;
     }
+    int width = frame.Width();
+    int height = frame.Height();
     
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_PARAMETER_ERROR;
     }
 
     if (!VideoProcessingModule::ValidFrameStats(stats))
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame stats");
         return VPM_PARAMETER_ERROR;
     }
 
@@ -90,12 +94,14 @@
         {
             // Standard deviation of Y
             float stdY = 0;
-            for (WebRtc_UWord32 h = 0; h < height; h += (1 << stats.subSamplHeight))
+            uint8_t* buffer = frame.Buffer();
+            for (int h = 0; h < height; h += (1 << stats.subSamplHeight))
             {
                 WebRtc_UWord32 row = h*width;
-                for (WebRtc_UWord32 w = 0; w < width; w += (1 << stats.subSamplWidth))
+                for (int w = 0; w < width; w += (1 << stats.subSamplWidth))
                 {
-                    stdY += (frame[w + row] - stats.mean) * (frame[w + row] - stats.mean);
+                    stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] -
+                        stats.mean);
                 }
             }           
             stdY = sqrt(stdY / stats.numPixels);
@@ -133,7 +139,8 @@
             // Check if image is too dark
             if ((stdY < 55) && (perc05 < 50))
             { 
-                if (medianY < 60 || stats.mean < 80 ||  perc95 < 130 || propLow > 0.20)
+                if (medianY < 60 || stats.mean < 80 ||  perc95 < 130 ||
+                    propLow > 0.20)
                 {
                     _frameCntDark++;
                 }
@@ -150,7 +157,8 @@
             // Check if image is too bright
             if ((stdY < 52) && (perc95 > 200) && (medianY > 160))
             {
-                if (medianY > 185 || stats.mean > 185 || perc05 > 140 || propHigh > 0.25)
+                if (medianY > 185 || stats.mean > 185 || perc05 > 140 ||
+                    propHigh > 0.25)
                 {
                     _frameCntBright++;  
                 }
diff --git a/modules/video_processing/main/source/brightness_detection.h b/modules/video_processing/main/source/brightness_detection.h
index 7bed556..63f4816 100644
--- a/modules/video_processing/main/source/brightness_detection.h
+++ b/modules/video_processing/main/source/brightness_detection.h
@@ -29,10 +29,8 @@
 
     void Reset();
 
-    WebRtc_Word32 ProcessFrame(const WebRtc_UWord8* frame,
-                             WebRtc_UWord32 width,
-                             WebRtc_UWord32 height,
-                             const VideoProcessingModule::FrameStats& stats);
+    WebRtc_Word32 ProcessFrame(const VideoFrame& frame,
+                               const VideoProcessingModule::FrameStats& stats);
 
 private:
     WebRtc_Word32 _id;
diff --git a/modules/video_processing/main/source/color_enhancement.cc b/modules/video_processing/main/source/color_enhancement.cc
index 426596f..2cc39d3 100644
--- a/modules/video_processing/main/source/color_enhancement.cc
+++ b/modules/video_processing/main/source/color_enhancement.cc
@@ -18,39 +18,38 @@
 namespace VideoProcessing
 { 
     WebRtc_Word32
-    ColorEnhancement(WebRtc_UWord8* frame,
-                     const WebRtc_UWord32 width,
-                     const WebRtc_UWord32 height)
+    ColorEnhancement(VideoFrame* frame)
     {
+        assert(frame);
         // pointers to U and V color pixels
         WebRtc_UWord8* ptrU;
         WebRtc_UWord8* ptrV;
         WebRtc_UWord8 tempChroma;
-        const WebRtc_UWord32 numPixels = width * height;
+        const unsigned int size_y = frame->Width() * frame->Height();
+        const unsigned int size_uv = ((frame->Width() + 1) / 2) *
+            ((frame->Height() + 1 ) / 2);
 
 
-        if (frame == NULL)
+        if (frame->Buffer() == NULL)
         {
-            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
+                         -1, "Null frame pointer");
             return VPM_GENERAL_ERROR;
         }
 
-        if (width == 0 || height == 0)
+        if (frame->Width() == 0 || frame->Height() == 0)
         {
-            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
+                         -1, "Invalid frame size");
             return VPM_GENERAL_ERROR;
         }
         
-        // set pointers to first U and V pixels
-        
-        // stream format:
-        // | numPixels bytes luminance | numPixels/4 bytes chroma U | numPixels/4 chroma V |
-        
-        ptrU = frame + numPixels;       // skip luminance
-        ptrV = ptrU + (numPixels>>2);
+        // set pointers to first U and V pixels (skip luminance)
+        ptrU = frame->Buffer() + size_y;
+        ptrV = ptrU + size_uv;
 
         // loop through all chrominance pixels and modify color
-        for (WebRtc_UWord32 ix = 0; ix < (numPixels>>2); ix++)
+        for (unsigned int ix = 0; ix < size_uv; ix++)
         {
             tempChroma = colorTable[*ptrU][*ptrV];
             *ptrV = colorTable[*ptrV][*ptrU];
diff --git a/modules/video_processing/main/source/color_enhancement.h b/modules/video_processing/main/source/color_enhancement.h
index 87fabc3..3384657 100644
--- a/modules/video_processing/main/source/color_enhancement.h
+++ b/modules/video_processing/main/source/color_enhancement.h
@@ -21,9 +21,7 @@
 
 namespace VideoProcessing
 {
-    WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
-                                 WebRtc_UWord32 width,
-                                 WebRtc_UWord32 height);
+    WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
 }
 
 } //namespace
diff --git a/modules/video_processing/main/source/content_analysis.cc b/modules/video_processing/main/source/content_analysis.cc
index bd15e17..77cdcd4 100644
--- a/modules/video_processing/main/source/content_analysis.cc
+++ b/modules/video_processing/main/source/content_analysis.cc
@@ -56,25 +56,25 @@
 
 
 VideoContentMetrics*
-VPMContentAnalysis::ComputeContentMetrics(const VideoFrame* inputFrame)
+VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame)
 {
-    if (inputFrame == NULL)
+    if (inputFrame.Buffer() == NULL)
     {
         return NULL;
     }
 
     // Init if needed (native dimension change)
-    if (_width != static_cast<int>(inputFrame->Width()) ||
-        _height != static_cast<int>(inputFrame->Height()))
+    if (_width != static_cast<int>(inputFrame.Width()) ||
+        _height != static_cast<int>(inputFrame.Height()))
     {
-        if (VPM_OK != Initialize(static_cast<int>(inputFrame->Width()),
-                                 static_cast<int>(inputFrame->Height())))
+        if (VPM_OK != Initialize(static_cast<int>(inputFrame.Width()),
+                                 static_cast<int>(inputFrame.Height())))
         {
             return NULL;
         }
     }
 
-    _origFrame = inputFrame->Buffer();
+    _origFrame = inputFrame.Buffer();
 
     // compute spatial metrics: 3 spatial prediction errors
     (this->*ComputeSpatialMetrics)();
diff --git a/modules/video_processing/main/source/content_analysis.h b/modules/video_processing/main/source/content_analysis.h
index f927a01..6724af5 100644
--- a/modules/video_processing/main/source/content_analysis.h
+++ b/modules/video_processing/main/source/content_analysis.h
@@ -35,7 +35,7 @@
     // Input:           new frame
     // Return value:    pointer to structure containing content Analysis
     //                  metrics or NULL value upon error
-    VideoContentMetrics* ComputeContentMetrics(const VideoFrame* inputFrame);
+    VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
 
     // Release all allocated memory
     // Output: 0 if OK, negative value upon error
diff --git a/modules/video_processing/main/source/deflickering.cc b/modules/video_processing/main/source/deflickering.cc
index d0b8d3b..f17dc8d 100644
--- a/modules/video_processing/main/source/deflickering.cc
+++ b/modules/video_processing/main/source/deflickering.cc
@@ -89,12 +89,10 @@
 }
 
 WebRtc_Word32
-VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
-                              const WebRtc_UWord32 width,
-                              const WebRtc_UWord32 height,
-                              const WebRtc_UWord32 timestamp,
-                              VideoProcessingModule::FrameStats& stats)
+VPMDeflickering::ProcessFrame(VideoFrame* frame,
+                              VideoProcessingModule::FrameStats* stats)
 {
+    assert(frame);
     WebRtc_UWord32 frameMemory;
     WebRtc_UWord8 quantUW8[kNumQuants];
     WebRtc_UWord8 maxQuantUW8[kNumQuants];
@@ -105,27 +103,32 @@
 
     WebRtc_UWord16 tmpUW16;
     WebRtc_UWord32 tmpUW32;
+    int width = frame->Width();
+    int height = frame->Height();
 
-    if (frame == NULL)
+    if (frame->Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_GENERAL_ERROR;
     }
 
     // Stricter height check due to subsampling size calculation below.
     if (width == 0 || height < 2)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_GENERAL_ERROR;
     }
 
-    if (!VideoProcessingModule::ValidFrameStats(stats))
+    if (!VideoProcessingModule::ValidFrameStats(*stats))
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame stats");
         return VPM_GENERAL_ERROR;
     }
 
-    if (PreDetection(timestamp, stats) == -1)
+    if (PreDetection(frame->TimeStamp(), *stats) == -1)
     {
         return VPM_GENERAL_ERROR;
     }
@@ -148,9 +151,10 @@
         kLog2OfDownsamplingFactor) + 1);
     WebRtc_UWord8* ySorted = new WebRtc_UWord8[ySubSize];
     WebRtc_UWord32 sortRowIdx = 0;
-    for (WebRtc_UWord32 i = 0; i < height; i += kDownsamplingFactor)
+    for (int i = 0; i < height; i += kDownsamplingFactor)
     {
-        memcpy(ySorted + sortRowIdx * width, frame + i * width, width);
+        memcpy(ySorted + sortRowIdx * width,
+               frame->Buffer() + i * width, width);
         sortRowIdx++;
     }
     
@@ -254,9 +258,10 @@
     }
 
     // Map to the output frame.
+    uint8_t* buffer = frame->Buffer();
     for (WebRtc_UWord32 i = 0; i < ySize; i++)
     {
-        frame[i] = mapUW8[frame[i]];
+      buffer[i] = mapUW8[buffer[i]];
     }
 
     // Frame was altered, so reset stats.
diff --git a/modules/video_processing/main/source/deflickering.h b/modules/video_processing/main/source/deflickering.h
index ee5f90d..dfe7d9d 100644
--- a/modules/video_processing/main/source/deflickering.h
+++ b/modules/video_processing/main/source/deflickering.h
@@ -32,14 +32,11 @@
 
     void Reset();
 
-    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
-                             WebRtc_UWord32 width,
-                             WebRtc_UWord32 height,
-                             WebRtc_UWord32 timestamp,
-                             VideoProcessingModule::FrameStats& stats);
+    WebRtc_Word32 ProcessFrame(VideoFrame* frame,
+                               VideoProcessingModule::FrameStats* stats);
 private:
     WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,
-                             const VideoProcessingModule::FrameStats& stats);
+                               const VideoProcessingModule::FrameStats& stats);
 
     WebRtc_Word32 DetectFlicker();
 
diff --git a/modules/video_processing/main/source/denoising.cc b/modules/video_processing/main/source/denoising.cc
index d8931c9..33608a0 100644
--- a/modules/video_processing/main/source/denoising.cc
+++ b/modules/video_processing/main/source/denoising.cc
@@ -72,28 +72,31 @@
 }
 
 WebRtc_Word32
-VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
-                           const WebRtc_UWord32 width,
-                           const WebRtc_UWord32 height)
+VPMDenoising::ProcessFrame(VideoFrame* frame)
 {
+    assert(frame);
     WebRtc_Word32     thevar;
-    WebRtc_UWord32    k;
-    WebRtc_UWord32    jsub, ksub;
+    int               k;
+    int               jsub, ksub;
     WebRtc_Word32     diff0;
     WebRtc_UWord32    tmpMoment1;
     WebRtc_UWord32    tmpMoment2;
     WebRtc_UWord32    tmp;
     WebRtc_Word32     numPixelsChanged = 0;
 
-    if (frame == NULL)
+    if (frame->Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_GENERAL_ERROR;
     }
 
+    int width = frame->Width();
+    int height = frame->Height();
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_GENERAL_ERROR;
     }
 
@@ -124,31 +127,34 @@
     }
 
     /* Apply de-noising on each pixel, but update variance sub-sampled */
-    for (WebRtc_UWord32 i = 0; i < height; i++)
+    uint8_t* buffer = frame->Buffer();
+    for (int i = 0; i < height; i++)
     { // Collect over height
         k = i * width;
         ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
-        for (WebRtc_UWord32 j = 0; j < width; j++)
+        for (int j = 0; j < width; j++)
         { // Collect over width
             jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
             /* Update mean value for every pixel and every frame */
             tmpMoment1 = _moment1[k + j];
             tmpMoment1 *= kDenoiseFiltParam; // Q16
-            tmpMoment1 += ((kDenoiseFiltParamRec * ((WebRtc_UWord32)frame[k + j])) << 8);
+            tmpMoment1 += ((kDenoiseFiltParamRec *
+                          ((WebRtc_UWord32)buffer[k + j])) << 8);
             tmpMoment1 >>= 8; // Q8
             _moment1[k + j] = tmpMoment1;
 
             tmpMoment2 = _moment2[ksub + jsub];
             if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0))
             {
-                tmp = ((WebRtc_UWord32)frame[k + j] * (WebRtc_UWord32)frame[k + j]);
+                tmp = ((WebRtc_UWord32)buffer[k + j] *
+                      (WebRtc_UWord32)buffer[k + j]);
                 tmpMoment2 *= kDenoiseFiltParam; // Q16
                 tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8);
                 tmpMoment2 >>= 8; // Q8
             }
             _moment2[k + j] = tmpMoment2;
             /* Current event = deviation from mean value */
-            diff0 = ((WebRtc_Word32)frame[k + j] << 8) - _moment1[k + j];
+            diff0 = ((WebRtc_Word32)buffer[k + j] << 8) - _moment1[k + j];
             /* Recent events = variance (variations over time) */
             thevar = _moment2[k + j];
             thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8);
@@ -161,7 +167,7 @@
             if ((thevar < kDenoiseThreshold)
                 && ((diff0 * diff0 >> 8) < kDenoiseThreshold))
             { // Replace with mean
-                frame[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
+                buffer[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
                 numPixelsChanged++;
             }
         }
diff --git a/modules/video_processing/main/source/denoising.h b/modules/video_processing/main/source/denoising.h
index f53157c..eed772f 100644
--- a/modules/video_processing/main/source/denoising.h
+++ b/modules/video_processing/main/source/denoising.h
@@ -29,9 +29,7 @@
 
     void Reset();
 
-    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
-                             WebRtc_UWord32 width,
-                             WebRtc_UWord32 height);
+    WebRtc_Word32 ProcessFrame(VideoFrame* frame);
 
 private:
     WebRtc_Word32 _id;
@@ -39,7 +37,7 @@
     WebRtc_UWord32*   _moment1;           // (Q8) First order moment (mean)
     WebRtc_UWord32*   _moment2;           // (Q8) Second order moment
     WebRtc_UWord32    _frameSize;         // Size (# of pixels) of frame
-    WebRtc_Word32     _denoiseFrameCnt;   // Counter for subsampling in time
+    int               _denoiseFrameCnt;   // Counter for subsampling in time
 };
 
 } //namespace
diff --git a/modules/video_processing/main/source/frame_preprocessor.cc b/modules/video_processing/main/source/frame_preprocessor.cc
index c317528..d9609f9 100644
--- a/modules/video_processing/main/source/frame_preprocessor.cc
+++ b/modules/video_processing/main/source/frame_preprocessor.cc
@@ -136,9 +136,10 @@
 
 
 WebRtc_Word32
-VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame)
+VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
+                                      VideoFrame** processedFrame)
 {
-    if (frame == NULL || frame->Height() == 0 || frame->Width() == 0)
+    if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0)
     {
         return VPM_PARAMETER_ERROR;
     }
@@ -147,7 +148,8 @@
 
     if (_vd->DropFrame())
     {
-        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, "Drop frame due to frame rate");
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id,
+                     "Drop frame due to frame rate");
         return 1;  // drop 1 frame
     }
 
@@ -155,8 +157,9 @@
     // Note that we must make a copy of it.
     // We are not allowed to resample the input frame.
     *processedFrame = NULL;
-    if (_spatialResampler->ApplyResample(frame->Width(), frame->Height()))  {
-      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(*frame, _resampledFrame);
+    if (_spatialResampler->ApplyResample(frame.Width(), frame.Height()))  {
+      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame,
+                                                           _resampledFrame);
       if (ret != VPM_OK)
         return ret;
       *processedFrame = &_resampledFrame;
@@ -171,7 +174,7 @@
           if (*processedFrame == NULL)  {
             _contentMetrics = _ca->ComputeContentMetrics(frame);
           } else {
-            _contentMetrics = _ca->ComputeContentMetrics(&_resampledFrame);
+            _contentMetrics = _ca->ComputeContentMetrics(_resampledFrame);
           }
         }
         ++_frameCnt;
diff --git a/modules/video_processing/main/source/frame_preprocessor.h b/modules/video_processing/main/source/frame_preprocessor.h
index 2d89c4e..f85d5c0 100644
--- a/modules/video_processing/main/source/frame_preprocessor.h
+++ b/modules/video_processing/main/source/frame_preprocessor.h
@@ -46,12 +46,15 @@
     WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
 
     //Set target resolution: frame rate and dimension
-    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate);
+    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                      WebRtc_UWord32 height,
+                                      WebRtc_UWord32 frameRate);
 
     //Update incoming frame rate/dimension
     void UpdateIncomingFrameRate();
 
-    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width, WebRtc_UWord32 height);
+    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width,
+                                          WebRtc_UWord32 height);
 
     //Set decimated values: frame rate/dimension
     WebRtc_UWord32 DecimatedFrameRate();
@@ -59,7 +62,8 @@
     WebRtc_UWord32 DecimatedHeight() const;
 
     //Preprocess output:
-    WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame);
+    WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
+                                  VideoFrame** processedFrame);
     VideoContentMetrics* ContentMetrics() const;
 
 private:
diff --git a/modules/video_processing/main/source/video_processing_impl.cc b/modules/video_processing/main/source/video_processing_impl.cc
index 8a470a9..346f655 100644
--- a/modules/video_processing/main/source/video_processing_impl.cc
+++ b/modules/video_processing/main/source/video_processing_impl.cc
@@ -19,29 +19,29 @@
 namespace
 {
     void
-    SetSubSampling(VideoProcessingModule::FrameStats& stats,
+    SetSubSampling(VideoProcessingModule::FrameStats* stats,
                    const WebRtc_Word32 width,
                    const WebRtc_Word32 height)
     {
         if (width * height >= 640 * 480)
         {
-            stats.subSamplWidth = 3; 
-            stats.subSamplHeight = 3;
+            stats->subSamplWidth = 3;
+            stats->subSamplHeight = 3;
         }
         else if (width * height >= 352 * 288)
         {
-            stats.subSamplWidth = 2; 
-            stats.subSamplHeight = 2;
+            stats->subSamplWidth = 2;
+            stats->subSamplHeight = 2;
         }
         else if (width * height >= 176 * 144)
         {
-            stats.subSamplWidth = 1; 
-            stats.subSamplHeight = 1;
+            stats->subSamplWidth = 1;
+            stats->subSamplHeight = 1;
         }
         else
         {
-            stats.subSamplWidth = 0; 
-            stats.subSamplHeight = 0;
+            stats->subSamplWidth = 0;
+            stats->subSamplHeight = 0;
         }
     }
 }
@@ -89,13 +89,15 @@
     _deflickering.ChangeUniqueId(id);
     _denoising.ChangeUniqueId(id);
     _framePreProcessor.ChangeUniqueId(id);
-    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Created");
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
+                 "Created");
 }
 
 
 VideoProcessingModuleImpl::~VideoProcessingModuleImpl()
 {
-    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Destroyed");
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
+                 "Destroyed");
     
     delete &_mutex;
 }
@@ -112,49 +114,47 @@
 }
 
 WebRtc_Word32
-VideoProcessingModule::GetFrameStats(FrameStats& stats,
-                                         const VideoFrame& frame)
+VideoProcessingModule::GetFrameStats(FrameStats* stats,
+                                     const VideoFrame& frame)
 {
-    return GetFrameStats(stats, frame.Buffer(), frame.Width(), frame.Height());
-}
-
-WebRtc_Word32
-VideoProcessingModule::GetFrameStats(FrameStats& stats,
-                                         const WebRtc_UWord8* frame,
-                                         const WebRtc_UWord32 width,
-                                         const WebRtc_UWord32 height)
-{
-    if (frame == NULL)
+    if (frame.Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                     "Null frame pointer");
         return VPM_PARAMETER_ERROR;
     }
     
+    int width = frame.Width();
+    int height = frame.Height();
+
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                     "Invalid frame size");
         return VPM_PARAMETER_ERROR;
     }
 
     ClearFrameStats(stats); // The histogram needs to be zeroed out.
     SetSubSampling(stats, width, height);
 
+    uint8_t* buffer = frame.Buffer();
     // Compute histogram and sum of frame
-    for (WebRtc_UWord32 i = 0; i < height; i += (1 << stats.subSamplHeight))
+    for (int i = 0; i < height; i += (1 << stats->subSamplHeight))
     {
-        WebRtc_Word32 k = i * width;
-        for (WebRtc_UWord32 j = 0; j < width; j += (1 << stats.subSamplWidth))
+        int k = i * width;
+        for (int j = 0; j < width; j += (1 << stats->subSamplWidth))
         { 
-            stats.hist[frame[k + j]]++;
-            stats.sum += frame[k + j];
+            stats->hist[buffer[k + j]]++;
+            stats->sum += buffer[k + j];
         }
     }
 
-    stats.numPixels = (width * height) / ((1 << stats.subSamplWidth) * (1 << stats.subSamplHeight));
-    assert(stats.numPixels > 0);
+    stats->numPixels = (width * height) / ((1 << stats->subSamplWidth) *
+        (1 << stats->subSamplHeight));
+    assert(stats->numPixels > 0);
 
     // Compute mean value of frame
-    stats.mean = stats.sum / stats.numPixels;
+    stats->mean = stats->sum / stats->numPixels;
     
     return VPM_OK;
 }
@@ -171,94 +171,48 @@
 }
 
 void
-VideoProcessingModule::ClearFrameStats(FrameStats& stats)
+VideoProcessingModule::ClearFrameStats(FrameStats* stats)
 {
-    stats.mean = 0;
-    stats.sum = 0;
-    stats.numPixels = 0;
-    stats.subSamplWidth = 0;
-    stats.subSamplHeight = 0;
-    memset(stats.hist, 0, sizeof(stats.hist));
+    stats->mean = 0;
+    stats->sum = 0;
+    stats->numPixels = 0;
+    stats->subSamplWidth = 0;
+    stats->subSamplHeight = 0;
+    memset(stats->hist, 0, sizeof(stats->hist));
 }
 
 WebRtc_Word32
-VideoProcessingModule::ColorEnhancement(VideoFrame& frame)
+VideoProcessingModule::ColorEnhancement(VideoFrame* frame)
 {
-    return ColorEnhancement(frame.Buffer(), frame.Width(), frame.Height());
+    return VideoProcessing::ColorEnhancement(frame);
 }
 
 WebRtc_Word32
-VideoProcessingModule::ColorEnhancement(WebRtc_UWord8* frame,
-                                            const WebRtc_UWord32 width,
-                                            const WebRtc_UWord32 height)
+VideoProcessingModule::Brighten(VideoFrame* frame, int delta)
 {
-    return VideoProcessing::ColorEnhancement(frame, width, height);
+    return VideoProcessing::Brighten(frame, delta);
 }
 
 WebRtc_Word32
-VideoProcessingModule::Brighten(VideoFrame& frame, int delta)
-{
-    return Brighten(frame.Buffer(), frame.Width(), frame.Height(), delta);
-}
-
-WebRtc_Word32
-VideoProcessingModule::Brighten(WebRtc_UWord8* frame,
-                                    int width,
-                                    int height,
-                                    int delta)
-{
-    return VideoProcessing::Brighten(frame, width, height, delta);
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::Deflickering(VideoFrame& frame,
-                                            FrameStats& stats)
-{
-    return Deflickering(frame.Buffer(), frame.Width(), frame.Height(), 
-        frame.TimeStamp(), stats);
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::Deflickering(WebRtc_UWord8* frame,
-                                            const WebRtc_UWord32 width,
-                                            const WebRtc_UWord32 height,
-                                            const WebRtc_UWord32 timestamp,
-                                            FrameStats& stats)
+VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _deflickering.ProcessFrame(frame, width, height, timestamp, stats);
+    return _deflickering.ProcessFrame(frame, stats);
 }
 
 WebRtc_Word32
-VideoProcessingModuleImpl::Denoising(VideoFrame& frame)
-{
-    return Denoising(frame.Buffer(), frame.Width(), frame.Height());
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::Denoising(WebRtc_UWord8* frame,
-                                         const WebRtc_UWord32 width,
-                                         const WebRtc_UWord32 height)
+VideoProcessingModuleImpl::Denoising(VideoFrame* frame)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _denoising.ProcessFrame(frame, width, height);
+    return _denoising.ProcessFrame(frame);
 }
 
 WebRtc_Word32
 VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
-                                                   const FrameStats& stats)
-{
-    return BrightnessDetection(frame.Buffer(), frame.Width(), frame.Height(), stats);
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::BrightnessDetection(const WebRtc_UWord8* frame,
-                                                   const WebRtc_UWord32 width,
-                                                   const WebRtc_UWord32 height,
-                                                   const FrameStats& stats)
+                                               const FrameStats& stats)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _brightnessDetection.ProcessFrame(frame, width, height, stats);
+    return _brightnessDetection.ProcessFrame(frame, stats);
 }
 
 
@@ -271,7 +225,8 @@
 
 
 void 
-VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
+                                                     resamplingMode)
 {
     CriticalSectionScoped cs(&_mutex);
     _framePreProcessor.SetInputFrameResampleMode(resamplingMode);
@@ -286,7 +241,9 @@
 }
 
 WebRtc_Word32
-VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width,
+                                               WebRtc_UWord32 height,
+                                               WebRtc_UWord32 frameRate)
 {
     CriticalSectionScoped cs(&_mutex);
     return _framePreProcessor.SetTargetResolution(width, height, frameRate);
@@ -316,7 +273,8 @@
 }
 
 WebRtc_Word32
-VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame *frame, VideoFrame **processedFrame)
+VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame,
+                                           VideoFrame **processedFrame)
 {
     CriticalSectionScoped mutex(&_mutex);
     return _framePreProcessor.PreprocessFrame(frame, processedFrame);
diff --git a/modules/video_processing/main/source/video_processing_impl.h b/modules/video_processing/main/source/video_processing_impl.h
index 3170ab1..43c4318 100644
--- a/modules/video_processing/main/source/video_processing_impl.h
+++ b/modules/video_processing/main/source/video_processing_impl.h
@@ -36,30 +36,14 @@
 
     virtual void Reset();
 
-    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
-                                     WebRtc_UWord32 width,
-                                     WebRtc_UWord32 height,
-                                     WebRtc_UWord32 timestamp,
-                                     FrameStats& stats);
+    virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
+                                       FrameStats* stats);
 
-    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
-                                       FrameStats& stats);
-
-    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
-                                    WebRtc_UWord32 width,
-                                    WebRtc_UWord32 height);
-
-    virtual WebRtc_Word32 Denoising(VideoFrame& frame);
-
-    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
-                                              WebRtc_UWord32 width,
-                                              WebRtc_UWord32 height,
-                                              const FrameStats& stats);
+    virtual WebRtc_Word32 Denoising(VideoFrame* frame);
 
     virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
                                               const FrameStats& stats);
 
-
     //Frame pre-processor functions
 
     //Enable temporal decimation
@@ -88,7 +72,7 @@
     // Pre-process incoming frame: Sample when needed and compute content
     // metrics when enabled.
     // If no resampling takes place - processedFrame is set to NULL.
-    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame,
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
                                           VideoFrame** processedFrame);
     virtual VideoContentMetrics* ContentMetrics() const;
 
diff --git a/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
index 6510a5c..e8e6883 100644
--- a/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
+++ b/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
@@ -18,12 +18,14 @@
     WebRtc_UWord32 frameNum = 0;
     WebRtc_Word32 brightnessWarning = 0;
     WebRtc_UWord32 warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
+        _frameLength)
     {
         frameNum++;
         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         if (brightnessWarning != VideoProcessingModule::kNoWarning)
         {
             warningCount++;
@@ -40,7 +42,8 @@
     rewind(_sourceFile);
     frameNum = 0;
     warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
+        _frameLength &&
         frameNum < 300)
     {
         frameNum++;
@@ -58,8 +61,9 @@
         }
 
         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
         if (brightnessWarning == VideoProcessingModule::kBrightWarning)
         {
@@ -90,8 +94,9 @@
         }
 
         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
         if (brightnessWarning == VideoProcessingModule::kDarkWarning)
         {
diff --git a/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
index 0a94db4..68bf43e 100644
--- a/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
+++ b/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
@@ -11,6 +11,7 @@
 #include <cstdio>
 #include <cstdlib>
 
+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_processing/main/interface/video_processing.h"
 #include "modules/video_processing/main/test/unit_test/unit_test.h"
 #include "system_wrappers/interface/tick_util.h"
@@ -42,7 +43,7 @@
     {
         frameNum++;
         t0 = TickTime::Now();
-        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
+        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
         t1 = TickTime::Now();
         accTicks += t1 - t0;
         if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
@@ -88,41 +89,31 @@
     }
     ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
 
-    // Verify that all color pixels are enhanced, that no luminance values are altered,
-    // and that the function does not write outside the vector.
-    WebRtc_UWord32 safeGuard = 1000;
-    WebRtc_UWord32 numPixels = 352*288; // CIF size
-    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
-    WebRtc_UWord8 *refFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
+    // Verify that all color pixels are enhanced, and no luminance values are
+    // altered.
 
-    // use value 128 as probe value, since we know that this will be changed in the enhancement
-    memset(testFrame, 128, safeGuard);
-    memset(&testFrame[safeGuard], 128, numPixels);
-    memset(&testFrame[safeGuard + numPixels], 128, numPixels / 2);
-    memset(&testFrame[safeGuard + numPixels + (numPixels / 2)], 128, safeGuard);
+    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
 
-    memcpy(refFrame, testFrame, numPixels + (numPixels / 2) + (2 * safeGuard));
+    // Use value 128 as probe value, since we know that this will be changed
+    // in the enhancement.
+    memset(testFrame, 128, _frameLength);
 
-    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testFrame[safeGuard], 352, 288));
+    VideoFrame testVideoFrame;
+    testVideoFrame.CopyFrame(_frameLength, testFrame);
+    testVideoFrame.SetWidth(_width);
+    testVideoFrame.SetHeight(_height);
+    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
 
-    EXPECT_EQ(0, memcmp(testFrame, refFrame, safeGuard)) <<
-        "Function is writing outside the frame memory.";
-    
-    EXPECT_EQ(0, memcmp(&testFrame[safeGuard + numPixels + (numPixels / 2)], 
-        &refFrame[safeGuard + numPixels + (numPixels / 2)], safeGuard)) <<
-        "Function is writing outside the frame memory.";
+    EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
+      << "Function is modifying the luminance.";
 
-    EXPECT_EQ(0, memcmp(&testFrame[safeGuard], &refFrame[safeGuard], numPixels)) <<
-        "Function is modifying the luminance.";
-
-    EXPECT_NE(0, memcmp(&testFrame[safeGuard + numPixels],
-        &refFrame[safeGuard + numPixels], numPixels / 2)) <<
+    EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
+        &testFrame[_width * _height], _width * _height / 2)) <<
         "Function is not modifying all chrominance pixels";
 
     ASSERT_EQ(0, fclose(refFile));
     ASSERT_EQ(0, fclose(modFile));
     delete [] testFrame;
-    delete [] refFrame;
 }
 
 }  // namespace webrtc
diff --git a/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/modules/video_processing/main/test/unit_test/content_metrics_test.cc
index 54a1390..0247e99 100644
--- a/modules/video_processing/main/test/unit_test/content_metrics_test.cc
+++ b/modules/video_processing/main/test/unit_test/content_metrics_test.cc
@@ -26,8 +26,8 @@
     while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
            == _frameLength)
     {
-        _cM_c   = _ca_c.ComputeContentMetrics(&_videoFrame);
-        _cM_SSE = _ca_sse.ComputeContentMetrics(&_videoFrame);
+        _cM_c   = _ca_c.ComputeContentMetrics(_videoFrame);
+        _cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);
 
         ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
         ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
diff --git a/modules/video_processing/main/test/unit_test/deflickering_test.cc b/modules/video_processing/main/test/unit_test/deflickering_test.cc
index c189490..7119bdb 100644
--- a/modules/video_processing/main/test/unit_test/deflickering_test.cc
+++ b/modules/video_processing/main/test/unit_test/deflickering_test.cc
@@ -57,8 +57,8 @@
 
             t0 = TickTime::Now();
             VideoProcessingModule::FrameStats stats;
-            ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-            ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+            ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+            ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
             t1 = TickTime::Now();
             accTicks += t1 - t0;
 
diff --git a/modules/video_processing/main/test/unit_test/denoising_test.cc b/modules/video_processing/main/test/unit_test/denoising_test.cc
index 0787f1d..a4d9761 100644
--- a/modules/video_processing/main/test/unit_test/denoising_test.cc
+++ b/modules/video_processing/main/test/unit_test/denoising_test.cc
@@ -99,7 +99,7 @@
             }
 
             t0 = TickTime::Now();
-            ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
+            ASSERT_GE(modifiedPixels = _vpm->Denoising(&_videoFrame), 0);
             t1 = TickTime::Now();
             accTicks += t1 - t0;
 
diff --git a/modules/video_processing/main/test/unit_test/unit_test.cc b/modules/video_processing/main/test/unit_test/unit_test.cc
index c6fdb2b..f6d7d10 100644
--- a/modules/video_processing/main/test/unit_test/unit_test.cc
+++ b/modules/video_processing/main/test/unit_test/unit_test.cc
@@ -68,28 +68,21 @@
 TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
 {
   VideoProcessingModule::FrameStats stats;
-  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
   // Video frame with unallocated buffer.
   VideoFrame videoFrame;
   videoFrame.SetWidth(_width);
   videoFrame.SetHeight(_height);
 
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, NULL, _width, _height));
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, videoFrame));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame));
 
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(NULL, _width, _height));
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(videoFrame));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(&videoFrame));
 
-  EXPECT_EQ(-1, _vpm->Deflickering(NULL, _width, _height, 0, stats));
-  EXPECT_EQ(-1, _vpm->Deflickering(videoFrame, stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(&videoFrame, &stats));
 
-  EXPECT_EQ(-1, _vpm->Denoising(NULL, _width, _height));
-  EXPECT_EQ(-1, _vpm->Denoising(videoFrame));
+  EXPECT_EQ(-1, _vpm->Denoising(&videoFrame));
 
-  EXPECT_EQ(-3, _vpm->BrightnessDetection(NULL, _width, _height, stats));
   EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats));
-
-  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(NULL, NULL));
 }
 
 TEST_F(VideoProcessingModuleTest, HandleBadStats)
@@ -99,65 +92,48 @@
   ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                 _sourceFile));
 
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, _height, 0,
-                                   stats));
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+  _videoFrame.SetWidth(_width);
+  _videoFrame.SetHeight(_height);
+  EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
 
-  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width,
-                                          _height, stats));
   EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
 }
 
 TEST_F(VideoProcessingModuleTest, HandleBadSize)
 {
   VideoProcessingModule::FrameStats stats;
-  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
 
   // Bad width
   _videoFrame.SetWidth(0);
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), 0, _height));
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
 
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), 0, _height));
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
 
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), 0, _height, 0,
-                                   stats));
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
 
-  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), 0, _height));
-  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+  EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
 
-  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), 0, _height,
-                                          stats));
   EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
 
-
   // Bad height
   _videoFrame.SetWidth(_width);
   _videoFrame.SetHeight(0);
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), _width, 0));
-  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
 
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), _width, 0));
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
 
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, 0, 0,
-                                   stats));
-  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
 
-  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), _width, 0));
-  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+  EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
 
-  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width, 0,
-                                          stats));
   EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
 
   EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
   EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));
 
   VideoFrame *outFrame = NULL;
-  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(&_videoFrame,
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame,
                                                        &outFrame));
 }
 
@@ -173,28 +149,28 @@
   // Only testing non-static functions here.
   ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                 _sourceFile));
-  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
   memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
-  ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+  ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
   _vpm->Reset();
   // Retrieve frame stats again in case Deflickering() has zeroed them.
-  ASSERT_EQ(0, _vpm->GetFrameStats(stats, videoFrame2));
-  ASSERT_EQ(0, _vpm->Deflickering(videoFrame2, stats));
+  ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2));
+  ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats));
   EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
                       _frameLength));
 
   ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                 _sourceFile));
   memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
-  ASSERT_GE(_vpm->Denoising(_videoFrame), 0);
+  ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
   _vpm->Reset();
-  ASSERT_GE(_vpm->Denoising(videoFrame2), 0);
+  ASSERT_GE(_vpm->Denoising(&videoFrame2), 0);
   EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
                       _frameLength));
 
   ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                 _sourceFile));
-  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
   memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
   ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
   _vpm->Reset();
@@ -210,7 +186,7 @@
                                 _sourceFile));
 
   EXPECT_FALSE(_vpm->ValidFrameStats(stats));
-  EXPECT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
   EXPECT_TRUE(_vpm->ValidFrameStats(stats));
 
   printf("\nFrameStats\n");
@@ -222,7 +198,7 @@
          static_cast<unsigned int>(stats.subSamplWidth),
          static_cast<unsigned int>(stats.sum));
 
-  _vpm->ClearFrameStats(stats);
+  _vpm->ClearFrameStats(&stats);
   EXPECT_FALSE(_vpm->ValidFrameStats(stats));
 }
 
@@ -239,7 +215,7 @@
   _vpm->SetInputFrameResampleMode(kNoRescaling);
   ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
   VideoFrame *outFrame = NULL;
-  ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(&_videoFrame, &outFrame));
+  ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame));
   // No rescaling=> output frame = NULL
   ASSERT_TRUE(outFrame == NULL);
 }
@@ -324,7 +300,7 @@
   VideoFrame* out_frame = NULL;
 
   ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
-  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&source_frame, &out_frame));
+  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));
 
   // If the frame was resampled (scale changed) then:
   // (1) verify the new size and write out processed frame for viewing.
@@ -362,7 +338,7 @@
     ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
                                                source_height,
                                                30));
-    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&resampled_source_frame,
+    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame,
                                            &out_frame));
 
     // Write the processed frame to file for visual inspection.
diff --git a/test/testsupport/fileutils_unittest.cc b/test/testsupport/fileutils_unittest.cc
index 940b070..54a308e 100644
--- a/test/testsupport/fileutils_unittest.cc
+++ b/test/testsupport/fileutils_unittest.cc
@@ -94,12 +94,10 @@
 // directory that is automatically set when the test executable is launched.
 // The test is not fully testing the implementation, since we cannot be sure
 // of where the executable was launched from.
-// The test will fail if the top level directory is not named "trunk".
-TEST_F(FileUtilsTest, ProjectRootPathFromUnchangedWorkingDir) {
-  std::string path = webrtc::test::ProjectRootPath();
-  std::string expected_end = "trunk";
-  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
-  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
+TEST_F(FileUtilsTest, ProjectRootPath) {
+  std::string project_root = webrtc::test::ProjectRootPath();
+  // Not very smart, but at least tests something.
+  ASSERT_GT(project_root.length(), 0u);
 }
 
 // Similar to the above test, but for the output dir
diff --git a/video_engine/include/vie_errors.h b/video_engine/include/vie_errors.h
index 16c9299..bcaec7d 100644
--- a/video_engine/include/vie_errors.h
+++ b/video_engine/include/vie_errors.h
@@ -74,8 +74,6 @@
   kViEFileInvalidFile,                // Can't open the file with provided filename. Is the path and file format correct?
   kViEFileInvalidCapture,             // Can't use ViEPicture. Is the object correct?
   kViEFileSetRenderTimeoutError,      // SetRenderTimeoutImage- Please see log file.
-  kViEFileInvalidCaptureId,           // SetCaptureDeviceImage capture id does not exist.
-  kViEFileSetCaptureImageError,       // SetCaptureDeviceImage error. Please see log file.
   kViEFileSetStartImageError,         // SetRenderStartImage error. Please see log file.
   kViEFileUnknownError,               // An unknown error has occurred. Check the log file.
 
diff --git a/video_engine/include/vie_file.h b/video_engine/include/vie_file.h
index 6a521cf..c27d74d 100644
--- a/video_engine/include/vie_file.h
+++ b/video_engine/include/vie_file.h
@@ -175,18 +175,6 @@
   virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                        ViEPicture& picture) = 0;
 
-  // This function sets a jpg image to show before the first frame is captured
-  // by the capture device. This frame will be encoded and transmitted to a
-  // possible receiver
-  virtual int SetCaptureDeviceImage(const int capture_id,
-                                    const char* file_name_utf8) = 0;
-
-  // This function sets an image to show before the first frame is captured by
-  // the capture device. This frame will be encoded and transmitted to a
-  // possible receiver
-  virtual int SetCaptureDeviceImage(const int capture_id,
-                                    const ViEPicture& picture) = 0;
-
   virtual int FreePicture(ViEPicture& picture) = 0;
 
   // This function sets a jpg image to render before the first received video
diff --git a/video_engine/test/android/jni/vie_android_java_api.cc b/video_engine/test/android/jni/vie_android_java_api.cc
index 5273d90..c544682 100644
--- a/video_engine/test/android/jni/vie_android_java_api.cc
+++ b/video_engine/test/android/jni/vie_android_java_api.cc
@@ -974,14 +974,9 @@
   if (NULL == vieData.rtp)
     return -1;
 
-  if (enable)
-    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
-                            "EnableNACK enable");
-  else
-    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
-                            "EnableNACK disable");
-
   int ret = vieData.rtp->SetNACKStatus(channel, enable);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "EnableNACK(%d) ret:%d", enable, ret);
   return ret;
 }
 
diff --git a/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java b/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
index 55431ff..950f04a 100644
--- a/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
+++ b/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
@@ -138,7 +138,7 @@
     private TextView etVTxPort;
     private int destinationPortVideo = 11111;
     private CheckBox cbEnableNack;
-    private boolean enableNack = false;
+    private boolean enableNack = true;
     private CheckBox cbEnableVideoRTPDump;
 
     // Audio settings
@@ -670,6 +670,7 @@
             // TODO(leozwang): Add more options besides PLI, currently use pli
             // as the default. Also check return value.
             ret = vieAndroidAPI.EnablePLI(channel, true);
+            ret = vieAndroidAPI.EnableNACK(channel, enableNack);
             ret = vieAndroidAPI.SetCallback(channel, this);
 
             if (enableVideoSend) {
diff --git a/video_engine/test/auto_test/source/vie_autotest_file.cc b/video_engine/test/auto_test/source/vie_autotest_file.cc
index 4d5ee74..45ceda4 100644
--- a/video_engine/test/auto_test/source/vie_autotest_file.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_file.cc
@@ -332,37 +332,6 @@
 
         AutoTestSleep(TEST_SPACING);
 
-        // Testing: SetCaptureDeviceImage
-        {
-            ViETest::Log("Testing SetCaptureDeviceImage(int, char*)");
-            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
-            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
-                captureId, captureDeviceImage.c_str()));
-
-            ViETest::Log("you should see the capture device image now");
-            AutoTestSleep(2 * RENDER_TIMEOUT);
-            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
-            ViETest::Log("Done\n");
-        }
-
-        AutoTestSleep(TEST_SPACING);
-
-        // Testing: SetCaptureDeviceImage
-        if (FLAGS_include_timing_dependent_tests)
-        {
-            ViETest::Log("Testing SetCaptureDeviceImage(int, ViEPicture)");
-            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
-            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
-                captureId, capturePicture));
-
-            ViETest::Log("you should see the capture device image now");
-            AutoTestSleep(2 * RENDER_TIMEOUT);
-            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
-            ViETest::Log("Done\n");
-        }
-
-        AutoTestSleep(TEST_SPACING);
-
         // testing SetRenderStartImage(videoChannel, renderStartImage);
         if (FLAGS_include_timing_dependent_tests)
         {
diff --git a/video_engine/vie_capturer.cc b/video_engine/vie_capturer.cc
index 09c09db..ad9f39d 100644
--- a/video_engine/vie_capturer.cc
+++ b/video_engine/vie_capturer.cc
@@ -567,9 +567,9 @@
 void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
   // Apply image enhancement and effect filter.
   if (deflicker_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
+    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                           *video_frame) == 0) {
-      image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
+      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
     } else {
       WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
                    "%s: could not get frame stats for captured frame",
@@ -577,10 +577,10 @@
     }
   }
   if (denoising_enabled_) {
-    image_proc_module_->Denoising(*video_frame);
+    image_proc_module_->Denoising(video_frame);
   }
   if (brightness_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
+    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                           *video_frame) == 0) {
       WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
           *video_frame, *brightness_frame_stats_);
@@ -898,9 +898,4 @@
   observer_->NoPictureAlarm(id, vie_alarm);
 }
 
-WebRtc_Word32 ViECapturer::SetCaptureDeviceImage(
-    const VideoFrame& capture_device_image) {
-  return capture_module_->StartSendImage(capture_device_image, 10);
-}
-
 }  // namespace webrtc
diff --git a/video_engine/vie_capturer.h b/video_engine/vie_capturer.h
index 5fc0dad..1102898 100644
--- a/video_engine/vie_capturer.h
+++ b/video_engine/vie_capturer.h
@@ -105,9 +105,6 @@
   // Information.
   const char* CurrentDeviceName() const;
 
-  // Set device image.
-  WebRtc_Word32 SetCaptureDeviceImage(const VideoFrame& capture_device_image);
-
  protected:
   ViECapturer(int capture_id,
               int engine_id,
diff --git a/video_engine/vie_channel.cc b/video_engine/vie_channel.cc
index faa3896..ac10968 100644
--- a/video_engine/vie_channel.cc
+++ b/video_engine/vie_channel.cc
@@ -2096,7 +2096,7 @@
                               video_frame.Height());
   }
   if (color_enhancement_) {
-    VideoProcessingModule::ColorEnhancement(video_frame);
+    VideoProcessingModule::ColorEnhancement(&video_frame);
   }
 
   // Record videoframe.
diff --git a/video_engine/vie_channel_manager.cc b/video_engine/vie_channel_manager.cc
index a6b17eb..9fabd8d 100644
--- a/video_engine/vie_channel_manager.cc
+++ b/video_engine/vie_channel_manager.cc
@@ -39,7 +39,7 @@
       voice_engine_(NULL),
       module_process_thread_(NULL),
       over_use_detector_options_(options),
-      bwe_mode_(RemoteBitrateEstimator::kMultiStreamEstimation) {
+      bwe_mode_(RemoteBitrateEstimator::kSingleStreamEstimation) {
   WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id),
                "ViEChannelManager::ViEChannelManager(engine_id: %d)",
                engine_id);
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index 1df51e5..ed3b423 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -495,7 +495,7 @@
       has_received_rpsi_ = false;
     }
     VideoFrame* decimated_frame = NULL;
-    const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+    const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
     if (ret == 1) {
       // Drop this frame.
       return;
@@ -528,7 +528,7 @@
   // TODO(mflodman) Rewrite this to use code common to VP8 case.
   // Pass frame via preprocessor.
   VideoFrame* decimated_frame = NULL;
-  const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+  const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
   if (ret == 1) {
     // Drop this frame.
     return;
diff --git a/video_engine/vie_file_impl.cc b/video_engine/vie_file_impl.cc
index 1e2753b..147b766 100644
--- a/video_engine/vie_file_impl.cc
+++ b/video_engine/vie_file_impl.cc
@@ -687,73 +687,6 @@
   picture.type = kVideoUnknown;
   return 0;
 }
-int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
-                                       const char* file_nameUTF8) {
-  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
-               "%s(capture_id: %d)", __FUNCTION__, capture_id);
-
-  ViEInputManagerScoped is(*(shared_data_->input_manager()));
-  ViECapturer* capturer = is.Capture(capture_id);
-  if (!capturer) {
-    shared_data_->SetLastError(kViEFileInvalidCaptureId);
-    return -1;
-  }
-
-  VideoFrame capture_image;
-  if (ViEFileImage::ConvertJPEGToVideoFrame(
-          ViEId(shared_data_->instance_id(), capture_id), file_nameUTF8,
-          &capture_image) != 0) {
-    WEBRTC_TRACE(kTraceError, kTraceVideo,
-                 ViEId(shared_data_->instance_id(), capture_id),
-                 "%s(capture_id: %d) Failed to open file.", __FUNCTION__,
-                 capture_id);
-    shared_data_->SetLastError(kViEFileInvalidFile);
-    return -1;
-  }
-  if (capturer->SetCaptureDeviceImage(capture_image)) {
-    shared_data_->SetLastError(kViEFileSetCaptureImageError);
-    return -1;
-  }
-  return 0;
-}
-
-int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
-                                       const ViEPicture& picture) {
-  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
-               "%s(capture_id: %d)", __FUNCTION__, capture_id);
-
-  if (picture.type != kVideoI420) {
-    WEBRTC_TRACE(kTraceError, kTraceVideo,
-                 ViEId(shared_data_->instance_id(), capture_id),
-                 "%s(capture_id: %d) Not a valid picture type.",
-                 __FUNCTION__, capture_id);
-    shared_data_->SetLastError(kViEFileInvalidArgument);
-    return -1;
-  }
-  ViEInputManagerScoped is(*(shared_data_->input_manager()));
-  ViECapturer* capturer = is.Capture(capture_id);
-  if (!capturer) {
-    shared_data_->SetLastError(kViEFileSetCaptureImageError);
-    return -1;
-  }
-
-  VideoFrame capture_image;
-  if (ViEFileImage::ConvertPictureToVideoFrame(
-      ViEId(shared_data_->instance_id(), capture_id), picture,
-          &capture_image) != 0) {
-    WEBRTC_TRACE(kTraceError, kTraceVideo,
-                 ViEId(shared_data_->instance_id(), capture_id),
-                 "%s(capture_id: %d) Failed to use picture.", __FUNCTION__,
-                 capture_id);
-    shared_data_->SetLastError(kViEFileInvalidFile);
-    return -1;
-  }
-  if (capturer->SetCaptureDeviceImage(capture_image)) {
-    shared_data_->SetLastError(kViEFileInvalidCapture);
-    return -1;
-  }
-  return 0;
-}
 
 int ViEFileImpl::SetRenderStartImage(const int video_channel,
                                      const char* file_nameUTF8) {
diff --git a/video_engine/vie_file_impl.h b/video_engine/vie_file_impl.h
index b90c92b..d19cc4d 100644
--- a/video_engine/vie_file_impl.h
+++ b/video_engine/vie_file_impl.h
@@ -106,10 +106,6 @@
                                        const char* file_nameUTF8);
   virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                        ViEPicture& picture);
-  virtual int SetCaptureDeviceImage(const int capture_id,
-                                    const char* file_nameUTF8);
-  virtual int SetCaptureDeviceImage(const int capture_id,
-                                    const ViEPicture& picture);
   virtual int SetRenderStartImage(const int video_channel,
                                   const char* file_nameUTF8);
   virtual int SetRenderStartImage(const int video_channel,
diff --git a/voice_engine/transmit_mixer.cc b/voice_engine/transmit_mixer.cc
index d987c4e..452af1c 100644
--- a/voice_engine/transmit_mixer.cc
+++ b/voice_engine/transmit_mixer.cc
@@ -202,8 +202,8 @@
     _instanceId(instanceId),
     _mixFileWithMicrophone(false),
     _captureLevel(0),
-    _externalMedia(false),
-    _externalMediaCallbackPtr(NULL),
+    external_postproc_ptr_(NULL),
+    external_preproc_ptr_(NULL),
     _mute(false),
     _remainingMuteMicTimeMs(0),
     _mixingFrequency(0),
@@ -223,10 +223,8 @@
     {
         _processThreadPtr->DeRegisterModule(&_monitorModule);
     }
-    if (_externalMedia)
-    {
-        DeRegisterExternalMediaProcessing();
-    }
+    DeRegisterExternalMediaProcessing(kRecordingAllChannelsMixed);
+    DeRegisterExternalMediaProcessing(kRecordingPreprocessing);
     {
         CriticalSectionScoped cs(&_critSect);
         if (_fileRecorderPtr)
@@ -362,6 +360,17 @@
         return -1;
     }
 
+    {
+      CriticalSectionScoped cs(&_callbackCritSect);
+      if (external_preproc_ptr_) {
+        external_preproc_ptr_->Process(-1, kRecordingPreprocessing,
+                                       _audioFrame.data_,
+                                       _audioFrame.samples_per_channel_,
+                                       _audioFrame.sample_rate_hz_,
+                                       _audioFrame.num_channels_ == 2);
+      }
+    }
+
     // --- Near-end Voice Quality Enhancement (APM) processing
 
     APMProcessStream(totalDelayMS, clockDrift, currentMicLevel);
@@ -413,22 +422,15 @@
         RecordAudioToFile(_mixingFrequency);
     }
 
-    // --- External media processing
-
-    if (_externalMedia)
     {
-        CriticalSectionScoped cs(&_callbackCritSect);
-        const bool isStereo = (_audioFrame.num_channels_ == 2);
-        if (_externalMediaCallbackPtr)
-        {
-            _externalMediaCallbackPtr->Process(
-                -1,
-                kRecordingAllChannelsMixed,
-                (WebRtc_Word16*) _audioFrame.data_,
-                _audioFrame.samples_per_channel_,
-                _audioFrame.sample_rate_hz_,
-                isStereo);
-        }
+      CriticalSectionScoped cs(&_callbackCritSect);
+      if (external_postproc_ptr_) {
+        external_postproc_ptr_->Process(-1, kRecordingAllChannelsMixed,
+                                        _audioFrame.data_,
+                                        _audioFrame.samples_per_channel_,
+                                        _audioFrame.sample_rate_hz_,
+                                        _audioFrame.num_channels_ == 2);
+      }
     }
 
     return 0;
@@ -1095,28 +1097,40 @@
 }
 
 int TransmitMixer::RegisterExternalMediaProcessing(
-    VoEMediaProcess& proccess_object)
-{
-    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
-                 "TransmitMixer::RegisterExternalMediaProcessing()");
+    VoEMediaProcess* object,
+    ProcessingTypes type) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::RegisterExternalMediaProcessing()");
 
-    CriticalSectionScoped cs(&_callbackCritSect);
-    _externalMediaCallbackPtr = &proccess_object;
-    _externalMedia = true;
+  CriticalSectionScoped cs(&_callbackCritSect);
+  if (!object) {
+    return -1;
+  }
 
-    return 0;
+  // Store the callback object according to the processing type.
+  if (type == kRecordingAllChannelsMixed) {
+    external_postproc_ptr_ = object;
+  } else if (type == kRecordingPreprocessing) {
+    external_preproc_ptr_ = object;
+  } else {
+    return -1;
+  }
+  return 0;
 }
 
-int TransmitMixer::DeRegisterExternalMediaProcessing()
-{
-    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
-                 "TransmitMixer::DeRegisterExternalMediaProcessing()");
+int TransmitMixer::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::DeRegisterExternalMediaProcessing()");
 
-    CriticalSectionScoped cs(&_callbackCritSect);
-    _externalMedia = false;
-    _externalMediaCallbackPtr = NULL;
-
-    return 0;
+  CriticalSectionScoped cs(&_callbackCritSect);
+  if (type == kRecordingAllChannelsMixed) {
+    external_postproc_ptr_ = NULL;
+  } else if (type == kRecordingPreprocessing) {
+    external_preproc_ptr_ = NULL;
+  } else {
+    return -1;
+  }
+  return 0;
 }
 
 int
diff --git a/voice_engine/transmit_mixer.h b/voice_engine/transmit_mixer.h
index da87218..0dac049 100644
--- a/voice_engine/transmit_mixer.h
+++ b/voice_engine/transmit_mixer.h
@@ -73,9 +73,9 @@
     void UpdateMuteMicrophoneTime(const WebRtc_UWord32 lengthMs);
 
     // VoEExternalMedia
-    int RegisterExternalMediaProcessing(VoEMediaProcess& proccess_object);
-
-    int DeRegisterExternalMediaProcessing();
+    int RegisterExternalMediaProcessing(VoEMediaProcess* object,
+                                        ProcessingTypes type);
+    int DeRegisterExternalMediaProcessing(ProcessingTypes type);
 
     int GetMixingFrequency();
 
@@ -193,8 +193,8 @@
     // owns
     MonitorModule _monitorModule;
     AudioFrame _audioFrame;
-    Resampler _audioResampler;		// ADM sample rate -> mixing rate
-    FilePlayer*	_filePlayerPtr;
+    Resampler _audioResampler; // ADM sample rate -> mixing rate
+    FilePlayer* _filePlayerPtr;
     FileRecorder* _fileRecorderPtr;
     FileRecorder* _fileCallRecorderPtr;
     int _filePlayerId;
@@ -228,8 +228,8 @@
     int _instanceId;
     bool _mixFileWithMicrophone;
     WebRtc_UWord32 _captureLevel;
-    bool _externalMedia;
-    VoEMediaProcess* _externalMediaCallbackPtr;
+    VoEMediaProcess* external_postproc_ptr_;
+    VoEMediaProcess* external_preproc_ptr_;
     bool _mute;
     WebRtc_Word32 _remainingMuteMicTimeMs;
     int _mixingFrequency;
diff --git a/voice_engine/transmit_mixer_unittest.cc b/voice_engine/transmit_mixer_unittest.cc
new file mode 100644
index 0000000..d8d85b6
--- /dev/null
+++ b/voice_engine/transmit_mixer_unittest.cc
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/transmit_mixer.h"
+
+#include "gtest/gtest.h"
+#include "voice_engine/include/voe_external_media.h"
+
+namespace webrtc {
+namespace voe {
+namespace {
+
+class MediaCallback : public VoEMediaProcess {
+ public:
+  virtual void Process(const int channel, const ProcessingTypes type,
+                       int16_t audio[], const int samples_per_channel,
+                       const int sample_rate_hz, const bool is_stereo) {
+  }
+};
+
+// TODO(andrew): Mock VoEMediaProcess, and verify the behavior when calling
+// PrepareDemux().
+TEST(TransmitMixerTest, RegisterExternalMediaCallback) {
+  TransmitMixer* tm = NULL;
+  ASSERT_EQ(0, TransmitMixer::Create(tm, 0));
+  ASSERT_TRUE(tm != NULL);
+  MediaCallback callback;
+  EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(NULL,
+                                                    kRecordingPreprocessing));
+  EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
+                                                    kPlaybackPerChannel));
+  EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
+                                                    kPlaybackAllChannelsMixed));
+  EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
+                                                    kRecordingPerChannel));
+  EXPECT_EQ(0, tm->RegisterExternalMediaProcessing(&callback,
+                                                   kRecordingAllChannelsMixed));
+  EXPECT_EQ(0, tm->RegisterExternalMediaProcessing(&callback,
+                                                   kRecordingPreprocessing));
+  EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(kPlaybackPerChannel));
+  EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(
+                    kPlaybackAllChannelsMixed));
+  EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(kRecordingPerChannel));
+  EXPECT_EQ(0, tm->DeRegisterExternalMediaProcessing(
+                   kRecordingAllChannelsMixed));
+  EXPECT_EQ(0, tm->DeRegisterExternalMediaProcessing(kRecordingPreprocessing));
+  TransmitMixer::Destroy(tm);
+}
+
+}  // namespace
+}  // namespace voe
+}  // namespace webrtc
diff --git a/voice_engine/voe_external_media_impl.cc b/voice_engine/voe_external_media_impl.cc
index 0158c3d..0216023 100644
--- a/voice_engine/voe_external_media_impl.cc
+++ b/voice_engine/voe_external_media_impl.cc
@@ -88,9 +88,10 @@
                 processObject);
         }
         case kRecordingAllChannelsMixed:
+        case kRecordingPreprocessing:
         {
             return shared_->transmit_mixer()->RegisterExternalMediaProcessing(
-                processObject);
+                &processObject, type);
         }
     }
     return -1;
@@ -131,9 +132,10 @@
                 DeRegisterExternalMediaProcessing();
         }
         case kRecordingAllChannelsMixed:
+        case kRecordingPreprocessing:
         {
             return shared_->transmit_mixer()->
-                DeRegisterExternalMediaProcessing();
+                DeRegisterExternalMediaProcessing(type);
         }
     }
     return -1;
diff --git a/voice_engine/voice_engine_core.gypi b/voice_engine/voice_engine_core.gypi
index 79d55cd..0478a71 100644
--- a/voice_engine/voice_engine_core.gypi
+++ b/voice_engine/voice_engine_core.gypi
@@ -144,6 +144,7 @@
           'sources': [
             'channel_unittest.cc',
             'output_mixer_unittest.cc',
+            'transmit_mixer_unittest.cc',
             'voe_audio_processing_unittest.cc',
           ],
         },