Updated stable to r2862.
git-svn-id: http://webrtc.googlecode.com/svn/stable/src@2863 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h
index a539ae6..c6e9571 100644
--- a/common_video/libyuv/include/webrtc_libyuv.h
+++ b/common_video/libyuv/include/webrtc_libyuv.h
@@ -100,13 +100,11 @@
// - src_stride : Number of bytes in a row of the src Y plane.
// - dst_video_type : Type of output video.
// - dst_sample_size : Required only for the parsing of MJPG.
-// - width : Width in pixels.
-// - height : Height in pixels.
// - dst_frame : Pointer to a destination frame.
// Return value: 0 if OK, < 0 otherwise.
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+// It is assumed that source and destination have equal height.
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
VideoType dst_video_type, int dst_sample_size,
- int width, int height,
uint8_t* dst_frame);
// ConvertFrom YV12.
// Interface - same as above.
diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc
index ced0b6e..5ab8c11 100644
--- a/common_video/libyuv/libyuv_unittest.cc
+++ b/common_video/libyuv/libyuv_unittest.cc
@@ -135,8 +135,12 @@
double psnr = 0;
- uint8_t* orig_buffer = new uint8_t[frame_length_];
- EXPECT_GT(fread(orig_buffer, 1, frame_length_, source_file_), 0U);
+ VideoFrame orig_frame;
+ orig_frame.VerifyAndAllocate(frame_length_);
+ orig_frame.SetWidth(width_);
+ orig_frame.SetHeight(height_);
+ EXPECT_GT(fread(orig_frame.Buffer(), 1, frame_length_, source_file_), 0U);
+ orig_frame.SetLength(frame_length_);
// printf("\nConvert #%d I420 <-> RGB24\n", j);
uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3];
@@ -144,8 +148,8 @@
res_i420_frame.VerifyAndAllocate(frame_length_);
res_i420_frame.SetHeight(height_);
res_i420_frame.SetWidth(width_);
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kRGB24, 0,
- width_, height_, res_rgb_buffer2));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kRGB24, 0,
+ res_rgb_buffer2));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
@@ -154,7 +158,8 @@
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
// Optimization Speed- quality trade-off => 45 dB only (platform dependant).
EXPECT_GT(ceil(psnr), 44);
j++;
@@ -162,11 +167,12 @@
// printf("\nConvert #%d I420 <-> UYVY\n", j);
uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
- kUYVY, 0, width_, height_, out_uyvy_buffer));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+ kUYVY, 0, out_uyvy_buffer));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
EXPECT_EQ(48.0, psnr);
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
@@ -178,15 +184,15 @@
// printf("\nConvert #%d I420 <-> I420 \n", j);
uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
- EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
+ EXPECT_EQ(0, ConvertToI420(kI420, orig_frame.Buffer(), 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
- EXPECT_EQ(0, ConvertFromI420(res_i420_frame.Buffer(), width_, kI420, 0,
- width_, height_, out_i420_buffer));
+ EXPECT_EQ(0, ConvertFromI420(res_i420_frame, width_, kI420, 0,
+ out_i420_buffer));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
- psnr = I420PSNR(orig_buffer, out_i420_buffer, width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), out_i420_buffer, width_, height_);
EXPECT_EQ(48.0, psnr);
j++;
delete [] out_i420_buffer;
@@ -194,8 +200,8 @@
// printf("\nConvert #%d I420 <-> YV12\n", j);
uint8_t* outYV120Buffer = new uint8_t[frame_length_];
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kYV12, 0,
- width_, height_, outYV120Buffer));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kYV12, 0,
+ outYV120Buffer));
EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
kI420, 0,
width_, height_,
@@ -205,15 +211,16 @@
return;
}
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
EXPECT_EQ(48.0, psnr);
j++;
delete [] outYV120Buffer;
// printf("\nConvert #%d I420 <-> YUY2\n", j);
uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
- kYUY2, 0, width_, height_, out_yuy2_buffer));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+ kYUY2, 0, out_yuy2_buffer));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
@@ -222,13 +229,14 @@
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
EXPECT_EQ(48.0, psnr);
// printf("\nConvert #%d I420 <-> RGB565\n", j);
uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2];
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
- kRGB565, 0, width_, height_, out_rgb565_buffer));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+ kRGB565, 0, out_rgb565_buffer));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
@@ -237,15 +245,16 @@
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
// TODO(leozwang) Investigate what the right psnr threshold should be for
// I420ToRGB565. Another example is I420ToRGB24, where the psnr is 44.
EXPECT_GT(ceil(psnr), 40);
// printf("\nConvert #%d I420 <-> ARGB8888\n", j);
uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4];
- EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
- kARGB, 0, width_, height_, out_argb8888_buffer));
+ EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+ kARGB, 0, out_argb8888_buffer));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
@@ -254,17 +263,18 @@
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
- psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+ psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+ width_, height_);
// TODO(leozwang) Investigate what the right psnr threshold should be for
// I420ToARGB8888.
EXPECT_GT(ceil(psnr), 42);
ASSERT_EQ(0, fclose(output_file));
res_i420_frame.Free();
+ orig_frame.Free();
delete [] out_argb8888_buffer;
delete [] out_rgb565_buffer;
delete [] out_yuy2_buffer;
- delete [] orig_buffer;
}
// TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory
diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc
index 41648f8..82dc4a3 100644
--- a/common_video/libyuv/webrtc_libyuv.cc
+++ b/common_video/libyuv/webrtc_libyuv.cc
@@ -194,14 +194,15 @@
ConvertVideoType(src_video_type));
}
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
VideoType dst_video_type, int dst_sample_size,
- int width, int height,
uint8_t* dst_frame) {
+ int height = src_frame.Height();
+ int width = src_frame.Width();
int abs_height = (height < 0) ? -height : height;
int half_width = (width + 1) >> 1;
int half_height = (abs_height + 1) >> 1;
- const uint8_t* src_yplane = src_frame;
+ const uint8_t* src_yplane = src_frame.Buffer();
const uint8_t* src_uplane = src_yplane + width * abs_height;
const uint8_t* src_vplane = src_uplane + half_width * half_height;
return libyuv::ConvertFromI420(src_yplane, src_stride,
diff --git a/modules/audio_device/android/audio_device_android_jni.cc b/modules/audio_device/android/audio_device_android_jni.cc
index 23df34a..9f48c40 100644
--- a/modules/audio_device/android/audio_device_android_jni.cc
+++ b/modules/audio_device/android/audio_device_android_jni.cc
@@ -243,7 +243,7 @@
}
// RECORDING
- const char* threadName = "webrtc_jni_audio_capture_thread";
+ const char* threadName = "jni_audio_capture_thread";
_ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
kRealtimePriority, threadName);
if (_ptrThreadRec == NULL)
@@ -265,7 +265,7 @@
_recThreadID = threadID;
// PLAYOUT
- threadName = "webrtc_jni_audio_render_thread";
+ threadName = "jni_audio_render_thread";
_ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
kRealtimePriority, threadName);
if (_ptrThreadPlay == NULL)
diff --git a/modules/audio_device/android/audio_device_android_opensles.cc b/modules/audio_device/android/audio_device_android_opensles.cc
index 451766d..f383c79 100644
--- a/modules/audio_device/android/audio_device_android_opensles.cc
+++ b/modules/audio_device/android/audio_device_android_opensles.cc
@@ -1076,7 +1076,7 @@
rec_available_queue_.push(rec_buffer_[i]);
}
- const char* threadName = "webrtc_opensles_audio_capture_thread";
+ const char* threadName = "sles_audio_capture_thread";
_ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
kRealtimePriority, threadName);
if (_ptrThreadRec == NULL)
diff --git a/modules/video_capture/main/source/Linux/device_info_linux.cc b/modules/video_capture/main/source/Linux/device_info_linux.cc
index 653ee16..239a292 100644
--- a/modules/video_capture/main/source/Linux/device_info_linux.cc
+++ b/modules/video_capture/main/source/Linux/device_info_linux.cc
@@ -142,7 +142,7 @@
if (cap.bus_info[0] != 0) // may not available in all drivers
{
- // copy device id
+ // copy device id
if (deviceUniqueIdUTF8Length >= strlen((const char*) cap.bus_info))
{
memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
diff --git a/modules/video_render/main/source/android/video_render_android_surface_view.cc b/modules/video_render/main/source/android/video_render_android_surface_view.cc
index 889a6e7..d78ec4b 100644
--- a/modules/video_render/main/source/android/video_render_android_surface_view.cc
+++ b/modules/video_render/main/source/android/video_render_android_surface_view.cc
@@ -459,8 +459,8 @@
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
const int conversionResult =
- ConvertFromI420((unsigned char* )_bufferToRender.Buffer(), _bitmapWidth,
- kRGB565, 0, _bitmapWidth, _bitmapHeight, _directBuffer);
+ ConvertFromI420(_bufferToRender, _bitmapWidth,
+ kRGB565, 0, _directBuffer);
if (conversionResult < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
diff --git a/modules/video_render/main/source/linux/video_x11_channel.cc b/modules/video_render/main/source/linux/video_x11_channel.cc
index 4d574e3..e143740 100644
--- a/modules/video_render/main/source/linux/video_x11_channel.cc
+++ b/modules/video_render/main/source/linux/video_x11_channel.cc
@@ -44,19 +44,15 @@
}
WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
- VideoFrame& videoFrame)
-{
- CriticalSectionScoped cs(&_crit);
- if (_width != (WebRtc_Word32) videoFrame.Width() || _height
- != (WebRtc_Word32) videoFrame.Height())
- {
- if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
- {
- return -1;
- }
+ VideoFrame& videoFrame) {
+ CriticalSectionScoped cs(&_crit);
+ if (_width != (WebRtc_Word32) videoFrame.Width() || _height
+ != (WebRtc_Word32) videoFrame.Height()) {
+ if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+ return -1;
}
- return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
- videoFrame.TimeStamp());
+ }
+ return DeliverFrame(videoFrame);
}
WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
@@ -76,33 +72,26 @@
return 0;
}
-WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
- WebRtc_Word32 bufferSize,
- unsigned WebRtc_Word32 /*timeStamp90kHz*/)
-{
- CriticalSectionScoped cs(&_crit);
- if (!_prepared)
- {
- return 0;
- }
-
- if (!dispArray[_dispCount])
- {
- return -1;
- }
-
- unsigned char *pBuf = buffer;
- // convert to RGB32, setting stride = width.
- ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
-
- // put image in window
- XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
- _height, True);
-
- // very important for the image to update properly!
- XSync(_display, False);
+WebRtc_Word32 VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
+ CriticalSectionScoped cs(&_crit);
+ if (!_prepared) {
return 0;
+ }
+ if (!dispArray[_dispCount]) {
+ return -1;
+ }
+
+ // convert to RGB32, setting stride = width.
+ ConvertFromI420(videoFrame, _width, kARGB, 0, _buffer);
+
+ // Put image in window.
+ XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+ _height, True);
+
+ // Very important for the image to update properly!
+ XSync(_display, False);
+ return 0;
}
WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
diff --git a/modules/video_render/main/source/linux/video_x11_channel.h b/modules/video_render/main/source/linux/video_x11_channel.h
index d713422..a7ed412 100644
--- a/modules/video_render/main/source/linux/video_x11_channel.h
+++ b/modules/video_render/main/source/linux/video_x11_channel.h
@@ -38,8 +38,7 @@
WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
WebRtc_Word32 numberOfStreams);
- WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
- unsigned WebRtc_Word32 /*timeStamp90kHz*/);
+ WebRtc_Word32 DeliverFrame(const VideoFrame& videoFrame);
WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
WebRtc_Word32 Init(Window window, float left, float top, float right,
float bottom);
diff --git a/modules/video_render/main/source/mac/video_render_agl.cc b/modules/video_render/main/source/mac/video_render_agl.cc
index b431cce..80d340d 100644
--- a/modules/video_render/main/source/mac/video_render_agl.cc
+++ b/modules/video_render/main/source/mac/video_render_agl.cc
@@ -80,21 +80,21 @@
}
}
-WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame)
-{
- _owner->LockAGLCntx();
- if(_width != videoFrame.Width() ||
- _height != videoFrame.Height())
- {
- if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
- { //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
- _owner->UnlockAGLCntx();
- return -1;
- }
+WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId,
+ VideoFrame& videoFrame) {
+ _owner->LockAGLCntx();
+ if (_width != videoFrame.Width() ||
+ _height != videoFrame.Height()) {
+ if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s:%d FrameSizeChange returned an error",
+                   __FUNCTION__, __LINE__);
+ _owner->UnlockAGLCntx();
+ return -1;
}
+ }
- _owner->UnlockAGLCntx();
- return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+ _owner->UnlockAGLCntx();
+ return DeliverFrame(videoFrame);
}
int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
@@ -220,63 +220,58 @@
}
// Called from video engine when a new frame should be rendered.
-int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
- _owner->LockAGLCntx();
+int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
+ _owner->LockAGLCntx();
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (bufferSize != _incommingBufferSize)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Setting stride = width.
- int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
- _buffer);
- if (rgbret < 0)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- aglSetCurrentContext(_aglContext);
-
- // Put the new frame into the graphic card texture.
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Copy buffer to texture
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- if (glGetError() != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
+ if (_texture == 0) {
_owner->UnlockAGLCntx();
-
return 0;
+ }
+
+ if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ // Setting stride = width.
+ int rgbret = ConvertFromYV12(videoFrame.Buffer(), _width, kBGRA, 0, _width,
+ _height, _buffer);
+ if (rgbret < 0) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ aglSetCurrentContext(_aglContext);
+
+ // Put the new frame into the graphic card texture.
+ // Make sure this texture is the active one
+ glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+ GLenum glErr = glGetError();
+ if (glErr != GL_NO_ERROR) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ // Copy buffer to texture
+ glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+ 0, // Level, not use
+ 0, // start point x, (low left of pic)
+ 0, // start point y,
+ _width, // width
+ _height, // height
+ _pixelFormat, // picture format for _buffer
+ _pixelDataType, // data type of _buffer
+ (const GLvoid*) _buffer); // the pixel data
+
+ if (glGetError() != GL_NO_ERROR) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ _bufferIsUpdated = true;
+ _owner->UnlockAGLCntx();
+
+ return 0;
}
int VideoChannelAGL::RenderOffScreenBuffer()
diff --git a/modules/video_render/main/source/mac/video_render_agl.h b/modules/video_render/main/source/mac/video_render_agl.h
index bdee619..58302f1 100644
--- a/modules/video_render/main/source/mac/video_render_agl.h
+++ b/modules/video_render/main/source/mac/video_render_agl.h
@@ -45,7 +45,7 @@
VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
virtual ~VideoChannelAGL();
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
- virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+ virtual int DeliverFrame(const VideoFrame& videoFrame);
virtual int UpdateSize(int width, int height);
int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
diff --git a/modules/video_render/main/source/mac/video_render_nsopengl.h b/modules/video_render/main/source/mac/video_render_nsopengl.h
index 56058dc..be1fb75 100644
--- a/modules/video_render/main/source/mac/video_render_nsopengl.h
+++ b/modules/video_render/main/source/mac/video_render_nsopengl.h
@@ -44,7 +44,7 @@
virtual ~VideoChannelNSOpenGL();
// A new frame is delivered
- virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+ virtual int DeliverFrame(const VideoFrame& videoFrame);
// Called when the incomming frame size and/or number of streams in mix changes
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
diff --git a/modules/video_render/main/source/mac/video_render_nsopengl.mm b/modules/video_render/main/source/mac/video_render_nsopengl.mm
index 65b2e48..7727784 100644
--- a/modules/video_render/main/source/mac/video_render_nsopengl.mm
+++ b/modules/video_render/main/source/mac/video_render_nsopengl.mm
@@ -91,25 +91,22 @@
return 0;
}
-WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
-{
+WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(
+ const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
+ _owner->LockAGLCntx();
- if(_width != (int)videoFrame.Width() ||
- _height != (int)videoFrame.Height())
- {
- if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
- }
+ if(_width != (int)videoFrame.Width() ||
+ _height != (int)videoFrame.Height()) {
+ if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+ }
+ int ret = DeliverFrame(videoFrame);
- int ret = DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
-
- _owner->UnlockAGLCntx();
- return ret;
+ _owner->UnlockAGLCntx();
+ return ret;
}
int VideoChannelNSOpenGL::UpdateSize(int width, int height)
@@ -156,7 +153,7 @@
}
_incommingBufferSize = CalcBufferSize(kI420, _width, _height);
- _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+ _bufferSize = CalcBufferSize(kARGB, _width, _height);
_buffer = new unsigned char [_bufferSize];
memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
@@ -211,66 +208,61 @@
return 0;
}
-int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
+int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
+ _owner->LockAGLCntx();
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (bufferSize != _incommingBufferSize)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- int rgbRet = ConvertFromYV12(buffer, _width,
- kBGRA, 0, _width, _height,
- _buffer);
- if (rgbRet < 0)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- [_nsglContext makeCurrentContext];
-
-
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glBindTexture", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glTexSubImage2d", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
-
+ if (_texture == 0) {
_owner->UnlockAGLCntx();
return 0;
+ }
+
+ if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ int rgbRet = ConvertFromYV12(videoFrame.Buffer(), _width,
+ kBGRA, 0, _width, _height, _buffer);
+ if (rgbRet < 0) {
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ [_nsglContext makeCurrentContext];
+
+ // Make sure this texture is the active one
+ glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+ GLenum glErr = glGetError();
+ if (glErr != GL_NO_ERROR) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "ERROR %d while calling glBindTexture", glErr);
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+ 0, // Level, not use
+ 0, // start point x, (low left of pic)
+ 0, // start point y,
+ _width, // width
+ _height, // height
+ _pixelFormat, // picture format for _buffer
+ _pixelDataType, // data type of _buffer
+ (const GLvoid*) _buffer); // the pixel data
+
+ glErr = glGetError();
+ if (glErr != GL_NO_ERROR) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "ERROR %d while calling glTexSubImage2d", glErr);
+ _owner->UnlockAGLCntx();
+ return -1;
+ }
+
+ _bufferIsUpdated = true;
+
+ _owner->UnlockAGLCntx();
+ return 0;
}
int VideoChannelNSOpenGL::RenderOffScreenBuffer()
diff --git a/modules/video_render/main/source/windows/video_render_direct3d9.cc b/modules/video_render/main/source/windows/video_render_direct3d9.cc
index 14dca55..a83a1be 100644
--- a/modules/video_render/main/source/windows/video_render_direct3d9.cc
+++ b/modules/video_render/main/source/windows/video_render_direct3d9.cc
@@ -153,65 +153,55 @@
return -1;
}
}
- return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
- videoFrame.TimeStamp());
+ return DeliverFrame(videoFrame);
}
// Called from video engine when a new frame should be rendered.
-int D3D9Channel::DeliverFrame(unsigned char* buffer,
- int bufferSize,
- unsigned int timeStamp90kHz)
-{
+int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
+ WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+ "DeliverFrame to D3D9Channel");
+
+ CriticalSectionScoped cs(_critSect);
+
+ // FIXME if _bufferIsUpdated is still true (i.e. the last frame has not yet
+ // been rendered), do we want to update the texture? Probably not.
+ if (_bufferIsUpdated) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "DeliverFrame to D3D9Channel");
+ "Last frame hasn't been rendered yet. Drop this frame.");
+ return -1;
+ }
- CriticalSectionScoped cs(_critSect);
+ if (!_pd3dDevice) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+ "D3D for rendering not initialized.");
+ return -1;
+ }
- //FIXME if _bufferIsUpdated is still true (not be renderred), do we what to update the texture?)
- //probably not
- if (_bufferIsUpdated)
- {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "Last frame hasn't been rendered yet. Drop this frame.");
- return -1;
- }
+ if (!_pTexture) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+ "Texture for rendering not initialized.");
+ return -1;
+ }
- if (!_pd3dDevice)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "D3D for rendering not initialized.");
- return -1;
- }
+ D3DLOCKED_RECT lr;
- if (!_pTexture)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Texture for rendering not initialized.");
- return -1;
- }
+ if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+ "Failed to lock a texture in D3D9 Channel.");
+ return -1;
+ }
+ UCHAR* pRect = (UCHAR*) lr.pBits;
- D3DLOCKED_RECT lr;
+ ConvertFromI420(videoFrame, _width, kARGB, 0, pRect);
- if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock a texture in D3D9 Channel.");
- return -1;
- }
- UCHAR* pRect = (UCHAR*) lr.pBits;
+ if (FAILED(_pTexture->UnlockRect(0))) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+ "Failed to unlock a texture in D3D9 Channel.");
+ return -1;
+ }
- ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
-
- if (FAILED(_pTexture->UnlockRect(0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to unlock a texture in D3D9 Channel.");
- return -1;
- }
-
- _bufferIsUpdated = true;
-
- return 0;
+ _bufferIsUpdated = true;
+ return 0;
}
// Called by d3d channel owner to indicate the frame/texture has been rendered off
diff --git a/modules/video_render/main/source/windows/video_render_direct3d9.h b/modules/video_render/main/source/windows/video_render_direct3d9.h
index 6d6fef3..8430393 100644
--- a/modules/video_render/main/source/windows/video_render_direct3d9.h
+++ b/modules/video_render/main/source/windows/video_render_direct3d9.h
@@ -43,10 +43,8 @@
// Called when the incomming frame size and/or number of streams in mix changes
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
- // A new frame is delivered
- virtual int DeliverFrame(unsigned char* buffer,
- int bufferSize,
- unsigned int timeStame90kHz);
+ // A new frame is delivered.
+ virtual int DeliverFrame(const VideoFrame& videoFrame);
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
diff --git a/system_wrappers/interface/aligned_malloc.h b/system_wrappers/interface/aligned_malloc.h
index c229435..3fda6b6 100644
--- a/system_wrappers/interface/aligned_malloc.h
+++ b/system_wrappers/interface/aligned_malloc.h
@@ -11,15 +11,37 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
+// The functions declared here
+// 1) Allocates block of aligned memory.
+// 2) Re-calculates a pointer such that it is aligned to a higher or equal
+// address.
+// Note: alignment must be a power of two. The alignment is in bytes.
+
#include <stddef.h>
-namespace webrtc
-{
- void* AlignedMalloc(
- size_t size,
- size_t alignment);
- void AlignedFree(
- void* memBlock);
-}
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+// Returns a pointer to the first boundary of |alignment| bytes following the
+// address of |ptr|.
+// Note that there is no guarantee that the memory in question is available.
+// |ptr| has no requirements other than it can't be NULL.
+void* GetRightAlign(const void* ptr, size_t alignment);
+
+// Allocates memory of |size| bytes aligned on an |alignment| boundary.
+// The return value is a pointer to the memory. Note that the memory must
+// be de-allocated using AlignedFree.
+void* AlignedMalloc(size_t size, size_t alignment);
+// De-allocates memory created using the AlignedMalloc() API.
+void AlignedFree(void* memBlock);
+
+// Scoped pointer to AlignedMalloc-memory.
+template<typename T>
+struct Allocator {
+ typedef scoped_ptr_malloc<T, AlignedFree> scoped_ptr_aligned;
+};
+
+} // namespace webrtc
#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
diff --git a/system_wrappers/source/aligned_malloc.cc b/system_wrappers/source/aligned_malloc.cc
index bb10c6b..3dd281f 100644
--- a/system_wrappers/source/aligned_malloc.cc
+++ b/system_wrappers/source/aligned_malloc.cc
@@ -10,112 +10,109 @@
#include "aligned_malloc.h"
-#include <assert.h>
#include <memory.h>
-
-#ifdef WEBRTC_ANDROID
#include <stdlib.h>
-#endif
-
-#if WEBRTC_MAC
- #include <malloc/malloc.h>
-#else
- #include <malloc.h>
-#endif
#if _WIN32
- #include <windows.h>
+#include <windows.h>
#else
- #include <stdint.h>
+#include <stdint.h>
#endif
#include "typedefs.h"
-// Ok reference on memory alignment:
+// Reference on memory alignment:
// http://stackoverflow.com/questions/227897/solve-the-memory-alignment-in-c-interview-question-that-stumped-me
-
-namespace webrtc
-{
-// TODO (hellner) better to create just one memory block and
-// interpret the first sizeof(AlignedMemory) bytes as
-// an AlignedMemory struct.
-struct AlignedMemory
-{
+namespace webrtc {
+// TODO(henrike): better to create just one memory block and interpret the
+// first sizeof(AlignedMemory) bytes as an AlignedMemory struct.
+struct AlignedMemory {
void* alignedBuffer;
void* memoryPointer;
};
-void* AlignedMalloc(size_t size, size_t alignment)
-{
- if(alignment == 0)
- {
- // Don't allow alignment 0 since it's undefined.
- return NULL;
- }
- // Make sure that the alignment is an integer power of two or fail.
- if(alignment & (alignment - 1))
- {
- return NULL;
- }
-
- AlignedMemory* returnValue = new AlignedMemory();
- if(returnValue == NULL)
- {
- return NULL;
- }
-
- // The memory is aligned towards the lowest address that so only
- // alignment - 1 bytes needs to be allocated.
- // A pointer to AlignedMemory must be stored so that it can be retreived for
- // deletion, ergo the sizeof(uintptr_t).
- returnValue->memoryPointer = malloc(size + sizeof(uintptr_t) +
- alignment - 1);
- if(returnValue->memoryPointer == NULL)
- {
- delete returnValue;
- return NULL;
- }
-
- // Alligning after the sizeof(header) bytes will leave room for the header
- // in the same memory block.
- uintptr_t alignStartPos = (uintptr_t)returnValue->memoryPointer;
- alignStartPos += sizeof(uintptr_t);
-
- // The buffer should be aligned with 'alignment' bytes. The - 1 guarantees
- // that we align towards the lowest address.
- uintptr_t alignedPos = (alignStartPos + alignment - 1) & ~(alignment - 1);
-
- // alignedPos is the address sought for.
- returnValue->alignedBuffer = (void*)alignedPos;
-
- // Store the address to the AlignedMemory struct in the header so that a
- // it's possible to reclaim all memory.
- uintptr_t headerPos = alignedPos;
- headerPos -= sizeof(uintptr_t);
- void* headerPtr = (void*) headerPos;
- uintptr_t headerValue = (uintptr_t)returnValue;
- memcpy(headerPtr,&headerValue,sizeof(uintptr_t));
-
- return returnValue->alignedBuffer;
+uintptr_t GetRightAlign(uintptr_t startPos, size_t alignment) {
+ // The pointer should be aligned with |alignment| bytes. The - 1 guarantees
+ // that it is aligned towards the closest higher (right) address.
+ return (startPos + alignment - 1) & ~(alignment - 1);
}
-void AlignedFree(void* memBlock)
-{
- if(memBlock == NULL)
- {
- return;
- }
- uintptr_t alignedPos = (uintptr_t)memBlock;
- uintptr_t headerPos = alignedPos - sizeof(uintptr_t);
-
- // Read out the address of the AlignedMemory struct from the header.
- uintptr_t* headerPtr = (uintptr_t*)headerPos;
- AlignedMemory* deleteMemory = (AlignedMemory*) *headerPtr;
-
- if(deleteMemory->memoryPointer != NULL)
- {
- free(deleteMemory->memoryPointer);
- }
- delete deleteMemory;
+// Alignment must be an integer power of two.
+bool ValidAlignment(size_t alignment) {
+ if (!alignment) {
+ return false;
+ }
+ return (alignment & (alignment - 1)) == 0;
}
-} // namespace webrtc
+
+void* GetRightAlign(const void* ptr, size_t alignment) {
+ if (!ptr) {
+ return NULL;
+ }
+ if (!ValidAlignment(alignment)) {
+ return NULL;
+ }
+ uintptr_t startPos = reinterpret_cast<uintptr_t>(ptr);
+ return reinterpret_cast<void*>(GetRightAlign(startPos, alignment));
+}
+
+void* AlignedMalloc(size_t size, size_t alignment) {
+ if (size == 0) {
+ return NULL;
+ }
+ if (!ValidAlignment(alignment)) {
+ return NULL;
+ }
+
+ AlignedMemory* returnValue = new AlignedMemory();
+ if (returnValue == NULL) {
+ return NULL;
+ }
+
+  // The memory is aligned towards the lowest address, so that only
+  // alignment - 1 extra bytes need to be allocated.
+  // A pointer to AlignedMemory must be stored so that it can be retrieved for
+  // deletion, ergo the sizeof(uintptr_t).
+ returnValue->memoryPointer = malloc(size + sizeof(uintptr_t) +
+ alignment - 1);
+ if (returnValue->memoryPointer == NULL) {
+ delete returnValue;
+ return NULL;
+ }
+
+ // Aligning after the sizeof(header) bytes will leave room for the header
+ // in the same memory block.
+ uintptr_t alignStartPos =
+ reinterpret_cast<uintptr_t>(returnValue->memoryPointer);
+ alignStartPos += sizeof(uintptr_t);
+ uintptr_t alignedPos = GetRightAlign(alignStartPos, alignment);
+ returnValue->alignedBuffer = reinterpret_cast<void*>(alignedPos);
+
+  // Store the address of the AlignedMemory struct in the header so that
+  // it's possible to reclaim all memory.
+ uintptr_t headerPos = alignedPos;
+ headerPos -= sizeof(uintptr_t);
+ void* headerPtr = reinterpret_cast<void*>(headerPos);
+ uintptr_t headerValue = reinterpret_cast<uintptr_t>(returnValue);
+ memcpy(headerPtr,&headerValue,sizeof(uintptr_t));
+ return returnValue->alignedBuffer;
+}
+
+void AlignedFree(void* memBlock) {
+ if (memBlock == NULL) {
+ return;
+ }
+ uintptr_t alignedPos = reinterpret_cast<uintptr_t>(memBlock);
+ uintptr_t headerPos = alignedPos - sizeof(uintptr_t);
+
+ // Read out the address of the AlignedMemory struct from the header.
+ uintptr_t* headerPtr = reinterpret_cast<uintptr_t*>(headerPos);
+ AlignedMemory* deleteMemory = reinterpret_cast<AlignedMemory*>(*headerPtr);
+
+ if (deleteMemory->memoryPointer != NULL) {
+ free(deleteMemory->memoryPointer);
+ }
+ delete deleteMemory;
+}
+
+} // namespace webrtc
diff --git a/system_wrappers/source/aligned_malloc_unittest.cc b/system_wrappers/source/aligned_malloc_unittest.cc
new file mode 100644
index 0000000..9c6223c
--- /dev/null
+++ b/system_wrappers/source/aligned_malloc_unittest.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/aligned_malloc.h"
+
+#if _WIN32
+#include <windows.h>
+#else
+#include <stdint.h>
+#endif
+
+#include "typedefs.h" // NOLINT
+
+#include "gtest/gtest.h"
+
+// Returns true if |size| and |alignment| are a valid combination.
+bool CorrectUsage(size_t size, size_t alignment) {
+ webrtc::Allocator<char>::scoped_ptr_aligned scoped(
+ static_cast<char*> (webrtc::AlignedMalloc(size, alignment)));
+ if (scoped.get() == NULL) {
+ return false;
+ }
+ const uintptr_t scoped_address = reinterpret_cast<uintptr_t> (scoped.get());
+ return 0u == scoped_address % alignment;
+}
+
+TEST(AlignedMalloc, GetRightAlign) {
+ const size_t size = 100;
+ const size_t alignment = 32;
+ const size_t left_missalignment = 8;
+ webrtc::Allocator<char>::scoped_ptr_aligned scoped(
+ static_cast<char*> (webrtc::AlignedMalloc(size, alignment)));
+ EXPECT_TRUE(scoped.get() != NULL);
+ const uintptr_t aligned_address = reinterpret_cast<uintptr_t> (scoped.get());
+ const uintptr_t missaligned_address = aligned_address - left_missalignment;
+ const void* missaligned_ptr = reinterpret_cast<void*> (missaligned_address);
+ const void* realignedPtr = webrtc::GetRightAlign(
+ missaligned_ptr, alignment);
+ EXPECT_EQ(scoped.get(), realignedPtr);
+}
+
+TEST(AlignedMalloc, IncorrectSize) {
+ const size_t incorrect_size = 0;
+ const size_t alignment = 64;
+ EXPECT_FALSE(CorrectUsage(incorrect_size, alignment));
+}
+
+TEST(AlignedMalloc, IncorrectAlignment) {
+ const size_t size = 100;
+ const size_t incorrect_alignment = 63;
+ EXPECT_FALSE(CorrectUsage(size, incorrect_alignment));
+}
+
+TEST(AlignedMalloc, AlignTo2Bytes) {
+ size_t size = 100;
+ size_t alignment = 2;
+ EXPECT_TRUE(CorrectUsage(size, alignment));
+}
+
+TEST(AlignedMalloc, AlignTo32Bytes) {
+ size_t size = 100;
+ size_t alignment = 32;
+ EXPECT_TRUE(CorrectUsage(size, alignment));
+}
+
+TEST(AlignedMalloc, AlignTo128Bytes) {
+ size_t size = 100;
+ size_t alignment = 128;
+ EXPECT_TRUE(CorrectUsage(size, alignment));
+}
diff --git a/system_wrappers/source/system_wrappers.gyp b/system_wrappers/source/system_wrappers.gyp
index 386a2c6..dcde1c7 100644
--- a/system_wrappers/source/system_wrappers.gyp
+++ b/system_wrappers/source/system_wrappers.gyp
@@ -202,6 +202,7 @@
'<(webrtc_root)/test/test.gyp:test_support_main',
],
'sources': [
+ 'aligned_malloc_unittest.cc',
'condition_variable_unittest.cc',
'cpu_wrapper_unittest.cc',
'cpu_measurement_harness.h',
diff --git a/video_engine/main/test/android_test/Android.mk b/video_engine/main/test/android_test/Android.mk
deleted file mode 100644
index c693911..0000000
--- a/video_engine/main/test/android_test/Android.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE_TAGS := tests
-
-LOCAL_SRC_FILES := \
- src/org/webrtc/videoengineapp/WebRTCDemo.java \
- src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java \
- src/org/webrtc/videoengineapp/IViEAndroidCallback.java \
- src/org/webrtc/videoengine/CaptureCapabilityAndroid.java \
- src/org/webrtc/videoengine/VideoCaptureAndroid.java \
- src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java \
- src/org/webrtc/videoengine/ViEAndroidGLES20.java \
- src/org/webrtc/videoengine/ViERenderer.java \
- src/org/webrtc/videoengine/ViESurfaceRenderer.java \
- src/org/webrtc/voiceengine/WebRTCAudioDevice.java
-
-LOCAL_PACKAGE_NAME := webrtc-video-demo
-LOCAL_CERTIFICATE := platform
-
-LOCAL_JNI_SHARED_LIBRARIES := libwebrtc-video-demo-jni
-
-include $(BUILD_PACKAGE)
-
-include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/video_engine/main/test/android_test/AndroidManifest.xml b/video_engine/test/android/AndroidManifest.xml
similarity index 100%
rename from video_engine/main/test/android_test/AndroidManifest.xml
rename to video_engine/test/android/AndroidManifest.xml
diff --git a/video_engine/main/test/android_test/build.xml b/video_engine/test/android/build.xml
similarity index 99%
rename from video_engine/main/test/android_test/build.xml
rename to video_engine/test/android/build.xml
index 6507169..b378a7a 100644
--- a/video_engine/main/test/android_test/build.xml
+++ b/video_engine/test/android/build.xml
@@ -883,7 +883,7 @@
<path refid="project.all.jars.path" />
<path refid="tested.project.classpath" />
</path>
- <property name="webrtc.modules.dir" value="../../../../modules" />
+ <property name="webrtc.modules.dir" value="../../../modules" />
<javac encoding="${java.encoding}"
source="${java.source}" target="${java.target}"
debug="true" extdirs="" includeantruntime="false"
diff --git a/video_engine/main/test/android_test/jni/Android.mk b/video_engine/test/android/jni/Android.mk
similarity index 97%
rename from video_engine/main/test/android_test/jni/Android.mk
rename to video_engine/test/android/jni/Android.mk
index 1c17312..15ec8a0 100644
--- a/video_engine/main/test/android_test/jni/Android.mk
+++ b/video_engine/test/android/jni/Android.mk
@@ -8,7 +8,7 @@
LOCAL_PATH := $(call my-dir)
-MY_LIBS_PATH := ../../../../../../out/Debug/obj.target
+MY_LIBS_PATH := ../../../../../out/Debug/obj.target
include $(CLEAR_VARS)
LOCAL_MODULE := libvoice_engine_core
@@ -261,9 +261,9 @@
LOCAL_C_INCLUDES := \
external/gtest/include \
- $(LOCAL_PATH)/../../../../.. \
- $(LOCAL_PATH)/../../../../include \
- $(LOCAL_PATH)/../../../../../voice_engine/include
+ $(LOCAL_PATH)/../../../.. \
+ $(LOCAL_PATH)/../../../include \
+ $(LOCAL_PATH)/../../../../voice_engine/include
LOCAL_LDLIBS := \
-llog \
diff --git a/video_engine/main/test/android_test/jni/Application.mk b/video_engine/test/android/jni/Application.mk
similarity index 100%
rename from video_engine/main/test/android_test/jni/Application.mk
rename to video_engine/test/android/jni/Application.mk
diff --git a/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h b/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
similarity index 100%
rename from video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h
rename to video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
diff --git a/video_engine/main/test/android_test/jni/vie_android_java_api.cc b/video_engine/test/android/jni/vie_android_java_api.cc
similarity index 100%
rename from video_engine/main/test/android_test/jni/vie_android_java_api.cc
rename to video_engine/test/android/jni/vie_android_java_api.cc
diff --git a/video_engine/main/test/android_test/project.properties b/video_engine/test/android/project.properties
similarity index 100%
rename from video_engine/main/test/android_test/project.properties
rename to video_engine/test/android/project.properties
diff --git a/video_engine/main/test/android_test/res/drawable/logo.png b/video_engine/test/android/res/drawable/logo.png
similarity index 100%
rename from video_engine/main/test/android_test/res/drawable/logo.png
rename to video_engine/test/android/res/drawable/logo.png
Binary files differ
diff --git a/video_engine/main/test/android_test/res/layout/aconfig.xml b/video_engine/test/android/res/layout/aconfig.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/aconfig.xml
rename to video_engine/test/android/res/layout/aconfig.xml
diff --git a/video_engine/main/test/android_test/res/layout/both.xml b/video_engine/test/android/res/layout/both.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/both.xml
rename to video_engine/test/android/res/layout/both.xml
diff --git a/video_engine/main/test/android_test/res/layout/main.xml b/video_engine/test/android/res/layout/main.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/main.xml
rename to video_engine/test/android/res/layout/main.xml
diff --git a/video_engine/main/test/android_test/res/layout/row.xml b/video_engine/test/android/res/layout/row.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/row.xml
rename to video_engine/test/android/res/layout/row.xml
diff --git a/video_engine/main/test/android_test/res/layout/send.xml b/video_engine/test/android/res/layout/send.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/send.xml
rename to video_engine/test/android/res/layout/send.xml
diff --git a/video_engine/main/test/android_test/res/layout/tabhost.xml b/video_engine/test/android/res/layout/tabhost.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/tabhost.xml
rename to video_engine/test/android/res/layout/tabhost.xml
diff --git a/video_engine/main/test/android_test/res/layout/vconfig.xml b/video_engine/test/android/res/layout/vconfig.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/layout/vconfig.xml
rename to video_engine/test/android/res/layout/vconfig.xml
diff --git a/video_engine/main/test/android_test/res/values/strings.xml b/video_engine/test/android/res/values/strings.xml
similarity index 100%
rename from video_engine/main/test/android_test/res/values/strings.xml
rename to video_engine/test/android/res/values/strings.xml
diff --git a/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java b/video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
similarity index 100%
rename from video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
rename to video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
diff --git a/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java b/video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
similarity index 100%
rename from video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
rename to video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
diff --git a/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/WebRTCDemo.java b/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
similarity index 100%
rename from video_engine/main/test/android_test/src/org/webrtc/videoengineapp/WebRTCDemo.java
rename to video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
diff --git a/video_engine/test/auto_test/interface/vie_autotest.h b/video_engine/test/auto_test/interface/vie_autotest.h
index 0608701..f137fdf 100644
--- a/video_engine/test/auto_test/interface/vie_autotest.h
+++ b/video_engine/test/auto_test/interface/vie_autotest.h
@@ -16,24 +16,22 @@
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
#include "common_types.h"
-
-#include "voe_base.h"
-#include "voe_codec.h"
-#include "voe_hardware.h"
-#include "voe_audio_processing.h"
-
-#include "vie_base.h"
-#include "vie_capture.h"
-#include "vie_codec.h"
-#include "vie_file.h"
-#include "vie_network.h"
-#include "vie_render.h"
-#include "vie_rtp_rtcp.h"
-#include "vie_defines.h"
-#include "vie_errors.h"
-#include "video_render_defines.h"
-
-#include "vie_autotest_defines.h"
+#include "gflags/gflags.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_file.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_defines.h"
+#include "voice_engine/include/voe_audio_processing.h"
+#include "voice_engine/include/voe_base.h"
+#include "voice_engine/include/voe_codec.h"
+#include "voice_engine/include/voe_hardware.h"
#ifndef WEBRTC_ANDROID
#include <string>
@@ -44,6 +42,8 @@
class TbVideoChannel;
class ViEToFileRenderer;
+DECLARE_bool(include_timing_dependent_tests);
+
// This class provides a bunch of methods, implemented across several .cc
// files, which runs tests on the video engine. All methods will report
// errors using standard googletest macros, except when marked otherwise.
diff --git a/video_engine/test/auto_test/source/vie_autotest.cc b/video_engine/test/auto_test/source/vie_autotest.cc
index 28bedc9..e81e6ed 100644
--- a/video_engine/test/auto_test/source/vie_autotest.cc
+++ b/video_engine/test/auto_test/source/vie_autotest.cc
@@ -25,6 +25,10 @@
#include "video_engine/test/libvietest/include/tb_interfaces.h"
#include "video_engine/test/libvietest/include/tb_video_channel.h"
+DEFINE_bool(include_timing_dependent_tests, true,
+ "If true, we will include tests / parts of tests that are known "
+ "to break in slow execution environments (such as valgrind).");
+
// ViETest implementation
FILE* ViETest::log_file_ = NULL;
char* ViETest::log_str_ = NULL;
diff --git a/video_engine/test/auto_test/source/vie_autotest_capture.cc b/video_engine/test/auto_test/source/vie_autotest_capture.cc
index 8a7887c..f232d89 100644
--- a/video_engine/test/auto_test/source/vie_autotest_capture.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_capture.cc
@@ -237,6 +237,7 @@
/// **************************************************************
// Testing finished. Tear down Video Engine
/// **************************************************************
+ delete dev_info;
// Stop all started capture devices.
for (int device_index = 0; device_index < number_of_capture_devices;
diff --git a/video_engine/test/auto_test/source/vie_autotest_codec.cc b/video_engine/test/auto_test/source/vie_autotest_codec.cc
index b1cb960..b9a6480 100644
--- a/video_engine/test/auto_test/source/vie_autotest_codec.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_codec.cc
@@ -203,20 +203,10 @@
EXPECT_EQ(video_codec.codecType,
codec_observer.incoming_codec_.codecType);
- int max_number_of_possible_frames = video_codec.maxFramerate
- * KAutoTestSleepTimeMs / 1000;
-
- if (video_codec.codecType == webrtc::kVideoCodecI420) {
- // Don't expect too much from I420, it requires a lot of bandwidth.
- EXPECT_GT(frame_counter.num_frames_, 0);
- } else {
-#ifdef WEBRTC_ANDROID
- // To get the autotest to pass on some slow devices
- EXPECT_GT(frame_counter.num_frames_, max_number_of_possible_frames / 6);
-#else
- EXPECT_GT(frame_counter.num_frames_, max_number_of_possible_frames / 4);
-#endif
- }
+ // This requirement is quite relaxed, but it's hard to say what's an
+ // acceptable number of received frames when we take into account the
+ // wide variety of devices (and that we run under valgrind).
+ EXPECT_GT(frame_counter.num_frames_, 0);
EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(
video_channel));
diff --git a/video_engine/test/auto_test/source/vie_autotest_file.cc b/video_engine/test/auto_test/source/vie_autotest_file.cc
index 17b4d0c..4d5ee74 100644
--- a/video_engine/test/auto_test/source/vie_autotest_file.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_file.cc
@@ -271,6 +271,7 @@
AutoTestSleep(TEST_SPACING);
// GetCaptureDeviceSnapshot
+ if (FLAGS_include_timing_dependent_tests)
{
ViETest::Log("Testing GetCaptureDeviceSnapshot(int, ViEPicture)");
ViETest::Log("Taking a picture to use for displaying ViEPictures "
@@ -318,6 +319,7 @@
AutoTestSleep(TEST_SPACING);
// GetCaptureDeviceSnapshot
+ if (FLAGS_include_timing_dependent_tests)
{
ViETest::Log("Testing GetCaptureDeviceSnapshot(int, char*)");
ViETest::Log("Taking snapshot from capture device %d", captureId);
@@ -346,6 +348,7 @@
AutoTestSleep(TEST_SPACING);
// Testing: SetCaptureDeviceImage
+ if (FLAGS_include_timing_dependent_tests)
{
ViETest::Log("Testing SetCaptureDeviceImage(int, ViEPicture)");
EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
@@ -361,6 +364,7 @@
AutoTestSleep(TEST_SPACING);
// testing SetRenderStartImage(videoChannel, renderStartImage);
+ if (FLAGS_include_timing_dependent_tests)
{
ViETest::Log("Testing SetRenderStartImage(int, char*)");
// set render image, then stop capture and stop render to display it
@@ -406,6 +410,7 @@
// testing SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
// RENDER_TIMEOUT);
+ if (FLAGS_include_timing_dependent_tests)
{
ViETest::Log("Testing SetRenderTimeoutImage(int, char*)");
ViETest::Log("Stopping capture device to induce timeout of %d ms",
diff --git a/video_engine/test/auto_test/source/vie_autotest_render.cc b/video_engine/test/auto_test/source/vie_autotest_render.cc
index 8bfc9db..0870ca1 100644
--- a/video_engine/test/auto_test/source/vie_autotest_render.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_render.cc
@@ -18,6 +18,7 @@
#include "video_render.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "tb_interfaces.h"
#include "tb_video_channel.h"
#include "tb_capture_device.h"
@@ -57,15 +58,13 @@
virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
uint32_t time_stamp,
- int64_t render_time)
- {
- if (bufferSize != _width * _height * 3 / 2)
- {
- ViETest::Log("incorrect render buffer received, of length = %d\n",
- bufferSize);
- return 0;
- }
+ int64_t render_time) {
+ if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {
+ ViETest::Log("Incorrect render buffer received, of length = %d\n",
+ bufferSize);
return 0;
+ }
+ return 0;
}
public:
diff --git a/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc b/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
index 57eac00..858a34a 100644
--- a/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
@@ -8,19 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-//
-// vie_autotest_rtp_rtcp.cc
-//
#include <iostream>
#include "engine_configurations.h"
-#include "tb_capture_device.h"
-#include "tb_external_transport.h"
-#include "tb_interfaces.h"
-#include "tb_video_channel.h"
-#include "testsupport/fileutils.h"
-#include "vie_autotest.h"
-#include "vie_autotest_defines.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+#include "test/testsupport/fileutils.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
class ViERtpObserver: public webrtc::ViERTPObserver
{
@@ -150,11 +147,13 @@
AutoTestSleep(1000);
- char remoteCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
- memset(remoteCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
- EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteRTCPCName(
- tbChannel.videoChannel, remoteCName));
- EXPECT_STRCASEEQ(sendCName, remoteCName);
+ if (FLAGS_include_timing_dependent_tests) {
+ char remoteCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
+ memset(remoteCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
+ EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteRTCPCName(
+ tbChannel.videoChannel, remoteCName));
+ EXPECT_STRCASEEQ(sendCName, remoteCName);
+ }
//
// Statistics
@@ -228,10 +227,12 @@
&estimated_bandwidth));
EXPECT_GT(estimated_bandwidth, 0u);
- EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(
- tbChannel.videoChannel,
- &estimated_bandwidth));
- EXPECT_GT(estimated_bandwidth, 0u);
+ if (FLAGS_include_timing_dependent_tests) {
+ EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(
+ tbChannel.videoChannel,
+ &estimated_bandwidth));
+ EXPECT_GT(estimated_bandwidth, 0u);
+ }
// Check that rec stats extended max is greater than what we've sent.
EXPECT_GE(recExtendedMax, sentExtendedMax);
@@ -298,16 +299,20 @@
AutoTestSleep(2000);
unsigned int receivedSSRC = myTransport.ReceivedSSRC();
ViETest::Log("Received SSRC %u\n", receivedSSRC);
- EXPECT_EQ(setSSRC, receivedSSRC);
- unsigned int localSSRC = 0;
- EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, localSSRC));
- EXPECT_EQ(setSSRC, localSSRC);
+ if (FLAGS_include_timing_dependent_tests) {
+ EXPECT_EQ(setSSRC, receivedSSRC);
- unsigned int remoteSSRC = 0;
- EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteSSRC(
- tbChannel.videoChannel, remoteSSRC));
- EXPECT_EQ(setSSRC, remoteSSRC);
+ unsigned int localSSRC = 0;
+ EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(
+ tbChannel.videoChannel, localSSRC));
+ EXPECT_EQ(setSSRC, localSSRC);
+
+ unsigned int remoteSSRC = 0;
+ EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteSSRC(
+ tbChannel.videoChannel, remoteSSRC));
+ EXPECT_EQ(setSSRC, remoteSSRC);
+ }
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
diff --git a/video_engine/test/libvietest/include/vie_to_file_renderer.h b/video_engine/test/libvietest/include/vie_to_file_renderer.h
index 08559af..c272f00 100644
--- a/video_engine/test/libvietest/include/vie_to_file_renderer.h
+++ b/video_engine/test/libvietest/include/vie_to_file_renderer.h
@@ -42,7 +42,8 @@
int FrameSizeChange(unsigned int width, unsigned int height,
unsigned int number_of_streams);
- int DeliverFrame(unsigned char* buffer, int buffer_size,
+ int DeliverFrame(unsigned char* buffer,
+ int buffer_size,
uint32_t time_stamp,
int64_t render_time);
diff --git a/video_engine/vie_renderer.cc b/video_engine/vie_renderer.cc
index 9359262..588f8f6 100644
--- a/video_engine/vie_renderer.cc
+++ b/video_engine/vie_renderer.cc
@@ -195,8 +195,7 @@
case kVideoARGB4444:
case kVideoARGB1555 :
{
- ConvertFromI420(video_frame.Buffer(), video_frame.Width(), type, 0,
- video_frame.Width(), video_frame.Height(),
+ ConvertFromI420(video_frame, video_frame.Width(), type, 0,
converted_frame_->Buffer());
}
break;