Updated stable to r1967
git-svn-id: http://webrtc.googlecode.com/svn/stable/src@1973 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/common_audio/signal_processing/Android.mk b/common_audio/signal_processing/Android.mk
index bdc8796..26ebb30 100644
--- a/common_audio/signal_processing/Android.mk
+++ b/common_audio/signal_processing/Android.mk
@@ -57,7 +57,8 @@
LOCAL_SRC_FILES += \
cross_correlation_neon.s \
downsample_fast_neon.s \
- min_max_operations_neon.s
+ min_max_operations_neon.s \
+ vector_scaling_operations_neon.s
LOCAL_CFLAGS += \
$(MY_ARM_CFLAGS_NEON)
else
diff --git a/common_audio/signal_processing/ilbc_specific_functions.c b/common_audio/signal_processing/ilbc_specific_functions.c
index 5a9e577..3588ba4 100644
--- a/common_audio/signal_processing/ilbc_specific_functions.c
+++ b/common_audio/signal_processing/ilbc_specific_functions.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,35 +11,16 @@
/*
* This file contains implementations of the iLBC specific functions
- * WebRtcSpl_ScaleAndAddVectorsWithRound()
* WebRtcSpl_ReverseOrderMultArrayElements()
* WebRtcSpl_ElementwiseVectorMult()
* WebRtcSpl_AddVectorsAndShift()
* WebRtcSpl_AddAffineVectorToVector()
* WebRtcSpl_AffineTransformVector()
*
- * The description header can be found in signal_processing_library.h
- *
*/
#include "signal_processing_library.h"
-void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16 *vector1, WebRtc_Word16 scale1,
- WebRtc_Word16 *vector2, WebRtc_Word16 scale2,
- WebRtc_Word16 right_shifts, WebRtc_Word16 *out,
- WebRtc_Word16 vector_length)
-{
- int i;
- WebRtc_Word16 roundVal;
- roundVal = 1 << right_shifts;
- roundVal = roundVal >> 1;
- for (i = 0; i < vector_length; i++)
- {
- out[i] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(vector1[i], scale1)
- + WEBRTC_SPL_MUL_16_16(vector2[i], scale2) + roundVal) >> right_shifts);
- }
-}
-
void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16 *out, G_CONST WebRtc_Word16 *in,
G_CONST WebRtc_Word16 *win,
WebRtc_Word16 vector_length,
diff --git a/common_audio/signal_processing/include/signal_processing_library.h b/common_audio/signal_processing/include/signal_processing_library.h
index 03e3eda..348b5c8 100644
--- a/common_audio/signal_processing/include/signal_processing_library.h
+++ b/common_audio/signal_processing/include/signal_processing_library.h
@@ -268,17 +268,37 @@
WebRtc_Word16 gain2, int right_shifts2,
WebRtc_Word16* out_vector,
int vector_length);
+
+// Performs the vector operation:
+// out_vector[k] = ((scale1 * in_vector1[k]) + (scale2 * in_vector2[k])
+// + round_value) >> right_shifts,
+// where round_value = (1 << right_shifts) >> 1.
+//
+// Input:
+// - in_vector1 : Input vector 1
+// - in_vector1_scale : Gain to be used for vector 1
+// - in_vector2 : Input vector 2
+// - in_vector2_scale : Gain to be used for vector 2
+// - right_shifts : Number of right bit shifts to be applied
+// - length : Number of elements in the input vectors
+//
+// Output:
+// - out_vector : Output vector
+// Return value : 0 if OK, -1 if (in_vector1 == NULL
+// || in_vector2 == NULL || out_vector == NULL
+// || length <= 0 || right_shifts < 0).
+int WebRtcSpl_ScaleAndAddVectorsWithRound(const int16_t* in_vector1,
+ int16_t in_vector1_scale,
+ const int16_t* in_vector2,
+ int16_t in_vector2_scale,
+ int right_shifts,
+ int16_t* out_vector,
+ int length);
+
// End: Vector scaling operations.
// iLBC specific functions. Implementations in ilbc_specific_functions.c.
// Description at bottom of file.
-void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16* in_vector1,
- WebRtc_Word16 scale1,
- WebRtc_Word16* in_vector2,
- WebRtc_Word16 scale2,
- WebRtc_Word16 right_shifts,
- WebRtc_Word16* out_vector,
- WebRtc_Word16 vector_length);
void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16* out_vector,
G_CONST WebRtc_Word16* in_vector,
G_CONST WebRtc_Word16* window,
@@ -992,30 +1012,6 @@
//
//
-// WebRtcSpl_ScaleAndAddVectorsWithRound(...)
-//
-// Performs the vector operation:
-//
-// out_vector[k] = ((scale1*in_vector1[k]) + (scale2*in_vector2[k])
-// + round_value) >> right_shifts
-//
-// where:
-//
-// round_value = (1<<right_shifts)>>1
-//
-// Input:
-// - in_vector1 : Input vector 1
-// - scale1 : Gain to be used for vector 1
-// - in_vector2 : Input vector 2
-// - scale2 : Gain to be used for vector 2
-// - right_shifts : Number of right bit shifts to be applied
-// - vector_length : Number of elements in the input vectors
-//
-// Output:
-// - out_vector : Output vector
-//
-
-//
// WebRtcSpl_ReverseOrderMultArrayElements(...)
//
// Performs the vector operation:
diff --git a/common_audio/signal_processing/vector_scaling_operations.c b/common_audio/signal_processing/vector_scaling_operations.c
index 20d239c..76601ad 100644
--- a/common_audio/signal_processing/vector_scaling_operations.c
+++ b/common_audio/signal_processing/vector_scaling_operations.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -17,9 +17,7 @@
* WebRtcSpl_ScaleVector()
* WebRtcSpl_ScaleVectorWithSat()
* WebRtcSpl_ScaleAndAddVectors()
- *
- * The description header can be found in signal_processing_library.h
- *
+ * WebRtcSpl_ScaleAndAddVectorsWithRound()
*/
#include "signal_processing_library.h"
@@ -149,3 +147,30 @@
+ (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gain2, *in2ptr++, shift2);
}
}
+
+#if !(defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
+int WebRtcSpl_ScaleAndAddVectorsWithRound(const int16_t* in_vector1,
+ int16_t in_vector1_scale,
+ const int16_t* in_vector2,
+ int16_t in_vector2_scale,
+ int right_shifts,
+ int16_t* out_vector,
+ int length) {
+ int i = 0;
+ int round_value = (1 << right_shifts) >> 1;
+
+ if (in_vector1 == NULL || in_vector2 == NULL || out_vector == NULL ||
+ length <= 0 || right_shifts < 0) {
+ return -1;
+ }
+
+ for (i = 0; i < length; i++) {
+ out_vector[i] = (int16_t)((
+ WEBRTC_SPL_MUL_16_16(in_vector1[i], in_vector1_scale)
+ + WEBRTC_SPL_MUL_16_16(in_vector2[i], in_vector2_scale)
+ + round_value) >> right_shifts);
+ }
+
+ return 0;
+}
+#endif
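
A minimal usage sketch of the new API, with hypothetical input values; here round_value = (1 << 4) >> 1 = 8, so the call computes the rounded average of the two vectors:

#include <stdio.h>
#include "signal_processing_library.h"

int main(void) {
  // out[k] = (8 * a[k] + 8 * b[k] + 8) >> 4, i.e. the rounded mean of a and b.
  const int16_t a[4] = { 100, -200, 300, -400 };
  const int16_t b[4] = { 50, 50, 50, 50 };
  int16_t out[4];
  if (WebRtcSpl_ScaleAndAddVectorsWithRound(a, 8, b, 8, 4, out, 4) != 0) {
    return 1;  // NULL pointer, non-positive length or negative shift count.
  }
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  // 75 -75 175 -175
  return 0;
}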
diff --git a/common_audio/signal_processing/vector_scaling_operations_neon.s b/common_audio/signal_processing/vector_scaling_operations_neon.s
new file mode 100644
index 0000000..003943b
--- /dev/null
+++ b/common_audio/signal_processing/vector_scaling_operations_neon.s
@@ -0,0 +1,88 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS. All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ vector_scaling_operations_neon.s
+@ This file contains the function WebRtcSpl_ScaleAndAddVectorsWithRound(),
+@ optimized for ARM Neon platform. Output is bit-exact with the reference
+@ C code in vector_scaling_operations.c.
+
+.arch armv7-a
+.fpu neon
+
+.align 2
+.global WebRtcSpl_ScaleAndAddVectorsWithRound
+
+WebRtcSpl_ScaleAndAddVectorsWithRound:
+.fnstart
+
+ push {r4-r9}
+
+ ldr r4, [sp, #32] @ length
+ ldr r5, [sp, #28] @ out_vector
+ ldrsh r6, [sp, #24] @ right_shifts
+
+ cmp r4, #0
+ ble END @ Return if length <= 0.
+
+ cmp r4, #8
+ blt SET_ROUND_VALUE
+
+ vdup.16 d26, r1 @ in_vector1_scale
+ vdup.16 d27, r3 @ in_vector2_scale
+
+ @ Neon instructions can only right shift by an immediate value. To shift right
+@ by a register value, we have to do a left shift by the negative value.
+ rsb r7, r6, #0
+ vdup.16 q12, r7 @ -right_shifts
+
+ bic r7, r4, #7 @ Counter for LOOP_UNROLLED_BY_8: length / 8 * 8.
+
+LOOP_UNROLLED_BY_8:
+ vld1.16 {d28, d29}, [r0]! @ in_vector1[]
+ vld1.16 {d30, d31}, [r2]! @ in_vector2[]
+ vmull.s16 q0, d28, d26
+ vmull.s16 q1, d29, d26
+ vmull.s16 q2, d30, d27
+ vmull.s16 q3, d31, d27
+ vadd.s32 q0, q2
+ vadd.s32 q1, q3
+ vrshl.s32 q0, q12 @ Round shift right by right_shifts.
+ vrshl.s32 q1, q12
+ vmovn.i32 d0, q0 @ Cast to 16 bit values.
+ vmovn.i32 d1, q1
+ subs r7, #8
+ vst1.16 {d0, d1}, [r5]!
+ bgt LOOP_UNROLLED_BY_8
+
+ ands r4, #7 @ Counter for LOOP_NO_UNROLLING: length % 8.
+ beq END
+
+SET_ROUND_VALUE:
+ mov r9, #1
+ lsl r9, r6
+ lsr r9, #1
+
+LOOP_NO_UNROLLING:
+ ldrh r7, [r0], #2
+ ldrh r8, [r2], #2
+ smulbb r7, r7, r1
+ smulbb r8, r8, r3
+ subs r4, #1
+ add r7, r9
+ add r7, r8
+ asr r7, r6
+ strh r7, [r5], #2
+ bne LOOP_NO_UNROLLING
+
+END:
+ pop {r4-r9}
+ bx lr
+
+.fnend
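
The vrshl.s32 instructions with the negated shift amount perform a rounding right shift; the equivalent scalar operation, as an illustrative C sketch built from the file's own formula:

// Rounding right shift, matching the reference C code:
// (x + round_value) >> right_shifts, with round_value = (1 << right_shifts) >> 1.
static int32_t RoundingShiftRight(int32_t x, int right_shifts) {
  const int32_t round_value = (1 << right_shifts) >> 1;
  return (x + round_value) >> right_shifts;
}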
diff --git a/common_audio/vad/include/webrtc_vad.h b/common_audio/vad/include/webrtc_vad.h
index 1e860ae..07e5cdd 100644
--- a/common_audio/vad/include/webrtc_vad.h
+++ b/common_audio/vad/include/webrtc_vad.h
@@ -62,25 +62,21 @@
// - handle [i/o] : Instance that should be initialized.
//
// returns : 0 - (OK),
-// -1 - (NULL pointer or Default mode could not be set)
+// -1 - (NULL pointer or Default mode could not be set).
int WebRtcVad_Init(VadInst* handle);
-/****************************************************************************
- * WebRtcVad_set_mode(...)
- *
- * This function initializes a VAD instance
- *
- * Input:
- * - vad_inst : VAD instance
- * - mode : Aggressiveness setting (0, 1, 2, or 3)
- *
- * Output:
- * - vad_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcVad_set_mode(VadInst *vad_inst, int mode);
+// Sets the VAD operating mode. A more aggressive (higher mode) VAD is more
+// restrictive in reporting speech. In other words, the probability of being
+// speech when the VAD returns 1 increases with increasing mode. As a
+// consequence, the missed detection rate also goes up.
+//
+// - handle [i/o] : VAD instance.
+// - mode [i] : Aggressiveness mode (0, 1, 2, or 3).
+//
+// returns : 0 - (OK),
+// -1 - (NULL pointer, mode could not be set or the VAD instance
+// has not been initialized).
+int WebRtcVad_set_mode(VadInst* handle, int mode);
/****************************************************************************
* WebRtcVad_Process(...)
@@ -100,10 +96,8 @@
* 0 - Non-active Voice
* -1 - Error
*/
-WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
- WebRtc_Word16 fs,
- WebRtc_Word16 *speech_frame,
- WebRtc_Word16 frame_length);
+int16_t WebRtcVad_Process(VadInst* vad_inst, int16_t fs, int16_t* speech_frame,
+ int16_t frame_length);
#ifdef __cplusplus
}
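
A minimal calling-sequence sketch for this API, assuming the companion WebRtcVad_Create()/WebRtcVad_Free() pair declared in the same header; the frame contents are hypothetical:

#include "webrtc_vad.h"

// Classifies one 10 ms frame (80 samples at 8000 Hz).
// Returns 1 (active voice), 0 (non-active voice) or -1 (error).
int16_t DetectSpeechOnce(int16_t* frame) {
  VadInst* handle = NULL;
  int16_t result = -1;
  if (WebRtcVad_Create(&handle) == 0 &&
      WebRtcVad_Init(handle) == 0 &&
      WebRtcVad_set_mode(handle, 2) == 0) {  // 2 = aggressive mode.
    result = WebRtcVad_Process(handle, 8000, frame, 80);
  }
  WebRtcVad_Free(handle);  // Safe to call with NULL; returns -1 in that case.
  return result;
}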
diff --git a/common_audio/vad/vad_core.c b/common_audio/vad/vad_core.c
index 2c6e6a9..ef66658 100644
--- a/common_audio/vad/vad_core.c
+++ b/common_audio/vad/vad_core.c
@@ -12,46 +12,45 @@
#include "signal_processing_library.h"
#include "typedefs.h"
-#include "vad_defines.h"
#include "vad_filterbank.h"
#include "vad_gmm.h"
#include "vad_sp.h"
// Spectrum Weighting
-static const WebRtc_Word16 kSpectrumWeight[6] = { 6, 8, 10, 12, 14, 16 };
-static const WebRtc_Word16 kNoiseUpdateConst = 655; // Q15
-static const WebRtc_Word16 kSpeechUpdateConst = 6554; // Q15
-static const WebRtc_Word16 kBackEta = 154; // Q8
+static const int16_t kSpectrumWeight[6] = { 6, 8, 10, 12, 14, 16 };
+static const int16_t kNoiseUpdateConst = 655; // Q15
+static const int16_t kSpeechUpdateConst = 6554; // Q15
+static const int16_t kBackEta = 154; // Q8
// Minimum difference between the two models, Q5
-static const WebRtc_Word16 kMinimumDifference[6] = {
+static const int16_t kMinimumDifference[6] = {
544, 544, 576, 576, 576, 576 };
// Upper limit of mean value for speech model, Q7
-static const WebRtc_Word16 kMaximumSpeech[6] = {
+static const int16_t kMaximumSpeech[6] = {
11392, 11392, 11520, 11520, 11520, 11520 };
// Minimum value for mean value
-static const WebRtc_Word16 kMinimumMean[2] = { 640, 768 };
+static const int16_t kMinimumMean[2] = { 640, 768 };
// Upper limit of mean value for noise model, Q7
-static const WebRtc_Word16 kMaximumNoise[6] = {
+static const int16_t kMaximumNoise[6] = {
9216, 9088, 8960, 8832, 8704, 8576 };
// Start values for the Gaussian models, Q7
// Weights for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataWeights[12] = {
+static const int16_t kNoiseDataWeights[12] = {
34, 62, 72, 66, 53, 25, 94, 66, 56, 62, 75, 103 };
// Weights for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataWeights[12] = {
+static const int16_t kSpeechDataWeights[12] = {
48, 82, 45, 87, 50, 47, 80, 46, 83, 41, 78, 81 };
// Means for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataMeans[12] = {
+static const int16_t kNoiseDataMeans[12] = {
6738, 4892, 7065, 6715, 6771, 3369, 7646, 3863, 7820, 7266, 5020, 4362 };
// Means for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataMeans[12] = {
+static const int16_t kSpeechDataMeans[12] = {
8306, 10085, 10078, 11823, 11843, 6309, 9473, 9571, 10879, 7581, 8180, 7483
};
// Stds for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataStds[12] = {
+static const int16_t kNoiseDataStds[12] = {
378, 1064, 493, 582, 688, 593, 474, 697, 475, 688, 421, 455 };
// Stds for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataStds[12] = {
+static const int16_t kSpeechDataStds[12] = {
555, 505, 567, 524, 585, 1231, 509, 828, 492, 1540, 1079, 850 };
// Constants used in GmmProbability().
@@ -66,6 +65,31 @@
static const short kDefaultMode = 0;
static const int kInitCheck = 42;
+// Constants used in WebRtcVad_set_mode_core().
+//
+// Thresholds for different frame lengths (10 ms, 20 ms and 30 ms).
+//
+// Mode 0, Quality.
+static const int16_t kOverHangMax1Q[3] = { 8, 4, 3 };
+static const int16_t kOverHangMax2Q[3] = { 14, 7, 5 };
+static const int16_t kLocalThresholdQ[3] = { 24, 21, 24 };
+static const int16_t kGlobalThresholdQ[3] = { 57, 48, 57 };
+// Mode 1, Low bitrate.
+static const int16_t kOverHangMax1LBR[3] = { 8, 4, 3 };
+static const int16_t kOverHangMax2LBR[3] = { 14, 7, 5 };
+static const int16_t kLocalThresholdLBR[3] = { 37, 32, 37 };
+static const int16_t kGlobalThresholdLBR[3] = { 100, 80, 100 };
+// Mode 2, Aggressive.
+static const int16_t kOverHangMax1AGG[3] = { 6, 3, 2 };
+static const int16_t kOverHangMax2AGG[3] = { 9, 5, 3 };
+static const int16_t kLocalThresholdAGG[3] = { 82, 78, 82 };
+static const int16_t kGlobalThresholdAGG[3] = { 285, 260, 285 };
+// Mode 3, Very aggressive.
+static const int16_t kOverHangMax1VAG[3] = { 6, 3, 2 };
+static const int16_t kOverHangMax2VAG[3] = { 9, 5, 3 };
+static const int16_t kLocalThresholdVAG[3] = { 94, 94, 94 };
+static const int16_t kGlobalThresholdVAG[3] = { 1100, 1050, 1100 };
+
// Calculates the probabilities for both speech and background noise using
// Gaussian Mixture Models. A hypothesis-test is performed to decide which type
// of signal is most probable.
@@ -76,30 +100,30 @@
// - frame_length [i] : Number of input samples
//
// - returns : the VAD decision (0 - noise, 1 - speech).
-static int16_t GmmProbability(VadInstT *inst, WebRtc_Word16 *feature_vector,
- WebRtc_Word16 total_power, int frame_length)
+static int16_t GmmProbability(VadInstT *inst, int16_t *feature_vector,
+ int16_t total_power, int frame_length)
{
int n, k;
- WebRtc_Word16 backval;
- WebRtc_Word16 h0, h1;
- WebRtc_Word16 ratvec, xval;
- WebRtc_Word16 vadflag;
- WebRtc_Word16 shifts0, shifts1;
- WebRtc_Word16 tmp16, tmp16_1, tmp16_2;
- WebRtc_Word16 diff, nr, pos;
- WebRtc_Word16 nmk, nmk2, nmk3, smk, smk2, nsk, ssk;
- WebRtc_Word16 delt, ndelt;
- WebRtc_Word16 maxspe, maxmu;
- WebRtc_Word16 deltaN[NUM_TABLE_VALUES], deltaS[NUM_TABLE_VALUES];
- WebRtc_Word16 ngprvec[NUM_TABLE_VALUES], sgprvec[NUM_TABLE_VALUES];
- WebRtc_Word32 h0test, h1test;
- WebRtc_Word32 tmp32_1, tmp32_2;
- WebRtc_Word32 dotVal;
- WebRtc_Word32 nmid, smid;
- WebRtc_Word32 probn[NUM_MODELS], probs[NUM_MODELS];
- WebRtc_Word16 *nmean1ptr, *nmean2ptr, *smean1ptr, *smean2ptr, *nstd1ptr, *nstd2ptr,
+ int16_t backval;
+ int16_t h0, h1;
+ int16_t ratvec, xval;
+ int16_t vadflag;
+ int16_t shifts0, shifts1;
+ int16_t tmp16, tmp16_1, tmp16_2;
+ int16_t diff, nr, pos;
+ int16_t nmk, nmk2, nmk3, smk, smk2, nsk, ssk;
+ int16_t delt, ndelt;
+ int16_t maxspe, maxmu;
+ int16_t deltaN[kTableSize], deltaS[kTableSize];
+ int16_t ngprvec[kTableSize], sgprvec[kTableSize];
+ int32_t h0test, h1test;
+ int32_t tmp32_1, tmp32_2;
+ int32_t dotVal;
+ int32_t nmid, smid;
+ int32_t probn[kNumGaussians], probs[kNumGaussians];
+ int16_t *nmean1ptr, *nmean2ptr, *smean1ptr, *smean2ptr, *nstd1ptr, *nstd2ptr,
*sstd1ptr, *sstd2ptr;
- WebRtc_Word16 overhead1, overhead2, individualTest, totalTest;
+ int16_t overhead1, overhead2, individualTest, totalTest;
// Set the thresholds to different values based on frame length
if (frame_length == 80)
@@ -125,22 +149,22 @@
totalTest = inst->total[2];
}
- if (total_power > MIN_ENERGY)
+ if (total_power > kMinEnergy)
{ // If signal present at all
// Set pointers to the gaussian parameters
nmean1ptr = &inst->noise_means[0];
- nmean2ptr = &inst->noise_means[NUM_CHANNELS];
+ nmean2ptr = &inst->noise_means[kNumChannels];
smean1ptr = &inst->speech_means[0];
- smean2ptr = &inst->speech_means[NUM_CHANNELS];
+ smean2ptr = &inst->speech_means[kNumChannels];
nstd1ptr = &inst->noise_stds[0];
- nstd2ptr = &inst->noise_stds[NUM_CHANNELS];
+ nstd2ptr = &inst->noise_stds[kNumChannels];
sstd1ptr = &inst->speech_stds[0];
- sstd2ptr = &inst->speech_stds[NUM_CHANNELS];
+ sstd2ptr = &inst->speech_stds[kNumChannels];
vadflag = 0;
dotVal = 0;
- for (n = 0; n < NUM_CHANNELS; n++)
+ for (n = 0; n < kNumChannels; n++)
{ // For all channels
pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
@@ -149,22 +173,22 @@
// Probability for Noise, Q7 * Q20 = Q27
tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean1ptr++, *nstd1ptr++,
&deltaN[pos]);
- probn[0] = (WebRtc_Word32)(kNoiseDataWeights[n] * tmp32_1);
+ probn[0] = (int32_t)(kNoiseDataWeights[n] * tmp32_1);
tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean2ptr++, *nstd2ptr++,
&deltaN[pos + 1]);
- probn[1] = (WebRtc_Word32)(kNoiseDataWeights[n + NUM_CHANNELS] * tmp32_1);
+ probn[1] = (int32_t)(kNoiseDataWeights[n + kNumChannels] * tmp32_1);
h0test = probn[0] + probn[1]; // Q27
- h0 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h0test, 12); // Q15
+ h0 = (int16_t)WEBRTC_SPL_RSHIFT_W32(h0test, 12); // Q15
// Probability for Speech
tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean1ptr++, *sstd1ptr++,
&deltaS[pos]);
- probs[0] = (WebRtc_Word32)(kSpeechDataWeights[n] * tmp32_1);
+ probs[0] = (int32_t)(kSpeechDataWeights[n] * tmp32_1);
tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean2ptr++, *sstd2ptr++,
&deltaS[pos + 1]);
- probs[1] = (WebRtc_Word32)(kSpeechDataWeights[n + NUM_CHANNELS] * tmp32_1);
+ probs[1] = (int32_t)(kSpeechDataWeights[n + kNumChannels] * tmp32_1);
h1test = probs[0] + probs[1]; // Q27
- h1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h1test, 12); // Q15
+ h1 = (int16_t)WEBRTC_SPL_RSHIFT_W32(h1test, 12); // Q15
// Get likelihood ratio. Approximate log2(H1/H0) with shifts0 - shifts1
shifts0 = WebRtcSpl_NormW32(h0test);
@@ -198,7 +222,7 @@
{
tmp32_1 = probn[0] & 0xFFFFF000; // Q27
tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2); // Q29
- ngprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h0);
+ ngprvec[pos] = (int16_t)WebRtcSpl_DivW32W16(tmp32_2, h0);
ngprvec[pos + 1] = 16384 - ngprvec[pos];
} else
{
@@ -211,7 +235,7 @@
{
tmp32_1 = probs[0] & 0xFFFFF000;
tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2);
- sgprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h1);
+ sgprvec[pos] = (int16_t)WebRtcSpl_DivW32W16(tmp32_2, h1);
sgprvec[pos + 1] = 16384 - sgprvec[pos];
} else
{
@@ -235,7 +259,7 @@
maxspe = 12800;
// Update the model's parameters
- for (n = 0; n < NUM_CHANNELS; n++)
+ for (n = 0; n < kNumChannels; n++)
{
pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
@@ -245,19 +269,19 @@
// Compute the "global" mean, that is the sum of the two means weighted
nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr); // Q7 * Q7
- nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS],
- *(nmean1ptr+NUM_CHANNELS));
- tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 6); // Q8
+ nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+kNumChannels],
+ *(nmean1ptr+kNumChannels));
+ tmp16_1 = (int16_t)WEBRTC_SPL_RSHIFT_W32(nmid, 6); // Q8
- for (k = 0; k < NUM_MODELS; k++)
+ for (k = 0; k < kNumGaussians; k++)
{
nr = pos + k;
- nmean2ptr = nmean1ptr + k * NUM_CHANNELS;
- smean2ptr = smean1ptr + k * NUM_CHANNELS;
- nstd2ptr = nstd1ptr + k * NUM_CHANNELS;
- sstd2ptr = sstd1ptr + k * NUM_CHANNELS;
+ nmean2ptr = nmean1ptr + k * kNumChannels;
+ smean2ptr = smean1ptr + k * kNumChannels;
+ nstd2ptr = nstd1ptr + k * kNumChannels;
+ sstd2ptr = sstd1ptr + k * kNumChannels;
nmk = *nmean2ptr;
smk = *smean2ptr;
nsk = *nstd2ptr;
@@ -270,9 +294,9 @@
// deltaN = (x-mu)/sigma^2
// ngprvec[k] = probn[k]/(probn[0] + probn[1])
- delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ngprvec[nr],
+ delt = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ngprvec[nr],
deltaN[nr], 11); // Q14*Q11
- nmk2 = nmk + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
+ nmk2 = nmk + (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(delt,
kNoiseUpdateConst,
22); // Q7+(Q14*Q15>>22)
}
@@ -280,7 +304,7 @@
// Long term correction of the noise mean
ndelt = WEBRTC_SPL_LSHIFT_W16(backval, 4);
ndelt -= tmp16_1; // Q8 - Q8
- nmk3 = nmk2 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ndelt,
+ nmk3 = nmk2 + (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ndelt,
kBackEta,
9); // Q7+(Q8*Q8)>>9
@@ -299,10 +323,10 @@
// deltaS = (x-mu)/sigma^2
// sgprvec[k] = probn[k]/(probn[0] + probn[1])
- delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sgprvec[nr],
+ delt = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sgprvec[nr],
deltaS[nr],
11); // (Q14*Q11)>>11=Q14
- tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
+ tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(delt,
kSpeechUpdateConst,
21) + 1;
smk2 = smk + (tmp16 >> 1); // Q7 + (Q14 * Q15 >> 22)
@@ -321,18 +345,18 @@
tmp16 = feature_vector[n] - tmp16; // Q4
tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaS[nr], tmp16, 3);
- tmp32_2 = tmp32_1 - (WebRtc_Word32)4096; // Q12
+ tmp32_2 = tmp32_1 - (int32_t)4096; // Q12
tmp16 = WEBRTC_SPL_RSHIFT_W16((sgprvec[nr]), 2);
- tmp32_1 = (WebRtc_Word32)(tmp16 * tmp32_2);// (Q15>>3)*(Q14>>2)=Q12*Q12=Q24
+ tmp32_1 = (int32_t)(tmp16 * tmp32_2);// (Q15>>3)*(Q14>>2)=Q12*Q12=Q24
tmp32_2 = WEBRTC_SPL_RSHIFT_W32(tmp32_1, 4); // Q20
// 0.1 * Q20 / Q7 = Q13
if (tmp32_2 > 0)
- tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, ssk * 10);
+ tmp16 = (int16_t)WebRtcSpl_DivW32W16(tmp32_2, ssk * 10);
else
{
- tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_2, ssk * 10);
+ tmp16 = (int16_t)WebRtcSpl_DivW32W16(-tmp32_2, ssk * 10);
tmp16 = -tmp16;
}
// divide by 4 giving an update factor of 0.025
@@ -351,17 +375,17 @@
// (Q15>>3) * (Q14>>2) = Q12 * Q12 = Q24
tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaN[nr], tmp16, 3) - 4096;
tmp16 = WEBRTC_SPL_RSHIFT_W16((ngprvec[nr]+2), 2);
- tmp32_2 = (WebRtc_Word32)(tmp16 * tmp32_1);
+ tmp32_2 = (int32_t)(tmp16 * tmp32_1);
tmp32_1 = WEBRTC_SPL_RSHIFT_W32(tmp32_2, 14);
// Q20 * approx 0.001 (2^-10=0.0009766)
// Q20 / Q7 = Q13
- tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_1, nsk);
+ tmp16 = (int16_t)WebRtcSpl_DivW32W16(tmp32_1, nsk);
if (tmp32_1 > 0)
- tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_1, nsk);
+ tmp16 = (int16_t)WebRtcSpl_DivW32W16(tmp32_1, nsk);
else
{
- tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_1, nsk);
+ tmp16 = (int16_t)WebRtcSpl_DivW32W16(-tmp32_1, nsk);
tmp16 = -tmp16;
}
tmp16 += 32; // Rounding
@@ -376,15 +400,15 @@
// Separate models if they are too close - nmid in Q14
nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr);
- nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS], *nmean2ptr);
+ nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+kNumChannels], *nmean2ptr);
// smid in Q14
smid = WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n], *smean1ptr);
- smid += WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n+NUM_CHANNELS], *smean2ptr);
+ smid += WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n+kNumChannels], *smean2ptr);
// diff = "global" speech mean - "global" noise mean
- diff = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 9);
- tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 9);
+ diff = (int16_t)WEBRTC_SPL_RSHIFT_W32(smid, 9);
+ tmp16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(nmid, 9);
diff -= tmp16;
if (diff < kMinimumDifference[n])
@@ -394,8 +418,8 @@
// tmp16_1 = ~0.8 * (kMinimumDifference - diff) in Q7
// tmp16_2 = ~0.2 * (kMinimumDifference - diff) in Q7
- tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(13, tmp16, 2);
- tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(3, tmp16, 2);
+ tmp16_1 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(13, tmp16, 2);
+ tmp16_2 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(3, tmp16, 2);
// First Gauss, speech model
tmp16 = tmp16_1 + *smean1ptr;
@@ -405,7 +429,7 @@
// Second Gauss, speech model
tmp16 = tmp16_1 + *smean2ptr;
*smean2ptr = tmp16;
- smid += WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n+NUM_CHANNELS]);
+ smid += WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n+kNumChannels]);
// First Gauss, noise model
tmp16 = *nmean1ptr - tmp16_2;
@@ -416,12 +440,12 @@
// Second Gauss, noise model
tmp16 = *nmean2ptr - tmp16_2;
*nmean2ptr = tmp16;
- nmid += WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n+NUM_CHANNELS]);
+ nmid += WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n+kNumChannels]);
}
// Control that the speech & noise means do not drift to much
maxspe = kMaximumSpeech[n];
- tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 7);
+ tmp16_2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(smid, 7);
if (tmp16_2 > maxspe)
{ // Upper limit of speech model
tmp16_2 -= maxspe;
@@ -430,7 +454,7 @@
*smean2ptr -= tmp16_2;
}
- tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 7);
+ tmp16_2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(nmid, 7);
if (tmp16_2 > kMaximumNoise[n])
{
tmp16_2 -= kMaximumNoise[n];
@@ -491,7 +515,7 @@
sizeof(self->downsampling_filter_states));
// Read initial PDF parameters.
- for (i = 0; i < NUM_TABLE_VALUES; i++) {
+ for (i = 0; i < kTableSize; i++) {
self->noise_means[i] = kNoiseDataMeans[i];
self->speech_means[i] = kSpeechDataMeans[i];
self->noise_stds[i] = kNoiseDataStds[i];
@@ -499,7 +523,7 @@
}
// Initialize Index and Minimum value vectors.
- for (i = 0; i < 16 * NUM_CHANNELS; i++) {
+ for (i = 0; i < 16 * kNumChannels; i++) {
self->low_value_vector[i] = 10000;
self->index_vector[i] = 0;
}
@@ -512,7 +536,7 @@
memset(self->hp_filter_state, 0, sizeof(self->hp_filter_state));
// Initialize mean value memory, for WebRtcVad_FindMinimum().
- for (i = 0; i < NUM_CHANNELS; i++) {
+ for (i = 0; i < kNumChannels; i++) {
self->mean_value[i] = 1600;
}
@@ -527,94 +551,71 @@
}
// Set aggressiveness mode
-int WebRtcVad_set_mode_core(VadInstT *inst, int mode)
-{
+int WebRtcVad_set_mode_core(VadInstT* self, int mode) {
+ int return_value = 0;
- if (mode == 0)
- {
- // Quality mode
- inst->over_hang_max_1[0] = OHMAX1_10MS_Q; // Overhang short speech burst
- inst->over_hang_max_1[1] = OHMAX1_20MS_Q; // Overhang short speech burst
- inst->over_hang_max_1[2] = OHMAX1_30MS_Q; // Overhang short speech burst
- inst->over_hang_max_2[0] = OHMAX2_10MS_Q; // Overhang long speech burst
- inst->over_hang_max_2[1] = OHMAX2_20MS_Q; // Overhang long speech burst
- inst->over_hang_max_2[2] = OHMAX2_30MS_Q; // Overhang long speech burst
+ switch (mode) {
+ case 0:
+ // Quality mode.
+ memcpy(self->over_hang_max_1, kOverHangMax1Q,
+ sizeof(self->over_hang_max_1));
+ memcpy(self->over_hang_max_2, kOverHangMax2Q,
+ sizeof(self->over_hang_max_2));
+ memcpy(self->individual, kLocalThresholdQ,
+ sizeof(self->individual));
+ memcpy(self->total, kGlobalThresholdQ,
+ sizeof(self->total));
+ break;
+ case 1:
+ // Low bitrate mode.
+ memcpy(self->over_hang_max_1, kOverHangMax1LBR,
+ sizeof(self->over_hang_max_1));
+ memcpy(self->over_hang_max_2, kOverHangMax2LBR,
+ sizeof(self->over_hang_max_2));
+ memcpy(self->individual, kLocalThresholdLBR,
+ sizeof(self->individual));
+ memcpy(self->total, kGlobalThresholdLBR,
+ sizeof(self->total));
+ break;
+ case 2:
+ // Aggressive mode.
+ memcpy(self->over_hang_max_1, kOverHangMax1AGG,
+ sizeof(self->over_hang_max_1));
+ memcpy(self->over_hang_max_2, kOverHangMax2AGG,
+ sizeof(self->over_hang_max_2));
+ memcpy(self->individual, kLocalThresholdAGG,
+ sizeof(self->individual));
+ memcpy(self->total, kGlobalThresholdAGG,
+ sizeof(self->total));
+ break;
+ case 3:
+ // Very aggressive mode.
+ memcpy(self->over_hang_max_1, kOverHangMax1VAG,
+ sizeof(self->over_hang_max_1));
+ memcpy(self->over_hang_max_2, kOverHangMax2VAG,
+ sizeof(self->over_hang_max_2));
+ memcpy(self->individual, kLocalThresholdVAG,
+ sizeof(self->individual));
+ memcpy(self->total, kGlobalThresholdVAG,
+ sizeof(self->total));
+ break;
+ default:
+ return_value = -1;
+ break;
+ }
- inst->individual[0] = INDIVIDUAL_10MS_Q;
- inst->individual[1] = INDIVIDUAL_20MS_Q;
- inst->individual[2] = INDIVIDUAL_30MS_Q;
-
- inst->total[0] = TOTAL_10MS_Q;
- inst->total[1] = TOTAL_20MS_Q;
- inst->total[2] = TOTAL_30MS_Q;
- } else if (mode == 1)
- {
- // Low bitrate mode
- inst->over_hang_max_1[0] = OHMAX1_10MS_LBR; // Overhang short speech burst
- inst->over_hang_max_1[1] = OHMAX1_20MS_LBR; // Overhang short speech burst
- inst->over_hang_max_1[2] = OHMAX1_30MS_LBR; // Overhang short speech burst
- inst->over_hang_max_2[0] = OHMAX2_10MS_LBR; // Overhang long speech burst
- inst->over_hang_max_2[1] = OHMAX2_20MS_LBR; // Overhang long speech burst
- inst->over_hang_max_2[2] = OHMAX2_30MS_LBR; // Overhang long speech burst
-
- inst->individual[0] = INDIVIDUAL_10MS_LBR;
- inst->individual[1] = INDIVIDUAL_20MS_LBR;
- inst->individual[2] = INDIVIDUAL_30MS_LBR;
-
- inst->total[0] = TOTAL_10MS_LBR;
- inst->total[1] = TOTAL_20MS_LBR;
- inst->total[2] = TOTAL_30MS_LBR;
- } else if (mode == 2)
- {
- // Aggressive mode
- inst->over_hang_max_1[0] = OHMAX1_10MS_AGG; // Overhang short speech burst
- inst->over_hang_max_1[1] = OHMAX1_20MS_AGG; // Overhang short speech burst
- inst->over_hang_max_1[2] = OHMAX1_30MS_AGG; // Overhang short speech burst
- inst->over_hang_max_2[0] = OHMAX2_10MS_AGG; // Overhang long speech burst
- inst->over_hang_max_2[1] = OHMAX2_20MS_AGG; // Overhang long speech burst
- inst->over_hang_max_2[2] = OHMAX2_30MS_AGG; // Overhang long speech burst
-
- inst->individual[0] = INDIVIDUAL_10MS_AGG;
- inst->individual[1] = INDIVIDUAL_20MS_AGG;
- inst->individual[2] = INDIVIDUAL_30MS_AGG;
-
- inst->total[0] = TOTAL_10MS_AGG;
- inst->total[1] = TOTAL_20MS_AGG;
- inst->total[2] = TOTAL_30MS_AGG;
- } else if (mode == 3)
- {
- // Very aggressive mode
- inst->over_hang_max_1[0] = OHMAX1_10MS_VAG; // Overhang short speech burst
- inst->over_hang_max_1[1] = OHMAX1_20MS_VAG; // Overhang short speech burst
- inst->over_hang_max_1[2] = OHMAX1_30MS_VAG; // Overhang short speech burst
- inst->over_hang_max_2[0] = OHMAX2_10MS_VAG; // Overhang long speech burst
- inst->over_hang_max_2[1] = OHMAX2_20MS_VAG; // Overhang long speech burst
- inst->over_hang_max_2[2] = OHMAX2_30MS_VAG; // Overhang long speech burst
-
- inst->individual[0] = INDIVIDUAL_10MS_VAG;
- inst->individual[1] = INDIVIDUAL_20MS_VAG;
- inst->individual[2] = INDIVIDUAL_30MS_VAG;
-
- inst->total[0] = TOTAL_10MS_VAG;
- inst->total[1] = TOTAL_20MS_VAG;
- inst->total[2] = TOTAL_30MS_VAG;
- } else
- {
- return -1;
- }
-
- return 0;
+ return return_value;
}
// Calculate VAD decision by first extracting feature values and then calculate
// probability for both speech and background noise.
-WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
- int frame_length)
+int16_t WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length)
{
- WebRtc_Word16 len, vad;
- WebRtc_Word16 speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB)
- WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+ int16_t len, vad;
+ int16_t speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB)
+ int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
// Downsample signal 32->16->8 before doing VAD
@@ -631,11 +632,11 @@
return vad;
}
-WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
- int frame_length)
+int16_t WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length)
{
- WebRtc_Word16 len, vad;
- WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+ int16_t len, vad;
+ int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
// Wideband: Downsample signal before doing VAD
WebRtcVad_Downsampling(speech_frame, speechNB, inst->downsampling_filter_states,
@@ -647,10 +648,10 @@
return vad;
}
-WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
- int frame_length)
+int16_t WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length)
{
- WebRtc_Word16 feature_vector[NUM_CHANNELS], total_power;
+ int16_t feature_vector[kNumChannels], total_power;
// Get power in the bands
total_power = WebRtcVad_CalculateFeatures(inst, speech_frame, frame_length,
diff --git a/common_audio/vad/vad_core.h b/common_audio/vad/vad_core.h
index c82fbce..a5c420c 100644
--- a/common_audio/vad/vad_core.h
+++ b/common_audio/vad/vad_core.h
@@ -17,33 +17,37 @@
#define WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
#include "typedefs.h"
-#include "vad_defines.h"
+
+enum { kNumChannels = 6 }; // Number of frequency bands (named channels).
+enum { kNumGaussians = 2 }; // Number of Gaussians per channel in the GMM.
+enum { kTableSize = kNumChannels * kNumGaussians };
+enum { kMinEnergy = 10 }; // Minimum energy required to trigger audio signal.
typedef struct VadInstT_
{
- WebRtc_Word16 vad;
- WebRtc_Word32 downsampling_filter_states[4];
- WebRtc_Word16 noise_means[NUM_TABLE_VALUES];
- WebRtc_Word16 speech_means[NUM_TABLE_VALUES];
- WebRtc_Word16 noise_stds[NUM_TABLE_VALUES];
- WebRtc_Word16 speech_stds[NUM_TABLE_VALUES];
+ int16_t vad;
+ int32_t downsampling_filter_states[4];
+ int16_t noise_means[kTableSize];
+ int16_t speech_means[kTableSize];
+ int16_t noise_stds[kTableSize];
+ int16_t speech_stds[kTableSize];
// TODO(bjornv): Change to |frame_count|.
- WebRtc_Word32 frame_counter;
- WebRtc_Word16 over_hang; // Over Hang
- WebRtc_Word16 num_of_speech;
+ int32_t frame_counter;
+ int16_t over_hang; // Over Hang
+ int16_t num_of_speech;
// TODO(bjornv): Change to |age_vector|.
- WebRtc_Word16 index_vector[16 * NUM_CHANNELS];
- WebRtc_Word16 low_value_vector[16 * NUM_CHANNELS];
+ int16_t index_vector[16 * kNumChannels];
+ int16_t low_value_vector[16 * kNumChannels];
// TODO(bjornv): Change to |median|.
- WebRtc_Word16 mean_value[NUM_CHANNELS];
- WebRtc_Word16 upper_state[5];
- WebRtc_Word16 lower_state[5];
- WebRtc_Word16 hp_filter_state[4];
- WebRtc_Word16 over_hang_max_1[3];
- WebRtc_Word16 over_hang_max_2[3];
- WebRtc_Word16 individual[3];
- WebRtc_Word16 total[3];
+ int16_t mean_value[kNumChannels];
+ int16_t upper_state[5];
+ int16_t lower_state[5];
+ int16_t hp_filter_state[4];
+ int16_t over_hang_max_1[3];
+ int16_t over_hang_max_2[3];
+ int16_t individual[3];
+ int16_t total[3];
int init_flag;
@@ -75,7 +79,7 @@
* -1 - Error
*/
-int WebRtcVad_set_mode_core(VadInstT* inst, int mode);
+int WebRtcVad_set_mode_core(VadInstT* self, int mode);
/****************************************************************************
* WebRtcVad_CalcVad32khz(...)
@@ -96,11 +100,11 @@
* 0 - No active speech
* 1-6 - Active speech
*/
-WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT* inst, WebRtc_Word16* speech_frame,
- int frame_length);
-WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT* inst, WebRtc_Word16* speech_frame,
- int frame_length);
-WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT* inst, WebRtc_Word16* speech_frame,
- int frame_length);
+int16_t WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length);
+int16_t WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length);
+int16_t WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+ int frame_length);
#endif // WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
diff --git a/common_audio/vad/vad_defines.h b/common_audio/vad/vad_defines.h
deleted file mode 100644
index 5d1539d..0000000
--- a/common_audio/vad/vad_defines.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-/*
- * This header file includes the macros used in VAD.
- */
-
-#ifndef WEBRTC_VAD_DEFINES_H_
-#define WEBRTC_VAD_DEFINES_H_
-
-#define NUM_CHANNELS 6 // Eight frequency bands
-#define NUM_MODELS 2 // Number of Gaussian models
-#define NUM_TABLE_VALUES NUM_CHANNELS * NUM_MODELS
-
-#define MIN_ENERGY 10
-#define ALPHA1 6553 // 0.2 in Q15
-#define ALPHA2 32439 // 0.99 in Q15
-// Mode 0, Quality thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_Q 24
-#define INDIVIDUAL_20MS_Q 21 // (log10(2)*66)<<2 ~=16
-#define INDIVIDUAL_30MS_Q 24
-
-#define TOTAL_10MS_Q 57
-#define TOTAL_20MS_Q 48
-#define TOTAL_30MS_Q 57
-
-#define OHMAX1_10MS_Q 8 // Max Overhang 1
-#define OHMAX2_10MS_Q 14 // Max Overhang 2
-#define OHMAX1_20MS_Q 4 // Max Overhang 1
-#define OHMAX2_20MS_Q 7 // Max Overhang 2
-#define OHMAX1_30MS_Q 3
-#define OHMAX2_30MS_Q 5
-
-// Mode 1, Low bitrate thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_LBR 37
-#define INDIVIDUAL_20MS_LBR 32
-#define INDIVIDUAL_30MS_LBR 37
-
-#define TOTAL_10MS_LBR 100
-#define TOTAL_20MS_LBR 80
-#define TOTAL_30MS_LBR 100
-
-#define OHMAX1_10MS_LBR 8 // Max Overhang 1
-#define OHMAX2_10MS_LBR 14 // Max Overhang 2
-#define OHMAX1_20MS_LBR 4
-#define OHMAX2_20MS_LBR 7
-
-#define OHMAX1_30MS_LBR 3
-#define OHMAX2_30MS_LBR 5
-
-// Mode 2, Very aggressive thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_AGG 82
-#define INDIVIDUAL_20MS_AGG 78
-#define INDIVIDUAL_30MS_AGG 82
-
-#define TOTAL_10MS_AGG 285 //580
-#define TOTAL_20MS_AGG 260
-#define TOTAL_30MS_AGG 285
-
-#define OHMAX1_10MS_AGG 6 // Max Overhang 1
-#define OHMAX2_10MS_AGG 9 // Max Overhang 2
-#define OHMAX1_20MS_AGG 3
-#define OHMAX2_20MS_AGG 5
-
-#define OHMAX1_30MS_AGG 2
-#define OHMAX2_30MS_AGG 3
-
-// Mode 3, Super aggressive thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_VAG 94
-#define INDIVIDUAL_20MS_VAG 94
-#define INDIVIDUAL_30MS_VAG 94
-
-#define TOTAL_10MS_VAG 1100 //1700
-#define TOTAL_20MS_VAG 1050
-#define TOTAL_30MS_VAG 1100
-
-#define OHMAX1_10MS_VAG 6 // Max Overhang 1
-#define OHMAX2_10MS_VAG 9 // Max Overhang 2
-#define OHMAX1_20MS_VAG 3
-#define OHMAX2_20MS_VAG 5
-
-#define OHMAX1_30MS_VAG 2
-#define OHMAX2_30MS_VAG 3
-
-#endif // WEBRTC_VAD_DEFINES_H_
diff --git a/common_audio/vad/vad_filterbank.c b/common_audio/vad/vad_filterbank.c
index 2f5db44..b626ad0 100644
--- a/common_audio/vad/vad_filterbank.c
+++ b/common_audio/vad/vad_filterbank.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -14,7 +14,6 @@
#include "signal_processing_library.h"
#include "typedefs.h"
-#include "vad_defines.h"
// Constants used in LogOfEnergy().
static const int16_t kLogConst = 24660; // 160*log10(2) in Q9.
@@ -151,7 +150,7 @@
// - total_energy [i/o] : An external energy updated with the energy of
// |data_in|.
// NOTE: |total_energy| is only updated if
-// |total_energy| <= MIN_ENERGY.
+// |total_energy| <= |kMinEnergy|.
// - log_energy [o] : 10 * log10("energy of |data_in|") given in Q4.
static void LogOfEnergy(const int16_t* data_in, int data_length,
int16_t offset, int16_t* total_energy,
@@ -228,18 +227,18 @@
*log_energy += offset;
// Update the approximate |total_energy| with the energy of |data_in|, if
- // |total_energy| has not exceeded MIN_ENERGY. |total_energy| is used as an
+ // |total_energy| has not exceeded |kMinEnergy|. |total_energy| is used as an
// energy indicator in WebRtcVad_GmmProbability() in vad_core.c.
- if (*total_energy <= MIN_ENERGY) {
+ if (*total_energy <= kMinEnergy) {
if (tot_rshifts >= 0) {
- // We know by construction that the |energy| > MIN_ENERGY in Q0, so add an
- // arbitrary value such that |total_energy| exceeds MIN_ENERGY.
- *total_energy += MIN_ENERGY + 1;
+ // We know by construction that the |energy| > |kMinEnergy| in Q0, so add
+ // an arbitrary value such that |total_energy| exceeds |kMinEnergy|.
+ *total_energy += kMinEnergy + 1;
} else {
// By construction |energy| is represented by 15 bits, hence any number of
// right shifted |energy| will fit in an int16_t. In addition, adding the
// value to |total_energy| is wrap around safe as long as
- // MIN_ENERGY < 8192.
+ // |kMinEnergy| < 8192.
*total_energy += (int16_t) (energy >> -tot_rshifts); // Q0.
}
}
@@ -266,7 +265,7 @@
assert(data_length >= 0);
assert(data_length <= 240);
- assert(4 < NUM_CHANNELS - 1); // Checking maximum |frequency_band|.
+ assert(4 < kNumChannels - 1); // Checking maximum |frequency_band|.
// Split at 2000 Hz and downsample.
SplitFilter(in_ptr, data_length, &self->upper_state[frequency_band],
diff --git a/common_audio/vad/vad_filterbank.h b/common_audio/vad/vad_filterbank.h
index 0c5c00c..b5fd69e 100644
--- a/common_audio/vad/vad_filterbank.h
+++ b/common_audio/vad/vad_filterbank.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -19,7 +19,7 @@
#include "vad_core.h"
// Takes |data_length| samples of |data_in| and calculates the logarithm of the
-// energy of each of the |NUM_CHANNELS| = 6 frequency bands used by the VAD:
+// energy of each of the |kNumChannels| = 6 frequency bands used by the VAD:
// 80 Hz - 250 Hz
// 250 Hz - 500 Hz
// 500 Hz - 1000 Hz
@@ -30,7 +30,7 @@
// The values are given in Q4 and written to |features|. Further, an approximate
// overall energy is returned. The return value is used in
// WebRtcVad_GmmProbability() as a signal indicator, hence it is arbitrary above
-// the threshold MIN_ENERGY.
+// the threshold |kMinEnergy|.
//
// - self [i/o] : State information of the VAD.
// - data_in [i] : Input audio data, for feature extraction.
diff --git a/common_audio/vad/vad_filterbank_unittest.cc b/common_audio/vad/vad_filterbank_unittest.cc
index 320fda9..ef01146 100644
--- a/common_audio/vad/vad_filterbank_unittest.cc
+++ b/common_audio/vad/vad_filterbank_unittest.cc
@@ -16,7 +16,6 @@
extern "C" {
#include "vad_core.h"
-#include "vad_defines.h"
#include "vad_filterbank.h"
}
@@ -27,14 +26,14 @@
TEST_F(VadTest, vad_filterbank) {
VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
static const int16_t kReference[kNumValidFrameLengths] = { 48, 11, 11 };
- static const int16_t kFeatures[kNumValidFrameLengths * NUM_CHANNELS] = {
+ static const int16_t kFeatures[kNumValidFrameLengths * kNumChannels] = {
1213, 759, 587, 462, 434, 272,
1479, 1385, 1291, 1200, 1103, 1099,
1732, 1692, 1681, 1629, 1436, 1436
};
- static const int16_t kOffsetVector[NUM_CHANNELS] = {
+ static const int16_t kOffsetVector[kNumChannels] = {
368, 368, 272, 176, 176, 176 };
- int16_t features[NUM_CHANNELS];
+ int16_t features[kNumChannels];
// Construct a speech signal that will trigger the VAD in all modes. It is
// known that (i * i) will wrap around, but that doesn't matter in this case.
@@ -50,8 +49,8 @@
EXPECT_EQ(kReference[frame_length_index],
WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
features));
- for (int k = 0; k < NUM_CHANNELS; ++k) {
- EXPECT_EQ(kFeatures[k + frame_length_index * NUM_CHANNELS],
+ for (int k = 0; k < kNumChannels; ++k) {
+ EXPECT_EQ(kFeatures[k + frame_length_index * kNumChannels],
features[k]);
}
frame_length_index++;
@@ -66,7 +65,7 @@
if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
features));
- for (int k = 0; k < NUM_CHANNELS; ++k) {
+ for (int k = 0; k < kNumChannels; ++k) {
EXPECT_EQ(kOffsetVector[k], features[k]);
}
}
@@ -82,7 +81,7 @@
ASSERT_EQ(0, WebRtcVad_InitCore(self));
EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
features));
- for (int k = 0; k < NUM_CHANNELS; ++k) {
+ for (int k = 0; k < kNumChannels; ++k) {
EXPECT_EQ(kOffsetVector[k], features[k]);
}
}
diff --git a/common_audio/vad/vad_sp.c b/common_audio/vad/vad_sp.c
index 4fface3..74de361 100644
--- a/common_audio/vad/vad_sp.c
+++ b/common_audio/vad/vad_sp.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -14,11 +14,13 @@
#include "signal_processing_library.h"
#include "typedefs.h"
-#include "vad_defines.h"
+#include "vad_core.h"
// Allpass filter coefficients, upper and lower, in Q13.
// Upper: 0.64, Lower: 0.17.
-static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 }; // Q13
+static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 }; // Q13.
+static const int16_t kSmoothingDown = 6553; // 0.2 in Q15.
+static const int16_t kSmoothingUp = 32439; // 0.99 in Q15.
// TODO(bjornv): Move this function to vad_filterbank.c.
// Downsampling filter based on splitting filter and allpass functions.
@@ -72,7 +74,7 @@
int16_t* value_ptr = &self->low_value_vector[offset];
int16_t *p1, *p2, *p3;
- assert(channel < NUM_CHANNELS);
+ assert(channel < kNumChannels);
// Each value in |low_value_vector| is getting 1 loop older.
// Update age of each value in |age_ptr|, and remove old values.
@@ -167,9 +169,9 @@
// Smooth the median value.
if (self->frame_counter > 0) {
if (current_median < self->mean_value[channel]) {
- alpha = (int16_t) ALPHA1; // 0.2 in Q15.
+ alpha = kSmoothingDown; // 0.2 in Q15.
} else {
- alpha = (int16_t) ALPHA2; // 0.99 in Q15.
+ alpha = kSmoothingUp; // 0.99 in Q15.
}
}
tmp32 = WEBRTC_SPL_MUL_16_16(alpha + 1, self->mean_value[channel]);
diff --git a/common_audio/vad/vad_sp_unittest.cc b/common_audio/vad/vad_sp_unittest.cc
index 03c844e..2b25316 100644
--- a/common_audio/vad/vad_sp_unittest.cc
+++ b/common_audio/vad/vad_sp_unittest.cc
@@ -16,7 +16,6 @@
extern "C" {
#include "vad_core.h"
-#include "vad_defines.h"
#include "vad_sp.h"
}
@@ -63,7 +62,7 @@
// ordered.
for (int16_t i = 0; i < 16; ++i) {
int16_t value = 500 * (i + 1);
- for (int j = 0; j < NUM_CHANNELS; ++j) {
+ for (int j = 0; j < kNumChannels; ++j) {
// Use values both above and below initialized value.
EXPECT_EQ(kReferenceMin[i], WebRtcVad_FindMinimum(self, value, j));
EXPECT_EQ(kReferenceMin[i + 16], WebRtcVad_FindMinimum(self, 12000, j));
diff --git a/common_audio/vad/vad_unittest.cc b/common_audio/vad/vad_unittest.cc
index 4b3c224..cf82f80 100644
--- a/common_audio/vad/vad_unittest.cc
+++ b/common_audio/vad/vad_unittest.cc
@@ -12,6 +12,7 @@
#include <stdlib.h>
+#include "common_audio/signal_processing/include/signal_processing_library.h"
#include "gtest/gtest.h"
#include "typedefs.h"
#include "webrtc_vad.h"
@@ -61,7 +62,7 @@
speech[i] = (i * i);
}
- // Null instance tests
+ // NULL instance tests
EXPECT_EQ(-1, WebRtcVad_Create(NULL));
EXPECT_EQ(-1, WebRtcVad_Init(NULL));
EXPECT_EQ(-1, WebRtcVad_Assign(NULL, NULL));
@@ -91,9 +92,14 @@
// WebRtcVad_Init() test
ASSERT_EQ(0, WebRtcVad_Init(handle));
- // WebRtcVad_set_mode() invalid modes tests
- EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[0] - 1));
- EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[kModesSize - 1] + 1));
+ // WebRtcVad_set_mode() invalid modes tests. Tries smallest supported value
+ // minus one and largest supported value plus one.
+ EXPECT_EQ(-1, WebRtcVad_set_mode(handle,
+ WebRtcSpl_MinValueW32(kModes,
+ kModesSize) - 1));
+ EXPECT_EQ(-1, WebRtcVad_set_mode(handle,
+ WebRtcSpl_MaxValueW32(kModes,
+ kModesSize) + 1));
// WebRtcVad_Process() tests
// NULL speech pointer
diff --git a/common_audio/vad/webrtc_vad.c b/common_audio/vad/webrtc_vad.c
index c00c962..40ada95 100644
--- a/common_audio/vad/webrtc_vad.c
+++ b/common_audio/vad/webrtc_vad.c
@@ -65,30 +65,24 @@
return WebRtcVad_InitCore((VadInstT*) handle);
}
-int WebRtcVad_set_mode(VadInst *vad_inst, int mode)
-{
- VadInstT* vad_ptr;
+// TODO(bjornv): Move WebRtcVad_set_mode_core() code here.
+int WebRtcVad_set_mode(VadInst* handle, int mode) {
+ VadInstT* self = (VadInstT*) handle;
- if (vad_inst == NULL)
- {
- return -1;
- }
+ if (handle == NULL) {
+ return -1;
+ }
+ if (self->init_flag != kInitCheck) {
+ return -1;
+ }
- vad_ptr = (VadInstT*)vad_inst;
- if (vad_ptr->init_flag != kInitCheck)
- {
- return -1;
- }
-
- return WebRtcVad_set_mode_core((VadInstT*)vad_inst, mode);
+ return WebRtcVad_set_mode_core(self, mode);
}
-WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
- WebRtc_Word16 fs,
- WebRtc_Word16 *speech_frame,
- WebRtc_Word16 frame_length)
+int16_t WebRtcVad_Process(VadInst* vad_inst, int16_t fs, int16_t* speech_frame,
+ int16_t frame_length)
{
- WebRtc_Word16 vad;
+ int16_t vad;
VadInstT* vad_ptr;
if (vad_inst == NULL)
diff --git a/common_types.h b/common_types.h
index bd82762..c1806c6 100644
--- a/common_types.h
+++ b/common_types.h
@@ -513,6 +513,7 @@
VideoCodecComplexity complexity;
VP8ResilienceMode resilience;
unsigned char numberOfTemporalLayers;
+ bool denoisingOn;
};
// Unknown specific
diff --git a/common_video/jpeg/include/jpeg.h b/common_video/jpeg/include/jpeg.h
index 05e759c..cfb7fce 100644
--- a/common_video/jpeg/include/jpeg.h
+++ b/common_video/jpeg/include/jpeg.h
@@ -49,7 +49,7 @@
private:
jpeg_compress_struct* _cinfo;
- char _fileName[256];
+ char _fileName[257];
};
class JpegDecoder
diff --git a/common_video/jpeg/jpeg.cc b/common_video/jpeg/jpeg.cc
index 00586d4..3afbbab 100644
--- a/common_video/jpeg/jpeg.cc
+++ b/common_video/jpeg/jpeg.cc
@@ -74,6 +74,7 @@
if (fileName)
{
strncpy(_fileName, fileName, 256);
+ _fileName[256] = 0;
}
return 0;
}
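
For context, strncpy() does not null-terminate when the source is at least as long as the count, so the terminator must be written explicitly; the general pattern, sketched with a hypothetical source string |name|:

char buf[257];
strncpy(buf, name, sizeof(buf) - 1);  // Copies at most 256 characters.
buf[sizeof(buf) - 1] = '\0';          // Guarantee termination.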
diff --git a/common_video/libyuv/libyuv.cc b/common_video/libyuv/libyuv.cc
index d1ced7a..5b2b815 100644
--- a/common_video/libyuv/libyuv.cc
+++ b/common_video/libyuv/libyuv.cc
@@ -13,11 +13,7 @@
#include <assert.h>
// LibYuv includes
-#ifdef WEBRTC_ANDROID
-#include "libyuv/files/include/libyuv.h"
-#else
#include "third_party/libyuv/include/libyuv.h"
-#endif
namespace webrtc {
diff --git a/common_video/libyuv/scaler.cc b/common_video/libyuv/scaler.cc
index a12462c..fda7854 100644
--- a/common_video/libyuv/scaler.cc
+++ b/common_video/libyuv/scaler.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,11 +11,7 @@
#include "common_video/libyuv/include/scaler.h"
// LibYuv
-#ifdef WEBRTC_ANDROID
-#include "libyuv/files/include/libyuv.h"
-#else
#include "third_party/libyuv/include/libyuv.h"
-#endif
namespace webrtc {
diff --git a/engine_configurations.h b/engine_configurations.h
index 61d3ecf..5778a14 100644
--- a/engine_configurations.h
+++ b/engine_configurations.h
@@ -77,6 +77,7 @@
#define WEBRTC_VOICE_ENGINE_RTP_RTCP_API
#define WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
#define WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#define WEBRTC_VOICE_ENGINE_FILE_API
#ifndef WEBRTC_CHROMIUM_BUILD
#define WEBRTC_VOICE_ENGINE_CALL_REPORT_API
diff --git a/modules/audio_coding/main/source/acm_celt.cc b/modules/audio_coding/main/source/acm_celt.cc
index c6a9efa..a1462ec 100644
--- a/modules/audio_coding/main/source/acm_celt.cc
+++ b/modules/audio_coding/main/source/acm_celt.cc
@@ -199,12 +199,17 @@
}
}
- // Initiate decoder.
+ // Initiate decoder, both master and slave parts.
if (WebRtcCelt_DecoderInit(dec_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
"InternalInitDecoder: init decoder failed for Celt.");
return -1;
}
+ if (WebRtcCelt_DecoderInitSlave(dec_inst_ptr_) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+ "InternalInitDecoder: init decoder failed for Celt.");
+ return -1;
+ }
return 0;
}
diff --git a/modules/audio_coding/main/source/acm_neteq.cc b/modules/audio_coding/main/source/acm_neteq.cc
index be25918..69942bd 100644
--- a/modules/audio_coding/main/source/acm_neteq.cc
+++ b/modules/audio_coding/main/source/acm_neteq.cc
@@ -669,7 +669,7 @@
// Check for errors that can be recovered from:
// RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int errorCode = WebRtcNetEQ_GetErrorCode(_inst[0]);
+ int errorCode = WebRtcNetEQ_GetErrorCode(_inst[1]);
if(errorCode != 2003)
{
// Cannot recover; return an error
diff --git a/modules/audio_coding/main/source/audio_coding_module_impl.cc b/modules/audio_coding/main/source/audio_coding_module_impl.cc
index 5f60ce9..85950c8 100644
--- a/modules/audio_coding/main/source/audio_coding_module_impl.cc
+++ b/modules/audio_coding/main/source/audio_coding_module_impl.cc
@@ -1842,6 +1842,13 @@
_codecs[i]->UpdateDecoderSampFreq(i);
_netEq.SetReceivedStereo(_stereoReceive[i]);
+ // If we have a change in expected number of channels,
+ // flush packet buffers in NetEQ.
+ if ((_stereoReceive[i] && (_expected_channels == 1)) ||
+ (!_stereoReceive[i] && (_expected_channels == 2))) {
+ _netEq.FlushBuffers();
+ }
+
// Store number of channels we expect to receive for the
// current payload type.
if (_stereoReceive[i]) {
diff --git a/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h b/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
index 214bd10..c6f19bb 100644
--- a/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
+++ b/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
@@ -331,7 +331,7 @@
#define SET_CELT_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_Decode; \
inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcCelt_DecodePlc; \
+ inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
@@ -342,8 +342,8 @@
#define SET_CELTSLAVE_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_DecodeSlave; \
inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcCelt_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
+ inst.funcDecodePLC=NULL; \
+ inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInitSlave; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
diff --git a/modules/audio_coding/neteq/neteq.gypi b/modules/audio_coding/neteq/neteq.gypi
index c72efdd..5c0d080 100644
--- a/modules/audio_coding/neteq/neteq.gypi
+++ b/modules/audio_coding/neteq/neteq.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -105,8 +105,8 @@
'target_name': 'NetEqRTPplay',
'type': 'executable',
'dependencies': [
- 'NetEq', # NetEQ library defined above
- 'NetEqTestTools',# Test helpers
+ 'NetEq', # NetEQ library defined above
+ 'NetEqTestTools', # Test helpers
'G711',
'G722',
'PCM16B',
@@ -230,9 +230,24 @@
],
},
{
+ 'target_name': 'rtp_to_text',
+ 'type': 'executable',
+ 'dependencies': [
+ 'NetEqTestTools',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ 'sources': [
+ 'test/rtp_to_text.cc',
+ ],
+ },
+ {
'target_name': 'NetEqTestTools',
# Collection of useful functions used in other tests
'type': '<(library)',
+ 'variables': {
+ # Expects RTP packets without payloads when enabled.
+ 'neteq_dummy_rtp%': 0,
+ },
'dependencies': [
'G711',
'G722',
@@ -269,21 +284,17 @@
'test',
],
'sources': [
- 'test/NETEQTEST_NetEQClass.cc',
- 'test/NETEQTEST_RTPpacket.cc',
'test/NETEQTEST_CodecClass.cc',
- 'test/NETEQTEST_NetEQClass.h',
- 'test/NETEQTEST_RTPpacket.h',
'test/NETEQTEST_CodecClass.h',
+ 'test/NETEQTEST_DummyRTPpacket.cc',
+ 'test/NETEQTEST_DummyRTPpacket.h',
+ 'test/NETEQTEST_NetEQClass.cc',
+ 'test/NETEQTEST_NetEQClass.h',
+ 'test/NETEQTEST_RTPpacket.cc',
+ 'test/NETEQTEST_RTPpacket.h',
],
},
], # targets
}], # build_with_chromium
], # conditions
}
-
-# Local Variables:
-# tab-width:2
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc b/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
new file mode 100644
index 0000000..e8d153b
--- /dev/null
+++ b/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_DummyRTPpacket.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h> // for htons, htonl, etc
+#endif
+
+int NETEQTEST_DummyRTPpacket::readFromFile(FILE *fp)
+{
+ if (!fp)
+ {
+ return -1;
+ }
+
+ WebRtc_UWord16 length, plen;
+ WebRtc_UWord32 offset;
+
+ if (fread(&length, 2, 1, fp) == 0)
+ {
+ reset();
+ return -2;
+ }
+ length = ntohs(length);
+
+ if (fread(&plen, 2, 1, fp) == 0)
+ {
+ reset();
+ return -1;
+ }
+ int packetLen = ntohs(plen);
+
+ if (fread(&offset, 4, 1, fp) == 0)
+ {
+ reset();
+ return -1;
+ }
+ // Store in local variable until we have passed the reset below.
+ WebRtc_UWord32 receiveTime = ntohl(offset);
+
+ // Use length here because a plen of 0 specifies rtcp.
+ length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+
+ // check buffer size
+ if (_datagram && _memSize < length)
+ {
+ reset();
+ }
+
+ if (!_datagram)
+ {
+ _datagram = new WebRtc_UWord8[length];
+ _memSize = length;
+ }
+ memset(_datagram, 0, length);
+
+ if (length == 0)
+ {
+ _datagramLen = 0;
+ return packetLen;
+ }
+
+ // Read basic header
+ if (fread((unsigned short *) _datagram, 1, _kBasicHeaderLen, fp)
+ != (size_t)_kBasicHeaderLen)
+ {
+ reset();
+ return -1;
+ }
+ _receiveTime = receiveTime;
+ _datagramLen = _kBasicHeaderLen;
+
+ // Parse the basic header
+ WebRtcNetEQ_RTPInfo tempRTPinfo;
+ int P, X, CC;
+ parseBasicHeader(&tempRTPinfo, &P, &X, &CC);
+
+ // Check if we have to extend the header
+ if (X != 0 || CC != 0)
+ {
+ int newLen = _kBasicHeaderLen + CC * 4 + X * 4;
+ assert(_memSize >= newLen);
+
+ // Read extension from file
+ size_t readLen = newLen - _kBasicHeaderLen;
+ if (fread((unsigned short *) _datagram + _kBasicHeaderLen, 1, readLen,
+ fp) != readLen)
+ {
+ reset();
+ return -1;
+ }
+ _datagramLen = newLen;
+
+ if (X != 0)
+ {
+ int totHdrLen = calcHeaderLength(X, CC);
+ assert(_memSize >= totHdrLen);
+
+ // Read extension from file
+ size_t readLen = totHdrLen - newLen;
+ if (fread((unsigned short *) _datagram + newLen, 1, readLen, fp)
+ != readLen)
+ {
+ reset();
+ return -1;
+ }
+ _datagramLen = totHdrLen;
+ }
+ }
+ _datagramLen = length;
+
+ if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+ {
+ // discard this payload
+ return readFromFile(fp);
+ }
+
+ return packetLen;
+
+}
+
+int NETEQTEST_DummyRTPpacket::writeToFile(FILE *fp)
+{
+ if (!fp)
+ {
+ return -1;
+ }
+
+ WebRtc_UWord16 length, plen;
+ WebRtc_UWord32 offset;
+
+ // length including RTPplay header
+ length = htons(_datagramLen + _kRDHeaderLen);
+ if (fwrite(&length, 2, 1, fp) != 1)
+ {
+ return -1;
+ }
+
+ // payload length
+ plen = htons(_datagramLen);
+ if (fwrite(&plen, 2, 1, fp) != 1)
+ {
+ return -1;
+ }
+
+ // offset (=receive time)
+ offset = htonl(_receiveTime);
+ if (fwrite(&offset, 4, 1, fp) != 1)
+ {
+ return -1;
+ }
+
+ // Figure out the length of the RTP header.
+ int headerLen;
+ if (_datagramLen == 0)
+ {
+ // No payload at all; we are done writing to file.
+ headerLen = 0;
+ }
+ else
+ {
+ parseHeader();
+ headerLen = _payloadPtr - _datagram;
+ assert(headerLen >= 0);
+ }
+
+ // write RTP header
+ if (fwrite((unsigned short *) _datagram, 1, headerLen, fp) !=
+ static_cast<size_t>(headerLen))
+ {
+ return -1;
+ }
+
+ return (headerLen + _kRDHeaderLen); // total number of bytes written
+
+}
+
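
NETEQTEST_DummyRTPpacket reads rtpplay-style records that contain RTP headers but no payloads; it backs the -dummyrtp mode added to NetEqRTPplay further down. A hedged usage sketch, assuming a header-only input file (the helper name and path are illustrative):

    #include <stdio.h>
    #include "NETEQTEST_DummyRTPpacket.h"

    void ReadHeaderOnlyFile(const char* path) {
        FILE* fp = fopen(path, "rb");
        if (!fp) return;
        NETEQTEST_RTPpacket::skipFileHeader(fp);  // skip the rtpplay file header
        NETEQTEST_DummyRTPpacket packet;
        while (packet.readFromFile(fp) >= 0) {
            // Header fields parse as usual; there is no payload to decode.
            // e.g. packet.payloadType(), packet.sequenceNumber()
        }
        fclose(fp);
    }
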
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h b/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h
new file mode 100644
index 0000000..ef74421
--- /dev/null
+++ b/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_DUMMYRTPPACKET_H
+#define NETEQTEST_DUMMYRTPPACKET_H
+
+#include "NETEQTEST_RTPpacket.h"
+
+class NETEQTEST_DummyRTPpacket : public NETEQTEST_RTPpacket
+{
+public:
+ virtual int readFromFile(FILE *fp);
+ virtual int writeToFile(FILE *fp);
+};
+
+#endif //NETEQTEST_DUMMYRTPPACKET_H
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc b/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
index 2e60658..0d8be00 100644
--- a/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
+++ b/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -20,7 +20,8 @@
_bufferMem(NULL),
_preparseRTP(false),
_fsmult(1),
- _isMaster(true)
+ _isMaster(true),
+ _noDecode(false)
{
#ifdef WINDOWS_TIMING
_totTimeRecIn.QuadPart = 0;
@@ -36,7 +37,8 @@
_bufferMem(NULL),
_preparseRTP(false),
_fsmult(1),
- _isMaster(true)
+ _isMaster(true),
+ _noDecode(false)
{
#ifdef WINDOWS_TIMING
_totTimeRecIn.QuadPart = 0;
@@ -283,7 +285,14 @@
if (!msInfo)
{
// no msInfo given, do mono mode
- err = WebRtcNetEQ_RecOut(_inst, outData, &outLen);
+ if (_noDecode)
+ {
+ err = WebRtcNetEQ_RecOutNoDecode(_inst, outData, &outLen);
+ }
+ else
+ {
+ err = WebRtcNetEQ_RecOut(_inst, outData, &outLen);
+ }
}
else
{
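
The new _noDecode flag makes recOut() call WebRtcNetEQ_RecOutNoDecode() instead of WebRtcNetEQ_RecOut() in the mono path, which pairs with header-only input where there is nothing to decode. Caller-side sketch:

    NETEQTEST_NetEQClass neteq;   // constructed as usual
    neteq.setNoDecode(true);      // recOut() now uses WebRtcNetEQ_RecOutNoDecode()
    // Mono dispatch inside recOut(), as in the hunk above:
    //   err = _noDecode ? WebRtcNetEQ_RecOutNoDecode(_inst, outData, &outLen)
    //                   : WebRtcNetEQ_RecOut(_inst, outData, &outLen);
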
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h b/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
index c425b58..3e43125 100644
--- a/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
+++ b/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -47,8 +47,10 @@
bool usingPreparseRTP() { return (_preparseRTP); };
void setMaster( bool isMaster = true ) { _isMaster = isMaster; };
void setSlave() { _isMaster = false; };
+ void setNoDecode(bool noDecode = true) { _noDecode = noDecode; };
bool isMaster() { return (_isMaster); };
bool isSlave() { return (!_isMaster); };
+ bool isNoDecode() { return _noDecode; };
#ifdef WINDOWS_TIMING
double getRecInTime() { return (static_cast<double>( _totTimeRecIn.QuadPart )); };
@@ -69,24 +71,11 @@
bool _preparseRTP;
int _fsmult;
bool _isMaster;
+ bool _noDecode;
#ifdef WINDOWS_TIMING
LARGE_INTEGER _totTimeRecIn;
LARGE_INTEGER _totTimeRecOut;
#endif
};
-
-
-//class NETEQTEST_NetEQVector
-//{
-//public:
-// NETEQTEST_NetEQVector(int numChannels);
-// NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
-// WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
-// ~NETEQTEST_NetEQVector();
-//
-//private:
-// std::vector<NETEQTEST_NetEQClass *> channels;
-//};
-
#endif //NETEQTEST_NETEQCLASS_H
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc b/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
index 0412f06..ed26e9e 100644
--- a/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
+++ b/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,6 +11,7 @@
#include "NETEQTEST_RTPpacket.h"
#include <assert.h>
+#include <stdlib.h> // rand
#include <string.h>
#ifdef WIN32
@@ -19,12 +20,8 @@
#include <netinet/in.h> // for htons, htonl, etc
#endif
-#include <cstdlib>
-
-#include "gtest/gtest.h"
-
-#define HDR_SIZE 8 // rtpplay packet header size in bytes
-
+const int NETEQTEST_RTPpacket::_kRDHeaderLen = 8;
+const int NETEQTEST_RTPpacket::_kBasicHeaderLen = 12;
NETEQTEST_RTPpacket::NETEQTEST_RTPpacket()
:
@@ -41,88 +38,14 @@
_blockList.clear();
}
-NETEQTEST_RTPpacket::NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe)
-{
-
- memcpy(this, &copyFromMe, sizeof(NETEQTEST_RTPpacket));
-
- _datagram = NULL;
- _payloadPtr = NULL;
-
- if(copyFromMe._datagram)
- {
- _datagram = new WebRtc_UWord8[_memSize];
-
- if(_datagram)
- {
- memcpy(_datagram, copyFromMe._datagram, _memSize);
- }
- }
-
- if(copyFromMe._payloadPtr)
- {
- _payloadPtr = _datagram + (copyFromMe._payloadPtr - copyFromMe._datagram);
- }
-
- _blockList = copyFromMe._blockList;
-
-}
-
-
-NETEQTEST_RTPpacket & NETEQTEST_RTPpacket::operator = (const NETEQTEST_RTPpacket & other)
-{
- if (this != &other) // protect against invalid self-assignment
- {
-
- // deallocate datagram memory if allocated
- if(_datagram)
- {
- delete [] _datagram;
- }
-
- // do shallow copy
- memcpy(this, &other, sizeof(NETEQTEST_RTPpacket));
-
- // reset pointers
- _datagram = NULL;
- _payloadPtr = NULL;
-
- if(other._datagram)
- {
- _datagram = new WebRtc_UWord8[other._memSize];
- _memSize = other._memSize;
-
- if(_datagram)
- {
- memcpy(_datagram, other._datagram, _memSize);
- }
- }
-
- if(other._payloadPtr)
- {
- _payloadPtr = _datagram + (other._payloadPtr - other._datagram);
- }
-
- // copy the blocking list (map)
- _blockList = other._blockList;
-
- }
-
- // by convention, always return *this
- return *this;
-}
-
-
-
NETEQTEST_RTPpacket::~NETEQTEST_RTPpacket()
{
- if(_datagram)
+ if(_datagram)
{
delete [] _datagram;
}
}
-
void NETEQTEST_RTPpacket::reset()
{
if(_datagram) {
@@ -180,7 +103,7 @@
return(-1);
}
- WebRtc_UWord16 length, plen;
+ WebRtc_UWord16 length, plen;
WebRtc_UWord32 offset;
if (fread(&length,2,1,fp)==0)
@@ -203,9 +126,9 @@
return(-1);
}
WebRtc_UWord32 receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
-
- // Use length here because a plen of 0 specifies rtcp
- length = (WebRtc_UWord16) (length - HDR_SIZE);
+
+ // Use length here because a plen of 0 specifies rtcp
+ length = (WebRtc_UWord16) (length - _kRDHeaderLen);
// check buffer size
if (_datagram && _memSize < length)
@@ -219,10 +142,10 @@
_memSize = length;
}
- if (fread((unsigned short *) _datagram,1,length,fp) != length)
+ if (fread((unsigned short *) _datagram,1,length,fp) != length)
{
reset();
- return(-1);
+ return(-1);
}
_datagramLen = length;
@@ -234,7 +157,7 @@
return(readFromFile(fp));
}
- return(packetLen);
+ return(packetLen);
}
@@ -289,7 +212,7 @@
WebRtc_UWord32 offset;
// length including RTPplay header
- length = htons(_datagramLen + HDR_SIZE);
+ length = htons(_datagramLen + _kRDHeaderLen);
if (fwrite(&length, 2, 1, fp) != 1)
{
return -1;
@@ -301,7 +224,7 @@
{
return -1;
}
-
+
// offset (=receive time)
offset = htonl(_receiveTime);
if (fwrite(&offset, 4, 1, fp) != 1)
@@ -317,7 +240,7 @@
return -1;
}
- return _datagramLen + HDR_SIZE; // total number of bytes written
+ return _datagramLen + _kRDHeaderLen; // total number of bytes written
}
@@ -336,13 +259,13 @@
return;
}
- if (_datagramLen < 12)
+ if (_datagramLen < _kBasicHeaderLen)
{
// corrupt packet?
return;
}
- _payloadLen = parseRTPheader(_datagram, _datagramLen, &_rtpInfo, &_payloadPtr);
+ _payloadLen = parseRTPheader(&_payloadPtr);
_rtpParsed = true;
@@ -397,8 +320,9 @@
}
}
-WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen() const
+WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen()
{
+ parseHeader();
return _payloadLen;
}
@@ -420,10 +344,10 @@
WebRtc_UWord8 NETEQTEST_RTPpacket::payloadType() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
-
- if(_datagram)
+
+ if(_datagram && _datagramLen >= _kBasicHeaderLen)
{
- parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
+ parseRTPheader(&tempRTPinfo);
}
else
{
@@ -436,10 +360,10 @@
WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
-
- if(_datagram)
+
+ if(_datagram && _datagramLen >= _kBasicHeaderLen)
{
- parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
+ parseRTPheader(&tempRTPinfo);
}
else
{
@@ -452,10 +376,10 @@
WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
-
- if(_datagram)
+
+ if(_datagram && _datagramLen >= _kBasicHeaderLen)
{
- parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
+ parseRTPheader(&tempRTPinfo);
}
else
{
@@ -468,10 +392,10 @@
WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
-
- if(_datagram)
+
+ if(_datagram && _datagramLen >= _kBasicHeaderLen)
{
- parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
+ parseRTPheader(&tempRTPinfo);
}
else
{
@@ -484,10 +408,10 @@
WebRtc_UWord8 NETEQTEST_RTPpacket::markerBit() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
-
- if(_datagram)
+
+ if(_datagram && _datagramLen >= _kBasicHeaderLen)
{
- parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
+ parseRTPheader(&tempRTPinfo);
}
else
{
@@ -501,7 +425,7 @@
int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
{
-
+
if (_datagramLen < 12)
{
return -1;
@@ -520,7 +444,7 @@
int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
{
-
+
if (_datagramLen < 12)
{
return -1;
@@ -540,7 +464,7 @@
int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
{
-
+
if (_datagramLen < 12)
{
return -1;
@@ -553,7 +477,7 @@
_datagram[4]=(unsigned char)((ts>>24)&0xFF);
_datagram[5]=(unsigned char)((ts>>16)&0xFF);
- _datagram[6]=(unsigned char)((ts>>8)&0xFF);
+ _datagram[6]=(unsigned char)((ts>>8)&0xFF);
_datagram[7]=(unsigned char)(ts & 0xFF);
return 0;
@@ -562,7 +486,7 @@
int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
{
-
+
if (_datagramLen < 12)
{
return -1;
@@ -584,7 +508,7 @@
int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
{
-
+
if (_datagramLen < 12)
{
return -1;
@@ -616,10 +540,10 @@
return -1;
}
- makeRTPheader(_datagram,
- RTPinfo->payloadType,
- RTPinfo->sequenceNumber,
- RTPinfo->timeStamp,
+ makeRTPheader(_datagram,
+ RTPinfo->payloadType,
+ RTPinfo->sequenceNumber,
+ RTPinfo->timeStamp,
RTPinfo->SSRC,
RTPinfo->markerBit);
@@ -627,7 +551,8 @@
}
-int NETEQTEST_RTPpacket::splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode)
+int NETEQTEST_RTPpacket::splitStereo(NETEQTEST_RTPpacket* slaveRtp,
+ enum stereoModes mode)
{
// if mono, do nothing
if (mode == stereoModeMono)
@@ -639,7 +564,7 @@
parseHeader();
// start by copying the main rtp packet
- slaveRtp = *this;
+ *slaveRtp = *this;
if(_payloadLen == 0)
{
@@ -701,7 +626,7 @@
rtp_data[4]=(unsigned char)((timestamp>>24)&0xFF);
rtp_data[5]=(unsigned char)((timestamp>>16)&0xFF);
- rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF);
+ rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF);
rtp_data[7]=(unsigned char)(timestamp & 0xFF);
rtp_data[8]=(unsigned char)((ssrc>>24)&0xFF);
@@ -711,65 +636,114 @@
rtp_data[11]=(unsigned char)(ssrc & 0xFF);
}
-
-WebRtc_UWord16 NETEQTEST_RTPpacket::parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr) const
+WebRtc_UWord16
+ NETEQTEST_RTPpacket::parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
+ WebRtc_UWord8 **payloadPtr) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) datagram;
- int i_P, i_X, i_CC, i_extlength=-1, i_padlength=0, i_startPosition;
+ WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int i_P, i_X, i_CC;
- i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
- i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
- i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number */
- RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01); /* Get the marker bit */
- RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F); /* Get the coder type */
- RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
- ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8)); /* Get the packet number */
- RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8); /* Get timestamp */
- RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8); /* Get the SSRC */
+ assert(_datagramLen >= 12);
+ parseBasicHeader(RTPinfo, &i_P, &i_X, &i_CC);
- if (i_X==1) {
- /* Extention header exists. Find out how many WebRtc_Word32 it consists of */
- i_extlength=((( ((WebRtc_UWord16)rtp_data[7+2*i_CC]) >> 8) & 0xFF) |
- ( ((WebRtc_UWord16)(rtp_data[7+2*i_CC]&0xFF)) << 8));
- }
- if (i_P==1) {
- /* Padding exists. Find out how many bytes the padding consists of */
- if (datagramLen & 0x1) {
- /* odd number of bytes => last byte in higher byte */
- i_padlength=(rtp_data[datagramLen>>1] & 0xFF);
- } else {
- /* even number of bytes => last byte in lower byte */
- i_padlength=(((WebRtc_UWord16)rtp_data[(datagramLen>>1)-1]) >> 8);
- }
- }
+ int i_startPosition = calcHeaderLength(i_X, i_CC);
- i_startPosition=12+4*(i_extlength+1)+4*i_CC;
+ int i_padlength = calcPadLength(i_P);
- if (payloadPtr) {
- *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition>>1];
+ if (payloadPtr)
+ {
+ *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition >> 1];
}
- return (WebRtc_UWord16) (datagramLen-i_startPosition-i_padlength);
+ return (WebRtc_UWord16) (_datagramLen - i_startPosition - i_padlength);
}
-//void NETEQTEST_RTPpacket::splitStereoSample(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes, int stride)
-void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride)
+
+void NETEQTEST_RTPpacket::parseBasicHeader(WebRtcNetEQ_RTPInfo *RTPinfo,
+ int *i_P, int *i_X, int *i_CC) const
{
- if(!_payloadPtr || !slaveRtp._payloadPtr
- || _payloadLen <= 0 || slaveRtp._memSize < _memSize)
+ WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ if (_datagramLen < 12)
+ {
+ assert(false);
+ return;
+ }
+
+ *i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
+ *i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
+ *i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number */
+ /* Get the marker bit */
+ RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01);
+ /* Get the coder type */
+ RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F);
+ /* Get the packet number */
+ RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
+ ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8));
+ /* Get timestamp */
+ RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
+ ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
+ ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
+ ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8);
+ /* Get the SSRC */
+ RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
+ ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
+ ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
+ ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8);
+}
+
+int NETEQTEST_RTPpacket::calcHeaderLength(int i_X, int i_CC) const
+{
+ int i_extlength = 0;
+ WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+
+ if (i_X == 1)
+ {
+ // Extension header exists.
+ // Find out how many WebRtc_Word32 it consists of.
+ assert(_datagramLen > 2 * (7 + 2 * i_CC));
+ if (_datagramLen > 2 * (7 + 2 * i_CC))
+ {
+ i_extlength = (((((WebRtc_UWord16) rtp_data[7 + 2 * i_CC]) >> 8)
+ & 0xFF) | (((WebRtc_UWord16) (rtp_data[7 + 2 * i_CC] & 0xFF))
+ << 8)) + 1;
+ }
+ }
+
+ return 12 + 4 * i_extlength + 4 * i_CC;
+}
+
+int NETEQTEST_RTPpacket::calcPadLength(int i_P) const
+{
+ WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ if (i_P == 1)
+ {
+ /* Padding exists. Find out how many bytes the padding consists of. */
+ if (_datagramLen & 0x1)
+ {
+ /* odd number of bytes => last byte in higher byte */
+ return rtp_data[_datagramLen >> 1] & 0xFF;
+ }
+ else
+ {
+ /* even number of bytes => last byte in lower byte */
+ return ((WebRtc_UWord16) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
+ }
+ }
+ return 0;
+}
+
+void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket* slaveRtp,
+ int stride)
+{
+ if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
+ || _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
{
return;
}
WebRtc_UWord8 *readDataPtr = _payloadPtr;
WebRtc_UWord8 *writeDataPtr = _payloadPtr;
- WebRtc_UWord8 *slaveData = slaveRtp._payloadPtr;
+ WebRtc_UWord8 *slaveData = slaveRtp->_payloadPtr;
while (readDataPtr - _payloadPtr < _payloadLen)
{
@@ -789,23 +763,22 @@
}
_payloadLen /= 2;
- slaveRtp._payloadLen = _payloadLen;
+ slaveRtp->_payloadLen = _payloadLen;
}
-//void NETEQTEST_RTPpacket::splitStereoFrame(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes)
-void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp)
+void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp)
{
- if(!_payloadPtr || !slaveRtp._payloadPtr
- || _payloadLen <= 0 || slaveRtp._memSize < _memSize)
+ if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
+ || _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
{
return;
}
- memmove(slaveRtp._payloadPtr, _payloadPtr + _payloadLen/2, _payloadLen/2);
+ memmove(slaveRtp->_payloadPtr, _payloadPtr + _payloadLen/2, _payloadLen/2);
_payloadLen /= 2;
- slaveRtp._payloadLen = _payloadLen;
+ slaveRtp->_payloadLen = _payloadLen;
}
// Get the RTP header for the RED payload indicated by argument index.
@@ -870,6 +843,6 @@
for (int i = 0; i < _payloadLen; ++i)
{
- _payloadPtr[i] = static_cast<WebRtc_UWord8>(std::rand());
+ _payloadPtr[i] = static_cast<WebRtc_UWord8>(rand());
}
}
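
The header-length arithmetic is now centralized in calcHeaderLength(): the fixed RTP header is 12 bytes, each CSRC adds 4 bytes, and when the X bit is set the extension contributes (length field + 1) 32-bit words, the extra word being the extension header itself. A worked sketch under those assumptions (names are illustrative):

    // Mirrors NETEQTEST_RTPpacket::calcHeaderLength().
    static int ExpectedHeaderLength(bool has_extension, int ext_length_field,
                                    int cc) {
        int ext_words = has_extension ? ext_length_field + 1 : 0;
        return 12 + 4 * ext_words + 4 * cc;
    }
    // Example: one CSRC and an extension whose length field reads 2:
    //   12 + 4 * (2 + 1) + 4 * 1 = 28 bytes.
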
diff --git a/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h b/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
index 0478568..16beb95 100644
--- a/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
+++ b/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -27,15 +27,13 @@
{
public:
NETEQTEST_RTPpacket();
- NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe);
- NETEQTEST_RTPpacket & operator = (const NETEQTEST_RTPpacket & other);
bool operator !() const { return (dataLen() < 0); };
- ~NETEQTEST_RTPpacket();
+ virtual ~NETEQTEST_RTPpacket();
void reset();
static int skipFileHeader(FILE *fp);
- int readFromFile(FILE *fp);
+ virtual int readFromFile(FILE *fp);
int readFixedFromFile(FILE *fp, size_t len);
- int writeToFile(FILE *fp);
+ virtual int writeToFile(FILE *fp);
void blockPT(WebRtc_UWord8 pt);
//WebRtc_Word16 payloadType();
void parseHeader();
@@ -43,7 +41,7 @@
WebRtcNetEQ_RTPInfo const * RTPinfo() const;
WebRtc_UWord8 * datagram() const;
WebRtc_UWord8 * payload() const;
- WebRtc_Word16 payloadLen() const;
+ WebRtc_Word16 payloadLen();
WebRtc_Word16 dataLen() const;
bool isParsed() const;
bool isLost() const;
@@ -64,7 +62,7 @@
int setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo);
- int splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode);
+ int splitStereo(NETEQTEST_RTPpacket* slaveRtp, enum stereoModes mode);
int extractRED(int index, WebRtcNetEQ_RTPInfo& red);
@@ -81,11 +79,25 @@
bool _lost;
std::map<WebRtc_UWord8, bool> _blockList;
+protected:
+ static const int _kRDHeaderLen;
+ static const int _kBasicHeaderLen;
+
+ void parseBasicHeader(WebRtcNetEQ_RTPInfo *RTPinfo, int *i_P, int *i_X,
+ int *i_CC) const;
+ int calcHeaderLength(int i_X, int i_CC) const;
+
private:
- void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
- WebRtc_UWord16 parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr = NULL) const;
- void splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride);
- void splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp);
+ void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType,
+ WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp,
+ WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
+ WebRtc_UWord16 parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
+ WebRtc_UWord8 **payloadPtr = NULL) const;
+ WebRtc_UWord16 parseRTPheader(WebRtc_UWord8 **payloadPtr = NULL)
+ { return parseRTPheader(&_rtpInfo, payloadPtr);};
+ int calcPadLength(int i_P) const;
+ void splitStereoSample(NETEQTEST_RTPpacket* slaveRtp, int stride);
+ void splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp);
};
#endif //NETEQTEST_RTPPACKET_H
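
The destructor and the file I/O methods become virtual, and splitStereo() and the split helpers now take pointers, because packets are handled through base-class pointers once NETEQTEST_DummyRTPpacket is in play (see NetEqRTPplay.cc below). Sketch of the polymorphic selection, matching main() further down (dummy_rtp stands in for the -dummyrtp flag):

    NETEQTEST_RTPpacket* rtp;
    if (dummy_rtp)
        rtp = new NETEQTEST_DummyRTPpacket();
    else
        rtp = new NETEQTEST_RTPpacket();
    rtp->readFromFile(fp);  // virtual: dispatches to the dummy reader
    delete rtp;             // safe now that the destructor is virtual
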
diff --git a/modules/audio_coding/neteq/test/NetEqRTPplay.cc b/modules/audio_coding/neteq/test/NetEqRTPplay.cc
index 09b0791..bb2d895 100644
--- a/modules/audio_coding/neteq/test/NetEqRTPplay.cc
+++ b/modules/audio_coding/neteq/test/NetEqRTPplay.cc
@@ -19,6 +19,7 @@
#include "neteq_error_codes.h" // for the API test
#include "NETEQTEST_RTPpacket.h"
+#include "NETEQTEST_DummyRTPpacket.h"
#include "NETEQTEST_NetEQClass.h"
#include "NETEQTEST_CodecClass.h"
@@ -67,7 +68,7 @@
#define TIME_STEP 1
#define FIRSTLINELEN 40
-#define MAX_NETEQ_BUFFERSIZE 170000 //100000
+#define MAX_NETEQ_BUFFERSIZE 170000 //100000
#define CHECK_ZERO(a) {int errCode = a; char tempErrName[WEBRTC_NETEQ_MAX_ERROR_NAME]; if((errCode)!=0){errCode = WebRtcNetEQ_GetErrorCode(inst); WebRtcNetEQ_GetErrorName(errCode, tempErrName, WEBRTC_NETEQ_MAX_ERROR_NAME); printf("\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, errCode); exit(0);}}
#define CHECK_NOT_NULL(a) if((a)==NULL){printf("\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
//#define PLAY_CLEAN // ignore arrival times and let the packets arrive according to RTP timestamps
@@ -113,8 +114,8 @@
void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen);
int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime);
void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d);
-bool splitStereo(NETEQTEST_RTPpacket& rtp, NETEQTEST_RTPpacket& rtpSlave,
- const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
+ const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
bool *isStereo);
void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders);
@@ -134,10 +135,10 @@
WebRtc_Word16 NetEqPacketBufferSlave[MAX_NETEQ_BUFFERSIZE>>1];
#ifdef NETEQ_DELAY_LOGGING
-extern "C" {
- FILE *delay_fid2; /* file pointer */
- WebRtc_UWord32 tot_received_packets=0;
-}
+extern "C" {
+ FILE *delay_fid2; /* file pointer */
+ WebRtc_UWord32 tot_received_packets=0;
+}
#endif
#ifdef DEF_BUILD_DATE
@@ -150,29 +151,25 @@
int main(int argc, char* argv[])
{
std::vector<NETEQTEST_NetEQClass *> NetEQvector;
- NETEQTEST_RTPpacket rtp;
- char version[20];
+ char version[20];
- NETEQTEST_RTPpacket slaveRtp;
- //bool switchMS = false;
- //bool duplicatePayload = false;
- enum WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd-1];
- int noOfCodecs;
- int ok;
- WebRtc_Word16 out_data[640*2];
- WebRtc_Word16 outLen, writeLen;
+ enum WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd-1];
+ int noOfCodecs;
+ int ok;
+ WebRtc_Word16 out_data[640*2];
+ WebRtc_Word16 outLen, writeLen;
int fs = 8000;
- WebRtcNetEQ_RTCPStat RTCPstat;
+ WebRtcNetEQ_RTCPStat RTCPstat;
#ifdef WIN32
- char outdrive[MY_MAX_DRIVE];
- char outpath[MY_MAX_PATH];
- char outfile[MY_MAX_FNAME];
- char outext[MY_MAX_EXT];
+ char outdrive[MY_MAX_DRIVE];
+ char outpath[MY_MAX_PATH];
+ char outfile[MY_MAX_FNAME];
+ char outext[MY_MAX_EXT];
#endif
- char outfilename[MY_MAX_PATH];
+ char outfilename[MY_MAX_PATH];
#ifdef NETEQ_DELAY_LOGGING
- float clock_float;
- int temp_var;
+ float clock_float;
+ int temp_var;
#endif
#ifdef JUNK_DATA
FILE *seedfile;
@@ -185,38 +182,40 @@
int packetLen = 0;
int packetCount = 0;
std::map<WebRtc_UWord8, decoderStruct> decoders;
+ bool dummyRtp = false;
+ bool noDecode = false;
- /* get the version string */
- WebRtcNetEQ_GetVersion(version);
- printf("\n\nNetEq version: %s\n", version);
+ /* get the version string */
+ WebRtcNetEQ_GetVersion(version);
+ printf("\n\nNetEq version: %s\n", version);
#ifdef DEF_BUILD_DATE
- printf("Build time: %s\n", __BUILD_DATE);
+ printf("Build time: %s\n", __BUILD_DATE);
#endif
- /* check number of parameters */
- if ((argc < 3)
+ /* check number of parameters */
+ if ((argc < 3)
#ifdef WIN32 // implicit output file name possible for windows
&& (argc < 2)
#endif
) {
- /* print help text and exit */
- printf("Test program for NetEQ.\n");
- printf("The program reads an RTP stream from file and inserts it into NetEQ.\n");
- printf("The format of the RTP stream file should be the same as for rtpplay,\n");
- printf("and can be obtained e.g., from Ethereal by using\n");
- printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
- printf("Usage:\n\n");
+ /* print help text and exit */
+ printf("Test program for NetEQ.\n");
+ printf("The program reads an RTP stream from file and inserts it into NetEQ.\n");
+ printf("The format of the RTP stream file should be the same as for rtpplay,\n");
+ printf("and can be obtained e.g., from Ethereal by using\n");
+ printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
+ printf("Usage:\n\n");
#ifdef WIN32
- printf("%s RTPfile [outfile] [-options]\n", argv[0]);
+ printf("%s RTPfile [outfile] [-options]\n", argv[0]);
#else
printf("%s RTPfile outfile [-options]\n", argv[0]);
#endif
- printf("where:\n");
+ printf("where:\n");
- printf("RTPfile : RTP stream input file\n\n");
+ printf("RTPfile : RTP stream input file\n\n");
- printf("outfile : PCM speech output file\n");
- printf(" Output file name is derived from RTP file name if omitted\n\n");
+ printf("outfile : PCM speech output file\n");
+ printf(" Output file name is derived from RTP file name if omitted\n\n");
printf("-options are optional switches:\n");
printf("\t-recout datfile : supply recout times\n");
@@ -225,51 +224,52 @@
printf("\t-fax : engage fax mode\n");
printf("\t-preparsertp : use RecIn with pre-parsed RTP\n");
printf("\t-rtponly packLenBytes : input file consists of constant size RTP packets without RTPplay headers\n");
+ printf("\t-dummyrtp : input file contains only RTP headers\n");
+ printf("\t-nodecode : no decoding will be done\n");
//printf("\t-switchms : switch from mono to stereo (copy channel) after 10 seconds\n");
//printf("\t-duplicate : use two instances with identical input (2-channel mono)\n");
- return(0);
- }
+ return(0);
+ }
- if (strcmp(argv[1], "-apitest")==0) {
- // do API test and then return
- ok=doAPItest();
+ if (strcmp(argv[1], "-apitest")==0) {
+ // do API test and then return
+ ok=doAPItest();
- if (ok==0)
- printf("API test successful!\n");
- else
- printf("API test failed!\n");
+ if (ok==0)
+ printf("API test successful!\n");
+ else
+ printf("API test failed!\n");
- return(ok);
- }
+ return(ok);
+ }
- FILE* in_file=fopen(argv[1],"rb");
- CHECK_NOT_NULL(in_file);
- printf("Input file: %s\n",argv[1]);
+ FILE* in_file=fopen(argv[1],"rb");
+ CHECK_NOT_NULL(in_file);
+ printf("Input file: %s\n",argv[1]);
int argIx = 2; // index of next argument from command line
- if ( argc >= 3 && argv[2][0] != '-' ) { // output name given on command line
- strcpy(outfilename, argv[2]);
+ if ( argc >= 3 && argv[2][0] != '-' ) { // output name given on command line
+ strcpy(outfilename, argv[2]);
argIx++;
- } else { // derive output name from input name
+ } else { // derive output name from input name
#ifdef WIN32
- _splitpath(argv[1],outdrive,outpath,outfile,outext);
- _makepath(outfilename,outdrive,outpath,outfile,"pcm");
+ _splitpath(argv[1],outdrive,outpath,outfile,outext);
+ _makepath(outfilename,outdrive,outpath,outfile,"pcm");
#else
fprintf(stderr,"Output file name must be specified.\n");
- return(-1);
+ return(-1);
#endif
- }
- FILE* out_file=fopen(outfilename,"wb");
- if (out_file==NULL) {
- fprintf(stderr,"Could not open file %s for writing\n", outfilename);
- return(-1);
- }
- printf("Output file: %s\n",outfilename);
+ }
+ FILE* out_file=fopen(outfilename,"wb");
+ if (out_file==NULL) {
+ fprintf(stderr,"Could not open file %s for writing\n", outfilename);
+ return(-1);
+ }
+ printf("Output file: %s\n",outfilename);
// Parse for more arguments, all beginning with '-'
-
while( argIx < argc ) {
if (argv[argIx][0] != '-') {
fprintf(stderr,"Unknown input argument %s\n", argv[argIx]);
@@ -311,6 +311,18 @@
exit(1);
}
}
+ else if (strcmp(argv[argIx], "-dummyrtp") == 0
+ || strcmp(argv[argIx], "-dummy") == 0)
+ {
+ argIx++;
+ dummyRtp = true;
+ noDecode = true; // force noDecode since there are no payloads
+ }
+ else if (strcmp(argv[argIx], "-nodecode") == 0)
+ {
+ argIx++;
+ noDecode = true;
+ }
//else if( strcmp(argv[argIx], "-switchms") == 0 ) {
// argIx++;
// switchMS = true;
@@ -328,23 +340,23 @@
#ifdef NETEQ_DELAY_LOGGING
- char delayfile[MY_MAX_PATH];
+ char delayfile[MY_MAX_PATH];
#ifdef WIN32
- _splitpath(outfilename,outdrive,outpath,outfile,outext);
- _makepath(delayfile,outdrive,outpath,outfile,"d");
+ _splitpath(outfilename,outdrive,outpath,outfile,outext);
+ _makepath(delayfile,outdrive,outpath,outfile,"d");
#else
sprintf(delayfile, "%s.d", outfilename);
#endif
- delay_fid2 = fopen(delayfile,"wb");
- fprintf(delay_fid2, "#!NetEQ_Delay_Logging%s\n", NETEQ_DELAY_LOGGING_VERSION_STRING);
+ delay_fid2 = fopen(delayfile,"wb");
+ fprintf(delay_fid2, "#!NetEQ_Delay_Logging%s\n", NETEQ_DELAY_LOGGING_VERSION_STRING);
#endif
- char ptypesfile[MY_MAX_PATH];
+ char ptypesfile[MY_MAX_PATH];
#ifdef WIN32
_splitpath(argv[0],outdrive,outpath,outfile,outext);
- _makepath(ptypesfile,outdrive,outpath,"ptypes","txt");
+ _makepath(ptypesfile,outdrive,outpath,"ptypes","txt");
#else
- // TODO(hlundin): Include path to ptypes, as for WIN32 above.
+ // TODO(hlundin): Include path to ptypes, as for WIN32 above.
strcpy(ptypesfile, "ptypes.txt");
#endif
FILE *ptypeFile = fopen(ptypesfile,"rt");
@@ -368,7 +380,7 @@
noOfCodecs = populateUsedCodec(&decoders, usedCodec);
- /* read RTP file header */
+ /* read RTP file header */
if (!rtpOnly)
{
if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0)
@@ -382,32 +394,45 @@
long tempFilePos = ftell(in_file);
enum stereoModes stereoMode = stereoModeMono;
+ NETEQTEST_RTPpacket *rtp;
+ NETEQTEST_RTPpacket *slaveRtp;
+ if (!dummyRtp)
+ {
+ rtp = new NETEQTEST_RTPpacket();
+ slaveRtp = new NETEQTEST_RTPpacket();
+ }
+ else
+ {
+ rtp = new NETEQTEST_DummyRTPpacket();
+ slaveRtp = new NETEQTEST_DummyRTPpacket();
+ }
+
if (!rtpOnly)
{
- while (rtp.readFromFile(in_file) >= 0)
+ while (rtp->readFromFile(in_file) >= 0)
{
- if (decoders.count(rtp.payloadType()) > 0
- && decoders[rtp.payloadType()].codec != kDecoderRED
- && decoders[rtp.payloadType()].codec != kDecoderAVT
- && decoders[rtp.payloadType()].codec != kDecoderCNG )
+ if (decoders.count(rtp->payloadType()) > 0
+ && decoders[rtp->payloadType()].codec != kDecoderRED
+ && decoders[rtp->payloadType()].codec != kDecoderAVT
+ && decoders[rtp->payloadType()].codec != kDecoderCNG )
{
- stereoMode = decoders[rtp.payloadType()].stereo;
- fs = decoders[rtp.payloadType()].fs;
+ stereoMode = decoders[rtp->payloadType()].stereo;
+ fs = decoders[rtp->payloadType()].fs;
break;
}
}
}
else
{
- while (rtp.readFixedFromFile(in_file, packetLen) >= 0)
+ while (rtp->readFixedFromFile(in_file, packetLen) >= 0)
{
- if (decoders.count(rtp.payloadType()) > 0
- && decoders[rtp.payloadType()].codec != kDecoderRED
- && decoders[rtp.payloadType()].codec != kDecoderAVT
- && decoders[rtp.payloadType()].codec != kDecoderCNG )
+ if (decoders.count(rtp->payloadType()) > 0
+ && decoders[rtp->payloadType()].codec != kDecoderRED
+ && decoders[rtp->payloadType()].codec != kDecoderAVT
+ && decoders[rtp->payloadType()].codec != kDecoderCNG )
{
- stereoMode = decoders[rtp.payloadType()].stereo;
- fs = decoders[rtp.payloadType()].fs;
+ stereoMode = decoders[rtp->payloadType()].stereo;
+ fs = decoders[rtp->payloadType()].fs;
break;
}
}
@@ -417,18 +442,18 @@
/* block some payload types */
- //rtp.blockPT(72);
- //rtp.blockPT(23);
+ //rtp->blockPT(72);
+ //rtp->blockPT(23);
- /* read first packet */
+ /* read first packet */
if (!rtpOnly)
{
- rtp.readFromFile(in_file);
+ rtp->readFromFile(in_file);
}
else
{
- rtp.readFixedFromFile(in_file, packetLen);
- rtp.setTime((1000 * rtp.timeStamp()) / fs);
+ rtp->readFixedFromFile(in_file, packetLen);
+ rtp->setTime((1000 * rtp->timeStamp()) / fs);
}
if (!rtp)
{
@@ -436,7 +461,7 @@
}
- /* Initialize NetEQ instances */
+ /* Initialize NetEQ instances */
int numInst = 1;
if (stereoMode > stereoModeMono)
{
@@ -456,26 +481,28 @@
NetEQvector[i]->usePreparseRTP(preParseRTP);
+ NetEQvector[i]->setNoDecode(noDecode);
+
if (numInst > 1)
{
// we are using master/slave mode
if (i == 0)
{
// first instance is master
- NetEQvector[i]->isMaster();
+ NetEQvector[i]->setMaster();
}
else
{
// all other are slaves
- NetEQvector[i]->isSlave();
+ NetEQvector[i]->setSlave();
}
}
}
#ifdef ZERO_TS_START
- WebRtc_UWord32 firstTS = rtp.timeStamp();
- rtp.setTimeStamp(0);
+ WebRtc_UWord32 firstTS = rtp->timeStamp();
+ rtp->setTimeStamp(0);
#else
WebRtc_UWord32 firstTS = 0;
#endif
@@ -483,15 +510,15 @@
// check stereo mode
if (stereoMode > stereoModeMono)
{
- if(rtp.splitStereo(slaveRtp, stereoMode))
+ if(rtp->splitStereo(slaveRtp, stereoMode))
{
printf("Error in splitStereo\n");
}
}
#ifdef PLAY_CLEAN
- WebRtc_UWord32 prevTS = rtp.timeStamp();
- WebRtc_UWord32 currTS, prev_time;
+ WebRtc_UWord32 prevTS = rtp->timeStamp();
+ WebRtc_UWord32 currTS, prev_time;
#endif
#ifdef JUNK_DATA
@@ -511,9 +538,9 @@
int lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
if (recoutTimes)
- simClock = (rtp.time() < nextRecoutTime ? rtp.time(): nextRecoutTime);
+ simClock = (rtp->time() < nextRecoutTime ? rtp->time(): nextRecoutTime);
else
- simClock = rtp.time(); // start immediately with first packet
+ simClock = rtp->time(); // start immediately with first packet
WebRtc_UWord32 start_clock = simClock;
@@ -526,14 +553,14 @@
if(msInfo == NULL)
return(-1);
- while(rtp.dataLen() >= 0 || (recoutTimes && !lastRecout)) {
+ while(rtp->dataLen() >= 0 || (recoutTimes && !lastRecout)) {
// printf("simClock = %Lu\n", simClock);
-
+
#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CLOCK;
- clock_float = (float) simClock;
- fwrite(&temp_var,sizeof(int),1,delay_fid2);
- fwrite(&clock_float, sizeof(float),1,delay_fid2);
+ temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CLOCK;
+ clock_float = (float) simClock;
+ fwrite(&temp_var,sizeof(int),1,delay_fid2);
+ fwrite(&clock_float, sizeof(float),1,delay_fid2);
#endif
/* time to set extra delay */
if (extraDelay > -1 && simClock >= nextExtraDelayTime) {
@@ -545,67 +572,67 @@
getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
}
- /* check if time to receive */
- while (simClock >= rtp.time() && rtp.dataLen() >= 0)
+ /* check if time to receive */
+ while (simClock >= rtp->time() && rtp->dataLen() >= 0)
{
- if (rtp.dataLen() > 0)
+ if (rtp->dataLen() > 0)
{
// insert main packet
- NetEQvector[0]->recIn(rtp);
+ NetEQvector[0]->recIn(*rtp);
if (stereoMode > stereoModeMono
- && slaveRtp.dataLen() > 0)
+ && slaveRtp->dataLen() > 0)
{
// insert slave packet
- NetEQvector[1]->recIn(slaveRtp);
+ NetEQvector[1]->recIn(*slaveRtp);
}
- }
+ }
- /* get next packet */
+ /* get next packet */
#ifdef PLAY_CLEAN
- prev_time = rtp.time();
+ prev_time = rtp->time();
#endif
if (!rtpOnly)
{
- rtp.readFromFile(in_file);
+ rtp->readFromFile(in_file);
}
else
{
- rtp.readFixedFromFile(in_file, packetLen);
- rtp.setTime((1000 * rtp.timeStamp()) / fs);
+ rtp->readFixedFromFile(in_file, packetLen);
+ rtp->setTime((1000 * rtp->timeStamp()) / fs);
}
- if (rtp.dataLen() >= 0)
+ if (rtp->dataLen() >= 0)
{
- rtp.setTimeStamp(rtp.timeStamp() - firstTS);
+ rtp->setTimeStamp(rtp->timeStamp() - firstTS);
}
packetCount++;
- if (changeStereoMode(rtp, decoders, &stereoMode))
+ if (changeStereoMode(*rtp, decoders, &stereoMode))
{
printf("Warning: stereo mode changed\n");
}
if (stereoMode > stereoModeMono)
{
- if(rtp.splitStereo(slaveRtp, stereoMode))
+ if(rtp->splitStereo(slaveRtp, stereoMode))
{
printf("Error in splitStereo\n");
}
}
#ifdef PLAY_CLEAN
- currTS = rtp.timeStamp();
- rtp.setTime(prev_time + (currTS-prevTS)/(fs/1000));
- prevTS = currTS;
+ currTS = rtp->timeStamp();
+ rtp->setTime(prev_time + (currTS-prevTS)/(fs/1000));
+ prevTS = currTS;
#endif
- }
-
- /* check if time to RecOut */
- if ( (!recoutTimes && (simClock%10)==0) // recout times not given from file
+ }
+
+ /* check if time to RecOut */
+ if ( (!recoutTimes && (simClock%10)==0) // recout times not given from file
|| ( recoutTimes && (simClock >= nextRecoutTime) ) ) // recout times given from file
{
if (stereoMode > stereoModeMono)
@@ -640,27 +667,27 @@
}
- /* increase time */
- simClock+=TIME_STEP;
- }
+ /* increase time */
+ simClock+=TIME_STEP;
+ }
- fclose(in_file);
- fclose(out_file);
+ fclose(in_file);
+ fclose(out_file);
#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EOF;
- fwrite(&temp_var,sizeof(int),1,delay_fid2);
- fwrite(&tot_received_packets,sizeof(WebRtc_UWord32),1,delay_fid2);
- fprintf(delay_fid2,"End of file\n");
- fclose(delay_fid2);
+ temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EOF;
+ fwrite(&temp_var,sizeof(int),1,delay_fid2);
+ fwrite(&tot_received_packets,sizeof(WebRtc_UWord32),1,delay_fid2);
+ fprintf(delay_fid2,"End of file\n");
+ fclose(delay_fid2);
#endif
- WebRtcNetEQ_GetRTCPStats(NetEQvector[0]->instance(), &RTCPstat);
- printf("RTCP statistics:\n");
- printf(" cum_lost : %d\n", (int) RTCPstat.cum_lost);
- printf(" ext_max : %d\n", (int) RTCPstat.ext_max);
- printf(" fraction_lost : %d (%f%%)\n", RTCPstat.fraction_lost, (float)(100.0*RTCPstat.fraction_lost/256.0));
- printf(" jitter : %d\n", (int) RTCPstat.jitter);
+ WebRtcNetEQ_GetRTCPStats(NetEQvector[0]->instance(), &RTCPstat);
+ printf("RTCP statistics:\n");
+ printf(" cum_lost : %d\n", (int) RTCPstat.cum_lost);
+ printf(" ext_max : %d\n", (int) RTCPstat.ext_max);
+ printf(" fraction_lost : %d (%f%%)\n", RTCPstat.fraction_lost, (float)(100.0*RTCPstat.fraction_lost/256.0));
+ printf(" jitter : %d\n", (int) RTCPstat.jitter);
printf("\n Call duration ms : %u\n", simClock-start_clock);
@@ -668,8 +695,11 @@
printf(" RecIn complexity : %.2f MCPS\n", NetEQvector[0]->getRecInTime() / ((float) 1000*(simClock-start_clock)));
printf(" RecOut complexity : %.2f MCPS\n", NetEQvector[0]->getRecOutTime() / ((float) 1000*(simClock-start_clock)));
+ delete rtp;
+ delete slaveRtp;
+
free_coders(decoders);
- //free_coders(0 /* first channel */);
+ //free_coders(0 /* first channel */);
// if (stereoMode > stereoModeMono) {
// free_coders(1 /* second channel */);
// }
@@ -678,7 +708,7 @@
for (std::vector<NETEQTEST_NetEQClass *>::iterator it = NetEQvector.begin();
it < NetEQvector.end(); delete *it++);
- printf("\nSimulation done!\n");
+ printf("\nSimulation done!\n");
#ifdef JUNK_DATA
if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
@@ -697,7 +727,7 @@
fprintf(statfile,"%.4f, %.4f\n", (float) totTime_RecIn.QuadPart / ((float) 1000*(simClock-start_clock)), (float) totTime_RecOut.QuadPart / ((float) 1000*(simClock-start_clock)));
fclose(statfile);*/
- return(0);
+ return(0);
}
@@ -709,8 +739,8 @@
/* Subfunctions */
/****************/
-bool splitStereo(NETEQTEST_RTPpacket& rtp, NETEQTEST_RTPpacket& rtpSlave,
- const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
+ const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
bool *isStereo)
{
@@ -721,13 +751,13 @@
bool isCng = false;
// check payload length
- if (rtp.dataLen() <= 0) {
+ if (rtp->dataLen() <= 0) {
//*isStereo = false; // don't change
return(*isStereo);
}
// check payload type
- WebRtc_Word16 ptype = rtp.payloadType();
+ WebRtc_Word16 ptype = rtp->payloadType();
// is this a cng payload?
for (int k = 0; k < noOfCngCodecs; k++) {
@@ -756,7 +786,7 @@
{
// split the payload if stereo
- if(rtp.splitStereo(rtpSlave, tempStereoMode))
+ if(rtp->splitStereo(rtpSlave, tempStereoMode))
{
printf("Error in splitStereo\n");
}
@@ -792,7 +822,7 @@
*nextTime = (WebRtc_UWord32) tempTime;
return 0;
}
-
+
*nextTime = 0;
fclose(fp);
@@ -814,13 +844,13 @@
*d = (int) temp[1];
return;
}
-
+
*d = -1;
fclose(fp);
return;
}
-
+
void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders)
{
@@ -1461,7 +1491,7 @@
#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
*dec = new decoder_SILK8( pt );
#endif
- break;
+ break;
#endif
#ifdef CODEC_SILK_WB
case NETEQ_CODEC_SILK_12:
@@ -1527,213 +1557,213 @@
int doAPItest() {
- char version[20];
- void *inst;
- enum WebRtcNetEQDecoder usedCodec;
- int NetEqBufferMaxPackets, BufferSizeInBytes;
- WebRtcNetEQ_CodecDef codecInst;
- WebRtcNetEQ_RTCPStat RTCPstat;
- WebRtc_UWord32 timestamp;
- int memorySize;
- int ok;
-
- printf("API-test:\n");
+ char version[20];
+ void *inst;
+ enum WebRtcNetEQDecoder usedCodec;
+ int NetEqBufferMaxPackets, BufferSizeInBytes;
+ WebRtcNetEQ_CodecDef codecInst;
+ WebRtcNetEQ_RTCPStat RTCPstat;
+ WebRtc_UWord32 timestamp;
+ int memorySize;
+ int ok;
- /* get the version string */
- WebRtcNetEQ_GetVersion(version);
- printf("NetEq version: %s\n\n", version);
+ printf("API-test:\n");
- /* test that API functions return -1 if instance is NULL */
+ /* get the version string */
+ WebRtcNetEQ_GetVersion(version);
+ printf("NetEq version: %s\n\n", version);
+
+ /* test that API functions return -1 if instance is NULL */
#define CHECK_MINUS_ONE(x) {int errCode = x; if((errCode)!=-1){printf("\n API test failed at line %d: %s. Function did not return -1 as expected\n",__LINE__,#x); return(-1);}}
//#define RESET_ERROR(x) ((MainInst_t*) x)->ErrorCode = 0;
- inst = NULL;
+ inst = NULL;
- CHECK_MINUS_ONE(WebRtcNetEQ_GetErrorCode(inst))
- CHECK_MINUS_ONE(WebRtcNetEQ_Assign(&inst, NULL))
-// printf("WARNING: Test of WebRtcNetEQ_Assign() is disabled due to a bug.\n");
- usedCodec=kDecoderPCMu;
- CHECK_MINUS_ONE(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter, &NetEqBufferMaxPackets, &BufferSizeInBytes))
- CHECK_MINUS_ONE(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+ CHECK_MINUS_ONE(WebRtcNetEQ_GetErrorCode(inst))
+ CHECK_MINUS_ONE(WebRtcNetEQ_Assign(&inst, NULL))
+// printf("WARNING: Test of WebRtcNetEQ_Assign() is disabled due to a bug.\n");
+ usedCodec=kDecoderPCMu;
+ CHECK_MINUS_ONE(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter, &NetEqBufferMaxPackets, &BufferSizeInBytes))
+ CHECK_MINUS_ONE(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
- CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 8000))
- CHECK_MINUS_ONE(WebRtcNetEQ_SetAVTPlayout(inst, 0))
- CHECK_MINUS_ONE(WebRtcNetEQ_SetExtraDelay(inst, 17))
- CHECK_MINUS_ONE(WebRtcNetEQ_SetPlayoutMode(inst, kPlayoutOn))
-
- CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbReset(inst))
- CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
- CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbRemove(inst, usedCodec))
- WebRtc_Word16 temp1, temp2;
- CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2))
- CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetCodecInfo(inst, 0, &usedCodec))
+ CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 8000))
+ CHECK_MINUS_ONE(WebRtcNetEQ_SetAVTPlayout(inst, 0))
+ CHECK_MINUS_ONE(WebRtcNetEQ_SetExtraDelay(inst, 17))
+ CHECK_MINUS_ONE(WebRtcNetEQ_SetPlayoutMode(inst, kPlayoutOn))
- CHECK_MINUS_ONE(WebRtcNetEQ_RecIn(inst, &temp1, 17, 4711))
- CHECK_MINUS_ONE(WebRtcNetEQ_RecOut(inst, &temp1, &temp2))
- CHECK_MINUS_ONE(WebRtcNetEQ_GetRTCPStats(inst, &RTCPstat)); // error here!!!
- CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechTimeStamp(inst, &timestamp))
- WebRtcNetEQOutputType temptype;
- CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechOutputType(inst, &temptype))
+ CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbReset(inst))
+ CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+ CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbRemove(inst, usedCodec))
+ WebRtc_Word16 temp1, temp2;
+ CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2))
+ CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetCodecInfo(inst, 0, &usedCodec))
- WebRtc_UWord8 tempFlags;
- WebRtc_UWord16 utemp1, utemp2;
- CHECK_MINUS_ONE(WebRtcNetEQ_VQmonRecOutStatistics(inst, &utemp1, &utemp2, &tempFlags))
- CHECK_MINUS_ONE(WebRtcNetEQ_VQmonGetRxStatistics(inst, &utemp1, &utemp2))
+ CHECK_MINUS_ONE(WebRtcNetEQ_RecIn(inst, &temp1, 17, 4711))
+ CHECK_MINUS_ONE(WebRtcNetEQ_RecOut(inst, &temp1, &temp2))
+ CHECK_MINUS_ONE(WebRtcNetEQ_GetRTCPStats(inst, &RTCPstat)); // error here!!!
+ CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechTimeStamp(inst, &timestamp))
+ WebRtcNetEQOutputType temptype;
+ CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechOutputType(inst, &temptype))
- WebRtcNetEQ_AssignSize(&memorySize);
- CHECK_ZERO(WebRtcNetEQ_Assign(&inst, malloc(memorySize)))
+ WebRtc_UWord8 tempFlags;
+ WebRtc_UWord16 utemp1, utemp2;
+ CHECK_MINUS_ONE(WebRtcNetEQ_VQmonRecOutStatistics(inst, &utemp1, &utemp2, &tempFlags))
+ CHECK_MINUS_ONE(WebRtcNetEQ_VQmonGetRxStatistics(inst, &utemp1, &utemp2))
- /* init with wrong sample frequency */
- CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 17))
-
- /* init with correct fs */
- CHECK_ZERO(WebRtcNetEQ_Init(inst, 8000))
+ WebRtcNetEQ_AssignSize(&memorySize);
+ CHECK_ZERO(WebRtcNetEQ_Assign(&inst, malloc(memorySize)))
- /* GetRecommendedBufferSize with wrong codec */
- usedCodec=kDecoderReservedStart;
- ok = WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes);
- if((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNKNOWN_CODEC))){
- printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong codec.\n");
- printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
- }
- //RESET_ERROR(inst)
+ /* init with wrong sample frequency */
+ CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 17))
- /* GetRecommendedBufferSize with wrong network type */
- usedCodec = kDecoderPCMu;
- ok=WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, (enum WebRtcNetEQNetworkType) 4711 , &NetEqBufferMaxPackets, &BufferSizeInBytes);
- if ((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_NETWORK_TYPE))) {
- printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong network type.\n");
- printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
- //RESET_ERROR(inst)
- }
- CHECK_ZERO(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes))
+ /* init with correct fs */
+ CHECK_ZERO(WebRtcNetEQ_Init(inst, 8000))
- /* try to do RecIn before assigning the packet buffer */
-/* makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, 17,4711, 1235412312);
- makeDTMFpayload(&rtp_data[12], 1, 1, 10, 100);
- ok = WebRtcNetEQ_RecIn(inst, (short *) rtp_data, 12+4, 4711);
- printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));*/
-
- /* check all limits of WebRtcNetEQ_AssignBuffer */
- ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, 149<<1);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
- printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong sizeinbytes\n");
- }
- ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NULL, BufferSizeInBytes);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
- printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for NULL memory pointer\n");
- }
- ok=WebRtcNetEQ_AssignBuffer(inst, 1, NetEqPacketBuffer, BufferSizeInBytes);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
- printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
- }
- ok=WebRtcNetEQ_AssignBuffer(inst, 601, NetEqPacketBuffer, BufferSizeInBytes);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
- printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
- }
+ /* GetRecommendedBufferSize with wrong codec */
+ usedCodec=kDecoderReservedStart;
+ ok = WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+ if((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNKNOWN_CODEC))){
+ printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong codec.\n");
+ printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+ }
+ //RESET_ERROR(inst)
- /* do correct assignbuffer */
- CHECK_ZERO(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+ /* GetRecommendedBufferSize with wrong network type */
+ usedCodec = kDecoderPCMu;
+ ok=WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, (enum WebRtcNetEQNetworkType) 4711 , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+ if ((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_NETWORK_TYPE))) {
+ printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong network type.\n");
+ printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+ //RESET_ERROR(inst)
+ }
+ CHECK_ZERO(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes))
- ok=WebRtcNetEQ_SetExtraDelay(inst, -1);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
- printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too small delay\n");
- }
- ok=WebRtcNetEQ_SetExtraDelay(inst, 1001);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
- printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too large delay\n");
- }
+ /* try to do RecIn before assigning the packet buffer */
+/* makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, 17,4711, 1235412312);
+ makeDTMFpayload(&rtp_data[12], 1, 1, 10, 100);
+ ok = WebRtcNetEQ_RecIn(inst, (short *) rtp_data, 12+4, 4711);
+ printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));*/
- ok=WebRtcNetEQ_SetPlayoutMode(inst,(enum WebRtcNetEQPlayoutMode) 4711);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_PLAYOUTMODE))) {
- printf("WebRtcNetEQ_SetPlayoutMode() did not return proper error code for wrong mode\n");
- }
+ /* check all limits of WebRtcNetEQ_AssignBuffer */
+ ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, 149<<1);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+ printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong sizeinbytes\n");
+ }
+ ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NULL, BufferSizeInBytes);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+ printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for NULL memory pointer\n");
+ }
+ ok=WebRtcNetEQ_AssignBuffer(inst, 1, NetEqPacketBuffer, BufferSizeInBytes);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+ printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+ }
+ ok=WebRtcNetEQ_AssignBuffer(inst, 601, NetEqPacketBuffer, BufferSizeInBytes);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+ printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+ }
- /* number of codecs should return zero before adding any codecs */
- WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2);
- if(temp1!=0)
- printf("WebRtcNetEQ_CodecDbGetSizeInfo() return non-zero number of codecs in DB before adding any codecs\n");
+ /* do correct assignbuffer */
+ CHECK_ZERO(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
- /* get info from empty database */
- ok=WebRtcNetEQ_CodecDbGetCodecInfo(inst, 17, &usedCodec);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST1))) {
- printf("WebRtcNetEQ_CodecDbGetCodecInfo() did not return proper error code for out-of-range entry number\n");
- }
+ ok=WebRtcNetEQ_SetExtraDelay(inst, -1);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+ printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too small delay\n");
+ }
+ ok=WebRtcNetEQ_SetExtraDelay(inst, 1001);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+ printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too large delay\n");
+ }
- /* remove codec from empty database */
- ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderPCMa);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST4))) {
- printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that has not been added\n");
- }
+ ok=WebRtcNetEQ_SetPlayoutMode(inst,(enum WebRtcNetEQPlayoutMode) 4711);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_PLAYOUTMODE))) {
+ printf("WebRtcNetEQ_SetPlayoutMode() did not return proper error code for wrong mode\n");
+ }
- /* add codec with unsupported fs */
+ /* number of codecs should return zero before adding any codecs */
+ WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2);
+ if(temp1!=0)
+ printf("WebRtcNetEQ_CodecDbGetSizeInfo() return non-zero number of codecs in DB before adding any codecs\n");
+
+ /* get info from empty database */
+ ok=WebRtcNetEQ_CodecDbGetCodecInfo(inst, 17, &usedCodec);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST1))) {
+ printf("WebRtcNetEQ_CodecDbGetCodecInfo() did not return proper error code for out-of-range entry number\n");
+ }
+
+ /* remove codec from empty database */
+ ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderPCMa);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST4))) {
+ printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that has not been added\n");
+ }
+
+ /* add codec with unsupported fs */
#ifdef CODEC_PCM16B
#ifndef NETEQ_48KHZ_WIDEBAND
- SET_CODEC_PAR(codecInst,kDecoderPCM16Bswb48kHz,77,NULL,48000);
- SET_PCM16B_SWB48_FUNCTIONS(codecInst);
- ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_FS))) {
- printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding codec with unsupported sample freq\n");
- }
+ SET_CODEC_PAR(codecInst,kDecoderPCM16Bswb48kHz,77,NULL,48000);
+ SET_PCM16B_SWB48_FUNCTIONS(codecInst);
+ ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_FS))) {
+ printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding codec with unsupported sample freq\n");
+ }
#else
- printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled with 48kHz support.\n");
+ printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled with 48kHz support.\n");
#endif
#else
printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled without PCM16B support.\n");
#endif
- /* add two codecs with identical payload types */
- SET_CODEC_PAR(codecInst,kDecoderPCMa,17,NULL,8000);
- SET_PCMA_FUNCTIONS(codecInst);
- CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+ /* add two codecs with identical payload types */
+ SET_CODEC_PAR(codecInst,kDecoderPCMa,17,NULL,8000);
+ SET_PCMA_FUNCTIONS(codecInst);
+ CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
- SET_CODEC_PAR(codecInst,kDecoderPCMu,17,NULL,8000);
- SET_PCMU_FUNCTIONS(codecInst);
- ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
- printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding two codecs with identical payload types\n");
- }
+ SET_CODEC_PAR(codecInst,kDecoderPCMu,17,NULL,8000);
+ SET_PCMU_FUNCTIONS(codecInst);
+ ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+ printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding two codecs with identical payload types\n");
+ }
- /* try adding several payload types for CNG codecs */
- SET_CODEC_PAR(codecInst,kDecoderCNG,105,NULL,16000);
- SET_CNG_FUNCTIONS(codecInst);
- CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
- SET_CODEC_PAR(codecInst,kDecoderCNG,13,NULL,8000);
- SET_CNG_FUNCTIONS(codecInst);
- CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+ /* try adding several payload types for CNG codecs */
+ SET_CODEC_PAR(codecInst,kDecoderCNG,105,NULL,16000);
+ SET_CNG_FUNCTIONS(codecInst);
+ CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+ SET_CODEC_PAR(codecInst,kDecoderCNG,13,NULL,8000);
+ SET_CNG_FUNCTIONS(codecInst);
+ CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
/* try adding a speech codec over a CNG codec */
SET_CODEC_PAR(codecInst,kDecoderISAC,105,NULL,16000); /* same as WB CNG above */
- SET_ISAC_FUNCTIONS(codecInst);
- ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
- printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
- }
+ SET_ISAC_FUNCTIONS(codecInst);
+ ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+ printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+ }
/* try adding a CNG codec over a speech codec */
SET_CODEC_PAR(codecInst,kDecoderCNG,17,NULL,32000); /* same as PCMU above */
- SET_CNG_FUNCTIONS(codecInst);
- ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
- printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
- }
+ SET_CNG_FUNCTIONS(codecInst);
+ ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+ printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+ }
- /* remove codec out of range */
- ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedStart);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
- printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
- }
- ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedEnd);
- if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
- printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
- }
+ /* remove codec out of range */
+ ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedStart);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+ printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+ }
+ ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedEnd);
+ if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+ printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+ }
- /*SET_CODEC_PAR(codecInst,kDecoderEG711a,NETEQ_CODEC_EG711A_PT,NetEqiPCMAState,8000);
- SET_IPCMA_FUNCTIONS(codecInst);
- CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+ /*SET_CODEC_PAR(codecInst,kDecoderEG711a,NETEQ_CODEC_EG711A_PT,NetEqiPCMAState,8000);
+ SET_IPCMA_FUNCTIONS(codecInst);
+ CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
*/
- free(inst);
+ free(inst);
- return(0);
+ return(0);
}
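
The negative tests above all repeat one pattern: the call must return -1 and
WebRtcNetEQ_GetErrorCode(inst) must report the expected error code (negated).
A hypothetical helper in the same spirit as CHECK_ZERO could state that once;
CHECK_NETEQ_FAIL below is a sketch, not part of the test file:

    #define CHECK_NETEQ_FAIL(call, expected_error) { \
        int retval = (call); \
        if ((retval != -1) || (WebRtcNetEQ_GetErrorCode(inst) != -(expected_error))) { \
            printf(#call " did not return proper error code %s\n", #expected_error); \
        } \
    }

    /* usage: CHECK_NETEQ_FAIL(WebRtcNetEQ_SetExtraDelay(inst, -1), FAULTY_DELAYVALUE) */
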
diff --git a/modules/audio_coding/neteq/test/RTPanalyze.cc b/modules/audio_coding/neteq/test/RTPanalyze.cc
index 12617dd..4d7d573 100644
--- a/modules/audio_coding/neteq/test/RTPanalyze.cc
+++ b/modules/audio_coding/neteq/test/RTPanalyze.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -13,6 +13,9 @@
#include <vector>
#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
+
+//#define WEBRTC_DUMMY_RTP
enum {
kRedPayloadType = 127
@@ -38,7 +41,11 @@
// Read file header.
NETEQTEST_RTPpacket::skipFileHeader(in_file);
+#ifdef WEBRTC_DUMMY_RTP
+ NETEQTEST_DummyRTPpacket packet;
+#else
NETEQTEST_RTPpacket packet;
+#endif
while (packet.readFromFile(in_file) >= 0) {
// Write packet data to file.
diff --git a/modules/audio_coding/neteq/test/RTPchange.cc b/modules/audio_coding/neteq/test/RTPchange.cc
index ecbd81c..259a773 100644
--- a/modules/audio_coding/neteq/test/RTPchange.cc
+++ b/modules/audio_coding/neteq/test/RTPchange.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -13,10 +13,11 @@
#include <algorithm>
#include <vector>
-#include "gtest/gtest.h"
#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
#define FIRSTLINELEN 40
+//#define WEBRTC_DUMMY_RTP
static bool pktCmp(NETEQTEST_RTPpacket *a, NETEQTEST_RTPpacket *b) {
return (a->time() < b->time());
@@ -91,7 +92,11 @@
while (1) {
// Insert in vector.
+#ifdef WEBRTC_DUMMY_RTP
+ NETEQTEST_RTPpacket *new_packet = new NETEQTEST_DummyRTPpacket();
+#else
NETEQTEST_RTPpacket *new_packet = new NETEQTEST_RTPpacket();
+#endif
if (new_packet->readFromFile(in_file) < 0) {
// End of file.
break;
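
RTPanalyze and RTPchange pick the packet class at build time through the
commented-out WEBRTC_DUMMY_RTP define. Since NETEQTEST_DummyRTPpacket is
assigned to a NETEQTEST_RTPpacket* above, it derives from the full packet
class, so a runtime switch works just as well; rtp_to_text.cc below does
exactly that with its -d option (parse_headers_only is an assumed flag):

    NETEQTEST_RTPpacket* new_packet = parse_headers_only
        ? new NETEQTEST_DummyRTPpacket()
        : new NETEQTEST_RTPpacket();
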
diff --git a/modules/audio_coding/neteq/test/rtp_to_text.cc b/modules/audio_coding/neteq/test/rtp_to_text.cc
new file mode 100644
index 0000000..1112d79
--- /dev/null
+++ b/modules/audio_coding/neteq/test/rtp_to_text.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Parses an rtpdump file and outputs a text table parsable by parseLog.m.
+ * The output file will have .txt appended to the specified base name.
+ * $ rtp_to_text [-d] <input_rtp_file> <output_base_name>
+ *
+ * -d RTP headers only
+ *
+ */
+
+#include "data_log.h"
+#include "NETEQTEST_DummyRTPpacket.h"
+#include "NETEQTEST_RTPpacket.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+
+using ::webrtc::DataLog;
+
+int main(int argc, char* argv[])
+{
+ int arg_count = 1;
+ NETEQTEST_RTPpacket* packet;
+
+ if (argc < 3)
+ {
+ printf("Usage: %s [-d] <input_rtp_file> <output_base_name>\n", argv[0]);
+ return -1;
+ }
+
+ // Parse dummy option
+ if (argc >= 3 && strcmp(argv[arg_count], "-d") == 0)
+ {
+ packet = new NETEQTEST_DummyRTPpacket;
+ ++arg_count;
+ }
+ else
+ {
+ packet = new NETEQTEST_RTPpacket;
+ }
+
+ std::string input_filename = argv[arg_count++];
+ std::string table_name = argv[arg_count];
+
+ std::cout << "Input file: " << input_filename << std::endl;
+ std::cout << "Output file: " << table_name << ".txt" << std::endl;
+
+ FILE *inFile=fopen(input_filename.c_str(),"rb");
+ if (!inFile)
+ {
+ std::cout << "Cannot open input file " << input_filename << std::endl;
+ return -1;
+ }
+
+ // Set up the DataLog and define the table
+ DataLog::CreateLog();
+ if (DataLog::AddTable(table_name) < 0)
+ {
+ std::cout << "Error adding table " << table_name << ".txt" << std::endl;
+ return -1;
+ }
+
+ DataLog::AddColumn(table_name, "seq", 1);
+ DataLog::AddColumn(table_name, "ssrc", 1);
+ DataLog::AddColumn(table_name, "payload type", 1);
+ DataLog::AddColumn(table_name, "length", 1);
+ DataLog::AddColumn(table_name, "timestamp", 1);
+ DataLog::AddColumn(table_name, "marker bit", 1);
+ DataLog::AddColumn(table_name, "arrival", 1);
+
+ // read file header
+ char firstline[FIRSTLINELEN];
+ if (fgets(firstline, FIRSTLINELEN, inFile) == NULL)
+ {
+ std::cout << "Error reading file " << input_filename << std::endl;
+ return -1;
+ }
+
+ // start_sec + start_usec + source + port + padding
+ if (fread(firstline, 4+4+4+2+2, 1, inFile) != 1)
+ {
+ std::cout << "Error reading file " << input_filename << std::endl;
+ return -1;
+ }
+
+ while (packet->readFromFile(inFile) >= 0)
+ {
+ // Write the packet headers to the log table.
+ DataLog::InsertCell(table_name, "seq", packet->sequenceNumber());
+ DataLog::InsertCell(table_name, "ssrc", packet->SSRC());
+ DataLog::InsertCell(table_name, "payload type", packet->payloadType());
+ DataLog::InsertCell(table_name, "length", packet->dataLen());
+ DataLog::InsertCell(table_name, "timestamp", packet->timeStamp());
+ DataLog::InsertCell(table_name, "marker bit", packet->markerBit());
+ DataLog::InsertCell(table_name, "arrival", packet->time());
+ DataLog::NextRow(table_name);
+ }
+
+ DataLog::ReturnLog();
+
+ fclose(inFile);
+
+ return 0;
+}
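
The two header reads above follow the rtpdump file layout: a first text line
(read with fgets) and then a fixed 16-byte binary block. A hypothetical
struct for that block, matching the 4+4+4+2+2 fread:

    struct RtpDumpFileHeader {
        uint32_t start_sec;   // recording start time, seconds
        uint32_t start_usec;  // recording start time, microseconds
        uint32_t source;      // recorded source address
        uint16_t port;        // recorded UDP port
        uint16_t padding;     // unused
    };  // 16 bytes on disk
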
diff --git a/modules/audio_device/main/source/audio_device.gypi b/modules/audio_device/main/source/audio_device.gypi
index 984dd30..b4cdc06 100644
--- a/modules/audio_device/main/source/audio_device.gypi
+++ b/modules/audio_device/main/source/audio_device.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -124,7 +124,6 @@
'link_settings': {
'libraries': [
'-ldl',
- '-lasound',
],
},
'conditions': [
@@ -140,11 +139,6 @@
'linux/pulseaudiosymboltable_linux.cc',
'linux/pulseaudiosymboltable_linux.h',
],
- 'link_settings': {
- 'libraries': [
- '-lpulse',
- ],
- },
}],
],
}],
diff --git a/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc b/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
index 449ae8e..d4fdaee 100644
--- a/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
+++ b/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
@@ -1976,11 +1976,8 @@
_samplingFreq = paSampleRate / 1000;
// Copy the PA server version
- if (_paServerVersion)
- {
- strncpy(_paServerVersion, i->server_version, 31);
- _paServerVersion[31] = '\0';
- }
+ strncpy(_paServerVersion, i->server_version, 31);
+ _paServerVersion[31] = '\0';
if (_recDisplayDeviceName)
{
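
With the NULL check removed, the code now assumes _paServerVersion always
points at a buffer of at least 32 bytes; only the truncation guarantee is
kept. The idiom in isolation, with an assumed 32-byte destination:

    char server_version[32];
    strncpy(server_version, i->server_version, sizeof(server_version) - 1);
    server_version[sizeof(server_version) - 1] = '\0';  // strncpy may not terminate
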
diff --git a/modules/audio_processing/Android.mk b/modules/audio_processing/Android.mk
index 2ab5bb6..7b2a499 100644
--- a/modules/audio_processing/Android.mk
+++ b/modules/audio_processing/Android.mk
@@ -33,7 +33,9 @@
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
$(MY_WEBRTC_COMMON_DEFS) \
- '-DWEBRTC_NS_FIXED'
+ '-DWEBRTC_NS_FIXED' \
+ '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+ '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
# floating point
# -DWEBRTC_NS_FLOAT'
@@ -72,7 +74,9 @@
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
- $(MY_WEBRTC_COMMON_DEFS)
+ $(MY_WEBRTC_COMMON_DEFS) \
+ '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+ '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include \
@@ -90,7 +94,7 @@
libstlport \
libwebrtc_audio_preprocessing
-LOCAL_MODULE:= webrtc_apm_process_test
+LOCAL_MODULE:= webrtc_audioproc
ifdef NDK_ROOT
include $(BUILD_EXECUTABLE)
@@ -113,7 +117,9 @@
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
$(MY_WEBRTC_COMMON_DEFS) \
- '-DWEBRTC_APM_UNIT_TEST_FIXED_PROFILE'
+ '-DWEBRTC_AUDIOPROC_FIXED_PROFILE' \
+ '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+ '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include \
@@ -133,7 +139,7 @@
libstlport \
libwebrtc_audio_preprocessing
-LOCAL_MODULE:= webrtc_apm_unit_test
+LOCAL_MODULE:= webrtc_audioproc_unittest
ifdef NDK_ROOT
include $(BUILD_EXECUTABLE)
diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc
index 2dea0e9..f1f2f48 100644
--- a/modules/audio_processing/audio_processing_impl.cc
+++ b/modules/audio_processing/audio_processing_impl.cc
@@ -28,7 +28,7 @@
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// Files generated at build-time by the protobuf compiler.
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/src/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
diff --git a/modules/audio_processing/test/process_test.cc b/modules/audio_processing/test/process_test.cc
index 2023ddb..7ebf8ad 100644
--- a/modules/audio_processing/test/process_test.cc
+++ b/modules/audio_processing/test/process_test.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -21,7 +21,7 @@
#include "module_common_types.h"
#include "scoped_ptr.h"
#include "tick_util.h"
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/src/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
diff --git a/modules/audio_processing/test/unit_test.cc b/modules/audio_processing/test/unit_test.cc
index f92490a..00161a6 100644
--- a/modules/audio_processing/test/unit_test.cc
+++ b/modules/audio_processing/test/unit_test.cc
@@ -20,7 +20,7 @@
#include "testsupport/fileutils.h"
#include "thread_wrapper.h"
#include "trace.h"
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/src/modules/audio_processing/test/unittest.pb.h"
#else
#include "webrtc/audio_processing/unittest.pb.h"
diff --git a/modules/interface/module_common_types.h b/modules/interface/module_common_types.h
index 2e1beaa..1c5c18b 100644
--- a/modules/interface/module_common_types.h
+++ b/modules/interface/module_common_types.h
@@ -297,16 +297,31 @@
WebRtc_UWord16 JBabsMax;
};
+// Struct containing forward error correction settings.
+struct FecProtectionParams {
+ int fec_rate;
+ bool use_uep_protection;
+ int max_fec_frames;
+};
+
// class describing a complete, or parts of an encoded frame.
class EncodedVideoData
{
public:
EncodedVideoData() :
+ payloadType(0),
+ timeStamp(0),
+ renderTimeMs(0),
+ encodedWidth(0),
+ encodedHeight(0),
completeFrame(false),
missingFrame(false),
payloadData(NULL),
payloadSize(0),
- bufferSize(0)
+ bufferSize(0),
+ fragmentationHeader(),
+ frameType(kVideoFrameDelta),
+ codec(kVideoCodecUnknown)
{};
EncodedVideoData(const EncodedVideoData& data)
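
FecProtectionParams groups the per-frame-type FEC settings into one struct.
A hedged example with illustrative values; per the old SetFECCodeRate()
documentation, fec_rate is on a 0-255 scale where 255 means 100% added
protection packets:

    FecProtectionParams delta_params = {64, false, 10};  // ~25% FEC, no UEP
    FecProtectionParams key_params   = {127, true, 1};   // ~50% FEC, UEP on
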
diff --git a/modules/rtp_rtcp/interface/rtp_rtcp.h b/modules/rtp_rtcp/interface/rtp_rtcp.h
index 0293911..12fa090 100644
--- a/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ b/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -367,39 +367,6 @@
virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
/*
- * set RTPKeepaliveStatus
- *
- * enable - on/off
- * unknownPayloadType - payload type to use for RTP keepalive
- * deltaTransmitTimeMS - delta time between RTP keepalive packets
- *
- * return -1 on failure else 0
- */
- virtual WebRtc_Word32 SetRTPKeepaliveStatus(
- const bool enable,
- const int unknownPayloadType,
- const WebRtc_UWord16 deltaTransmitTimeMS) = 0;
-
- /*
- * Get RTPKeepaliveStatus
- *
- * enable - on/off
- * unknownPayloadType - payload type in use for RTP keepalive
- * deltaTransmitTimeMS - delta time between RTP keepalive packets
- *
- * return -1 on failure else 0
- */
- virtual WebRtc_Word32 RTPKeepaliveStatus(
- bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS) const = 0;
-
- /*
- * check if RTPKeepaliveStatus is enabled
- */
- virtual bool RTPKeepalive() const = 0;
-
- /*
* set codec name and payload type
*
* return -1 on failure else 0
@@ -1049,32 +1016,9 @@
WebRtc_UWord8& payloadTypeFEC) = 0;
- /*
- * Set FEC code rate of key and delta frames
- * codeRate on a scale of 0 to 255 where 255 is 100% added packets, hence protect up to 50% packet loss
- *
- * return -1 on failure else 0
- */
- virtual WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate) = 0;
-
-
- /*
- * Set FEC unequal protection (UEP) across packets,
- * for key and delta frames.
- *
- * If keyUseUepProtection is true UEP is enabled for key frames.
- * If deltaUseUepProtection is true UEP is enabled for delta frames.
- *
- * UEP skews the FEC protection towards being spent more on the
- * important packets, at the cost of less FEC protection for the
- * non-important packets.
- *
- * return -1 on failure else 0
- */
- virtual WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection) = 0;
-
+ virtual WebRtc_Word32 SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params) = 0;
/*
* Set method for requesting a new key frame
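
The two removed FEC setters collapse into the single struct-based call. An
old pair of calls maps onto SetFecParameters() roughly like this (a sketch;
the variable names are placeholders, and max_fec_frames has no old-API
equivalent):

    // Old: SetFECCodeRate(key_rate, delta_rate);
    //      SetFECUepProtection(key_uep, delta_uep);
    FecProtectionParams delta = {delta_rate, delta_uep, max_fec_frames};
    FecProtectionParams key = {key_rate, key_uep, max_fec_frames};
    rtp_rtcp->SetFecParameters(&delta, &key);
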
diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index d3ab74f..3218c7c 100644
--- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -94,16 +94,6 @@
WebRtc_UWord16());
MOCK_CONST_METHOD0(MaxDataPayloadLength,
WebRtc_UWord16());
- MOCK_METHOD3(SetRTPKeepaliveStatus,
- WebRtc_Word32(const bool enable,
- const int unknownPayloadType,
- const WebRtc_UWord16 deltaTransmitTimeMS));
- MOCK_CONST_METHOD3(RTPKeepaliveStatus,
- WebRtc_Word32(bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS));
- MOCK_CONST_METHOD0(RTPKeepalive,
- bool());
MOCK_METHOD1(RegisterSendPayload,
WebRtc_Word32(const CodecInst& voiceCodec));
MOCK_METHOD1(RegisterSendPayload,
@@ -280,10 +270,9 @@
WebRtc_Word32(const bool enable, const WebRtc_UWord8 payloadTypeRED, const WebRtc_UWord8 payloadTypeFEC));
MOCK_METHOD3(GenericFECStatus,
WebRtc_Word32(bool& enable, WebRtc_UWord8& payloadTypeRED, WebRtc_UWord8& payloadTypeFEC));
- MOCK_METHOD2(SetFECCodeRate,
- WebRtc_Word32(const WebRtc_UWord8 keyFrameCodeRate, const WebRtc_UWord8 deltaFrameCodeRate));
- MOCK_METHOD2(SetFECUepProtection,
- WebRtc_Word32(const bool keyUseUepProtection, const bool deltaUseUepProtection));
+ MOCK_METHOD2(SetFecParameters,
+ WebRtc_Word32(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params));
MOCK_METHOD1(SetKeyFrameRequestMethod,
WebRtc_Word32(const KeyFrameRequestMethod method));
MOCK_METHOD0(RequestKeyFrame,
diff --git a/modules/rtp_rtcp/source/fec_test_helper.cc b/modules/rtp_rtcp/source/fec_test_helper.cc
new file mode 100644
index 0000000..1fbadb8
--- /dev/null
+++ b/modules/rtp_rtcp/source/fec_test_helper.cc
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+
+FrameGenerator::FrameGenerator()
+ : num_packets_(0),
+ seq_num_(0),
+ timestamp_(0) {}
+
+void FrameGenerator::NewFrame(int num_packets) {
+ num_packets_ = num_packets;
+ timestamp_ += 3000;
+}
+
+uint16_t FrameGenerator::NextSeqNum() {
+ return ++seq_num_;
+}
+
+RtpPacket* FrameGenerator::NextPacket(int offset, size_t length) {
+ RtpPacket* rtp_packet = new RtpPacket;
+ for (size_t i = 0; i < length; ++i)
+ rtp_packet->data[i + kRtpHeaderSize] = offset + i;
+ rtp_packet->length = length + kRtpHeaderSize;
+ memset(&rtp_packet->header, 0, sizeof(WebRtcRTPHeader));
+ rtp_packet->header.frameType = kVideoFrameDelta;
+ rtp_packet->header.header.headerLength = kRtpHeaderSize;
+ rtp_packet->header.header.markerBit = (num_packets_ == 1);
+ rtp_packet->header.header.sequenceNumber = seq_num_;
+ rtp_packet->header.header.timestamp = timestamp_;
+ rtp_packet->header.header.payloadType = kVp8PayloadType;
+ BuildRtpHeader(rtp_packet->data, &rtp_packet->header.header);
+ ++seq_num_;
+ --num_packets_;
+ return rtp_packet;
+}
+
+// Creates a new RtpPacket with the RED header added to the packet.
+RtpPacket* FrameGenerator::BuildMediaRedPacket(const RtpPacket* packet) {
+ const int kHeaderLength = packet->header.header.headerLength;
+ RtpPacket* red_packet = new RtpPacket;
+ red_packet->header = packet->header;
+ red_packet->length = packet->length + 1; // 1 byte RED header.
+ memset(red_packet->data, 0, red_packet->length);
+ // Copy RTP header.
+ memcpy(red_packet->data, packet->data, kHeaderLength);
+ SetRedHeader(red_packet, red_packet->data[1] & 0x7f, kHeaderLength);
+ memcpy(red_packet->data + kHeaderLength + 1, packet->data + kHeaderLength,
+ packet->length - kHeaderLength);
+ return red_packet;
+}
+
+// Creates a new RtpPacket with FEC payload and red header. Does this by
+// creating a new fake media RtpPacket, clears the marker bit and adds a RED
+// header. Finally replaces the payload with the content of |packet->data|.
+RtpPacket* FrameGenerator::BuildFecRedPacket(const Packet* packet) {
+ // Create a fake media packet to get a correct header. 1 byte RED header.
+ ++num_packets_;
+ RtpPacket* red_packet = NextPacket(0, packet->length + 1);
+ red_packet->data[1] &= ~0x80; // Clear marker bit.
+ const int kHeaderLength = red_packet->header.header.headerLength;
+ SetRedHeader(red_packet, kFecPayloadType, kHeaderLength);
+ memcpy(red_packet->data + kHeaderLength + 1, packet->data,
+ packet->length);
+ red_packet->length = kHeaderLength + 1 + packet->length;
+ return red_packet;
+}
+
+void FrameGenerator::SetRedHeader(Packet* red_packet, uint8_t payload_type,
+ int header_length) const {
+ // Replace pltype.
+ red_packet->data[1] &= 0x80; // Reset.
+ red_packet->data[1] += kRedPayloadType; // Replace.
+
+ // Add RED header, f-bit always 0.
+ red_packet->data[header_length] = payload_type;
+}
+
+void FrameGenerator::BuildRtpHeader(uint8_t* data, const RTPHeader* header) {
+ data[0] = 0x80; // Version 2.
+ data[1] = header->payloadType;
+ data[1] |= (header->markerBit ? kRtpMarkerBitMask : 0);
+ ModuleRTPUtility::AssignUWord16ToBuffer(data+2, header->sequenceNumber);
+ ModuleRTPUtility::AssignUWord32ToBuffer(data+4, header->timestamp);
+ ModuleRTPUtility::AssignUWord32ToBuffer(data+8, header->ssrc);
+}
+
+} // namespace webrtc
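
A minimal sketch of how the tests drive FrameGenerator (the caller owns and
must delete every returned packet):

    FrameGenerator generator;
    generator.NewFrame(2);                         // next frame spans two packets
    RtpPacket* p1 = generator.NextPacket(0, 10);   // marker bit clear
    RtpPacket* p2 = generator.NextPacket(10, 10);  // last in frame: marker bit set
    RtpPacket* red = generator.BuildMediaRedPacket(p1);  // p1 behind a RED header
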
diff --git a/modules/rtp_rtcp/source/fec_test_helper.h b/modules/rtp_rtcp/source/fec_test_helper.h
new file mode 100644
index 0000000..4a037c7
--- /dev/null
+++ b/modules/rtp_rtcp/source/fec_test_helper.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+namespace webrtc {
+
+enum { kRtpHeaderSize = 12 };
+enum { kFecPayloadType = 96 };
+enum { kRedPayloadType = 97 };
+enum { kVp8PayloadType = 120 };
+
+typedef ForwardErrorCorrection::Packet Packet;
+
+struct RtpPacket : public Packet {
+ WebRtcRTPHeader header;
+};
+
+class FrameGenerator {
+ public:
+ FrameGenerator();
+
+ void NewFrame(int num_packets);
+
+ uint16_t NextSeqNum();
+
+ RtpPacket* NextPacket(int offset, size_t length);
+
+ // Creates a new RtpPacket with the RED header added to the packet.
+ RtpPacket* BuildMediaRedPacket(const RtpPacket* packet);
+
+ // Creates a new RtpPacket with FEC payload and red header. Does this by
+ // creating a new fake media RtpPacket, clears the marker bit and adds a RED
+ // header. Finally replaces the payload with the content of |packet->data|.
+ RtpPacket* BuildFecRedPacket(const Packet* packet);
+
+ void SetRedHeader(Packet* red_packet, uint8_t payload_type,
+ int header_length) const;
+
+ private:
+ static void BuildRtpHeader(uint8_t* data, const RTPHeader* header);
+
+ int num_packets_;
+ uint16_t seq_num_;
+ uint32_t timestamp_;
+};
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
diff --git a/modules/rtp_rtcp/source/forward_error_correction.cc b/modules/rtp_rtcp/source/forward_error_correction.cc
index 4476d51..7ce0fe2 100644
--- a/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -375,7 +375,8 @@
}
RecoveredPacket* recoverdPacketToInsert = new RecoveredPacket;
recoverdPacketToInsert->wasRecovered = false;
- recoverdPacketToInsert->returned = false;
+ // The inserted media packet has already been sent to the VCM (see
+ // ReceiverFEC::ProcessReceivedFEC), so mark it as returned to keep
+ // DecodeFEC from delivering it a second time.
+ recoverdPacketToInsert->returned = true;
recoverdPacketToInsert->seqNum = rxPacket->seqNum;
recoverdPacketToInsert->pkt = rxPacket->pkt;
recoverdPacketToInsert->pkt->length = rxPacket->pkt->length;
diff --git a/modules/rtp_rtcp/source/producer_fec.cc b/modules/rtp_rtcp/source/producer_fec.cc
new file mode 100644
index 0000000..20e0249
--- /dev/null
+++ b/modules/rtp_rtcp/source/producer_fec.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/producer_fec.h"
+
+#include <stdio.h>
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+
+// Minimum RTP header size in bytes.
+enum { kRtpHeaderSize = 12 };
+enum { kREDForFECHeaderLength = 1 };
+enum { kMaxOverhead = 60 }; // Q8.
+
+struct RtpPacket {
+ WebRtc_UWord16 rtpHeaderLength;
+ ForwardErrorCorrection::Packet* pkt;
+};
+
+RedPacket::RedPacket(int length)
+ : data_(new uint8_t[length]),
+ length_(length),
+ header_length_(0) {
+}
+
+RedPacket::~RedPacket() {
+ delete [] data_;
+}
+
+void RedPacket::CreateHeader(const uint8_t* rtp_header, int header_length,
+ int red_pl_type, int pl_type) {
+ assert(header_length + kREDForFECHeaderLength <= length_);
+ memcpy(data_, rtp_header, header_length);
+ // Replace payload type.
+ data_[1] &= 0x80;
+ data_[1] += red_pl_type;
+ // Add RED header
+ // f-bit always 0
+ data_[header_length] = pl_type;
+ header_length_ = header_length + kREDForFECHeaderLength;
+}
+
+void RedPacket::SetSeqNum(int seq_num) {
+ assert(seq_num >= 0 && seq_num < (1<<16));
+ ModuleRTPUtility::AssignUWord16ToBuffer(&data_[2], seq_num);
+}
+
+void RedPacket::AssignPayload(const uint8_t* payload, int length) {
+ assert(header_length_ + length <= length_);
+ memcpy(data_ + header_length_, payload, length);
+}
+
+void RedPacket::ClearMarkerBit() {
+ data_[1] &= 0x7F;
+}
+
+uint8_t* RedPacket::data() const {
+ return data_;
+}
+
+int RedPacket::length() const {
+ return length_;
+}
+
+ProducerFec::ProducerFec(ForwardErrorCorrection* fec)
+ : fec_(fec),
+ media_packets_fec_(),
+ fec_packets_(),
+ num_frames_(0),
+ incomplete_frame_(false),
+ num_first_partition_(0),
+ params_() {
+ memset(¶ms_, 0, sizeof(params_));
+}
+
+ProducerFec::~ProducerFec() {
+ DeletePackets();
+}
+
+void ProducerFec::SetFecParameters(const FecProtectionParams* params,
+ int num_first_partition) {
+ // Number of first partition packets cannot exceed kMaxMediaPackets
+ assert(params->fec_rate >= 0 && params->fec_rate < 256);
+ if (num_first_partition >
+ static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets)) {
+ num_first_partition =
+ ForwardErrorCorrection::kMaxMediaPackets;
+ }
+ params_ = *params;
+ num_first_partition_ = num_first_partition;
+}
+
+RedPacket* ProducerFec::BuildRedPacket(const uint8_t* data_buffer,
+ int payload_length,
+ int rtp_header_length,
+ int red_pl_type) {
+ RedPacket* red_packet = new RedPacket(payload_length +
+ kREDForFECHeaderLength +
+ rtp_header_length);
+ int pl_type = data_buffer[1] & 0x7f;
+ red_packet->CreateHeader(data_buffer, rtp_header_length,
+ red_pl_type, pl_type);
+ red_packet->AssignPayload(data_buffer + rtp_header_length, payload_length);
+ return red_packet;
+}
+
+int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
+ int payload_length,
+ int rtp_header_length) {
+ assert(fec_packets_.empty());
+ incomplete_frame_ = true;
+ const bool marker_bit = (data_buffer[1] & kRtpMarkerBitMask) ? true : false;
+ if (media_packets_fec_.size() < ForwardErrorCorrection::kMaxMediaPackets) {
+ // Generic FEC can only protect up to kMaxMediaPackets packets.
+ ForwardErrorCorrection::Packet* packet = new ForwardErrorCorrection::Packet;
+ packet->length = payload_length + rtp_header_length;
+ memcpy(packet->data, data_buffer, packet->length);
+ media_packets_fec_.push_back(packet);
+ }
+ if (marker_bit) {
+ ++num_frames_;
+ incomplete_frame_ = false;
+ }
+ if (!incomplete_frame_ &&
+ (num_frames_ == params_.max_fec_frames ||
+ (Overhead() - params_.fec_rate) < kMaxOverhead)) {
+ assert(num_first_partition_ <=
+ static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets));
+ int ret = fec_->GenerateFEC(media_packets_fec_,
+ params_.fec_rate,
+ num_first_partition_,
+ params_.use_uep_protection,
+ &fec_packets_);
+ if (fec_packets_.empty()) {
+ num_frames_ = 0;
+ DeletePackets();
+ }
+ return ret;
+ }
+ return 0;
+}
+
+bool ProducerFec::FecAvailable() const {
+ return (fec_packets_.size() > 0);
+}
+
+RedPacket* ProducerFec::GetFecPacket(int red_pl_type, int fec_pl_type,
+ uint16_t seq_num) {
+ if (fec_packets_.empty())
+ return NULL;
+ // Build the FEC packet. The packets in |fec_packets_| don't have RTP
+ // headers, so reuse the header from the last media packet.
+ ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
+ ForwardErrorCorrection::Packet* last_media_packet = media_packets_fec_.back();
+ RedPacket* return_packet = new RedPacket(packet_to_send->length +
+ kREDForFECHeaderLength +
+ kRtpHeaderSize);
+ return_packet->CreateHeader(last_media_packet->data,
+ kRtpHeaderSize,
+ red_pl_type,
+ fec_pl_type);
+ return_packet->SetSeqNum(seq_num);
+ return_packet->ClearMarkerBit();
+ return_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
+ fec_packets_.pop_front();
+ if (fec_packets_.empty()) {
+ // Done with all the FEC packets. Reset for next run.
+ DeletePackets();
+ num_frames_ = 0;
+ }
+ return return_packet;
+}
+
+int ProducerFec::Overhead() const {
+ int num_fec_packets = params_.fec_rate * media_packets_fec_.size();
+ // Ceil.
+ int rounding = (num_fec_packets % (1 << 8) > 0) ? (1 << 8) : 0;
+ num_fec_packets = (num_fec_packets + rounding) >> 8;
+ // Return the overhead in Q8.
+ return (num_fec_packets << 8) /
+ (media_packets_fec_.size() + num_fec_packets);
+}
+
+void ProducerFec::DeletePackets() {
+ while (!media_packets_fec_.empty()) {
+ delete media_packets_fec_.front();
+ media_packets_fec_.pop_front();
+ }
+ assert(media_packets_fec_.empty());
+}
+
+} // namespace webrtc
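
Overhead() computes in Q8 fixed point; a worked example with assumed values:

    // fec_rate = 64 (64/256 = 25%) and 4 buffered media packets:
    //   num_fec_packets = 64 * 4 = 256; 256 % 256 == 0, so rounding = 0;
    //   256 >> 8 = 1 FEC packet.
    //   Overhead() = (1 << 8) / (4 + 1) = 51 in Q8, roughly 20%.
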
diff --git a/modules/rtp_rtcp/source/producer_fec.h b/modules/rtp_rtcp/source/producer_fec.h
new file mode 100644
index 0000000..4c0b951
--- /dev/null
+++ b/modules/rtp_rtcp/source/producer_fec.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
+
+#include <list>
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+namespace webrtc {
+
+struct RtpPacket;
+
+class RedPacket {
+ public:
+ explicit RedPacket(int length);
+ ~RedPacket();
+ void CreateHeader(const uint8_t* rtp_header, int header_length,
+ int red_pl_type, int pl_type);
+ void SetSeqNum(int seq_num);
+ void AssignPayload(const uint8_t* payload, int length);
+ void ClearMarkerBit();
+ uint8_t* data() const;
+ int length() const;
+
+ private:
+ uint8_t* data_;
+ int length_;
+ int header_length_;
+};
+
+class ProducerFec {
+ public:
+ explicit ProducerFec(ForwardErrorCorrection* fec);
+ ~ProducerFec();
+
+ void SetFecParameters(const FecProtectionParams* params,
+ int num_first_partition);
+
+ RedPacket* BuildRedPacket(const uint8_t* data_buffer,
+ int payload_length,
+ int rtp_header_length,
+ int red_pl_type);
+
+ int AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
+ int payload_length,
+ int rtp_header_length);
+
+ bool FecAvailable() const;
+
+ RedPacket* GetFecPacket(int red_pl_type, int fec_pl_type,
+ uint16_t seq_num);
+
+ private:
+ void DeletePackets();
+ int Overhead() const;
+ ForwardErrorCorrection* fec_;
+ std::list<ForwardErrorCorrection::Packet*> media_packets_fec_;
+ std::list<ForwardErrorCorrection::Packet*> fec_packets_;
+ int num_frames_;
+ bool incomplete_frame_;
+ int num_first_partition_;
+ FecProtectionParams params_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
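
A hedged sketch of the intended send-side flow; rtp_data, payload_length,
rtp_header_length and next_seq_num are placeholders, and the payload types
come from fec_test_helper.h. FEC packets become available once a frame
completes (marker bit set) and the configured limits are hit:

    ForwardErrorCorrection fec(0);  // id argument, as in the unit tests
    ProducerFec producer(&fec);
    FecProtectionParams params = {64, false, 1};  // illustrative values
    producer.SetFecParameters(&params, 0);
    producer.AddRtpPacketAndGenerateFec(rtp_data, payload_length,
                                        rtp_header_length);
    RedPacket* media = producer.BuildRedPacket(rtp_data, payload_length,
                                               rtp_header_length,
                                               kRedPayloadType);
    // ... send media->data() / media->length(), then delete media ...
    while (producer.FecAvailable()) {
        RedPacket* fec_packet = producer.GetFecPacket(kRedPayloadType,
                                                      kFecPayloadType,
                                                      next_seq_num++);
        // ... send and delete fec_packet ...
    }
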
diff --git a/modules/rtp_rtcp/source/producer_fec_unittest.cc b/modules/rtp_rtcp/source/producer_fec_unittest.cc
new file mode 100644
index 0000000..fcd8c66
--- /dev/null
+++ b/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <list>
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/producer_fec.h"
+
+namespace webrtc {
+
+void VerifyHeader(uint16_t seq_num,
+ uint32_t timestamp,
+ int red_pltype,
+ int fec_pltype,
+ RedPacket* packet,
+ bool marker_bit) {
+ EXPECT_GT(packet->length(), static_cast<int>(kRtpHeaderSize));
+ EXPECT_TRUE(packet->data() != NULL);
+ uint8_t* data = packet->data();
+ // Check the expected marker bit.
+ EXPECT_EQ(marker_bit ? 0x80 : 0, data[1] & 0x80);
+ EXPECT_EQ(red_pltype, data[1] & 0x7F);
+ EXPECT_EQ(seq_num, (data[2] << 8) + data[3]);
+ uint32_t parsed_timestamp = (data[4] << 24) + (data[5] << 16) +
+ (data[6] << 8) + data[7];
+ EXPECT_EQ(timestamp, parsed_timestamp);
+ EXPECT_EQ(fec_pltype, data[kRtpHeaderSize]);
+}
+
+class ProducerFecTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ fec_ = new ForwardErrorCorrection(0);
+ producer_ = new ProducerFec(fec_);
+ generator_ = new FrameGenerator;
+ }
+
+ virtual void TearDown() {
+ delete producer_;
+ delete fec_;
+ delete generator_;
+ }
+ ForwardErrorCorrection* fec_;
+ ProducerFec* producer_;
+ FrameGenerator* generator_;
+};
+
+TEST_F(ProducerFecTest, OneFrameFec) {
+ const int kNumPackets = 3;
+ FecProtectionParams params = {5, false, 3};
+ std::list<RtpPacket*> rtp_packets;
+ generator_->NewFrame(kNumPackets);
+ producer_->SetFecParameters(¶ms, 0); // Expecting one FEC packet.
+ uint32_t last_timestamp = 0;
+ for (int i = 0; i < kNumPackets; ++i) {
+ RtpPacket* rtp_packet = generator_->NextPacket(i, 10);
+ rtp_packets.push_back(rtp_packet);
+ EXPECT_EQ(0, producer_->AddRtpPacketAndGenerateFec(rtp_packet->data,
+ rtp_packet->length,
+ kRtpHeaderSize));
+ last_timestamp = rtp_packet->header.header.timestamp;
+ }
+ EXPECT_TRUE(producer_->FecAvailable());
+ uint16_t seq_num = generator_->NextSeqNum();
+ RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
+ kFecPayloadType,
+ seq_num);
+ EXPECT_FALSE(producer_->FecAvailable());
+ EXPECT_TRUE(packet != NULL);
+ VerifyHeader(seq_num, last_timestamp,
+ kRedPayloadType, kFecPayloadType, packet, false);
+ while (!rtp_packets.empty()) {
+ delete rtp_packets.front();
+ rtp_packets.pop_front();
+ }
+ delete packet;
+}
+
+TEST_F(ProducerFecTest, TwoFrameFec) {
+ const int kNumPackets = 2;
+ const int kNumFrames = 2;
+ FecProtectionParams params = {5, 0, 3};
+ std::list<RtpPacket*> rtp_packets;
+ producer_->SetFecParameters(¶ms, 0); // Expecting one FEC packet.
+ uint32_t last_timestamp = 0;
+ for (int i = 0; i < kNumFrames; ++i) {
+ generator_->NewFrame(kNumPackets);
+ for (int j = 0; j < kNumPackets; ++j) {
+ RtpPacket* rtp_packet = generator_->NextPacket(i * kNumPackets + j, 10);
+ rtp_packets.push_back(rtp_packet);
+ EXPECT_EQ(0, producer_->AddRtpPacketAndGenerateFec(rtp_packet->data,
+ rtp_packet->length,
+ kRtpHeaderSize));
+ last_timestamp = rtp_packet->header.header.timestamp;
+ }
+ }
+ EXPECT_TRUE(producer_->FecAvailable());
+ uint16_t seq_num = generator_->NextSeqNum();
+ RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
+ kFecPayloadType,
+ seq_num);
+ EXPECT_FALSE(producer_->FecAvailable());
+ EXPECT_TRUE(packet != NULL);
+ VerifyHeader(seq_num, last_timestamp,
+ kRedPayloadType, kFecPayloadType, packet, false);
+ while (!rtp_packets.empty()) {
+ delete rtp_packets.front();
+ rtp_packets.pop_front();
+ }
+ delete packet;
+}
+
+TEST_F(ProducerFecTest, BuildRedPacket) {
+ generator_->NewFrame(1);
+ RtpPacket* packet = generator_->NextPacket(0, 10);
+ RedPacket* red_packet = producer_->BuildRedPacket(packet->data,
+ packet->length -
+ kRtpHeaderSize,
+ kRtpHeaderSize,
+ kRedPayloadType);
+ EXPECT_EQ(packet->length + 1, red_packet->length());
+ VerifyHeader(packet->header.header.sequenceNumber,
+ packet->header.header.timestamp,
+ kRedPayloadType,
+ packet->header.header.payloadType,
+ red_packet,
+ true); // Marker bit set.
+ for (int i = 0; i < 10; ++i)
+ EXPECT_EQ(i, red_packet->data()[kRtpHeaderSize + 1 + i]);
+ delete red_packet;
+ delete packet;
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/receiver_fec.cc b/modules/rtp_rtcp/source/receiver_fec.cc
index 77a3465..e86f578 100644
--- a/modules/rtp_rtcp/source/receiver_fec.cc
+++ b/modules/rtp_rtcp/source/receiver_fec.cc
@@ -223,11 +223,18 @@
WebRtc_Word32 ReceiverFEC::ProcessReceivedFEC() {
if (!_receivedPacketList.empty()) {
+ // Send received media packet to VCM.
+ if (!_receivedPacketList.front()->isFec) {
+ if (ParseAndReceivePacket(_receivedPacketList.front()->pkt) != 0) {
+ return -1;
+ }
+ }
if (_fec->DecodeFEC(&_receivedPacketList, &_recoveredPacketList) != 0) {
return -1;
}
assert(_receivedPacketList.empty());
}
+ // Send any recovered media packets to VCM.
ForwardErrorCorrection::RecoveredPacketList::iterator it =
_recoveredPacketList.begin();
for (; it != _recoveredPacketList.end(); ++it) {
diff --git a/modules/rtp_rtcp/source/receiver_fec_unittest.cc b/modules/rtp_rtcp/source/receiver_fec_unittest.cc
index ebedc8f..8438ca4 100644
--- a/modules/rtp_rtcp/source/receiver_fec_unittest.cc
+++ b/modules/rtp_rtcp/source/receiver_fec_unittest.cc
@@ -13,6 +13,7 @@
#include "gmock/gmock.h"
#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
#include "modules/rtp_rtcp/source/forward_error_correction.h"
#include "modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h"
#include "modules/rtp_rtcp/source/receiver_fec.h"
@@ -20,104 +21,9 @@
using ::testing::_;
using ::testing::Args;
using ::testing::ElementsAreArray;
-using ::testing::InSequence;
namespace webrtc {
-typedef ForwardErrorCorrection::Packet Packet;
-
-enum { kRtpHeaderSize = 12 };
-enum { kFecPayloadType = 96 };
-enum { kRedPayloadType = 97 };
-enum { kVp8PayloadType = 120 };
-
-struct RtpPacket : public Packet {
- WebRtcRTPHeader header;
-};
-
-class FrameGenerator {
- public:
- FrameGenerator() : num_packets_(0), seq_num_(0), timestamp_(0) {}
-
- void NewFrame(int num_packets) {
- num_packets_ = num_packets;
- timestamp_ += 3000;
- }
-
- RtpPacket* NextPacket(int offset, size_t length) {
- RtpPacket* rtp_packet = new RtpPacket;
- for (size_t i = 0; i < length; ++i)
- rtp_packet->data[i] = offset + i;
- rtp_packet->length = length;
- memset(&rtp_packet->header, 0, sizeof(WebRtcRTPHeader));
- rtp_packet->header.frameType = kVideoFrameDelta;
- rtp_packet->header.header.headerLength = kRtpHeaderSize;
- rtp_packet->header.header.markerBit = (num_packets_ == 1);
- rtp_packet->header.header.sequenceNumber = seq_num_;
- rtp_packet->header.header.timestamp = timestamp_;
- rtp_packet->header.header.payloadType = kVp8PayloadType;
- BuildRtpHeader(rtp_packet->data, rtp_packet->header.header);
- ++seq_num_;
- --num_packets_;
- return rtp_packet;
- }
-
- // Creates a new RtpPacket with the RED header added to the packet.
- RtpPacket* BuildMediaRedPacket(const RtpPacket* packet) {
- const int kHeaderLength = packet->header.header.headerLength;
- RtpPacket* red_packet = new RtpPacket;
- red_packet->header = packet->header;
- red_packet->length = packet->length + 1; // 1 byte RED header.
- memset(red_packet->data, 0, red_packet->length);
- // Copy RTP header.
- memcpy(red_packet->data, packet->data, kHeaderLength);
- SetRedHeader(red_packet, red_packet->data[1] & 0x7f, kHeaderLength);
- memcpy(red_packet->data + kHeaderLength + 1, packet->data + kHeaderLength,
- packet->length - kHeaderLength);
- return red_packet;
- }
-
- // Creates a new RtpPacket with FEC payload and red header. Does this by
- // creating a new fake media RtpPacket, clears the marker bit and adds a RED
- // header. Finally replaces the payload with the content of |packet->data|.
- RtpPacket* BuildFecRedPacket(const Packet* packet) {
- // Create a fake media packet to get a correct header. 1 byte RED header.
- ++num_packets_;
- RtpPacket* red_packet = NextPacket(0, packet->length + 1);
- red_packet->data[1] &= ~0x80; // Clear marker bit.
- const int kHeaderLength = red_packet->header.header.headerLength;
- SetRedHeader(red_packet, kFecPayloadType, kHeaderLength);
- memcpy(red_packet->data + kHeaderLength + 1, packet->data,
- packet->length);
- red_packet->length = kHeaderLength + 1 + packet->length;
- return red_packet;
- }
-
- void SetRedHeader(Packet* red_packet, uint8_t payload_type,
- int header_length) const {
- // Replace pltype.
- red_packet->data[1] &= 0x80; // Reset.
- red_packet->data[1] += kRedPayloadType; // Replace.
-
- // Add RED header, f-bit always 0.
- red_packet->data[header_length] = payload_type;
- }
-
- private:
- void BuildRtpHeader(uint8_t* data, RTPHeader header) {
- data[0] = 0x80; // Version 2.
- data[1] = header.payloadType;
- data[1] |= (header.markerBit ? kRtpMarkerBitMask : 0);
- ModuleRTPUtility::AssignUWord16ToBuffer(data+2, header.sequenceNumber);
- ModuleRTPUtility::AssignUWord32ToBuffer(data+4, header.timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(data+8, header.ssrc);
- }
-
- int num_packets_;
- uint16_t seq_num_;
- uint32_t timestamp_;
-};
-
class ReceiverFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
@@ -133,20 +39,6 @@
delete generator_;
}
- void GenerateAndAddFrames(int num_frames,
- int num_packets_per_frame,
- std::list<RtpPacket*>* media_rtp_packets,
- std::list<Packet*>* media_packets) {
- for (int i = 0; i < num_frames; ++i) {
- GenerateFrame(num_packets_per_frame, i, media_rtp_packets,
- media_packets);
- }
- for (std::list<RtpPacket*>::iterator it = media_rtp_packets->begin();
- it != media_rtp_packets->end(); ++it) {
- BuildAndAddRedMediaPacket(*it);
- }
- }
-
void GenerateFEC(std::list<Packet*>* media_packets,
std::list<Packet*>* fec_packets,
unsigned int num_fec_packets) {
@@ -229,18 +121,16 @@
GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
// Recovery
+ std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
std::list<RtpPacket*>::iterator media_it = media_rtp_packets.begin();
BuildAndAddRedMediaPacket(*media_it);
+ VerifyReconstructedMediaPacket(*it, 1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
// Drop one media packet.
std::list<Packet*>::iterator fec_it = fec_packets.begin();
BuildAndAddRedFecPacket(*fec_it);
- {
- InSequence s;
- std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
- VerifyReconstructedMediaPacket(*it, 1);
- ++it;
- VerifyReconstructedMediaPacket(*it, 1);
- }
+ ++it;
+ VerifyReconstructedMediaPacket(*it, 1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
@@ -256,17 +146,15 @@
// Recovery
// Drop both media packets.
+ std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
std::list<Packet*>::iterator fec_it = fec_packets.begin();
BuildAndAddRedFecPacket(*fec_it);
+ VerifyReconstructedMediaPacket(*it, 1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
++fec_it;
BuildAndAddRedFecPacket(*fec_it);
- {
- InSequence s;
- std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
- VerifyReconstructedMediaPacket(*it, 1);
- ++it;
- VerifyReconstructedMediaPacket(*it, 1);
- }
+ ++it;
+ VerifyReconstructedMediaPacket(*it, 1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
@@ -282,16 +170,14 @@
GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
// Recovery
+ std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
BuildAndAddRedMediaPacket(media_rtp_packets.front());
+ VerifyReconstructedMediaPacket(*it, 1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
// Drop one media packet.
BuildAndAddRedFecPacket(fec_packets.front());
- {
- InSequence s;
- std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
- VerifyReconstructedMediaPacket(*it, 1);
- ++it;
- VerifyReconstructedMediaPacket(*it, 1);
- }
+ ++it;
+ VerifyReconstructedMediaPacket(*it, 1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
@@ -303,15 +189,18 @@
std::list<Packet*> media_packets;
GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
GenerateFrame(2, 1, &media_rtp_packets, &media_packets);
+
std::list<Packet*> fec_packets;
GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
// Recovery
std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
- BuildAndAddRedMediaPacket(*it); // First frame
+ BuildAndAddRedMediaPacket(*it); // First frame: one packet.
+ VerifyReconstructedMediaPacket(*it, 1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+ ++it;
BuildAndAddRedMediaPacket(*it); // First packet of second frame.
- EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
- .Times(1);
+ VerifyReconstructedMediaPacket(*it, 1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
@@ -322,23 +211,23 @@
const unsigned int kNumMediaPackets = 48u;
std::list<RtpPacket*> media_rtp_packets;
std::list<Packet*> media_packets;
- for (unsigned int i = 0; i < kNumMediaPackets; ++i)
+ for (unsigned int i = 0; i < kNumMediaPackets; ++i) {
GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+ }
std::list<Packet*> fec_packets;
GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
// Recovery
std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
++it; // Drop first packet.
- for (; it != media_rtp_packets.end(); ++it)
+ for (; it != media_rtp_packets.end(); ++it) {
BuildAndAddRedMediaPacket(*it);
- BuildAndAddRedFecPacket(fec_packets.front());
- {
- InSequence s;
- std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
- for (; it != media_rtp_packets.end(); ++it)
- VerifyReconstructedMediaPacket(*it, 1);
+ VerifyReconstructedMediaPacket(*it, 1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
+ BuildAndAddRedFecPacket(fec_packets.front());
+ it = media_rtp_packets.begin();
+ VerifyReconstructedMediaPacket(*it, 1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
@@ -349,8 +238,9 @@
const unsigned int kNumMediaPackets = 49u;
std::list<RtpPacket*> media_rtp_packets;
std::list<Packet*> media_packets;
- for (unsigned int i = 0; i < kNumMediaPackets; ++i)
+ for (unsigned int i = 0; i < kNumMediaPackets; ++i) {
GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+ }
std::list<Packet*> fec_packets;
EXPECT_EQ(-1, fec_->GenerateFEC(media_packets,
kNumFecPackets * 255 / kNumMediaPackets,
@@ -384,11 +274,16 @@
const unsigned int kNumMediaPacketsBatch2 = 46u;
std::list<RtpPacket*> media_rtp_packets_batch2;
std::list<Packet*> media_packets_batch2;
- GenerateAndAddFrames(kNumMediaPacketsBatch2, 1, &media_rtp_packets_batch2,
- &media_packets_batch2);
- EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
- .Times(media_packets_batch2.size());
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+ for (unsigned int i = 0; i < kNumMediaPacketsBatch2; ++i) {
+ GenerateFrame(1, i, &media_rtp_packets_batch2, &media_packets_batch2);
+ }
+ for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
+ it != media_rtp_packets_batch2.end(); ++it) {
+ BuildAndAddRedMediaPacket(*it);
+ EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+ .Times(1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+ }
// Add the delayed FEC packet. One packet should be reconstructed.
BuildAndAddRedFecPacket(delayed_fec);
@@ -423,11 +318,16 @@
const unsigned int kNumMediaPacketsBatch2 = 48u;
std::list<RtpPacket*> media_rtp_packets_batch2;
std::list<Packet*> media_packets_batch2;
- GenerateAndAddFrames(kNumMediaPacketsBatch2, 1, &media_rtp_packets_batch2,
- &media_packets_batch2);
- EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
- .Times(media_packets_batch2.size());
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+ for (unsigned int i = 0; i < kNumMediaPacketsBatch2; ++i) {
+ GenerateFrame(1, i, &media_rtp_packets_batch2, &media_packets_batch2);
+ }
+ for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
+ it != media_rtp_packets_batch2.end(); ++it) {
+ BuildAndAddRedMediaPacket(*it);
+ EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+ .Times(1);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+ }
// Add the delayed FEC packet. No packet should be reconstructed since the
// first media packet of that frame has been dropped due to being too old.
@@ -454,7 +354,11 @@
GenerateFEC(&frame_media_packets, &fec_packets, 1);
for (std::list<Packet*>::iterator it = fec_packets.begin();
it != fec_packets.end(); ++it) {
+ // Only FEC packets inserted. No packets recoverable at this time.
BuildAndAddRedFecPacket(*it);
+ EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+ .Times(0);
+ EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
media_packets.insert(media_packets.end(),
frame_media_packets.begin(),
@@ -463,58 +367,12 @@
frame_media_rtp_packets.begin(),
frame_media_rtp_packets.end());
}
- // Don't insert any media packets.
- // Only FEC packets inserted. No packets should be recoverable at this time.
- EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
- .Times(0);
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
-
// Insert the oldest media packet. The corresponding FEC packet is too old
// and should've been dropped. Only the media packet we inserted will be
// returned.
BuildAndAddRedMediaPacket(media_rtp_packets.front());
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
- .Times(1);
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
-
- DeletePackets(&media_packets);
-}
-
-TEST_F(ReceiverFecTest, PacketsOnlyReturnedOnce) {
- const unsigned int kNumFecPackets = 1u;
- std::list<RtpPacket*> media_rtp_packets;
- std::list<Packet*> media_packets;
- GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
- GenerateFrame(2, 1, &media_rtp_packets, &media_packets);
- std::list<Packet*> fec_packets;
- GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
-
- // Recovery
- std::list<RtpPacket*>::iterator media_it = media_rtp_packets.begin();
- BuildAndAddRedMediaPacket(*media_it); // First frame.
- {
- std::list<RtpPacket*>::iterator verify_it = media_rtp_packets.begin();
- VerifyReconstructedMediaPacket(*verify_it, 1); // First frame
- }
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
-
- ++media_it;
- BuildAndAddRedMediaPacket(*media_it); // 1st packet of 2nd frame.
- BuildAndAddRedFecPacket(fec_packets.front()); // Insert FEC packet.
- {
- InSequence s;
- std::list<RtpPacket*>::iterator verify_it = media_rtp_packets.begin();
- ++verify_it; // First frame has already been returned.
- VerifyReconstructedMediaPacket(*verify_it, 1); // 1st packet of 2nd frame.
- ++verify_it;
- VerifyReconstructedMediaPacket(*verify_it, 1); // 2nd packet of 2nd frame.
- }
- EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
-
- ++media_it;
- BuildAndAddRedMediaPacket(*media_it); // 2nd packet of 2nd frame.
- EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
- .Times(0);
+ .Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);
diff --git a/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index b61157d..b180c01 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -449,7 +449,7 @@
WebRtc_UWord16 s = payloadData[offsetBytes] << 8;
// check that we don't read outside the memory
- if(offsetBytes < (WebRtc_UWord32)payloadLength -2)
+ if(offsetBytes < (WebRtc_UWord32)payloadLength - 1)
{
s += payloadData[offsetBytes+1];
}
@@ -463,8 +463,8 @@
offsetSamples += audioSpecific.bitsPerSample;
if(readShift <= audioSpecific.bitsPerSample)
{
- // next does not fitt
- // or fitt exactly
+ // next does not fit
+ // or fit exactly
offsetSamples -= 8;
offsetBytes++;
}
@@ -485,8 +485,8 @@
offsetSamplesInsert += audioSpecific.bitsPerSample;
if(insertShift <= audioSpecific.bitsPerSample)
{
- // next does not fitt
- // or fitt exactly
+ // next does not fit
+ // or fit exactly
offsetSamplesInsert -= 8;
offsetBytesInsert++;
}
diff --git a/modules/rtp_rtcp/source/rtp_rtcp.gypi b/modules/rtp_rtcp/source/rtp_rtcp.gypi
index 48fc05f..0867d7a 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp.gypi
+++ b/modules/rtp_rtcp/source/rtp_rtcp.gypi
@@ -71,6 +71,8 @@
'forward_error_correction_internal.h',
'overuse_detector.cc',
'overuse_detector.h',
+ 'producer_fec.cc',
+ 'producer_fec.h',
'remote_rate_control.cc',
'remote_rate_control.h',
'rtp_packet_history.cc',
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 6087cc3..5fb591e 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -425,13 +425,6 @@
_rtcpSender.SendRTCP(kRtcpReport);
}
- if (_rtpSender.RTPKeepalive()) {
- // check time to send RTP keep alive
- if (_rtpSender.TimeToSendRTPKeepalive()) {
- _rtpSender.SendRTPKeepalivePacket();
- }
- }
-
if (UpdateRTCPReceiveInformationTimers()) {
// a receiver has timed out
UpdateTMMBR();
@@ -862,67 +855,6 @@
return retVal;
}
-bool ModuleRtpRtcpImpl::RTPKeepalive() const {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "RTPKeepalive()");
-
- return _rtpSender.RTPKeepalive();
-}
-
-WebRtc_Word32 ModuleRtpRtcpImpl::RTPKeepaliveStatus(
- bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTPKeepaliveStatus()");
-
- return _rtpSender.RTPKeepaliveStatus(enable,
- unknownPayloadType,
- deltaTransmitTimeMS);
-}
-
-WebRtc_Word32 ModuleRtpRtcpImpl::SetRTPKeepaliveStatus(
- bool enable,
- const int unknownPayloadType,
- WebRtc_UWord16 deltaTransmitTimeMS) {
- if (enable) {
- WEBRTC_TRACE(
- kTraceModuleCall,
- kTraceRtpRtcp,
- _id,
- "SetRTPKeepaliveStatus(true, plType:%d deltaTransmitTimeMS:%u)",
- unknownPayloadType,
- deltaTransmitTimeMS);
-
- // check the transmit keepalive delta time [1, 60] seconds, i.e. [1000, 60000] ms
- if (deltaTransmitTimeMS < 1000 || deltaTransmitTimeMS > 60000) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- _id,
- "\tinvalid deltaTransmitTimeSeconds (%d)",
- deltaTransmitTimeMS);
- return -1;
- }
-
- // check the payload type [0, 127]
- if (unknownPayloadType < 0) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- _id,
- "\tinvalid unknownPayloadType (%d)",
- unknownPayloadType);
- return -1;
- }
- // enable RTP keepalive mechanism
- return _rtpSender.EnableRTPKeepalive(unknownPayloadType,
- deltaTransmitTimeMS);
- } else {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- _id,
- "SetRTPKeepaliveStatus(disable)");
- return _rtpSender.DisableRTPKeepalive();
- }
-}
-
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendPayload(
const CodecInst& voiceCodec) {
WEBRTC_TRACE(kTraceModuleCall,
@@ -1098,14 +1030,6 @@
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
"SetSendingStatus(sending)");
} else {
- if (_rtpSender.RTPKeepalive()) {
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceRtpRtcp,
- _id,
- "Can't SetSendingStatus(stopped) when RTP Keepalive is active");
- return -1;
- }
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
"SetSendingStatus(stopped)");
}
@@ -2196,16 +2120,9 @@
return retVal;
}
-WebRtc_Word32 ModuleRtpRtcpImpl::SetFECCodeRate(
- const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- _id,
- "SetFECCodeRate(%u, %u)",
- keyFrameCodeRate,
- deltaFrameCodeRate);
-
+WebRtc_Word32 ModuleRtpRtcpImpl::SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params) {
const bool defaultInstance(_childModules.empty() ? false : true);
if (defaultInstance) {
// for default we need to update all child modules too
@@ -2215,42 +2132,13 @@
while (it != _childModules.end()) {
RtpRtcp* module = *it;
if (module) {
- module->SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
+ module->SetFecParameters(delta_params, key_params);
}
it++;
}
return 0;
}
- return _rtpSender.SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
-}
-
-WebRtc_Word32 ModuleRtpRtcpImpl::SetFECUepProtection(
- const bool keyUseUepProtection,
- const bool deltaUseUepProtection) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp, _id,
- "SetFECUepProtection(%d, %d)",
- keyUseUepProtection,
- deltaUseUepProtection);
-
- const bool defaultInstance(_childModules.empty() ? false : true);
- if (defaultInstance) {
- // for default we need to update all child modules too
- CriticalSectionScoped lock(_criticalSectionModulePtrs);
-
- std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
- while (it != _childModules.end()) {
- RtpRtcp* module = *it;
- if (module) {
- module->SetFECUepProtection(keyUseUepProtection,
- deltaUseUepProtection);
- }
- it++;
- }
- return 0;
- }
- return _rtpSender.SetFECUepProtection(keyUseUepProtection,
- deltaUseUepProtection);
+ return _rtpSender.SetFecParameters(delta_params, key_params);
}
void ModuleRtpRtcpImpl::SetRemoteSSRC(const WebRtc_UWord32 SSRC) {
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 19858d5..9236061 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -148,18 +148,6 @@
*/
virtual WebRtc_Word32 InitSender();
- virtual WebRtc_Word32 SetRTPKeepaliveStatus(
- const bool enable,
- const int unknownPayloadType,
- const WebRtc_UWord16 deltaTransmitTimeMS);
-
- virtual WebRtc_Word32 RTPKeepaliveStatus(
- bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS) const;
-
- virtual bool RTPKeepalive() const;
-
virtual WebRtc_Word32 RegisterSendPayload(const CodecInst& voiceCodec);
virtual WebRtc_Word32 RegisterSendPayload(const VideoCodec& videoCodec);
@@ -477,12 +465,9 @@
WebRtc_UWord8& payloadTypeRED,
WebRtc_UWord8& payloadTypeFEC);
-
- virtual WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate);
-
- virtual WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection);
+ virtual WebRtc_Word32 SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params);
virtual WebRtc_Word32 LastReceivedNTP(WebRtc_UWord32& NTPsecs,
WebRtc_UWord32& NTPfrac,
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi b/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
index ea705a8..1c66efc 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
+++ b/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
@@ -22,6 +22,9 @@
'../../../',
],
'sources': [
+ 'fec_test_helper.cc',
+ 'fec_test_helper.h',
+ 'producer_fec_unittest.cc',
'receiver_fec_unittest.cc',
'rtp_fec_unittest.cc',
'rtp_format_vp8_unittest.cc',
diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc
index 7b0755a..64837bf 100644
--- a/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/modules/rtp_rtcp/source/rtp_sender.cc
@@ -45,11 +45,6 @@
_rtpHeaderExtensionMap(),
_transmissionTimeOffset(0),
- _keepAliveIsActive(false),
- _keepAlivePayloadType(-1),
- _keepAliveLastSent(0),
- _keepAliveDeltaTimeSend(0),
-
// NACK
_nackByteCountTimes(),
_nackByteCount(),
@@ -149,8 +144,6 @@
_payloadBytesSent = 0;
_packetOverHead = 28;
- _keepAlivePayloadType = -1;
-
_rtpHeaderExtensionMap.Erase();
while (!_payloadTypeMap.empty()) {
@@ -280,11 +273,6 @@
assert(payloadName);
CriticalSectionScoped cs(_sendCritsect);
- if (payloadNumber == _keepAlivePayloadType) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "invalid state",
- __FUNCTION__);
- return -1;
- }
std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
_payloadTypeMap.find(payloadNumber);
@@ -351,162 +339,6 @@
}
-// See http://www.ietf.org/internet-drafts/draft-ietf-avt-app-rtp-keepalive-04.txt
-// for details about this method. Only Section 4.6 is implemented so far.
-bool
-RTPSender::RTPKeepalive() const
-{
- return _keepAliveIsActive;
-}
-
-WebRtc_Word32
-RTPSender::RTPKeepaliveStatus(bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS) const
-{
- CriticalSectionScoped cs(_sendCritsect);
-
- if(enable)
- {
- *enable = _keepAliveIsActive;
- }
- if(unknownPayloadType)
- {
- *unknownPayloadType = _keepAlivePayloadType;
- }
- if(deltaTransmitTimeMS)
- {
- *deltaTransmitTimeMS =_keepAliveDeltaTimeSend;
- }
- return 0;
-}
-
-WebRtc_Word32 RTPSender::EnableRTPKeepalive(
- const int unknownPayloadType,
- const WebRtc_UWord16 deltaTransmitTimeMS) {
- CriticalSectionScoped cs(_sendCritsect);
-
- std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
- _payloadTypeMap.find(unknownPayloadType);
-
- if (it != _payloadTypeMap.end()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument",
- __FUNCTION__);
- return -1;
- }
- _keepAliveIsActive = true;
- _keepAlivePayloadType = unknownPayloadType;
- _keepAliveLastSent = _clock.GetTimeInMS();
- _keepAliveDeltaTimeSend = deltaTransmitTimeMS;
- return 0;
-}
-
-WebRtc_Word32
-RTPSender::DisableRTPKeepalive()
-{
- _keepAliveIsActive = false;
- return 0;
-}
-
-bool
-RTPSender::TimeToSendRTPKeepalive() const
-{
- CriticalSectionScoped cs(_sendCritsect);
-
- bool timeToSend(false);
-
- WebRtc_UWord32 dT = _clock.GetTimeInMS() - _keepAliveLastSent;
- if (dT > _keepAliveDeltaTimeSend)
- {
- timeToSend = true;
- }
- return timeToSend;
-}
-
-// ----------------------------------------------------------------------------
-// From the RFC draft:
-//
-// 4.6. RTP Packet with Unknown Payload Type
-//
-// The application sends an RTP packet of 0 length with a dynamic
-// payload type that has not been negotiated by the peers (e.g. not
-// negotiated within the SDP offer/answer, and thus not mapped to any
-// media format).
-//
-// The sequence number is incremented by one for each packet, as it is
-// sent within the same RTP session as the actual media. The timestamp
-// contains the same value a media packet would have at this time. The
-// marker bit is not significant for the keepalive packets and is thus
-// set to zero.
-//
-// Normally the peer will ignore this packet, as RTP [RFC3550] states
-// that "a receiver MUST ignore packets with payload types that it does
-// not understand".
-//
-// Cons:
-// o [RFC4566] and [RFC3264] mandate not to send media with inactive
-// and recvonly attributes, however this is mitigated as no real
-// media is sent with this mechanism.
-//
-// Recommendation:
-// o This method should be used for RTP keepalive.
-//
-// 7. Timing and Transport Considerations
-//
-// An application supporting this specification must transmit keepalive
-// packets every Tr seconds during the whole duration of the media
-// session. Tr SHOULD be configurable, and otherwise MUST default to 15
-// seconds.
-//
- // Keepalive packets within a particular RTP session MUST use the tuple
-// (source IP address, source TCP/UDP ports, target IP address, target
-// TCP/UDP Port) of the regular RTP packets.
-//
-// The agent SHOULD only send RTP keepalive when it does not send
-// regular RTP packets.
-//
-// http://www.ietf.org/internet-drafts/draft-ietf-avt-app-rtp-keepalive-04.txt
-// ----------------------------------------------------------------------------
-
-WebRtc_Word32
-RTPSender::SendRTPKeepalivePacket()
-{
- // RFC summary:
- //
- // - Send an RTP packet of 0 length;
- // - dynamic payload type has not been negotiated (not mapped to any media);
- // - sequence number is incremented by one for each packet;
- // - timestamp contains the same value a media packet would have at this time;
- // - marker bit is set to zero.
-
- WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
- WebRtc_UWord16 rtpHeaderLength = 12;
- {
- CriticalSectionScoped cs(_sendCritsect);
-
- WebRtc_UWord32 now = _clock.GetTimeInMS();
- WebRtc_UWord32 dT = now -_keepAliveLastSent; // delta time in MS
-
- WebRtc_UWord32 freqKHz = 90; // video
- if(_audioConfigured)
- {
- freqKHz = _audio->AudioFrequency()/1000;
- }
- WebRtc_UWord32 dSamples = dT*freqKHz;
-
- // set timestamp
- _timeStamp += dSamples;
- _keepAliveLastSent = now;
-
- rtpHeaderLength = RTPHeaderLength();
-
- // correct seq num, time stamp and payloadtype
- BuildRTPheader(dataBuffer, _keepAlivePayloadType, false, 0, false);
- }
-
- return SendToNetwork(dataBuffer, 0, rtpHeaderLength, kAllowRetransmission);
-}
-
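For context on the deletion above: the draft's Section 4.6 keepalive is simply an
empty RTP packet carrying an unmapped payload type, sent with the session's normal
sequence-number and timestamp progression and the marker bit cleared. Condensed
from the removed SendRTPKeepalivePacket() (dT and freqKHz as in the deleted code):

    WebRtc_UWord8 buffer[IP_PACKET_SIZE];
    _timeStamp += dT * freqKHz;              // same timestamp a media packet would get
    _keepAliveLastSent = _clock.GetTimeInMS();
    WebRtc_UWord16 headerLength = RTPHeaderLength();
    // Zero-length payload, unmapped payload type, marker bit cleared.
    BuildRTPheader(buffer, _keepAlivePayloadType, false, 0, false);
    SendToNetwork(buffer, 0, headerLength, kAllowRetransmission);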
WebRtc_Word32
RTPSender::SetMaxPayloadLength(const WebRtc_UWord16 maxPayloadLength, const WebRtc_UWord16 packetOverHead)
{
@@ -673,8 +505,6 @@
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument failed to find payloadType:%d", __FUNCTION__, payloadType);
return -1;
}
- // update keepalive so that we don't trigger keepalive messages while sending data
- _keepAliveLastSent = _clock.GetTimeInMS();
if(_audioConfigured)
{
@@ -1742,27 +1572,12 @@
return _video->GenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
}
-WebRtc_Word32
-RTPSender::SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate)
-{
- if(_audioConfigured)
- {
- return -1;
- }
- return _video->SetFECCodeRate(keyFrameCodeRate, deltaFrameCodeRate);
-}
-
-WebRtc_Word32
-RTPSender::SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection)
-
-{
- if(_audioConfigured)
- {
- return -1;
- }
- return _video->SetFECUepProtection(keyUseUepProtection,
- deltaUseUepProtection);
+WebRtc_Word32 RTPSender::SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params) {
+ if (_audioConfigured) {
+ return -1;
+ }
+ return _video->SetFecParameters(delta_params, key_params);
}
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h
index 1a2cb82..6547738 100644
--- a/modules/rtp_rtcp/source/rtp_sender.h
+++ b/modules/rtp_rtcp/source/rtp_sender.h
@@ -197,24 +197,6 @@
bool ProcessNACKBitRate(const WebRtc_UWord32 now);
/*
- * Keep alive
- */
- WebRtc_Word32 EnableRTPKeepalive( const int unknownPayloadType,
- const WebRtc_UWord16 deltaTransmitTimeMS);
-
- WebRtc_Word32 RTPKeepaliveStatus(bool* enable,
- int* unknownPayloadType,
- WebRtc_UWord16* deltaTransmitTimeMS) const;
-
- WebRtc_Word32 DisableRTPKeepalive();
-
- bool RTPKeepalive() const;
-
- bool TimeToSendRTPKeepalive() const;
-
- WebRtc_Word32 SendRTPKeepalivePacket();
-
- /*
* RTX
*/
void SetRTXStatus(const bool enable,
@@ -299,11 +281,9 @@
WebRtc_UWord8& payloadTypeRED,
WebRtc_UWord8& payloadTypeFEC) const;
- WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate);
-
- WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection);
+ WebRtc_Word32 SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params);
protected:
WebRtc_Word32 CheckPayloadType(const WebRtc_Word8 payloadType,
@@ -335,11 +315,6 @@
RtpHeaderExtensionMap _rtpHeaderExtensionMap;
WebRtc_Word32 _transmissionTimeOffset;
- bool _keepAliveIsActive;
- WebRtc_Word8 _keepAlivePayloadType;
- WebRtc_UWord32 _keepAliveLastSent;
- WebRtc_UWord16 _keepAliveDeltaTimeSend;
-
// NACK
WebRtc_UWord32 _nackByteCountTimes[NACK_BYTECOUNT_SIZE];
WebRtc_Word32 _nackByteCount[NACK_BYTECOUNT_SIZE];
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc
index 7a15208..2d30641 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -19,11 +19,17 @@
#include <cassert> // assert
#include <cstdlib> // srand
+#include "producer_fec.h"
#include "rtp_format_vp8.h"
namespace webrtc {
enum { REDForFECHeaderLength = 1 };
+struct RtpPacket {
+ WebRtc_UWord16 rtpHeaderLength;
+ ForwardErrorCorrection::Packet* pkt;
+};
+
RTPSenderVideo::RTPSenderVideo(const WebRtc_Word32 id,
RtpRtcpClock* clock,
RTPSenderInterface* rtpSender) :
@@ -41,15 +47,15 @@
_fecEnabled(false),
_payloadTypeRED(-1),
_payloadTypeFEC(-1),
- _codeRateKey(0),
- _codeRateDelta(0),
- _useUepProtectionKey(false),
- _useUepProtectionDelta(false),
- _fecProtectionFactor(0),
- _fecUseUepProtection(false),
_numberFirstPartition(0),
+ delta_fec_params_(),
+ key_fec_params_(),
+ producer_fec_(&_fec),
_fecOverheadRate(clock),
_videoBitrate(clock) {
+ memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+ memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+ delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
}
RTPSenderVideo::~RTPSenderVideo()
@@ -70,13 +76,10 @@
_fecEnabled = false;
_payloadTypeRED = -1;
_payloadTypeFEC = -1;
- _codeRateKey = 0;
- _codeRateDelta = 0;
- _useUepProtectionKey = false;
- _useUepProtectionDelta = false;
- _fecProtectionFactor = 0;
- _fecUseUepProtection = false;
_numberFirstPartition = 0;
+ memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+ memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+ delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
_fecOverheadRate.Init();
return 0;
}
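Both the constructor and Init() above now reset the FEC configuration to the same
zeroed default with max_fec_frames = 1. A sketch of that state (field semantics
inferred from how FecProtectionParams is used elsewhere in this patch, so treat
them as assumptions):

    FecProtectionParams p;
    memset(&p, 0, sizeof(p));  // fec_rate = 0, use_uep_protection = false
    p.max_fec_frames = 1;      // flush FEC state after each frame; a zero rate yields no FEC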
@@ -124,199 +127,77 @@
return 0;
}
-struct RtpPacket
-{
- WebRtc_UWord16 rtpHeaderLength;
- ForwardErrorCorrection::Packet* pkt;
-};
-
WebRtc_Word32
-RTPSenderVideo::SendVideoPacket(const FrameType frameType,
- const WebRtc_UWord8* dataBuffer,
- const WebRtc_UWord16 payloadLength,
- const WebRtc_UWord16 rtpHeaderLength,
- StorageType storage)
-{
- if(_fecEnabled)
- {
- WebRtc_Word32 retVal = 0;
+RTPSenderVideo::SendVideoPacket(const WebRtc_UWord8* data_buffer,
+ const WebRtc_UWord16 payload_length,
+ const WebRtc_UWord16 rtp_header_length,
+ StorageType storage) {
+ if(_fecEnabled) {
+ int ret = 0;
+ int fec_overhead_sent = 0;
+ int video_sent = 0;
- const bool markerBit = (dataBuffer[1] & kRtpMarkerBitMask)?true:false;
- RtpPacket* ptrGenericFEC = new RtpPacket;
- ptrGenericFEC->pkt = new ForwardErrorCorrection::Packet;
- ptrGenericFEC->pkt->length = payloadLength + rtpHeaderLength;
- ptrGenericFEC->rtpHeaderLength = rtpHeaderLength;
- memcpy(ptrGenericFEC->pkt->data, dataBuffer,
- ptrGenericFEC->pkt->length);
+ RedPacket* red_packet = producer_fec_.BuildRedPacket(data_buffer,
+ payload_length,
+ rtp_header_length,
+ _payloadTypeRED);
+ // Send the media packet with a RED header.
+ int packet_success = _rtpSender.SendToNetwork(
+ red_packet->data(),
+ red_packet->length() - rtp_header_length,
+ rtp_header_length,
+ storage);
- // Add packet to FEC list
- _rtpPacketListFec.push_back(ptrGenericFEC);
- // FEC can only protect up to kMaxMediaPackets packets
- if (_mediaPacketListFec.size() <
- ForwardErrorCorrection::kMaxMediaPackets)
- {
- _mediaPacketListFec.push_back(ptrGenericFEC->pkt);
- }
+ ret |= packet_success;
- // Last packet in frame
- if (markerBit)
- {
-
- // Retain the RTP header of the last media packet to construct FEC
- // packet RTP headers.
- ForwardErrorCorrection::Packet lastMediaRtpHeader;
- memcpy(lastMediaRtpHeader.data,
- ptrGenericFEC->pkt->data,
- ptrGenericFEC->rtpHeaderLength);
-
- lastMediaRtpHeader.length = ptrGenericFEC->rtpHeaderLength;
- // Replace payload and clear marker bit.
- lastMediaRtpHeader.data[1] = _payloadTypeRED;
-
- // Number of first partition packets cannot exceed kMaxMediaPackets
- if (_numberFirstPartition >
- ForwardErrorCorrection::kMaxMediaPackets)
- {
- _numberFirstPartition =
- ForwardErrorCorrection::kMaxMediaPackets;
- }
-
- std::list<ForwardErrorCorrection::Packet*> fecPacketList;
- retVal = _fec.GenerateFEC(_mediaPacketListFec,
- _fecProtectionFactor,
- _numberFirstPartition,
- _fecUseUepProtection,
- &fecPacketList);
-
- int fecOverheadSent = 0;
- int videoSent = 0;
-
- while(!_rtpPacketListFec.empty())
- {
- WebRtc_UWord8 newDataBuffer[IP_PACKET_SIZE];
- memset(newDataBuffer, 0, sizeof(newDataBuffer));
-
- RtpPacket* packetToSend = _rtpPacketListFec.front();
-
- // Copy RTP header
- memcpy(newDataBuffer, packetToSend->pkt->data,
- packetToSend->rtpHeaderLength);
-
- // Get codec pltype
- WebRtc_UWord8 payloadType = newDataBuffer[1] & 0x7f;
-
- // Replace pltype
- newDataBuffer[1] &= 0x80; // reset
- newDataBuffer[1] += _payloadTypeRED; // replace
-
- // Add RED header
- // f-bit always 0
- newDataBuffer[packetToSend->rtpHeaderLength] = payloadType;
-
- // Copy payload data
- memcpy(newDataBuffer + packetToSend->rtpHeaderLength +
- REDForFECHeaderLength,
- packetToSend->pkt->data + packetToSend->rtpHeaderLength,
- packetToSend->pkt->length -
- packetToSend->rtpHeaderLength);
-
- _rtpPacketListFec.pop_front();
- // Check if _mediaPacketListFec is non-empty.
- // This list may be smaller than rtpPacketList, if the frame
- // has more than kMaxMediaPackets.
- if (!_mediaPacketListFec.empty()) {
- _mediaPacketListFec.pop_front();
- }
-
- // Send normal packet with RED header
- int packetSuccess = _rtpSender.SendToNetwork(
- newDataBuffer,
- packetToSend->pkt->length - packetToSend->rtpHeaderLength +
- REDForFECHeaderLength,
- packetToSend->rtpHeaderLength,
- storage);
-
- retVal |= packetSuccess;
-
- if (packetSuccess == 0)
- {
- videoSent += packetToSend->pkt->length +
- REDForFECHeaderLength;
- }
-
- delete packetToSend->pkt;
- delete packetToSend;
- packetToSend = NULL;
- }
- assert(_mediaPacketListFec.empty());
- assert(_rtpPacketListFec.empty());
-
- while(!fecPacketList.empty())
- {
- WebRtc_UWord8 newDataBuffer[IP_PACKET_SIZE];
-
- // Build FEC packets
- ForwardErrorCorrection::Packet* packetToSend = fecPacketList.front();
-
- // The returned FEC packets have no RTP headers.
- // Copy the last media packet's modified RTP header.
- memcpy(newDataBuffer, lastMediaRtpHeader.data,
- lastMediaRtpHeader.length);
-
- // Add sequence number
- ModuleRTPUtility::AssignUWord16ToBuffer(
- &newDataBuffer[2], _rtpSender.IncrementSequenceNumber());
-
- // Add RED header
- // f-bit always 0
- newDataBuffer[lastMediaRtpHeader.length] = _payloadTypeFEC;
-
- // Copy payload data
- memcpy(newDataBuffer + lastMediaRtpHeader.length +
- REDForFECHeaderLength,
- packetToSend->data,
- packetToSend->length);
-
- fecPacketList.pop_front();
-
- // Invalid FEC packet
- assert(packetToSend->length != 0);
-
- StorageType storage = kDontRetransmit;
- if (_retransmissionSettings & kRetransmitFECPackets) {
- storage = kAllowRetransmission;
- }
-
- // No marker bit on FEC packets; the last media packet carries the
- // marker. Send the FEC packet with a RED header.
- int packetSuccess = _rtpSender.SendToNetwork(
- newDataBuffer,
- packetToSend->length + REDForFECHeaderLength,
- lastMediaRtpHeader.length,
- storage);
-
- retVal |= packetSuccess;
-
- if (packetSuccess == 0)
- {
- fecOverheadSent += packetToSend->length +
- REDForFECHeaderLength + lastMediaRtpHeader.length;
- }
- }
- _videoBitrate.Update(videoSent);
- _fecOverheadRate.Update(fecOverheadSent);
- }
- return retVal;
+ if (packet_success == 0) {
+ video_sent += red_packet->length();
}
- int retVal = _rtpSender.SendToNetwork(dataBuffer,
- payloadLength,
- rtpHeaderLength,
- storage);
- if (retVal == 0)
- {
- _videoBitrate.Update(payloadLength + rtpHeaderLength);
+ delete red_packet;
+ red_packet = NULL;
+
+ ret = producer_fec_.AddRtpPacketAndGenerateFec(data_buffer,
+ payload_length,
+ rtp_header_length);
+ if (ret != 0)
+ return ret;
+
+ while (producer_fec_.FecAvailable()) {
+ red_packet = producer_fec_.GetFecPacket(
+ _payloadTypeRED,
+ _payloadTypeFEC,
+ _rtpSender.IncrementSequenceNumber());
+ StorageType storage = kDontRetransmit;
+ if (_retransmissionSettings & kRetransmitFECPackets) {
+ storage = kAllowRetransmission;
+ }
+ // Send the FEC packet with a RED header.
+ int packet_success = _rtpSender.SendToNetwork(
+ red_packet->data(),
+ red_packet->length() - rtp_header_length,
+ rtp_header_length,
+ storage);
+
+ ret |= packet_success;
+
+ if (packet_success == 0) {
+ fec_overhead_sent += red_packet->length();
+ }
+ delete red_packet;
+ red_packet = NULL;
}
- return retVal;
+ _videoBitrate.Update(video_sent);
+ _fecOverheadRate.Update(fec_overhead_sent);
+ return ret;
+ }
+ int ret = _rtpSender.SendToNetwork(data_buffer,
+ payload_length,
+ rtp_header_length,
+ storage);
+ if (ret == 0) {
+ _videoBitrate.Update(payload_length + rtp_header_length);
+ }
+ return ret;
}
WebRtc_Word32
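The rewritten SendVideoPacket() above delegates RED/FEC packetization to
ProducerFec: the media packet is wrapped in a single-block RED header and sent,
then handed to the producer, which is drained of any finished FEC packets. The RED
encapsulation itself is what the deleted inline code did by hand; roughly (buffer
names hypothetical):

    // [ RTP header | 1-byte RED header = original PT, F-bit 0 | payload ]
    WebRtc_UWord8 red[IP_PACKET_SIZE];
    memcpy(red, packet, rtp_header_length);      // keep the RTP header
    WebRtc_UWord8 original_pt = red[1] & 0x7f;
    red[1] = (red[1] & 0x80) | _payloadTypeRED;  // swap in RED PT, keep marker bit
    red[rtp_header_length] = original_pt;        // RED header, F-bit always 0
    memcpy(red + rtp_header_length + 1,
           packet + rtp_header_length, payload_length);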
@@ -345,10 +226,9 @@
_fecEnabled = enable;
_payloadTypeRED = payloadTypeRED;
_payloadTypeFEC = payloadTypeFEC;
- _codeRateKey = 0;
- _codeRateDelta = 0;
- _useUepProtectionKey = false;
- _useUepProtectionDelta = false;
+ memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+ memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+ delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
return 0;
}
@@ -374,22 +254,14 @@
return 0;
}
-WebRtc_Word32
-RTPSenderVideo::SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate)
-{
- _codeRateKey = keyFrameCodeRate;
- _codeRateDelta = deltaFrameCodeRate;
- return 0;
-}
-
-WebRtc_Word32
-RTPSenderVideo::SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection)
-{
- _useUepProtectionKey = keyUseUepProtection;
- _useUepProtectionDelta = deltaUseUepProtection;
- return 0;
+WebRtc_Word32 RTPSenderVideo::SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params) {
+ assert(delta_params);
+ assert(key_params);
+ delta_fec_params_ = *delta_params;
+ key_fec_params_ = *key_params;
+ return 0;
}
WebRtc_Word32
@@ -408,19 +280,19 @@
return -1;
}
- if (frameType == kVideoFrameKey)
- {
- _fecProtectionFactor = _codeRateKey;
- _fecUseUepProtection = _useUepProtectionKey;
- } else if (videoType == kRtpVp8Video && rtpTypeHdr->VP8.temporalIdx > 0)
- {
+ if (frameType == kVideoFrameKey) {
+ producer_fec_.SetFecParameters(&key_fec_params_,
+ _numberFirstPartition);
+ } else if (videoType == kRtpVp8Video && rtpTypeHdr->VP8.temporalIdx > 0) {
// In the current version, we only apply FEC on the base layer.
- _fecProtectionFactor = 0;
- _fecUseUepProtection = false;
- } else
- {
- _fecProtectionFactor = _codeRateDelta;
- _fecUseUepProtection = _useUepProtectionDelta;
+ FecProtectionParams params;
+ params.fec_rate = 0;
+ params.max_fec_frames = 0;
+ params.use_uep_protection = false;
+ producer_fec_.SetFecParameters(&params, _numberFirstPartition);
+ } else {
+ producer_fec_.SetFecParameters(&delta_fec_params_,
+ _numberFirstPartition);
}
// Default setting for number of first partition packets:
@@ -498,8 +370,7 @@
payloadBytesInPacket);
bytesSent += payloadBytesInPacket;
- if(-1 == SendVideoPacket(kVideoFrameKey,
- dataBuffer,
+ if(-1 == SendVideoPacket(dataBuffer,
payloadBytesInPacket,
rtpHeaderLength,
kAllowRetransmission))
@@ -583,7 +454,7 @@
// Set marker bit true if this is the last packet in frame.
_rtpSender.BuildRTPheader(dataBuffer, payloadType, last,
captureTimeStamp);
- if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket,
+ if (-1 == SendVideoPacket(dataBuffer, payloadBytesInPacket,
rtpHeaderLength, storage))
{
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h
index 1bf7142..0765e3f 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -24,6 +24,7 @@
#include "forward_error_correction.h"
#include "Bitrate.h"
#include "rtp_sender.h"
+#include "producer_fec.h"
namespace webrtc {
class CriticalSectionWrapper;
@@ -79,11 +80,8 @@
WebRtc_UWord8& payloadTypeRED,
WebRtc_UWord8& payloadTypeFEC) const;
- WebRtc_Word32 SetFECCodeRate(const WebRtc_UWord8 keyFrameCodeRate,
- const WebRtc_UWord8 deltaFrameCodeRate);
-
- WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
- const bool deltaUseUepProtection);
+ WebRtc_Word32 SetFecParameters(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params);
void ProcessBitrate();
@@ -94,8 +92,7 @@
int SetSelectiveRetransmissions(uint8_t settings);
protected:
- virtual WebRtc_Word32 SendVideoPacket(const FrameType frameType,
- const WebRtc_UWord8* dataBuffer,
+ virtual WebRtc_Word32 SendVideoPacket(const WebRtc_UWord8* dataBuffer,
const WebRtc_UWord16 payloadLength,
const WebRtc_UWord16 rtpHeaderLength,
StorageType storage);
@@ -129,15 +126,11 @@
bool _fecEnabled;
WebRtc_Word8 _payloadTypeRED;
WebRtc_Word8 _payloadTypeFEC;
- WebRtc_UWord8 _codeRateKey;
- WebRtc_UWord8 _codeRateDelta;
- bool _useUepProtectionKey;
- bool _useUepProtectionDelta;
- WebRtc_UWord8 _fecProtectionFactor;
- bool _fecUseUepProtection;
unsigned int _numberFirstPartition;
- std::list<ForwardErrorCorrection::Packet*> _mediaPacketListFec;
- std::list<RtpPacket*> _rtpPacketListFec;
+ FecProtectionParams delta_fec_params_;
+ FecProtectionParams key_fec_params_;
+ ProducerFec producer_fec_;
+
// Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
// and any padding overhead.
Bitrate _fecOverheadRate;
diff --git a/modules/udp_transport/source/udp_transport_impl.cc b/modules/udp_transport/source/udp_transport_impl.cc
index b8eb142..28ab892 100644
--- a/modules/udp_transport/source/udp_transport_impl.cc
+++ b/modules/udp_transport/source/udp_transport_impl.cc
@@ -2168,7 +2168,9 @@
information");
}else
{
- strncpy(_fromIP, ipAddress, kIpAddressVersion6Length);
+ // Make sure ipAddress is null terminated.
+ ipAddress[kIpAddressVersion6Length - 1] = 0;
+ strncpy(_fromIP, ipAddress, kIpAddressVersion6Length - 1);
}
// Filter based on port.
@@ -2226,7 +2228,9 @@
"UdpTransportImpl::IncomingRTCPFunction - Cannot get sender\
information");
}else {
- strncpy(_fromIP, ipAddress, kIpAddressVersion6Length);
+ // Make sure ipAddress is null terminated.
+ ipAddress[kIpAddressVersion6Length - 1] = 0;
+ strncpy(_fromIP, ipAddress, kIpAddressVersion6Length - 1);
}
// Filter based on port.
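Both hunks above guard against the classic strncpy pitfall: when the source is at
least as long as the count, strncpy stores no terminator. Capping the source first
bounds the copy; terminating the destination as well makes the pattern
self-contained (a sketch, with both buffers assumed to hold
kIpAddressVersion6Length bytes):

    ipAddress[kIpAddressVersion6Length - 1] = 0;            // cap the source
    strncpy(_fromIP, ipAddress, kIpAddressVersion6Length - 1);
    _fromIP[kIpAddressVersion6Length - 1] = 0;              // guarantee termination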
diff --git a/modules/video_capture/main/interface/video_capture.h b/modules/video_capture/main/interface/video_capture.h
index 20b8987..d596b71 100644
--- a/modules/video_capture/main/interface/video_capture.h
+++ b/modules/video_capture/main/interface/video_capture.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
-#include "module.h"
-#include "video_capture_defines.h"
+#include "modules/interface/module.h"
+#include "modules/video_capture/main/interface/video_capture_defines.h"
namespace webrtc {
diff --git a/modules/video_capture/main/interface/video_capture_defines.h b/modules/video_capture/main/interface/video_capture_defines.h
index 2a3408b..72188df 100644
--- a/modules/video_capture/main/interface/video_capture_defines.h
+++ b/modules/video_capture/main/interface/video_capture_defines.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -13,7 +13,7 @@
// Includes
#include "typedefs.h"
-#include "module_common_types.h"
+#include "modules/interface/module_common_types.h"
namespace webrtc
{
diff --git a/modules/video_capture/main/source/Android.mk b/modules/video_capture/main/source/Android.mk
index 8976f19..971f282 100644
--- a/modules/video_capture/main/source/Android.mk
+++ b/modules/video_capture/main/source/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -21,15 +21,15 @@
video_capture_impl.cc \
device_info_impl.cc \
video_capture_factory.cc \
- Android/video_capture_android.cc \
- Android/device_info_android.cc
+ android/video_capture_android.cc \
+ android/device_info_android.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
$(MY_WEBRTC_COMMON_DEFS)
LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/Android \
+ $(LOCAL_PATH)/android \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../source \
$(LOCAL_PATH)/../../../interface \
diff --git a/modules/video_capture/main/source/Android/device_info_android.cc b/modules/video_capture/main/source/Android/device_info_android.cc
deleted file mode 100644
index 3e22716..0000000
--- a/modules/video_capture/main/source/Android/device_info_android.cc
+++ /dev/null
@@ -1,362 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "device_info_android.h"
-
-#include <stdio.h>
-
-#include "ref_count.h"
-#include "trace.h"
-#include "video_capture_android.h"
-
-namespace webrtc
-{
-namespace videocapturemodule
-{
-VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
- const WebRtc_Word32 id)
-{
- videocapturemodule::DeviceInfoAndroid *deviceInfo =
- new videocapturemodule::DeviceInfoAndroid(id);
- if (deviceInfo && deviceInfo->Init() != 0) // Failed to init
- {
- delete deviceInfo;
- deviceInfo = NULL;
- }
- return deviceInfo;
-}
-
-DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
- DeviceInfoImpl(id)
-{
-}
-
-WebRtc_Word32 DeviceInfoAndroid::Init()
-{
- return 0;
-}
-
-DeviceInfoAndroid::~DeviceInfoAndroid()
-{
-}
-
-WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices()
-{
-
- JNIEnv *env;
- jclass javaCmDevInfoClass;
- jobject javaCmDevInfoObject;
- bool attached = false;
- if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- env,
- javaCmDevInfoClass,
- javaCmDevInfoObject,
- attached) != 0)
- {
- return 0;
- }
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
- "%s GetMethodId", __FUNCTION__);
- // get the method ID for the Android Java GetDeviceUniqueName name.
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
- "NumberOfDevices",
- "()I");
-
- jint numberOfDevices = 0;
- if (cid != NULL)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
- "%s Calling Number of devices", __FUNCTION__);
- numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
- }
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
- if (numberOfDevices > 0)
- return numberOfDevices;
- return 0;
-}
-
-WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
- WebRtc_UWord32 deviceNumber,
- char* deviceNameUTF8,
- WebRtc_UWord32 deviceNameLength,
- char* deviceUniqueIdUTF8,
- WebRtc_UWord32 deviceUniqueIdUTF8Length,
- char* /*productUniqueIdUTF8*/,
- WebRtc_UWord32 /*productUniqueIdUTF8Length*/)
-{
-
- JNIEnv *env;
- jclass javaCmDevInfoClass;
- jobject javaCmDevInfoObject;
- WebRtc_Word32 result = 0;
- bool attached = false;
- if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- env,
- javaCmDevInfoClass,
- javaCmDevInfoObject,
- attached)!= 0)
- {
- return -1;
- }
-
- // get the method ID for the Android Java GetDeviceUniqueName name.
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
- "(I)Ljava/lang/String;");
- if (cid != NULL)
- {
-
- jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
- cid, deviceNumber);
- if (javaDeviceNameObj == NULL)
- {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Failed to get device name for device %d.",
- __FUNCTION__, (int) deviceNumber);
- result = -1;
- }
- else
- {
- jboolean isCopy;
- const char* javaDeviceNameChar = env->GetStringUTFChars(
- (jstring) javaDeviceNameObj
- ,&isCopy);
- const jsize javaDeviceNameCharLength = env->GetStringUTFLength(
- (jstring) javaDeviceNameObj);
- if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceUniqueIdUTF8Length)
- {
- memcpy(deviceUniqueIdUTF8,
- javaDeviceNameChar,
- javaDeviceNameCharLength + 1);
- }
- else
- {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
- _id, "%s: deviceUniqueIdUTF8 to short.",
- __FUNCTION__);
- result = -1;
- }
- if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength)
- {
- memcpy(deviceNameUTF8,
- javaDeviceNameChar,
- javaDeviceNameCharLength + 1);
- }
- env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
- javaDeviceNameChar);
- }//javaDeviceNameObj==NULL
-
- }
- else
- {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Failed to find GetDeviceUniqueName function id",
- __FUNCTION__);
- result = -1;
- }
-
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: result %d", __FUNCTION__, (int) result);
- return result;
-
-}
-
-WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
- const char* deviceUniqueIdUTF8)
-{
-
- MapItem* item = NULL;
- while ((item = _captureCapabilities.Last()))
- {
- delete (VideoCaptureCapability*) item->GetItem();
- _captureCapabilities.Erase(item);
- }
-
- JNIEnv *env;
- jclass javaCmDevInfoClass;
- jobject javaCmDevInfoObject;
- bool attached = false;
- if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- env,
- javaCmDevInfoClass,
- javaCmDevInfoObject,
- attached) != 0)
- {
- return -1;
- }
-
- // Find the capability class
- jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
- if (javaCapClassLocal == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Can't find java class VideoCaptureCapabilityAndroid.",
- __FUNCTION__);
- return -1;
- }
-
- // get the method ID for the Android Java GetCapabilityArray .
- char signature[256];
- sprintf(signature,
- "(Ljava/lang/String;)[L%s;",
- AndroidJavaCaptureCapabilityClass);
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
- "GetCapabilityArray",
- signature);
- if (cid == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
- return -1;
- }
- // Create a jstring so we can pass the deviceUniquName to the java method.
- jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
-
- if (capureIdString == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Can't create string for method GetCapabilityArray.",
- __FUNCTION__);
- return -1;
- }
- // Call the java class and get an array with capabilities back.
- jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
- cid, capureIdString);
- if (!javaCapabilitiesObj)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Failed to call java GetCapabilityArray.",
- __FUNCTION__);
- return -1;
- }
-
- jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
- jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
- jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
- if (widthField == NULL || heigtField == NULL || maxFpsField == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Failed to get field Id.", __FUNCTION__);
- return -1;
- }
-
- const jsize numberOfCapabilities =
- env->GetArrayLength((jarray) javaCapabilitiesObj);
-
- for (jsize i = 0; i < numberOfCapabilities; ++i)
- {
- VideoCaptureCapability *cap = new VideoCaptureCapability();
- jobject capabilityElement = env->GetObjectArrayElement(
- (jobjectArray) javaCapabilitiesObj,
- i);
-
- cap->width = env->GetIntField(capabilityElement, widthField);
- cap->height = env->GetIntField(capabilityElement, heigtField);
- cap->expectedCaptureDelay = _expectedCaptureDelay;
- cap->rawType = kVideoNV21;
- cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
- cap->width, cap->height, cap->maxFPS);
- _captureCapabilities.Insert(i, cap);
- }
-
- _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
- _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
- _lastUsedDeviceNameLength + 1);
- memcpy(_lastUsedDeviceName,
- deviceUniqueIdUTF8,
- _lastUsedDeviceNameLength + 1);
-
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
- "CreateCapabilityMap %d", _captureCapabilities.Size());
-
- return _captureCapabilities.Size();
-}
-
-WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
- const char* deviceUniqueIdUTF8,
- VideoCaptureRotation& orientation)
-{
-
- JNIEnv *env;
- jclass javaCmDevInfoClass;
- jobject javaCmDevInfoObject;
- bool attached = false;
- if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- env,
- javaCmDevInfoClass,
- javaCmDevInfoObject,
- attached) != 0)
- {
- return -1;
- }
-
- // get the method ID for the Android Java GetOrientation .
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
- "(Ljava/lang/String;)I");
- if (cid == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Can't find method GetOrientation.", __FUNCTION__);
- return -1;
- }
- // Create a jstring so we can pass the deviceUniquName to the java method.
- jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
- if (capureIdString == NULL)
- {
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Can't create string for method GetCapabilityArray.",
- __FUNCTION__);
- return -1;
- }
- // Call the java class and get the orientation.
- jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
- capureIdString);
- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
- WebRtc_Word32 retValue = 0;
- switch (jorientation)
- {
- case -1: //Error
- orientation = kCameraRotate0;
- retValue = -1;
- break;
- case 0:
- orientation = kCameraRotate0;
- break;
- case 90:
- orientation = kCameraRotate90;
- break;
- case 180:
- orientation = kCameraRotate180;
- break;
- case 270:
- orientation = kCameraRotate270;
- break;
- case 360:
- orientation = kCameraRotate0;
- break;
- }
- return retValue;
-}
-} // namespace videocapturemodule
-} // namespace webrtc
diff --git a/modules/video_capture/main/source/android/device_info_android.cc b/modules/video_capture/main/source/android/device_info_android.cc
new file mode 100644
index 0000000..9d11b85
--- /dev/null
+++ b/modules/video_capture/main/source/android/device_info_android.cc
@@ -0,0 +1,348 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_android.h"
+
+#include <stdio.h>
+
+#include "ref_count.h"
+#include "trace.h"
+#include "video_capture_android.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
+ const WebRtc_Word32 id)
+{
+ videocapturemodule::DeviceInfoAndroid *deviceInfo =
+ new videocapturemodule::DeviceInfoAndroid(id);
+ if (deviceInfo && deviceInfo->Init() != 0) // Failed to init
+ {
+ delete deviceInfo;
+ deviceInfo = NULL;
+ }
+ return deviceInfo;
+}
+
+DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
+ DeviceInfoImpl(id)
+{
+}
+
+WebRtc_Word32 DeviceInfoAndroid::Init()
+{
+ return 0;
+}
+
+DeviceInfoAndroid::~DeviceInfoAndroid()
+{
+}
+
+WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices()
+{
+
+ JNIEnv *env;
+ jclass javaCmDevInfoClass;
+ jobject javaCmDevInfoObject;
+ bool attached = false;
+ if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+ env,
+ javaCmDevInfoClass,
+ javaCmDevInfoObject,
+ attached) != 0)
+ {
+ return 0;
+ }
+
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+ "%s GetMethodId", __FUNCTION__);
+ // Get the method ID for the Android Java NumberOfDevices method.
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+ "NumberOfDevices",
+ "()I");
+
+ jint numberOfDevices = 0;
+ if (cid != NULL)
+ {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+ "%s Calling Number of devices", __FUNCTION__);
+ numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
+ }
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+ if (numberOfDevices > 0)
+ return numberOfDevices;
+ return 0;
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
+ WebRtc_UWord32 deviceNumber,
+ char* deviceNameUTF8,
+ WebRtc_UWord32 deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ WebRtc_UWord32 deviceUniqueIdUTF8Length,
+ char* /*productUniqueIdUTF8*/,
+ WebRtc_UWord32 /*productUniqueIdUTF8Length*/) {
+
+ JNIEnv *env;
+ jclass javaCmDevInfoClass;
+ jobject javaCmDevInfoObject;
+ WebRtc_Word32 result = 0;
+ bool attached = false;
+ if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+ env,
+ javaCmDevInfoClass,
+ javaCmDevInfoObject,
+ attached)!= 0)
+ {
+ return -1;
+ }
+
+ // Get the method ID for the Android Java method GetDeviceUniqueName.
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
+ "(I)Ljava/lang/String;");
+ if (cid != NULL)
+ {
+
+ jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
+ cid, deviceNumber);
+ if (javaDeviceNameObj == NULL)
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Failed to get device name for device %d.",
+ __FUNCTION__, (int) deviceNumber);
+ result = -1;
+ }
+ else
+ {
+ jboolean isCopy;
+ const char* javaDeviceNameChar = env->GetStringUTFChars(
+ (jstring) javaDeviceNameObj
+ ,&isCopy);
+ const jsize javaDeviceNameCharLength =
+ env->GetStringUTFLength((jstring) javaDeviceNameObj);
+ if ((WebRtc_UWord32) javaDeviceNameCharLength <
+ deviceUniqueIdUTF8Length) {
+ memcpy(deviceUniqueIdUTF8,
+ javaDeviceNameChar,
+ javaDeviceNameCharLength + 1);
+ }
+ else
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+ _id, "%s: deviceUniqueIdUTF8 to short.",
+ __FUNCTION__);
+ result = -1;
+ }
+ if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength)
+ {
+ memcpy(deviceNameUTF8,
+ javaDeviceNameChar,
+ javaDeviceNameCharLength + 1);
+ }
+ env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
+ javaDeviceNameChar);
+ } // javaDeviceNameObj == NULL
+
+ }
+ else
+ {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+ "%s: Failed to find GetDeviceUniqueName function id",
+ __FUNCTION__);
+ result = -1;
+ }
+
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+ WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+ "%s: result %d", __FUNCTION__, (int) result);
+ return result;
+
+}
+
+WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
+ const char* deviceUniqueIdUTF8)
+{
+ MapItem* item = NULL;
+ while ((item = _captureCapabilities.Last())) {
+ delete (VideoCaptureCapability*) item->GetItem();
+ _captureCapabilities.Erase(item);
+ }
+
+ JNIEnv *env;
+ jclass javaCmDevInfoClass;
+ jobject javaCmDevInfoObject;
+ bool attached = false;
+ if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+ env,
+ javaCmDevInfoClass,
+ javaCmDevInfoObject,
+ attached) != 0) {
+ return -1;
+ }
+
+ // Find the capability class
+ jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
+ if (javaCapClassLocal == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Can't find java class VideoCaptureCapabilityAndroid.",
+ __FUNCTION__);
+ return -1;
+ }
+
+ // Get the method ID for the Android Java GetCapabilityArray method.
+ char signature[256];
+ sprintf(signature,
+ "(Ljava/lang/String;)[L%s;",
+ AndroidJavaCaptureCapabilityClass);
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+ "GetCapabilityArray",
+ signature);
+ if (cid == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
+ return -1;
+ }
+ // Create a jstring so we can pass the deviceUniqueName to the java method.
+ jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+
+ if (capureIdString == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Can't create string for method GetCapabilityArray.",
+ __FUNCTION__);
+ return -1;
+ }
+ // Call the java class and get an array with capabilities back.
+ jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
+ cid, capureIdString);
+ if (!javaCapabilitiesObj) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Failed to call java GetCapabilityArray.",
+ __FUNCTION__);
+ return -1;
+ }
+
+ jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
+ jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
+ jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
+ if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Failed to get field Id.", __FUNCTION__);
+ return -1;
+ }
+
+ const jsize numberOfCapabilities =
+ env->GetArrayLength((jarray) javaCapabilitiesObj);
+
+ for (jsize i = 0; i < numberOfCapabilities; ++i) {
+ VideoCaptureCapability *cap = new VideoCaptureCapability();
+ jobject capabilityElement = env->GetObjectArrayElement(
+ (jobjectArray) javaCapabilitiesObj,
+ i);
+
+ cap->width = env->GetIntField(capabilityElement, widthField);
+ cap->height = env->GetIntField(capabilityElement, heigtField);
+ cap->expectedCaptureDelay = _expectedCaptureDelay;
+ cap->rawType = kVideoNV21;
+ cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
+ cap->width, cap->height, cap->maxFPS);
+ _captureCapabilities.Insert(i, cap);
+ }
+
+ _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
+ _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+ _lastUsedDeviceNameLength + 1);
+ memcpy(_lastUsedDeviceName,
+ deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength + 1);
+
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+ "CreateCapabilityMap %d", _captureCapabilities.Size());
+
+ return _captureCapabilities.Size();
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
+ const char* deviceUniqueIdUTF8,
+ VideoCaptureRotation& orientation)
+{
+ JNIEnv *env;
+ jclass javaCmDevInfoClass;
+ jobject javaCmDevInfoObject;
+ bool attached = false;
+ if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+ env,
+ javaCmDevInfoClass,
+ javaCmDevInfoObject,
+ attached) != 0) {
+ return -1;
+ }
+
+ // Get the method ID for the Android Java GetOrientation method.
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
+ "(Ljava/lang/String;)I");
+ if (cid == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Can't find method GetOrientation.", __FUNCTION__);
+ return -1;
+ }
+ // Create a jstring so we can pass the deviceUniqueName to the java method.
+ jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+ if (capureIdString == NULL) {
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Can't create string for method GetCapabilityArray.",
+ __FUNCTION__);
+ return -1;
+ }
+ // Call the java class and get the orientation.
+ jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
+ capureIdString);
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+ WebRtc_Word32 retValue = 0;
+ switch (jorientation) {
+ case -1: // Error
+ orientation = kCameraRotate0;
+ retValue = -1;
+ break;
+ case 0:
+ orientation = kCameraRotate0;
+ break;
+ case 90:
+ orientation = kCameraRotate90;
+ break;
+ case 180:
+ orientation = kCameraRotate180;
+ break;
+ case 270:
+ orientation = kCameraRotate270;
+ break;
+ case 360:
+ orientation = kCameraRotate0;
+ break;
+ }
+ return retValue;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/modules/video_capture/main/source/Android/device_info_android.h b/modules/video_capture/main/source/android/device_info_android.h
similarity index 91%
rename from modules/video_capture/main/source/Android/device_info_android.h
rename to modules/video_capture/main/source/android/device_info_android.h
index 5125d96..8e02b75 100644
--- a/modules/video_capture/main/source/Android/device_info_android.h
+++ b/modules/video_capture/main/source/android/device_info_android.h
@@ -23,9 +23,11 @@
namespace videocapturemodule
{
-// Android logging, uncomment to print trace to logcat instead of trace file/callback
-//#include <android/log.h>
-//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+// Android logging, uncomment to print trace to
+// logcat instead of trace file/callback
+// #include <android/log.h>
+// #define WEBRTC_TRACE(a,b,c,...) \
+// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
class DeviceInfoAndroid: public DeviceInfoImpl
{
diff --git a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
similarity index 88%
rename from modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
rename to modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
index 0443953..0cfe457 100644
--- a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
+++ b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
similarity index 98%
rename from modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
rename to modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
index a4c39a8..1029162 100644
--- a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
similarity index 99%
rename from modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
rename to modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
index 4ccf060..e8f63f0 100644
--- a/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ b/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_capture/main/source/Android/video_capture_android.cc b/modules/video_capture/main/source/android/video_capture_android.cc
similarity index 88%
rename from modules/video_capture/main/source/Android/video_capture_android.cc
rename to modules/video_capture/main/source/android/video_capture_android.cc
index 7694716..f73837d 100644
--- a/modules/video_capture/main/source/Android/video_capture_android.cc
+++ b/modules/video_capture/main/source/android/video_capture_android.cc
@@ -35,15 +35,20 @@
return implementation;
}
-// Android logging, uncomment to print trace to logcat instead of trace file/callback
-//#include <android/log.h>
-//#undef WEBRTC_TRACE
-//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+// Android logging, uncomment to print trace to
+// logcat instead of trace file/callback
+// #include <android/log.h>
+// #undef WEBRTC_TRACE
+// #define WEBRTC_TRACE(a,b,c,...) \
+// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
JavaVM* VideoCaptureAndroid::g_jvm = NULL;
-jclass VideoCaptureAndroid::g_javaCmClass = NULL; //VideoCaptureAndroid.java
-jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; //VideoCaptureDeviceInfoAndroid.java
-jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; //static instance of VideoCaptureDeviceInfoAndroid.java
+//VideoCaptureAndroid.java
+jclass VideoCaptureAndroid::g_javaCmClass = NULL;
+//VideoCaptureDeviceInfoAndroid.java
+jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
+//static instance of VideoCaptureDeviceInfoAndroid.java
+jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
jobject VideoCaptureAndroid::g_javaContext = NULL;
/*
@@ -73,7 +78,8 @@
"%s: could not find java class", __FUNCTION__);
return -1;
}
- // create a global reference to the class (to tell JNI that we are referencing it
+ // create a global reference to the class
+ // (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmClass = static_cast<jclass>
(env->NewGlobalRef(javaCmClassLocal));
@@ -112,7 +118,8 @@
return -1;
}
- // create a global reference to the class (to tell JNI that we are referencing it
+ // create a global reference to the class
+ // (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmDevInfoClass = static_cast<jclass>
(env->NewGlobalRef(javaCmDevInfoClassLocal));
@@ -132,14 +139,16 @@
// get the method ID for the Android Java CaptureClass static
//CreateVideoCaptureAndroid factory method.
- jmethodID cid = env->GetStaticMethodID(g_javaCmDevInfoClass,
- "CreateVideoCaptureDeviceInfoAndroid",
- "(ILandroid/content/Context;)"
- "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
+ jmethodID cid = env->GetStaticMethodID(
+ g_javaCmDevInfoClass,
+ "CreateVideoCaptureDeviceInfoAndroid",
+ "(ILandroid/content/Context;)"
+ "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
if (cid == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not get java VideoCaptureDeviceInfoAndroid constructor ID",
+ "%s: could not get java"
+ "VideoCaptureDeviceInfoAndroid constructor ID",
__FUNCTION__);
return -1;
}
@@ -148,10 +157,10 @@
"%s: construct static java device object", __FUNCTION__);
// construct the object by calling the static constructor object
- jobject javaCameraDeviceInfoObjLocal = env->CallStaticObjectMethod(
- g_javaCmDevInfoClass,
- cid, (int) -1,
- g_javaContext);
+ jobject javaCameraDeviceInfoObjLocal =
+ env->CallStaticObjectMethod(g_javaCmDevInfoClass,
+ cid, (int) -1,
+ g_javaContext);
if (!javaCameraDeviceInfoObjLocal)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
@@ -159,13 +168,16 @@
__FUNCTION__);
return -1;
}
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
+ // create a reference to the object (to tell JNI that
+ // we are referencing it after this function has returned)
g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
if (!g_javaCmDevInfoObject)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1,
- "%s: could not create Java cameradevinceinfo object reference",
+ WEBRTC_TRACE(webrtc::kTraceError,
+ webrtc::kTraceVideoCapture,
+ -1,
+ "%s: could not create Java"
+ "cameradevinceinfo object reference",
__FUNCTION__);
return -1;
}
@@ -216,10 +228,10 @@
}
WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- JNIEnv*& env,
- jclass& javaCmDevInfoClass,
- jobject& javaCmDevInfoObject,
- bool& attached)
+ JNIEnv*& env,
+ jclass& javaCmDevInfoClass,
+ jobject& javaCmDevInfoObject,
+ bool& attached)
{
// get the JNI env for this thread
if (!g_jvm)
@@ -250,19 +262,19 @@
}
-WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(bool attached)
-{
- if (attached && g_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- return -1;
- }
- return 0;
+WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
+ bool attached) {
+ if (attached && g_jvm->DetachCurrentThread() < 0) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+ "%s: Could not detach thread from JVM", __FUNCTION__);
+ return -1;
+ }
+ return 0;
}
/*
- * JNI callback from Java class. Called when the camera has a new frame to deliver
+ * JNI callback from Java class. Called
+ * when the camera has a new frame to deliver.
* Class: org_webrtc_capturemodule_VideoCaptureAndroid
* Method: ProvideCameraFrame
* Signature: ([BIJ)V
@@ -273,7 +285,8 @@
jint length,
jlong context)
{
- VideoCaptureAndroid* captureModule=reinterpret_cast<VideoCaptureAndroid*>(context);
+ VideoCaptureAndroid* captureModule =
+ reinterpret_cast<VideoCaptureAndroid*>(context);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
-1, "%s: IncomingFrame %d", __FUNCTION__,length);
jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
@@ -313,8 +326,11 @@
if (_capInfo.Init() != 0)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Failed to initialize CaptureDeviceInfo", __FUNCTION__);
+ WEBRTC_TRACE(webrtc::kTraceError,
+ webrtc::kTraceVideoCapture,
+ _id,
+ "%s: Failed to initialize CaptureDeviceInfo",
+ __FUNCTION__);
return -1;
}
@@ -350,7 +366,8 @@
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"get method id");
- // get the method ID for the Android Java CaptureDeviceInfoClass AllocateCamera factory method.
+ // get the method ID for the Android Java
+ // CaptureDeviceInfoClass AllocateCamera factory method.
char signature[256];
sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
@@ -438,9 +455,10 @@
// get the method ID for the Android Java CaptureClass static
// DeleteVideoCaptureAndroid method. Call this to release the camera so
// another application can use it.
- jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
- "DeleteVideoCaptureAndroid",
- "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+ jmethodID cid = env->GetStaticMethodID(
+ g_javaCmClass,
+ "DeleteVideoCaptureAndroid",
+ "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
if (cid != NULL)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
@@ -473,7 +491,7 @@
}
WebRtc_Word32 VideoCaptureAndroid::StartCapture(
- const VideoCaptureCapability& capability)
+ const VideoCaptureCapability& capability)
{
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
@@ -516,7 +534,8 @@
"%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
_frameInfo.height);
- // get the method ID for the Android Java CaptureClass static StartCapture method.
+ // get the method ID for the Android Java
+ // CaptureClass static StartCapture method.
jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
if (cid != NULL)
{
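
For reference, the ProvideCameraFrame callback described above must match the Java native signature ([BIJ)V: a byte[] frame, an int length, and a long carrying the native VideoCaptureAndroid pointer, exactly as the reinterpret_cast in the hunk above shows. A sketch of the shape of such an entry point (registration and the frame hand-off are elided; the function name here is illustrative):

    void JNICALL ProvideCameraFrameSketch(JNIEnv* env, jobject,
                                          jbyteArray javaCameraFrame,
                                          jint length, jlong context) {
      VideoCaptureAndroid* captureModule =
          reinterpret_cast<VideoCaptureAndroid*>(context);
      jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
      if (cameraFrame == NULL)
        return;  // The VM could not pin or copy the array.
      // ... hand |cameraFrame| and |length| to |captureModule| here ...
      // Release with JNI_ABORT: the buffer was only read, no copy-back needed.
      env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
    }
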
diff --git a/modules/video_capture/main/source/Android/video_capture_android.h b/modules/video_capture/main/source/android/video_capture_android.h
similarity index 92%
rename from modules/video_capture/main/source/Android/video_capture_android.h
rename to modules/video_capture/main/source/android/video_capture_android.h
index 1ea70cf..fc72323 100644
--- a/modules/video_capture/main/source/Android/video_capture_android.h
+++ b/modules/video_capture/main/source/android/video_capture_android.h
@@ -36,7 +36,8 @@
const char* deviceUniqueIdUTF8);
- virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability);
+ virtual WebRtc_Word32 StartCapture(
+ const VideoCaptureCapability& capability);
virtual WebRtc_Word32 StopCapture();
virtual bool CaptureStarted();
virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
@@ -56,7 +57,8 @@
static JavaVM* g_jvm;
static jclass g_javaCmClass;
static jclass g_javaCmDevInfoClass;
- static jobject g_javaCmDevInfoObject; //Static java object implementing the needed device info functions;
+ // Static java object implementing the needed device info functions.
+ static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
};
} // namespace videocapturemodule
diff --git a/modules/video_capture/main/source/video_capture.gypi b/modules/video_capture/main/source/video_capture.gypi
index de696ae..99a4937 100644
--- a/modules/video_capture/main/source/video_capture.gypi
+++ b/modules/video_capture/main/source/video_capture.gypi
@@ -186,13 +186,13 @@
}], # win
['OS=="android"', {
'include_dirs': [
- 'Android',
+ 'android',
],
'sources': [
- 'Android/device_info_android.cc',
- 'Android/device_info_android.h',
- 'Android/video_capture_android.cc',
- 'Android/video_capture_android.h',
+ 'android/device_info_android.cc',
+ 'android/device_info_android.h',
+ 'android/video_capture_android.cc',
+ 'android/video_capture_android.h',
],
}], # android
], # conditions
diff --git a/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 19c40ad..472c3b6 100644
--- a/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -148,11 +148,10 @@
}
// Run with 5% packet loss. Quality should be a bit lower.
-// TODO(mflodman): Reenable this once it's not flaky.
TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
config_.networking_config.packet_loss_probability = 0.05;
double minimum_avg_psnr = 21;
- double minimum_min_psnr = 17;
+ double minimum_min_psnr = 16;
double minimum_avg_ssim = 0.6;
double minimum_min_ssim = 0.4;
ProcessFramesAndVerify(minimum_avg_psnr, minimum_min_psnr,
@@ -164,8 +163,8 @@
config_.networking_config.packet_loss_probability = 0.10;
double minimum_avg_psnr = 19;
double minimum_min_psnr = 16;
- double minimum_avg_ssim = 0.6;
- double minimum_min_ssim = 0.4;
+ double minimum_avg_ssim = 0.5;
+ double minimum_min_ssim = 0.35;
ProcessFramesAndVerify(minimum_avg_psnr, minimum_min_psnr,
minimum_avg_ssim, minimum_min_ssim);
}
diff --git a/modules/video_coding/codecs/vp8/main/interface/vp8.h b/modules/video_coding/codecs/vp8/main/interface/vp8.h
index 3ee312b..ff13386 100644
--- a/modules/video_coding/codecs/vp8/main/interface/vp8.h
+++ b/modules/video_coding/codecs/vp8/main/interface/vp8.h
@@ -107,7 +107,7 @@
VP8Encoder();
// Call encoder initialize function and set control settings.
- int InitAndSetControlSettings();
+ int InitAndSetControlSettings(const VideoCodec* inst);
// Update frame size for codec.
int UpdateCodecFrameSize(WebRtc_UWord32 input_image_width,
diff --git a/modules/video_coding/codecs/vp8/main/source/vp8.cc b/modules/video_coding/codecs/vp8/main/source/vp8.cc
index c274896..9b0baea 100644
--- a/modules/video_coding/codecs/vp8/main/source/vp8.cc
+++ b/modules/video_coding/codecs/vp8/main/source/vp8.cc
@@ -51,6 +51,7 @@
encoder_(NULL),
config_(NULL),
raw_(NULL) {
+ memset(&codec_, 0, sizeof(codec_));
uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
srand(seed);
}
@@ -268,10 +269,10 @@
break;
}
rps_->Init();
- return InitAndSetControlSettings();
+ return InitAndSetControlSettings(inst);
}
-int VP8Encoder::InitAndSetControlSettings() {
+int VP8Encoder::InitAndSetControlSettings(const VideoCodec* inst) {
vpx_codec_flags_t flags = 0;
// TODO(holmer): We should make a smarter decision on the number of
// partitions. Eight is probably not the optimal number for low resolution
@@ -287,7 +288,8 @@
vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
vpx_codec_control(encoder_, VP8E_SET_TOKEN_PARTITIONS,
static_cast<vp8e_token_partitions>(token_partitions_));
- vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY, 2);
+ vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY,
+ inst->codecSpecific.VP8.denoisingOn ? 1 : 0);
#if WEBRTC_LIBVPX_VERSION >= 971
vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
rc_max_intra_target_);
@@ -570,6 +572,7 @@
propagation_cnt_(-1),
latest_keyframe_complete_(false),
mfqe_enabled_(false) {
+ memset(&codec_, 0, sizeof(codec_));
}
VP8Decoder::~VP8Decoder() {
@@ -589,6 +592,9 @@
}
int VP8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) {
+ if (inst == NULL) {
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
int ret_val = Release();
if (ret_val < 0 ) {
return ret_val;
@@ -596,7 +602,7 @@
if (decoder_ == NULL) {
decoder_ = new vpx_dec_ctx_t;
}
- if (inst && inst->codecType == kVideoCodecVP8) {
+ if (inst->codecType == kVideoCodecVP8) {
feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
}
vpx_codec_dec_cfg_t cfg;
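
The encoder-side change above replaces the hard-coded noise sensitivity of 2 with a value driven by the codec settings, so applications now opt in explicitly. A minimal configuration sketch (the helper name is hypothetical; the default set in codec_database.cc below is false):

    // Enable VP8 denoising through the codec settings; InitEncode() then
    // issues vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY, 1)
    // instead of the previous fixed value of 2, and 0 when denoisingOn
    // is false.
    void EnableVp8DenoisingSketch(VideoCodec* settings) {
      settings->codecType = kVideoCodecVP8;
      settings->codecSpecific.VP8.denoisingOn = true;
    }
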
diff --git a/modules/video_coding/main/interface/video_coding_defines.h b/modules/video_coding/main/interface/video_coding_defines.h
index 3755f6d..bd2a98c 100644
--- a/modules/video_coding/main/interface/video_coding_defines.h
+++ b/modules/video_coding/main/interface/video_coding_defines.h
@@ -14,8 +14,7 @@
#include "typedefs.h"
#include "modules/interface/module_common_types.h"
-namespace webrtc
-{
+namespace webrtc {
// Error codes
#define VCM_FRAME_NOT_READY 3
@@ -41,155 +40,148 @@
#define VCM_VP8_PAYLOAD_TYPE 120
#define VCM_I420_PAYLOAD_TYPE 124
-enum VCMNackProperties
-{
- kNackHistoryLength = 450
+enum VCMNackProperties {
+ kNackHistoryLength = 450
};
-enum VCMVideoProtection
-{
- kProtectionNack, // Both send-side and receive-side
- kProtectionNackSender, // Send-side only
- kProtectionNackReceiver, // Receive-side only
- kProtectionDualDecoder,
- kProtectionFEC,
- kProtectionNackFEC,
- kProtectionKeyOnLoss,
- kProtectionKeyOnKeyLoss,
- kProtectionPeriodicKeyFrames
+enum VCMVideoProtection {
+ kProtectionNack, // Both send-side and receive-side
+ kProtectionNackSender, // Send-side only
+ kProtectionNackReceiver, // Receive-side only
+ kProtectionDualDecoder,
+ kProtectionFEC,
+ kProtectionNackFEC,
+ kProtectionKeyOnLoss,
+ kProtectionKeyOnKeyLoss,
+ kProtectionPeriodicKeyFrames
};
-enum VCMTemporalDecimation
-{
- kBitrateOverUseDecimation,
+enum VCMTemporalDecimation {
+ kBitrateOverUseDecimation,
};
-struct VCMFrameCount
-{
- WebRtc_UWord32 numKeyFrames;
- WebRtc_UWord32 numDeltaFrames;
+struct VCMFrameCount {
+ WebRtc_UWord32 numKeyFrames;
+ WebRtc_UWord32 numDeltaFrames;
};
-
// Callback class used for sending data ready to be packetized
-class VCMPacketizationCallback
-{
-public:
- virtual WebRtc_Word32 SendData(
- const FrameType frameType,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- const WebRtc_UWord32 payloadSize,
- const RTPFragmentationHeader& fragmentationHeader,
- const RTPVideoHeader* rtpVideoHdr) = 0;
-protected:
- virtual ~VCMPacketizationCallback() {}
+class VCMPacketizationCallback {
+ public:
+ virtual WebRtc_Word32 SendData(
+ const FrameType frameType, const WebRtc_UWord8 payloadType,
+ const WebRtc_UWord32 timeStamp, const WebRtc_UWord8* payloadData,
+ const WebRtc_UWord32 payloadSize,
+ const RTPFragmentationHeader& fragmentationHeader,
+ const RTPVideoHeader* rtpVideoHdr) = 0;
+ protected:
+ virtual ~VCMPacketizationCallback() {
+ }
};
// Callback class used for storing received frames for later use.
-class VCMFrameStorageCallback
-{
-public:
- virtual WebRtc_Word32 StoreReceivedFrame(const EncodedVideoData& frameToStore) = 0;
+class VCMFrameStorageCallback {
+ public:
+ virtual WebRtc_Word32 StoreReceivedFrame(
+ const EncodedVideoData& frameToStore) = 0;
-protected:
- virtual ~VCMFrameStorageCallback() {}
+ protected:
+ virtual ~VCMFrameStorageCallback() {
+ }
};
// Callback class used for passing decoded frames which are ready to be rendered.
-class VCMReceiveCallback
-{
-public:
- virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
- virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}
+class VCMReceiveCallback {
+ public:
+ virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
+ virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+ const WebRtc_UWord64 pictureId) {
+ return -1;
+ }
-protected:
- virtual ~VCMReceiveCallback() {}
+ protected:
+ virtual ~VCMReceiveCallback() {
+ }
};
// Callback class used for informing the user of the bit rate and frame rate produced by the
// encoder.
-class VCMSendStatisticsCallback
-{
-public:
- virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
- const WebRtc_UWord32 frameRate) = 0;
+class VCMSendStatisticsCallback {
+ public:
+ virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
+ const WebRtc_UWord32 frameRate) = 0;
-protected:
- virtual ~VCMSendStatisticsCallback() {}
+ protected:
+ virtual ~VCMSendStatisticsCallback() {
+ }
};
// Callback class used for informing the user of the incoming bit rate and frame rate.
-class VCMReceiveStatisticsCallback
-{
-public:
- virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
- const WebRtc_UWord32 frameRate) = 0;
+class VCMReceiveStatisticsCallback {
+ public:
+ virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
+ const WebRtc_UWord32 frameRate) = 0;
-protected:
- virtual ~VCMReceiveStatisticsCallback() {}
+ protected:
+ virtual ~VCMReceiveStatisticsCallback() {
+ }
};
-// Callback class used for telling the user about the requested amount of
-// bit stream protection: FEC rate for key and delta frame;
-// whether the FEC uses unequal protection (UEP) across packets,
-// for key and delta frame;
-// and whether NACK should be on or off.
-class VCMProtectionCallback
-{
-public:
- virtual int ProtectionRequest(
- uint8_t delta_fec_rate,
- uint8_t key_fec_rate,
- bool delta_use_uep_protection,
- bool key_use_uep_protection,
- bool nack_enabled,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) = 0;
+// Callback class used for telling the user how to configure the FEC; the
+// rates sent during the last second are returned to the VCM.
+class VCMProtectionCallback {
+ public:
+ virtual int ProtectionRequest(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) = 0;
-protected:
- virtual ~VCMProtectionCallback() {}
+ protected:
+ virtual ~VCMProtectionCallback() {
+ }
};
// Callback class used for telling the user what frame type is needed to continue decoding.
// Typically a key frame when the stream has been corrupted in some way.
-class VCMFrameTypeCallback
-{
-public:
- virtual WebRtc_Word32 RequestKeyFrame() = 0;
- virtual WebRtc_Word32 SliceLossIndicationRequest(const WebRtc_UWord64 pictureId) {return -1;}
+class VCMFrameTypeCallback {
+ public:
+ virtual WebRtc_Word32 RequestKeyFrame() = 0;
+ virtual WebRtc_Word32 SliceLossIndicationRequest(
+ const WebRtc_UWord64 pictureId) {
+ return -1;
+ }
-protected:
- virtual ~VCMFrameTypeCallback() {}
+ protected:
+ virtual ~VCMFrameTypeCallback() {
+ }
};
// Callback class used for telling the user about which packet sequence numbers are currently
// missing and need to be resent.
-class VCMPacketRequestCallback
-{
-public:
- virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
- WebRtc_UWord16 length) = 0;
+class VCMPacketRequestCallback {
+ public:
+ virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+ WebRtc_UWord16 length) = 0;
-protected:
- virtual ~VCMPacketRequestCallback() {}
+ protected:
+ virtual ~VCMPacketRequestCallback() {
+ }
};
// Callback used to inform the user of the desired resolution
// as subscribed by Media Optimization (Quality Modes)
-class VCMQMSettingsCallback
-{
-public:
- virtual WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
- const WebRtc_UWord32 width,
- const WebRtc_UWord32 height) = 0;
+class VCMQMSettingsCallback {
+ public:
+ virtual WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+ const WebRtc_UWord32 width,
+ const WebRtc_UWord32 height) = 0;
-protected:
- virtual ~VCMQMSettingsCallback() {}
+ protected:
+ virtual ~VCMQMSettingsCallback() {
+ }
};
-} // namespace webrtc
+} // namespace webrtc
#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
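
The reshaped VCMProtectionCallback above bundles the per-frame-class FEC settings into two FecProtectionParams structs (delta and key) and drops the explicit NACK flag. Judging from the media_optimization.cc hunk below, the struct carries at least fec_rate, use_uep_protection and max_fec_frames. A sketch of an implementation against the new interface (the FEC plumbing and rate bookkeeping are illustrative, not the actual RTP module):

    class ProtectionRequestHandlerSketch : public VCMProtectionCallback {
     public:
      virtual int ProtectionRequest(const FecProtectionParams* delta_params,
                                    const FecProtectionParams* key_params,
                                    uint32_t* sent_video_rate_bps,
                                    uint32_t* sent_nack_rate_bps,
                                    uint32_t* sent_fec_rate_bps) {
        // Configure FEC for delta and key frames from the bundled params.
        ConfigureFec(*delta_params, *key_params);  // hypothetical helper
        // Report the rates actually sent during the last second to the VCM.
        *sent_video_rate_bps = video_rate_bps_;
        *sent_nack_rate_bps = nack_rate_bps_;
        *sent_fec_rate_bps = fec_rate_bps_;
        return 0;
      }
     private:
      void ConfigureFec(const FecProtectionParams& delta,
                        const FecProtectionParams& key);
      uint32_t video_rate_bps_;  // illustrative bookkeeping
      uint32_t nack_rate_bps_;
      uint32_t fec_rate_bps_;
    };
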
diff --git a/modules/video_coding/main/source/codec_database.cc b/modules/video_coding/main/source/codec_database.cc
index 6f81895..22c554d 100644
--- a/modules/video_coding/main/source/codec_database.cc
+++ b/modules/video_coding/main/source/codec_database.cc
@@ -173,6 +173,7 @@
settings->numberOfSimulcastStreams = 0;
settings->codecSpecific.VP8.resilience = kResilientStream;
settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
+ settings->codecSpecific.VP8.denoisingOn = false;
break;
}
#endif
diff --git a/modules/video_coding/main/source/generic_encoder.cc b/modules/video_coding/main/source/generic_encoder.cc
index 58cdd99..87c6c96 100644
--- a/modules/video_coding/main/source/generic_encoder.cc
+++ b/modules/video_coding/main/source/generic_encoder.cc
@@ -158,6 +158,8 @@
_mediaOpt(NULL),
_encodedBytes(0),
_payloadType(0),
+_codecType(kVideoCodecUnknown),
+_internalSource(false),
_bitStreamAfterEncoder(NULL)
{
#ifdef DEBUG_ENCODER_BIT_STREAM
diff --git a/modules/video_coding/main/source/jitter_buffer.cc b/modules/video_coding/main/source/jitter_buffer.cc
index 4b146b6..b4eee60 100644
--- a/modules/video_coding/main/source/jitter_buffer.cc
+++ b/modules/video_coding/main/source/jitter_buffer.cc
@@ -1652,7 +1652,7 @@
bool
VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const
{
- if (_NACKSeqNum && _NACKSeqNumLength > 0)
+ if (_NACKSeqNumLength > 0)
{
for (WebRtc_UWord16 i = 0; i < _NACKSeqNumLength; i++)
{
diff --git a/modules/video_coding/main/source/media_optimization.cc b/modules/video_coding/main/source/media_optimization.cc
index 552aaab..a3dd89f 100644
--- a/modules/video_coding/main/source/media_optimization.cc
+++ b/modules/video_coding/main/source/media_optimization.cc
@@ -9,6 +9,7 @@
*/
#include "media_optimization.h"
+
#include "content_metrics_processing.h"
#include "frame_dropper.h"
#include "qm_select.h"
@@ -24,8 +25,6 @@
_sendCodecType(kVideoCodecUnknown),
_codecWidth(0),
_codecHeight(0),
-_initCodecWidth(0),
-_initCodecHeight(0),
_userFrameRate(0),
_packetLossEnc(0),
_fractionLost(0),
@@ -208,33 +207,29 @@
{
return VCM_OK;
}
+ FecProtectionParams delta_fec_params;
+ FecProtectionParams key_fec_params;
// Get the FEC code rate for Key frames (set to 0 when NA)
- const WebRtc_UWord8
- codeRateKeyRTP = selected_method->RequiredProtectionFactorK();
+ key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
// Get the FEC code rate for Delta frames (set to 0 when NA)
- const WebRtc_UWord8
- codeRateDeltaRTP = selected_method->RequiredProtectionFactorD();
+ delta_fec_params.fec_rate =
+ selected_method->RequiredProtectionFactorD();
// Get the FEC-UEP protection status for Key frames: UEP on/off
- const bool
- useUepProtectionKeyRTP = selected_method->RequiredUepProtectionK();
+ key_fec_params.use_uep_protection =
+ selected_method->RequiredUepProtectionK();
// Get the FEC-UEP protection status for Delta frames: UEP on/off
- const bool
- useUepProtectionDeltaRTP = selected_method->RequiredUepProtectionD();
+ delta_fec_params.use_uep_protection =
+ selected_method->RequiredUepProtectionD();
- // NACK is on for NACK and NackFec protection method: off for FEC method
- bool nackStatus = (selected_method->Type() == kNackFec ||
- selected_method->Type() == kNack);
+ delta_fec_params.max_fec_frames = 1;
+ key_fec_params.max_fec_frames = 1;
// TODO(Marco): Pass FEC protection values per layer.
-
- return _videoProtectionCallback->ProtectionRequest(codeRateDeltaRTP,
- codeRateKeyRTP,
- useUepProtectionDeltaRTP,
- useUepProtectionKeyRTP,
- nackStatus,
+ return _videoProtectionCallback->ProtectionRequest(&delta_fec_params,
+ &key_fec_params,
video_rate_bps,
nack_overhead_rate_bps,
fec_overhead_rate_bps);
@@ -286,8 +281,6 @@
_userFrameRate = static_cast<float>(frameRate);
_codecWidth = width;
_codecHeight = height;
- _initCodecWidth = width;
- _initCodecHeight = height;
_numLayers = (numLayers <= 1) ? 1 : numLayers; // Can also be zero.
WebRtc_Word32 ret = VCM_OK;
ret = _qmResolution->Initialize((float)_targetBitRate, _userFrameRate,
@@ -577,41 +570,39 @@
bool VCMMediaOptimization::QMUpdate(VCMResolutionScale* qm) {
// Check for no change
- if (!qm->change_resolution) {
+ if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
return false;
}
// Check for change in frame rate.
- if (qm->temporal_fact != 1.0f) {
- _incomingFrameRate = _incomingFrameRate / qm->temporal_fact + 0.5f;
+ if (qm->change_resolution_temporal) {
+ _incomingFrameRate = qm->frame_rate;
+ // Reset frame rate estimate.
memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
}
// Check for change in frame size.
- if (qm->spatial_height_fact != 1.0 || qm->spatial_width_fact != 1.0) {
- _codecWidth = static_cast<uint16_t>(_codecWidth /
- qm->spatial_width_fact);
- _codecHeight = static_cast<uint16_t>(_codecHeight /
- qm->spatial_height_fact);
- // New frame sizes should not exceed original size from SetEncodingData().
- assert(_codecWidth <= _initCodecWidth);
- assert(_codecHeight <= _initCodecHeight);
- // Check that new frame sizes are multiples of two.
- assert(_codecWidth % 2 == 0);
- assert(_codecHeight % 2 == 0);
+ if (qm->change_resolution_spatial) {
+ _codecWidth = qm->codec_width;
+ _codecHeight = qm->codec_height;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, _id,
- "Quality Mode Update: W = %d, H = %d, FR = %f",
- _codecWidth, _codecHeight, _incomingFrameRate);
+ "Resolution change from QM select: W = %d, H = %d, FR = %f",
+ qm->codec_width, qm->codec_height, qm->frame_rate);
- // Update VPM with new target frame rate and size
- _videoQMSettingsCallback->SetVideoQMSettings(_incomingFrameRate,
+ // Update VPM with new target frame rate and frame size.
+ // Note: use |qm->frame_rate| instead of |_incomingFrameRate| for updating
+ // the target frame rate in the VPM frame dropper. |_incomingFrameRate|
+ // will vary/fluctuate over time; since we don't want to change the state
+ // of the VPM frame dropper unless a temporal action was selected, we use
+ // the steadier |qm->frame_rate| for updating.
+ _videoQMSettingsCallback->SetVideoQMSettings(qm->frame_rate,
_codecWidth,
_codecHeight);
-
- _content->UpdateFrameRate(_incomingFrameRate);
- _qmResolution->UpdateCodecFrameSize(_codecWidth, _codecHeight);
+ _content->UpdateFrameRate(qm->frame_rate);
+ _qmResolution->UpdateCodecParameters(qm->frame_rate, _codecWidth,
+ _codecHeight);
return true;
}
diff --git a/modules/video_coding/main/source/media_optimization.h b/modules/video_coding/main/source/media_optimization.h
index 14e5d1a..7d87a6d 100644
--- a/modules/video_coding/main/source/media_optimization.h
+++ b/modules/video_coding/main/source/media_optimization.h
@@ -168,8 +168,6 @@
VideoCodecType _sendCodecType;
WebRtc_UWord16 _codecWidth;
WebRtc_UWord16 _codecHeight;
- WebRtc_UWord16 _initCodecWidth;
- WebRtc_UWord16 _initCodecHeight;
float _userFrameRate;
VCMFrameDropper* _frameDropper;
diff --git a/modules/video_coding/main/source/qm_select.cc b/modules/video_coding/main/source/qm_select.cc
index ef273c0..ba1e3af 100644
--- a/modules/video_coding/main/source/qm_select.cc
+++ b/modules/video_coding/main/source/qm_select.cc
@@ -26,9 +26,11 @@
: content_metrics_(NULL),
width_(0),
height_(0),
+ user_frame_rate_(0.0f),
native_width_(0),
native_height_(0),
- framerate_level_(kDefault),
+ native_frame_rate_(0.0f),
+ framerate_level_(kFrameRateHigh),
init_(false) {
ResetQM();
}
@@ -134,13 +136,15 @@
return static_cast<ImageType>(isel);
}
-LevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
+FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
if (avg_framerate < kLowFrameRate) {
- return kLow;
- } else if (avg_framerate > kHighFrameRate) {
- return kHigh;
+ return kFrameRateLow;
+ } else if (avg_framerate < kMiddleFrameRate) {
+ return kFrameRateMiddle1;
+ } else if (avg_framerate < kHighFrameRate) {
+ return kFrameRateMiddle2;
} else {
- return kDefault;
+ return kFrameRateHigh;
}
}
@@ -179,7 +183,6 @@
void VCMQmResolution::Reset() {
target_bitrate_ = 0.0f;
- user_framerate_ = 0.0f;
incoming_framerate_ = 0.0f;
buffer_level_ = 0.0f;
per_frame_bandwidth_ = 0.0f;
@@ -212,23 +215,26 @@
}
Reset();
target_bitrate_ = bitrate;
- user_framerate_ = user_framerate;
incoming_framerate_ = user_framerate;
- UpdateCodecFrameSize(width, height);
+ UpdateCodecParameters(user_framerate, width, height);
native_width_ = width;
native_height_ = height;
+ native_frame_rate_ = user_framerate;
num_layers_ = num_layers;
// Initial buffer level.
buffer_level_ = kOptBufferLevel * target_bitrate_;
// Per-frame bandwidth.
- per_frame_bandwidth_ = target_bitrate_ / user_framerate_;
+ per_frame_bandwidth_ = target_bitrate_ / user_framerate;
init_ = true;
return VCM_OK;
}
-void VCMQmResolution::UpdateCodecFrameSize(uint16_t width, uint16_t height) {
+void VCMQmResolution::UpdateCodecParameters(float frame_rate, uint16_t width,
+ uint16_t height) {
width_ = width;
height_ = height;
+ // |user_frame_rate_| is the target frame rate for the VPM frame dropper.
+ user_frame_rate_ = frame_rate;
image_type_ = GetImageType(width, height);
}
@@ -256,11 +262,9 @@
float encoder_sent_rate,
float incoming_framerate,
uint8_t packet_loss) {
- // Sum the target bitrate and incoming frame rate:
- // these values are the encoder rates (from previous update ~1sec),
- // i.e, before the update for next ~1sec.
+ // Sum the target bitrate: this is the encoder rate from previous update
+ // (~1sec), i.e., before the update for the next ~1sec.
sum_target_rate_ += target_bitrate_;
- sum_incoming_framerate_ += incoming_framerate_;
update_rate_cnt_++;
// Sum the received (from RTCP reports) packet loss rates.
@@ -281,6 +285,7 @@
// these values are ones the encoder will use for the current/next ~1sec
target_bitrate_ = target_bitrate;
incoming_framerate_ = incoming_framerate;
+ sum_incoming_framerate_ += incoming_framerate_;
// Update the per_frame_bandwidth:
// this is the per_frame_bw for the current/next ~1sec
@@ -296,8 +301,8 @@
// In the current version the following constraints are imposed:
// 1) We only allow for one action, either down or up, at a given time.
-// 2) The possible down-sampling actions are: spatial 1/2x1/2, 3/4x3/4;
-// temporal 1/2 and 2/3.
+// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
+// temporal/frame rate reduction by 1/2 and 2/3.
// 3) The action for going back up is the reverse of last (spatial or temporal)
// down-sampling action. The list of down-sampling actions from the
// Initialize() state are kept in |down_action_history_|.
@@ -313,10 +318,6 @@
return VCM_OK;
}
- // Default settings: no action.
- SetDefaultAction();
- *qm = qm_;
-
// Compute content class for selection.
content_class_ = ComputeContentClass();
@@ -326,6 +327,10 @@
// Get the encoder state.
ComputeEncoderState();
+ // Default settings: no action.
+ SetDefaultAction();
+ *qm = qm_;
+
// Check for going back up in resolution, if we have had some down-sampling
// relative to native state in Initialize().
if (down_action_history_[0].spatial != kNoChangeSpatial ||
@@ -348,10 +353,14 @@
}
void VCMQmResolution::SetDefaultAction() {
+ qm_->codec_width = width_;
+ qm_->codec_height = height_;
+ qm_->frame_rate = user_frame_rate_;
+ qm_->change_resolution_spatial = false;
+ qm_->change_resolution_temporal = false;
qm_->spatial_width_fact = 1.0f;
qm_->spatial_height_fact = 1.0f;
qm_->temporal_fact = 1.0f;
- qm_->change_resolution = false;
action_.spatial = kNoChangeSpatial;
action_.temporal = kNoChangeTemporal;
}
@@ -385,7 +394,10 @@
(1.0 - kWeightRate) * target_bitrate_;
avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
(1.0 - kWeightRate) * incoming_framerate_;
- framerate_level_ = FrameRateLevel(avg_incoming_framerate_);
+ // Use base layer frame rate for temporal layers: this will favor spatial.
+ assert(num_layers_ > 0);
+ framerate_level_ = FrameRateLevel(
+ avg_incoming_framerate_ / static_cast<float>(1 << (num_layers_ - 1)));
}
void VCMQmResolution::ComputeEncoderState() {
@@ -461,6 +473,7 @@
float scale_fac) {
float estimated_transition_rate_up = GetTransitionRate(fac_width, fac_height,
fac_temp, scale_fac);
+
// Go back up if:
// 1) target rate is above threshold and current encoder state is stable, or
// 2) encoder state is easy (encoder is significantly under-shooting target).
@@ -527,11 +540,15 @@
}
}
- // TODO(marpan): If num_layers_ > 1, adjust/favor spatial over temporal ?
+ // Only allow for one action (spatial or temporal) at a given time.
+ assert(action_.temporal == kNoChangeTemporal ||
+ action_.spatial == kNoChangeSpatial);
// Adjust cases not captured in tables, mainly based on frame rate.
AdjustAction();
+ ConvertSpatialFractionalToWhole();
+
CheckForEvenFrameSize();
// Update down-sampling state.
@@ -552,8 +569,14 @@
static_cast<uint16_t>(fac_width * width_),
static_cast<uint16_t>(fac_height * height_));
- LevelClass framerate_level =
+ FrameRateLevelClass framerate_level =
FrameRateLevel(fac_temp * avg_incoming_framerate_);
+ // If we are checking for going up temporally, and this is the last
+ // temporal action, then use native frame rate.
+ if (down_action_history_[1].temporal == kNoChangeTemporal &&
+ fac_temp > 1.0f) {
+ framerate_level = FrameRateLevel(native_frame_rate_);
+ }
// The maximum allowed rate below which down-sampling is allowed:
// Nominal values based on image format (frame size and frame rate).
@@ -570,7 +593,6 @@
}
void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
- qm_->change_resolution = true;
if (up_down == kUpResolution) {
qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
@@ -587,9 +609,9 @@
// has been selected.
assert(false);
}
+ UpdateCodecResolution();
state_dec_factor_spatial_ = state_dec_factor_spatial_ *
- qm_->spatial_width_fact *
- qm_->spatial_height_fact;
+ qm_->spatial_width_fact * qm_->spatial_height_fact;
state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
assert(state_dec_factor_spatial_ >= 1.0f);
assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
@@ -597,6 +619,34 @@
assert(state_dec_factor_temporal_ <= kMaxTempDown);
}
+void VCMQmResolution::UpdateCodecResolution() {
+ if (action_.spatial != kNoChangeSpatial) {
+ qm_->change_resolution_spatial = true;
+ qm_->codec_width = static_cast<uint16_t>(width_ /
+ qm_->spatial_width_fact + 0.5f);
+ qm_->codec_height = static_cast<uint16_t>(height_ /
+ qm_->spatial_height_fact + 0.5f);
+ // Size can never exceed native sizes.
+ assert(qm_->codec_width <= native_width_);
+ assert(qm_->codec_height <= native_height_);
+ // Size should be multiple of 2.
+ assert(qm_->codec_width % 2 == 0);
+ assert(qm_->codec_height % 2 == 0);
+ }
+ if (action_.temporal != kNoChangeTemporal) {
+ qm_->change_resolution_temporal = true;
+ // Update the frame rate based on the average incoming frame rate.
+ qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
+ if (down_action_history_[0].temporal == kNoChangeTemporal) {
+ // When we undo the last temporal-down action, make sure we go back up
+ // to the native frame rate. Since the incoming frame rate may
+ // fluctuate over time, |avg_incoming_framerate_| scaled back up may
+ // be smaller than |native_frame_rate_|.
+ qm_->frame_rate = native_frame_rate_;
+ }
+ }
+}
+
uint8_t VCMQmResolution::RateClass(float transition_rate) {
return avg_target_rate_ < (kFacLowRate * transition_rate) ? 0:
(avg_target_rate_ >= transition_rate ? 2 : 1);
@@ -607,17 +657,67 @@
// is not high, then safer to take frame rate reduction if the
// average incoming frame rate is high.
if (spatial_.level == kDefault && motion_.level != kHigh &&
- framerate_level_ == kHigh) {
+ framerate_level_ == kFrameRateHigh) {
action_.spatial = kNoChangeSpatial;
action_.temporal = kOneHalfTemporal;
}
- // If both motion and spatial level are low, and temporal down-action
+ // If both motion and spatial level are low, and temporal down action
// was selected, switch to spatial 3/4x3/4 if the frame rate is low.
if (motion_.level == kLow && spatial_.level == kLow &&
- framerate_level_ == kLow && action_.temporal != kNoChangeTemporal) {
+ framerate_level_ == kFrameRateLow &&
+ action_.temporal != kNoChangeTemporal) {
action_.spatial = kOneHalfSpatialUniform;
action_.temporal = kNoChangeTemporal;
}
+
+ // If too much spatial action, and temporal action has not yet been chosen,
+ // then change to temporal action if the average frame rate is not low.
+ if (action_.spatial == kOneQuarterSpatialUniform &&
+ down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
+ down_action_history_[0].temporal == kNoChangeTemporal &&
+ framerate_level_ != kFrameRateLow) {
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = kOneHalfTemporal;
+ }
+
+ // Never use temporal action if number of temporal layers is above 2.
+ if (num_layers_ > 2) {
+ if (action_.temporal != kNoChangeTemporal) {
+ action_.spatial = kOneHalfSpatialUniform;
+ }
+ action_.temporal = kNoChangeTemporal;
+ }
+}
+
+void VCMQmResolution::ConvertSpatialFractionalToWhole() {
+ // If 3/4 spatial is selected, check if there has been another 3/4,
+ // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
+ // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
+ if (action_.spatial == kOneHalfSpatialUniform) {
+ bool found = false;
+ int isel = kDownActionHistorySize;
+ for (int i = 0; i < kDownActionHistorySize; ++i) {
+ if (down_action_history_[i].spatial == kOneHalfSpatialUniform) {
+ isel = i;
+ found = true;
+ break;
+ }
+ }
+ if (found) {
+ // Update state for removing 3/4 spatial.
+ state_dec_factor_spatial_ = state_dec_factor_spatial_ /
+ (kFactorWidthSpatial[kOneHalfSpatialUniform] *
+ kFactorHeightSpatial[kOneHalfSpatialUniform]);
+ width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
+ height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
+ // Remove 3/4 from the history.
+ for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
+ down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
+ }
+ // Update current selection action to be 1/2x1/2 (=1/4) spatial.
+ action_.spatial = kOneQuarterSpatialUniform;
+ }
+ }
}
void VCMQmResolution::CheckForEvenFrameSize() {
@@ -669,11 +769,11 @@
// No spatial sampling if current frame size is too small (QCIF),
// or if the amount of spatial down-sampling will be too much.
float new_dec_factor_spatial = state_dec_factor_spatial_ *
- qm_->spatial_width_fact *
- qm_->spatial_height_fact;
+ qm_->spatial_width_fact * qm_->spatial_height_fact;
if ((width_ * height_) <= kMinImageSize ||
new_dec_factor_spatial > kMaxSpatialDown) {
action_.spatial = kNoChangeSpatial;
+ qm_->change_resolution_spatial = false;
qm_->spatial_width_fact = 1.0f;
qm_->spatial_height_fact = 1.0f;
}
@@ -683,6 +783,7 @@
if (avg_incoming_framerate_ <= kMinFrameRate ||
new_dec_factor_temp >= kMaxTempDown) {
action_.temporal = kNoChangeTemporal;
+ qm_->change_resolution_temporal = false;
qm_->temporal_fact = 1.0f;
}
}
@@ -698,7 +799,7 @@
}
}
-// TODO(marpan): Update this when we allow for 1/2 spatial down-sampling.
+// TODO(marpan): Update when we allow for directional spatial down-sampling.
void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
// Default is 4/3x4/3
// For bit rates well below transitional rate, we select 2x2.
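
A note on the frame-rate arithmetic introduced above: UpdateCodecResolution() computes qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f, which is where the fractional expectations in qm_select_unittest.cc below come from:

    // Worked examples, assuming an averaged incoming rate of 30 fps:
    //   2/3 temporal action (temporal_fact = 1.5f): 30.0f / 1.5f + 0.5f == 20.5f
    //   1/2 temporal action (temporal_fact = 2.0f): 30.0f / 2.0f + 0.5f == 15.5f
    // Undoing the last temporal down-action bypasses this formula and
    // restores |native_frame_rate_| directly.
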
diff --git a/modules/video_coding/main/source/qm_select.h b/modules/video_coding/main/source/qm_select.h
index b76e32a..f5b9d21 100644
--- a/modules/video_coding/main/source/qm_select.h
+++ b/modules/video_coding/main/source/qm_select.h
@@ -23,15 +23,23 @@
struct VCMResolutionScale {
VCMResolutionScale()
- : spatial_width_fact(1.0f),
+ : codec_width(640),
+ codec_height(480),
+ frame_rate(30.0f),
+ spatial_width_fact(1.0f),
spatial_height_fact(1.0f),
temporal_fact(1.0f),
- change_resolution(false) {
+ change_resolution_spatial(false),
+ change_resolution_temporal(false) {
}
+ uint16_t codec_width;
+ uint16_t codec_height;
+ float frame_rate;
float spatial_width_fact;
float spatial_height_fact;
float temporal_fact;
- bool change_resolution;
+ bool change_resolution_spatial;
+ bool change_resolution_temporal;
};
enum ImageType {
@@ -50,7 +58,14 @@
const uint32_t kSizeOfImageType[kNumImageTypes] =
{ 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600 };
-enum LevelClass {
+enum FrameRateLevelClass {
+ kFrameRateLow,
+ kFrameRateMiddle1,
+ kFrameRateMiddle2,
+ kFrameRateHigh
+};
+
+enum ContentLevelClass {
kLow,
kHigh,
kDefault
@@ -66,7 +81,7 @@
level = kDefault;
}
float value;
- LevelClass level;
+ ContentLevelClass level;
};
enum UpDownAction {
@@ -146,7 +161,7 @@
ImageType FindClosestImageType(uint16_t width, uint16_t height);
// Get the frame rate level.
- LevelClass FrameRateLevel(float frame_rate);
+ FrameRateLevelClass FrameRateLevel(float frame_rate);
protected:
// Content Data.
@@ -155,12 +170,14 @@
// Encoder frame sizes and native frame sizes.
uint16_t width_;
uint16_t height_;
+ float user_frame_rate_;
uint16_t native_width_;
uint16_t native_height_;
+ float native_frame_rate_;
float aspect_ratio_;
// Image type and frame rate level, for the current encoder resolution.
ImageType image_type_;
- LevelClass framerate_level_;
+ FrameRateLevelClass framerate_level_;
// Content class data.
VCMContFeature motion_;
VCMContFeature spatial_;
@@ -195,7 +212,7 @@
int num_layers);
// Update the encoder frame size.
- void UpdateCodecFrameSize(uint16_t width, uint16_t height);
+ void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
// Update with actual bit rate (size of the latest encoded frame)
// and frame type, after every encoded frame.
@@ -214,6 +231,7 @@
// Output: the spatial and/or temporal scale change.
int SelectResolution(VCMResolutionScale** qm);
+ private:
// Set the default resolution action.
void SetDefaultAction();
@@ -248,12 +266,18 @@
// Update the down-sampling state.
void UpdateDownsamplingState(UpDownAction up_down);
+ // Update the codec frame size and frame rate.
+ void UpdateCodecResolution();
+
// Return a state based on average target rate relative transition rate.
uint8_t RateClass(float transition_rate);
// Adjust the action selected from the table.
void AdjustAction();
+ // Convert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
+ void ConvertSpatialFractionalToWhole();
+
// Check if the new frame sizes are still divisible by 2.
void CheckForEvenFrameSize();
@@ -273,13 +297,11 @@
// Select the directional (1x2 or 2x1) spatial down-sampling action.
void SelectSpatialDirectionMode(float transition_rate);
- private:
enum { kDownActionHistorySize = 10};
VCMResolutionScale* qm_;
// Encoder rate control parameters.
float target_bitrate_;
- float user_framerate_;
float incoming_framerate_;
float per_frame_bandwidth_;
float buffer_level_;
diff --git a/modules/video_coding/main/source/qm_select_data.h b/modules/video_coding/main/source/qm_select_data.h
index ec1e988..1c9858d 100644
--- a/modules/video_coding/main/source/qm_select_data.h
+++ b/modules/video_coding/main/source/qm_select_data.h
@@ -53,7 +53,6 @@
// Factor for reducing transitional bitrate under packet loss.
const float kPacketLossRateFac = 1.0f;
-
// Maximum possible transitional rate for down-sampling:
// (units in kbps), for 30fps.
const uint16_t kMaxRateQm[9] = {
@@ -69,10 +68,11 @@
};
// Frame rate scale for maximum transition rate.
-const float kFrameRateFac[3] = {
- 0.7f, // L
- 1.0f, // H
- 0.8f // D
+const float kFrameRateFac[4] = {
+ 0.5f, // Low
+ 0.7f, // Middle level 1
+ 0.85f, // Middle level 2
+ 1.0f, // High
};
// Scale for transitional rate: based on content class
@@ -180,7 +180,7 @@
// Control the total amount of down-sampling allowed.
const float kMaxSpatialDown = 8.0f;
const float kMaxTempDown = 4.0f;
-const float kMaxDownSample = 16.0f;
+const float kMaxDownSample = 12.0f;
// Minimum image size for a spatial down-sampling.
const int kMinImageSize = 176 * 144;
@@ -199,6 +199,7 @@
// Thresholds for frame rate:
const int kLowFrameRate = 10;
+const int kMiddleFrameRate = 15;
const int kHighFrameRate = 25;
// Thresholds for motion: motion level is from NFD
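
Combined with FrameRateLevel() in qm_select.cc above, these thresholds now partition the frame rate into four bands, each scaling the maximum transition rate through kFrameRateFac:

    // frame rate f (fps)    FrameRateLevelClass    kFrameRateFac scale
    //   f < 10              kFrameRateLow          0.5
    //   10 <= f < 15        kFrameRateMiddle1      0.7
    //   15 <= f < 25        kFrameRateMiddle2      0.85
    //   f >= 25             kFrameRateHigh         1.0
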
diff --git a/modules/video_coding/main/source/qm_select_unittest.cc b/modules/video_coding/main/source/qm_select_unittest.cc
index 5b670fb..f6990a6 100644
--- a/modules/video_coding/main/source/qm_select_unittest.cc
+++ b/modules/video_coding/main/source/qm_select_unittest.cc
@@ -62,7 +62,10 @@
bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
float fac_width,
float fac_height,
- float fac_temp);
+ float fac_temp,
+ uint16_t new_width,
+ uint16_t new_height,
+ float new_frame_rate);
void TearDown() {
delete qm_resolution_;
@@ -84,7 +87,8 @@
qm_resolution_->UpdateContent(content_metrics);
// Content metrics are NULL: Expect success and no down-sampling action.
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480,
+ 30.0f));
}
// No down-sampling action at high rates.
@@ -95,7 +99,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -111,7 +115,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 30.0f));
}
// Rate is well below transition, down-sampling action is taken,
@@ -123,7 +128,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -140,35 +145,40 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, low spatial: 2/3 temporal is expected.
UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+ 20.5f));
qm_resolution_->ResetDownSamplingState();
// Medium motion, low spatial: 2x2 spatial expected.
UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
qm_resolution_->ResetDownSamplingState();
- // High motion, high spatial: 1/2 temporal expected.
+ // High motion, high spatial: 2/3 temporal expected.
UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+ 20.5f));
qm_resolution_->ResetDownSamplingState();
// Low motion, high spatial: 1/2 temporal expected.
UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
qm_resolution_->ResetDownSamplingState();
// Medium motion, high spatial: 1/2 temporal expected.
@@ -176,7 +186,8 @@
kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
qm_resolution_->ResetDownSamplingState();
// High motion, medium spatial: 2x2 spatial expected.
@@ -184,7 +195,9 @@
kSpatialMedium);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ // Target frame rate for frame dropper should be the same as previous == 15.
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
@@ -192,7 +205,8 @@
kSpatialMedium);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
qm_resolution_->ResetDownSamplingState();
// Medium motion, medium spatial: high frame rate, so 1/2 temporal expected.
@@ -200,7 +214,8 @@
kSpatialMedium);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
}
// Rate mis-match is high, and we have over-shooting.
@@ -212,7 +227,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -230,14 +245,15 @@
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f));
+ 1.0f, 480, 360, 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, high spatial
UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+ 20.5f));
}
// Rate mis-match is high, target rate is below max for down-sampling,
@@ -249,7 +265,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -266,14 +282,16 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, high spatial
UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 30.0f));
}
// Buffer is underflowing, and target rate is below max for down-sampling,
@@ -285,7 +303,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update with encoded size over a number of frames.
@@ -308,14 +326,15 @@
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f));
+ 1.0f, 480, 360, 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, high spatial
UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+ 20.5f));
}
// Target rate is below max for down-sampling, but buffer level is stable,
@@ -327,7 +346,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update with encoded size over a number of frames.
@@ -349,14 +368,16 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 30.0f));
qm_resolution_->ResetDownSamplingState();
// Low motion, high spatial
UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 30.0f));
}
// Very low rate, but no spatial down-sampling below some size (QCIF).
@@ -367,7 +388,7 @@
// Update with encoder frame size.
uint16_t codec_width = 176;
uint16_t codec_height = 144;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -384,7 +405,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144,
+ 30.0f));
}
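
For orientation, the GetImageType checks in these tests pin down a resolution-to-bucket mapping. The sketch below is reconstructed purely from the assertions in this file (176x144 -> 0, 240x180 -> 1, 320x240 -> 2, 480x270 -> 3, 480x360 and 640x360 -> 4, 640x480 -> 5, 1280x720 -> 7); the bucket boundaries are assumptions, and type 6 never appears here:

#include <stdint.h>

int ExpectedImageType(uint16_t width, uint16_t height) {
  const uint32_t pixels = static_cast<uint32_t>(width) * height;
  if (pixels <= 176u * 144u) return 0;  // QCIF
  if (pixels <= 240u * 180u) return 1;
  if (pixels <= 320u * 240u) return 2;  // QVGA
  if (pixels <= 480u * 270u) return 3;
  if (pixels <= 640u * 360u) return 4;  // covers both 480x360 and 640x360
  if (pixels <= 640u * 480u) return 5;  // VGA
  return 7;  // 1280x720 in these tests; type 6 is unobserved here
}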
// Very low rate, but no frame reduction below some frame_rate (8fps).
@@ -395,7 +417,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -413,7 +435,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 8.0f));
}
// Two stages: spatial down-sample and then back up spatially,
@@ -425,7 +448,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -442,11 +465,12 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
// Reset and go up in rate: expected to go back up.
qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecFrameSize(320, 240);
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
// Update rates for a sequence of intervals.
int target_rate2[] = {400, 400, 400, 400, 400};
@@ -457,7 +481,8 @@
fraction_lost2, 5);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f, 640, 480,
+ 30.0f));
}
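
The width/height expectations follow directly from the spatial factors: a factor above 1.0f shrinks the frame, and a factor below 1.0f undoes a previous shrink. A sketch of the arithmetic these checks encode:

#include <stdint.h>

uint16_t NewDimension(uint16_t current, float spatial_fact) {
  // 640 / 2.0f = 320 (1/2x1/2 down), 320 / 0.5f = 640 (back up),
  // 640 / (4.0f / 3.0f) = 480 (3/4x3/4 down).
  return static_cast<uint16_t>(current / spatial_fact);
}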
// Two stages: spatial down-sample and then back up spatially, since encoder
@@ -469,7 +494,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -486,11 +511,12 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
// Reset rates and simulate under-shooting scenario: expect to go back up.
qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecFrameSize(320, 240);
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
// Update rates for a sequence of intervals.
int target_rate2[] = {200, 200, 200, 200, 200};
@@ -501,7 +527,8 @@
fraction_lost2, 5);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f, 640, 480,
+ 30.0f));
}
// Two stages: spatial down-sample and then no action to go up,
@@ -513,7 +540,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -530,11 +557,12 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
// Reset and simulate large rate mis-match: expect no action to go back up.
qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecFrameSize(320, 240);
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
// Update rates for a sequence of intervals.
int target_rate2[] = {400, 400, 400, 400, 400};
@@ -545,8 +573,10 @@
fraction_lost2, 5);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240,
+ 30.0f));
}
+
// Two stages: temporally down-sample and then back up temporally,
// as rate as increased.
TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporal) {
@@ -556,91 +586,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {100, 100, 100};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
-
- // Reset rates and go up in rate: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f));
-}
-
-// Two stages: temporal down-sample and then back up temporally, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporalUndershoot) {
- // Initialize with bitrate, frame rate, and native system width/height.
- InitQmNativeData(100, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {100, 100, 100};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
-
- // Reset rates and simulate under-shooting scenario.: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {200, 200, 200, 200, 200};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f));
-}
-
-// Two stages: temporal down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
- // Initialize with bitrate, frame rate, and native system width/height.
- InitQmNativeData(100, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -657,9 +603,98 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
+
+ // Reset rates and go up in rate: expect to go back up.
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {400, 400, 400, 400, 400};
+ int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+ int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
+ 30.0f));
+}
+
+// Two stages: temporal down-sample and then back up temporally, since encoder
+// is under-shooting target even though rate has not increased much.
+TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporalUndershoot) {
+ // Initialize with bitrate, frame rate, and native system width/height.
+ InitQmNativeData(100, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {100, 100, 100};
+ int encoder_sent_rate[] = {100, 100, 100};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 15.5f));
+
+ // Reset rates and simulate under-shooting scenario: expect to go back up.
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {200, 200, 200, 200, 200};
+ int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+ int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
+ 30.0f));
+}
+
+// Two stages: temporal down-sample and then no action to go up,
+// as encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
+ // Initialize with bitrate, frame rate, and native system width/height.
+ InitQmNativeData(100, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {100, 100, 100};
+ int encoder_sent_rate[] = {100, 100, 100};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
// Reset and simulate large rate mis-match: expect no action to go back up.
+ qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
qm_resolution_->ResetRates();
// Update rates for a sequence of intervals.
int target_rate2[] = {600, 600, 600, 600, 600};
@@ -670,7 +705,8 @@
fraction_lost2, 5);
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+ 15.0f));
}
// 3 stages: spatial down-sample, followed by temporal down-sample,
// and then go up to full state, as encoding rate has increased.
@@ -681,7 +717,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -698,10 +734,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
// Change content data: expect temporal down-sample.
- qm_resolution_->UpdateCodecFrameSize(320, 240);
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
// Update content: motion level, and 3 spatial prediction errors.
@@ -710,7 +747,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 320, 240,
+ 15.5f));
// Reset rates and go high up in rate: expect to go back up both spatially
// and temporally.
@@ -726,7 +764,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 0.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 0.5f, 640, 480,
+ 30.0f));
}
// No down-sampling below some total amount.
@@ -737,7 +776,7 @@
// Update with encoder frame size.
uint16_t codec_width = 1280;
uint16_t codec_height = 720;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
// Update rates for a sequence of intervals.
@@ -754,11 +793,12 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360,
+ 30.0f));
// Reset and lower rates to get another spatial action (3/4x3/4)
qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecFrameSize(640, 360);
+ qm_resolution_->UpdateCodecParameters(30.0f, 640, 360);
EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
// Update rates for a sequence of intervals.
int target_rate2[] = {80, 80, 80, 80, 80};
@@ -776,13 +816,13 @@
EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f));
+ 1.0f, 480, 270, 30.0f));
// Reset and go to very low rate: no action should be taken,
// we went down too much already.
qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecFrameSize(320, 180);
- EXPECT_EQ(1, qm_resolution_->GetImageType(320, 180));
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 270);
+ EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
// Update rates for a sequence of intervals.
int target_rate3[] = {10, 10, 10, 10, 10};
int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
@@ -793,7 +833,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270,
+ 30.0f));
}
// Multiple down-sampling stages and then undo all of them.
@@ -807,7 +848,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Go down spatial 3/4x3/4.
@@ -826,9 +867,9 @@
EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f));
+ 1.0f, 480, 360, 30.0f));
// Go down 1/2 temporal.
- qm_resolution_->UpdateCodecFrameSize(480, 360);
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
qm_resolution_->ResetRates();
int target_rate2[] = {100, 100, 100, 100, 100};
@@ -844,9 +885,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 480, 360,
+ 15.5f));
// Go down 1/2x1/2 spatial.
+ qm_resolution_->UpdateCodecParameters(15.0f, 480, 360);
qm_resolution_->ResetRates();
int target_rate3[] = {50, 50, 50, 50, 50};
int encoder_sent_rate3[] = {50, 50, 50, 50, 50};
@@ -861,14 +904,15 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 240, 180,
+ 15.0f));
// Reset rates and go high up in rate: expect to go up:
// should go up first: 1/2x1/2 and 1/2 temporally,
// and second: 3/4x3/4 spatial.
// Go up 1/2x1/2 spatially and 1/2 temporally
- qm_resolution_->UpdateCodecFrameSize(240, 180);
+ qm_resolution_->UpdateCodecParameters(15.0f, 240, 180);
EXPECT_EQ(1, qm_resolution_->GetImageType(240, 180));
qm_resolution_->ResetRates();
// Update rates for a sequence of intervals.
@@ -882,10 +926,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 0.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 0.5f, 480, 360,
+ 30.0f));
// Go up 3/4x3/4 spatially.
- qm_resolution_->UpdateCodecFrameSize(480, 360);
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
qm_resolution_->ResetRates();
// Update rates for a sequence of intervals.
@@ -900,7 +945,7 @@
EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f,
- 1.0f));
+ 1.0f, 640, 480, 30.0f));
}
// Multiple down-sampling and up-sample stages, with partial undoing.
@@ -914,7 +959,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Go down 1/2x1/2 spatial.
@@ -932,10 +977,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
// Go down 2/3 temporal.
- qm_resolution_->UpdateCodecFrameSize(320, 240);
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
qm_resolution_->ResetRates();
int target_rate2[] = {80, 80, 80, 80, 80};
@@ -952,9 +998,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
+ 20.5f));
// Go up 1/2x1/2 spatially.
+ qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
qm_resolution_->ResetRates();
// Update rates for a sequence of intervals.
int target_rate3[] = {300, 300, 300, 300, 300};
@@ -967,10 +1015,11 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 0.5f, 0.5f, 1.0f, 640, 480,
+ 20.0f));
// Go down 1/2 temporal.
- qm_resolution_->UpdateCodecFrameSize(640, 480);
+ qm_resolution_->UpdateCodecParameters(20.0f, 640, 480);
EXPECT_EQ(5, qm_resolution_->GetImageType(640, 480));
qm_resolution_->ResetRates();
int target_rate4[] = {100, 100, 100, 100, 100};
@@ -986,7 +1035,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+ 10.5f));
// Go up 1/2 temporal.
qm_resolution_->ResetRates();
@@ -1001,7 +1051,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
+ 20.5f));
}
// Multiple down-sampling and up-sample stages, with partial undoing.
@@ -1015,7 +1066,7 @@
// Update with encoder frame size.
uint16_t codec_width = 640;
uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecFrameSize(codec_width, codec_height);
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
// Go down 3/4x3/4 spatial.
@@ -1034,10 +1085,10 @@
EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f));
+ 1.0f, 480, 360, 30.0f));
// Go down 1/2 temporal.
- qm_resolution_->UpdateCodecFrameSize(480, 360);
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
qm_resolution_->ResetRates();
int target_rate2[] = {100, 100, 100, 100, 100};
@@ -1053,7 +1104,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 480, 360,
+ 15.5f));
// Go up 1/2 temporal.
qm_resolution_->ResetRates();
@@ -1068,15 +1120,16 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 480, 360,
+ 30.0f));
// Go down 2/3 temporal.
- qm_resolution_->UpdateCodecFrameSize(640, 480);
+ qm_resolution_->UpdateCodecParameters(30.0f, 640, 480);
EXPECT_EQ(5, qm_resolution_->GetImageType(640, 480));
qm_resolution_->ResetRates();
- int target_rate4[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate4[] = {150, 150, 150, 150, 150};
- int incoming_frame_rate4[] = {20, 20, 20, 20, 20};
+ int target_rate4[] = {200, 200, 200, 200, 200};
+ int encoder_sent_rate4[] = {200, 200, 200, 200, 200};
+ int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
fraction_lost4, 5);
@@ -1088,7 +1141,8 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+ 20.5f));
// Go up 2/3 temporal.
qm_resolution_->ResetRates();
@@ -1103,7 +1157,58 @@
EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 640,
+ 480, 30.0f));
+}
+
+// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
+TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
+ // Initialize with bitrate, frame rate, and native system width/height.
+ InitQmNativeData(200, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Go down 3/4x3/4 spatial.
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {200, 200, 200};
+ int encoder_sent_rate[] = {200, 200, 200};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+ 1.0f, 480, 360, 30.0f));
+
+ // Set rates to go down another 3/4 spatial. Should be converted to 1/2.
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {150, 150, 150, 150, 150};
+ int encoder_sent_rate2[] = {150, 150, 150, 150, 150};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+ 30.0f));
}
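
The conversion asserted above is a quantization choice rather than exact arithmetic: two 3/4 stages give a combined per-dimension factor of (3/4)^2 = 9/16 = 0.5625, which the selector evidently snaps to a single 1/2x1/2 action, hence the 2.0f factors and the 320x240 target against the original 640x480:

const float kTwoThreeQuarterStages = (3.0f / 4.0f) * (3.0f / 4.0f);  // 0.5625
// Snapped to 0.5: 640 * 0.5f = 320 and 480 * 0.5f = 240, as expected above.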
void QmSelectTest::InitQmNativeData(float initial_bit_rate,
@@ -1160,10 +1265,16 @@
bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
float fac_width,
float fac_height,
- float fac_temp) {
+ float fac_temp,
+ uint16_t new_width,
+ uint16_t new_height,
+ float new_frame_rate) {
if (qm_scale->spatial_width_fact == fac_width &&
qm_scale->spatial_height_fact == fac_height &&
- qm_scale->temporal_fact == fac_temp) {
+ qm_scale->temporal_fact == fac_temp &&
+ qm_scale->codec_width == new_width &&
+ qm_scale->codec_height == new_height &&
+ qm_scale->frame_rate == new_frame_rate) {
return true;
} else {
return false;
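
Note that the extended helper compares its float fields with exact ==. That presumably works because both sides are built from the same literal expressions (0.5f, 4.0f / 3.0f, 15.5f and so on); if the expected values ever came from independent arithmetic, a tolerance-based comparison would be safer. A sketch, with kEps an arbitrary illustrative choice:

#include <cmath>

bool AlmostEqual(float a, float b) {
  const float kEps = 1e-4f;  // Arbitrary tolerance for this sketch.
  return std::fabs(a - b) <= kEps;
}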
diff --git a/modules/video_coding/main/test/mt_rx_tx_test.cc b/modules/video_coding/main/test/mt_rx_tx_test.cc
index d0a39ad..dc20b32 100644
--- a/modules/video_coding/main/test/mt_rx_tx_test.cc
+++ b/modules/video_coding/main/test/mt_rx_tx_test.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -235,9 +235,10 @@
vcm->SetVideoProtection(kProtectionFEC, fecEnabled);
// inform RTP Module of error resilience features
- rtp->SetFECCodeRate(protectionCallback.FECKeyRate(),
- protectionCallback.FECDeltaRate());
- rtp->SetNACKStatus(protectionCallback.NACKMethod());
+ FecProtectionParams delta_params = protectionCallback.DeltaFecParameters();
+ FecProtectionParams key_params = protectionCallback.KeyFecParameters();
+ rtp->SetFecParameters(&delta_params, &key_params);
+ rtp->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff);
vcm->SetChannelParameters((WebRtc_UWord32) bitRate,
(WebRtc_UWord8) lossRate, rttMS);
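
This hunk replaces the per-rate setters (SetFECCodeRate plus the NACK method fetched from the callback) with the struct-based SetFecParameters API. A sketch of the new call pattern, using only the calls that appear in this patch:

// Fetch whole parameter structs from the protection callback, then hand
// both to the RTP module in one call.
FecProtectionParams delta_params = protectionCallback.DeltaFecParameters();
FecProtectionParams key_params = protectionCallback.KeyFecParameters();
if (rtp->SetFecParameters(&delta_params, &key_params) != 0) {
  printf("Error in setting FEC parameters\n");
}
rtp->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff);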
diff --git a/modules/video_coding/main/test/test_callbacks.cc b/modules/video_coding/main/test/test_callbacks.cc
index 1ec0e59..1075aae 100644
--- a/modules/video_coding/main/test/test_callbacks.cc
+++ b/modules/video_coding/main/test/test_callbacks.cc
@@ -412,13 +412,11 @@
VideoProtectionCallback::VideoProtectionCallback():
-_deltaFECRate(0),
-_keyFECRate(0),
-_deltaUseUepProtection(0),
-_keyUseUepProtection(0),
-_nack(kNackOff)
+delta_fec_params_(),
+key_fec_params_()
{
- //
+ memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+ memset(&key_fec_params_, 0, sizeof(key_fec_params_));
}
VideoProtectionCallback::~VideoProtectionCallback()
@@ -427,72 +425,36 @@
}
WebRtc_Word32
-VideoProtectionCallback::ProtectionRequest(WebRtc_UWord8 deltaFECRate,
- WebRtc_UWord8 keyFECRate,
- bool deltaUseUepProtection,
- bool keyUseUepProtection,
- bool nack_enabled,
- WebRtc_UWord32* sent_video_rate_bps,
- WebRtc_UWord32* sent_nack_rate_bps,
- WebRtc_UWord32* sent_fec_rate_bps)
+VideoProtectionCallback::ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ WebRtc_UWord32* sent_video_rate_bps,
+ WebRtc_UWord32* sent_nack_rate_bps,
+ WebRtc_UWord32* sent_fec_rate_bps)
{
- _deltaFECRate = deltaFECRate;
- _keyFECRate = keyFECRate;
- _deltaUseUepProtection = deltaUseUepProtection;
- _keyUseUepProtection = keyUseUepProtection;
- if (nack_enabled)
- {
- _nack = kNackRtcp;
- }
- else
- {
- _nack = kNackOff;
- }
+ key_fec_params_ = *key_fec_params;
+ delta_fec_params_ = *delta_fec_params;
// Update RTP
- if (_rtp->SetFECCodeRate(keyFECRate, deltaFECRate) != 0)
+ if (_rtp->SetFecParameters(&delta_fec_params_,
+ &key_fec_params_) != 0)
{
printf("Error in Setting FEC rate\n");
return -1;
}
- if (_rtp->SetFECUepProtection(keyUseUepProtection,
- deltaUseUepProtection) != 0)
- {
- printf("Error in Setting FEC UEP protection\n");
- return -1;
- }
return 0;
}
-NACKMethod
-VideoProtectionCallback::NACKMethod()
+
+FecProtectionParams VideoProtectionCallback::DeltaFecParameters() const
{
- return _nack;
+ return delta_fec_params_;
}
-WebRtc_UWord8
-VideoProtectionCallback::FECDeltaRate()
+FecProtectionParams VideoProtectionCallback::KeyFecParameters() const
{
- return _deltaFECRate;
-}
-
-WebRtc_UWord8
-VideoProtectionCallback::FECKeyRate()
-{
- return _keyFECRate;
-}
-
-bool
-VideoProtectionCallback::FECDeltaUepProtection()
-{
- return _deltaUseUepProtection;
-}
-
-bool
-VideoProtectionCallback::FECKeyUepProtection()
-{
- return _keyUseUepProtection;
+ return key_fec_params_;
}
void
diff --git a/modules/video_coding/main/test/test_callbacks.h b/modules/video_coding/main/test/test_callbacks.h
index 07820bb..5c67c4c 100644
--- a/modules/video_coding/main/test/test_callbacks.h
+++ b/modules/video_coding/main/test/test_callbacks.h
@@ -235,26 +235,18 @@
VideoProtectionCallback();
virtual ~VideoProtectionCallback();
void RegisterRtpModule(RtpRtcp* rtp) {_rtp = rtp;}
- WebRtc_Word32 ProtectionRequest(WebRtc_UWord8 deltaFECRate,
- WebRtc_UWord8 keyFECRate,
- bool deltaUseUepProtection,
- bool keyUseUepProtection,
- bool nack_enabled,
- WebRtc_UWord32* sent_video_rate_bps,
- WebRtc_UWord32* sent_nack_rate_bps,
- WebRtc_UWord32* sent_fec_rate_bps);
- enum NACKMethod NACKMethod();
- WebRtc_UWord8 FECDeltaRate();
- WebRtc_UWord8 FECKeyRate();
- bool FECDeltaUepProtection();
- bool FECKeyUepProtection();
+ WebRtc_Word32 ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ WebRtc_UWord32* sent_video_rate_bps,
+ WebRtc_UWord32* sent_nack_rate_bps,
+ WebRtc_UWord32* sent_fec_rate_bps);
+ FecProtectionParams DeltaFecParameters() const;
+ FecProtectionParams KeyFecParameters() const;
private:
- RtpRtcp* _rtp;
- WebRtc_UWord8 _deltaFECRate;
- WebRtc_UWord8 _keyFECRate;
- bool _deltaUseUepProtection;
- bool _keyUseUepProtection;
- enum NACKMethod _nack;
+ RtpRtcp* _rtp;
+ FecProtectionParams delta_fec_params_;
+ FecProtectionParams key_fec_params_;
};
// Feed back from the RTP Module callback
diff --git a/modules/video_render/main/source/Android.mk b/modules/video_render/main/source/Android.mk
index 73eec93..c11a89c 100644
--- a/modules/video_render/main/source/Android.mk
+++ b/modules/video_render/main/source/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -21,10 +21,10 @@
video_render_frames.cc \
video_render_impl.cc \
external/video_render_external_impl.cc \
- Android/video_render_android_impl.cc \
- Android/video_render_android_native_opengl2.cc \
- Android/video_render_android_surface_view.cc \
- Android/video_render_opengles20.cc
+ android/video_render_android_impl.cc \
+ android/video_render_android_native_opengl2.cc \
+ android/video_render_android_surface_view.cc \
+ android/video_render_opengles20.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
@@ -33,7 +33,7 @@
LOCAL_C_INCLUDES := \
$(LOCAL_PATH) \
- $(LOCAL_PATH)/Android \
+ $(LOCAL_PATH)/android \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../../../.. \
$(LOCAL_PATH)/../../../audio_coding/main/interface \
diff --git a/modules/video_render/main/source/Android/video_render_android_impl.cc b/modules/video_render/main/source/Android/video_render_android_impl.cc
deleted file mode 100644
index 8df91d2..0000000
--- a/modules/video_render/main/source/Android/video_render_android_impl.cc
+++ /dev/null
@@ -1,392 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "video_render_android_impl.h"
-
-#include "critical_section_wrapper.h"
-#include "event_wrapper.h"
-#include "thread_wrapper.h"
-#include "tick_util.h"
-
-#ifdef ANDROID_LOG
-#include <stdio.h>
-#include <android/log.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-#else
-#include "trace.h"
-#endif
-
-namespace webrtc {
-JavaVM* VideoRenderAndroid::g_jvm = NULL;
-
-WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
-
- g_jvm = (JavaVM*) javaVM;
-
- return 0;
-
-}
-
-VideoRenderAndroid::VideoRenderAndroid(
- const WebRtc_Word32 id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool /*fullscreen*/):
- _id(id),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderType(videoRenderType),
- _ptrWindow((jobject)(window)),
- _streamsMap(),
- _javaShutDownFlag(false),
- _javaShutdownEvent(*EventWrapper::Create()),
- _javaRenderEvent(*EventWrapper::Create()),
- _lastJavaRenderEvent(0),
- _javaRenderJniEnv(NULL),
- _javaRenderThread(NULL)
-{
-}
-
-VideoRenderAndroid::~VideoRenderAndroid()
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "VideoRenderAndroid dtor");
-
- if (_javaRenderThread)
- StopRender();
-
- for (MapItem* item = _streamsMap.First(); item != NULL; item
- = _streamsMap.Next(item))
- { // Delete streams
- delete static_cast<AndroidStream*> (item->GetItem());
- }
- delete &_javaShutdownEvent;
- delete &_javaRenderEvent;
- delete &_critSect;
-}
-
-WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id)
-{
- CriticalSectionScoped cs(&_critSect);
- _id = id;
-
- return 0;
-}
-
-WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/)
-{
- return -1;
-}
-
-VideoRenderCallback*
-VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
- const WebRtc_UWord32 zOrder,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
-
- AndroidStream* renderStream = NULL;
- MapItem* item = _streamsMap.Find(streamId);
- if (item)
- {
- renderStream = (AndroidStream*) (item->GetItem());
- if (NULL != renderStream)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
- "%s: Render stream already exists", __FUNCTION__);
- return renderStream;
- }
- }
-
- renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
- right, bottom, *this);
- if (renderStream)
- {
- _streamsMap.Insert(streamId, renderStream);
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return NULL;
- }
- return renderStream;
-}
-
-WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
- const WebRtc_UWord32 streamId)
-{
- CriticalSectionScoped cs(&_critSect);
-
- MapItem* item = _streamsMap.Find(streamId);
- if (item)
- {
- delete (AndroidStream*) item->GetItem();
- _streamsMap.Erase(streamId);
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return -1;
- }
- return 0;
-}
-
-WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
- const WebRtc_UWord32 streamId,
- WebRtc_UWord32& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
-
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::StartRender()
-{
- CriticalSectionScoped cs(&_critSect);
-
- if (_javaRenderThread)
- {
- // StartRender is called when this stream should start render.
- // However StopRender is not called when the streams stop rendering. Thus the the thread is only deleted when the renderer is removed.
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s, Render thread already exist", __FUNCTION__);
- return 0;
- }
-
- _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
- kRealtimePriority,
- "AndroidRenderThread");
- if (!_javaRenderThread)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No thread", __FUNCTION__);
- return -1;
- }
-
- unsigned int tId = 0;
- if (_javaRenderThread->Start(tId))
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s: thread started: %u", __FUNCTION__, tId);
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not start send thread", __FUNCTION__);
- return -1;
- }
- return 0;
-}
-
-WebRtc_Word32 VideoRenderAndroid::StopRender()
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
- {
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderThread)
- {
- return -1;
- }
- _javaShutDownFlag = true;
- _javaRenderEvent.Set();
- }
-
- _javaShutdownEvent.Wait(3000);
- CriticalSectionScoped cs(&_critSect);
- _javaRenderThread->SetNotAlive();
- if (_javaRenderThread->Stop())
- {
- delete _javaRenderThread;
- _javaRenderThread = NULL;
- }
- else
- {
- assert(false);
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Not able to stop thread, leaking", __FUNCTION__);
- _javaRenderThread = NULL;
- }
- return 0;
-}
-
-void VideoRenderAndroid::ReDraw()
-{
- CriticalSectionScoped cs(&_critSect);
- if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) // Allow redraw if it was more than 20ms since last.
- {
- _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
- _javaRenderEvent.Set();
- }
-}
-
-bool VideoRenderAndroid::JavaRenderThreadFun(void* obj)
-{
- return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
-}
-
-bool VideoRenderAndroid::JavaRenderThreadProcess()
-{
- _javaRenderEvent.Wait(1000);
-
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderJniEnv)
- {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !_javaRenderJniEnv)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, _javaRenderJniEnv);
- return false;
- }
- }
-
- for (MapItem* item = _streamsMap.First(); item != NULL; item
- = _streamsMap.Next(item))
- {
- static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
- _javaRenderJniEnv);
- }
-
- if (_javaShutDownFlag)
- {
- if (g_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s: Java thread detached", __FUNCTION__);
- }
- _javaRenderJniEnv = false;
- _javaShutDownFlag = false;
- _javaShutdownEvent.Set();
- return false; // Do not run this thread again.
- }
- return true;
-}
-
-VideoRenderType VideoRenderAndroid::RenderType()
-{
- return _renderType;
-}
-
-RawVideoType VideoRenderAndroid::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderAndroid::FullScreen()
-{
- return false;
-}
-
-WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
- WebRtc_UWord64& /*totalGraphicsMemory*/,
- WebRtc_UWord64& /*availableGraphicsMemory*/) const
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
- WebRtc_UWord32& /*screenWidth*/,
- WebRtc_UWord32& /*screenHeight*/) const
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
- const WebRtc_UWord32 /*streamId*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
- const WebRtc_UWord32 streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::SetText(
- const WebRtc_UWord8 textId,
- const WebRtc_UWord8* text,
- const WebRtc_Word32 textLength,
- const WebRtc_UWord32 textColorRef,
- const WebRtc_UWord32 backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
- const WebRtc_UWord8 pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-} //namespace webrtc
-
diff --git a/modules/video_render/main/source/Android/video_render_android_impl.h b/modules/video_render/main/source/Android/video_render_android_impl.h
deleted file mode 100644
index 7058871..0000000
--- a/modules/video_render/main/source/Android/video_render_android_impl.h
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-
-#include <jni.h>
-#include "i_video_render.h"
-#include "map_wrapper.h"
-
-
-namespace webrtc {
-
-//#define ANDROID_LOG
-
-
-class CriticalSectionWrapper;
-class EventWrapper;
-class ThreadWrapper;
-
-
-// The object a module user uses to send new frames to the java renderer
-// Base class for android render streams.
-
-class AndroidStream: public VideoRenderCallback
-{
-public:
- /*
- * DeliverFrame is called from a thread connected to the Java VM.
- * Used for Delivering frame for rendering.
- */
- virtual void DeliverFrame(JNIEnv* jniEnv)=0;
-
- virtual ~AndroidStream()
- {
- };
-};
-
-class VideoRenderAndroid: IVideoRender
-{
-public:
- static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
-
- VideoRenderAndroid(const WebRtc_Word32 id,
- const VideoRenderType videoRenderType, void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderAndroid();
-
- virtual WebRtc_Word32 Init()=0;
-
- virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
-
- virtual WebRtc_Word32 ChangeWindow(void* window);
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
- const WebRtc_UWord32 zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual WebRtc_Word32
- DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
-
- virtual WebRtc_Word32
- GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
- WebRtc_UWord32& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
- virtual WebRtc_Word32 StartRender();
-
- virtual WebRtc_Word32 StopRender();
-
- virtual void ReDraw();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual WebRtc_Word32
- GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
- WebRtc_UWord64& availableGraphicsMemory) const;
-
- virtual WebRtc_Word32
- GetScreenResolution(WebRtc_UWord32& screenWidth,
- WebRtc_UWord32& screenHeight) const;
-
- virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
-
- virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
- const float left, const float top,
- const float right,
- const float bottom);
-
- virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
-
- virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right,
- const float bottom);
-
- virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
- const WebRtc_UWord8* text,
- const WebRtc_Word32 textLength,
- const WebRtc_UWord32 textColorRef,
- const WebRtc_UWord32 backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual WebRtc_Word32 SetBitmap(const void* bitMap,
- const WebRtc_UWord8 pictureId,
- const void* colorKey, const float left,
- const float top, const float right,
- const float bottom);
-
-protected:
- virtual AndroidStream
- * CreateAndroidRenderChannel(WebRtc_Word32 streamId,
- WebRtc_Word32 zOrder,
- const float left, const float top,
- const float right, const float bottom,
- VideoRenderAndroid& renderer) = 0;
-
- WebRtc_Word32 _id;
- CriticalSectionWrapper& _critSect;
- VideoRenderType _renderType;
- jobject _ptrWindow;
-
- static JavaVM* g_jvm;
-
-private:
- static bool JavaRenderThreadFun(void* obj);
- bool JavaRenderThreadProcess();
-
- MapWrapper _streamsMap; // Map with streams to render.
- bool _javaShutDownFlag; // True if the _javaRenderThread thread shall be detached from the JVM.
- EventWrapper& _javaShutdownEvent;
- EventWrapper& _javaRenderEvent;
- WebRtc_Word64 _lastJavaRenderEvent;
- JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
- ThreadWrapper* _javaRenderThread;
-};
-
-} //namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/modules/video_render/main/source/Android/video_render_android_native_opengl2.h b/modules/video_render/main/source/Android/video_render_android_native_opengl2.h
deleted file mode 100644
index 54532a6..0000000
--- a/modules/video_render/main/source/Android/video_render_android_native_opengl2.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-
-#include <jni.h>
-
-#include "video_render_defines.h"
-
-#include "video_render_android_impl.h"
-#include "video_render_opengles20.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class AndroidNativeOpenGl2Channel: public AndroidStream
-{
-
-public:
- AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj);
- ~AndroidNativeOpenGl2Channel();
-
- WebRtc_Word32 Init(WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- //Implement VideoRenderCallback
- virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
-private:
- static jint CreateOpenGLNativeStatic(JNIEnv * env,jobject, jlong context, jint width, jint height);
- jint CreateOpenGLNative(int width, int height);
-
- static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
- void DrawNative();
- WebRtc_UWord32 _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
- jmethodID _redrawCid;
- jmethodID _registerNativeCID;
- jmethodID _deRegisterNativeCID;
- VideoRenderOpenGles20 _openGLRenderer;
-
-};
-
-
-class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid
-{
-public:
- AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- ~AndroidNativeOpenGl2Renderer();
- static bool UseOpenGL2(void* window);
-
- WebRtc_Word32 Init();
- virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
- WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
-
-private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-
-};
-
-} //namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
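
The static entry points declared above (CreateOpenGLNativeStatic, DrawNativeStatic) follow the standard JNI trampoline pattern: Java passes back a jlong that is really the C++ channel pointer, and the static method casts it and forwards the call. A minimal sketch of that binding, assuming a hypothetical Java peer class and method name (the actual ViEAndroidGLES20 bindings are not shown in this diff):

```cpp
#include <jni.h>

class NativeChannel {
 public:
  void Draw() { /* would render the current frame */ }
};

// Trampoline: Java echoes the jlong it was handed at registration time;
// cast it back to the owning C++ object and forward the call.
static void JNICALL DrawNativeStatic(JNIEnv*, jobject, jlong context) {
  reinterpret_cast<NativeChannel*>(context)->Draw();
}

// Bind the native method to the (assumed) Java peer class. The Java side
// would declare: private native void DrawNative(long context);
bool BindNatives(JNIEnv* env) {
  jclass clazz = env->FindClass("org/example/GlRenderView");  // assumed name
  if (!clazz)
    return false;
  JNINativeMethod methods[] = {
      {const_cast<char*>("DrawNative"), const_cast<char*>("(J)V"),
       reinterpret_cast<void*>(&DrawNativeStatic)},
  };
  return env->RegisterNatives(clazz, methods, 1) == 0;
}
```
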
diff --git a/modules/video_render/main/source/Android/video_render_android_surface_view.cc b/modules/video_render/main/source/Android/video_render_android_surface_view.cc
deleted file mode 100644
index 253d831..0000000
--- a/modules/video_render/main/source/Android/video_render_android_surface_view.cc
+++ /dev/null
@@ -1,470 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "video_render_android_surface_view.h"
-#include "critical_section_wrapper.h"
-#include "common_video/libyuv/include/libyuv.h"
-#include "tick_util.h"
-#ifdef ANDROID_NDK_8_OR_ABOVE
- #include <android/bitmap.h>
-#endif
-
-
-#ifdef ANDROID_LOG
-#include <stdio.h>
-#include <android/log.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
-#else
-#include "trace.h"
-#endif
-
-namespace webrtc {
-
-AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen)
-:
- VideoRenderAndroid(id,videoRenderType,window,fullscreen),
- _javaRenderObj(NULL),
- _javaRenderClass(NULL)
-{
-}
-
-AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewRenderer dtor");
- if(g_jvm)
- {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
- {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
- env=NULL;
- }
- else
- {
- isAttached = true;
- }
- }
- env->DeleteGlobalRef(_javaRenderObj);
- env->DeleteGlobalRef(_javaRenderClass);
-
- if (isAttached)
- {
- if (g_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
- }
-}
-
-
-WebRtc_Word32
-AndroidSurfaceViewRenderer::Init()
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- if (!g_jvm)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "(%s): Not a valid Java VM pointer.", __FUNCTION__);
- return -1;
- }
- if(!_ptrWindow)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "(%s): No window have been provided.", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
- {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- // get the ViESurfaceRender class
- jclass javaRenderClassLocal = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClassLocal)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
- _javaRenderClass = reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
- if (!_javaRenderClass)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java ViESurfaceRenderer class reference", __FUNCTION__);
- return -1;
- }
-
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaRenderClassLocal);
-
- // get the method ID for the constructor
- jmethodID cid = env->GetMethodID(_javaRenderClass, "<init>", "(Landroid/view/SurfaceView;)V");
- if (cid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get constructor ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // construct the object
- jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, cid, _ptrWindow);
- if (!javaRenderObjLocal)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Render", __FUNCTION__);
- return -1;
- }
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
- if (!_javaRenderObj)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java SurfaceRender object reference", __FUNCTION__);
- return -1;
- }
-
- // Detach this thread if it was attached
- if (isAttached)
- {
- if (g_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
- return 0;
-
-}
-AndroidStream*
-AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(WebRtc_Word32 streamId,
- WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__,streamId);
- AndroidSurfaceViewChannel* stream=new AndroidSurfaceViewChannel(streamId,g_jvm,renderer,_javaRenderObj);
- if(stream && stream->Init(zOrder,left,top,right,bottom)==0)
- return stream;
- else
- delete stream;
- return NULL;
-}
-
-
-
-
-
-
-AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj)
-:
-_id(streamId),
-_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
-_renderer(renderer),
-_jvm(jvm),
-_javaRenderObj(javaRenderObj),
-_bitmapWidth(0),
-_bitmapHeight(0)
-{
-
-}
-AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewChannel dtor");
- delete &_renderCritSect;
- if(_jvm)
- {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
- {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
- env=NULL;
- }
- else
- {
- isAttached = true;
- }
- }
-
-#ifdef ANDROID_NDK_8_OR_ABOVE
- env->DeleteGlobalRef(_javaBitmapObj);
-#else
- env->DeleteGlobalRef(_javaByteBufferObj);
-#endif
- if (isAttached)
- {
- if (_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
- }
-}
-
-WebRtc_Word32
-AndroidSurfaceViewChannel::Init(WebRtc_Word32 /*zOrder*/,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel", __FUNCTION__);
- if (!_jvm)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer,_id, "%s: Not a valid Java VM pointer", __FUNCTION__);
- return -1;
- }
-
- if((top>1 || top<0) || (right>1 || right<0) || (bottom>1 || bottom<0) || (left>1 || left<0))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Wrong coordinates",
- __FUNCTION__);
- return -1;
- }
-
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
- {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- jclass javaRenderClass = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClass)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
- return -1;
- }
-#ifdef ANDROID_NDK_8_OR_ABOVE
- // get the method ID for the CreateBitmap
- _createBitmapCid = env->GetMethodID(_javaRenderClass, "CreateBitmap", "(II)Landroid/graphics/Bitmap;");
- if (_createBitmapCid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateBitmap ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
- // get the method ID for the DrawBitmap function
- _drawBitmapCid = env->GetMethodID(_javaRenderClass, "DrawBitmap", "()V");
- if (_drawBitmapCid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawBitmap ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-#else
- // get the method ID for the CreateIntArray
- _createByteBufferCid = env->GetMethodID(javaRenderClass, "CreateByteBuffer", "(II)Ljava/nio/ByteBuffer;");
- if (_createByteBufferCid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateByteBuffer ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // get the method ID for the DrawByteBuffer function
- _drawByteBufferCid = env->GetMethodID(javaRenderClass, "DrawByteBuffer", "()V");
- if (_drawByteBufferCid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawByteBuffer ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-#endif
-
- // get the method ID for the SetCoordinates function
- _setCoordinatesCid = env->GetMethodID(javaRenderClass, "SetCoordinates", "(FFFF)V");
- if (_setCoordinatesCid == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get SetCoordinates ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- env->CallVoidMethod(_javaRenderObj,_setCoordinatesCid,left,top,right,bottom);
-
- // Detach this thread if it was attached
- if (isAttached)
- {
- if (_jvm->DetachCurrentThread() < 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel done", __FUNCTION__);
- return 0;
-}
-
-
-WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
-{
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
- _renderCritSect.Enter();
- _bufferToRender.SwapFrame(videoFrame);
- _renderCritSect.Leave();
- _renderer.ReDraw();
- return 0;
-}
-
-
-/*Implements AndroidStream
- * Calls the Java object and render the buffer in _bufferToRender
- */
-void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
- _renderCritSect.Enter();
-
-#ifdef ANDROID_NDK_8_OR_ABOVE
- if (_bitmapWidth != _bufferToRender.Width() ||
- _bitmapHeight != _bufferToRender.Height()) {
- // Create the bitmap to write to
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
- "%u", __FUNCTION__, _bufferToRender.Width(),
- _bufferToRender.Height());
- if (_javaBitmapObj) {
- jniEnv->DeleteGlobalRef(_javaBitmapObj);
- _javaBitmapObj = NULL;
- }
- jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
- _createBitmapCid,
- videoFrame.Width(),
- videoFrame.Height());
- _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
- if (!_javaBitmapObj) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
- "create Java Bitmap object reference", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- } else {
- _bitmapWidth=_bufferToRender.Width();
- _bitmapHeight=_bufferToRender.Height();
- }
- }
- void* pixels;
- if (_javaBitmapObj &&
- AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
- __FUNCTION__);
- // Convert I420 straight into the Java bitmap.
- int ret = ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
- (unsigned char* ) pixels,
- _bitmapWidth, _bitmapHeight);
- if (ret < 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion "
- "failed.", __FUNCTION__);
- }
-
- AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
- // Draw the Surface.
- jniEnv->CallVoidMethod(_javaRenderObj,_drawCid);
-
- } else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
- "bitmap", __FUNCTION__);
- }
- _renderCritSect.Leave();
-
-#else
- if (_bitmapWidth != _bufferToRender.Width() ||
- _bitmapHeight != _bufferToRender.Height()) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
- "%d",__FUNCTION__,
- _bufferToRender.Width(), _bufferToRender.Height());
- if (_javaByteBufferObj) {
- jniEnv->DeleteGlobalRef(_javaByteBufferObj);
- _javaByteBufferObj = NULL;
- _directBuffer = NULL;
- }
- jobject javaByteBufferObj =
- jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
- _bufferToRender.Width(),
- _bufferToRender.Height());
- _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
- if (!_javaByteBufferObj) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
- "create Java ByteBuffer object reference", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- } else {
- _directBuffer = static_cast<unsigned char*>
- (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
- _bitmapWidth = _bufferToRender.Width();
- _bitmapHeight = _bufferToRender.Height();
- }
- }
-
- if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
- // Android requires a vertically flipped image compared to std convert.
- // This is done by giving a negative height input.
- const int conversionResult =
- ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
- _directBuffer, _bitmapWidth, -_bitmapHeight);
- if (conversionResult < 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
- " failed.", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- }
- }
- _renderCritSect.Leave();
- // Draw the Surface
- jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
-#endif
-}
-
-} // namespace webrtc
-
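
Every method in the file above repeats the same GetEnv / AttachCurrentThread / DetachCurrentThread dance, and the replacement file later in this diff keeps that shape. A small RAII helper could factor it out; this is a sketch of the idea, not a class that exists in this tree:

```cpp
#include <jni.h>

// Attaches the calling thread to the JVM when it is not already attached,
// and detaches on scope exit only if this scope performed the attach.
class ScopedJvmAttach {
 public:
  explicit ScopedJvmAttach(JavaVM* jvm)
      : jvm_(jvm), env_(NULL), attached_(false) {
    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4) !=
        JNI_OK) {
      if (jvm_->AttachCurrentThread(&env_, NULL) == 0 && env_)
        attached_ = true;
      else
        env_ = NULL;  // Attach failed; env() reports it as NULL.
    }
  }
  ~ScopedJvmAttach() {
    if (attached_)
      jvm_->DetachCurrentThread();
  }
  JNIEnv* env() const { return env_; }

 private:
  JavaVM* jvm_;
  JNIEnv* env_;
  bool attached_;
};

// Usage, replacing the boilerplate in the destructor above:
//   ScopedJvmAttach attach(g_jvm);
//   if (JNIEnv* env = attach.env()) {
//     env->DeleteGlobalRef(_javaRenderObj);
//     env->DeleteGlobalRef(_javaRenderClass);
//   }
```
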
diff --git a/modules/video_render/main/source/Android/video_render_android_surface_view.h b/modules/video_render/main/source/Android/video_render_android_surface_view.h
deleted file mode 100644
index f55e60b..0000000
--- a/modules/video_render/main/source/Android/video_render_android_surface_view.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-
-#include <jni.h>
-
-#include "video_render_defines.h"
-
-#include "video_render_android_impl.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-
-class AndroidSurfaceViewChannel: public AndroidStream
-{
-
-public:
- AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,
- jobject javaRenderObj);
- ~AndroidSurfaceViewChannel();
-
- WebRtc_Word32 Init(WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- //Implement VideoRenderCallback
- virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
- VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
-private:
- WebRtc_UWord32 _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
-#ifdef ANDROID_NDK_8_OR_ABOVE
- jclass _javaBitmapClass;
- jmethodID _createBitmapCid;
- jobject _javaBitmapObj;
- jmethodID _drawBitmapCid;
-#else
- jobject _javaByteBufferObj;
- unsigned char* _directBuffer;
- jmethodID _createByteBufferCid;
- jmethodID _drawByteBufferCid;
-#endif
- jmethodID _setCoordinatesCid;
- unsigned int _bitmapWidth;
- unsigned int _bitmapHeight;
-};
-
-class AndroidSurfaceViewRenderer: private VideoRenderAndroid
-{
-public:
- AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
- ~AndroidSurfaceViewRenderer();
- WebRtc_Word32 Init();
- virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
- WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
-private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-
-};
-
-} //namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
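
The header above shows the two frame-delivery paths the SurfaceView channel is compiled with: an android/bitmap.h path on NDK r8 and above, and a direct ByteBuffer path otherwise. The bitmap path reduces to a lock/convert/unlock cycle around the NDK bitmap API; a condensed sketch, where ConvertI420ToRGB565 stands for the libyuv-based converter this module already calls (its declaration is assumed here) and the jmethodID is the cached "DrawBitmap" "()V" handle:

```cpp
#include <jni.h>
#include <android/bitmap.h>

// Converter used elsewhere in this module (declaration assumed here).
extern int ConvertI420ToRGB565(unsigned char* src, unsigned char* dst,
                               int width, int height);

// Writes one RGB565 frame into the Java Bitmap and asks Java to draw it.
void DrawFrameIntoBitmap(JNIEnv* env, jobject bitmap, jobject javaRenderObj,
                         jmethodID drawBitmapCid, unsigned char* i420,
                         int width, int height) {
  void* pixels = NULL;
  if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0)
    return;  // Bitmap busy or invalid; drop this frame.
  ConvertI420ToRGB565(i420, static_cast<unsigned char*>(pixels),
                      width, height);
  AndroidBitmap_unlockPixels(env, bitmap);
  env->CallVoidMethod(javaRenderObj, drawBitmapCid);
}
```
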
diff --git a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
similarity index 99%
rename from modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
rename to modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
index fc30607..73cf251 100644
--- a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
+++ b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java
similarity index 96%
rename from modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java
rename to modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java
index 56d5261..6d87441 100644
--- a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java
+++ b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
similarity index 98%
rename from modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
rename to modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
index 3412582..9ae4b8b 100644
--- a/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
+++ b/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/modules/video_render/main/source/android/video_render_android_impl.cc b/modules/video_render/main/source/android/video_render_android_impl.cc
new file mode 100644
index 0000000..2c40fa5
--- /dev/null
+++ b/modules/video_render/main/source/android/video_render_android_impl.cc
@@ -0,0 +1,377 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+JavaVM* VideoRenderAndroid::g_jvm = NULL;
+
+WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM)
+{
+ WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
+
+ g_jvm = (JavaVM*) javaVM;
+
+ return 0;
+
+}
+
+VideoRenderAndroid::VideoRenderAndroid(
+ const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool /*fullscreen*/):
+ _id(id),
+ _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _renderType(videoRenderType),
+ _ptrWindow((jobject)(window)),
+ _streamsMap(),
+ _javaShutDownFlag(false),
+ _javaShutdownEvent(*EventWrapper::Create()),
+ _javaRenderEvent(*EventWrapper::Create()),
+ _lastJavaRenderEvent(0),
+ _javaRenderJniEnv(NULL),
+ _javaRenderThread(NULL)
+{
+}
+
+VideoRenderAndroid::~VideoRenderAndroid()
+{
+
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+ "VideoRenderAndroid dtor");
+
+ if (_javaRenderThread)
+ StopRender();
+
+ for (MapItem* item = _streamsMap.First(); item != NULL; item
+ = _streamsMap.Next(item))
+ { // Delete streams
+ delete static_cast<AndroidStream*> (item->GetItem());
+ }
+ delete &_javaShutdownEvent;
+ delete &_javaRenderEvent;
+ delete &_critSect;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id)
+{
+ CriticalSectionScoped cs(&_critSect);
+ _id = id;
+
+ return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/)
+{
+ return -1;
+}
+
+VideoRenderCallback*
+VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+ const WebRtc_UWord32 zOrder,
+ const float left, const float top,
+ const float right,
+ const float bottom)
+{
+ CriticalSectionScoped cs(&_critSect);
+
+ AndroidStream* renderStream = NULL;
+ MapItem* item = _streamsMap.Find(streamId);
+ if (item)
+ {
+ renderStream = (AndroidStream*) (item->GetItem());
+ if (NULL != renderStream)
+ {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+ "%s: Render stream already exists", __FUNCTION__);
+ return renderStream;
+ }
+ }
+
+ renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
+ right, bottom, *this);
+ if (renderStream)
+ {
+ _streamsMap.Insert(streamId, renderStream);
+ }
+ else
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+ return NULL;
+ }
+ return renderStream;
+}
+
+WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
+ const WebRtc_UWord32 streamId)
+{
+ CriticalSectionScoped cs(&_critSect);
+
+ MapItem* item = _streamsMap.Find(streamId);
+ if (item)
+ {
+ delete (AndroidStream*) item->GetItem();
+ _streamsMap.Erase(streamId);
+ }
+ else
+ {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "(%s:%d): renderStream not found", __FUNCTION__, __LINE__);
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
+    const WebRtc_UWord32 /*streamId*/,
+    WebRtc_UWord32& /*zOrder*/,
+    float& /*left*/,
+    float& /*top*/,
+    float& /*right*/,
+    float& /*bottom*/) const {
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StartRender() {
+ CriticalSectionScoped cs(&_critSect);
+
+ if (_javaRenderThread) {
+    // StartRender is called when this stream should start rendering.
+    // However, StopRender is not called when the stream stops rendering.
+    // Thus the thread is only deleted when the renderer is removed.
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s, Render thread already exists", __FUNCTION__);
+ return 0;
+ }
+
+ _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
+ kRealtimePriority,
+ "AndroidRenderThread");
+ if (!_javaRenderThread) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: No thread", __FUNCTION__);
+ return -1;
+ }
+
+ unsigned int tId = 0;
+ if (_javaRenderThread->Start(tId)) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+ "%s: thread started: %u", __FUNCTION__, tId);
+ }
+ else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not start render thread", __FUNCTION__);
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StopRender()
+{
+
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+ {
+ CriticalSectionScoped cs(&_critSect);
+ if (!_javaRenderThread)
+ {
+ return -1;
+ }
+ _javaShutDownFlag = true;
+ _javaRenderEvent.Set();
+ }
+
+ _javaShutdownEvent.Wait(3000);
+ CriticalSectionScoped cs(&_critSect);
+ _javaRenderThread->SetNotAlive();
+ if (_javaRenderThread->Stop())
+ {
+ delete _javaRenderThread;
+ _javaRenderThread = NULL;
+ }
+ else
+ {
+ assert(false);
+ WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+ "%s: Not able to stop thread, leaking", __FUNCTION__);
+ _javaRenderThread = NULL;
+ }
+ return 0;
+}
+
+void VideoRenderAndroid::ReDraw() {
+ CriticalSectionScoped cs(&_critSect);
+  // Allow redraw if more than 20 ms have passed since the last one.
+ if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
+ _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
+ _javaRenderEvent.Set();
+ }
+}
+
+bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
+ return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
+}
+
+bool VideoRenderAndroid::JavaRenderThreadProcess()
+{
+ _javaRenderEvent.Wait(1000);
+
+ CriticalSectionScoped cs(&_critSect);
+ if (!_javaRenderJniEnv)
+ {
+ // try to attach the thread and get the env
+ // Attach this thread to JVM
+ jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
+
+ // Get the JNI env for this thread
+ if ((res < 0) || !_javaRenderJniEnv)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: Could not attach thread to JVM (%d, %p)",
+ __FUNCTION__, res, _javaRenderJniEnv);
+ return false;
+ }
+ }
+
+ for (MapItem* item = _streamsMap.First(); item != NULL; item
+ = _streamsMap.Next(item))
+ {
+ static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
+ _javaRenderJniEnv);
+ }
+
+ if (_javaShutDownFlag)
+ {
+ if (g_jvm->DetachCurrentThread() < 0)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+ "%s: Could not detach thread from JVM", __FUNCTION__);
+ }
+ else
+ {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+ "%s: Java thread detached", __FUNCTION__);
+ }
+    _javaRenderJniEnv = NULL;
+ _javaShutDownFlag = false;
+ _javaShutdownEvent.Set();
+ return false; // Do not run this thread again.
+ }
+ return true;
+}
+
+VideoRenderType VideoRenderAndroid::RenderType()
+{
+ return _renderType;
+}
+
+RawVideoType VideoRenderAndroid::PerferedVideoType()
+{
+ return kVideoI420;
+}
+
+bool VideoRenderAndroid::FullScreen()
+{
+ return false;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
+ WebRtc_UWord64& /*totalGraphicsMemory*/,
+ WebRtc_UWord64& /*availableGraphicsMemory*/) const {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
+ WebRtc_UWord32& /*screenWidth*/,
+ WebRtc_UWord32& /*screenHeight*/) const {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(
+ const WebRtc_UWord32 /*streamId*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
+ const WebRtc_UWord32 /*streamId*/,
+ const float /*left*/,
+ const float /*top*/,
+ const float /*right*/,
+ const float /*bottom*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool /*enable*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
+    const WebRtc_UWord32 /*streamId*/,
+    const unsigned int /*zOrder*/,
+    const float /*left*/,
+    const float /*top*/,
+    const float /*right*/,
+    const float /*bottom*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetText(
+    const WebRtc_UWord8 /*textId*/,
+    const WebRtc_UWord8* /*text*/,
+    const WebRtc_Word32 /*textLength*/,
+    const WebRtc_UWord32 /*textColorRef*/,
+    const WebRtc_UWord32 /*backgroundColorRef*/,
+    const float /*left*/, const float /*top*/,
+    const float /*right*/, const float /*bottom*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* /*bitMap*/,
+                                            const WebRtc_UWord8 /*pictureId*/,
+                                            const void* /*colorKey*/,
+                                            const float /*left*/, const float /*top*/,
+                                            const float /*right*/,
+                                            const float /*bottom*/) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s - not supported on Android", __FUNCTION__);
+ return -1;
+}
+
+} //namespace webrtc
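
JavaRenderThreadProcess above is written against ThreadWrapper's contract: the function runs repeatedly while it returns true, and returning false retires the thread, which is why the shutdown branch signals _javaShutdownEvent before returning. The loop shape, stripped of the JNI details:

```cpp
// Skeleton of a ThreadWrapper-style loop body: wait for work or a timeout,
// deliver everything pending, and return whether to run another iteration.
struct RenderLoop {
  bool shutdown_requested;

  RenderLoop() : shutdown_requested(false) {}

  bool Process() {
    WaitForRenderEventOrTimeout();  // _javaRenderEvent.Wait(1000);
    DeliverPendingFrames();         // DeliverFrame() on every mapped stream.
    if (shutdown_requested) {
      SignalShutdownComplete();     // _javaShutdownEvent.Set();
      return false;                 // Thread is not scheduled again.
    }
    return true;
  }

  // Stand-ins for the EventWrapper/MapWrapper calls in the real class.
  void WaitForRenderEventOrTimeout() {}
  void DeliverPendingFrames() {}
  void SignalShutdownComplete() {}
};
```
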
diff --git a/modules/video_render/main/source/android/video_render_android_impl.h b/modules/video_render/main/source/android/video_render_android_impl.h
new file mode 100644
index 0000000..b3f1525
--- /dev/null
+++ b/modules/video_render/main/source/android/video_render_android_impl.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+
+#include <jni.h>
+#include "i_video_render.h"
+#include "map_wrapper.h"
+
+
+namespace webrtc {
+
+//#define ANDROID_LOG
+
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+
+// Base class for Android render streams: the object a module user uses
+// to deliver new frames to the Java renderer.
+
+class AndroidStream: public VideoRenderCallback
+{
+public:
+ /*
+ * DeliverFrame is called from a thread connected to the Java VM.
+     * Used for delivering a frame for rendering.
+ */
+    virtual void DeliverFrame(JNIEnv* jniEnv) = 0;
+
+    virtual ~AndroidStream()
+    {
+    }
+};
+
+class VideoRenderAndroid: IVideoRender
+{
+public:
+ static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
+
+ VideoRenderAndroid(const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen);
+
+ virtual ~VideoRenderAndroid();
+
+ virtual WebRtc_Word32 Init()=0;
+
+ virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+ virtual WebRtc_Word32 ChangeWindow(void* window);
+
+ virtual VideoRenderCallback* AddIncomingRenderStream(
+ const WebRtc_UWord32 streamId,
+ const WebRtc_UWord32 zOrder,
+ const float left, const float top,
+ const float right, const float bottom);
+
+ virtual WebRtc_Word32 DeleteIncomingRenderStream(
+ const WebRtc_UWord32 streamId);
+
+ virtual WebRtc_Word32 GetIncomingRenderStreamProperties(
+ const WebRtc_UWord32 streamId,
+ WebRtc_UWord32& zOrder,
+ float& left, float& top,
+ float& right, float& bottom) const;
+
+ virtual WebRtc_Word32 StartRender();
+
+ virtual WebRtc_Word32 StopRender();
+
+ virtual void ReDraw();
+
+ /**************************************************************************
+ *
+ * Properties
+ *
+ ***************************************************************************/
+
+ virtual VideoRenderType RenderType();
+
+ virtual RawVideoType PerferedVideoType();
+
+ virtual bool FullScreen();
+
+ virtual WebRtc_Word32 GetGraphicsMemory(
+ WebRtc_UWord64& totalGraphicsMemory,
+ WebRtc_UWord64& availableGraphicsMemory) const;
+
+ virtual WebRtc_Word32 GetScreenResolution(
+ WebRtc_UWord32& screenWidth,
+ WebRtc_UWord32& screenHeight) const;
+
+ virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+ virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+ const float left, const float top,
+ const float right,
+ const float bottom);
+
+ virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+ virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+ const unsigned int zOrder,
+ const float left, const float top,
+ const float right,
+ const float bottom);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+ virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+ const WebRtc_UWord8 pictureId,
+ const void* colorKey, const float left,
+ const float top, const float right,
+ const float bottom);
+
+ protected:
+ virtual AndroidStream* CreateAndroidRenderChannel(
+ WebRtc_Word32 streamId,
+ WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom,
+ VideoRenderAndroid& renderer) = 0;
+
+ WebRtc_Word32 _id;
+ CriticalSectionWrapper& _critSect;
+ VideoRenderType _renderType;
+ jobject _ptrWindow;
+
+ static JavaVM* g_jvm;
+
+ private:
+ static bool JavaRenderThreadFun(void* obj);
+ bool JavaRenderThreadProcess();
+
+ // Map with streams to render.
+ MapWrapper _streamsMap;
+ // True if the _javaRenderThread thread shall be detached from the JVM.
+ bool _javaShutDownFlag;
+ EventWrapper& _javaShutdownEvent;
+ EventWrapper& _javaRenderEvent;
+ WebRtc_Word64 _lastJavaRenderEvent;
+ JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
+ ThreadWrapper* _javaRenderThread;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
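
Taken together, the header above implies this call sequence for an embedder: publish the JavaVM once, construct a concrete subclass (the renderers later in this diff), register a stream, then start the render thread. A hedged sketch of that flow, assuming the module headers are on the include path; error handling is elided:

```cpp
#include <jni.h>
#include "video_render_android_impl.h"

// JNI_OnLoad is the natural place to capture the JavaVM.
jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
  webrtc::VideoRenderAndroid::SetAndroidEnvVariables(vm);
  return JNI_VERSION_1_4;
}

void StartRendering(webrtc::VideoRenderAndroid* renderer) {
  // Stream 0, z-order 0, full window; coordinates are normalized to [0,1].
  webrtc::VideoRenderCallback* callback =
      renderer->AddIncomingRenderStream(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
  if (callback)
    renderer->StartRender();
  // Decoded frames are then pushed via callback->RenderFrame(0, frame).
}
```
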
diff --git a/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc b/modules/video_render/main/source/android/video_render_android_native_opengl2.cc
similarity index 79%
rename from modules/video_render/main/source/Android/video_render_android_native_opengl2.cc
rename to modules/video_render/main/source/android/video_render_android_native_opengl2.cc
index ad93bf2..69de8c7 100644
--- a/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc
+++ b/modules/video_render/main/source/android/video_render_android_native_opengl2.cc
@@ -25,10 +25,10 @@
namespace webrtc {
AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
- const WebRtc_Word32 id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
+ const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen) :
VideoRenderAndroid(id, videoRenderType, window, fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
@@ -54,12 +54,12 @@
// Get the JNI env for this thread
if ((res < 0) || !env)
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- -1,
- "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
- res, env);
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVideoRenderer,
+ -1,
+ "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
+ res, env);
return false;
}
isAttached = true;
@@ -191,9 +191,10 @@
return -1;
}
- // create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
- _javaRenderClass
- = reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
+ // create a global reference to the class (to tell JNI that
+ // we are referencing it after this function has returned)
+ _javaRenderClass =
+ reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -211,11 +212,11 @@
if (!_javaRenderObj)
{
WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java SurfaceRender object reference",
- __FUNCTION__);
+ kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not create Java SurfaceRender object reference",
+ __FUNCTION__);
return -1;
}
@@ -236,13 +237,13 @@
}
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
- WebRtc_Word32 streamId,
- WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer)
+ WebRtc_Word32 streamId,
+ WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom,
+ VideoRenderAndroid& renderer)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
__FUNCTION__, streamId);
@@ -258,9 +259,10 @@
return NULL;
}
-AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,jobject javaRenderObj):
+AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
+ WebRtc_UWord32 streamId,
+ JavaVM* jvm,
+    VideoRenderAndroid& renderer, jobject javaRenderObj):
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
@@ -428,8 +430,9 @@
return 0;
}
-WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(const WebRtc_UWord32 /*streamId*/,
- VideoFrame& videoFrame)
+WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
+ const WebRtc_UWord32 /*streamId*/,
+ VideoFrame& videoFrame)
{
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
@@ -449,46 +452,50 @@
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds());
+ // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
+ // "%s: time to deliver %lld" ,__FUNCTION__,
+ // (TickTime::Now()-timeNow).Milliseconds());
}
/*
- * JNI callback from Java class. Called when the render want to render a frame. Called from the GLRenderThread
+ * JNI callback from the Java class. Called from the GLRenderThread
+ * when the renderer wants to render a frame.
* Method: DrawNative
* Signature: (J)V
*/
-void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic
-(JNIEnv * env, jobject, jlong context)
-{
- AndroidNativeOpenGl2Channel* renderChannel=reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
- renderChannel->DrawNative();
+void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
+ JNIEnv * env, jobject, jlong context) {
+ AndroidNativeOpenGl2Channel* renderChannel =
+ reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+ renderChannel->DrawNative();
}
void AndroidNativeOpenGl2Channel::DrawNative()
{
- _openGLRenderer.Render(_bufferToRender);
+ _openGLRenderer.Render(_bufferToRender);
}
+
/*
- * JNI callback from Java class. Called when the GLSurfaceview have created a surface. Called from the GLRenderThread
+ * JNI callback from the Java class. Called from the GLRenderThread
+ * when the GLSurfaceView has created a surface.
* Method: CreateOpenGLNativeStatic
* Signature: (JII)I
*/
-jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(JNIEnv * env,
- jobject,
- jlong context,
- jint width,
- jint height)
-{
- AndroidNativeOpenGl2Channel* renderChannel =
- reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
- return renderChannel->CreateOpenGLNative(width, height);
+jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
+ JNIEnv * env,
+ jobject,
+ jlong context,
+ jint width,
+ jint height) {
+ AndroidNativeOpenGl2Channel* renderChannel =
+ reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
+ return renderChannel->CreateOpenGLNative(width, height);
}
-jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(int width, int height)
-{
-
- return _openGLRenderer.Setup(width, height);
+jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
+ int width, int height) {
+ return _openGLRenderer.Setup(width, height);
}
} //namespace webrtc
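
The hunks above keep _registerNativeCID and _deRegisterNativeCID, method IDs on the Java render object that presumably store and clear the jlong context echoed back into CreateOpenGLNativeStatic and DrawNativeStatic. A sketch of that handshake from the C++ side; the Java method semantics here are an assumption, not shown in this diff:

```cpp
#include <jni.h>

// Hand the C++ channel to Java so it can be passed back as the 'context'
// jlong in the static callbacks. Assumed Java side:
//   void RegisterNativeObject(long nativeObject);
void RegisterNativeObject(JNIEnv* env, jobject javaRenderObj,
                          jmethodID registerNativeCid, void* channel) {
  env->CallVoidMethod(javaRenderObj, registerNativeCid,
                      reinterpret_cast<jlong>(channel));
}

// Clear the stored handle before the C++ channel is destroyed, so later
// callbacks cannot dereference a dangling pointer.
void DeregisterNativeObject(JNIEnv* env, jobject javaRenderObj,
                            jmethodID deRegisterNativeCid) {
  env->CallVoidMethod(javaRenderObj, deRegisterNativeCid);
}
```
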
diff --git a/modules/video_render/main/source/android/video_render_android_native_opengl2.h b/modules/video_render/main/source/android/video_render_android_native_opengl2.h
new file mode 100644
index 0000000..c69f17d
--- /dev/null
+++ b/modules/video_render/main/source/android/video_render_android_native_opengl2.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+#include "video_render_android_impl.h"
+#include "video_render_opengles20.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidNativeOpenGl2Channel: public AndroidStream {
+ public:
+ AndroidNativeOpenGl2Channel(
+ WebRtc_UWord32 streamId,
+ JavaVM* jvm,
+      VideoRenderAndroid& renderer, jobject javaRenderObj);
+ ~AndroidNativeOpenGl2Channel();
+
+ WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom);
+
+ //Implement VideoRenderCallback
+ virtual WebRtc_Word32 RenderFrame(
+ const WebRtc_UWord32 streamId,
+ VideoFrame& videoFrame);
+
+ //Implements AndroidStream
+ virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+ static jint CreateOpenGLNativeStatic(
+ JNIEnv * env,
+ jobject,
+ jlong context,
+ jint width,
+ jint height);
+ jint CreateOpenGLNative(int width, int height);
+
+  static void DrawNativeStatic(JNIEnv* env, jobject, jlong context);
+ void DrawNative();
+ WebRtc_UWord32 _id;
+ CriticalSectionWrapper& _renderCritSect;
+
+ VideoFrame _bufferToRender;
+ VideoRenderAndroid& _renderer;
+ JavaVM* _jvm;
+ jobject _javaRenderObj;
+
+ jmethodID _redrawCid;
+ jmethodID _registerNativeCID;
+ jmethodID _deRegisterNativeCID;
+ VideoRenderOpenGles20 _openGLRenderer;
+};
+
+
+class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
+ public:
+ AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen);
+
+ ~AndroidNativeOpenGl2Renderer();
+ static bool UseOpenGL2(void* window);
+
+ WebRtc_Word32 Init();
+ virtual AndroidStream* CreateAndroidRenderChannel(
+ WebRtc_Word32 streamId,
+ WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom,
+ VideoRenderAndroid& renderer);
+
+ private:
+ jobject _javaRenderObj;
+ jclass _javaRenderClass;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
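
UseOpenGL2 above is a static capability probe, so the choice between the OpenGL ES 2.0 channel and the SurfaceView fallback is made at renderer-creation time. A hedged sketch of that selection; the real factory lives outside this diff, and because both renderers derive privately from VideoRenderAndroid the sketch returns an opaque pointer:

```cpp
#include "video_render_android_native_opengl2.h"
#include "video_render_android_surface_view.h"

// Illustrative only: prefer the GLES2 renderer when the device and window
// support it, otherwise fall back to the SurfaceView path.
void* CreatePlatformRenderer(const WebRtc_Word32 id,
                             const webrtc::VideoRenderType type,
                             void* window, const bool fullscreen) {
  if (webrtc::AndroidNativeOpenGl2Renderer::UseOpenGL2(window)) {
    webrtc::AndroidNativeOpenGl2Renderer* gl =
        new webrtc::AndroidNativeOpenGl2Renderer(id, type, window, fullscreen);
    if (gl->Init() == 0)
      return gl;
    delete gl;
  }
  webrtc::AndroidSurfaceViewRenderer* sv =
      new webrtc::AndroidSurfaceViewRenderer(id, type, window, fullscreen);
  if (sv->Init() == 0)
    return sv;
  delete sv;
  return NULL;
}
```
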
diff --git a/modules/video_render/main/source/android/video_render_android_surface_view.cc b/modules/video_render/main/source/android/video_render_android_surface_view.cc
new file mode 100644
index 0000000..20555b1
--- /dev/null
+++ b/modules/video_render/main/source/android/video_render_android_surface_view.cc
@@ -0,0 +1,562 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_surface_view.h"
+#include "critical_section_wrapper.h"
+#include "common_video/libyuv/include/libyuv.h"
+#include "tick_util.h"
+#ifdef ANDROID_NDK_8_OR_ABOVE
+ #include <android/bitmap.h>
+#endif
+
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen)
+:
+ VideoRenderAndroid(id,videoRenderType,window,fullscreen),
+ _javaRenderObj(NULL),
+ _javaRenderClass(NULL)
+{
+}
+
+AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+ "AndroidSurfaceViewRenderer dtor");
+ if(g_jvm) {
+ // get the JNI env for this thread
+ bool isAttached = false;
+ JNIEnv* env = NULL;
+ if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+ // try to attach the thread and get the env
+ // Attach this thread to JVM
+ jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+ // Get the JNI env for this thread
+ if ((res < 0) || !env) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not attach thread to JVM (%d, %p)",
+ __FUNCTION__,
+ res,
+ env);
+ env=NULL;
+ }
+ else {
+ isAttached = true;
+ }
+ }
+ env->DeleteGlobalRef(_javaRenderObj);
+ env->DeleteGlobalRef(_javaRenderClass);
+
+ if (isAttached) {
+ if (g_jvm->DetachCurrentThread() < 0) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not detach thread from JVM",
+ __FUNCTION__);
+ }
+ }
+ }
+}
+
+WebRtc_Word32 AndroidSurfaceViewRenderer::Init() {
+ WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+ if (!g_jvm) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "(%s): Not a valid Java VM pointer.",
+ __FUNCTION__);
+ return -1;
+ }
+ if(!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): No window has been provided.",
+                 __FUNCTION__);
+ return -1;
+ }
+
+ // get the JNI env for this thread
+ bool isAttached = false;
+ JNIEnv* env = NULL;
+ if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+ // try to attach the thread and get the env
+ // Attach this thread to JVM
+ jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+ // Get the JNI env for this thread
+ if ((res < 0) || !env) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not attach thread to JVM (%d, %p)",
+ __FUNCTION__,
+ res,
+ env);
+ return -1;
+ }
+ isAttached = true;
+ }
+
+ // get the ViESurfaceRender class
+ jclass javaRenderClassLocal =
+ env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+ if (!javaRenderClassLocal) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not find ViESurfaceRenderer",
+ __FUNCTION__);
+ return -1;
+ }
+
+ // create a global reference to the class (to tell JNI that
+ // we are referencing it after this function has returned)
+ _javaRenderClass =
+ reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
+ if (!_javaRenderClass) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not create Java ViESurfaceRenderer class reference",
+ __FUNCTION__);
+ return -1;
+ }
+
+ // Delete local class ref, we only use the global ref
+ env->DeleteLocalRef(javaRenderClassLocal);
+
+ // get the method ID for the constructor
+ jmethodID cid = env->GetMethodID(_javaRenderClass,
+ "<init>",
+ "(Landroid/view/SurfaceView;)V");
+ if (cid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get constructor ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+
+ // construct the object
+ jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
+ cid,
+ _ptrWindow);
+ if (!javaRenderObjLocal) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not create Java Render",
+ __FUNCTION__);
+ return -1;
+ }
+
+ // create a reference to the object (to tell JNI that we are referencing it
+ // after this function has returned)
+ _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
+ if (!_javaRenderObj) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not create Java SurfaceRender object reference",
+ __FUNCTION__);
+ return -1;
+ }
+
+ // Detach this thread if it was attached
+ if (isAttached) {
+ if (g_jvm->DetachCurrentThread() < 0) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not detach thread from JVM", __FUNCTION__);
+ }
+ }
+
+ WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
+ return 0;
+}
+
+AndroidStream*
+AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
+ WebRtc_Word32 streamId,
+ WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom,
+ VideoRenderAndroid& renderer) {
+ WEBRTC_TRACE(kTraceDebug,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Id %d",
+ __FUNCTION__,
+ streamId);
+ AndroidSurfaceViewChannel* stream =
+ new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
+ if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
+ return stream;
+ else
+ delete stream;
+ return NULL;
+}
+
+AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
+ WebRtc_UWord32 streamId,
+ JavaVM* jvm,
+ VideoRenderAndroid& renderer,
+ jobject javaRenderObj) :
+ _id(streamId),
+ _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _renderer(renderer),
+ _jvm(jvm),
+ _javaRenderObj(javaRenderObj),
+ _bitmapWidth(0),
+ _bitmapHeight(0) {
+}
+
+AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
+ WEBRTC_TRACE(kTraceInfo,
+ kTraceVideoRenderer,
+ _id,
+ "AndroidSurfaceViewChannel dtor");
+ delete &_renderCritSect;
+ if(_jvm) {
+ // get the JNI env for this thread
+ bool isAttached = false;
+ JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+ // try to attach the thread and get the env
+ // Attach this thread to JVM
+ jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+ // Get the JNI env for this thread
+ if ((res < 0) || !env) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not attach thread to JVM (%d, %p)",
+ __FUNCTION__,
+ res,
+ env);
+ env=NULL;
+ }
+ else {
+ isAttached = true;
+ }
+ }
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+ env->DeleteGlobalRef(_javaBitmapObj);
+#else
+ env->DeleteGlobalRef(_javaByteBufferObj);
+#endif
+ if (isAttached) {
+ if (_jvm->DetachCurrentThread() < 0) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not detach thread from JVM",
+ __FUNCTION__);
+ }
+ }
+ }
+}
+
+WebRtc_Word32 AndroidSurfaceViewChannel::Init(
+ WebRtc_Word32 /*zOrder*/,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom) {
+
+ WEBRTC_TRACE(kTraceDebug,
+ kTraceVideoRenderer,
+ _id,
+ "%s: AndroidSurfaceViewChannel",
+ __FUNCTION__);
+ if (!_jvm) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Not a valid Java VM pointer",
+ __FUNCTION__);
+ return -1;
+ }
+
+ if( (top > 1 || top < 0) ||
+ (right > 1 || right < 0) ||
+ (bottom > 1 || bottom < 0) ||
+ (left > 1 || left < 0)) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "%s: Wrong coordinates", __FUNCTION__);
+ return -1;
+ }
+
+ // get the JNI env for this thread
+ bool isAttached = false;
+ JNIEnv* env = NULL;
+ if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+ // try to attach the thread and get the env
+ // Attach this thread to JVM
+ jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+ // Get the JNI env for this thread
+ if ((res < 0) || !env) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not attach thread to JVM (%d, %p)",
+ __FUNCTION__,
+ res,
+ env);
+ return -1;
+ }
+ isAttached = true;
+ }
+
+ jclass javaRenderClass =
+ env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+ if (!javaRenderClass) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not find ViESurfaceRenderer",
+ __FUNCTION__);
+ return -1;
+ }
+#ifdef ANDROID_NDK_8_OR_ABOVE
+  // get the method ID for CreateBitmap
+  _createBitmapCid =
+      env->GetMethodID(javaRenderClass,
+ "CreateBitmap",
+ "(II)Landroid/graphics/Bitmap;");
+ if (_createBitmapCid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get CreateBitmap ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+ // get the method ID for the DrawBitmap function
+  _drawBitmapCid = env->GetMethodID(javaRenderClass, "DrawBitmap", "()V");
+ if (_drawBitmapCid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get DrawBitmap ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+#else
+  // get the method ID for CreateByteBuffer
+ _createByteBufferCid =
+ env->GetMethodID(javaRenderClass,
+ "CreateByteBuffer",
+ "(II)Ljava/nio/ByteBuffer;");
+ if (_createByteBufferCid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get CreateByteBuffer ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+
+ // get the method ID for the DrawByteBuffer function
+ _drawByteBufferCid = env->GetMethodID(javaRenderClass,
+ "DrawByteBuffer",
+ "()V");
+ if (_drawByteBufferCid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get DrawByteBuffer ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+#endif
+
+ // get the method ID for the SetCoordinates function
+ _setCoordinatesCid = env->GetMethodID(javaRenderClass,
+ "SetCoordinates",
+ "(FFFF)V");
+ if (_setCoordinatesCid == NULL) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: could not get SetCoordinates ID",
+ __FUNCTION__);
+ return -1; /* exception thrown */
+ }
+
+ env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
+ left, top, right, bottom);
+
+ // Detach this thread if it was attached
+ if (isAttached) {
+ if (_jvm->DetachCurrentThread() < 0) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Could not detach thread from JVM",
+ __FUNCTION__);
+ }
+ }
+
+ WEBRTC_TRACE(kTraceDebug,
+ kTraceVideoRenderer,
+ _id,
+ "%s: AndroidSurfaceViewChannel done",
+ __FUNCTION__);
+ return 0;
+}
+
+
+WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(
+ const WebRtc_UWord32 /*streamId*/,
+ VideoFrame& videoFrame) {
+ // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+ _renderCritSect.Enter();
+ _bufferToRender.SwapFrame(videoFrame);
+ _renderCritSect.Leave();
+ _renderer.ReDraw();
+ return 0;
+}
+
+
+/* Implements AndroidStream.
+ * Calls the Java object and renders the buffer in _bufferToRender.
+ */
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
+ _renderCritSect.Enter();
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+ if (_bitmapWidth != _bufferToRender.Width() ||
+ _bitmapHeight != _bufferToRender.Height()) {
+ // Create the bitmap to write to
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
+ "%u", __FUNCTION__, _bufferToRender.Width(),
+ _bufferToRender.Height());
+ if (_javaBitmapObj) {
+ jniEnv->DeleteGlobalRef(_javaBitmapObj);
+ _javaBitmapObj = NULL;
+ }
+    jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
+                                                  _createBitmapCid,
+                                                  _bufferToRender.Width(),
+                                                  _bufferToRender.Height());
+ _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
+ if (!_javaBitmapObj) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+ "create Java Bitmap object reference", __FUNCTION__);
+ _renderCritSect.Leave();
+ return;
+ } else {
+ _bitmapWidth = _bufferToRender.Width();
+ _bitmapHeight = _bufferToRender.Height();
+ }
+ }
+ void* pixels;
+ if (_javaBitmapObj &&
+ AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
+ __FUNCTION__);
+ // Convert I420 straight into the Java bitmap.
+ int ret = ConvertI420ToRGB565((unsigned char*)_bufferToRender.Buffer(),
+ (unsigned char*) pixels,
+ _bitmapWidth, _bitmapHeight);
+ if (ret < 0) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: Color conversion failed.",
+ __FUNCTION__);
+ }
+
+ AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
+ // Draw the Surface.
+    jniEnv->CallVoidMethod(_javaRenderObj, _drawBitmapCid);
+
+ } else {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
+ "bitmap", __FUNCTION__);
+ }
+ _renderCritSect.Leave();
+
+#else
+ if (_bitmapWidth != _bufferToRender.Width() ||
+ _bitmapHeight != _bufferToRender.Height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d", __FUNCTION__,
+                 _bufferToRender.Width(), _bufferToRender.Height());
+ if (_javaByteBufferObj) {
+ jniEnv->DeleteGlobalRef(_javaByteBufferObj);
+ _javaByteBufferObj = NULL;
+ _directBuffer = NULL;
+ }
+ jobject javaByteBufferObj =
+ jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+ _bufferToRender.Width(),
+ _bufferToRender.Height());
+ _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
+ if (!_javaByteBufferObj) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+ "create Java ByteBuffer object reference", __FUNCTION__);
+ _renderCritSect.Leave();
+ return;
+ } else {
+ _directBuffer = static_cast<unsigned char*>
+ (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
+ _bitmapWidth = _bufferToRender.Width();
+ _bitmapHeight = _bufferToRender.Height();
+ }
+ }
+
+  if (_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    // Android requires a vertically flipped image compared to the standard
+    // conversion. This is done by passing a negative height.
+ const int conversionResult =
+      ConvertI420ToRGB565((unsigned char*)_bufferToRender.Buffer(),
+                          _directBuffer, _bitmapWidth, -_bitmapHeight);
+ if (conversionResult < 0) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+ " failed.", __FUNCTION__);
+ _renderCritSect.Leave();
+ return;
+ }
+ }
+ _renderCritSect.Leave();
+ // Draw the Surface
+ jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
+#endif
+}
+
+} // namespace webrtc
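The destructor and Init() above both repeat the GetEnv/AttachCurrentThread/
DetachCurrentThread dance. A minimal sketch of a scoped helper that could
encapsulate it; ScopedJniAttach is hypothetical, not part of WebRTC:

#include <jni.h>

// Hypothetical RAII wrapper: attaches the calling thread to the VM if
// needed and detaches it again on scope exit.
class ScopedJniAttach {
 public:
  explicit ScopedJniAttach(JavaVM* jvm)
      : jvm_(jvm), env_(NULL), attached_(false) {
    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_),
                     JNI_VERSION_1_4) != JNI_OK) {
      if (jvm_->AttachCurrentThread(&env_, NULL) == JNI_OK && env_) {
        attached_ = true;
      } else {
        env_ = NULL;  // Attach failed; env() returns NULL.
      }
    }
  }
  ~ScopedJniAttach() {
    if (attached_)
      jvm_->DetachCurrentThread();  // Only detach threads we attached.
  }
  JNIEnv* env() const { return env_; }
 private:
  JavaVM* jvm_;
  JNIEnv* env_;
  bool attached_;
};

With such a helper, the destructor body would reduce to
"ScopedJniAttach attach(_jvm); if (attach.env())
attach.env()->DeleteGlobalRef(_javaByteBufferObj);", and the early returns
in Init() could no longer leak an attached thread.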
diff --git a/modules/video_render/main/source/android/video_render_android_surface_view.h b/modules/video_render/main/source/android/video_render_android_surface_view.h
new file mode 100644
index 0000000..1355e83
--- /dev/null
+++ b/modules/video_render/main/source/android/video_render_android_surface_view.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+#include "video_render_android_impl.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidSurfaceViewChannel: public AndroidStream
+{
+public:
+ AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
+ JavaVM* jvm,
+ VideoRenderAndroid& renderer,
+ jobject javaRenderObj);
+ ~AndroidSurfaceViewChannel();
+
+ WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom);
+
+  // Implements VideoRenderCallback
+ virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+ VideoFrame& videoFrame);
+
+  // Implements AndroidStream
+ virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+ WebRtc_UWord32 _id;
+ CriticalSectionWrapper& _renderCritSect;
+
+ VideoFrame _bufferToRender;
+ VideoRenderAndroid& _renderer;
+ JavaVM* _jvm;
+ jobject _javaRenderObj;
+
+#ifdef ANDROID_NDK_8_OR_ABOVE
+ jclass _javaBitmapClass;
+ jmethodID _createBitmapCid;
+ jobject _javaBitmapObj;
+ jmethodID _drawBitmapCid;
+#else
+ jobject _javaByteBufferObj;
+ unsigned char* _directBuffer;
+ jmethodID _createByteBufferCid;
+ jmethodID _drawByteBufferCid;
+#endif
+ jmethodID _setCoordinatesCid;
+ unsigned int _bitmapWidth;
+ unsigned int _bitmapHeight;
+};
+
+class AndroidSurfaceViewRenderer: private VideoRenderAndroid
+{
+public:
+ AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
+ const VideoRenderType videoRenderType,
+ void* window,
+ const bool fullscreen);
+ ~AndroidSurfaceViewRenderer();
+ WebRtc_Word32 Init();
+ virtual AndroidStream* CreateAndroidRenderChannel(
+ WebRtc_Word32 streamId,
+ WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom,
+ VideoRenderAndroid& renderer);
+ private:
+ jobject _javaRenderObj;
+ jclass _javaRenderClass;
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
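DeliverFrame() above relies on a sign convention: a negative height asks the
I420-to-RGB565 converter to write rows bottom-up, which is what the Android
surface expects. A minimal sketch of that convention; the
ConvertI420ToRGB565 signature is inferred from the calls above, not quoted
from the library header:

// Inferred signature; see the calls in DeliverFrame().
int ConvertI420ToRGB565(unsigned char* src_i420, unsigned char* dst_rgb565,
                        int width, int height);

void RenderExamples(unsigned char* i420, unsigned char* rgb565,
                    int width, int height) {
  // Standard top-down conversion.
  ConvertI420ToRGB565(i420, rgb565, width, height);
  // Vertically flipped (bottom-up) conversion for the Android surface.
  ConvertI420ToRGB565(i420, rgb565, width, -height);
}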
diff --git a/modules/video_render/main/source/Android/video_render_opengles20.cc b/modules/video_render/main/source/android/video_render_opengles20.cc
similarity index 93%
rename from modules/video_render/main/source/Android/video_render_opengles20.cc
rename to modules/video_render/main/source/android/video_render_opengles20.cc
index 8f4e5c5..f207a16 100644
--- a/modules/video_render/main/source/Android/video_render_opengles20.cc
+++ b/modules/video_render/main/source/android/video_render_opengles20.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -139,7 +139,8 @@
}
// set the vertices array in the shader
- // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+ // _vertices contains 4 vertices with 5 coordinates.
+ // 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, 5
* sizeof(GLfloat), _vertices);
checkGlError("glVertexAttribPointer aPosition");
@@ -148,7 +149,8 @@
checkGlError("glEnableVertexAttribArray positionHandle");
// set the texture coordinate array in the shader
- // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+ // _vertices contains 4 vertices with 5 coordinates.
+ // 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
* sizeof(GLfloat), &_vertices[3]);
checkGlError("glVertexAttribPointer maTextureHandle");
@@ -178,13 +180,14 @@
}
/*
* SetCoordinates
- * Sets the coordinates where the stream shall be rendered. Values must be between 0 and 1.
+ * Sets the coordinates where the stream shall be rendered.
+ * Values must be between 0 and 1.
*/
WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
+ const float left,
+ const float top,
+ const float right,
+ const float bottom)
{
if ((top > 1 || top < 0) || (right > 1 || right < 0) || (bottom > 1
|| bottom < 0) || (left > 1 || left < 0))
@@ -344,16 +347,14 @@
name, v);
}
-void VideoRenderOpenGles20::checkGlError(const char* op)
-{
+void VideoRenderOpenGles20::checkGlError(const char* op) {
#ifdef ANDROID_LOG
- for (GLint error = glGetError(); error; error
- = glGetError())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "after %s() glError (0x%x)\n", op, error);
- }
+ for (GLint error = glGetError(); error; error = glGetError()) {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+ "after %s() glError (0x%x)\n", op, error);
+ }
#else
- return;
+ return;
#endif
}
@@ -443,4 +444,3 @@
}
} //namespace webrtc
-
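For reference, the interleaved layout the rewrapped comments above describe
could look like this; the values are an illustrative full-viewport quad
under that layout, not WebRTC's actual vertex table:

#include <GLES2/gl2.h>

// 4 vertices x 5 floats: x, y, z position followed by u, v texture coords.
static const GLfloat kQuad[4 * 5] = {
  // x      y     z     u     v
  -1.0f,  1.0f, 0.0f, 0.0f, 0.0f,  // top left
  -1.0f, -1.0f, 0.0f, 0.0f, 1.0f,  // bottom left
   1.0f,  1.0f, 0.0f, 1.0f, 0.0f,  // top right
   1.0f, -1.0f, 0.0f, 1.0f, 1.0f,  // bottom right
};
// Stride is 5 * sizeof(GLfloat); positions start at kQuad and texture
// coordinates at &kQuad[3], matching the two glVertexAttribPointer calls.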
diff --git a/modules/video_render/main/source/Android/video_render_opengles20.h b/modules/video_render/main/source/android/video_render_opengles20.h
similarity index 91%
rename from modules/video_render/main/source/Android/video_render_opengles20.h
rename to modules/video_render/main/source/android/video_render_opengles20.h
index 379b1e7..eee4089 100644
--- a/modules/video_render/main/source/Android/video_render_opengles20.h
+++ b/modules/video_render/main/source/android/video_render_opengles20.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -37,7 +37,8 @@
void printGLString(const char *name, GLenum s);
void checkGlError(const char* op);
GLuint loadShader(GLenum shaderType, const char* pSource);
- GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
+ GLuint createProgram(const char* pVertexSource,
+ const char* pFragmentSource);
void SetupTextures(const VideoFrame& frameToRender);
void UpdateTextures(const VideoFrame& frameToRender);
diff --git a/modules/video_render/main/source/video_render.gypi b/modules/video_render/main/source/video_render.gypi
index 414f99a..6fe7c88 100644
--- a/modules/video_render/main/source/video_render.gypi
+++ b/modules/video_render/main/source/video_render.gypi
@@ -38,10 +38,10 @@
'video_render_impl.h',
'i_video_render.h',
# Android
- 'Android/video_render_android_impl.h',
- 'Android/video_render_android_native_opengl2.h',
- 'Android/video_render_android_surface_view.h',
- 'Android/video_render_opengles20.h',
+ 'android/video_render_android_impl.h',
+ 'android/video_render_android_native_opengl2.h',
+ 'android/video_render_android_surface_view.h',
+ 'android/video_render_opengles20.h',
# Linux
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
@@ -67,10 +67,10 @@
'video_render_impl.cc',
# PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
# Android
- 'Android/video_render_android_impl.cc',
- 'Android/video_render_android_native_opengl2.cc',
- 'Android/video_render_android_surface_view.cc',
- 'Android/video_render_opengles20.cc',
+ 'android/video_render_android_impl.cc',
+ 'android/video_render_android_native_opengl2.cc',
+ 'android/video_render_android_surface_view.cc',
+ 'android/video_render_opengles20.cc',
# Linux
'linux/video_render_linux_impl.cc',
'linux/video_x11_channel.cc',
@@ -100,14 +100,14 @@
['OS!="android" or include_internal_video_render==0', {
'sources!': [
# Android
- 'Android/video_render_android_impl.h',
- 'Android/video_render_android_native_opengl2.h',
- 'Android/video_render_android_surface_view.h',
- 'Android/video_render_opengles20.h',
- 'Android/video_render_android_impl.cc',
- 'Android/video_render_android_native_opengl2.cc',
- 'Android/video_render_android_surface_view.cc',
- 'Android/video_render_opengles20.cc',
+ 'android/video_render_android_impl.h',
+ 'android/video_render_android_native_opengl2.h',
+ 'android/video_render_android_surface_view.h',
+ 'android/video_render_opengles20.h',
+ 'android/video_render_android_impl.cc',
+ 'android/video_render_android_native_opengl2.cc',
+ 'android/video_render_android_surface_view.cc',
+ 'android/video_render_opengles20.cc',
],
}],
['OS!="linux" or include_internal_video_render==0', {
diff --git a/modules/video_render/main/source/video_render_impl.cc b/modules/video_render/main/source/video_render_impl.cc
index 1012244..2ce0b91 100644
--- a/modules/video_render/main/source/video_render_impl.cc
+++ b/modules/video_render/main/source/video_render_impl.cc
@@ -40,9 +40,9 @@
#endif
#elif defined(WEBRTC_ANDROID)
-#include "Android/video_render_android_impl.h"
-#include "Android/video_render_android_surface_view.h"
-#include "Android/video_render_android_native_opengl2.h"
+#include "android/video_render_android_impl.h"
+#include "android/video_render_android_surface_view.h"
+#include "android/video_render_android_native_opengl2.h"
#define STANDARD_RENDERING kRenderAndroid
#elif defined(WEBRTC_LINUX)
@@ -978,4 +978,3 @@
}
} //namespace webrtc
-
diff --git a/system_wrappers/interface/tick_util.h b/system_wrappers/interface/tick_util.h
index e78e53d..0cd85d0 100644
--- a/system_wrappers/interface/tick_util.h
+++ b/system_wrappers/interface/tick_util.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -19,6 +19,9 @@
#include <mmsystem.h>
#elif WEBRTC_LINUX
#include <ctime>
+#elif WEBRTC_MAC
+#include <mach/mach_time.h>
+#include <string.h>
#else
#include <sys/time.h>
#include <time.h>
@@ -133,6 +136,7 @@
{
TickTime result;
#if _WIN32
+ // TODO(wu): Remove QueryPerformanceCounter implementation.
#ifdef USE_QUERY_PERFORMANCE_COUNTER
// QueryPerformanceCounter returns the value from the TSC which is
// incremented at the CPU frequency. The algorithm used requires
@@ -164,12 +168,27 @@
#endif
#elif defined(WEBRTC_LINUX)
struct timespec ts;
+ // TODO(wu): Remove CLOCK_REALTIME implementation.
#ifdef WEBRTC_CLOCK_TYPE_REALTIME
clock_gettime(CLOCK_REALTIME, &ts);
#else
clock_gettime(CLOCK_MONOTONIC, &ts);
#endif
result._ticks = 1000000000LL * static_cast<WebRtc_Word64>(ts.tv_sec) + static_cast<WebRtc_Word64>(ts.tv_nsec);
+#elif defined(WEBRTC_MAC)
+ static mach_timebase_info_data_t timebase;
+ if (timebase.denom == 0) {
+ // Get the timebase if this is the first time we run.
+ // Recommended by Apple's QA1398.
+ kern_return_t retval = mach_timebase_info(&timebase);
+ if (retval != KERN_SUCCESS) {
+ // TODO(wu): Implement CHECK similar to chrome for all the platforms.
+ // Then replace this with a CHECK(retval == KERN_SUCCESS);
+ asm("int3");
+ }
+ }
+ // Use timebase to convert absolute time tick units into nanoseconds.
+ result._ticks = mach_absolute_time() * timebase.numer / timebase.denom;
#else
struct timeval tv;
gettimeofday(&tv, NULL);
@@ -189,7 +208,7 @@
#else
return now._ticks;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
return now._ticks / 1000000LL;
#else
return now._ticks / 1000LL;
@@ -208,7 +227,7 @@
#else
return now._ticks *1000LL;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
return now._ticks / 1000LL;
#else
return now._ticks;
@@ -230,7 +249,7 @@
#else
return ms;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
return ms * 1000000LL;
#else
return ms * 1000LL;
@@ -247,7 +266,7 @@
#else
return ticks;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
return ticks / 1000000LL;
#else
return ticks / 1000LL;
@@ -280,7 +299,7 @@
// _interval is in ms
return _interval;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
// _interval is in ns
return _interval / 1000000;
#else
@@ -300,7 +319,7 @@
// _interval is in ms
return _interval *1000LL;
#endif
-#elif WEBRTC_LINUX
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
// _interval is in ns
return _interval / 1000;
#else
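A self-contained sketch of the Mac path added to tick_util.h above:
mach_absolute_time() ticks are scaled to nanoseconds with the cached
timebase, then divided down exactly as MillisecondTimestamp() now does for
WEBRTC_MAC.

#include <mach/mach_time.h>
#include <stdint.h>

int64_t NowMilliseconds() {
  static mach_timebase_info_data_t timebase;
  if (timebase.denom == 0) {
    // Query the timebase once, as recommended by Apple's QA1398.
    mach_timebase_info(&timebase);
  }
  int64_t ticks_ns =
      mach_absolute_time() * timebase.numer / timebase.denom;
  return ticks_ns / 1000000;  // ns -> ms, mirroring MillisecondTimestamp().
}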
diff --git a/video_engine/include/vie_rtp_rtcp.h b/video_engine/include/vie_rtp_rtcp.h
index f618fae..bbaeae3 100644
--- a/video_engine/include/vie_rtp_rtcp.h
+++ b/video_engine/include/vie_rtp_rtcp.h
@@ -15,7 +15,6 @@
// - Obtaining RTCP data from incoming RTCP sender reports.
// - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
// - Forward Error Correction (FEC).
-// - RTP Keep-alive for maintaining the NAT mappings associated to RTP flows.
// - Writing RTP and RTCP packets to binary files for off-line analysis of the
// call quality.
// - Inserting extra RTP packets into active audio stream.
@@ -257,23 +256,6 @@
const int video_channel,
unsigned int* estimated_bandwidth) const = 0;
- // This function enables or disables an RTP keep-alive mechanism which can
- // be used to maintain an existing Network Address Translator (NAT) mapping
- // while regular RTP is no longer transmitted.
- virtual int SetRTPKeepAliveStatus(
- const int video_channel,
- bool enable,
- const int unknown_payload_type,
- const unsigned int delta_transmit_time_seconds =
- KDefaultDeltaTransmitTimeSeconds) = 0;
-
- // This function gets the RTP keep-alive status.
- virtual int GetRTPKeepAliveStatus(
- const int video_channel,
- bool& enabled,
- int& unkown_payload_type,
- unsigned int& delta_transmit_time_seconds) const = 0;
-
// This function enables capturing of RTP packets to a binary file on a
// specific channel and for a given direction. The file can later be
// replayed using e.g. RTP Tools rtpplay since the binary file format is
diff --git a/video_engine/main/test/android_test/Android.mk b/video_engine/main/test/android_test/Android.mk
index 3ee8627..4f65534 100644
--- a/video_engine/main/test/android_test/Android.mk
+++ b/video_engine/main/test/android_test/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -10,26 +10,18 @@
include $(CLEAR_VARS)
-MY_CAPTURE_FOLDER := ../../../../modules/video_capture/main/source
-MY_CAPTURE_JAVA_FOLDER := Android/java/org/webrtc/videoengine
-MY_CAPTURE_PATH := $(MY_CAPTURE_FOLDER)/$(MY_CAPTURE_JAVA_FOLDER)
-
-MY_RENDER_FOLDER := ../../../../modules/video_render/main/source
-MY_RENDER_JAVA_FOLDER := Android/java/org/webrtc/videoengine
-MY_RENDER_PATH := $(MY_RENDER_FOLDER)/$(MY_RENDER_JAVA_FOLDER)
-
LOCAL_MODULE_TAGS := tests
LOCAL_SRC_FILES := \
src/org/webrtc/videoengineapp/ViEAndroidDemo.java \
src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java \
src/org/webrtc/videoengineapp/IViEAndroidCallback.java \
- $(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
- $(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
- $(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
- $(MY_RENDER_PATH)/ViEAndroidGLES20.java \
- $(MY_RENDER_PATH)/ViERenderer.java \
- $(MY_RENDER_PATH)/ViESurfaceRenderer.java
+ src/org/webrtc/videoengine/CaptureCapabilityAndroid.java \
+ src/org/webrtc/videoengine/VideoCaptureAndroid.java \
+ src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java \
+ src/org/webrtc/videoengine/ViEAndroidGLES20.java \
+ src/org/webrtc/videoengine/ViERenderer.java \
+ src/org/webrtc/videoengine/ViESurfaceRenderer.java
LOCAL_PACKAGE_NAME := webrtc-video-demo
LOCAL_CERTIFICATE := platform
diff --git a/video_engine/main/test/android_test/AndroidManifest.xml b/video_engine/main/test/android_test/AndroidManifest.xml
index 0714aed..08a9c4d 100644
--- a/video_engine/main/test/android_test/AndroidManifest.xml
+++ b/video_engine/main/test/android_test/AndroidManifest.xml
@@ -16,7 +16,7 @@
</intent-filter>
</activity>
</application>
- <uses-sdk android:minSdkVersion="7" />
+ <uses-sdk android:minSdkVersion="9" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
diff --git a/video_engine/main/test/android_test/build.xml b/video_engine/main/test/android_test/build.xml
new file mode 100644
index 0000000..9afa947
--- /dev/null
+++ b/video_engine/main/test/android_test/build.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="ViEAndroidDemo" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through an env var"
+ unless="sdk.dir"
+ />
+
+
+<!-- extension targets. Uncomment the ones where you want to do custom work
+ in between standard targets -->
+<!--
+ <target name="-pre-build">
+ </target>
+ <target name="-pre-compile">
+ </target>
+
+ /* This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir} */
+ <target name="-post-compile">
+ </target>
+-->
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java b/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java
deleted file mode 100644
index c35047b..0000000
--- a/video_engine/main/test/android_test/gen/org/webrtc/androidapp/R.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/* AUTO-GENERATED FILE. DO NOT MODIFY.
- *
- * This class was automatically generated by the
- * aapt tool from the resource data it found. It
- * should not be modified by hand.
- */
-
-package org.webrtc.videoengineapp;
-
-public final class R {
- public static final class array {
- public static final int codecSize=0x7f040001;
- public static final int codectype=0x7f040000;
- public static final int voiceCodecType=0x7f040002;
- }
- public static final class attr {
- }
- public static final class drawable {
- public static final int bar=0x7f020000;
- public static final int bg=0x7f020001;
- public static final int logo=0x7f020002;
- public static final int robot=0x7f020003;
- public static final int video=0x7f020004;
- }
- public static final class id {
- public static final int LinearLayout01=0x7f060010;
- public static final int LinearLayout02=0x7f060006;
- public static final int TextView01=0x7f060005;
- public static final int TextView02=0x7f06000b;
- public static final int TextView03=0x7f060004;
- public static final int btStartBoth=0x7f060013;
- public static final int btStartListen=0x7f060011;
- public static final int btStartSend=0x7f060012;
- public static final int cbLoopback=0x7f06000e;
- public static final int cbVoice=0x7f06000d;
- public static final int etRemoteIp=0x7f06000c;
- public static final int ivPreview=0x7f060014;
- public static final int ivTopBar=0x7f060002;
- public static final int rlSurfaces=0x7f060000;
- public static final int spCodecSize=0x7f06000a;
- public static final int spCodecType=0x7f060007;
- public static final int spVoiceCodecType=0x7f060008;
- public static final int svLocal=0x7f060001;
- public static final int tvCodecSize=0x7f060009;
- public static final int tvLocalIp=0x7f06000f;
- public static final int tvTitle=0x7f060003;
- }
- public static final class layout {
- public static final int both=0x7f030000;
- public static final int main=0x7f030001;
- public static final int send=0x7f030002;
- }
- public static final class string {
- public static final int app_name=0x7f050001;
- public static final int codecSize=0x7f050007;
- public static final int codecType=0x7f050006;
- public static final int codectype_prompt=0x7f050004;
- public static final int demoTitle=0x7f050005;
- public static final int enableVoice=0x7f05000d;
- public static final int error=0x7f050002;
- public static final int errorCamera=0x7f050003;
- public static final int loopback=0x7f050009;
- public static final int remoteIp=0x7f050008;
- public static final int startBoth=0x7f05000c;
- public static final int startListen=0x7f05000a;
- public static final int startSend=0x7f05000b;
- }
-}
diff --git a/video_engine/main/test/android_test/jni/Android.mk b/video_engine/main/test/android_test/jni/Android.mk
index 4121cc6..35af493 100644
--- a/video_engine/main/test/android_test/jni/Android.mk
+++ b/video_engine/main/test/android_test/jni/Android.mk
@@ -1,40 +1,81 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH := $(call my-dir)
-
-INTERFACES_PATH := $(LOCAL_PATH)/../../../../../../../build/interface
-LIBS_PATH := $(LOCAL_PATH)/../../../../../../../build/libraries
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE_TAGS := tests
-LOCAL_MODULE := libwebrtc-video-demo-jni
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := vie_android_java_api.cc
-LOCAL_CFLAGS := \
- '-DWEBRTC_TARGET_PC' \
- '-DWEBRTC_ANDROID'
-
-LOCAL_C_INCLUDES := \
- external/gtest/include \
- $(LOCAL_PATH)/../../../../.. \
- $(LOCAL_PATH)/../../../../include \
- $(LOCAL_PATH)/../../../../../voice_engine/main/interface
-
-LOCAL_PRELINK_MODULE := false
-
-LOCAL_SHARED_LIBRARIES := \
- libutils \
- libstlport \
- libandroid \
- libwebrtc \
- libGLESv2
-LOCAL_LDLIBS := $(LIBS_PATH)/VideoEngine_android_gcc.a -llog -lgcc
-
-include $(BUILD_SHARED_LIBRARY)
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+
+MY_LIBS_PATH := $(LOCAL_PATH)/../../../../../../out/Release/obj.target
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+LOCAL_MODULE := libwebrtc-video-demo-jni
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := vie_android_java_api.cc
+LOCAL_CFLAGS := \
+ '-DWEBRTC_TARGET_PC' \
+ '-DWEBRTC_ANDROID'
+
+LOCAL_C_INCLUDES := \
+ external/gtest/include \
+ $(LOCAL_PATH)/../../../../.. \
+ $(LOCAL_PATH)/../../../../include \
+ $(LOCAL_PATH)/../../../../../voice_engine/main/interface
+
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_SHARED_LIBRARIES := \
+ libstlport_shared
+
+LOCAL_LDLIBS := \
+ -llog \
+ -lgcc \
+ -lGLESv2 \
+ -lOpenSLES \
+ $(MY_LIBS_PATH)/src/voice_engine/libvoice_engine_core.a \
+ $(MY_LIBS_PATH)/src/video_engine/libvideo_engine_core.a \
+ $(MY_LIBS_PATH)/src/modules/libvideo_processing.a \
+ $(MY_LIBS_PATH)/src/modules/libwebrtc_video_coding.a \
+ $(MY_LIBS_PATH)/src/modules/libvideo_render_module.a \
+ $(MY_LIBS_PATH)/src/modules/libvideo_capture_module.a \
+ $(MY_LIBS_PATH)/src/modules/libaudio_coding_module.a \
+ $(MY_LIBS_PATH)/src/modules/libaudio_processing.a \
+ $(MY_LIBS_PATH)/src/modules/libPCM16B.a \
+ $(MY_LIBS_PATH)/src/modules/libCNG.a \
+ $(MY_LIBS_PATH)/src/modules/libNetEq.a \
+ $(MY_LIBS_PATH)/src/modules/libG722.a \
+ $(MY_LIBS_PATH)/src/modules/libiSAC.a \
+ $(MY_LIBS_PATH)/src/modules/libG711.a \
+ $(MY_LIBS_PATH)/src/modules/libiLBC.a \
+ $(MY_LIBS_PATH)/src/modules/libiSACFix.a \
+ $(MY_LIBS_PATH)/src/common_audio/libvad.a \
+ $(MY_LIBS_PATH)/src/modules/libns.a \
+ $(MY_LIBS_PATH)/src/modules/libagc.a \
+ $(MY_LIBS_PATH)/src/modules/libaec.a \
+ $(MY_LIBS_PATH)/src/modules/libaecm.a \
+ $(MY_LIBS_PATH)/src/common_audio/libresampler.a \
+ $(MY_LIBS_PATH)/src/common_audio/libsignal_processing.a \
+ $(MY_LIBS_PATH)/src/modules/libapm_util.a \
+ $(MY_LIBS_PATH)/src/system_wrappers/source/libsystem_wrappers.a \
+ $(MY_LIBS_PATH)/src/modules/libaudio_device.a \
+ $(MY_LIBS_PATH)/src/modules/librtp_rtcp.a \
+ $(MY_LIBS_PATH)/src/modules/libmedia_file.a \
+ $(MY_LIBS_PATH)/src/modules/libudp_transport.a \
+ $(MY_LIBS_PATH)/src/modules/libwebrtc_utility.a \
+ $(MY_LIBS_PATH)/src/modules/libaudio_conference_mixer.a \
+ $(MY_LIBS_PATH)/src/common_video/libwebrtc_libyuv.a \
+ $(MY_LIBS_PATH)/third_party/libyuv/libyuv.a \
+ $(MY_LIBS_PATH)/src/modules/libwebrtc_i420.a \
+ $(MY_LIBS_PATH)/src/modules/libwebrtc_vp8.a \
+ $(MY_LIBS_PATH)/src/common_video/libwebrtc_jpeg.a \
+ $(MY_LIBS_PATH)/third_party/libjpeg_turbo/libjpeg_turbo.a \
+ $(MY_LIBS_PATH)/src/modules/libaudioproc_debug_proto.a \
+ $(MY_LIBS_PATH)/third_party/protobuf/libprotobuf_lite.a \
+ $(MY_LIBS_PATH)/third_party/libvpx/libvpx.a
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/video_engine/main/test/android_test/jni/Application.mk b/video_engine/main/test/android_test/jni/Application.mk
new file mode 100644
index 0000000..647560e
--- /dev/null
+++ b/video_engine/main/test/android_test/jni/Application.mk
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Build ARMv7-A machine code only (other ABIs are listed but commented out).
+APP_ABI := armeabi-v7a #armeabi armeabi-v7a x86
+APP_STL := stlport_static
diff --git a/video_engine/main/test/android_test/project.properties b/video_engine/main/test/android_test/project.properties
new file mode 100644
index 0000000..4d7ea1e
--- /dev/null
+++ b/video_engine/main/test/android_test/project.properties
@@ -0,0 +1,13 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system use,
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+
+# Indicates whether an apk should be generated for each density.
+split.density=false
+# Project target.
+target=android-9
diff --git a/video_engine/test/auto_test/automated/two_windows_fixture.cc b/video_engine/test/auto_test/automated/two_windows_fixture.cc
index d181aa5..a4a551d 100644
--- a/video_engine/test/auto_test/automated/two_windows_fixture.cc
+++ b/video_engine/test/auto_test/automated/two_windows_fixture.cc
@@ -10,7 +10,7 @@
#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
-#include "video_engine/test/auto_test/helpers/vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
void TwoWindowsFixture::SetUpTestCase() {
diff --git a/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc b/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
index f8740be..75ca8f6 100644
--- a/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
+++ b/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
@@ -13,14 +13,14 @@
#include "gtest/gtest.h"
#include "gflags/gflags.h"
#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
-#include "video_engine/test/auto_test/helpers/bit_flip_encryption.h"
-#include "video_engine/test/auto_test/helpers/random_encryption.h"
-#include "video_engine/test/auto_test/helpers/vie_window_creator.h"
-#include "video_engine/test/auto_test/interface/tb_capture_device.h"
-#include "video_engine/test/auto_test/interface/tb_interfaces.h"
-#include "video_engine/test/auto_test/interface/tb_video_channel.h"
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/bit_flip_encryption.h"
+#include "video_engine/test/libvietest/include/random_encryption.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
namespace {
diff --git a/video_engine/test/auto_test/automated/vie_video_verification_test.cc b/video_engine/test/auto_test/automated/vie_video_verification_test.cc
index 861af8a..074ae7c 100644
--- a/video_engine/test/auto_test/automated/vie_video_verification_test.cc
+++ b/video_engine/test/auto_test/automated/vie_video_verification_test.cc
@@ -14,10 +14,10 @@
#include "gtest/gtest.h"
#include "testsupport/fileutils.h"
#include "testsupport/metrics/video_metrics.h"
-#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
#include "video_engine/test/auto_test/interface/vie_autotest.h"
#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
namespace {
diff --git a/video_engine/test/auto_test/helpers/vie_window_creator.h b/video_engine/test/auto_test/interface/vie_window_creator.h
similarity index 94%
rename from video_engine/test/auto_test/helpers/vie_window_creator.h
rename to video_engine/test/auto_test/interface/vie_window_creator.h
index 25c23a3..c13a888 100644
--- a/video_engine/test/auto_test/helpers/vie_window_creator.h
+++ b/video_engine/test/auto_test/interface/vie_window_creator.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/video_engine/test/auto_test/primitives/framedrop_primitives.cc b/video_engine/test/auto_test/primitives/framedrop_primitives.cc
index 6767cc6..fc7d87b 100644
--- a/video_engine/test/auto_test/primitives/framedrop_primitives.cc
+++ b/video_engine/test/auto_test/primitives/framedrop_primitives.cc
@@ -16,13 +16,13 @@
#include "testsupport/fileutils.h"
#include "testsupport/frame_reader.h"
#include "testsupport/frame_writer.h"
-#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
-#include "video_engine/test/auto_test/interface/tb_interfaces.h"
-#include "video_engine/test/auto_test/interface/tb_external_transport.h"
#include "video_engine/test/auto_test/interface/vie_autotest.h"
#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
// Tracks which frames are created on the local side and reports them to the
// FrameDropDetector class.
diff --git a/video_engine/test/auto_test/primitives/framedrop_primitives.h b/video_engine/test/auto_test/primitives/framedrop_primitives.h
index 80575c3..cf3c1de 100644
--- a/video_engine/test/auto_test/primitives/framedrop_primitives.h
+++ b/video_engine/test/auto_test/primitives/framedrop_primitives.h
@@ -17,7 +17,7 @@
#include "video_engine/include/vie_codec.h"
#include "video_engine/include/vie_image_process.h"
#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
-#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
class FrameDropDetector;
class TbInterfaces;
diff --git a/video_engine/test/auto_test/source/vie_autotest.cc b/video_engine/test/auto_test/source/vie_autotest.cc
index 8281ea2..95337b2 100644
--- a/video_engine/test/auto_test/source/vie_autotest.cc
+++ b/video_engine/test/auto_test/source/vie_autotest.cc
@@ -12,29 +12,25 @@
// vie_autotest.cc
//
-#include "vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
#include <stdio.h>
#include "engine_configurations.h"
-#include "general_primitives.h"
-#include "tb_interfaces.h"
-#include "tb_video_channel.h"
-#include "tb_capture_device.h"
+#include "modules/video_render/main/interface/video_render.h"
#include "testsupport/fileutils.h"
-#include "video_render.h"
-#include "vie_autotest_defines.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
// ViETest implementation
FILE* ViETest::log_file_ = NULL;
char* ViETest::log_str_ = NULL;
std::string ViETest::GetResultOutputPath() {
-#ifdef WEBRTC_ANDROID
- return "/sdcard/";
-#else
- return webrtc::test::OutputPath();
-#endif
+ return webrtc::test::OutputPath();
}
// ViEAutoTest implementation
diff --git a/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc b/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
index 10c1304..c5f9365 100644
--- a/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
+++ b/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
@@ -282,34 +282,10 @@
EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, false));
- //
- // Keepalive
- //
- ViETest::Log("Testing RTP keep alive...\n");
- EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
-
+ // Test to set SSRC
myTransport.SetPacketLoss(0);
myTransport.ClearStats();
- const char keepAlivePT = 109;
- unsigned int deltaTimeSeconds = 2;
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, keepAlivePT, deltaTimeSeconds));
-
- AutoTestSleep(KAutoTestSleepTimeMs);
-
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, false, keepAlivePT, deltaTimeSeconds));
-
- WebRtc_Word32 numRtpPackets = 0;
- WebRtc_Word32 numDroppedPackets = 0;
- WebRtc_Word32 numRtcpPackets = 0;
- myTransport.GetStats(numRtpPackets, numDroppedPackets, numRtcpPackets);
- WebRtc_Word32 expectedPackets = KAutoTestSleepTimeMs / (1000 *
- static_cast<WebRtc_Word32>(deltaTimeSeconds));
- EXPECT_EQ(expectedPackets, numRtpPackets);
-
- // Test to set SSRC
unsigned int setSSRC = 0x01234567;
ViETest::Log("Set SSRC %u", setSSRC);
EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, setSSRC));
@@ -658,57 +634,13 @@
// packet.
//
- // RTP Keepalive
- //
- {
- int setPT = 123;
- unsigned int setDeltaTime = 10;
- bool enabled = false;
- int getPT = 0;
- unsigned int getDeltaTime = 0;
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, 119));
- EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, setPT, setDeltaTime));
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, false, setPT, setDeltaTime));
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, setPT, setDeltaTime));
- EXPECT_EQ(0, ViE.rtp_rtcp->GetRTPKeepAliveStatus(
- tbChannel.videoChannel, enabled, getPT, getDeltaTime));
-
- EXPECT_TRUE(enabled);
- EXPECT_EQ(setPT, getPT);
- EXPECT_EQ(setDeltaTime, getDeltaTime);
-
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, false, setPT, setDeltaTime));
-
- EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
-
- EXPECT_EQ(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, setPT, setDeltaTime));
-
- EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, true, setPT, setDeltaTime));
-
- tbChannel.StopSend();
- EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, enabled, getPT, 0));
- EXPECT_NE(0, ViE.rtp_rtcp->SetRTPKeepAliveStatus(
- tbChannel.videoChannel, enabled, getPT, 61));
- }
- //
// RTP Dump
//
{
-#ifdef WEBRTC_ANDROID
- const char* dumpName = "/sdcard/DumpFileName.rtp";
-#else
std::string output_file = webrtc::test::OutputPath() +
"DumpFileName.rtp";
const char* dumpName = output_file.c_str();
-#endif
+
EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
tbChannel.videoChannel, dumpName, webrtc::kRtpIncoming));
EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
diff --git a/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc b/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
index 023b917..4b9d315 100644
--- a/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
+++ b/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
@@ -10,14 +10,14 @@
#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
-#include "video_engine/test/auto_test/interface/tb_interfaces.h"
#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
-#include "video_engine/test/auto_test/helpers/vie_fake_camera.h"
-#include "video_engine/test/auto_test/helpers/vie_to_file_renderer.h"
#include "video_engine/test/auto_test/primitives/base_primitives.h"
#include "video_engine/test/auto_test/primitives/codec_primitives.h"
#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/vie_fake_camera.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
bool ViEFileBasedComparisonTests::TestCallSetup(
const std::string& i420_video_file,
diff --git a/video_engine/test/auto_test/helpers/vie_window_creator.cc b/video_engine/test/auto_test/source/vie_window_creator.cc
similarity index 77%
rename from video_engine/test/auto_test/helpers/vie_window_creator.cc
rename to video_engine/test/auto_test/source/vie_window_creator.cc
index d3b44a9..0ccd121 100644
--- a/video_engine/test/auto_test/helpers/vie_window_creator.cc
+++ b/video_engine/test/auto_test/source/vie_window_creator.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
-#include "vie_autotest_main.h"
-#include "vie_codec.h"
-#include "voe_codec.h"
-#include "vie_window_manager_factory.h"
-#include "vie_autotest_window_manager_interface.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_main.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+#include "video_engine/test/auto_test/interface/vie_window_manager_factory.h"
+#include "voice_engine/main/interface/voe_codec.h"
#if defined(WIN32)
#include <tchar.h>
diff --git a/video_engine/test/auto_test/vie_auto_test.gypi b/video_engine/test/auto_test/vie_auto_test.gypi
index 2ba10e8..675a864 100644
--- a/video_engine/test/auto_test/vie_auto_test.gypi
+++ b/video_engine/test/auto_test/vie_auto_test.gypi
@@ -21,6 +21,7 @@
'<(webrtc_root)/../test/metrics.gyp:metrics',
'<(webrtc_root)/../test/test.gyp:test_support',
'video_engine_core',
+ 'libvietest',
],
'include_dirs': [
'interface/',
@@ -32,11 +33,6 @@
'../../../common_video/interface',
],
'sources': [
- 'interface/tb_capture_device.h',
- 'interface/tb_external_transport.h',
- 'interface/tb_I420_codec.h',
- 'interface/tb_interfaces.h',
- 'interface/tb_video_channel.h',
'interface/vie_autotest.h',
'interface/vie_autotest_defines.h',
'interface/vie_autotest_linux.h',
@@ -47,20 +43,7 @@
'interface/vie_autotest_windows.h',
'interface/vie_file_based_comparison_tests.h',
'interface/vie_window_manager_factory.h',
-
- # Helper classes
- 'helpers/bit_flip_encryption.cc',
- 'helpers/bit_flip_encryption.h',
- 'helpers/random_encryption.cc',
- 'helpers/random_encryption.h',
- 'helpers/vie_fake_camera.cc',
- 'helpers/vie_fake_camera.h',
- 'helpers/vie_file_capture_device.cc',
- 'helpers/vie_file_capture_device.h',
- 'helpers/vie_to_file_renderer.cc',
- 'helpers/vie_to_file_renderer.h',
- 'helpers/vie_window_creator.cc',
- 'helpers/vie_window_creator.h',
+ 'interface/vie_window_creator.h',
# New, fully automated tests
'automated/legacy_fixture.cc',
@@ -83,11 +66,6 @@
'primitives/general_primitives.h',
# Platform independent
- 'source/tb_capture_device.cc',
- 'source/tb_external_transport.cc',
- 'source/tb_I420_codec.cc',
- 'source/tb_interfaces.cc',
- 'source/tb_video_channel.cc',
'source/vie_autotest.cc',
'source/vie_autotest_base.cc',
'source/vie_autotest_capture.cc',
@@ -103,6 +81,7 @@
'source/vie_autotest_custom_call.cc',
'source/vie_autotest_simulcast.cc',
'source/vie_file_based_comparison_tests.cc',
+ 'source/vie_window_creator.cc',
# Platform dependent
# Android
diff --git a/video_engine/test/auto_test/helpers/bit_flip_encryption.cc b/video_engine/test/libvietest/helpers/bit_flip_encryption.cc
similarity index 89%
rename from video_engine/test/auto_test/helpers/bit_flip_encryption.cc
rename to video_engine/test/libvietest/helpers/bit_flip_encryption.cc
index 423ad5a..20a69e7 100644
--- a/video_engine/test/auto_test/helpers/bit_flip_encryption.cc
+++ b/video_engine/test/libvietest/helpers/bit_flip_encryption.cc
@@ -8,12 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "video_engine/test/auto_test/helpers/bit_flip_encryption.h"
+#include "video_engine/test/libvietest/include/bit_flip_encryption.h"
#include <cstdlib>
-#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
-
float NormalizedRand() {
return static_cast<float>(rand()) /
static_cast<float>(RAND_MAX);
diff --git a/video_engine/test/auto_test/helpers/random_encryption.cc b/video_engine/test/libvietest/helpers/random_encryption.cc
similarity index 95%
rename from video_engine/test/auto_test/helpers/random_encryption.cc
rename to video_engine/test/libvietest/helpers/random_encryption.cc
index 04904b5..8f5579e 100644
--- a/video_engine/test/auto_test/helpers/random_encryption.cc
+++ b/video_engine/test/libvietest/helpers/random_encryption.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "video_engine/test/auto_test/helpers/random_encryption.h"
+#include "video_engine/test/libvietest/include/random_encryption.h"
#include <algorithm>
#include <cstdlib>
diff --git a/video_engine/test/auto_test/helpers/vie_fake_camera.cc b/video_engine/test/libvietest/helpers/vie_fake_camera.cc
similarity index 88%
rename from video_engine/test/auto_test/helpers/vie_fake_camera.cc
rename to video_engine/test/libvietest/helpers/vie_fake_camera.cc
index f7dae2f..b9b7281 100644
--- a/video_engine/test/auto_test/helpers/vie_fake_camera.cc
+++ b/video_engine/test/libvietest/helpers/vie_fake_camera.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -7,13 +7,13 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "vie_fake_camera.h"
+#include "video_engine/test/libvietest/include/vie_fake_camera.h"
#include <assert.h>
-#include "vie_capture.h"
-#include "vie_file_capture_device.h"
-#include "thread_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/test/libvietest/include/vie_file_capture_device.h"
// This callback runs the camera thread:
bool StreamVideoFileRepeatedlyIntoCaptureDevice(void* data) {
diff --git a/video_engine/test/auto_test/helpers/vie_file_capture_device.cc b/video_engine/test/libvietest/helpers/vie_file_capture_device.cc
similarity index 87%
rename from video_engine/test/auto_test/helpers/vie_file_capture_device.cc
rename to video_engine/test/libvietest/helpers/vie_file_capture_device.cc
index e4caa2a..5d6abfa 100644
--- a/video_engine/test/auto_test/helpers/vie_file_capture_device.cc
+++ b/video_engine/test/libvietest/helpers/vie_file_capture_device.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -7,17 +7,16 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "vie_file_capture_device.h"
+#include "video_engine/test/libvietest/include/vie_file_capture_device.h"
#include <assert.h>
#include "common_types.h"
-#include "critical_section_wrapper.h"
-#include "event_wrapper.h"
-#include "module_common_types.h"
-#include "vie_autotest_defines.h"
-#include "vie_capture.h"
-#include "tick_util.h"
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "video_engine/include/vie_capture.h"
// This class ensures we are not exceeding the max FPS.
class FramePacemaker {
diff --git a/video_engine/test/auto_test/helpers/vie_to_file_renderer.cc b/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
similarity index 97%
rename from video_engine/test/auto_test/helpers/vie_to_file_renderer.cc
rename to video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
index d9c8da3..f186aad 100644
--- a/video_engine/test/auto_test/helpers/vie_to_file_renderer.cc
+++ b/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "vie_to_file_renderer.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
#include <assert.h>
diff --git a/video_engine/test/auto_test/helpers/bit_flip_encryption.h b/video_engine/test/libvietest/include/bit_flip_encryption.h
similarity index 100%
rename from video_engine/test/auto_test/helpers/bit_flip_encryption.h
rename to video_engine/test/libvietest/include/bit_flip_encryption.h
diff --git a/video_engine/test/auto_test/helpers/random_encryption.h b/video_engine/test/libvietest/include/random_encryption.h
similarity index 100%
rename from video_engine/test/auto_test/helpers/random_encryption.h
rename to video_engine/test/libvietest/include/random_encryption.h
diff --git a/video_engine/test/auto_test/interface/tb_I420_codec.h b/video_engine/test/libvietest/include/tb_I420_codec.h
similarity index 97%
rename from video_engine/test/auto_test/interface/tb_I420_codec.h
rename to video_engine/test/libvietest/include/tb_I420_codec.h
index 0853796..f5467f4 100644
--- a/video_engine/test/auto_test/interface/tb_I420_codec.h
+++ b/video_engine/test/libvietest/include/tb_I420_codec.h
@@ -9,17 +9,14 @@
*/
/*
- * tb_I420_codec.h
- *
* This file contains the interface to I420 "codec"
* This is a dummy wrapper to allow VCM deal with raw I420 sequences
- *
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
-#include "video_codec_interface.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
class TbI420Encoder: public webrtc::VideoEncoder
{
diff --git a/video_engine/test/auto_test/interface/tb_capture_device.h b/video_engine/test/libvietest/include/tb_capture_device.h
similarity index 91%
rename from video_engine/test/auto_test/interface/tb_capture_device.h
rename to video_engine/test/libvietest/include/tb_capture_device.h
index 233b812..10f8db8 100644
--- a/video_engine/test/auto_test/interface/tb_capture_device.h
+++ b/video_engine/test/libvietest/include/tb_capture_device.h
@@ -13,8 +13,9 @@
#include <string>
-#include "tb_interfaces.h"
-#include "video_capture_factory.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+
+class TbInterfaces;
class TbCaptureDevice
{
diff --git a/video_engine/test/auto_test/interface/tb_external_transport.h b/video_engine/test/libvietest/include/tb_external_transport.h
similarity index 100%
rename from video_engine/test/auto_test/interface/tb_external_transport.h
rename to video_engine/test/libvietest/include/tb_external_transport.h
diff --git a/video_engine/test/auto_test/interface/tb_interfaces.h b/video_engine/test/libvietest/include/tb_interfaces.h
similarity index 65%
rename from video_engine/test/auto_test/interface/tb_interfaces.h
rename to video_engine/test/libvietest/include/tb_interfaces.h
index 63e78a3..69d8b53 100644
--- a/video_engine/test/auto_test/interface/tb_interfaces.h
+++ b/video_engine/test/libvietest/include/tb_interfaces.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,26 +11,27 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
-#include "vie_autotest_defines.h"
+#include <string>
#include "common_types.h"
-#include "vie_base.h"
-#include "vie_capture.h"
-#include "vie_codec.h"
-#include "vie_image_process.h"
-#include "vie_network.h"
-#include "vie_render.h"
-#include "vie_rtp_rtcp.h"
-#include "vie_encryption.h"
-#include "vie_defines.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/include/vie_encryption.h"
+#include "video_engine/vie_defines.h"
// This class deals with all the tedium of setting up video engine interfaces.
// It does its work in constructor and destructor, so keeping it in scope is
-// enough.
+// enough. It also sets up tracing.
class TbInterfaces
{
public:
- TbInterfaces(const char* test_name);
+ // Sets up all interfaces and creates a trace file
+ TbInterfaces(std::string test_name);
~TbInterfaces(void);
webrtc::VideoEngine* video_engine;
diff --git a/video_engine/test/auto_test/interface/tb_video_channel.h b/video_engine/test/libvietest/include/tb_video_channel.h
similarity index 90%
rename from video_engine/test/auto_test/interface/tb_video_channel.h
rename to video_engine/test/libvietest/include/tb_video_channel.h
index 5961e62..5c7e6f8 100644
--- a/video_engine/test/auto_test/interface/tb_video_channel.h
+++ b/video_engine/test/libvietest/include/tb_video_channel.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,7 +11,8 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
-#include "tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+
class TbVideoChannel
{
public:
diff --git a/video_engine/test/auto_test/helpers/vie_fake_camera.h b/video_engine/test/libvietest/include/vie_fake_camera.h
similarity index 96%
rename from video_engine/test/auto_test/helpers/vie_fake_camera.h
rename to video_engine/test/libvietest/include/vie_fake_camera.h
index 0b9c2e7..abc5d40 100644
--- a/video_engine/test/auto_test/helpers/vie_fake_camera.h
+++ b/video_engine/test/libvietest/include/vie_fake_camera.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/video_engine/test/auto_test/helpers/vie_file_capture_device.h b/video_engine/test/libvietest/include/vie_file_capture_device.h
similarity index 96%
rename from video_engine/test/auto_test/helpers/vie_file_capture_device.h
rename to video_engine/test/libvietest/include/vie_file_capture_device.h
index 5e62c48..7c98685 100644
--- a/video_engine/test/auto_test/helpers/vie_file_capture_device.h
+++ b/video_engine/test/libvietest/include/vie_file_capture_device.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
diff --git a/video_engine/test/auto_test/helpers/vie_to_file_renderer.h b/video_engine/test/libvietest/include/vie_to_file_renderer.h
similarity index 100%
rename from video_engine/test/auto_test/helpers/vie_to_file_renderer.h
rename to video_engine/test/libvietest/include/vie_to_file_renderer.h
diff --git a/video_engine/test/libvietest/libvietest.gypi b/video_engine/test/libvietest/libvietest.gypi
new file mode 100644
index 0000000..e93212a
--- /dev/null
+++ b/video_engine/test/libvietest/libvietest.gypi
@@ -0,0 +1,57 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'targets': [
+ {
+ 'target_name': 'libvietest',
+ 'type': '<(library)',
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/../testing/gtest.gyp:gtest',
+ '<(webrtc_root)/../test/test.gyp:test_support',
+ 'video_engine_core',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include/',
+ ]
+ },
+ 'include_dirs': [
+ 'include/',
+ 'helpers/',
+ ],
+ 'sources': [
+ # Helper classes
+ 'include/bit_flip_encryption.h',
+ 'include/random_encryption.h',
+ 'include/vie_fake_camera.h',
+ 'include/vie_file_capture_device.h',
+ 'include/vie_to_file_renderer.h',
+
+ 'helpers/bit_flip_encryption.cc',
+ 'helpers/random_encryption.cc',
+ 'helpers/vie_fake_camera.cc',
+ 'helpers/vie_file_capture_device.cc',
+ 'helpers/vie_to_file_renderer.cc',
+
+ # Testbed classes
+ 'include/tb_capture_device.h',
+ 'include/tb_external_transport.h',
+ 'include/tb_I420_codec.h',
+ 'include/tb_interfaces.h',
+ 'include/tb_video_channel.h',
+
+ 'testbed/tb_capture_device.cc',
+ 'testbed/tb_external_transport.cc',
+ 'testbed/tb_I420_codec.cc',
+ 'testbed/tb_interfaces.cc',
+ 'testbed/tb_video_channel.cc',
+ ],
+ },
+ ],
+}
diff --git a/video_engine/test/auto_test/source/tb_I420_codec.cc b/video_engine/test/libvietest/testbed/tb_I420_codec.cc
similarity index 98%
rename from video_engine/test/auto_test/source/tb_I420_codec.cc
rename to video_engine/test/libvietest/testbed/tb_I420_codec.cc
index c94c0b7..afab59a 100644
--- a/video_engine/test/auto_test/source/tb_I420_codec.cc
+++ b/video_engine/test/libvietest/testbed/tb_I420_codec.cc
@@ -8,12 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * tb_I420_codec.cc
- *
- */
+#include "video_engine/test/libvietest/include/tb_I420_codec.h"
-#include "tb_I420_codec.h"
#include <string.h>
#include <stdio.h>
#include <assert.h>
diff --git a/video_engine/test/auto_test/source/tb_capture_device.cc b/video_engine/test/libvietest/testbed/tb_capture_device.cc
similarity index 87%
rename from video_engine/test/auto_test/source/tb_capture_device.cc
rename to video_engine/test/libvietest/testbed/tb_capture_device.cc
index 66f1619..684bff9 100644
--- a/video_engine/test/auto_test/source/tb_capture_device.cc
+++ b/video_engine/test/libvietest/testbed/tb_capture_device.cc
@@ -8,7 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+
+#include "gtest/gtest.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
TbCaptureDevice::TbCaptureDevice(TbInterfaces& Engine) :
captureId(-1),
@@ -45,8 +48,6 @@
int error = ViE.capture->AllocateCaptureDevice(*vcpm_, captureId);
if (error == 0)
{
- ViETest::Log("Using capture device: %s, captureId: %d", deviceName,
- captureId);
captureDeviceSet = true;
break;
}
@@ -58,14 +59,11 @@
}
device_name_ = deviceName;
- ViETest::Log("Starting capture device %s with captureId %d\n", deviceName,
- captureId);
EXPECT_EQ(0, ViE.capture->StartCapture(captureId));
}
TbCaptureDevice::~TbCaptureDevice(void)
{
- ViETest::Log("Stopping capture device with id %d\n", captureId);
EXPECT_EQ(0, ViE.capture->StopCapture(captureId));
EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
vcpm_->Release();
diff --git a/video_engine/test/auto_test/source/tb_external_transport.cc b/video_engine/test/libvietest/testbed/tb_external_transport.cc
similarity index 96%
rename from video_engine/test/auto_test/source/tb_external_transport.cc
rename to video_engine/test/libvietest/testbed/tb_external_transport.cc
index 8670c40..e1e30be 100644
--- a/video_engine/test/auto_test/source/tb_external_transport.cc
+++ b/video_engine/test/libvietest/testbed/tb_external_transport.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "tb_external_transport.h"
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
#include <stdio.h> // printf
#include <stdlib.h> // rand
@@ -21,11 +21,11 @@
#include <cstring>
#endif
-#include "critical_section_wrapper.h"
-#include "event_wrapper.h"
-#include "thread_wrapper.h"
-#include "tick_util.h"
-#include "vie_network.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "video_engine/include/vie_network.h"
#if defined(_WIN32)
#pragma warning(disable: 4355) // 'this' : used in base member initializer list
@@ -110,7 +110,7 @@
ssrc += ptr[10] << 8;
ssrc += ptr[11];
if (ssrc != _SSRC)
- {
+ {
return len; // return len to avoid error in trace file
}
}
diff --git a/video_engine/test/auto_test/source/tb_interfaces.cc b/video_engine/test/libvietest/testbed/tb_interfaces.cc
similarity index 82%
rename from video_engine/test/auto_test/source/tb_interfaces.cc
rename to video_engine/test/libvietest/testbed/tb_interfaces.cc
index b6b13a2..80a161c 100644
--- a/video_engine/test/auto_test/source/tb_interfaces.cc
+++ b/video_engine/test/libvietest/testbed/tb_interfaces.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,20 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
-TbInterfaces::TbInterfaces(const char* test_name) {
- std::string trace_file_path =
- (ViETest::GetResultOutputPath() + test_name) + "_trace.txt";
-
- ViETest::Log("Creating ViE Interfaces for test %s\n", test_name);
+TbInterfaces::TbInterfaces(std::string test_name) {
+ std::string complete_path =
+ webrtc::test::OutputPath() + test_name + "_trace.txt";
video_engine = webrtc::VideoEngine::Create();
EXPECT_TRUE(video_engine != NULL);
- EXPECT_EQ(0, video_engine->SetTraceFile(trace_file_path.c_str()));
+ EXPECT_EQ(0, video_engine->SetTraceFile(complete_path.c_str()));
EXPECT_EQ(0, video_engine->SetTraceFilter(webrtc::kTraceAll));
base = webrtc::ViEBase::GetInterface(video_engine);
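The rewritten constructor takes a std::string and builds the trace-file path from webrtc::test::OutputPath() instead of the old ViETest helpers, and the logging calls move out of the testbed class. A minimal sketch of the path construction, with a stand-in OutputPath() since the real one lives in testsupport/fileutils.h:

    #include <iostream>
    #include <string>

    // Stand-in for webrtc::test::OutputPath(), which returns a writable
    // directory ending in a path separator.
    static std::string OutputPath() { return "out/"; }

    int main() {
      std::string test_name = "MyViETest";
      // Same expression the constructor above uses.
      std::string complete_path = OutputPath() + test_name + "_trace.txt";
      std::cout << complete_path << std::endl;  // Prints "out/MyViETest_trace.txt".
      return 0;
    }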
diff --git a/video_engine/test/auto_test/source/tb_video_channel.cc b/video_engine/test/libvietest/testbed/tb_video_channel.cc
similarity index 94%
rename from video_engine/test/auto_test/source/tb_video_channel.cc
rename to video_engine/test/libvietest/testbed/tb_video_channel.cc
index 1e62a6b..3359f18 100644
--- a/video_engine/test/auto_test/source/tb_video_channel.cc
+++ b/video_engine/test/libvietest/testbed/tb_video_channel.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,7 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "tb_video_channel.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+
+#include "gtest/gtest.h"
TbVideoChannel::TbVideoChannel(TbInterfaces& Engine,
webrtc::VideoCodecType sendCodec, int width,
diff --git a/video_engine/video_engine.gyp b/video_engine/video_engine.gyp
index 261292c..1ca5142 100644
--- a/video_engine/video_engine.gyp
+++ b/video_engine/video_engine.gyp
@@ -16,6 +16,7 @@
'conditions': [
['build_with_chromium==0', {
'includes': [
+ 'test/libvietest/libvietest.gypi',
'test/auto_test/vie_auto_test.gypi',
'main/test/WindowsTest/windowstest.gypi',
],
diff --git a/video_engine/vie_channel.cc b/video_engine/vie_channel.cc
index a0f14dc..3bf3af8 100644
--- a/video_engine/vie_channel.cc
+++ b/video_engine/vie_channel.cc
@@ -73,7 +73,8 @@
effect_filter_(NULL),
color_enhancement_(true),
vcm_rttreported_(TickTime::Now()),
- file_recorder_(channel_id) {
+ file_recorder_(channel_id),
+ mtu_(0) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, channel_id),
"ViEChannel::ViEChannel(channel_id: %d, engine_id: %d)",
channel_id, engine_id);
@@ -312,6 +313,9 @@
"%s: could not register payload type", __FUNCTION__);
return -1;
}
+ if (mtu_ != 0) {
+ rtp_rtcp->SetMaxTransferUnit(mtu_);
+ }
if (restart_rtp) {
rtp_rtcp->SetSendingStatus(true);
}
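The new mtu_ member caches a user-chosen MTU so that RTP modules created later, for example for simulcast streams, inherit it; the hunk above replays the cached value right after a module is set up. A condensed sketch of this cache-and-replay pattern, with hypothetical RtpModule and Channel types:

    #include <cstdint>
    #include <vector>

    struct RtpModule {
      uint16_t mtu;
      RtpModule() : mtu(1500) {}
      void SetMaxTransferUnit(uint16_t value) { mtu = value; }
    };

    class Channel {
     public:
      Channel() : mtu_(0) {}
      // Applies the MTU to all existing modules and caches it for later.
      void SetMtu(uint16_t mtu) {
        for (size_t i = 0; i < modules_.size(); ++i)
          modules_[i]->SetMaxTransferUnit(mtu);
        mtu_ = mtu;
      }
      // A module registered after SetMtu() still inherits the setting.
      void RegisterModule(RtpModule* module) {
        modules_.push_back(module);
        if (mtu_ != 0)  // 0 means the user never set an MTU.
          module->SetMaxTransferUnit(mtu_);
      }

     private:
      std::vector<RtpModule*> modules_;
      uint16_t mtu_;
    };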
@@ -1053,78 +1057,6 @@
return rtp_rtcp_.EstimatedReceiveBandwidth(estimated_bandwidth);
}
-WebRtc_Word32 ViEChannel::SetKeepAliveStatus(
- const bool enable,
- const int unknown_payload_type,
- const WebRtc_UWord16 delta_transmit_timeMS) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
- if (enable && rtp_rtcp_.RTPKeepalive()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP keepalive already enabled", __FUNCTION__);
- return -1;
- } else if (!enable && !rtp_rtcp_.RTPKeepalive()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP keepalive already disabled", __FUNCTION__);
- return -1;
- }
-
- if (rtp_rtcp_.SetRTPKeepaliveStatus(enable, unknown_payload_type,
- delta_transmit_timeMS) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not set RTP keepalive status %d", __FUNCTION__,
- enable);
- if (enable == false && !rtp_rtcp_.DefaultModuleRegistered()) {
- // Not sending media and we try to disable keep alive
- rtp_rtcp_.ResetSendDataCountersRTP();
- rtp_rtcp_.SetSendingStatus(false);
- }
- return -1;
- }
-
- if (enable && !rtp_rtcp_.Sending()) {
- // Enable sending to start sending Sender reports instead of receive
- // reports.
- if (rtp_rtcp_.SetSendingStatus(true) != 0) {
- rtp_rtcp_.SetRTPKeepaliveStatus(false, 0, 0);
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not start sending", __FUNCTION__);
- return -1;
- }
- } else if (!enable && !rtp_rtcp_.SendingMedia()) {
- // Not sending media and we're disabling keep alive.
- rtp_rtcp_.ResetSendDataCountersRTP();
- if (rtp_rtcp_.SetSendingStatus(false) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not stop sending", __FUNCTION__);
- return -1;
- }
- }
- return 0;
-}
-
-WebRtc_Word32 ViEChannel::GetKeepAliveStatus(
- bool& enabled,
- int& unknown_payload_type,
- WebRtc_UWord16& delta_transmit_time_ms) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
- if (rtp_rtcp_.RTPKeepaliveStatus(&enabled, &unknown_payload_type,
- &delta_transmit_time_ms) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get RTP keepalive status", __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: enabled = %d, unknown_payload_type = %d, "
- "delta_transmit_time_ms = %ul",
- __FUNCTION__, enabled, (WebRtc_Word32) unknown_payload_type,
- delta_transmit_time_ms);
-
- return 0;
-}
-
WebRtc_Word32 ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
RTPDirections direction) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
@@ -1421,11 +1353,7 @@
#endif
rtp_rtcp_.SetSendingMediaStatus(true);
- if (rtp_rtcp_.Sending() && !rtp_rtcp_.RTPKeepalive()) {
- if (rtp_rtcp_.RTPKeepalive()) {
- // Sending Keep alive, don't trigger an error.
- return 0;
- }
+ if (rtp_rtcp_.Sending()) {
// Already sending.
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Already sending", __FUNCTION__);
@@ -1457,10 +1385,6 @@
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetSendingMediaStatus(false);
}
- if (rtp_rtcp_.RTPKeepalive()) {
- // Don't turn off sending since we'll send keep alive packets.
- return 0;
- }
if (!rtp_rtcp_.Sending()) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Not sending", __FUNCTION__);
@@ -1932,6 +1856,7 @@
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetMaxTransferUnit(mtu);
}
+ mtu_ = mtu;
return 0;
}
diff --git a/video_engine/vie_channel.h b/video_engine/vie_channel.h
index 349a04b..1b1816f 100644
--- a/video_engine/vie_channel.h
+++ b/video_engine/vie_channel.h
@@ -163,12 +163,7 @@
WebRtc_UWord32& fec_bitrate_sent,
WebRtc_UWord32& nackBitrateSent) const;
int GetEstimatedReceiveBandwidth(WebRtc_UWord32* estimated_bandwidth) const;
- WebRtc_Word32 SetKeepAliveStatus(const bool enable,
- const int unknown_payload_type,
- const WebRtc_UWord16 delta_transmit_timeMS);
- WebRtc_Word32 GetKeepAliveStatus(bool& enable,
- int& unknown_payload_type,
- WebRtc_UWord16& delta_transmit_timeMS);
+
WebRtc_Word32 StartRTPDump(const char file_nameUTF8[1024],
RTPDirections direction);
WebRtc_Word32 StopRTPDump(RTPDirections direction);
@@ -397,6 +392,9 @@
TickTime vcm_rttreported_;
ViEFileRecorder file_recorder_;
+
+ // User-set MTU; 0 if not set.
+ uint16_t mtu_;
};
} // namespace webrtc
diff --git a/video_engine/vie_channel_manager.cc b/video_engine/vie_channel_manager.cc
index d0fc416..1928dcf 100644
--- a/video_engine/vie_channel_manager.cc
+++ b/video_engine/vie_channel_manager.cc
@@ -97,6 +97,7 @@
vie_encoder = NULL;
ReturnChannelId(new_channel_id);
delete group;
+ return -1;
}
channel_id = new_channel_id;
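The added return -1 is a genuine bug fix: without it, the error path deleted the group and recycled the channel id, then fell through and reported success with a dangling id. A condensed sketch of the corrected control flow, all names hypothetical:

    // Create a channel; -1 signals failure to the caller.
    int CreateChannel(bool encoder_created, int new_channel_id) {
      if (!encoder_created) {
        // Undo the partial work (free the encoder slot, return the id,
        // delete the group)...
        return -1;  // ...and stop here instead of falling through.
      }
      return new_channel_id;  // Reached only on success.
    }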
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index 20d8dac..6bcc86e 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -684,11 +684,8 @@
}
WebRtc_Word32 ViEEncoder::ProtectionRequest(
- WebRtc_UWord8 delta_fecrate,
- WebRtc_UWord8 key_fecrate,
- bool delta_use_uep_protection,
- bool key_use_uep_protection,
- bool nack_enabled,
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
WebRtc_UWord32* sent_video_rate_bps,
WebRtc_UWord32* sent_nack_rate_bps,
WebRtc_UWord32* sent_fec_rate_bps) {
@@ -696,19 +693,17 @@
ViEId(engine_id_, channel_id_),
"%s, deltaFECRate: %u, key_fecrate: %u, "
"delta_use_uep_protection: %d, key_use_uep_protection: %d, ",
- __FUNCTION__, delta_fecrate, key_fecrate,
- delta_use_uep_protection, key_use_uep_protection);
+ __FUNCTION__,
+ delta_fec_params->fec_rate,
+ key_fec_params->fec_rate,
+ delta_fec_params->use_uep_protection,
+ key_fec_params->use_uep_protection);
- if (default_rtp_rtcp_.SetFECCodeRate(key_fecrate, delta_fecrate) != 0) {
+ if (default_rtp_rtcp_.SetFecParameters(delta_fec_params,
+ key_fec_params) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
- "%s: Could not update FEC code rate", __FUNCTION__);
- }
- if (default_rtp_rtcp_.SetFECUepProtection(key_use_uep_protection,
- delta_use_uep_protection) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Could not update FEC-UEP protection", __FUNCTION__);
+ "%s: Could not update FEC parameters", __FUNCTION__);
}
default_rtp_rtcp_.BitrateSent(NULL,
sent_video_rate_bps,
@@ -856,31 +851,7 @@
const WebRtc_UWord32 frame_rate,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height) {
- WebRtc_Word32 ret_val = 0;
- ret_val = vpm_->SetTargetResolution(width, height, frame_rate);
-
- if (!ret_val) {
- // Get current settings.
- VideoCodec current_codec;
- vcm_->SendCodec(&current_codec);
- WebRtc_UWord32 current_bit_rate;
- if (vcm_->Bitrate(&current_bit_rate) != 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Failed to get the current encoder target bitrate.");
- }
-
- // Set the new values.
- current_codec.height = static_cast<WebRtc_UWord16>(height);
- current_codec.width = static_cast<WebRtc_UWord16>(width);
- current_codec.maxFramerate = static_cast<WebRtc_UWord8>(frame_rate);
- current_codec.startBitrate = current_bit_rate;
-
- // Re-register encoder with the updated settings.
- ret_val = vcm_->RegisterSendCodec(&current_codec, num_cores_,
- max_payload_length_);
- }
- return ret_val;
+ return vpm_->SetTargetResolution(width, height, frame_rate);
}
void QMVideoSettingsCallback::SetMaxPayloadLength(
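ProtectionRequest() now receives its FEC configuration as two FecProtectionParams pointers instead of five loose scalars, and a single SetFecParameters() call replaces the SetFECCodeRate()/SetFECUepProtection() pair. A hedged sketch of the parameter-object shape; fec_rate and use_uep_protection are the fields the trace statement reads, everything else is illustrative:

    struct FecProtectionParams {
      unsigned char fec_rate;
      bool use_uep_protection;
      // The real struct may carry additional fields.
    };

    // Grouping related values means a future parameter becomes a new field
    // rather than another positional argument at every call site.
    int ProtectionRequest(const FecProtectionParams* delta_fec_params,
                          const FecProtectionParams* key_fec_params) {
      if (delta_fec_params == 0 || key_fec_params == 0)
        return -1;
      return 0;
    }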
diff --git a/video_engine/vie_encoder.h b/video_engine/vie_encoder.h
index f2e7f9e..79d8fef 100644
--- a/video_engine/vie_encoder.h
+++ b/video_engine/vie_encoder.h
@@ -108,12 +108,9 @@
const RTPVideoHeader* rtp_video_hdr);
// Implements VideoProtectionCallback.
- virtual WebRtc_Word32 ProtectionRequest(
- WebRtc_UWord8 delta_fecrate,
- WebRtc_UWord8 key_fecrate,
- bool delta_use_uep_protection,
- bool key_use_uep_protection,
- bool nack_enabled,
+ virtual int ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
WebRtc_UWord32* sent_video_rate_bps,
WebRtc_UWord32* sent_nack_rate_bps,
WebRtc_UWord32* sent_fec_rate_bps);
diff --git a/video_engine/vie_rtp_rtcp_impl.cc b/video_engine/vie_rtp_rtcp_impl.cc
index 0aaf8e4..630b5d6 100644
--- a/video_engine/vie_rtp_rtcp_impl.cc
+++ b/video_engine/vie_rtp_rtcp_impl.cc
@@ -767,65 +767,6 @@
static_cast<WebRtc_UWord32*>(estimated_bandwidth));
}
-int ViERTP_RTCPImpl::SetRTPKeepAliveStatus(
- const int video_channel,
- bool enable,
- const int unknown_payload_type,
- const unsigned int delta_transmit_time_seconds) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enable: %d, unknown_payload_type: %d, "
- "deltaTransmitTimeMS: %ul)",
- __FUNCTION__, video_channel, enable,
- static_cast<int>(unknown_payload_type),
- delta_transmit_time_seconds);
- ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
- ViEChannel* vie_channel = cs.Channel(video_channel);
- if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__,
- video_channel);
- shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
- return -1;
- }
- WebRtc_UWord16 delta_transmit_time_ms = 1000 * delta_transmit_time_seconds;
- if (vie_channel->SetKeepAliveStatus(enable, unknown_payload_type,
- delta_transmit_time_ms) != 0) {
- shared_data_->SetLastError(kViERtpRtcpUnknownError);
- return -1;
- }
- return 0;
-}
-
-int ViERTP_RTCPImpl::GetRTPKeepAliveStatus(
- const int video_channel,
- bool& enabled,
- int& unknown_payload_type,
- unsigned int& delta_transmit_time_seconds) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
- ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
- ViEChannel* vie_channel = cs.Channel(video_channel);
- if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
- shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
- return -1;
- }
-
- WebRtc_UWord16 delta_time_ms = 0;
- int ret_val = vie_channel->GetKeepAliveStatus(enabled, unknown_payload_type,
- delta_time_ms);
- delta_transmit_time_seconds = delta_time_ms / 1000;
- if (ret_val != 0) {
- shared_data_->SetLastError(kViERtpRtcpUnknownError);
- }
- return ret_val;
-}
-
int ViERTP_RTCPImpl::StartRTPDump(const int video_channel,
const char file_nameUTF8[1024],
RTPDirections direction) {
diff --git a/video_engine/vie_rtp_rtcp_impl.h b/video_engine/vie_rtp_rtcp_impl.h
index 386c96f..bdb1a81 100644
--- a/video_engine/vie_rtp_rtcp_impl.h
+++ b/video_engine/vie_rtp_rtcp_impl.h
@@ -93,16 +93,6 @@
virtual int GetEstimatedReceiveBandwidth(
const int video_channel,
unsigned int* estimated_bandwidth) const;
- virtual int SetRTPKeepAliveStatus(
- const int video_channel,
- bool enable,
- const int unknown_payload_type,
- const unsigned int delta_transmit_time_seconds);
- virtual int GetRTPKeepAliveStatus(
- const int video_channel,
- bool& enabled,
- int& unkown_payload_type,
- unsigned int& delta_transmit_time_seconds) const;
virtual int StartRTPDump(const int video_channel,
const char file_nameUTF8[1024],
RTPDirections direction);
diff --git a/voice_engine/main/interface/voe_audio_processing.h b/voice_engine/main/interface/voe_audio_processing.h
index f4742dc..0d21f62 100644
--- a/voice_engine/main/interface/voe_audio_processing.h
+++ b/voice_engine/main/interface/voe_audio_processing.h
@@ -187,6 +187,12 @@
// Gets the current typing detection status.
virtual int GetTypingDetectionStatus(bool& enabled) = 0;
+ // Reports the lower of:
+ // * Time in seconds since the last typing event.
+ // * Time in seconds since typing detection was enabled.
+ // Returns error if typing detection is disabled.
+ virtual int TimeSinceLastTyping(int &seconds) = 0;
+
protected:
VoEAudioProcessing() {}
virtual ~VoEAudioProcessing() {}
diff --git a/voice_engine/main/interface/voe_rtp_rtcp.h b/voice_engine/main/interface/voe_rtp_rtcp.h
index 9f8609e..0af5bac 100644
--- a/voice_engine/main/interface/voe_rtp_rtcp.h
+++ b/voice_engine/main/interface/voe_rtp_rtcp.h
@@ -16,7 +16,6 @@
// - Obtaining RTCP data from incoming RTCP sender reports.
// - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
// - Forward Error Correction (FEC).
-// - RTP Keepalive for maintaining the NAT mappings associated to RTP flows.
// - Writing RTP and RTCP packets to binary files for off-line analysis of
// the call quality.
// - Inserting extra RTP packets into active audio stream.
@@ -187,21 +186,9 @@
virtual int GetFECStatus(
int channel, bool& enabled, int& redPayloadtype) = 0;
- // Sets the RTP keepalive mechanism status.
- // This functionality can maintain an existing Network Address Translator
- // (NAT) mapping while regular RTP is no longer transmitted.
- virtual int SetRTPKeepaliveStatus(
- int channel, bool enable, int unknownPayloadType,
- int deltaTransmitTimeSeconds = 15) = 0;
-
- // Gets the RTP keepalive mechanism status.
- virtual int GetRTPKeepaliveStatus(
- int channel, bool& enabled, int& unknownPayloadType,
- int& deltaTransmitTimeSeconds) = 0;
-
// Enables capturing of RTP packets to a binary file on a specific
// |channel| and for a given |direction|. The file can later be replayed
- // using e.g. RTP Tools rtpplay since the binary file format is
+ // using e.g. RTP Tools rtpplay since the binary file format is
// compatible with the rtpdump format.
virtual int StartRTPDump(
int channel, const char fileNameUTF8[1024],
diff --git a/voice_engine/main/source/channel.cc b/voice_engine/main/source/channel.cc
index cd93137..94b6dd4 100644
--- a/voice_engine/main/source/channel.cc
+++ b/voice_engine/main/source/channel.cc
@@ -918,20 +918,7 @@
if (_outputFileRecording && _outputFileRecorderPtr)
{
- if(audioFrame._audioChannel == 2)
- {
- AudioFrame temp = audioFrame;
- AudioFrameOperations::StereoToMono (temp);
- _outputFileRecorderPtr->RecordAudioToFile(temp);
- }
- else if(audioFrame._audioChannel == 1)
- {
- _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
- }
- else
- {
- assert(false);
- }
+ _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
}
}
@@ -3902,7 +3889,8 @@
const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
CodecInst dummyCodec={100,"L16",16000,320,1,320000};
- if (codecInst != NULL && codecInst->channels != 1)
+ if ((codecInst != NULL) &&
+ ((codecInst->channels < 1) || (codecInst->channels > 2)))
{
_engineStatisticsPtr->SetLastError(
VE_BAD_ARGUMENT, kTraceError,
@@ -5643,60 +5631,6 @@
}
int
-Channel::SetRTPKeepaliveStatus(bool enable,
- int unknownPayloadType,
- int deltaTransmitTimeSeconds)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
- "Channel::SetRTPKeepaliveStatus()");
- if (_sending)
- {
- _engineStatisticsPtr->SetLastError(
- VE_ALREADY_SENDING, kTraceError,
- "SetRTPKeepaliveStatus() already sending");
- return -1;
- }
- if (_rtpRtcpModule.SetRTPKeepaliveStatus(
- enable,
- unknownPayloadType,
- 1000 * deltaTransmitTimeSeconds) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "SetRTPKeepaliveStatus() failed to set RTP keepalive status");
- return -1;
- }
- return 0;
-}
-
-int
-Channel::GetRTPKeepaliveStatus(bool& enabled,
- int& unknownPayloadType,
- int& deltaTransmitTimeSeconds)
-{
- bool onOff(false);
- int payloadType(0);
- WebRtc_UWord16 deltaTransmitTimeMS(0);
- if (_rtpRtcpModule.RTPKeepaliveStatus(&onOff, &payloadType,
- &deltaTransmitTimeMS) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "GetRTPKeepaliveStatus() failed to retrieve RTP keepalive status");
- return -1;
- }
- enabled = onOff;
- unknownPayloadType = payloadType;
- deltaTransmitTimeSeconds = static_cast<int> (deltaTransmitTimeMS / 1000);
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_instanceId, _channelId),
- "GetRTPKeepaliveStatus() => enabled=%d, "
- "unknownPayloadType=%u, deltaTransmitTimeSeconds=%d",
- enabled, unknownPayloadType, deltaTransmitTimeSeconds);
- return 0;
-}
-
-int
Channel::StartRTPDump(const char fileNameUTF8[1024],
RTPDirections direction)
{
@@ -6217,7 +6151,7 @@
WebRtc_Word32
Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
{
- WebRtc_Word16 fileBuffer[320];
+ scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
WebRtc_UWord32 fileSamples(0);
{
@@ -6232,7 +6166,7 @@
return -1;
}
- if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer,
+ if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
fileSamples,
mixingFrequency) == -1)
{
@@ -6255,17 +6189,23 @@
if (_mixFileWithMicrophone)
{
+ // Currently the file stream is always mono.
+ // TODO(xians): Change the code when FilePlayer supports real stereo.
Utility::MixWithSat(_audioFrame._payloadData,
- fileBuffer,
- (WebRtc_UWord16)fileSamples);
+ static_cast<int>(_audioFrame._audioChannel),
+ fileBuffer.get(),
+ 1,
+ static_cast<int>(fileSamples));
}
else
{
- // replace ACM audio with file
+ // Replace ACM audio with file.
+ // Currently file stream is always mono.
+ // TODO(xians): Change the code when FilePlayer supports real stereo.
_audioFrame.UpdateFrame(_channelId,
-1,
- fileBuffer,
- (WebRtc_UWord16)fileSamples,
+ fileBuffer.get(),
+ static_cast<WebRtc_UWord16>(fileSamples),
mixingFrequency,
AudioFrame::kNormalSpeech,
AudioFrame::kVadUnknown,
@@ -6281,7 +6221,7 @@
{
assert(mixingFrequency <= 32000);
- WebRtc_Word16 fileBuffer[640];
+ scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
WebRtc_UWord32 fileSamples(0);
{
@@ -6296,7 +6236,7 @@
}
// We should get the frequency we ask for.
- if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer,
+ if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
fileSamples,
mixingFrequency) == -1)
{
@@ -6309,28 +6249,13 @@
if (audioFrame._payloadDataLengthInSamples == fileSamples)
{
- // In case the incoming stream is stereo and file stream is mono,
- // turn the file stream into stereo.
- // TODO(xians): remove the code when FilePlayer supports real stereo.
- if (audioFrame._audioChannel == 2)
- {
- // The mono file stream is copied to be stereo.
- WebRtc_Word16* FileBufferCopy = new WebRtc_Word16[fileSamples];
- memcpy(FileBufferCopy, fileBuffer,
- sizeof(WebRtc_Word16) * fileSamples);
- for (unsigned int i = 0; i < fileSamples; i++)
- {
- fileBuffer[2*i] = FileBufferCopy[i];
- fileBuffer[2*i+1] = FileBufferCopy[i];
- }
- fileSamples = 2*fileSamples;
- delete [] FileBufferCopy;
- }
-
- // Mix the incoming stream and file stream.
+ // Currently the file stream is always mono.
+ // TODO(xians): Change the code when FilePlayer supports real stereo.
Utility::MixWithSat(audioFrame._payloadData,
- fileBuffer,
- (WebRtc_UWord16)fileSamples);
+ static_cast<int>(audioFrame._audioChannel),
+ fileBuffer.get(),
+ 1,
+ static_cast<int>(fileSamples));
}
else
{
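Both file-mixing paths in this file swap fixed stack arrays for heap-allocated 640-sample buffers: 10 ms at 32 kHz is 320 samples per channel, so a stereo-capable buffer needs 2 * 320 = 640. A sketch of the sizing, using std::unique_ptr<short[]> as a stand-in for the scoped_array used above:

    #include <memory>

    const int kSamplesPer10MsPerChannel = 320;  // 32 kHz * 0.010 s.
    const int kMaxChannels = 2;                 // Mono or stereo.

    void FillFromFile() {
      // 640 samples covers the stereo worst case, and the 1280-byte buffer
      // lives on the heap rather than the stack of an audio callback.
      std::unique_ptr<short[]> file_buffer(
          new short[kSamplesPer10MsPerChannel * kMaxChannels]);
      // ... pass file_buffer.get() to the file player, as the hunks above do.
    }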
diff --git a/voice_engine/main/source/channel.h b/voice_engine/main/source/channel.h
index 7ceca4a..3551054 100644
--- a/voice_engine/main/source/channel.h
+++ b/voice_engine/main/source/channel.h
@@ -343,10 +343,6 @@
int GetRTPStatistics(CallStatistics& stats);
int SetFECStatus(bool enable, int redPayloadtype);
int GetFECStatus(bool& enabled, int& redPayloadtype);
- int SetRTPKeepaliveStatus(bool enable, int unknownPayloadType,
- int deltaTransmitTimeSeconds);
- int GetRTPKeepaliveStatus(bool& enabled, int& unknownPayloadType,
- int& deltaTransmitTimeSeconds);
int StartRTPDump(const char fileNameUTF8[1024], RTPDirections direction);
int StopRTPDump(RTPDirections direction);
bool RTPDumpIsActive(RTPDirections direction);
diff --git a/voice_engine/main/source/output_mixer.cc b/voice_engine/main/source/output_mixer.cc
index 2764f78..f00a0d1 100644
--- a/voice_engine/main/source/output_mixer.cc
+++ b/voice_engine/main/source/output_mixer.cc
@@ -342,7 +342,8 @@
const WebRtc_UWord32 notificationTime(0);
CodecInst dummyCodec={100,"L16",16000,320,1,320000};
- if (codecInst != NULL && codecInst->channels != 1)
+ if ((codecInst != NULL) &&
+ ((codecInst->channels < 1) || (codecInst->channels > 2)))
{
_engineStatisticsPtr->SetLastError(
VE_BAD_ARGUMENT, kTraceError,
@@ -529,8 +530,6 @@
CriticalSectionScoped cs(&_fileCritSect);
if (_outputFileRecording)
{
- assert(audioFrame._audioChannel == 1);
-
if (_outputFileRecorderPtr)
{
_outputFileRecorderPtr->RecordAudioToFile(audioFrame);
diff --git a/voice_engine/main/source/transmit_mixer.cc b/voice_engine/main/source/transmit_mixer.cc
index cf7f38e..9fc9f4f 100644
--- a/voice_engine/main/source/transmit_mixer.cc
+++ b/voice_engine/main/source/transmit_mixer.cc
@@ -183,6 +183,7 @@
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
_timeActive(0),
+ _timeSinceLastTyping(0),
_penaltyCounter(0),
_typingNoiseWarning(0),
#endif
@@ -1218,7 +1219,7 @@
WebRtc_Word32 TransmitMixer::MixOrReplaceAudioWithFile(
const int mixingFrequency)
{
- WebRtc_Word16 fileBuffer[320];
+ scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
WebRtc_UWord32 fileSamples(0);
@@ -1233,7 +1234,7 @@
return -1;
}
- if (_filePlayerPtr->Get10msAudioFromFile(fileBuffer,
+ if (_filePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
fileSamples,
mixingFrequency) == -1)
{
@@ -1244,19 +1245,27 @@
}
}
+ assert(_audioFrame._payloadDataLengthInSamples == fileSamples);
+
if (_mixFileWithMicrophone)
{
+ // Currently the file stream is always mono.
+ // TODO(xians): Change the code when FilePlayer supports real stereo.
Utility::MixWithSat(_audioFrame._payloadData,
- fileBuffer,
- (WebRtc_UWord16) fileSamples);
- assert(_audioFrame._payloadDataLengthInSamples == fileSamples);
+ static_cast<int>(_audioFrame._audioChannel),
+ fileBuffer.get(),
+ 1,
+ static_cast<int>(fileSamples));
} else
{
- // replace ACM audio with file
+ // Replace ACM audio with file.
+ // Currently the file stream is always mono.
+ // TODO(xians): Change the code when FilePlayer supports real stereo.
_audioFrame.UpdateFrame(-1,
-1,
- fileBuffer,
- (WebRtc_UWord16) fileSamples, mixingFrequency,
+ fileBuffer.get(),
+ static_cast<WebRtc_UWord16>(fileSamples),
+ mixingFrequency,
AudioFrame::kNormalSpeech,
AudioFrame::kVadUnknown,
1);
@@ -1376,6 +1385,16 @@
else
_timeActive = 0;
+ // Keep track of the time since the last typing event.
+ if (keyPressed)
+ {
+ _timeSinceLastTyping = 0;
+ }
+ else
+ {
+ ++_timeSinceLastTyping;
+ }
+
if (keyPressed && (_audioFrame._vadActivity == AudioFrame::kVadActive)
&& (_timeActive < 10))
{
@@ -1411,6 +1430,18 @@
return (_mixingFrequency);
}
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+int TransmitMixer::TimeSinceLastTyping(int &seconds)
+{
+ // We check in VoEAudioProcessingImpl that this is only called when
+ // typing detection is active.
+
+ // Round to whole seconds
+ seconds = (_timeSinceLastTyping + 50) / 100;
+ return 0;
+}
+#endif
+
} // namespace voe
} // namespace webrtc
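_timeSinceLastTyping is incremented once per processed 10 ms frame, so 100 ticks make one second and the (ticks + 50) / 100 expression above rounds to the nearest second. A self-checking sketch of that arithmetic:

    #include <cassert>

    // One tick per 10 ms frame; round to the nearest whole second.
    int TicksToRoundedSeconds(int ticks) {
      return (ticks + 50) / 100;
    }

    int main() {
      assert(TicksToRoundedSeconds(0) == 0);    // Just typed.
      assert(TicksToRoundedSeconds(49) == 0);   // 490 ms rounds down.
      assert(TicksToRoundedSeconds(50) == 1);   // 500 ms rounds up.
      assert(TicksToRoundedSeconds(149) == 1);  // 1.49 s -> 1 s.
      assert(TicksToRoundedSeconds(150) == 2);  // 1.50 s -> 2 s.
      return 0;
    }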
diff --git a/voice_engine/main/source/transmit_mixer.h b/voice_engine/main/source/transmit_mixer.h
index d65bb80..271686d 100644
--- a/voice_engine/main/source/transmit_mixer.h
+++ b/voice_engine/main/source/transmit_mixer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -148,6 +148,11 @@
void RecordFileEnded(const WebRtc_Word32 id);
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+public: // Typing detection
+ int TimeSinceLastTyping(int &seconds);
+#endif
+
private:
TransmitMixer(const WebRtc_UWord32 instanceId);
@@ -197,6 +202,7 @@
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
WebRtc_Word32 _timeActive;
+ WebRtc_Word32 _timeSinceLastTyping;
WebRtc_Word32 _penaltyCounter;
WebRtc_UWord32 _typingNoiseWarning;
#endif
diff --git a/voice_engine/main/source/utility.cc b/voice_engine/main/source/utility.cc
index 6e70156..1ef108e 100644
--- a/voice_engine/main/source/utility.cc
+++ b/voice_engine/main/source/utility.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -12,27 +12,53 @@
#include "module.h"
#include "trace.h"
+#include "signal_processing_library.h"
namespace webrtc
{
namespace voe
{
+enum { kMaxTargetLen = 2 * 32 * 10 }; // Stereo, 32 kHz, 10 ms.
void Utility::MixWithSat(WebRtc_Word16 target[],
+ int target_channel,
const WebRtc_Word16 source[],
- WebRtc_UWord16 len)
+ int source_channel,
+ int source_len)
{
- WebRtc_Word32 temp(0);
- for (int i = 0; i < len; i++)
+ assert((target_channel == 1) || (target_channel == 2));
+ assert((source_channel == 1) || (source_channel == 2));
+ assert(source_len <= kMaxTargetLen);
+
+ if ((target_channel == 2) && (source_channel == 1))
{
- temp = source[i] + target[i];
- if (temp > 32767)
- target[i] = 32767;
- else if (temp < -32768)
- target[i] = -32768;
- else
- target[i] = (WebRtc_Word16) temp;
+ // Convert source from mono to stereo.
+ WebRtc_Word32 left = 0;
+ WebRtc_Word32 right = 0;
+ for (int i = 0; i < source_len; ++i) {
+ left = source[i] + target[i*2];
+ right = source[i] + target[i*2 + 1];
+ target[i*2] = WebRtcSpl_SatW32ToW16(left);
+ target[i*2 + 1] = WebRtcSpl_SatW32ToW16(right);
+ }
+ }
+ else if ((target_channel == 1) && (source_channel == 2))
+ {
+ // Convert source from stereo to mono.
+ WebRtc_Word32 temp = 0;
+ for (int i = 0; i < source_len/2; ++i) {
+ temp = ((source[i*2] + source[i*2 + 1])>>1) + target[i];
+ target[i] = WebRtcSpl_SatW32ToW16(temp);
+ }
+ }
+ else
+ {
+ WebRtc_Word32 temp = 0;
+ for (int i = 0; i < source_len; ++i) {
+ temp = source[i] + target[i];
+ target[i] = WebRtcSpl_SatW32ToW16(temp);
+ }
}
}
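The rewritten MixWithSat saturates every mixed sample through WebRtcSpl_SatW32ToW16 instead of open-coded clamping, and converts between mono and stereo when the channel counts differ. A self-contained sketch of the saturating equal-channel branch; SatW32ToW16 below is a local stand-in for the library function:

    #include <cassert>

    static short SatW32ToW16(int value) {
      if (value > 32767) return 32767;
      if (value < -32768) return -32768;
      return static_cast<short>(value);
    }

    // Mixes source into target with saturation, both mono.
    static void MixMono(short* target, const short* source, int len) {
      for (int i = 0; i < len; ++i)
        target[i] = SatW32ToW16(static_cast<int>(target[i]) + source[i]);
    }

    int main() {
      short target[2] = {32000, -32000};
      const short source[2] = {2000, -2000};
      MixMono(target, source, 2);
      assert(target[0] == 32767);   // Clamped instead of wrapping negative.
      assert(target[1] == -32768);  // Clamped instead of wrapping positive.
      return 0;
    }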
diff --git a/voice_engine/main/source/utility.h b/voice_engine/main/source/utility.h
index 084ddf4..a8af8bd 100644
--- a/voice_engine/main/source/utility.h
+++ b/voice_engine/main/source/utility.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -30,8 +30,10 @@
{
public:
static void MixWithSat(WebRtc_Word16 target[],
+ int target_channel,
const WebRtc_Word16 source[],
- WebRtc_UWord16 len);
+ int source_channel,
+ int source_len);
static void MixSubtractWithSat(WebRtc_Word16 target[],
const WebRtc_Word16 source[],
diff --git a/voice_engine/main/source/voe_audio_processing_impl.cc b/voice_engine/main/source/voe_audio_processing_impl.cc
index 7303f48..eddc23e 100644
--- a/voice_engine/main/source/voe_audio_processing_impl.cc
+++ b/voice_engine/main/source/voe_audio_processing_impl.cc
@@ -14,6 +14,7 @@
#include "channel.h"
#include "critical_section_wrapper.h"
#include "trace.h"
+#include "transmit_mixer.h"
#include "voe_errors.h"
#include "voice_engine_impl.h"
@@ -1031,6 +1032,40 @@
#endif
}
+
+int VoEAudioProcessingImpl::TimeSinceLastTyping(int &seconds) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId, -1),
+ "TimeSinceLastTyping()");
+ ANDROID_NOT_SUPPORTED(_engineStatistics);
+ IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+ if (!_engineStatistics.Initialized()) {
+ _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
+ return -1;
+ }
+ // Check if typing detection is enabled
+ bool enabled = _audioProcessingModulePtr->voice_detection()->is_enabled();
+ if (enabled)
+ {
+ _transmitMixerPtr->TimeSinceLastTyping(seconds);
+ return 0;
+ }
+ else
+ {
+ _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+ "SetTypingDetectionStatus is not enabled");
+ return -1;
+ }
+#else
+ _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+ "SetTypingDetectionStatus is not supported");
+ return -1;
+#endif
+
+}
+
+
#endif // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
} // namespace webrtc
diff --git a/voice_engine/main/source/voe_audio_processing_impl.h b/voice_engine/main/source/voe_audio_processing_impl.h
index 0c49bf4..9598da4 100644
--- a/voice_engine/main/source/voe_audio_processing_impl.h
+++ b/voice_engine/main/source/voe_audio_processing_impl.h
@@ -89,6 +89,8 @@
virtual int GetTypingDetectionStatus(bool& enabled);
+ virtual int TimeSinceLastTyping(int &seconds);
+
protected:
VoEAudioProcessingImpl();
virtual ~VoEAudioProcessingImpl();
diff --git a/voice_engine/main/source/voe_file_impl.cc b/voice_engine/main/source/voe_file_impl.cc
index cd3193f..637dce7 100644
--- a/voice_engine/main/source/voe_file_impl.cc
+++ b/voice_engine/main/source/voe_file_impl.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -469,8 +469,8 @@
}
if (channel == -1)
{
- _outputMixerPtr->StartRecordingPlayout(fileNameUTF8, compression);
- return 0;
+ return _outputMixerPtr->StartRecordingPlayout
+ (fileNameUTF8, compression);
}
else
{
diff --git a/voice_engine/main/source/voe_rtp_rtcp_impl.cc b/voice_engine/main/source/voe_rtp_rtcp_impl.cc
index bc7a5c8..ba6d365 100644
--- a/voice_engine/main/source/voe_rtp_rtcp_impl.cc
+++ b/voice_engine/main/source/voe_rtp_rtcp_impl.cc
@@ -573,60 +573,6 @@
#endif
}
-int VoERTP_RTCPImpl::SetRTPKeepaliveStatus(int channel,
- bool enable,
- int unknownPayloadType,
- int deltaTransmitTimeSeconds)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
- "SetRTPKeepaliveStatus(channel=%d, enable=%d,"
- " unknownPayloadType=%u, deltaTransmitTimeSeconds=%d)",
- channel, enable, unknownPayloadType, deltaTransmitTimeSeconds);
- if (!_engineStatistics.Initialized())
- {
- _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ScopedChannel sc(_channelManager, channel);
- voe::Channel* channelPtr = sc.ChannelPtr();
- if (channelPtr == NULL)
- {
- _engineStatistics.SetLastError(
- VE_CHANNEL_NOT_VALID, kTraceError,
- "SetRTPKeepaliveStatus() failed to locate channel");
- return -1;
- }
- return channelPtr->SetRTPKeepaliveStatus(enable,
- unknownPayloadType,
- deltaTransmitTimeSeconds);
-}
-
-int VoERTP_RTCPImpl::GetRTPKeepaliveStatus(int channel,
- bool& enabled,
- int& unknownPayloadType,
- int& deltaTransmitTimeSeconds)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
- "GetRTPKeepaliveStatus(channel=%d)", channel);
- if (!_engineStatistics.Initialized())
- {
- _engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ScopedChannel sc(_channelManager, channel);
- voe::Channel* channelPtr = sc.ChannelPtr();
- if (channelPtr == NULL)
- {
- _engineStatistics.SetLastError(
- VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRTPKeepaliveStatus() failed to locate channel");
- return -1;
- }
- return channelPtr->GetRTPKeepaliveStatus(enabled,
- unknownPayloadType,
- deltaTransmitTimeSeconds);
-}
-
int VoERTP_RTCPImpl::StartRTPDump(int channel,
const char fileNameUTF8[1024],
RTPDirections direction)
diff --git a/voice_engine/main/source/voe_rtp_rtcp_impl.h b/voice_engine/main/source/voe_rtp_rtcp_impl.h
index d3a840d..1912c3a 100644
--- a/voice_engine/main/source/voe_rtp_rtcp_impl.h
+++ b/voice_engine/main/source/voe_rtp_rtcp_impl.h
@@ -87,17 +87,6 @@
virtual int GetRTCPStatistics(int channel, CallStatistics& stats);
- // RTP keepalive mechanism (maintains NAT mappings associated to RTP flows)
- virtual int SetRTPKeepaliveStatus(int channel,
- bool enable,
- int unknownPayloadType,
- int deltaTransmitTimeSeconds = 15);
-
- virtual int GetRTPKeepaliveStatus(int channel,
- bool& enabled,
- int& unknownPayloadType,
- int& deltaTransmitTimeSeconds);
-
// FEC
virtual int SetFECStatus(int channel,
bool enable,
diff --git a/voice_engine/main/test/auto_test/standard/codec_test.cc b/voice_engine/main/test/auto_test/standard/codec_test.cc
index dc5558b..d861452 100644
--- a/voice_engine/main/test/auto_test/standard/codec_test.cc
+++ b/voice_engine/main/test/auto_test/standard/codec_test.cc
@@ -193,7 +193,8 @@
EXPECT_EQ(0, voe_base_->StartSend(channel_));
}
-TEST_F(CodecTest, ManualVerifySendCodecsForAllPacketSizes) {
+// TODO(xians, phoglund): Re-enable when issue 372 is resolved.
+TEST_F(CodecTest, DISABLED_ManualVerifySendCodecsForAllPacketSizes) {
for (int i = 0; i < voe_codec_->NumOfCodecs(); ++i) {
voe_codec_->GetCodec(i, codec_instance_);
if (IsNotViableSendCodec(codec_instance_.plname)) {
diff --git a/voice_engine/main/test/auto_test/standard/encryption_test.cc b/voice_engine/main/test/auto_test/standard/encryption_test.cc
new file mode 100644
index 0000000..fba5f9a
--- /dev/null
+++ b/voice_engine/main/test/auto_test/standard/encryption_test.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/main/interface/voe_encryption.h"
+#include "voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class BasicBitInverseEncryption : public webrtc::Encryption {
+ void encrypt(int channel_no, unsigned char* in_data,
+ unsigned char* out_data, int bytes_in, int* bytes_out);
+ void decrypt(int channel_no, unsigned char* in_data,
+ unsigned char* out_data, int bytes_in, int* bytes_out);
+ void encrypt_rtcp(int channel_no, unsigned char* in_data,
+ unsigned char* out_data, int bytes_in, int* bytes_out);
+ void decrypt_rtcp(int channel_no, unsigned char* in_data,
+ unsigned char* out_data, int bytes_in, int* bytes_out);
+};
+
+void BasicBitInverseEncryption::encrypt(int, unsigned char* in_data,
+ unsigned char* out_data,
+ int bytes_in, int* bytes_out) {
+ int i;
+ for (i = 0; i < bytes_in; i++)
+ out_data[i] = ~in_data[i];
+ *bytes_out = bytes_in + 2;
+}
+
+void BasicBitInverseEncryption::decrypt(int, unsigned char* in_data,
+ unsigned char* out_data,
+ int bytes_in, int* bytes_out) {
+ int i;
+ for (i = 0; i < bytes_in; i++)
+ out_data[i] = ~in_data[i];
+ *bytes_out = bytes_in - 2;
+}
+
+void BasicBitInverseEncryption::encrypt_rtcp(int, unsigned char* in_data,
+ unsigned char* out_data,
+ int bytes_in, int* bytes_out) {
+ int i;
+ for (i = 0; i < bytes_in; i++)
+ out_data[i] = ~in_data[i];
+ *bytes_out = bytes_in + 2;
+}
+
+void BasicBitInverseEncryption::decrypt_rtcp(int, unsigned char* in_data,
+ unsigned char* out_data,
+ int bytes_in, int* bytes_out) {
+ int i;
+ for (i = 0; i < bytes_in; i++)
+ out_data[i] = ~in_data[i];
+ *bytes_out = bytes_in + 2;
+}
+
+
+class EncryptionTest : public AfterStreamingFixture {
+};
+
+TEST_F(EncryptionTest, ManualBasicCorrectExternalEncryptionHasNoEffectOnVoice) {
+ BasicBitInverseEncryption basic_encryption;
+
+ voe_encrypt_->RegisterExternalEncryption(channel_, basic_encryption);
+
+ TEST_LOG("Registered external encryption, should still hear good audio.");
+ Sleep(3000);
+
+ voe_encrypt_->DeRegisterExternalEncryption(channel_);
+}
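The test codec above inverts every byte (out = ~in); since bitwise NOT is its own inverse, a matched encrypt/decrypt pair leaves the payload intact, which is why the manual test expects unchanged audio. The +2/-2 adjustments to bytes_out additionally exercise packet lengths that change in flight. A minimal demonstration of the round trip:

    #include <cassert>

    static void Invert(const unsigned char* in, unsigned char* out, int len) {
      for (int i = 0; i < len; ++i)
        out[i] = static_cast<unsigned char>(~in[i]);  // Same as in[i] ^ 0xFF.
    }

    int main() {
      const unsigned char original[3] = {0x00, 0x5A, 0xFF};
      unsigned char wire[3];
      unsigned char restored[3];
      Invert(original, wire, 3);   // "Encrypt".
      Invert(wire, restored, 3);   // "Decrypt": inverting twice is the identity.
      for (int i = 0; i < 3; ++i)
        assert(restored[i] == original[i]);
      return 0;
    }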
diff --git a/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc b/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
index 9965516..93170f6 100644
--- a/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
+++ b/voice_engine/main/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
@@ -42,77 +42,6 @@
EXPECT_TRUE(on);
}
-TEST_F(RtpRtcpBeforeStreamingTest, RtpKeepAliveStatusIsOffByDefault) {
- int payload_type;
- int delta_seconds;
- bool on;
-
- // Should be off by default.
- EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
- channel_, on, payload_type, delta_seconds));
- EXPECT_FALSE(on);
- EXPECT_EQ(-1, payload_type);
- EXPECT_EQ(0, delta_seconds);
-}
-
-TEST_F(RtpRtcpBeforeStreamingTest, SetRtpKeepAliveDealsWithInvalidParameters) {
- int payload_type;
- int delta_seconds;
- bool on;
-
- // Verify invalid input parameters.
- EXPECT_NE(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
- -1, on, payload_type, delta_seconds)) <<
- "Should fail for invalid channel -1.";
- EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- -1, true, 0, 15)) <<
- "Should fail for invalid channel -1.";
- EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, true, -1, 15)) <<
- "Should fail for invalid payload -1.";
- EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, true, 0, 61)) <<
- "The delta time must be [1, 60] seconds.";
- EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
- channel_, on, payload_type, delta_seconds));
- EXPECT_NE(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, true, 0));
-
- // Should still be off, default 0 used by PCMU.
- EXPECT_FALSE(on);
-}
-
-TEST_F(RtpRtcpBeforeStreamingTest,
- GetRtpKeepaliveStatusObeysSetRtpKeepaliveStatus) {
- EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCP_CNAME(channel_, "SomeName"));
-
- // Try valid settings.
- EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, true, 1));
-
- int payload_type;
- int delta_seconds;
- bool on;
-
- EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
- 0, on, payload_type, delta_seconds));
- EXPECT_TRUE(on);
- EXPECT_EQ(1, payload_type);
- EXPECT_EQ(15, delta_seconds) << "15 seconds delta is default.";
-
- // Set the keep-alive payload to 60, which the codecs can't use.
- EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, true, 60, 3));
- EXPECT_EQ(0, voe_rtp_rtcp_->GetRTPKeepaliveStatus(
- channel_, on, payload_type, delta_seconds));
- EXPECT_TRUE(on);
- EXPECT_EQ(60, payload_type);
- EXPECT_EQ(3, delta_seconds);
-
- EXPECT_EQ(0, voe_rtp_rtcp_->SetRTPKeepaliveStatus(
- channel_, false, 60));
-}
-
TEST_F(RtpRtcpBeforeStreamingTest, GetLocalSsrcObeysSetLocalSsrc) {
EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, 1234));
unsigned int result = 0;
diff --git a/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc b/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc
index 42c1b4e..325ec25 100644
--- a/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc
+++ b/voice_engine/main/test/auto_test/standard/rtp_rtcp_test.cc
@@ -147,7 +147,8 @@
}
}
-TEST_F(RtpRtcpTest, CanCreateRtpDumpFilesWithoutError) {
+// TODO(xians, phoglund): Re-enable when issue 372 is resolved.
+TEST_F(RtpRtcpTest, DISABLED_CanCreateRtpDumpFilesWithoutError) {
// Create two RTP dump files (3 seconds long). You can verify these after
// the test using rtpplay or NetEqRTPplay if you like.
std::string output_path = webrtc::test::OutputPath();
diff --git a/voice_engine/main/test/auto_test/standard/video_sync_test.cc b/voice_engine/main/test/auto_test/standard/video_sync_test.cc
new file mode 100644
index 0000000..488593d
--- /dev/null
+++ b/voice_engine/main/test/auto_test/standard/video_sync_test.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cmath>
+#include <numeric>
+#include <vector>
+
+#include "voice_engine/main/test/auto_test/fixtures/after_streaming_fixture.h"
+
+#ifdef MAC_IPHONE
+ const int kMinimumReasonableDelayEstimateMs = 30;
+#else
+ const int kMinimumReasonableDelayEstimateMs = 45;
+#endif // MAC_IPHONE
+
+class VideoSyncTest : public AfterStreamingFixture {
+ protected:
+ // This test will verify that delay estimates converge (e.g. the standard
+ // deviation for the last five seconds' estimates is less than 20) without
+ // manual observation. The test runs for 15 seconds, sampling once per second.
+ // All samples are checked so they are greater than |min_estimate|.
+ int CollectEstimatesDuring15Seconds(int min_estimate) {
+ Sleep(1000);
+
+ std::vector<int> all_delay_estimates;
+ for (int second = 0; second < 15; second++) {
+ int delay_estimate = 0;
+ EXPECT_EQ(0, voe_vsync_->GetDelayEstimate(channel_, delay_estimate));
+
+ EXPECT_GT(delay_estimate, min_estimate) <<
+ "The delay estimate can not conceivably get lower than " <<
+ min_estimate << " ms, it's unrealistic.";
+
+ all_delay_estimates.push_back(delay_estimate);
+ Sleep(1000);
+ }
+
+ return ComputeStandardDeviation(
+ all_delay_estimates.begin() + 10, all_delay_estimates.end());
+ }
+
+ void CheckEstimatesConvergeReasonablyWell(int min_estimate) {
+ float standard_deviation = CollectEstimatesDuring15Seconds(min_estimate);
+ EXPECT_LT(standard_deviation, 20.0f);
+ }
+
+ // Computes the standard deviation by first estimating the sample variance
+ // with an unbiased estimator.
+ float ComputeStandardDeviation(std::vector<int>::const_iterator start,
+ std::vector<int>::const_iterator end) const {
+ int num_elements = end - start;
+ int mean = std::accumulate(start, end, 0) / num_elements;
+ assert(num_elements > 1);
+
+ float variance = 0;
+ for (; start != end; ++start) {
+ variance += (*start - mean) * (*start - mean) / (num_elements - 1);
+ }
+ return std::sqrt(variance);
+ }
+};
+
+TEST_F(VideoSyncTest, CanGetPlayoutTimestampWhilePlayingWithoutSettingItFirst) {
+ unsigned int ignored;
+ EXPECT_EQ(0, voe_vsync_->GetPlayoutTimestamp(channel_, ignored));
+}
+
+TEST_F(VideoSyncTest, CannotSetInitTimestampWhilePlaying) {
+ EXPECT_EQ(-1, voe_vsync_->SetInitTimestamp(channel_, 12345));
+}
+
+TEST_F(VideoSyncTest, CannotSetInitSequenceNumberWhilePlaying) {
+ EXPECT_EQ(-1, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+}
+
+TEST_F(VideoSyncTest, CanSetInitTimestampWhileStopped) {
+ EXPECT_EQ(0, voe_base_->StopSend(channel_));
+ EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+}
+
+TEST_F(VideoSyncTest, CanSetInitSequenceNumberWhileStopped) {
+ EXPECT_EQ(0, voe_base_->StopSend(channel_));
+ EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+}
+
+TEST_F(VideoSyncTest, DelayEstimatesStabilizeDuring15sAndAreNotTooLow) {
+ EXPECT_EQ(0, voe_base_->StopSend(channel_));
+ EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+ EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+ EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+ CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+}
+
+TEST_F(VideoSyncTest, DelayEstimatesStabilizeAfterNetEqMinDelayChanges45s) {
+ EXPECT_EQ(0, voe_base_->StopSend(channel_));
+ EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+ EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+ EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+ CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+ EXPECT_EQ(0, voe_vsync_->SetMinimumPlayoutDelay(channel_, 200));
+ CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+ EXPECT_EQ(0, voe_vsync_->SetMinimumPlayoutDelay(channel_, 0));
+ CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+}
+
+#if !defined(WEBRTC_ANDROID)
+TEST_F(VideoSyncTest, CanGetPlayoutBufferSize) {
+ int ignored;
+ EXPECT_EQ(0, voe_vsync_->GetPlayoutBufferSize(ignored));
+}
+#endif  // !WEBRTC_ANDROID
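
For reference, the convergence check in the new fixture reduces to a Bessel-corrected sample standard deviation over the last five one-second samples. The standalone sketch below (file name, sample values, and printed result are invented for illustration) performs the same computation:

// standalone_stddev_example.cc -- illustrative only; the sample values are
// made up and not taken from any real test run.
#include <cassert>
#include <cmath>
#include <cstdio>
#include <numeric>
#include <vector>

// Same Bessel-corrected estimator as VideoSyncTest::ComputeStandardDeviation:
// divide the squared deviations by (n - 1) rather than n.
float StandardDeviation(const std::vector<int>& samples) {
  int n = samples.size();
  assert(n > 1);
  float mean = std::accumulate(samples.begin(), samples.end(), 0) /
               static_cast<float>(n);
  float variance = 0;
  for (size_t i = 0; i < samples.size(); ++i) {
    variance += (samples[i] - mean) * (samples[i] - mean) / (n - 1);
  }
  return std::sqrt(variance);
}

int main() {
  // Hypothetical "last five seconds" of delay estimates, in milliseconds.
  std::vector<int> estimates;
  estimates.push_back(110);
  estimates.push_back(112);
  estimates.push_back(111);
  estimates.push_back(109);
  estimates.push_back(113);
  // Mean = 111; squared deviations sum to 10; 10 / 4 = 2.5; sqrt ~ 1.58 ms,
  // comfortably under the 20 ms convergence threshold the test applies.
  printf("stddev = %.2f ms\n", StandardDeviation(estimates));
  return 0;
}

Dividing by (n - 1) rather than n is what makes the variance estimator unbiased when the five-sample window is treated as a draw from a longer stationary run.
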
diff --git a/voice_engine/main/test/auto_test/standard/volume_test.cc b/voice_engine/main/test/auto_test/standard/volume_test.cc
index 44887b7..b00153d 100644
--- a/voice_engine/main/test/auto_test/standard/volume_test.cc
+++ b/voice_engine/main/test/auto_test/standard/volume_test.cc
@@ -44,15 +44,18 @@
#if !defined(MAC_IPHONE)
-// NOTE(phoglund): This test is flaky because of how the OS works, and is hence
-// disabled by default.
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=367
TEST_F(VolumeTest, DISABLED_DefaultMicrophoneVolumeIsAtMost255) {
unsigned int volume = 1000;
EXPECT_EQ(0, voe_volume_control_->GetMicVolume(volume));
EXPECT_LE(volume, 255u);
}
-TEST_F(VolumeTest, ManualRequiresMicrophoneCanSetMicrophoneVolumeWithAcgOff) {
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=367
+TEST_F(VolumeTest,
+       DISABLED_ManualRequiresMicrophoneCanSetMicrophoneVolumeWithAgcOff) {
SwitchToManualMicrophone();
EXPECT_EQ(0, voe_apm_->SetAgcStatus(false));
@@ -123,15 +126,17 @@
Sleep(2000);
}
-// NOTE(phoglund): This test is flaky because of how the OS works, and is hence
-// disabled by default.
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=367
TEST_F(VolumeTest, DISABLED_SystemInputMutingIsNotEnabledByDefault) {
bool is_muted = true;
EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
EXPECT_FALSE(is_muted);
}
-TEST_F(VolumeTest, ManualSystemInputMutingMutesMicrophone) {
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=367
+TEST_F(VolumeTest, DISABLED_ManualSystemInputMutingMutesMicrophone) {
SwitchToManualMicrophone();
// Enable system input muting.
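
A note on the DISABLED_ prefix used above: this is standard googletest behavior, so the tests still compile and link but are skipped by default while issue 367 is investigated. They can still be exercised on demand; the binary name below is an assumption, but the flags are stock googletest:

  ./voe_auto_test --gtest_filter='VolumeTest.*' --gtest_also_run_disabled_tests
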
diff --git a/voice_engine/main/test/auto_test/voe_extended_test.cc b/voice_engine/main/test/auto_test/voe_extended_test.cc
index 11b41af..6c5a63f 100644
--- a/voice_engine/main/test/auto_test/voe_extended_test.cc
+++ b/voice_engine/main/test/auto_test/voe_extended_test.cc
@@ -7148,49 +7148,6 @@
TEST_MUSTPASS(rtp_rtcp->DeRegisterRTPObserver(0));
ANL();
- TEST(GetRTPKeepaliveStatus);
- int pt;
- int dT;
- TEST_MUSTPASS(!rtp_rtcp->GetRTPKeepaliveStatus(-1, enabled, pt, dT));
- MARK();
- TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
- MARK(); // should be off by default
- TEST_MUSTPASS(enabled != false);
- TEST_MUSTPASS(pt != 255);
- TEST_MUSTPASS(dT != 0);
- ANL();
-
- TEST(SetRTPKeepaliveStatus);
- // stop send before changing the settings
- TEST_MUSTPASS(voe_base_->StopSend(0));
- // verify invalid input parameters
- TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(-1, true, 0, 15));
- MARK();
- TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(0, true, -1, 15));
- MARK();
- TEST_MUSTPASS(!rtp_rtcp->SetRTPKeepaliveStatus(0, true, 0, 61));
- MARK();
- TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
- MARK(); // should still be off
- TEST_MUSTPASS(enabled != false);
- // try valid settings
- TEST_MUSTPASS(rtp_rtcp->SetRTPKeepaliveStatus(0, true, 117));
- MARK();
- TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
- MARK(); // should be on now
- TEST_MUSTPASS(enabled != true);
- TEST_MUSTPASS(pt != 117);
- TEST_MUSTPASS(dT != 15);
- // change from PT 99 to 121, as 99 is occupied
- TEST_MUSTPASS(rtp_rtcp->SetRTPKeepaliveStatus(0, true, 121, 3));
- MARK(); // on, PT=99, dT=3
- TEST_MUSTPASS(rtp_rtcp->GetRTPKeepaliveStatus(0, enabled, pt, dT));
- MARK();
- TEST_MUSTPASS(enabled != true);
- TEST_MUSTPASS(pt != 121);
- TEST_MUSTPASS(dT != 3);
- ANL();
-
// Make fresh restart (ensures that SSRC is randomized)
TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
TEST_MUSTPASS(voe_base_->StopSend(0));
diff --git a/voice_engine/main/test/auto_test/voe_standard_test.cc b/voice_engine/main/test/auto_test/voe_standard_test.cc
index ff8e531..6fcecd9 100644
--- a/voice_engine/main/test/auto_test/voe_standard_test.cc
+++ b/voice_engine/main/test/auto_test/voe_standard_test.cc
@@ -332,47 +332,6 @@
name_ = 0;
}
-void my_encryption::encrypt(int, unsigned char * in_data,
- unsigned char * out_data,
- int bytes_in,
- int * bytes_out) {
- int i;
- for (i = 0; i < bytes_in; i++)
- out_data[i] = ~in_data[i];
- *bytes_out = bytes_in + 2; // length is increased by 2
-}
-
-void my_encryption::decrypt(int, unsigned char * in_data,
- unsigned char * out_data,
- int bytes_in,
- int * bytes_out) {
- int i;
- for (i = 0; i < bytes_in; i++)
- out_data[i] = ~in_data[i];
- *bytes_out = bytes_in - 2; // length is decreased by 2
-}
-
-void my_encryption::encrypt_rtcp(int,
- unsigned char * in_data,
- unsigned char * out_data,
- int bytes_in,
- int * bytes_out) {
- int i;
- for (i = 0; i < bytes_in; i++)
- out_data[i] = ~in_data[i];
- *bytes_out = bytes_in + 2;
-}
-
-void my_encryption::decrypt_rtcp(int, unsigned char * in_data,
- unsigned char * out_data,
- int bytes_in,
- int * bytes_out) {
- int i;
- for (i = 0; i < bytes_in; i++)
- out_data[i] = ~in_data[i];
- *bytes_out = bytes_in + 2;
-}
-
void SubAPIManager::DisplayStatus() const {
TEST_LOG("Supported sub APIs:\n\n");
if (_base)
@@ -983,157 +942,6 @@
if (TestStartStreaming(channel0_transport) != 0) return -1;
if (TestStartPlaying() != 0) return -1;
- //////////////
- // Video Sync
-
-#ifdef _TEST_VIDEO_SYNC_
- TEST_LOG("\n\n+++ Video sync tests +++\n\n");
-
- unsigned int val;
- TEST_MUSTPASS(voe_vsync_->GetPlayoutTimestamp(0, val));
- TEST_LOG("Playout timestamp = %lu\n", (long unsigned int) val);
-
- TEST_LOG("Init timestamp and sequence number manually\n");
- TEST_MUSTPASS(!voe_vsync_->SetInitTimestamp(0, 12345));
- TEST_MUSTPASS(!voe_vsync_->SetInitSequenceNumber(0, 123));
- TEST_MUSTPASS(voe_base_->StopSend(0));
- TEST_MUSTPASS(voe_vsync_->SetInitTimestamp(0, 12345));
- TEST_MUSTPASS(voe_vsync_->SetInitSequenceNumber(0, 123));
- TEST_MUSTPASS(voe_base_->StartSend(0));
- if (voe_file_) {
- TEST_LOG("Start playing a file as microphone again \n");
- TEST_MUSTPASS(voe_file_->StartPlayingFileAsMicrophone(0,
- AudioFilename(),
- true,
- true));
- }
- SLEEP(3000);
-
- TEST_LOG("Check delay estimates during 15 seconds, verify that "
- "they stabilize during this time\n");
- int valInt = -1;
- for (int i = 0; i < 15; i++) {
- TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
- TEST_LOG("Delay estimate = %d ms\n", valInt);
-#if defined(MAC_IPHONE)
- TEST_MUSTPASS(valInt <= 30);
-#else
- TEST_MUSTPASS(valInt <= 45); // 45=20+25 => can't be this low
-#endif
- SLEEP(1000);
- }
-
- TEST_LOG("Setting NetEQ min delay to 500 milliseconds and repeat "
- "the test above\n");
- TEST_MUSTPASS(voe_vsync_->SetMinimumPlayoutDelay(0, 500));
- for (int i = 0; i < 15; i++) {
- TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
- TEST_LOG("Delay estimate = %d ms\n", valInt);
- TEST_MUSTPASS(valInt <= 45);
- SLEEP(1000);
- }
-
- TEST_LOG("Setting NetEQ min delay to 0 milliseconds and repeat"
- " the test above\n");
- TEST_MUSTPASS(voe_vsync_->SetMinimumPlayoutDelay(0, 0));
- for (int i = 0; i < 15; i++) {
- TEST_MUSTPASS(voe_vsync_->GetDelayEstimate(0, valInt));
- TEST_LOG("Delay estimate = %d ms\n", valInt);
- TEST_MUSTPASS(valInt <= 45);
- SLEEP(1000);
- }
-
-#if (defined (_WIN32) || (defined(WEBRTC_LINUX)) && !defined(WEBRTC_ANDROID))
- valInt = -1;
- TEST_MUSTPASS(voe_vsync_->GetPlayoutBufferSize(valInt));
- TEST_LOG("Soundcard buffer size = %d ms\n", valInt);
-#endif
-#else
- TEST_LOG("\n\n+++ Video sync tests NOT ENABLED +++\n");
-#endif // #ifdef _TEST_VIDEO_SYNC_
- //////////////
- // Encryption
-
-#ifdef _TEST_ENCRYPT_
- TEST_LOG("\n\n+++ Encryption tests +++\n\n");
-
-#ifdef WEBRTC_SRTP
- TEST_LOG("SRTP tests:\n");
-
- unsigned char encrKey[30] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
-
- TEST_LOG("Enable SRTP encryption and decryption, you should still hear"
- " the voice\n");
- TEST_MUSTPASS(voe_encrypt_->EnableSRTPSend(0,
- kCipherAes128CounterMode,
- 30,
- kAuthHmacSha1,
- 20, 4, kEncryptionAndAuthentication, encrKey));
- TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
- kCipherAes128CounterMode,
- 30,
- kAuthHmacSha1,
- 20, 4, kEncryptionAndAuthentication, encrKey));
- SLEEP(2000);
-
- TEST_LOG("Disabling decryption, you should hear nothing or garbage\n");
- TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
- SLEEP(2000);
-
- TEST_LOG("Enable decryption again, you should hear the voice again\n");
- TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
- kCipherAes128CounterMode,
- 30,
- kAuthHmacSha1,
- 20, 4, kEncryptionAndAuthentication, encrKey));
- SLEEP(2000);
-
- TEST_LOG("Disabling encryption and enabling decryption, you should"
- " hear nothing\n");
- TEST_MUSTPASS(voe_encrypt_->DisableSRTPSend(0));
- SLEEP(2000);
-
- TEST_LOG("Back to normal\n");
- // both SRTP sides are now inactive
- TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
- SLEEP(2000);
-
- TEST_LOG("Enable SRTP and SRTCP encryption and decryption,"
- " you should still hear the voice\n");
- TEST_MUSTPASS(voe_encrypt_->EnableSRTPSend(0,
- kCipherAes128CounterMode,
- 30,
- kAuthHmacSha1,
- 20, 4, kEncryptionAndAuthentication, encrKey, true));
- TEST_MUSTPASS(voe_encrypt_->EnableSRTPReceive(0,
- kCipherAes128CounterMode,
- 30,
- kAuthHmacSha1,
- 20, 4, kEncryptionAndAuthentication, encrKey, true));
- SLEEP(2000);
-
- TEST_LOG("Back to normal\n");
- TEST_MUSTPASS(voe_encrypt_->DisableSRTPSend(0));
- // both SRTP sides are now inactive
- TEST_MUSTPASS(voe_encrypt_->DisableSRTPReceive(0));
- SLEEP(2000);
-
-#else
- TEST_LOG("Skipping SRTP tests - WEBRTC_SRTP not defined \n");
-#endif // #ifdef WEBRTC_SRTP
- TEST_LOG("\nExternal encryption tests:\n");
- my_encryption * encObj = new my_encryption;
- TEST_MUSTPASS(voe_encrypt_->RegisterExternalEncryption(0, *encObj));
- TEST_LOG("Encryption enabled but you should still hear the voice\n");
- SLEEP(2000);
- TEST_LOG("Removing encryption object and deleting it\n");
- TEST_MUSTPASS(voe_encrypt_->DeRegisterExternalEncryption(0));
- delete encObj;
- SLEEP(2000);
-#else
- TEST_LOG("\n\n+++ Encryption tests NOT ENABLED +++\n");
-#endif // #ifdef _TEST_ENCRYPT_
//////////////////
// External media
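
The hand-driven encryption pass deleted above is superseded by auto_test/standard/encryption_test.cc, which is added to voice_engine_tests.gypi later in this change. That file's contents are not shown in this diff, so the sketch below is only a plausible gtest-style port of the deleted inverting codec: the EncryptionTest fixture name and the assertions are assumptions, while the byte-inverting transform and the Encryption interface come from the code removed above and from voe_standard_test.h.

// Sketch only. AfterStreamingFixture-style members (voe_encrypt_, channel_)
// are assumed to be available, as in the other standard tests.
class InvertingEncryption : public webrtc::Encryption {
 public:
  virtual void encrypt(int, unsigned char* in_data, unsigned char* out_data,
                       int bytes_in, int* bytes_out) {
    for (int i = 0; i < bytes_in; i++)
      out_data[i] = ~in_data[i];
    *bytes_out = bytes_in + 2;  // Length grows by 2, as in the deleted code.
  }
  virtual void decrypt(int, unsigned char* in_data, unsigned char* out_data,
                       int bytes_in, int* bytes_out) {
    for (int i = 0; i < bytes_in; i++)
      out_data[i] = ~in_data[i];
    *bytes_out = bytes_in - 2;
  }
  virtual void encrypt_rtcp(int, unsigned char* in_data,
                            unsigned char* out_data, int bytes_in,
                            int* bytes_out) {
    for (int i = 0; i < bytes_in; i++)
      out_data[i] = ~in_data[i];
    *bytes_out = bytes_in + 2;
  }
  virtual void decrypt_rtcp(int, unsigned char* in_data,
                            unsigned char* out_data, int bytes_in,
                            int* bytes_out) {
    for (int i = 0; i < bytes_in; i++)
      out_data[i] = ~in_data[i];
    // The deleted code wrote bytes_in + 2 here, which looks like an
    // oversight; a decrypt should undo the corresponding encrypt.
    *bytes_out = bytes_in - 2;
  }
};

TEST_F(EncryptionTest, ExternalEncryptionCanBeRegisteredAndDeregistered) {
  InvertingEncryption inverter;
  EXPECT_EQ(0, voe_encrypt_->RegisterExternalEncryption(channel_, inverter));
  Sleep(2000);  // Matched encrypt/decrypt: audio should remain audible.
  EXPECT_EQ(0, voe_encrypt_->DeRegisterExternalEncryption(channel_));
}
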
diff --git a/voice_engine/main/test/auto_test/voe_standard_test.h b/voice_engine/main/test/auto_test/voe_standard_test.h
index ae0fb24..4512e5d 100644
--- a/voice_engine/main/test/auto_test/voe_standard_test.h
+++ b/voice_engine/main/test/auto_test/voe_standard_test.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -150,17 +150,6 @@
}
};
-class my_encryption : public Encryption {
- void encrypt(int channel_no, unsigned char * in_data,
- unsigned char * out_data, int bytes_in, int * bytes_out);
- void decrypt(int channel_no, unsigned char * in_data,
- unsigned char * out_data, int bytes_in, int * bytes_out);
- void encrypt_rtcp(int channel_no, unsigned char * in_data,
- unsigned char * out_data, int bytes_in, int * bytes_out);
- void decrypt_rtcp(int channel_no, unsigned char * in_data,
- unsigned char * out_data, int bytes_in, int * bytes_out);
-};
-
class RxCallback : public VoERxVadCallback {
public:
RxCallback() :
diff --git a/voice_engine/main/test/auto_test/voe_unit_test.cc b/voice_engine/main/test/auto_test/voe_unit_test.cc
index 1aa9e10..2c1e5c5 100644
--- a/voice_engine/main/test/auto_test/voe_unit_test.cc
+++ b/voice_engine/main/test/auto_test/voe_unit_test.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -187,7 +187,7 @@
// ----------------------------------------------------------------------------
VoEUnitTest::VoEUnitTest(VoETestManager& mgr) :
- _mgr(mgr), _extOnOff(false), _extBitsPerSample(-1) {
+ _mgr(mgr), _extOnOff(false), _extBitsPerSample(-1), _extChannel(0) {
for (int i = 0; i < 32; i++) {
_listening[i] = false;
_playing[i] = false;
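
The _extChannel(0) initializer added above closes a routine C++ hazard: a member of built-in type omitted from the constructor's initializer list holds an indeterminate value, and reading it before the first assignment is undefined behavior. A minimal hypothetical illustration (names invented):

// Hypothetical illustration of the bug class fixed above.
class Recorder {
 public:
  Recorder() : enabled_(false) {}  // channel_ missing from the init list.
  int channel() const { return channel_; }  // Reads an indeterminate value.
 private:
  bool enabled_;
  int channel_;  // The fix: initialize to 0, as _extChannel(0) does above.
};
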
diff --git a/voice_engine/main/test/cmd_test/voe_cmd_test.cc b/voice_engine/main/test/cmd_test/voe_cmd_test.cc
index 99e88d2..a576491 100644
--- a/voice_engine/main/test/cmd_test/voe_cmd_test.cc
+++ b/voice_engine/main/test/cmd_test/voe_cmd_test.cc
@@ -144,11 +144,7 @@
MyObserver my_observer;
-#if defined(WEBRTC_ANDROID)
- const std::string out_path = "/sdcard/";
-#else
const std::string out_path = webrtc::test::OutputPath();
-#endif
const std::string trace_filename = out_path + "webrtc_trace.txt";
printf("Set trace filenames (enable trace)\n");
diff --git a/voice_engine/main/test/voice_engine_tests.gypi b/voice_engine/main/test/voice_engine_tests.gypi
index db108ba..80a9fe2 100644
--- a/voice_engine/main/test/voice_engine_tests.gypi
+++ b/voice_engine/main/test/voice_engine_tests.gypi
@@ -41,6 +41,7 @@
'auto_test/standard/codec_before_streaming_test.cc',
'auto_test/standard/codec_test.cc',
'auto_test/standard/dtmf_test.cc',
+ 'auto_test/standard/encryption_test.cc',
'auto_test/standard/file_test.cc',
'auto_test/standard/hardware_before_initializing_test.cc',
'auto_test/standard/hardware_before_streaming_test.cc',
@@ -52,6 +53,7 @@
'auto_test/standard/rtp_rtcp_before_streaming_test.cc',
'auto_test/standard/rtp_rtcp_test.cc',
+ 'auto_test/standard/video_sync_test.cc',
'auto_test/standard/voe_base_misc_test.cc',
'auto_test/standard/volume_test.cc',
'auto_test/resource_manager.cc',
'auto_test/voe_cpu_test.cc',