Use pixels from single active stream (if set) in CanDecreaseResolutionTo

Simulcast with one active stream:
Use the pixels from the single active stream, if set (instead of the input stream, which could be larger), to avoid going below the min_pixels_per_frame limit when downgrading resolution.
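
Illustrative sketch (not part of this CL) of the intended check; the 1280x720
input size, the 640x360 active layer, and the input_state/restrictions
variables are hypothetical:

  // Input is 1280x720, but only one 640x360 simulcast layer is active.
  int target_pixels = GetLowerResolutionThan(1280 * 720);
  int target_pixels_min = GetLowerResolutionThan(640 * 360);
  // Downgrading is only allowed if the step below the *active* stream still
  // satisfies min_pixels_per_frame; previously the larger input size was
  // used, so the active stream could end up below the limit.
  bool allowed = CanDecreaseResolutionTo(target_pixels, target_pixels_min,
                                         input_state, restrictions);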

Bug: none
Change-Id: I65acb12cc53e46f726ccb5bfab8ce08ff0c4cf78
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/208101
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33309}
diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc
index f81e217..6620eff 100644
--- a/call/adaptation/video_stream_adapter.cc
+++ b/call/adaptation/video_stream_adapter.cc
@@ -62,13 +62,14 @@
 }
 
 bool CanDecreaseResolutionTo(int target_pixels,
+                             int target_pixels_min,
                              const VideoStreamInputState& input_state,
                              const VideoSourceRestrictions& restrictions) {
   int max_pixels_per_frame =
       rtc::dchecked_cast<int>(restrictions.max_pixels_per_frame().value_or(
           std::numeric_limits<int>::max()));
   return target_pixels < max_pixels_per_frame &&
-         target_pixels >= input_state.min_pixels_per_frame();
+         target_pixels_min >= input_state.min_pixels_per_frame();
 }
 
 bool CanIncreaseResolutionTo(int target_pixels,
@@ -96,6 +97,11 @@
 }
 
 bool MinPixelLimitReached(const VideoStreamInputState& input_state) {
+  if (input_state.single_active_stream_pixels().has_value()) {
+    return GetLowerResolutionThan(
+               input_state.single_active_stream_pixels().value()) <
+           input_state.min_pixels_per_frame();
+  }
   return input_state.frame_size_pixels().has_value() &&
          GetLowerResolutionThan(input_state.frame_size_pixels().value()) <
              input_state.min_pixels_per_frame();
@@ -470,7 +476,11 @@
     const RestrictionsWithCounters& current_restrictions) {
   int target_pixels =
       GetLowerResolutionThan(input_state.frame_size_pixels().value());
-  if (!CanDecreaseResolutionTo(target_pixels, input_state,
+  // Use the single active stream, if set; it could be smaller than the input.
+  int target_pixels_min =
+      GetLowerResolutionThan(input_state.single_active_stream_pixels().value_or(
+          input_state.frame_size_pixels().value()));
+  if (!CanDecreaseResolutionTo(target_pixels, target_pixels_min, input_state,
                                current_restrictions.restrictions)) {
     return Adaptation::Status::kLimitReached;
   }
@@ -693,4 +703,27 @@
     : pixels_increased(pixels_increased),
       frame_size_pixels(frame_size_pixels) {}
 
+absl::optional<uint32_t> VideoStreamAdapter::GetSingleActiveLayerPixels(
+    const VideoCodec& codec) {
+  int num_active = 0;
+  absl::optional<uint32_t> pixels;
+  if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
+    for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) {
+      if (codec.spatialLayers[i].active) {
+        ++num_active;
+        pixels = codec.spatialLayers[i].width * codec.spatialLayers[i].height;
+      }
+    }
+  } else {
+    for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
+      if (codec.simulcastStream[i].active) {
+        ++num_active;
+        pixels =
+            codec.simulcastStream[i].width * codec.simulcastStream[i].height;
+      }
+    }
+  }
+  return (num_active > 1) ? absl::nullopt : pixels;
+}
+
 }  // namespace webrtc
diff --git a/call/adaptation/video_stream_adapter.h b/call/adaptation/video_stream_adapter.h
index 2b55c3d..3c876b8 100644
--- a/call/adaptation/video_stream_adapter.h
+++ b/call/adaptation/video_stream_adapter.h
@@ -163,6 +163,9 @@
     VideoAdaptationCounters counters;
   };
 
+  static absl::optional<uint32_t> GetSingleActiveLayerPixels(
+      const VideoCodec& codec);
+
  private:
   void BroadcastVideoRestrictionsUpdate(
       const VideoStreamInputState& input_state,
diff --git a/call/adaptation/video_stream_input_state.cc b/call/adaptation/video_stream_input_state.cc
index dc3315e..9c0d475 100644
--- a/call/adaptation/video_stream_input_state.cc
+++ b/call/adaptation/video_stream_input_state.cc
@@ -19,7 +19,8 @@
       frame_size_pixels_(absl::nullopt),
       frames_per_second_(0),
       video_codec_type_(VideoCodecType::kVideoCodecGeneric),
-      min_pixels_per_frame_(kDefaultMinPixelsPerFrame) {}
+      min_pixels_per_frame_(kDefaultMinPixelsPerFrame),
+      single_active_stream_pixels_(absl::nullopt) {}
 
 void VideoStreamInputState::set_has_input(bool has_input) {
   has_input_ = has_input;
@@ -43,6 +44,11 @@
   min_pixels_per_frame_ = min_pixels_per_frame;
 }
 
+void VideoStreamInputState::set_single_active_stream_pixels(
+    absl::optional<int> single_active_stream_pixels) {
+  single_active_stream_pixels_ = single_active_stream_pixels;
+}
+
 bool VideoStreamInputState::has_input() const {
   return has_input_;
 }
@@ -63,6 +69,10 @@
   return min_pixels_per_frame_;
 }
 
+absl::optional<int> VideoStreamInputState::single_active_stream_pixels() const {
+  return single_active_stream_pixels_;
+}
+
 bool VideoStreamInputState::HasInputFrameSizeAndFramesPerSecond() const {
   return has_input_ && frame_size_pixels_.has_value();
 }
diff --git a/call/adaptation/video_stream_input_state.h b/call/adaptation/video_stream_input_state.h
index af0d7c7..191e223 100644
--- a/call/adaptation/video_stream_input_state.h
+++ b/call/adaptation/video_stream_input_state.h
@@ -27,12 +27,15 @@
   void set_frames_per_second(int frames_per_second);
   void set_video_codec_type(VideoCodecType video_codec_type);
   void set_min_pixels_per_frame(int min_pixels_per_frame);
+  void set_single_active_stream_pixels(
+      absl::optional<int> single_active_stream_pixels);
 
   bool has_input() const;
   absl::optional<int> frame_size_pixels() const;
   int frames_per_second() const;
   VideoCodecType video_codec_type() const;
   int min_pixels_per_frame() const;
+  absl::optional<int> single_active_stream_pixels() const;
 
   bool HasInputFrameSizeAndFramesPerSecond() const;
 
@@ -42,6 +45,7 @@
   int frames_per_second_;
   VideoCodecType video_codec_type_;
   int min_pixels_per_frame_;
+  absl::optional<int> single_active_stream_pixels_;
 };
 
 }  // namespace webrtc
diff --git a/call/adaptation/video_stream_input_state_provider.cc b/call/adaptation/video_stream_input_state_provider.cc
index 3c0a7e3..3261af3 100644
--- a/call/adaptation/video_stream_input_state_provider.cc
+++ b/call/adaptation/video_stream_input_state_provider.cc
@@ -10,6 +10,8 @@
 
 #include "call/adaptation/video_stream_input_state_provider.h"
 
+#include "call/adaptation/video_stream_adapter.h"
+
 namespace webrtc {
 
 VideoStreamInputStateProvider::VideoStreamInputStateProvider(
@@ -36,6 +38,9 @@
       encoder_settings.encoder_config().codec_type);
   input_state_.set_min_pixels_per_frame(
       encoder_settings.encoder_info().scaling_settings.min_pixels_per_frame);
+  input_state_.set_single_active_stream_pixels(
+      VideoStreamAdapter::GetSingleActiveLayerPixels(
+          encoder_settings.video_codec()));
 }
 
 VideoStreamInputState VideoStreamInputStateProvider::InputState() {
diff --git a/call/adaptation/video_stream_input_state_provider_unittest.cc b/call/adaptation/video_stream_input_state_provider_unittest.cc
index 49c662c..5da2ef2 100644
--- a/call/adaptation/video_stream_input_state_provider_unittest.cc
+++ b/call/adaptation/video_stream_input_state_provider_unittest.cc
@@ -28,6 +28,7 @@
   EXPECT_EQ(0, input_state.frames_per_second());
   EXPECT_EQ(VideoCodecType::kVideoCodecGeneric, input_state.video_codec_type());
   EXPECT_EQ(kDefaultMinPixelsPerFrame, input_state.min_pixels_per_frame());
+  EXPECT_EQ(absl::nullopt, input_state.single_active_stream_pixels());
 }
 
 TEST(VideoStreamInputStateProviderTest, ValuesSet) {
@@ -40,14 +41,22 @@
   encoder_info.scaling_settings.min_pixels_per_frame = 1337;
   VideoEncoderConfig encoder_config;
   encoder_config.codec_type = VideoCodecType::kVideoCodecVP9;
+  VideoCodec video_codec;
+  video_codec.codecType = VideoCodecType::kVideoCodecVP8;
+  video_codec.numberOfSimulcastStreams = 2;
+  video_codec.simulcastStream[0].active = false;
+  video_codec.simulcastStream[1].active = true;
+  video_codec.simulcastStream[1].width = 111;
+  video_codec.simulcastStream[1].height = 222;
   input_state_provider.OnEncoderSettingsChanged(EncoderSettings(
-      std::move(encoder_info), std::move(encoder_config), VideoCodec()));
+      std::move(encoder_info), std::move(encoder_config), video_codec));
   VideoStreamInputState input_state = input_state_provider.InputState();
   EXPECT_EQ(true, input_state.has_input());
   EXPECT_EQ(42, input_state.frame_size_pixels());
   EXPECT_EQ(123, input_state.frames_per_second());
   EXPECT_EQ(VideoCodecType::kVideoCodecVP9, input_state.video_codec_type());
   EXPECT_EQ(1337, input_state.min_pixels_per_frame());
+  EXPECT_EQ(111 * 222, input_state.single_active_stream_pixels());
 }
 
 }  // namespace webrtc
diff --git a/video/adaptation/bitrate_constraint.cc b/video/adaptation/bitrate_constraint.cc
index bb15bf4..a092a71 100644
--- a/video/adaptation/bitrate_constraint.cc
+++ b/video/adaptation/bitrate_constraint.cc
@@ -61,7 +61,7 @@
     }
 
     absl::optional<uint32_t> current_frame_size_px =
-        VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(
+        VideoStreamAdapter::GetSingleActiveLayerPixels(
             encoder_settings_->video_codec());
     if (!current_frame_size_px.has_value()) {
       return true;
diff --git a/video/adaptation/video_stream_encoder_resource_manager.cc b/video/adaptation/video_stream_encoder_resource_manager.cc
index 6cf3801..59b4c8d 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.cc
+++ b/video/adaptation/video_stream_encoder_resource_manager.cc
@@ -173,7 +173,8 @@
                             "stream parameters";
         initial_framedrop_ = 0;
         if (single_active_stream_pixels_ &&
-            GetSingleActiveLayerPixels(codec) > *single_active_stream_pixels_) {
+            VideoStreamAdapter::GetSingleActiveLayerPixels(codec) >
+                *single_active_stream_pixels_) {
           // Resolution increased.
           use_bandwidth_allocation_ = true;
         }
@@ -183,7 +184,8 @@
     last_active_flags_ = active_flags;
     last_input_width_ = codec.width;
     last_input_height_ = codec.height;
-    single_active_stream_pixels_ = GetSingleActiveLayerPixels(codec);
+    single_active_stream_pixels_ =
+        VideoStreamAdapter::GetSingleActiveLayerPixels(codec);
   }
 
   void OnFrameDroppedDueToSize() { ++initial_framedrop_; }
@@ -712,32 +714,6 @@
   quality_rampup_experiment_.reset();
 }
 
-absl::optional<uint32_t>
-VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(
-    const VideoCodec& codec) {
-  int num_active = 0;
-  absl::optional<uint32_t> pixels;
-  if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
-    for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) {
-      if (codec.spatialLayers[i].active) {
-        ++num_active;
-        pixels = codec.spatialLayers[i].width * codec.spatialLayers[i].height;
-      }
-    }
-  } else {
-    for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
-      if (codec.simulcastStream[i].active) {
-        ++num_active;
-        pixels =
-            codec.simulcastStream[i].width * codec.simulcastStream[i].height;
-      }
-    }
-  }
-  if (num_active > 1)
-    return absl::nullopt;
-  return pixels;
-}
-
 bool VideoStreamEncoderResourceManager::IsSimulcast(
     const VideoEncoderConfig& encoder_config) {
   const std::vector<VideoStream>& simulcast_layers =
diff --git a/video/adaptation/video_stream_encoder_resource_manager.h b/video/adaptation/video_stream_encoder_resource_manager.h
index 8eb5229..2e7060c 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.h
+++ b/video/adaptation/video_stream_encoder_resource_manager.h
@@ -147,8 +147,6 @@
   // QualityRampUpExperimentListener implementation.
   void OnQualityRampUp() override;
 
-  static absl::optional<uint32_t> GetSingleActiveLayerPixels(
-      const VideoCodec& codec);
   static bool IsSimulcast(const VideoEncoderConfig& encoder_config);
 
  private:
diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc
index 03c7a3c..f1b6567 100644
--- a/video/quality_scaling_tests.cc
+++ b/video/quality_scaling_tests.cc
@@ -197,6 +197,14 @@
           /*automatic_resize=*/false, /*expect_adaptation=*/false);
 }
 
+TEST_F(QualityScalingTest, AdaptsDownForHighQp_HighestStreamActive_Vp8) {
+  // qp_low:1, qp_high:1 -> kHighQp
+  test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
+
+  RunTest("VP8", {false, false, true}, kHighStartBps,
+          /*automatic_resize=*/true, /*expect_adaptation=*/true);
+}
+
 TEST_F(QualityScalingTest,
        AdaptsDownForLowStartBitrate_HighestStreamActive_Vp8) {
   // qp_low:1, qp_high:127 -> kNormalQp
@@ -206,6 +214,14 @@
           /*automatic_resize=*/true, /*expect_adaptation=*/true);
 }
 
+TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp8) {
+  // qp_low:1, qp_high:1 -> kHighQp
+  test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
+
+  RunTest("VP8", {true, false, false}, kHighStartBps,
+          /*automatic_resize=*/true, /*expect_adaptation=*/false);
+}
+
 TEST_F(QualityScalingTest,
        NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp8) {
   // qp_low:1, qp_high:127 -> kNormalQp
@@ -250,6 +266,15 @@
           /*automatic_resize=*/true, /*expect_adaptation=*/true);
 }
 
+TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) {
+  // qp_low:1, qp_high:1 -> kHighQp
+  test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
+                                       "WebRTC-VP9QualityScaler/Enabled/");
+
+  RunTest("VP9", {true, false, false}, kHighStartBps,
+          /*automatic_resize=*/true, /*expect_adaptation=*/false);
+}
+
 TEST_F(QualityScalingTest,
        NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp9) {
   // qp_low:1, qp_high:255 -> kNormalQp
@@ -260,6 +285,15 @@
           /*automatic_resize=*/true, /*expect_adaptation=*/false);
 }
 
+TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) {
+  // qp_low:1, qp_high:1 -> kHighQp
+  test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
+                                       "WebRTC-VP9QualityScaler/Enabled/");
+
+  RunTest("VP9", {false, true, false}, kHighStartBps,
+          /*automatic_resize=*/true, /*expect_adaptation=*/true);
+}
+
 TEST_F(QualityScalingTest,
        AdaptsDownForLowStartBitrate_MiddleStreamActive_Vp9) {
   // qp_low:1, qp_high:255 -> kNormalQp
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 979fda0..63770c4 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -359,7 +359,7 @@
 
   // Get bitrate limits for active stream.
   absl::optional<uint32_t> pixels =
-      VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(*codec);
+      VideoStreamAdapter::GetSingleActiveLayerPixels(*codec);
   if (!pixels.has_value()) {
     return;
   }