Add callback to copy texture backed frames in WebRtcVideoFrameAdapter

This CL adds a callback to copy texture backed frames in WebRtcVideoFrameAdapter
so that hardware decoded video tracks can be cloned or forwarded. The callback is
assigned by WebRtcVideoCapturerAdapter and runs on the main renderer thread.

BUG=642663
TEST=Ran https://loopback-dot-apprtc.appspot.com/?debug=loopback&vsc=h264 on Mac.

Review-Url: https://codereview.chromium.org/2456443002
Cr-Commit-Position: refs/heads/master@{#428535}
diff --git a/content/renderer/media/gpu/rtc_video_decoder.cc b/content/renderer/media/gpu/rtc_video_decoder.cc
index d905793..3410035 100644
--- a/content/renderer/media/gpu/rtc_video_decoder.cc
+++ b/content/renderer/media/gpu/rtc_video_decoder.cc
@@ -430,8 +430,9 @@
 
   // Create a WebRTC video frame.
   webrtc::VideoFrame decoded_image(
-      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0,
-      webrtc::kVideoRotation_0);
+      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
+          frame, WebRtcVideoFrameAdapter::CopyTextureFrameCallback()),
+      timestamp, 0, webrtc::kVideoRotation_0);
 
   // Invoke decode callback. WebRTC expects no callback after Release.
   {
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
index 96b6fe3..c0d5c7c7 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
@@ -6,17 +6,26 @@
 
 #include "base/bind.h"
 #include "base/memory/aligned_memory.h"
+#include "base/memory/ref_counted.h"
+#include "base/synchronization/waitable_event.h"
 #include "base/trace_event/trace_event.h"
+#include "content/common/gpu/client/context_provider_command_buffer.h"
 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
+#include "content/renderer/render_thread_impl.h"
 #include "media/base/timestamp_constants.h"
 #include "media/base/video_util.h"
+#include "media/renderers/skcanvas_video_renderer.h"
+#include "skia/ext/platform_canvas.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
 #include "third_party/libyuv/include/libyuv/scale.h"
+#include "third_party/skia/include/core/SkSurface.h"
 #include "third_party/webrtc/common_video/include/video_frame_buffer.h"
 #include "third_party/webrtc/common_video/rotation.h"
 #include "third_party/webrtc/media/engine/webrtcvideoframe.h"
 
 namespace content {
+
 namespace {
 
 // Empty method used for keeping a reference to the original media::VideoFrame.
@@ -24,72 +33,127 @@
 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) {
 }
 
+// Helper class that signals a WaitableEvent when it goes out of scope.
+class ScopedWaitableEvent {
+ public:
+  explicit ScopedWaitableEvent(base::WaitableEvent* event) : event_(event) {}
+  ~ScopedWaitableEvent() {
+    if (event_)
+      event_->Signal();
+  }
+
+ private:
+  base::WaitableEvent* const event_;
+};
+
 }  // anonymous namespace
 
+// Initializes the GL context environment and provides a method for copying
+// texture backed frames into CPU mappable memory.
+// The class is created and destroyed on the main render thread.
+class WebRtcVideoCapturerAdapter::TextureFrameCopier
+    : public base::RefCounted<WebRtcVideoCapturerAdapter::TextureFrameCopier> {
+ public:
+  TextureFrameCopier()
+      : main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+        canvas_video_renderer_(new media::SkCanvasVideoRenderer) {
+    RenderThreadImpl* const main_thread = RenderThreadImpl::current();
+    if (main_thread)
+      provider_ = main_thread->SharedMainThreadContextProvider();
+  }
+
+  // Synchronous call to copy a texture backed |frame| into a CPU mappable
+  // |new_frame|. If it is not called on the main render thread, this call
+  // posts a task to the main thread via CopyTextureFrameOnMainThread() and
+  // blocks until that task completes.
+  void CopyTextureFrame(const scoped_refptr<media::VideoFrame>& frame,
+                        scoped_refptr<media::VideoFrame>* new_frame) {
+    if (main_thread_task_runner_->BelongsToCurrentThread()) {
+      CopyTextureFrameOnMainThread(frame, new_frame, nullptr);
+      return;
+    }
+
+    base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
+                               base::WaitableEvent::InitialState::NOT_SIGNALED);
+    main_thread_task_runner_->PostTask(
+        FROM_HERE, base::Bind(&TextureFrameCopier::CopyTextureFrameOnMainThread,
+                              this, frame, new_frame, &waiter));
+    waiter.Wait();
+  }
+
+ private:
+  friend class base::RefCounted<TextureFrameCopier>;
+  ~TextureFrameCopier() {
+    // |canvas_video_renderer_| should be deleted on the thread it was created on.
+    if (!main_thread_task_runner_->BelongsToCurrentThread()) {
+      main_thread_task_runner_->DeleteSoon(FROM_HERE,
+                                           canvas_video_renderer_.release());
+    }
+  }
+
+  void CopyTextureFrameOnMainThread(
+      const scoped_refptr<media::VideoFrame>& frame,
+      scoped_refptr<media::VideoFrame>* new_frame,
+      base::WaitableEvent* waiter) {
+    DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
+    DCHECK(frame->format() == media::PIXEL_FORMAT_ARGB ||
+           frame->format() == media::PIXEL_FORMAT_XRGB ||
+           frame->format() == media::PIXEL_FORMAT_I420 ||
+           frame->format() == media::PIXEL_FORMAT_UYVY ||
+           frame->format() == media::PIXEL_FORMAT_NV12);
+    ScopedWaitableEvent event(waiter);
+    sk_sp<SkSurface> surface = SkSurface::MakeRasterN32Premul(
+        frame->visible_rect().width(), frame->visible_rect().height());
+
+    if (!surface || !provider_) {
+      // Return a black frame (yuv = {0, 0x80, 0x80}).
+      *new_frame = media::VideoFrame::CreateColorFrame(
+          frame->visible_rect().size(), 0u, 0x80, 0x80, frame->timestamp());
+      return;
+    }
+
+    *new_frame = media::VideoFrame::CreateFrame(
+        media::PIXEL_FORMAT_I420, frame->coded_size(), frame->visible_rect(),
+        frame->natural_size(), frame->timestamp());
+    DCHECK(provider_->ContextGL());
+    canvas_video_renderer_->Copy(
+        frame.get(), surface->getCanvas(),
+        media::Context3D(provider_->ContextGL(), provider_->GrContext()));
+
+    SkPixmap pixmap;
+    const bool result = surface->getCanvas()->peekPixels(&pixmap);
+    DCHECK(result) << "Error trying to access SkSurface's pixels";
+    const uint32 source_pixel_format =
+        (kN32_SkColorType == kRGBA_8888_SkColorType) ? cricket::FOURCC_ABGR
+                                                     : cricket::FOURCC_ARGB;
+    libyuv::ConvertToI420(
+        static_cast<const uint8*>(pixmap.addr(0, 0)), pixmap.getSafeSize64(),
+        (*new_frame)->visible_data(media::VideoFrame::kYPlane),
+        (*new_frame)->stride(media::VideoFrame::kYPlane),
+        (*new_frame)->visible_data(media::VideoFrame::kUPlane),
+        (*new_frame)->stride(media::VideoFrame::kUPlane),
+        (*new_frame)->visible_data(media::VideoFrame::kVPlane),
+        (*new_frame)->stride(media::VideoFrame::kVPlane), 0 /* crop_x */,
+        0 /* crop_y */, pixmap.width(), pixmap.height(),
+        (*new_frame)->visible_rect().width(),
+        (*new_frame)->visible_rect().height(), libyuv::kRotate0,
+        source_pixel_format);
+  }
+
+  const scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_;
+  scoped_refptr<ContextProviderCommandBuffer> provider_;
+  std::unique_ptr<media::SkCanvasVideoRenderer> canvas_video_renderer_;
+};
+
 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast)
-    : is_screencast_(is_screencast),
+    : texture_copier_(new WebRtcVideoCapturerAdapter::TextureFrameCopier()),
+      is_screencast_(is_screencast),
       running_(false) {
   thread_checker_.DetachFromThread();
 }
 
 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() {
-  DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor";
-}
-
-cricket::CaptureState WebRtcVideoCapturerAdapter::Start(
-    const cricket::VideoFormat& capture_format) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  DCHECK(!running_);
-  DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width
-           << " h = " << capture_format.height;
-
-  running_ = true;
-  return cricket::CS_RUNNING;
-}
-
-void WebRtcVideoCapturerAdapter::Stop() {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop ";
-  DCHECK(running_);
-  running_ = false;
-  SetCaptureFormat(NULL);
-  SignalStateChange(this, cricket::CS_STOPPED);
-}
-
-bool WebRtcVideoCapturerAdapter::IsRunning() {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  return running_;
-}
-
-bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
-    std::vector<uint32_t>* fourccs) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  DCHECK(!fourccs || fourccs->empty());
-  if (fourccs)
-    fourccs->push_back(cricket::FOURCC_I420);
-  return fourccs != NULL;
-}
-
-bool WebRtcVideoCapturerAdapter::IsScreencast() const {
-  return is_screencast_;
-}
-
-bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
-    const cricket::VideoFormat& desired,
-    cricket::VideoFormat* best_format) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  DVLOG(3) << " GetBestCaptureFormat:: "
-           << " w = " << desired.width
-           << " h = " << desired.height;
-
-  // Capability enumeration is done in MediaStreamVideoSource. The adapter can
-  // just use what is provided.
-  // Use the desired format as the best format.
-  best_format->width = desired.width;
-  best_format->height = desired.height;
-  best_format->fourcc = cricket::FOURCC_I420;
-  best_format->interval = desired.interval;
-  return true;
+  DVLOG(3) << __func__;
 }
 
 void WebRtcVideoCapturerAdapter::OnFrameCaptured(
@@ -99,9 +163,12 @@
   if (!(input_frame->IsMappable() &&
         (input_frame->format() == media::PIXEL_FORMAT_I420 ||
          input_frame->format() == media::PIXEL_FORMAT_YV12 ||
-         input_frame->format() == media::PIXEL_FORMAT_YV12A))) {
+         input_frame->format() == media::PIXEL_FORMAT_YV12A)) &&
+      !input_frame->HasTextures()) {
     // Since connecting sources and sinks do not check the format, we need to
     // just ignore formats that we can not handle.
+    LOG(ERROR) << "We cannot send frame with storage type: "
+               << input_frame->AsHumanReadableString();
     NOTREACHED();
     return;
   }
@@ -133,11 +200,11 @@
 
   // Return |frame| directly if it is texture backed, because there is no
   // cropping support for texture yet. See http://crbug/503653.
-  // Return |frame| directly if it is GpuMemoryBuffer backed, as we want to
-  // keep the frame on native buffers.
   if (frame->HasTextures()) {
     OnFrame(cricket::WebRtcVideoFrame(
-                new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame),
+                new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
+                    frame, base::Bind(&TextureFrameCopier::CopyTextureFrame,
+                                      texture_copier_)),
                 webrtc::kVideoRotation_0, translated_camera_time_us),
             orig_width, orig_height);
     return;
@@ -164,7 +231,9 @@
   // If no scaling is needed, return a wrapped version of |frame| directly.
   if (video_frame->natural_size() == video_frame->visible_rect().size()) {
     OnFrame(cricket::WebRtcVideoFrame(
-                new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame),
+                new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
+                    video_frame,
+                    WebRtcVideoFrameAdapter::CopyTextureFrameCallback()),
                 webrtc::kVideoRotation_0, translated_camera_time_us),
             orig_width, orig_height);
     return;
@@ -192,9 +261,65 @@
                     adapted_width, adapted_height, libyuv::kFilterBilinear);
 
   OnFrame(cricket::WebRtcVideoFrame(
-              new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame),
+              new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
+                  scaled_frame,
+                  WebRtcVideoFrameAdapter::CopyTextureFrameCallback()),
               webrtc::kVideoRotation_0, translated_camera_time_us),
           orig_width, orig_height);
 }
 
+cricket::CaptureState WebRtcVideoCapturerAdapter::Start(
+    const cricket::VideoFormat& capture_format) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DCHECK(!running_);
+  DVLOG(3) << __func__ << " capture format: " << capture_format.ToString();
+
+  running_ = true;
+  return cricket::CS_RUNNING;
+}
+
+void WebRtcVideoCapturerAdapter::Stop() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DVLOG(3) << __func__;
+  DCHECK(running_);
+  running_ = false;
+  SetCaptureFormat(NULL);
+  SignalStateChange(this, cricket::CS_STOPPED);
+}
+
+bool WebRtcVideoCapturerAdapter::IsRunning() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  return running_;
+}
+
+bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
+    std::vector<uint32_t>* fourccs) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  if (!fourccs)
+    return false;
+  DCHECK(fourccs->empty());
+  fourccs->push_back(cricket::FOURCC_I420);
+  return true;
+}
+
+bool WebRtcVideoCapturerAdapter::IsScreencast() const {
+  return is_screencast_;
+}
+
+bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
+    const cricket::VideoFormat& desired,
+    cricket::VideoFormat* best_format) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DVLOG(3) << __func__ << " desired: " << desired.ToString();
+
+  // Capability enumeration is done in MediaStreamVideoSource. The adapter can
+  // just use what is provided.
+  // Use the desired format as the best format.
+  best_format->width = desired.width;
+  best_format->height = desired.height;
+  best_format->fourcc = cricket::FOURCC_I420;
+  best_format->interval = desired.interval;
+  return true;
+}
+
 }  // namespace content
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h
index 7b3877c..41a3563 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h
@@ -51,6 +51,10 @@
                             cricket::VideoFormat* best_format) override;
   bool IsScreencast() const override;
 
+  // Helper class used for copying texture backed frames.
+  class TextureFrameCopier;
+  const scoped_refptr<TextureFrameCopier> texture_copier_;
+
   // |thread_checker_| is bound to the libjingle worker thread.
   base::ThreadChecker thread_checker_;
 
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
index 81d812b..8a4f46e 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
@@ -4,10 +4,18 @@
 
 #include <algorithm>
 
+#include "base/bind.h"
+#include "base/run_loop.h"
+#include "content/child/child_process.h"
 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
+#include "gpu/command_buffer/common/mailbox_holder.h"
 #include "media/base/video_frame.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
+namespace {
+static void ReleaseMailboxCB(const gpu::SyncToken& sync_token) {}
+}  // anonymous namespace
+
 namespace content {
 
 class WebRtcVideoCapturerAdapterTest
@@ -44,14 +52,43 @@
     EXPECT_EQ(natural_height, output_frame_height_);
   }
 
+  void TestSourceTextureFrame() {
+    EXPECT_TRUE(message_loop_.IsCurrent());
+    gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
+        gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 5)};
+    scoped_refptr<media::VideoFrame> frame =
+        media::VideoFrame::WrapNativeTextures(
+            media::PIXEL_FORMAT_ARGB, holders, base::Bind(&ReleaseMailboxCB),
+            gfx::Size(10, 10), gfx::Rect(10, 10), gfx::Size(10, 10),
+            base::TimeDelta());
+    adapter_.OnFrameCaptured(frame);
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> texture_frame =
+        output_frame_.video_frame_buffer();
+    EXPECT_EQ(media::VideoFrame::STORAGE_OPAQUE,
+              static_cast<media::VideoFrame*>(texture_frame->native_handle())
+                  ->storage_type());
+
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> copied_frame =
+        texture_frame->NativeToI420Buffer();
+    EXPECT_TRUE(copied_frame);
+    EXPECT_TRUE(copied_frame->DataY());
+    EXPECT_TRUE(copied_frame->DataU());
+    EXPECT_TRUE(copied_frame->DataV());
+  }
+
   // rtc::VideoSinkInterface
   void OnFrame(const cricket::VideoFrame& frame) override {
+    output_frame_ = frame;
     output_frame_width_ = frame.width();
     output_frame_height_ = frame.height();
   }
 
  private:
+  const base::MessageLoopForIO message_loop_;
+  const ChildProcess child_process_;
+
   WebRtcVideoCapturerAdapter adapter_;
+  cricket::VideoFrame output_frame_;
   int output_frame_width_;
   int output_frame_height_;
 };
@@ -68,4 +105,8 @@
   TestSourceCropFrame(1280, 720, 1280, 720, 640, 360);
 }
 
+TEST_F(WebRtcVideoCapturerAdapterTest, SendsWithCopyTextureFrameCallback) {
+  TestSourceTextureFrame();
+}
+
 }  // namespace content
diff --git a/content/renderer/media/webrtc/webrtc_video_frame_adapter.cc b/content/renderer/media/webrtc/webrtc_video_frame_adapter.cc
index b75b454..eaeec88 100644
--- a/content/renderer/media/webrtc/webrtc_video_frame_adapter.cc
+++ b/content/renderer/media/webrtc/webrtc_video_frame_adapter.cc
@@ -6,12 +6,32 @@
 
 #include "base/logging.h"
 
+namespace {
+
+void IsValidFrame(const scoped_refptr<media::VideoFrame>& frame) {
+  // Paranoia checks.
+  DCHECK(frame);
+  DCHECK(media::VideoFrame::IsValidConfig(
+      frame->format(), frame->storage_type(), frame->coded_size(),
+      frame->visible_rect(), frame->natural_size()));
+  DCHECK(media::PIXEL_FORMAT_I420 == frame->format() ||
+         media::PIXEL_FORMAT_YV12 == frame->format());
+  CHECK(reinterpret_cast<void*>(frame->data(media::VideoFrame::kYPlane)));
+  CHECK(reinterpret_cast<void*>(frame->data(media::VideoFrame::kUPlane)));
+  CHECK(reinterpret_cast<void*>(frame->data(media::VideoFrame::kVPlane)));
+  CHECK(frame->stride(media::VideoFrame::kYPlane));
+  CHECK(frame->stride(media::VideoFrame::kUPlane));
+  CHECK(frame->stride(media::VideoFrame::kVPlane));
+}
+
+}  // anonymous namespace
+
 namespace content {
 
 WebRtcVideoFrameAdapter::WebRtcVideoFrameAdapter(
-    const scoped_refptr<media::VideoFrame>& frame)
-    : frame_(frame) {
-}
+    const scoped_refptr<media::VideoFrame>& frame,
+    const CopyTextureFrameCallback& copy_texture_callback)
+    : frame_(frame), copy_texture_callback_(copy_texture_callback) {}
 
 WebRtcVideoFrameAdapter::~WebRtcVideoFrameAdapter() {
 }
@@ -45,6 +65,8 @@
 }
 
 void* WebRtcVideoFrameAdapter::native_handle() const {
+  // Keep native handle for shared memory backed frames, so that we can use
+  // the existing handle to share for hw encode.
   if (frame_->HasTextures() ||
       frame_->storage_type() == media::VideoFrame::STORAGE_SHMEM)
     return frame_.get();
@@ -53,17 +75,28 @@
 
 rtc::scoped_refptr<webrtc::VideoFrameBuffer>
 WebRtcVideoFrameAdapter::NativeToI420Buffer() {
-  CHECK(media::VideoFrame::IsValidConfig(
-      frame_->format(), frame_->storage_type(), frame_->coded_size(),
-      frame_->visible_rect(), frame_->natural_size()));
-  CHECK_EQ(media::PIXEL_FORMAT_I420, frame_->format());
-  CHECK(reinterpret_cast<void*>(frame_->data(media::VideoFrame::kYPlane)));
-  CHECK(reinterpret_cast<void*>(frame_->data(media::VideoFrame::kUPlane)));
-  CHECK(reinterpret_cast<void*>(frame_->data(media::VideoFrame::kVPlane)));
-  CHECK(frame_->stride(media::VideoFrame::kYPlane));
-  CHECK(frame_->stride(media::VideoFrame::kUPlane));
-  CHECK(frame_->stride(media::VideoFrame::kVPlane));
-  return this;
+  if (frame_->storage_type() == media::VideoFrame::STORAGE_SHMEM) {
+    IsValidFrame(frame_);
+    return this;
+  }
+
+  if (frame_->HasTextures()) {
+    if (copy_texture_callback_.is_null()) {
+      DLOG(ERROR) << "Texture backed frame cannot be copied.";
+      return nullptr;
+    }
+
+    scoped_refptr<media::VideoFrame> new_frame;
+    copy_texture_callback_.Run(frame_, &new_frame);
+    if (!new_frame)
+      return nullptr;
+    frame_ = new_frame;
+    IsValidFrame(frame_);
+    return this;
+  }
+
+  NOTREACHED();
+  return nullptr;
 }
 
 }  // namespace content
diff --git a/content/renderer/media/webrtc/webrtc_video_frame_adapter.h b/content/renderer/media/webrtc/webrtc_video_frame_adapter.h
index 46dd876..fadd508 100644
--- a/content/renderer/media/webrtc/webrtc_video_frame_adapter.h
+++ b/content/renderer/media/webrtc/webrtc_video_frame_adapter.h
@@ -7,6 +7,7 @@
 
 #include <stdint.h>
 
+#include "base/callback.h"
 #include "media/base/video_frame.h"
 #include "third_party/webrtc/common_video/include/video_frame_buffer.h"
 
@@ -17,8 +18,13 @@
 // different threads, but that's safe since it's read-only.
 class WebRtcVideoFrameAdapter : public webrtc::VideoFrameBuffer {
  public:
-  explicit WebRtcVideoFrameAdapter(
-      const scoped_refptr<media::VideoFrame>& frame);
+  using CopyTextureFrameCallback =
+      base::Callback<void(const scoped_refptr<media::VideoFrame>&,
+                          scoped_refptr<media::VideoFrame>*)>;
+
+  WebRtcVideoFrameAdapter(
+      const scoped_refptr<media::VideoFrame>& frame,
+      const CopyTextureFrameCallback& copy_texture_callback);
 
  private:
   int width() const override;
@@ -42,6 +48,7 @@
   ~WebRtcVideoFrameAdapter() override;
 
   scoped_refptr<media::VideoFrame> frame_;
+  const CopyTextureFrameCallback copy_texture_callback_;
 };
 
 }  // namespace content