diff --git a/DEPS b/DEPS
index 0c001b6..0e5c6ec1 100644
--- a/DEPS
+++ b/DEPS
@@ -44,7 +44,7 @@
   # Three lines of non-changing comments so that
   # the commit queue can handle CLs rolling V8
   # and whatever else without interference from each other.
-  'v8_revision': 'c43132b7590443b4e844ffadfbba7e9327e9ca8d',
+  'v8_revision': 'eb1b609e907e8b595cc8817d02e9f010f34cb4ad',
   # Three lines of non-changing comments so that
   # the commit queue can handle CLs rolling swarming_client
   # and whatever else without interference from each other.
diff --git a/android_webview/browser/aw_browser_main_parts.h b/android_webview/browser/aw_browser_main_parts.h
index 8d4671f..1c6b173d 100644
--- a/android_webview/browser/aw_browser_main_parts.h
+++ b/android_webview/browser/aw_browser_main_parts.h
@@ -17,7 +17,6 @@
 
 namespace android_webview {
 
-class AwBrowserContext;
 class AwContentBrowserClient;
 
 class AwBrowserMainParts : public content::BrowserMainParts {
diff --git a/android_webview/browser/aw_contents_io_thread_client.h b/android_webview/browser/aw_contents_io_thread_client.h
index 9eed00a..82968ed 100644
--- a/android_webview/browser/aw_contents_io_thread_client.h
+++ b/android_webview/browser/aw_contents_io_thread_client.h
@@ -12,8 +12,6 @@
 
 #include "base/callback_forward.h"
 
-class GURL;
-
 namespace net {
 class HttpResponseHeaders;
 class URLRequest;
diff --git a/android_webview/browser/aw_metrics_service_client.h b/android_webview/browser/aw_metrics_service_client.h
index 1e193965..7537d39 100644
--- a/android_webview/browser/aw_metrics_service_client.h
+++ b/android_webview/browser/aw_metrics_service_client.h
@@ -21,7 +21,6 @@
 }
 
 namespace metrics {
-struct ClientInfo;
 class MetricsStateManager;
 }
 
diff --git a/android_webview/browser/browser_view_renderer_client.h b/android_webview/browser/browser_view_renderer_client.h
index 0aa37d7..c13656b 100644
--- a/android_webview/browser/browser_view_renderer_client.h
+++ b/android_webview/browser/browser_view_renderer_client.h
@@ -15,7 +15,6 @@
 }
 
 namespace android_webview {
-struct ParentCompositorDrawConstraints;
 
 class BrowserViewRendererClient {
  public:
diff --git a/android_webview/browser/hardware_renderer.h b/android_webview/browser/hardware_renderer.h
index 4519150..c51fa5d 100644
--- a/android_webview/browser/hardware_renderer.h
+++ b/android_webview/browser/hardware_renderer.h
@@ -18,10 +18,8 @@
 struct AwDrawGLInfo;
 
 namespace cc {
-class Display;
 class SurfaceFactory;
 class SurfaceIdAllocator;
-class SurfaceManager;
 }
 
 namespace android_webview {
diff --git a/android_webview/browser/net/aw_network_delegate.h b/android_webview/browser/net/aw_network_delegate.h
index 4830eda..0954513 100644
--- a/android_webview/browser/net/aw_network_delegate.h
+++ b/android_webview/browser/net/aw_network_delegate.h
@@ -9,7 +9,6 @@
 #include "net/base/network_delegate_impl.h"
 
 namespace net {
-class ProxyInfo;
 class URLRequest;
 }
 
diff --git a/android_webview/browser/net/aw_url_request_context_getter.h b/android_webview/browser/net/aw_url_request_context_getter.h
index 9644a61..9264092 100644
--- a/android_webview/browser/net/aw_url_request_context_getter.h
+++ b/android_webview/browser/net/aw_url_request_context_getter.h
@@ -31,8 +31,6 @@
 
 namespace android_webview {
 
-class AwNetworkDelegate;
-
 class AwURLRequestContextGetter : public net::URLRequestContextGetter {
  public:
   AwURLRequestContextGetter(
diff --git a/android_webview/browser/net/aw_web_resource_response.h b/android_webview/browser/net/aw_web_resource_response.h
index 9ecaa45c..dd2ac1a 100644
--- a/android_webview/browser/net/aw_web_resource_response.h
+++ b/android_webview/browser/net/aw_web_resource_response.h
@@ -13,7 +13,6 @@
 
 namespace net {
 class HttpResponseHeaders;
-class NetworkDelegate;
 class URLRequest;
 }
 
diff --git a/android_webview/browser/net/init_native_callback.h b/android_webview/browser/net/init_native_callback.h
index a55e7d36..322440f3 100644
--- a/android_webview/browser/net/init_native_callback.h
+++ b/android_webview/browser/net/init_native_callback.h
@@ -19,7 +19,6 @@
 }  // namespace net
 
 namespace android_webview {
-class AwBrowserContext;
 
 // Gets the TaskRunner that the CookieStore must be called on.
 scoped_refptr<base::SingleThreadTaskRunner> GetCookieStoreTaskRunner();
diff --git a/android_webview/browser/renderer_host/aw_render_view_host_ext.h b/android_webview/browser/renderer_host/aw_render_view_host_ext.h
index 812eed7..c6bc1a1 100644
--- a/android_webview/browser/renderer_host/aw_render_view_host_ext.h
+++ b/android_webview/browser/renderer_host/aw_render_view_host_ext.h
@@ -16,8 +16,6 @@
 #include "ui/gfx/geometry/size.h"
 #include "ui/gfx/geometry/size_f.h"
 
-class GURL;
-
 namespace content {
 struct FrameNavigateParams;
 struct LoadCommittedDetails;
diff --git a/android_webview/browser/scoped_app_gl_state_restore.h b/android_webview/browser/scoped_app_gl_state_restore.h
index 2a52048..b15d14bc 100644
--- a/android_webview/browser/scoped_app_gl_state_restore.h
+++ b/android_webview/browser/scoped_app_gl_state_restore.h
@@ -7,10 +7,6 @@
 
 #include "base/macros.h"
 
-namespace gl {
-class GLContext;
-}
-
 namespace android_webview {
 
 namespace internal {
diff --git a/android_webview/browser/surfaces_instance.h b/android_webview/browser/surfaces_instance.h
index f4d8b4fa..8fb59b2 100644
--- a/android_webview/browser/surfaces_instance.h
+++ b/android_webview/browser/surfaces_instance.h
@@ -29,9 +29,7 @@
 
 namespace android_webview {
 
-class AwGLSurface;
 class ParentOutputSurface;
-class ScopedAppGLStateRestore;
 
 class SurfacesInstance : public base::RefCounted<SurfacesInstance>,
                          public cc::DisplayClient,
diff --git a/android_webview/browser/test/rendering_test.h b/android_webview/browser/test/rendering_test.h
index ef9b0be2..ee7cf41 100644
--- a/android_webview/browser/test/rendering_test.h
+++ b/android_webview/browser/test/rendering_test.h
@@ -39,8 +39,6 @@
 class CompositorFrameConsumer;
 class CompositorFrameProducer;
 class FakeWindow;
-class RenderThreadManager;
-struct ParentCompositorDrawConstraints;
 
 class RenderingTest : public testing::Test,
                       public BrowserViewRendererClient,
diff --git a/android_webview/native/android_protocol_handler.h b/android_webview/native/android_protocol_handler.h
index 6f538516..e003b0e5 100644
--- a/android_webview/native/android_protocol_handler.h
+++ b/android_webview/native/android_protocol_handler.h
@@ -10,7 +10,6 @@
 #include "base/android/jni_android.h"
 
 namespace net {
-class URLRequestContext;
 class URLRequestInterceptor;
 }  // namespace net
 
diff --git a/android_webview/native/aw_autofill_client.h b/android_webview/native/aw_autofill_client.h
index ea684951..3b9cc1c0 100644
--- a/android_webview/native/aw_autofill_client.h
+++ b/android_webview/native/aw_autofill_client.h
@@ -20,14 +20,11 @@
 #include "ui/android/view_android.h"
 
 namespace autofill {
-class AutofillMetrics;
 class AutofillPopupDelegate;
 class CardUnmaskDelegate;
 class CreditCard;
 class FormStructure;
-class PasswordGenerator;
 class PersonalDataManager;
-struct FormData;
 }
 
 namespace content {
diff --git a/android_webview/native/aw_contents.cc b/android_webview/native/aw_contents.cc
index c2e0ea4..ccd4ff8 100644
--- a/android_webview/native/aw_contents.cc
+++ b/android_webview/native/aw_contents.cc
@@ -401,13 +401,13 @@
 }
 
 namespace {
-void GenerateMHTMLCallback(ScopedJavaGlobalRef<jobject>* callback,
+void GenerateMHTMLCallback(const JavaRef<jobject>& callback,
                            const base::FilePath& path,
                            int64_t size) {
   JNIEnv* env = AttachCurrentThread();
   // Android files are UTF8, so the path conversion below is safe.
   Java_AwContents_generateMHTMLCallback(
-      env, ConvertUTF8ToJavaString(env, path.AsUTF8Unsafe()), size, *callback);
+      env, ConvertUTF8ToJavaString(env, path.AsUTF8Unsafe()), size, callback);
 }
 }  // namespace
 
@@ -416,12 +416,11 @@
                                const JavaParamRef<jstring>& jpath,
                                const JavaParamRef<jobject>& callback) {
   DCHECK_CURRENTLY_ON(BrowserThread::UI);
-  ScopedJavaGlobalRef<jobject>* j_callback = new ScopedJavaGlobalRef<jobject>();
-  j_callback->Reset(env, callback);
   base::FilePath target_path(ConvertJavaStringToUTF8(env, jpath));
   web_contents_->GenerateMHTML(
       content::MHTMLGenerationParams(target_path),
-      base::Bind(&GenerateMHTMLCallback, base::Owned(j_callback), target_path));
+      base::Bind(&GenerateMHTMLCallback,
+                 ScopedJavaGlobalRef<jobject>(env, callback), target_path));
 }
 
 void AwContents::CreatePdfExporter(JNIEnv* env,
@@ -1138,13 +1137,13 @@
 namespace {
 void InvokeVisualStateCallback(const JavaObjectWeakGlobalRef& java_ref,
                                jlong request_id,
-                               ScopedJavaGlobalRef<jobject>* callback,
+                               const JavaRef<jobject>& callback,
                                bool result) {
   JNIEnv* env = AttachCurrentThread();
   ScopedJavaLocalRef<jobject> obj = java_ref.get(env);
   if (obj.is_null())
      return;
-  Java_AwContents_invokeVisualStateCallback(env, obj, *callback, request_id);
+  Java_AwContents_invokeVisualStateCallback(env, obj, callback, request_id);
 }
 }  // namespace
 
@@ -1154,11 +1153,9 @@
     jlong request_id,
     const JavaParamRef<jobject>& callback) {
   DCHECK_CURRENTLY_ON(BrowserThread::UI);
-  ScopedJavaGlobalRef<jobject>* j_callback = new ScopedJavaGlobalRef<jobject>();
-  j_callback->Reset(env, callback);
   web_contents_->GetMainFrame()->InsertVisualStateCallback(
       base::Bind(&InvokeVisualStateCallback, java_ref_, request_id,
-                 base::Owned(j_callback)));
+                 ScopedJavaGlobalRef<jobject>(env, callback)));
 }
 
 void AwContents::ClearView(JNIEnv* env, const JavaParamRef<jobject>& obj) {
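
The two hunks above show the callback-ownership pattern this CL applies
throughout: the old code heap-allocated a ScopedJavaGlobalRef, handed it to
base::Bind() via base::Owned(), and made the callee take a raw pointer. The
new code binds the global ref by value, so the Bind state owns it and callees
can take const JavaRef<jobject>&. A minimal sketch of both patterns; the
OldStyle/NewStyle callees are hypothetical stand-ins for the *Callback
functions in this file:

#include "base/android/scoped_java_ref.h"
#include "base/bind.h"
#include "base/callback.h"

using base::android::JavaRef;
using base::android::ScopedJavaGlobalRef;

void OldStyle(ScopedJavaGlobalRef<jobject>* callback);  // old-style callee
void NewStyle(const JavaRef<jobject>& callback);        // new-style callee

void BindOld(JNIEnv* env, jobject j_callback) {
  // Old: manual heap allocation; base::Owned() deletes |ref| when the bound
  // callback is destroyed.
  auto* ref = new ScopedJavaGlobalRef<jobject>();
  ref->Reset(env, j_callback);
  base::Closure cb = base::Bind(&OldStyle, base::Owned(ref));
}

void BindNew(JNIEnv* env, jobject j_callback) {
  // New: the global ref is stored by value in the Bind state and released
  // automatically; no new/delete, no raw pointer in the callee signature.
  base::Closure cb =
      base::Bind(&NewStyle, ScopedJavaGlobalRef<jobject>(env, j_callback));
}
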
diff --git a/android_webview/native/aw_contents.h b/android_webview/native/aw_contents.h
index 0f7fc58..3416602 100644
--- a/android_webview/native/aw_contents.h
+++ b/android_webview/native/aw_contents.h
@@ -30,7 +30,6 @@
 
 class SkBitmap;
 class TabContents;
-struct AwDrawGLInfo;
 
 namespace content {
 class WebContents;
@@ -43,7 +42,6 @@
 class AwGLFunctor;
 class AwPdfExporter;
 class AwWebContentsDelegate;
-class HardwareRenderer;
 class PermissionRequestHandler;
 
 // Native side of java-class of same name.
diff --git a/android_webview/native/aw_contents_io_thread_client_impl.h b/android_webview/native/aw_contents_io_thread_client_impl.h
index bb517e1..41f365e 100644
--- a/android_webview/native/aw_contents_io_thread_client_impl.h
+++ b/android_webview/native/aw_contents_io_thread_client_impl.h
@@ -12,10 +12,7 @@
 #include "base/compiler_specific.h"
 #include "base/macros.h"
 
-class GURL;
-
 namespace content {
-class ResourceRequestInfo;
 class WebContents;
 }
 
@@ -25,8 +22,6 @@
 
 namespace android_webview {
 
-class AwWebResourceResponse;
-
 class AwContentsIoThreadClientImpl : public AwContentsIoThreadClient {
  public:
    // Called when AwContents is created before there is a Java client.
diff --git a/android_webview/native/aw_contents_statics.cc b/android_webview/native/aw_contents_statics.cc
index 6e2948c..60c352c 100644
--- a/android_webview/native/aw_contents_statics.cc
+++ b/android_webview/native/aw_contents_statics.cc
@@ -20,6 +20,7 @@
 using base::android::AttachCurrentThread;
 using base::android::ConvertJavaStringToUTF8;
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 using base::android::ScopedJavaLocalRef;
 using content::BrowserThread;
@@ -28,10 +29,10 @@
 
 namespace {
 
-void ClientCertificatesCleared(ScopedJavaGlobalRef<jobject>* callback) {
+void ClientCertificatesCleared(const JavaRef<jobject>& callback) {
   DCHECK_CURRENTLY_ON(BrowserThread::UI);
   JNIEnv* env = AttachCurrentThread();
-  Java_AwContentsStatics_clientCertificatesCleared(env, *callback);
+  Java_AwContentsStatics_clientCertificatesCleared(env, callback);
 }
 
 void NotifyClientCertificatesChanged() {
@@ -46,13 +47,11 @@
                                 const JavaParamRef<jclass>&,
                                 const JavaParamRef<jobject>& callback) {
   DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
-  ScopedJavaGlobalRef<jobject>* j_callback = new ScopedJavaGlobalRef<jobject>();
-  j_callback->Reset(env, callback);
   BrowserThread::PostTaskAndReply(
-      BrowserThread::IO,
-      FROM_HERE,
+      BrowserThread::IO, FROM_HERE,
       base::Bind(&NotifyClientCertificatesChanged),
-      base::Bind(&ClientCertificatesCleared, base::Owned(j_callback)));
+      base::Bind(&ClientCertificatesCleared,
+                 ScopedJavaGlobalRef<jobject>(env, callback)));
 }
 
 // static
diff --git a/android_webview/native/aw_quota_manager_bridge_impl.h b/android_webview/native/aw_quota_manager_bridge_impl.h
index 1b0970bf..7478433 100644
--- a/android_webview/native/aw_quota_manager_bridge_impl.h
+++ b/android_webview/native/aw_quota_manager_bridge_impl.h
@@ -19,8 +19,6 @@
 #include "base/memory/weak_ptr.h"
 #include "base/strings/string16.h"
 
-class GURL;
-
 namespace content {
 class StoragePartition;
 }
diff --git a/android_webview/native/aw_web_contents_view_delegate.h b/android_webview/native/aw_web_contents_view_delegate.h
index c4b438a..61958ea 100644
--- a/android_webview/native/aw_web_contents_view_delegate.h
+++ b/android_webview/native/aw_web_contents_view_delegate.h
@@ -15,8 +15,6 @@
 
 namespace android_webview {
 
-class AwContents;
-
 class AwWebContentsViewDelegate : public content::WebContentsViewDelegate {
  public:
   static content::WebContentsViewDelegate* Create(
diff --git a/android_webview/native/aw_web_preferences_populater_impl.h b/android_webview/native/aw_web_preferences_populater_impl.h
index 8b02845..d20e432 100644
--- a/android_webview/native/aw_web_preferences_populater_impl.h
+++ b/android_webview/native/aw_web_preferences_populater_impl.h
@@ -10,8 +10,6 @@
 
 namespace android_webview {
 
-class AwSettings;
-
 class AwWebPreferencesPopulaterImpl : public AwWebPreferencesPopulater {
  public:
   AwWebPreferencesPopulaterImpl();
diff --git a/android_webview/renderer/aw_content_renderer_client.h b/android_webview/renderer/aw_content_renderer_client.h
index 68fda72..4bfae86 100644
--- a/android_webview/renderer/aw_content_renderer_client.h
+++ b/android_webview/renderer/aw_content_renderer_client.h
@@ -17,7 +17,6 @@
 
 #if BUILDFLAG(ENABLE_SPELLCHECK)
 class SpellCheck;
-class SpellCheckProvider;
 #endif
 
 namespace visitedlink {
diff --git a/chrome/browser/android/favicon_helper.cc b/chrome/browser/android/favicon_helper.cc
index 6ed17a9..aca5fcb 100644
--- a/chrome/browser/android/favicon_helper.cc
+++ b/chrome/browser/android/favicon_helper.cc
@@ -36,6 +36,7 @@
 #include "ui/gfx/image/image_skia_rep.h"
 
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 using base::android::ScopedJavaLocalRef;
 using base::android::AttachCurrentThread;
@@ -46,7 +47,7 @@
 namespace {
 
 void OnLocalFaviconAvailable(
-    ScopedJavaGlobalRef<jobject>* j_favicon_image_callback,
+    const JavaRef<jobject>& j_favicon_image_callback,
     const favicon_base::FaviconRawBitmapResult& result) {
   JNIEnv* env = AttachCurrentThread();
 
@@ -64,8 +65,8 @@
   }
 
   // Call java side OnLocalFaviconAvailable method.
-  Java_FaviconImageCallback_onFaviconAvailable(
-      env, j_favicon_image_callback->obj(), j_favicon_bitmap, j_icon_url);
+  Java_FaviconImageCallback_onFaviconAvailable(env, j_favicon_image_callback,
+                                               j_favicon_bitmap, j_icon_url);
 }
 
 size_t GetLargestSizeIndex(const std::vector<gfx::Size>& sizes) {
@@ -165,12 +166,9 @@
   if (!favicon_service)
     return false;
 
-  ScopedJavaGlobalRef<jobject>* j_scoped_favicon_callback =
-      new ScopedJavaGlobalRef<jobject>();
-  j_scoped_favicon_callback->Reset(env, j_favicon_image_callback);
-
-  favicon_base::FaviconRawBitmapCallback callback_runner = base::Bind(
-      &OnLocalFaviconAvailable, base::Owned(j_scoped_favicon_callback));
+  favicon_base::FaviconRawBitmapCallback callback_runner =
+      base::Bind(&OnLocalFaviconAvailable,
+                 ScopedJavaGlobalRef<jobject>(j_favicon_image_callback));
 
   favicon_service->GetRawFaviconForPageURL(
       GURL(ConvertJavaStringToUTF16(env, j_page_url)),
diff --git a/chrome/browser/android/feedback/connectivity_checker.cc b/chrome/browser/android/feedback/connectivity_checker.cc
index a499401..38d79b7 100644
--- a/chrome/browser/android/feedback/connectivity_checker.cc
+++ b/chrome/browser/android/feedback/connectivity_checker.cc
@@ -41,27 +41,21 @@
   CONNECTIVITY_CHECK_RESULT_END = 5
 };
 
-void ExecuteCallback(jobject callback, ConnectivityCheckResult result) {
+void ExecuteCallback(const base::android::JavaRef<jobject>& callback,
+                     ConnectivityCheckResult result) {
   CHECK(result >= CONNECTIVITY_CHECK_RESULT_UNKNOWN);
   CHECK(result < CONNECTIVITY_CHECK_RESULT_END);
   Java_ConnectivityChecker_executeCallback(base::android::AttachCurrentThread(),
                                            callback, result);
 }
 
-void ExecuteCallbackFromRef(
-    base::android::ScopedJavaGlobalRef<jobject>* callback,
-    ConnectivityCheckResult result) {
-  ExecuteCallback(callback->obj(), result);
-}
-
 void PostCallback(JNIEnv* env,
-                  jobject j_callback,
+                  const base::android::JavaRef<jobject>& j_callback,
                   ConnectivityCheckResult result) {
   base::ThreadTaskRunnerHandle::Get()->PostTask(
       FROM_HERE,
-      base::Bind(&ExecuteCallbackFromRef,
-                 base::Owned(new base::android::ScopedJavaGlobalRef<jobject>(
-                     env, j_callback)),
+      base::Bind(&ExecuteCallback,
+                 base::android::ScopedJavaGlobalRef<jobject>(j_callback),
                  result));
 }
 
@@ -119,10 +113,9 @@
 
   bool connected = status.is_success() && response_code == net::HTTP_NO_CONTENT;
   if (connected) {
-    ExecuteCallback(java_callback_.obj(), CONNECTIVITY_CHECK_RESULT_CONNECTED);
+    ExecuteCallback(java_callback_, CONNECTIVITY_CHECK_RESULT_CONNECTED);
   } else {
-    ExecuteCallback(java_callback_.obj(),
-                    CONNECTIVITY_CHECK_RESULT_NOT_CONNECTED);
+    ExecuteCallback(java_callback_, CONNECTIVITY_CHECK_RESULT_NOT_CONNECTED);
   }
 
   base::ThreadTaskRunnerHandle::Get()->DeleteSoon(FROM_HERE, this);
@@ -164,7 +157,7 @@
     return;
   is_being_destroyed_ = true;
   url_fetcher_.reset();
-  ExecuteCallback(java_callback_.obj(), CONNECTIVITY_CHECK_RESULT_TIMEOUT);
+  ExecuteCallback(java_callback_, CONNECTIVITY_CHECK_RESULT_TIMEOUT);
   base::ThreadTaskRunnerHandle::Get()->DeleteSoon(FROM_HERE, this);
 }
 
diff --git a/chrome/browser/android/feedback/screenshot_task.cc b/chrome/browser/android/feedback/screenshot_task.cc
index db79120e..4e8d3e2 100644
--- a/chrome/browser/android/feedback/screenshot_task.cc
+++ b/chrome/browser/android/feedback/screenshot_task.cc
@@ -18,6 +18,7 @@
 
 using base::android::AttachCurrentThread;
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 using ui::WindowAndroid;
 
@@ -29,7 +30,7 @@
 }
 
 void SnapshotCallback(JNIEnv* env,
-                      base::android::ScopedJavaGlobalRef<jobject>* callback,
+                      const JavaRef<jobject>& callback,
                       scoped_refptr<base::RefCountedBytes> png_data) {
   jbyteArray jbytes = nullptr;
   if (png_data.get()) {
@@ -37,9 +38,7 @@
     jbytes = env->NewByteArray(size);
     env->SetByteArrayRegion(jbytes, 0, size, (jbyte*) png_data->front());
   }
-  Java_ScreenshotTask_notifySnapshotFinished(env,
-                                             callback->obj(),
-                                             jbytes);
+  Java_ScreenshotTask_notifySnapshotFinished(env, callback, jbytes);
 }
 
 void GrabWindowSnapshotAsync(JNIEnv* env,
@@ -52,13 +51,9 @@
       native_window_android);
   gfx::Rect window_bounds(window_width, window_height);
   ui::GrabWindowSnapshotAsync(
-      window_android,
-      window_bounds,
-      base::ThreadTaskRunnerHandle::Get(),
-      base::Bind(&SnapshotCallback,
-                 env,
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(env,
-                                                              jcallback))));
+      window_android, window_bounds, base::ThreadTaskRunnerHandle::Get(),
+      base::Bind(&SnapshotCallback, env,
+                 ScopedJavaGlobalRef<jobject>(env, jcallback)));
 }
 
 }  // namespace android
diff --git a/chrome/browser/android/large_icon_bridge.cc b/chrome/browser/android/large_icon_bridge.cc
index 513d46f..c5bfc68 100644
--- a/chrome/browser/android/large_icon_bridge.cc
+++ b/chrome/browser/android/large_icon_bridge.cc
@@ -23,6 +23,7 @@
 #include "ui/gfx/codec/png_codec.h"
 
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 using base::android::ScopedJavaLocalRef;
 using base::android::AttachCurrentThread;
@@ -30,9 +31,8 @@
 
 namespace {
 
-void OnLargeIconAvailable(
-    ScopedJavaGlobalRef<jobject>* j_callback,
-    const favicon_base::LargeIconResult& result) {
+void OnLargeIconAvailable(const JavaRef<jobject>& j_callback,
+                          const favicon_base::LargeIconResult& result) {
   JNIEnv* env = AttachCurrentThread();
 
   // Convert the result to a Java Bitmap.
@@ -51,7 +51,7 @@
     fallback = *result.fallback_icon_style;
 
   Java_LargeIconCallback_onLargeIconAvailable(
-      env, j_callback->obj(), j_bitmap, fallback.background_color,
+      env, j_callback, j_bitmap, fallback.background_color,
       fallback.is_default_background_color);
 }
 
@@ -87,12 +87,8 @@
   if (!large_icon_service)
     return false;
 
-  ScopedJavaGlobalRef<jobject>* j_global_callback =
-      new ScopedJavaGlobalRef<jobject>();
-  j_global_callback->Reset(env, j_callback);
-
-  favicon_base::LargeIconCallback callback_runner =
-      base::Bind(&OnLargeIconAvailable, base::Owned(j_global_callback));
+  favicon_base::LargeIconCallback callback_runner = base::Bind(
+      &OnLargeIconAvailable, ScopedJavaGlobalRef<jobject>(env, j_callback));
 
   large_icon_service->GetLargeIconOrFallbackStyle(
       GURL(ConvertJavaStringToUTF16(env, j_page_url)),
diff --git a/chrome/browser/android/preferences/pref_service_bridge.cc b/chrome/browser/android/preferences/pref_service_bridge.cc
index 6beb2ba..95a3398 100644
--- a/chrome/browser/android/preferences/pref_service_bridge.cc
+++ b/chrome/browser/android/preferences/pref_service_bridge.cc
@@ -72,6 +72,7 @@
 using base::android::ConvertJavaStringToUTF8;
 using base::android::ConvertUTF8ToJavaString;
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaLocalRef;
 using base::android::ScopedJavaGlobalRef;
 using content::BrowserThread;
@@ -737,7 +738,7 @@
 }
 
 static void ShowNoticeAboutOtherFormsOfBrowsingHistory(
-    ScopedJavaGlobalRef<jobject>* listener,
+    const JavaRef<jobject>& listener,
     bool show) {
   JNIEnv* env = AttachCurrentThread();
   UMA_HISTOGRAM_BOOLEAN(
@@ -745,17 +746,17 @@
   if (!show)
     return;
   Java_OtherFormsOfBrowsingHistoryListener_showNoticeAboutOtherFormsOfBrowsingHistory(
-      env, listener->obj());
+      env, listener);
 }
 
 static void EnableDialogAboutOtherFormsOfBrowsingHistory(
-    ScopedJavaGlobalRef<jobject>* listener,
+    const JavaRef<jobject>& listener,
     bool enabled) {
   JNIEnv* env = AttachCurrentThread();
   if (!enabled)
     return;
   Java_OtherFormsOfBrowsingHistoryListener_enableDialogAboutOtherFormsOfBrowsingHistory(
-      env, listener->obj());
+      env, listener);
 }
 
 static void RequestInfoAboutOtherFormsOfBrowsingHistory(
@@ -767,7 +768,7 @@
       ProfileSyncServiceFactory::GetForProfile(GetOriginalProfile()),
       WebHistoryServiceFactory::GetForProfile(GetOriginalProfile()),
       base::Bind(&ShowNoticeAboutOtherFormsOfBrowsingHistory,
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(env, listener))));
+                 ScopedJavaGlobalRef<jobject>(env, listener)));
 
   // The one-time notice in the dialog.
   browsing_data::ShouldPopupDialogAboutOtherFormsOfBrowsingHistory(
@@ -775,7 +776,7 @@
       WebHistoryServiceFactory::GetForProfile(GetOriginalProfile()),
       chrome::GetChannel(),
       base::Bind(&EnableDialogAboutOtherFormsOfBrowsingHistory,
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(env, listener))));
+                 ScopedJavaGlobalRef<jobject>(env, listener)));
 }
 
 static void SetAutoplayEnabled(JNIEnv* env,
diff --git a/components/dom_distiller/content/browser/distillable_page_utils_android.cc b/components/dom_distiller/content/browser/distillable_page_utils_android.cc
index f0890f68..68430f6 100644
--- a/components/dom_distiller/content/browser/distillable_page_utils_android.cc
+++ b/components/dom_distiller/content/browser/distillable_page_utils_android.cc
@@ -15,25 +15,23 @@
 #include "jni/DistillablePageUtils_jni.h"
 
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 
 namespace dom_distiller {
 namespace android {
 namespace {
-void OnIsPageDistillableResult(
-    std::unique_ptr<ScopedJavaGlobalRef<jobject>> callback_holder,
-    bool isDistillable) {
+void OnIsPageDistillableResult(const JavaRef<jobject>& callback,
+                               bool isDistillable) {
   Java_DistillablePageUtils_callOnIsPageDistillableResult(
-      base::android::AttachCurrentThread(), callback_holder->obj(),
-      isDistillable);
+      base::android::AttachCurrentThread(), callback, isDistillable);
 }
 
-void OnIsPageDistillableUpdate(
-    ScopedJavaGlobalRef<jobject>* callback_holder,
-    bool isDistillable, bool isLast) {
+void OnIsPageDistillableUpdate(const JavaRef<jobject>& callback,
+                               bool isDistillable,
+                               bool isLast) {
   Java_DistillablePageUtils_callOnIsPageDistillableUpdate(
-      base::android::AttachCurrentThread(), callback_holder->obj(),
-      isDistillable, isLast);
+      base::android::AttachCurrentThread(), callback, isDistillable, isLast);
 }
 }  // namespace
 
@@ -44,19 +42,17 @@
                               const JavaParamRef<jobject>& callback) {
   content::WebContents* web_contents(
       content::WebContents::FromJavaWebContents(webContents));
-  std::unique_ptr<ScopedJavaGlobalRef<jobject>> callback_holder(
-      new ScopedJavaGlobalRef<jobject>());
-  callback_holder->Reset(env, callback);
 
   if (!web_contents) {
     base::ThreadTaskRunnerHandle::Get()->PostTask(
-        FROM_HERE, base::Bind(OnIsPageDistillableResult,
-                              base::Passed(&callback_holder), false));
+        FROM_HERE,
+        base::Bind(OnIsPageDistillableResult,
+                   ScopedJavaGlobalRef<jobject>(env, callback), false));
     return;
   }
-  IsDistillablePage(
-      web_contents, is_mobile_optimized,
-      base::Bind(OnIsPageDistillableResult, base::Passed(&callback_holder)));
+  IsDistillablePage(web_contents, is_mobile_optimized,
+                    base::Bind(OnIsPageDistillableResult,
+                               ScopedJavaGlobalRef<jobject>(env, callback)));
 }
 
 static void SetDelegate(JNIEnv* env,
@@ -69,13 +65,8 @@
     return;
   }
 
-  // TODO(wychen): check memory management
-  ScopedJavaGlobalRef<jobject>* callback_holder(
-      new ScopedJavaGlobalRef<jobject>());
-  callback_holder->Reset(env, callback);
-
-  DistillabilityDelegate delegate =
-      base::Bind(OnIsPageDistillableUpdate, base::Owned(callback_holder));
+  DistillabilityDelegate delegate = base::Bind(
+      OnIsPageDistillableUpdate, ScopedJavaGlobalRef<jobject>(env, callback));
   setDelegate(web_contents, delegate);
 }
 
diff --git a/content/browser/android/app_web_message_port_service_impl.cc b/content/browser/android/app_web_message_port_service_impl.cc
index 2dd13b9..5e749fea 100644
--- a/content/browser/android/app_web_message_port_service_impl.cc
+++ b/content/browser/android/app_web_message_port_service_impl.cc
@@ -21,6 +21,7 @@
 using base::android::ConvertJavaStringToUTF16;
 using base::android::ConvertUTF16ToJavaString;
 using base::android::JavaParamRef;
+using base::android::JavaRef;
 using base::android::ScopedJavaGlobalRef;
 using base::android::ScopedJavaLocalRef;
 using base::android::ToJavaIntArray;
@@ -48,7 +49,7 @@
 
 void AppWebMessagePortServiceImpl::CreateMessageChannel(
     JNIEnv* env,
-    jobjectArray ports,
+    const JavaRef<jobjectArray>& ports,
     WebContents* web_contents) {
   DCHECK_CURRENTLY_ON(BrowserThread::UI);
   RenderFrameHostImpl* rfh =
@@ -56,9 +57,6 @@
   int routing_id = web_contents->GetMainFrame()->GetRoutingID();
   scoped_refptr<AppWebMessagePortMessageFilter> filter =
       rfh->GetAppWebMessagePortMessageFilter(routing_id);
-  ScopedJavaGlobalRef<jobjectArray>* j_ports =
-      new ScopedJavaGlobalRef<jobjectArray>();
-  j_ports->Reset(env, ports);
 
   int* portId1 = new int;
   int* portId2 = new int;
@@ -67,8 +65,9 @@
       base::Bind(&AppWebMessagePortServiceImpl::CreateMessageChannelOnIOThread,
                  base::Unretained(this), filter, portId1, portId2),
       base::Bind(&AppWebMessagePortServiceImpl::OnMessageChannelCreated,
-                 base::Unretained(this), base::Owned(j_ports),
-                 base::Owned(portId1), base::Owned(portId2)));
+                 base::Unretained(this),
+                 ScopedJavaGlobalRef<jobjectArray>(ports), base::Owned(portId1),
+                 base::Owned(portId2)));
 }
 
 void AppWebMessagePortServiceImpl::OnConvertedWebToAppMessage(
@@ -211,7 +210,7 @@
 }
 
 void AppWebMessagePortServiceImpl::OnMessageChannelCreated(
-    ScopedJavaGlobalRef<jobjectArray>* ports,
+    const JavaRef<jobjectArray>& ports,
     int* port1,
     int* port2) {
   DCHECK_CURRENTLY_ON(BrowserThread::UI);
@@ -220,7 +219,7 @@
   if (obj.is_null())
     return;
   Java_AppWebMessagePortService_onMessageChannelCreated(env, obj, *port1,
-                                                        *port2, *ports);
+                                                        *port2, ports);
 }
 
 // Adds a new port to the message port service.
diff --git a/content/browser/android/app_web_message_port_service_impl.h b/content/browser/android/app_web_message_port_service_impl.h
index e43785b..84c72ea 100644
--- a/content/browser/android/app_web_message_port_service_impl.h
+++ b/content/browser/android/app_web_message_port_service_impl.h
@@ -34,7 +34,7 @@
   // AppWebMessagePortService implementation
 
   void CreateMessageChannel(JNIEnv* env,
-                            jobjectArray ports,
+                            const base::android::JavaRef<jobjectArray>& ports,
                             WebContents* web_contents) override;
   void CleanupPort(int message_port_id) override;
 
@@ -73,7 +73,7 @@
       int* port1,
       int* port2);
   void OnMessageChannelCreated(
-      base::android::ScopedJavaGlobalRef<jobjectArray>* ports,
+      const base::android::JavaRef<jobjectArray>& ports,
       int* port1,
       int* port2);
   void AddPort(int message_port_id, AppWebMessagePortMessageFilter* filter);
diff --git a/content/browser/renderer_host/media/audio_sync_reader.cc b/content/browser/renderer_host/media/audio_sync_reader.cc
index 6299aaf..6437b9d2 100644
--- a/content/browser/renderer_host/media/audio_sync_reader.cc
+++ b/content/browser/renderer_host/media/audio_sync_reader.cc
@@ -18,6 +18,7 @@
 #include "build/build_config.h"
 #include "content/browser/renderer_host/media/media_stream_manager.h"
 #include "content/public/common/content_switches.h"
+#include "media/audio/audio_device_thread.h"
 #include "media/base/audio_parameters.h"
 
 using media::AudioBus;
@@ -140,20 +141,41 @@
 }
 
 // media::AudioOutputController::SyncReader implementations.
-void AudioSyncReader::UpdatePendingBytes(uint32_t bytes,
-                                         uint32_t frames_skipped) {
-  // Increase the number of skipped frames stored in shared memory. We don't
-  // send it over the socket since sending more than 4 bytes might lead to being
-  // descheduled. The reading side will zero it when consumed.
+void AudioSyncReader::RequestMoreData(base::TimeDelta delay,
+                                      base::TimeTicks delay_timestamp,
+                                      int prior_frames_skipped) {
+  // We don't send arguments over the socket since sending more than 4
+  // bytes might lead to being descheduled. The reading side will zero
+  // them when consumed.
   AudioOutputBuffer* buffer =
       reinterpret_cast<AudioOutputBuffer*>(shared_memory_->memory());
-  buffer->params.frames_skipped += frames_skipped;
+  // Increase the number of skipped frames stored in shared memory.
+  buffer->params.frames_skipped += prior_frames_skipped;
+  buffer->params.delay = delay.InMicroseconds();
+  buffer->params.delay_timestamp =
+      (delay_timestamp - base::TimeTicks()).InMicroseconds();
 
   // Zero out the entire output buffer to avoid stuttering/repeating-buffers
   // in the anomalous case if the renderer is unable to keep up with real-time.
   output_bus_->Zero();
 
-  socket_->Send(&bytes, sizeof(bytes));
+  uint32_t control_signal = 0;
+  if (delay.is_max()) {
+    // std::numeric_limits<uint32_t>::max() is a special signal which is
+    // returned after the browser stops the output device in response to a
+    // renderer side request.
+    control_signal = std::numeric_limits<uint32_t>::max();
+  }
+
+  size_t sent_bytes = socket_->Send(&control_signal, sizeof(control_signal));
+  if (sent_bytes != sizeof(control_signal)) {
+    const std::string error_message = "ASR: No room in socket buffer.";
+    LOG(WARNING) << error_message;
+    MediaStreamManager::SendMessageToNativeLog(error_message);
+    TRACE_EVENT_INSTANT0("audio",
+                         "AudioSyncReader: No room in socket buffer",
+                         TRACE_EVENT_SCOPE_THREAD);
+  }
   ++buffer_index_;
 }
 
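
Only the 4-byte control word crosses the socket in the new RequestMoreData();
delay, delay_timestamp, and frames_skipped travel through shared memory. A
sketch of the signalling convention the code above implies (the helper name is
hypothetical, and the receiving side is expected to special-case the max value
before treating the word as a data request):

#include <cstdint>
#include <limits>

#include "base/time/time.h"

uint32_t EncodeControlSignal(base::TimeDelta delay) {
  // base::TimeDelta::Max() means "the browser stopped the output device";
  // anything else means "more data requested", with the actual delay values
  // riding in shared memory rather than over the socket.
  return delay.is_max() ? std::numeric_limits<uint32_t>::max() : 0;
}
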
diff --git a/content/browser/renderer_host/media/audio_sync_reader.h b/content/browser/renderer_host/media/audio_sync_reader.h
index 1414704..29d1feb8 100644
--- a/content/browser/renderer_host/media/audio_sync_reader.h
+++ b/content/browser/renderer_host/media/audio_sync_reader.h
@@ -46,7 +46,9 @@
   }
 
   // media::AudioOutputController::SyncReader implementations.
-  void UpdatePendingBytes(uint32_t bytes, uint32_t frames_skipped) override;
+  void RequestMoreData(base::TimeDelta delay,
+                       base::TimeTicks delay_timestamp,
+                       int prior_frames_skipped) override;
   void Read(media::AudioBus* dest) override;
   void Close() override;
 
diff --git a/content/browser/web_contents/web_contents_android.cc b/content/browser/web_contents/web_contents_android.cc
index 9be39aeb..2d82f68 100644
--- a/content/browser/web_contents/web_contents_android.cc
+++ b/content/browser/web_contents/web_contents_android.cc
@@ -627,11 +627,10 @@
     jfloat width,
     jfloat height) {
   RenderWidgetHostViewAndroid* view = GetRenderWidgetHostViewAndroid();
-  const ReadbackRequestCallback result_callback =
-      base::Bind(&WebContentsAndroid::OnFinishGetContentBitmap,
-                 weak_factory_.GetWeakPtr(),
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(env, obj)),
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(env, jcallback)));
+  const ReadbackRequestCallback result_callback = base::Bind(
+      &WebContentsAndroid::OnFinishGetContentBitmap, weak_factory_.GetWeakPtr(),
+      ScopedJavaGlobalRef<jobject>(env, obj),
+      ScopedJavaGlobalRef<jobject>(env, jcallback));
   SkColorType pref_color_type = gfx::ConvertToSkiaColorType(color_type);
   if (!view || pref_color_type == kUnknown_SkColorType) {
     result_callback.Run(SkBitmap(), READBACK_FAILED);
@@ -665,10 +664,8 @@
       url, is_fav_icon, max_bitmap_size, bypass_cache,
       base::Bind(&WebContentsAndroid::OnFinishDownloadImage,
                  weak_factory_.GetWeakPtr(),
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(
-                     env, obj)),
-                 base::Owned(new ScopedJavaGlobalRef<jobject>(
-                     env, jcallback))));
+                 ScopedJavaGlobalRef<jobject>(env, obj),
+                 ScopedJavaGlobalRef<jobject>(env, jcallback)));
 }
 
 void WebContentsAndroid::DismissTextHandles(
@@ -680,21 +677,21 @@
 }
 
 void WebContentsAndroid::OnFinishGetContentBitmap(
-    ScopedJavaGlobalRef<jobject>* obj,
-    ScopedJavaGlobalRef<jobject>* callback,
+    const JavaRef<jobject>& obj,
+    const JavaRef<jobject>& callback,
     const SkBitmap& bitmap,
     ReadbackResponse response) {
   JNIEnv* env = base::android::AttachCurrentThread();
   ScopedJavaLocalRef<jobject> java_bitmap;
   if (response == READBACK_SUCCESS)
     java_bitmap = gfx::ConvertToJavaBitmap(&bitmap);
-  Java_WebContentsImpl_onGetContentBitmapFinished(env, *obj, *callback,
+  Java_WebContentsImpl_onGetContentBitmapFinished(env, obj, callback,
                                                   java_bitmap, response);
 }
 
 void WebContentsAndroid::OnFinishDownloadImage(
-    base::android::ScopedJavaGlobalRef<jobject>* obj,
-    base::android::ScopedJavaGlobalRef<jobject>* callback,
+    const JavaRef<jobject>& obj,
+    const JavaRef<jobject>& callback,
     int id,
     int http_status_code,
     const GURL& url,
@@ -720,7 +717,7 @@
                                                 size.height());
   }
   Java_WebContentsImpl_onDownloadImageFinished(
-      env, *obj, *callback, id, http_status_code, jurl, jbitmaps, jsizes);
+      env, obj, callback, id, http_status_code, jurl, jbitmaps, jsizes);
 }
 
 void WebContentsAndroid::SetMediaSession(
diff --git a/content/browser/web_contents/web_contents_android.h b/content/browser/web_contents/web_contents_android.h
index d23f93a2..065d9d08 100644
--- a/content/browser/web_contents/web_contents_android.h
+++ b/content/browser/web_contents/web_contents_android.h
@@ -190,20 +190,18 @@
  private:
   RenderWidgetHostViewAndroid* GetRenderWidgetHostViewAndroid();
 
-  void OnFinishGetContentBitmap(
-      base::android::ScopedJavaGlobalRef<jobject>* obj,
-      base::android::ScopedJavaGlobalRef<jobject>* callback,
-      const SkBitmap& bitmap,
-      ReadbackResponse response);
+  void OnFinishGetContentBitmap(const base::android::JavaRef<jobject>& obj,
+                                const base::android::JavaRef<jobject>& callback,
+                                const SkBitmap& bitmap,
+                                ReadbackResponse response);
 
-  void OnFinishDownloadImage(
-      base::android::ScopedJavaGlobalRef<jobject>* obj,
-      base::android::ScopedJavaGlobalRef<jobject>* callback,
-      int id,
-      int http_status_code,
-      const GURL& url,
-      const std::vector<SkBitmap>& bitmaps,
-      const std::vector<gfx::Size>& sizes);
+  void OnFinishDownloadImage(const base::android::JavaRef<jobject>& obj,
+                             const base::android::JavaRef<jobject>& callback,
+                             int id,
+                             int http_status_code,
+                             const GURL& url,
+                             const std::vector<SkBitmap>& bitmaps,
+                             const std::vector<gfx::Size>& sizes);
 
   WebContentsImpl* web_contents_;
   NavigationControllerAndroid navigation_controller_;
diff --git a/content/public/browser/android/app_web_message_port_service.h b/content/public/browser/android/app_web_message_port_service.h
index d323ff4..c1dd515 100644
--- a/content/public/browser/android/app_web_message_port_service.h
+++ b/content/public/browser/android/app_web_message_port_service.h
@@ -8,6 +8,7 @@
 #include <jni.h>
 #include <vector>
 
+#include "base/android/scoped_java_ref.h"
 #include "base/values.h"
 
 namespace content {
@@ -18,9 +19,10 @@
  public:
   virtual ~AppWebMessagePortService() {}
 
-  virtual void CreateMessageChannel(JNIEnv* env,
-                                    jobjectArray ports,
-                                    WebContents* web_contents) = 0;
+  virtual void CreateMessageChannel(
+      JNIEnv* env,
+      const base::android::JavaRef<jobjectArray>& ports,
+      WebContents* web_contents) = 0;
 
   virtual void CleanupPort(int message_port_id) = 0;
 };
diff --git a/content/renderer/media/renderer_webaudiodevice_impl.cc b/content/renderer/media/renderer_webaudiodevice_impl.cc
index fc698e4d..4924cc68 100644
--- a/content/renderer/media/renderer_webaudiodevice_impl.cc
+++ b/content/renderer/media/renderer_webaudiodevice_impl.cc
@@ -15,6 +15,7 @@
 #include "content/renderer/media/audio_device_factory.h"
 #include "content/renderer/render_frame_impl.h"
 #include "content/renderer/render_thread_impl.h"
+#include "media/base/audio_timestamp_helper.h"
 #include "media/base/silent_sink_suspender.h"
 #include "third_party/WebKit/public/web/WebLocalFrame.h"
 #include "third_party/WebKit/public/web/WebView.h"
@@ -100,9 +101,10 @@
   return params_.sample_rate();
 }
 
-int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest,
-                                       uint32_t frames_delayed,
-                                       uint32_t frames_skipped) {
+int RendererWebAudioDeviceImpl::Render(base::TimeDelta delay,
+                                       base::TimeTicks delay_timestamp,
+                                       int prior_frames_skipped,
+                                       media::AudioBus* dest) {
   // Wrap the output pointers using WebVector.
   WebVector<float*> web_audio_dest_data(static_cast<size_t>(dest->channels()));
   for (int i = 0; i < dest->channels(); ++i)
diff --git a/content/renderer/media/renderer_webaudiodevice_impl.h b/content/renderer/media/renderer_webaudiodevice_impl.h
index a761e429..83b596f 100644
--- a/content/renderer/media/renderer_webaudiodevice_impl.h
+++ b/content/renderer/media/renderer_webaudiodevice_impl.h
@@ -37,9 +37,10 @@
   double sampleRate() override;
 
   // AudioRendererSink::RenderCallback implementation.
-  int Render(media::AudioBus* dest,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             media::AudioBus* dest) override;
 
   void OnRenderError() override;
 
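
Every RenderCallback implementer in this CL migrates to the same signature:
the delay arrives as a base::TimeDelta plus the base::TimeTicks at which it
was measured, so callees no longer convert frame counts to milliseconds
themselves. A minimal sketch of an implementer under the new interface
(SilenceSource is hypothetical):

#include "media/base/audio_bus.h"
#include "media/base/audio_renderer_sink.h"

class SilenceSource : public media::AudioRendererSink::RenderCallback {
 public:
  int Render(base::TimeDelta delay,
             base::TimeTicks delay_timestamp,
             int prior_frames_skipped,
             media::AudioBus* dest) override {
    // |delay| tells the source how far in the future this buffer will be
    // heard, measured from |delay_timestamp|.
    dest->Zero();
    return dest->frames();
  }
  void OnRenderError() override {}
};
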
diff --git a/content/renderer/media/track_audio_renderer.cc b/content/renderer/media/track_audio_renderer.cc
index a17f85c..2e6b0cbe 100644
--- a/content/renderer/media/track_audio_renderer.cc
+++ b/content/renderer/media/track_audio_renderer.cc
@@ -39,9 +39,10 @@
 }  // namespace
 
 // media::AudioRendererSink::RenderCallback implementation
-int TrackAudioRenderer::Render(media::AudioBus* audio_bus,
-                               uint32_t frames_delayed,
-                               uint32_t frames_skipped) {
+int TrackAudioRenderer::Render(base::TimeDelta delay,
+                               base::TimeTicks delay_timestamp,
+                               int prior_frames_skipped,
+                               media::AudioBus* audio_bus) {
   TRACE_EVENT0("audio", "TrackAudioRenderer::Render");
   base::AutoLock auto_lock(thread_lock_);
 
@@ -50,19 +51,12 @@
     return 0;
   }
 
-  // Source sample rate equals to output one, see MaybeStartSink(), so using it.
-  uint32_t audio_delay_milliseconds = static_cast<double>(frames_delayed) *
-                                      base::Time::kMillisecondsPerSecond /
-                                      source_params_.sample_rate();
-
   // TODO(miu): Plumbing is needed to determine the actual playout timestamp
   // of the audio, instead of just snapshotting TimeTicks::Now(), for proper
   // audio/video sync.  http://crbug.com/335335
-  const base::TimeTicks playout_time =
-      base::TimeTicks::Now() +
-      base::TimeDelta::FromMilliseconds(audio_delay_milliseconds);
+  const base::TimeTicks playout_time = base::TimeTicks::Now() + delay;
   DVLOG(2) << "Pulling audio out of shifter to be played "
-           << audio_delay_milliseconds << " ms from now.";
+           << delay.InMilliseconds() << " ms from now.";
   audio_shifter_->Pull(audio_bus, playout_time);
   num_samples_rendered_ += audio_bus->frames();
   return audio_bus->frames();
diff --git a/content/renderer/media/track_audio_renderer.h b/content/renderer/media/track_audio_renderer.h
index 509f319..87547107 100644
--- a/content/renderer/media/track_audio_renderer.h
+++ b/content/renderer/media/track_audio_renderer.h
@@ -97,9 +97,10 @@
   // media::AudioRendererSink::RenderCallback implementation.
   // Render() is called on the AudioOutputDevice thread and OnRenderError()
   // on the IO thread.
-  int Render(media::AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             media::AudioBus* audio_bus) override;
   void OnRenderError() override;
 
   // Initializes and starts the |sink_| if
diff --git a/content/renderer/media/webrtc_audio_renderer.cc b/content/renderer/media/webrtc_audio_renderer.cc
index 3e39fe4..32c27bf 100644
--- a/content/renderer/media/webrtc_audio_renderer.cc
+++ b/content/renderer/media/webrtc_audio_renderer.cc
@@ -167,7 +167,6 @@
       source_(NULL),
       play_ref_count_(0),
       start_ref_count_(0),
-      audio_delay_milliseconds_(0),
       sink_params_(kFormat, kChannelLayout, 0, kBitsPerSample, 0),
       output_device_id_(device_id),
       security_origin_(security_origin) {
@@ -268,7 +267,7 @@
     state_ = PLAYING;
 
     if (audio_fifo_) {
-      audio_delay_milliseconds_ = 0;
+      audio_delay_ = base::TimeDelta();
       audio_fifo_->Clear();
     }
   }
@@ -401,35 +400,26 @@
   callback.Run(media::OUTPUT_DEVICE_STATUS_OK);
 }
 
-int WebRtcAudioRenderer::Render(media::AudioBus* audio_bus,
-                                uint32_t frames_delayed,
-                                uint32_t frames_skipped) {
+int WebRtcAudioRenderer::Render(base::TimeDelta delay,
+                                base::TimeTicks delay_timestamp,
+                                int prior_frames_skipped,
+                                media::AudioBus* audio_bus) {
   DCHECK(sink_->CurrentThreadIsRenderingThread());
   base::AutoLock auto_lock(lock_);
   if (!source_)
     return 0;
 
-  // TODO(grunell): Converting from frames to milliseconds will potentially lose
-  // hundreds of microseconds which may cause audio video drift. Update
-  // this class and all usage of render delay msec -> frames (possibly even
-  // using a double type for frames). See http://crbug.com/586540
-  uint32_t audio_delay_milliseconds = static_cast<double>(frames_delayed) *
-                                      base::Time::kMillisecondsPerSecond /
-                                      sink_params_.sample_rate();
-
   DVLOG(2) << "WebRtcAudioRenderer::Render()";
-  DVLOG(2) << "audio_delay_milliseconds: " << audio_delay_milliseconds;
+  DVLOG(2) << "audio_delay: " << delay;
 
-  DCHECK_LE(audio_delay_milliseconds, static_cast<uint32_t>(INT_MAX));
-  audio_delay_milliseconds_ = static_cast<int>(audio_delay_milliseconds);
+  audio_delay_ = delay;
 
   // If there are skipped frames, pull and throw away the same amount. We always
   // pull 10 ms of data from the source (see PrepareSink()), so the fifo is only
   // required if the number of frames to drop doesn't correspond to 10 ms.
-  if (frames_skipped > 0) {
-    const uint32_t source_frames_per_buffer =
-        static_cast<uint32_t>(sink_params_.sample_rate() / 100);
-    if (!audio_fifo_ && frames_skipped != source_frames_per_buffer) {
+  if (prior_frames_skipped > 0) {
+    const int source_frames_per_buffer = sink_params_.sample_rate() / 100;
+    if (!audio_fifo_ && prior_frames_skipped != source_frames_per_buffer) {
       audio_fifo_.reset(new media::AudioPullFifo(
           kChannels, source_frames_per_buffer,
           base::Bind(&WebRtcAudioRenderer::SourceCallback,
@@ -437,7 +427,7 @@
     }
 
     std::unique_ptr<media::AudioBus> drop_bus =
-        media::AudioBus::Create(audio_bus->channels(), frames_skipped);
+        media::AudioBus::Create(audio_bus->channels(), prior_frames_skipped);
     if (audio_fifo_)
       audio_fifo_->Consume(drop_bus.get(), drop_bus->frames());
     else
@@ -467,7 +457,7 @@
            << fifo_frame_delay << ", "
            << audio_bus->frames() << ")";
 
-  int output_delay_milliseconds = audio_delay_milliseconds_;
+  int output_delay_milliseconds = audio_delay_.InMilliseconds();
   // TODO(grunell): This integer division by sample_rate will cause loss of
   // partial milliseconds, and may cause avsync drift. http://crbug.com/586540
   output_delay_milliseconds += fifo_frame_delay *
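
The TODO deleted above names the precision problem this CL addresses:
converting a frame count to whole milliseconds truncates sub-millisecond delay
on every callback. Illustrative numbers, assuming a 44.1 kHz sink:

#include "base/time/time.h"

void DelayTruncationExample() {
  const int frames_delayed = 480;  // ~10.884 ms of audio at 44.1 kHz.
  const int sample_rate = 44100;
  const int delay_ms = static_cast<double>(frames_delayed) *
                       base::Time::kMillisecondsPerSecond / sample_rate;
  // delay_ms == 10, so ~0.88 ms is lost per callback. Carrying a
  // base::TimeDelta (microsecond resolution) end-to-end, as this CL does,
  // defers any rounding to the final AEC conversion above.
}
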
diff --git a/content/renderer/media/webrtc_audio_renderer.h b/content/renderer/media/webrtc_audio_renderer.h
index 3c4d22a11..1da6863 100644
--- a/content/renderer/media/webrtc_audio_renderer.h
+++ b/content/renderer/media/webrtc_audio_renderer.h
@@ -160,9 +160,10 @@
 
   // media::AudioRendererSink::RenderCallback implementation.
   // These two methods are called on the AudioOutputDevice worker thread.
-  int Render(media::AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             media::AudioBus* audio_bus) override;
   void OnRenderError() override;
 
   // Called by AudioPullFifo when more data is necessary.
@@ -228,7 +229,7 @@
 
   // Contains the accumulated delay estimate which is provided to the WebRTC
   // AEC.
-  int audio_delay_milliseconds_;
+  base::TimeDelta audio_delay_;
 
   base::TimeDelta current_time_;
 
diff --git a/content/test/gpu/gpu_tests/pixel_integration_test.py b/content/test/gpu/gpu_tests/pixel_integration_test.py
index 309166e..17eed5a 100644
--- a/content/test/gpu/gpu_tests/pixel_integration_test.py
+++ b/content/test/gpu/gpu_tests/pixel_integration_test.py
@@ -5,6 +5,7 @@
 import logging
 import os
 import re
+import sys
 
 from gpu_tests import cloud_storage_integration_test_base
 from gpu_tests import pixel_expectations
@@ -114,6 +115,8 @@
     name = 'Pixel'
     pages = pixel_test_pages.DefaultPages(name)
     pages += pixel_test_pages.ExperimentalCanvasFeaturesPages(name)
+    if sys.platform.startswith('darwin'):
+      pages += pixel_test_pages.MacSpecificPages(name)
     for p in pages:
       yield(p.name, p.url, (p))
 
diff --git a/media/audio/audio_output_controller.cc b/media/audio/audio_output_controller.cc
index ae879c2..c8a75b6 100644
--- a/media/audio/audio_output_controller.cc
+++ b/media/audio/audio_output_controller.cc
@@ -16,6 +16,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
 #include "base/trace_event/trace_event.h"
+#include "media/base/audio_timestamp_helper.h"
 
 using base::TimeDelta;
 
@@ -174,7 +175,7 @@
     return;
 
   // Ask for first packet.
-  sync_reader_->UpdatePendingBytes(0, 0);
+  sync_reader_->RequestMoreData(base::TimeDelta(), base::TimeTicks(), 0);
 
   state_ = kPlaying;
 
@@ -227,7 +228,7 @@
   // Let the renderer know we've stopped.  Necessary to let PPAPI clients know
   // audio has been shutdown.  TODO(dalecurtis): This stinks.  PPAPI should have
   // a better way to know when it should exit PPB_Audio_Shared::Run().
-  sync_reader_->UpdatePendingBytes(std::numeric_limits<uint32_t>::max(), 0);
+  sync_reader_->RequestMoreData(base::TimeDelta::Max(), base::TimeTicks(), 0);
 
   handler_->OnPaused();
 }
@@ -308,12 +309,10 @@
 
   sync_reader_->Read(dest);
 
-  const int total_bytes_delay =
-      delay.InSecondsF() * params_.GetBytesPerSecond();
   const int frames = dest->frames();
-  sync_reader_->UpdatePendingBytes(
-      total_bytes_delay + frames * params_.GetBytesPerFrame(),
-      prior_frames_skipped);
+  delay += AudioTimestampHelper::FramesToTime(frames, params_.sample_rate());
+
+  sync_reader_->RequestMoreData(delay, delay_timestamp, prior_frames_skipped);
 
   bool need_to_duplicate = false;
   {
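
With this change the controller reports total delay as the hardware delay it
was given plus the duration of the buffer it is about to request. A worked
example with assumed values (a 20 ms hardware delay and 10 ms buffers at
48 kHz):

#include "base/time/time.h"
#include "media/base/audio_timestamp_helper.h"

void TotalDelayExample() {
  base::TimeDelta delay = base::TimeDelta::FromMilliseconds(20);   // from OS
  delay += media::AudioTimestampHelper::FramesToTime(480, 48000);  // +10 ms
  // delay is now 30 ms: the first sample of the requested buffer will be
  // heard 30 ms after |delay_timestamp|.
}
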
diff --git a/media/audio/audio_output_controller.h b/media/audio/audio_output_controller.h
index 856716f..ce16c1d1 100644
--- a/media/audio/audio_output_controller.h
+++ b/media/audio/audio_output_controller.h
@@ -88,14 +88,14 @@
    public:
     virtual ~SyncReader() {}
 
-    // Notify the synchronous reader the number of bytes in the
-    // AudioOutputController not yet played. This is used by SyncReader to
-    // prepare more data and perform synchronization. Also inform about if any
-    // frames has been skipped by the renderer (typically the OS). The renderer
-    // source can handle this appropriately depending on the type of source. An
-    // ordinary file playout would ignore this.
-    virtual void UpdatePendingBytes(uint32_t bytes,
-                                    uint32_t frames_skipped) = 0;
+    // Asks the synchronous reader to prepare more data and perform
+    // synchronization. Also reports the output delay at a given moment, and
+    // how many frames, if any, the renderer (typically the OS) has skipped.
+    // The source can handle this appropriately depending on its type; an
+    // ordinary file playout would ignore it.
+    virtual void RequestMoreData(base::TimeDelta delay,
+                                 base::TimeTicks delay_timestamp,
+                                 int prior_frames_skipped) = 0;
 
     // Attempts to completely fill |dest|, zeroing |dest| if the request can not
     // be fulfilled (due to timeout).
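
For reference, a hypothetical no-op implementation of the migrated interface
(the names are illustrative; the real browser-side implementation is
AudioSyncReader, updated earlier in this CL):

#include "media/audio/audio_output_controller.h"
#include "media/base/audio_bus.h"

class FakeSyncReader : public media::AudioOutputController::SyncReader {
 public:
  void RequestMoreData(base::TimeDelta delay,
                       base::TimeTicks delay_timestamp,
                       int prior_frames_skipped) override {
    // A real reader publishes |delay| and |delay_timestamp| to shared memory
    // and signals the renderer over a socket, as AudioSyncReader does.
  }
  void Read(media::AudioBus* dest) override { dest->Zero(); }
  void Close() override {}
};
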
diff --git a/media/audio/audio_output_controller_unittest.cc b/media/audio/audio_output_controller_unittest.cc
index 62058da..0fc47c82 100644
--- a/media/audio/audio_output_controller_unittest.cc
+++ b/media/audio/audio_output_controller_unittest.cc
@@ -62,8 +62,10 @@
  public:
   MockAudioOutputControllerSyncReader() {}
 
-  MOCK_METHOD2(UpdatePendingBytes,
-               void(uint32_t bytes, uint32_t frames_skipped));
+  MOCK_METHOD3(RequestMoreData,
+               void(base::TimeDelta delay,
+                    base::TimeTicks delay_timestamp,
+                    int prior_frames_skipped));
   MOCK_METHOD1(Read, void(AudioBus* dest));
   MOCK_METHOD0(Close, void());
 
@@ -142,7 +144,7 @@
 
     // During playback, the mock pretends to provide audio data rendered and
     // sent from the render process.
-    EXPECT_CALL(mock_sync_reader_, UpdatePendingBytes(_, _)).Times(AtLeast(1));
+    EXPECT_CALL(mock_sync_reader_, RequestMoreData(_, _, _)).Times(AtLeast(1));
     EXPECT_CALL(mock_sync_reader_, Read(_)).WillRepeatedly(PopulateBuffer());
     controller_->Play();
     base::RunLoop().RunUntilIdle();
diff --git a/media/audio/audio_output_device.cc b/media/audio/audio_output_device.cc
index 0dcc76a..2779174b 100644
--- a/media/audio/audio_output_device.cc
+++ b/media/audio/audio_output_device.cc
@@ -39,14 +39,13 @@
   void MapSharedMemory() override;
 
   // Called whenever we receive notifications about pending data.
-  void Process(uint32_t pending_data) override;
+  void Process(uint32_t control_signal) override;
 
   // Returns whether the current thread is the audio device thread or not.
   // Will always return true if DCHECKs are not enabled.
   bool CurrentThreadIsAudioDeviceThread();
 
  private:
-  const int bytes_per_frame_;
   AudioRendererSink::RenderCallback* render_callback_;
   std::unique_ptr<AudioBus> output_bus_;
   uint64_t callback_num_;
@@ -431,7 +430,6 @@
     int memory_length,
     AudioRendererSink::RenderCallback* render_callback)
     : AudioDeviceThread::Callback(audio_parameters, memory, memory_length, 1),
-      bytes_per_frame_(audio_parameters.GetBytesPerFrame()),
       render_callback_(render_callback),
       callback_num_(0) {}
 
@@ -451,10 +449,7 @@
 }
 
 // Called whenever we receive notifications about pending data.
-void AudioOutputDevice::AudioThreadCallback::Process(uint32_t pending_data) {
-  // Convert the number of pending bytes in the render buffer into frames.
-  double frames_delayed = static_cast<double>(pending_data) / bytes_per_frame_;
-
+void AudioOutputDevice::AudioThreadCallback::Process(uint32_t control_signal) {
   callback_num_++;
   TRACE_EVENT1("audio", "AudioOutputDevice::FireRenderCallback",
                "callback_num", callback_num_);
@@ -472,16 +467,23 @@
   uint32_t frames_skipped = buffer->params.frames_skipped;
   buffer->params.frames_skipped = 0;
 
-  DVLOG(4) << __func__ << " pending_data:" << pending_data
-           << " frames_delayed(pre-round):" << frames_delayed
+  base::TimeDelta delay =
+      base::TimeDelta::FromMicroseconds(buffer->params.delay);
+
+  base::TimeTicks delay_timestamp =
+      base::TimeTicks() +
+      base::TimeDelta::FromMicroseconds(buffer->params.delay_timestamp);
+
+  DVLOG(4) << __func__ << " delay:" << delay
+           << " delay_timestamp:" << delay_timestamp
            << " frames_skipped:" << frames_skipped;
 
   // Update the audio-delay measurement, inform about the number of skipped
   // frames, and ask client to render audio.  Since |output_bus_| is wrapping
   // the shared memory the Render() call is writing directly into the shared
   // memory.
-  render_callback_->Render(output_bus_.get(), std::round(frames_delayed),
-                           frames_skipped);
+  render_callback_->Render(delay, delay_timestamp, frames_skipped,
+                           output_bus_.get());
 }
 
 bool AudioOutputDevice::AudioThreadCallback::
diff --git a/media/audio/audio_output_device_unittest.cc b/media/audio/audio_output_device_unittest.cc
index 70a9b39..44414d4 100644
--- a/media/audio/audio_output_device_unittest.cc
+++ b/media/audio/audio_output_device_unittest.cc
@@ -47,16 +47,18 @@
 const char kNonDefaultDeviceId[] = "valid-nondefault-device-id";
 const char kUnauthorizedDeviceId[] = "unauthorized-device-id";
 const int kAuthTimeoutForTestingMs = 500;
+const int kOutputDelayMs = 20;
 
 class MockRenderCallback : public AudioRendererSink::RenderCallback {
  public:
   MockRenderCallback() {}
   virtual ~MockRenderCallback() {}
 
-  MOCK_METHOD3(Render,
-               int(AudioBus* dest,
-                   uint32_t frames_delayed,
-                   uint32_t frames_skipped));
+  MOCK_METHOD4(Render,
+               int(base::TimeDelta delay,
+                   base::TimeTicks timestamp,
+                   int prior_frames_skipped,
+                   AudioBus* dest));
   MOCK_METHOD0(OnRenderError, void());
 };
 
@@ -79,8 +81,19 @@
   MOCK_METHOD1(SetVolume, void(double volume));
 };
 
-ACTION_P2(SendPendingBytes, socket, pending_bytes) {
-  socket->Send(&pending_bytes, sizeof(pending_bytes));
+ACTION_P2(RequestMoreData, socket, shared_memory) {
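+  // Emulates the browser side: publish the delay via the shared-memory
+  // buffer parameters, then wake the renderer with a bare control signal.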
+  AudioOutputBuffer* buffer =
+      reinterpret_cast<AudioOutputBuffer*>(shared_memory->memory());
+  buffer->params.frames_skipped = 0;
+  buffer->params.delay =
+      base::TimeDelta::FromMilliseconds(kOutputDelayMs).InMicroseconds();
+  buffer->params.delay_timestamp =
+      (base::TimeTicks::Now() - base::TimeTicks()).InMicroseconds();
+
+  constexpr int kControlSignal = 0;
+  socket->Send(&kControlSignal, sizeof(kControlSignal));
 }
 
 // Used to terminate a loop from a different thread than the loop belongs to.
@@ -227,10 +238,8 @@
   // Respond by asking for some audio data.  This should ask our callback
   // to provide some audio data that AudioOutputDevice then writes into the
   // shared memory section.
-  const int kMemorySize = CalculateMemorySize();
-
   EXPECT_CALL(*audio_output_ipc_, PlayStream())
-      .WillOnce(SendPendingBytes(&browser_socket_, kMemorySize));
+      .WillOnce(RequestMoreData(&browser_socket_, &shared_memory_));
 
   // We expect calls to our audio renderer callback, which returns the number
   // of frames written to the memory section.
@@ -240,7 +249,9 @@
   // So, for the sake of this test, we consider the call to Render a sign
   // of success and quit the loop.
   const int kNumberOfFramesToProcess = 0;
-  EXPECT_CALL(callback_, Render(_, _, _))
+  EXPECT_CALL(
+      callback_,
+      Render(base::TimeDelta::FromMilliseconds(kOutputDelayMs), _, _, _))
       .WillOnce(DoAll(QuitLoop(io_loop_.task_runner()),
                       Return(kNumberOfFramesToProcess)));
 }
diff --git a/media/audio/audio_output_stream_sink.cc b/media/audio/audio_output_stream_sink.cc
index 5bb2f8e..17ad073 100644
--- a/media/audio/audio_output_stream_sink.cc
+++ b/media/audio/audio_output_stream_sink.cc
@@ -85,7 +85,7 @@
 }
 
 int AudioOutputStreamSink::OnMoreData(base::TimeDelta delay,
-                                      base::TimeTicks /* delay_timestamp */,
+                                      base::TimeTicks delay_timestamp,
                                       int prior_frames_skipped,
                                       AudioBus* dest) {
   // Note: Runs on the audio thread created by the OS.
@@ -93,11 +93,8 @@
   if (!active_render_callback_)
     return 0;
 
-  uint32_t frames_delayed =
-      AudioTimestampHelper::TimeToFrames(delay, active_params_.sample_rate());
-
-  return active_render_callback_->Render(dest, frames_delayed,
-                                         prior_frames_skipped);
+  return active_render_callback_->Render(delay, delay_timestamp,
+                                         prior_frames_skipped, dest);
 }
 
 void AudioOutputStreamSink::OnError(AudioOutputStream* stream) {
diff --git a/media/audio/clockless_audio_sink.cc b/media/audio/clockless_audio_sink.cc
index d0dd7e94..1dc1a12 100644
--- a/media/audio/clockless_audio_sink.cc
+++ b/media/audio/clockless_audio_sink.cc
@@ -51,7 +51,8 @@
   void Run() override {
      base::TimeTicks start;
      while (!stop_event_->IsSignaled()) {
-       const int frames_received = callback_->Render(audio_bus_.get(), 0, 0);
+       const int frames_received = callback_->Render(
+           base::TimeDelta(), base::TimeTicks::Now(), 0, audio_bus_.get());
        DCHECK_GE(frames_received, 0);
        if (audio_hash_)
          audio_hash_->Update(audio_bus_.get(), frames_received);
diff --git a/media/audio/null_audio_sink.cc b/media/audio/null_audio_sink.cc
index 43221998..9e3d1f8 100644
--- a/media/audio/null_audio_sink.cc
+++ b/media/audio/null_audio_sink.cc
@@ -92,7 +92,8 @@
 void NullAudioSink::CallRender() {
   DCHECK(task_runner_->BelongsToCurrentThread());
 
-  int frames_received = callback_->Render(audio_bus_.get(), 0, 0);
+  int frames_received = callback_->Render(
+      base::TimeDelta(), base::TimeTicks::Now(), 0, audio_bus_.get());
   if (!audio_hash_ || frames_received <= 0)
     return;
 
diff --git a/media/audio/win/audio_output_win_unittest.cc b/media/audio/win/audio_output_win_unittest.cc
index 8a131ac..23c85e5 100644
--- a/media/audio/win/audio_output_win_unittest.cc
+++ b/media/audio/win/audio_output_win_unittest.cc
@@ -532,26 +532,36 @@
   SyncSocketSource(base::SyncSocket* socket, const AudioParameters& params)
       : socket_(socket), params_(params) {
     // Setup AudioBus wrapping data we'll receive over the sync socket.
-    data_size_ = AudioBus::CalculateMemorySize(params);
+    packet_size_ = AudioBus::CalculateMemorySize(params);
     data_.reset(static_cast<float*>(
-        base::AlignedAlloc(data_size_, AudioBus::kChannelAlignment)));
-    audio_bus_ = AudioBus::WrapMemory(params, data_.get());
+        base::AlignedAlloc(packet_size_ + sizeof(AudioOutputBufferParameters),
+                           AudioBus::kChannelAlignment)));
+    audio_bus_ = AudioBus::WrapMemory(params, output_buffer()->audio);
   }
   ~SyncSocketSource() override {}
 
   // AudioSourceCallback::OnMoreData implementation:
   int OnMoreData(base::TimeDelta delay,
-                 base::TimeTicks /* delay_timestamp */,
+                 base::TimeTicks delay_timestamp,
                  int /* prior_frames_skipped */,
                  AudioBus* dest) override {
-    uint32_t total_bytes_delay =
-        delay.InSecondsF() * params_.GetBytesPerSecond();
-    socket_->Send(&total_bytes_delay, sizeof(total_bytes_delay));
-    uint32_t size = socket_->Receive(data_.get(), data_size_);
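+    // Write the delay parameters into shared memory before signaling, so
+    // the reader never observes stale values.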
+    output_buffer()->params.delay = delay.InMicroseconds();
+    output_buffer()->params.delay_timestamp =
+        (delay_timestamp - base::TimeTicks()).InMicroseconds();
+    uint32_t control_signal = 0;
+    socket_->Send(&control_signal, sizeof(control_signal));
+    uint32_t size = socket_->Receive(data_.get(), packet_size_);
+
     DCHECK_EQ(static_cast<size_t>(size) % sizeof(*audio_bus_->channel(0)), 0U);
     audio_bus_->CopyTo(dest);
     return audio_bus_->frames();
   }
+  int packet_size() const { return packet_size_; }
+  AudioOutputBuffer* output_buffer() const {
+    return reinterpret_cast<AudioOutputBuffer*>(data_.get());
+  }
 
   // AudioSourceCallback::OnError implementation:
   void OnError(AudioOutputStream* stream) override {}
@@ -559,7 +567,7 @@
  private:
   base::SyncSocket* socket_;
   const AudioParameters params_;
-  int data_size_;
+  int packet_size_;
   std::unique_ptr<float, base::AlignedFreeDeleter> data_;
   std::unique_ptr<AudioBus> audio_bus_;
 };
@@ -571,7 +579,7 @@
   int frames;
   double sine_freq;
   uint32_t packet_size_bytes;
-  int bytes_per_second;
+  AudioOutputBuffer* buffer;
 };
 
 // This thread provides the data that the SyncSocketSource above needs
@@ -592,17 +600,17 @@
   SineWaveAudioSource sine(1, ctx.sine_freq, ctx.sample_rate);
   const int kTwoSecFrames = ctx.sample_rate * 2;
 
-  uint32_t total_bytes_delay = 0;
-  int times = 0;
+  uint32_t control_signal = 0;
   for (int ix = 0; ix < kTwoSecFrames; ix += ctx.frames) {
-    if (ctx.socket->Receive(&total_bytes_delay, sizeof(total_bytes_delay)) == 0)
+    if (ctx.socket->Receive(&control_signal, sizeof(control_signal)) == 0)
       break;
-    if ((times > 0) && (total_bytes_delay < 1000)) __debugbreak();
-    base::TimeDelta delay = base::TimeDelta::FromSecondsD(
-        static_cast<double>(total_bytes_delay) / ctx.bytes_per_second);
-    sine.OnMoreData(delay, base::TimeTicks::Now(), 0, audio_bus.get());
+    base::TimeDelta delay =
+        base::TimeDelta::FromMicroseconds(ctx.buffer->params.delay);
+    base::TimeTicks delay_timestamp =
+        base::TimeTicks() +
+        base::TimeDelta::FromMicroseconds(ctx.buffer->params.delay_timestamp);
+    sine.OnMoreData(delay, delay_timestamp, 0, audio_bus.get());
     ctx.socket->Send(data.get(), ctx.packet_size_bytes);
-    ++times;
   }
 
   return 0;
@@ -638,11 +646,11 @@
   SyncThreadContext thread_context;
   thread_context.sample_rate = params.sample_rate();
   thread_context.sine_freq = 200.0;
-  thread_context.packet_size_bytes = AudioBus::CalculateMemorySize(params);
+  thread_context.packet_size_bytes = source.packet_size();
   thread_context.frames = params.frames_per_buffer();
   thread_context.channels = params.channels();
   thread_context.socket = &sockets[1];
-  thread_context.bytes_per_second = params.GetBytesPerSecond();
+  thread_context.buffer = source.output_buffer();
 
   HANDLE thread = ::CreateThread(NULL, 0, SyncSocketThread,
                                  &thread_context, 0, NULL);
diff --git a/media/base/audio_bus_perftest.cc b/media/base/audio_bus_perftest.cc
index 62d710e..fd882d7 100644
--- a/media/base/audio_bus_perftest.cc
+++ b/media/base/audio_bus_perftest.cc
@@ -46,7 +46,7 @@
 TEST(AudioBusPerfTest, Interleave) {
   std::unique_ptr<AudioBus> bus = AudioBus::Create(2, 48000 * 120);
   FakeAudioRenderCallback callback(0.2);
-  callback.Render(bus.get(), 0, 0);
+  callback.Render(base::TimeDelta(), base::TimeTicks::Now(), 0, bus.get());
 
   RunInterleaveBench<int8_t>(bus.get(), "int8_t");
   RunInterleaveBench<int16_t>(bus.get(), "int16_t");
diff --git a/media/base/audio_converter_unittest.cc b/media/base/audio_converter_unittest.cc
index 9d73dd32..b324912 100644
--- a/media/base/audio_converter_unittest.cc
+++ b/media/base/audio_converter_unittest.cc
@@ -115,7 +115,8 @@
     converter_->Convert(audio_bus_.get());
 
     // Render expected audio data.
-    expected_callback_->Render(expected_audio_bus_.get(), 0, 0);
+    expected_callback_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                               expected_audio_bus_.get());
 
     // Zero out unused channels in the expected AudioBus just as AudioConverter
     // would during channel mixing.
diff --git a/media/base/audio_hash_unittest.cc b/media/base/audio_hash_unittest.cc
index ecc37bc..878800d 100644
--- a/media/base/audio_hash_unittest.cc
+++ b/media/base/audio_hash_unittest.cc
@@ -37,7 +37,8 @@
     // audio data, we need to fill each channel manually.
     for (int ch = 0; ch < audio_bus->channels(); ++ch) {
       wrapped_bus->SetChannelData(0, audio_bus->channel(ch));
-      fake_callback_.Render(wrapped_bus.get(), 0, 0);
+      fake_callback_.Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                            wrapped_bus.get());
     }
   }
 
diff --git a/media/base/audio_parameters.h b/media/base/audio_parameters.h
index ea0aa87d..68fbd547 100644
--- a/media/base/audio_parameters.h
+++ b/media/base/audio_parameters.h
@@ -41,6 +41,10 @@
 };
 struct MEDIA_EXPORT ALIGNAS(PARAMETERS_ALIGNMENT) AudioOutputBufferParameters {
   uint32_t frames_skipped;
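+  // Output delay in microseconds.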
+  int64_t delay;
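+  // When |delay| was measured, as microseconds since base::TimeTicks origin.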
+  int64_t delay_timestamp;
 };
 #undef PARAMETERS_ALIGNMENT
 #if defined(OS_WIN)
diff --git a/media/base/audio_renderer_mixer.cc b/media/base/audio_renderer_mixer.cc
index b61a04a..80d32b02 100644
--- a/media/base/audio_renderer_mixer.cc
+++ b/media/base/audio_renderer_mixer.cc
@@ -12,6 +12,7 @@
 #include "base/memory/ptr_util.h"
 #include "base/metrics/histogram_macros.h"
 #include "base/trace_event/trace_event.h"
+#include "media/base/audio_timestamp_helper.h"
 
 namespace media {
 
@@ -162,9 +163,10 @@
   return audio_sink_->CurrentThreadIsRenderingThread();
 }
 
-int AudioRendererMixer::Render(AudioBus* audio_bus,
-                               uint32_t frames_delayed,
-                               uint32_t frames_skipped) {
+int AudioRendererMixer::Render(base::TimeDelta delay,
+                               base::TimeTicks delay_timestamp,
+                               int prior_frames_skipped,
+                               AudioBus* audio_bus) {
   TRACE_EVENT0("audio", "AudioRendererMixer::Render");
   base::AutoLock auto_lock(lock_);
 
@@ -179,6 +181,10 @@
     playing_ = false;
   }
 
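+  // The underlying converters still use a frame-based delay; convert with
+  // the mixer's output sample rate.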
+  uint32_t frames_delayed =
+      AudioTimestampHelper::TimeToFrames(delay, output_params_.sample_rate());
   master_converter_.ConvertWithDelay(frames_delayed, audio_bus);
   return audio_bus->frames();
 }
diff --git a/media/base/audio_renderer_mixer.h b/media/base/audio_renderer_mixer.h
index 1d432d4..d058ebf 100644
--- a/media/base/audio_renderer_mixer.h
+++ b/media/base/audio_renderer_mixer.h
@@ -64,9 +64,10 @@
       std::map<int, std::unique_ptr<LoopbackAudioConverter>>;
 
   // AudioRendererSink::RenderCallback implementation.
-  int Render(AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             AudioBus* audio_bus) override;
   void OnRenderError() override;
 
   bool is_master_sample_rate(int sample_rate) {
diff --git a/media/base/audio_renderer_mixer_input.cc b/media/base/audio_renderer_mixer_input.cc
index b28c2a39..223c351 100644
--- a/media/base/audio_renderer_mixer_input.cc
+++ b/media/base/audio_renderer_mixer_input.cc
@@ -11,6 +11,7 @@
 #include "base/trace_event/trace_event.h"
 #include "media/base/audio_renderer_mixer.h"
 #include "media/base/audio_renderer_mixer_pool.h"
+#include "media/base/audio_timestamp_helper.h"
 
 namespace media {
 
@@ -168,7 +169,11 @@
 double AudioRendererMixerInput::ProvideInput(AudioBus* audio_bus,
                                              uint32_t frames_delayed) {
   TRACE_EVENT0("audio", "AudioRendererMixerInput::ProvideInput");
-  int frames_filled = callback_->Render(audio_bus, frames_delayed, 0);
+  const base::TimeDelta delay =
+      AudioTimestampHelper::FramesToTime(frames_delayed, params_.sample_rate());
+
+  int frames_filled =
+      callback_->Render(delay, base::TimeTicks::Now(), 0, audio_bus);
 
   // AudioConverter expects unfilled frames to be zeroed.
   if (frames_filled < audio_bus->frames()) {
diff --git a/media/base/audio_renderer_mixer_unittest.cc b/media/base/audio_renderer_mixer_unittest.cc
index b7731120..36f3cb6 100644
--- a/media/base/audio_renderer_mixer_unittest.cc
+++ b/media/base/audio_renderer_mixer_unittest.cc
@@ -163,12 +163,14 @@
     }
 
     // Render actual audio data.
-    int frames = mixer_callback_->Render(audio_bus_.get(), 0, 0);
+    int frames = mixer_callback_->Render(
+        base::TimeDelta(), base::TimeTicks::Now(), 0, audio_bus_.get());
     if (frames != audio_bus_->frames())
       return false;
 
     // Render expected audio data (without scaling).
-    expected_callback_->Render(expected_audio_bus_.get(), 0, 0);
+    expected_callback_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                               expected_audio_bus_.get());
 
     if (half_fill_) {
       // In this case, just verify that every frame was initialized, this will
@@ -489,7 +491,8 @@
   const base::TimeDelta kSleepTime = base::TimeDelta::FromMilliseconds(100);
   base::TimeTicks start_time = base::TimeTicks::Now();
   while (!pause_event.IsSignaled()) {
-    mixer_callback_->Render(audio_bus_.get(), 0, 0);
+    mixer_callback_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                            audio_bus_.get());
     base::PlatformThread::Sleep(kSleepTime);
     ASSERT_TRUE(base::TimeTicks::Now() - start_time < kTestTimeout);
   }
@@ -504,7 +507,8 @@
   // Ensure once the input is paused the sink eventually pauses.
   start_time = base::TimeTicks::Now();
   while (!pause_event.IsSignaled()) {
-    mixer_callback_->Render(audio_bus_.get(), 0, 0);
+    mixer_callback_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                            audio_bus_.get());
     base::PlatformThread::Sleep(kSleepTime);
     ASSERT_TRUE(base::TimeTicks::Now() - start_time < kTestTimeout);
   }
diff --git a/media/base/audio_renderer_sink.h b/media/base/audio_renderer_sink.h
index f768b7c..4052112 100644
--- a/media/base/audio_renderer_sink.h
+++ b/media/base/audio_renderer_sink.h
@@ -28,14 +28,15 @@
   class RenderCallback {
    public:
     // Attempts to completely fill all channels of |dest|, returns actual
-    // number of frames filled. |frames_skipped| contains the number of frames
-    // the consumer has skipped, if any.
-    // TODO(jameswest): Change to use the same signature as
-    // AudioOutputStream::AudioSourceCallback::OnMoreData.
-    virtual int Render(AudioBus* dest,
-                       uint32_t frames_delayed,
-                       uint32_t frames_skipped) = 0;
-
+    // number of frames filled. |prior_frames_skipped| contains the number of
+    // frames the consumer has skipped, if any. |delay| is the audio device
+    // output latency, and |delay_timestamp| is the time at which |delay| was
+    // measured.
+    virtual int Render(base::TimeDelta delay,
+                       base::TimeTicks delay_timestamp,
+                       int prior_frames_skipped,
+                       AudioBus* dest) = 0;
+
     // Signals an error has occurred.
     virtual void OnRenderError() = 0;
 
diff --git a/media/base/fake_audio_render_callback.cc b/media/base/fake_audio_render_callback.cc
index 65de118..030883b 100644
--- a/media/base/fake_audio_render_callback.cc
+++ b/media/base/fake_audio_render_callback.cc
@@ -7,6 +7,7 @@
 
 #include <cmath>
 
+#include "media/base/audio_timestamp_helper.h"
 #include "media/base/fake_audio_render_callback.h"
 
 namespace media {
@@ -22,9 +23,14 @@
 
 FakeAudioRenderCallback::~FakeAudioRenderCallback() {}
 
-int FakeAudioRenderCallback::Render(AudioBus* audio_bus,
-                                    uint32_t frames_delayed,
-                                    uint32_t frames_skipped) {
+int FakeAudioRenderCallback::Render(base::TimeDelta delay,
+                                    base::TimeTicks delay_timestamp,
+                                    int prior_frames_skipped,
+                                    AudioBus* audio_bus) {
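+  // This fake has no real output device; assume a nominal 48 kHz rate when
+  // mapping |delay| back to a frame count.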
+  const int kSampleRate = 48000;
+  auto frames_delayed = AudioTimestampHelper::TimeToFrames(delay, kSampleRate);
   return RenderInternal(audio_bus, frames_delayed, volume_);
 }
 
diff --git a/media/base/fake_audio_render_callback.h b/media/base/fake_audio_render_callback.h
index aad6044..5e2a600 100644
--- a/media/base/fake_audio_render_callback.h
+++ b/media/base/fake_audio_render_callback.h
@@ -29,9 +29,10 @@
 
   // Renders a sine wave into the provided audio data buffer.  If |half_fill_|
   // is set, will only fill half the buffer.
-  int Render(AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             AudioBus* audio_bus) override;
   MOCK_METHOD0(OnRenderError, void());
 
   // AudioTransform::ProvideAudioTransformInput implementation.
diff --git a/media/base/fake_audio_renderer_sink.cc b/media/base/fake_audio_renderer_sink.cc
index f21eb71c..aec0325 100644
--- a/media/base/fake_audio_renderer_sink.cc
+++ b/media/base/fake_audio_renderer_sink.cc
@@ -75,12 +75,12 @@
 }
 
 bool FakeAudioRendererSink::Render(AudioBus* dest,
-                                   uint32_t frames_delayed,
+                                   base::TimeDelta delay,
                                    int* frames_written) {
   if (state_ != kPlaying)
     return false;
 
-  *frames_written = callback_->Render(dest, frames_delayed, 0);
+  *frames_written = callback_->Render(delay, base::TimeTicks::Now(), 0, dest);
   return true;
 }
 
diff --git a/media/base/fake_audio_renderer_sink.h b/media/base/fake_audio_renderer_sink.h
index 2a90600b..9aca9590 100644
--- a/media/base/fake_audio_renderer_sink.h
+++ b/media/base/fake_audio_renderer_sink.h
@@ -42,13 +42,13 @@
   bool CurrentThreadIsRenderingThread() override;
 
   // Attempts to call Render() on the callback provided to
-  // Initialize() with |dest| and |frames_delayed|.
+  // Initialize() with |dest| and |delay|.
   // Returns true and sets |frames_written| to the return value of the
   // Render() call.
   // Returns false if this object is in a state where calling Render()
   // should not occur. (i.e., in the kPaused or kStopped state.) The
   // value of |frames_written| is undefined if false is returned.
-  bool Render(AudioBus* dest, uint32_t frames_delayed, int* frames_written);
+  bool Render(AudioBus* dest, base::TimeDelta delay, int* frames_written);
   void OnRenderError();
 
   State state() const { return state_; }
diff --git a/media/base/silent_sink_suspender.cc b/media/base/silent_sink_suspender.cc
index 70b9266e..2f01ed9 100644
--- a/media/base/silent_sink_suspender.cc
+++ b/media/base/silent_sink_suspender.cc
@@ -35,9 +35,10 @@
   fake_sink_.Stop();
 }
 
-int SilentSinkSuspender::Render(AudioBus* dest,
-                                uint32_t frames_delayed,
-                                uint32_t frames_skipped) {
+int SilentSinkSuspender::Render(base::TimeDelta delay,
+                                base::TimeTicks delay_timestamp,
+                                int prior_frames_skipped,
+                                AudioBus* dest) {
   // Lock required since AudioRendererSink::Pause() is not synchronous, we need
   // to discard these calls during the transition to the fake sink.
   base::AutoLock al(transition_lock_);
@@ -52,8 +53,8 @@
   // the audio data for a future transition out of silence.
   if (!dest) {
     DCHECK(is_using_fake_sink_);
-    DCHECK_EQ(frames_delayed, 0u);
-    DCHECK_EQ(frames_skipped, 0u);
+    DCHECK_EQ(delay, base::TimeDelta());
+    DCHECK_EQ(prior_frames_skipped, 0);
 
     // If we have no buffers or a transition is pending, one or more extra
     // Render() calls have occurred in before TransitionSinks() can run, so we
@@ -72,7 +73,7 @@
   }
 
   // Pass-through to client and request rendering.
-  callback_->Render(dest, frames_delayed, frames_skipped);
+  callback_->Render(delay, delay_timestamp, prior_frames_skipped, dest);
 
   // Check for silence or real audio data and transition if necessary.
   if (!dest->AreFramesZero()) {
@@ -123,7 +124,8 @@
     }
     fake_sink_.Start(
         base::Bind(base::IgnoreResult(&SilentSinkSuspender::Render),
-                   base::Unretained(this), nullptr, 0, 0));
+                   base::Unretained(this), base::TimeDelta(),
+                   base::TimeTicks::Now(), 0, nullptr));
   } else {
     fake_sink_.Stop();
 
diff --git a/media/base/silent_sink_suspender.h b/media/base/silent_sink_suspender.h
index 14585d3..82da971a 100644
--- a/media/base/silent_sink_suspender.h
+++ b/media/base/silent_sink_suspender.h
@@ -47,9 +47,10 @@
   ~SilentSinkSuspender() override;
 
   // AudioRendererSink::RenderCallback implementation.
-  int Render(AudioBus* dest,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             AudioBus* dest) override;
   void OnRenderError() override;
 
   bool is_using_fake_sink_for_testing() const { return is_using_fake_sink_; }
diff --git a/media/base/silent_sink_suspender_unittest.cc b/media/base/silent_sink_suspender_unittest.cc
index c69084b4..4434d69 100644
--- a/media/base/silent_sink_suspender_unittest.cc
+++ b/media/base/silent_sink_suspender_unittest.cc
@@ -49,7 +49,9 @@
 
 TEST_F(SilentSinkSuspenderTest, BasicPassthough) {
   temp_bus_->Zero();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_FALSE(temp_bus_->AreFramesZero());
 }
 
@@ -57,7 +59,9 @@
   // Verify a normal Render() doesn't invoke suspend.
   EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
   temp_bus_->Zero();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_FALSE(temp_bus_->AreFramesZero());
   base::RunLoop().RunUntilIdle();
   EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
@@ -65,7 +69,9 @@
   // Mute all audio generated by the callback, this should suspend immediately.
   fake_callback_.set_volume(0);
   temp_bus_->Zero();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_TRUE(temp_bus_->AreFramesZero());
   {
     base::RunLoop run_loop;
@@ -91,9 +97,12 @@
   // not silent.
   fake_callback_.reset();
   std::unique_ptr<AudioBus> true_bus = AudioBus::Create(params_);
-  fake_callback_.Render(true_bus.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                        true_bus.get());
   EXPECT_FALSE(true_bus->AreFramesZero());
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_EQ(memcmp(temp_bus_->channel(0), true_bus->channel(0),
                    temp_bus_->frames() * sizeof(float)),
             0);
@@ -103,11 +112,15 @@
   // Mute all audio generated by the callback, this should suspend immediately.
   fake_callback_.set_volume(0);
   temp_bus_->Zero();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_TRUE(temp_bus_->AreFramesZero());
 
   // A second render should only result in a single Pause() call.
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
 
   EXPECT_CALL(*mock_sink_, Pause());
   base::RunLoop().RunUntilIdle();
@@ -118,7 +131,9 @@
   // Mute all audio generated by the callback, this should suspend immediately.
   fake_callback_.set_volume(0);
   temp_bus_->Zero();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_TRUE(temp_bus_->AreFramesZero());
   EXPECT_CALL(*mock_sink_, Pause());
   base::RunLoop().RunUntilIdle();
@@ -130,9 +145,11 @@
   // Prepare our equality testers.
   fake_callback_.reset();
   std::unique_ptr<AudioBus> true_bus1 = AudioBus::Create(params_);
-  fake_callback_.Render(true_bus1.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                        true_bus1.get());
   std::unique_ptr<AudioBus> true_bus2 = AudioBus::Create(params_);
-  fake_callback_.Render(true_bus2.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                        true_bus2.get());
   EXPECT_NE(memcmp(true_bus1->channel(0), true_bus2->channel(0),
                    true_bus1->frames() * sizeof(float)),
             0);
@@ -140,18 +157,26 @@
   // Reset the fake callback data generation and force two Render() calls before
   // the sink can transition.
   fake_callback_.reset();
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(nullptr, 0, 0));
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(nullptr, 0, 0));
+  EXPECT_EQ(
+      temp_bus_->frames(),
+      suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0, nullptr));
+  EXPECT_EQ(
+      temp_bus_->frames(),
+      suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0, nullptr));
   EXPECT_CALL(*mock_sink_, Play());
   base::RunLoop().RunUntilIdle();
   EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
 
   // Each render after resuming should return one of the non-silent bus.
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_EQ(memcmp(temp_bus_->channel(0), true_bus1->channel(0),
                    temp_bus_->frames() * sizeof(float)),
             0);
-  EXPECT_EQ(temp_bus_->frames(), suspender_.Render(temp_bus_.get(), 0, 0));
+  EXPECT_EQ(temp_bus_->frames(),
+            suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
+                              temp_bus_.get()));
   EXPECT_EQ(memcmp(temp_bus_->channel(0), true_bus2->channel(0),
                    temp_bus_->frames() * sizeof(float)),
             0);
diff --git a/media/blink/webaudiosourceprovider_impl.cc b/media/blink/webaudiosourceprovider_impl.cc
index 9b13535..8d320c8 100644
--- a/media/blink/webaudiosourceprovider_impl.cc
+++ b/media/blink/webaudiosourceprovider_impl.cc
@@ -11,6 +11,7 @@
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/memory/ptr_util.h"
+#include "media/base/audio_timestamp_helper.h"
 #include "media/base/bind_to_current_loop.h"
 #include "third_party/WebKit/public/platform/WebAudioSourceProviderClient.h"
 
@@ -69,9 +70,10 @@
   // AudioRendererSink::RenderCallback implementation.
   // These are forwarders to |renderer_| and are here to allow for a client to
   // get a copy of the rendered audio by SetCopyAudioCallback().
-  int Render(AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             AudioBus* dest) override;
   void OnRenderError() override;
 
   bool IsInitialized() const { return !!renderer_; }
@@ -162,7 +164,8 @@
 
   DCHECK(client_);
   DCHECK_EQ(tee_filter_->channels(), bus_wrapper_->channels());
-  const int frames = tee_filter_->Render(bus_wrapper_.get(), 0, 0);
+  const int frames = tee_filter_->Render(
+      base::TimeDelta(), base::TimeTicks::Now(), 0, bus_wrapper_.get());
   if (frames < incoming_number_of_frames)
     bus_wrapper_->ZeroFramesPartial(frames, incoming_number_of_frames - frames);
 
@@ -260,7 +263,8 @@
 }
 
 int WebAudioSourceProviderImpl::RenderForTesting(AudioBus* audio_bus) {
-  return tee_filter_->Render(audio_bus, 0, 0);
+  return tee_filter_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                             audio_bus);
 }
 
 void WebAudioSourceProviderImpl::OnSetFormat() {
@@ -272,15 +276,19 @@
   client_->setFormat(tee_filter_->channels(), tee_filter_->sample_rate());
 }
 
-int WebAudioSourceProviderImpl::TeeFilter::Render(AudioBus* audio_bus,
-                                                  uint32_t frames_delayed,
-                                                  uint32_t frames_skipped) {
+int WebAudioSourceProviderImpl::TeeFilter::Render(
+    base::TimeDelta delay,
+    base::TimeTicks delay_timestamp,
+    int prior_frames_skipped,
+    AudioBus* audio_bus) {
   DCHECK(IsInitialized());
 
-  const int num_rendered_frames =
-      renderer_->Render(audio_bus, frames_delayed, frames_skipped);
+  const int num_rendered_frames = renderer_->Render(
+      delay, delay_timestamp, prior_frames_skipped, audio_bus);
 
   if (!copy_audio_bus_callback_.is_null()) {
+    const int64_t frames_delayed =
+        AudioTimestampHelper::TimeToFrames(delay, sample_rate_);
     std::unique_ptr<AudioBus> bus_copy =
         AudioBus::Create(audio_bus->channels(), audio_bus->frames());
     audio_bus->CopyTo(bus_copy.get());
diff --git a/media/blink/webaudiosourceprovider_impl_unittest.cc b/media/blink/webaudiosourceprovider_impl_unittest.cc
index d8c4b58..4a6d22f 100644
--- a/media/blink/webaudiosourceprovider_impl_unittest.cc
+++ b/media/blink/webaudiosourceprovider_impl_unittest.cc
@@ -215,7 +215,8 @@
 
   // Ensure volume adjustment is working.
   fake_callback_.reset();
-  fake_callback_.Render(bus2.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                        bus2.get());
   bus2->Scale(kTestVolume);
 
   fake_callback_.reset();
@@ -234,9 +235,11 @@
   // configuring the fake callback to return half the data.  After these calls
   // bus1 is full of junk data, and bus2 is partially filled.
   wasp_impl_->SetVolume(1);
-  fake_callback_.Render(bus1.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                        bus1.get());
   fake_callback_.reset();
-  fake_callback_.Render(bus2.get(), 0, 0);
+  fake_callback_.Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
+                        bus2.get());
   bus2->ZeroFramesPartial(bus2->frames() / 2,
                           bus2->frames() - bus2->frames() / 2);
   fake_callback_.reset();
diff --git a/media/renderers/audio_renderer_impl.cc b/media/renderers/audio_renderer_impl.cc
index b7c45dd..c924195 100644
--- a/media/renderers/audio_renderer_impl.cc
+++ b/media/renderers/audio_renderer_impl.cc
@@ -754,12 +754,13 @@
          (buffer->timestamp() + buffer->duration()) < start_timestamp_;
 }
 
-int AudioRendererImpl::Render(AudioBus* audio_bus,
-                              uint32_t frames_delayed,
-                              uint32_t frames_skipped) {
+int AudioRendererImpl::Render(base::TimeDelta delay,
+                              base::TimeTicks delay_timestamp,
+                              int prior_frames_skipped,
+                              AudioBus* audio_bus) {
   const int frames_requested = audio_bus->frames();
-  DVLOG(4) << __func__ << " frames_delayed:" << frames_delayed
-           << " frames_skipped:" << frames_skipped
+  DVLOG(4) << __func__ << " delay:" << delay
+           << " prior_frames_skipped:" << prior_frames_skipped
            << " frames_requested:" << frames_requested;
 
   int frames_written = 0;
@@ -767,6 +768,11 @@
     base::AutoLock auto_lock(lock_);
     last_render_time_ = tick_clock_->NowTicks();
 
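+    // The audio clock below does frame-based bookkeeping; convert the
+    // time-based delay using the output sample rate.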
+    int64_t frames_delayed = AudioTimestampHelper::TimeToFrames(
+        delay, audio_parameters_.sample_rate());
+
     if (!stop_rendering_time_.is_null()) {
       audio_clock_->CompensateForSuspendedWrites(
           last_render_time_ - stop_rendering_time_, frames_delayed);
diff --git a/media/renderers/audio_renderer_impl.h b/media/renderers/audio_renderer_impl.h
index e93a18f2..f7915d31 100644
--- a/media/renderers/audio_renderer_impl.h
+++ b/media/renderers/audio_renderer_impl.h
@@ -151,11 +151,12 @@
   // Render() updates the pipeline's playback timestamp. If Render() is
   // not called at the same rate as audio samples are played, then the reported
   // timestamp in the pipeline will be ahead of the actual audio playback. In
-  // this case |frames_delayed| should be used to indicate when in the future
-  // should the filled buffer be played.
+  // this case |delay| should be used to indicate when in the future the
+  // filled buffer will be played.
-  int Render(AudioBus* audio_bus,
-             uint32_t frames_delayed,
-             uint32_t frames_skipped) override;
+  int Render(base::TimeDelta delay,
+             base::TimeTicks delay_timestamp,
+             int prior_frames_skipped,
+             AudioBus* dest) override;
   void OnRenderError() override;
 
   // Helper methods that schedule an asynchronous read from the decoder as long
diff --git a/media/renderers/audio_renderer_impl_unittest.cc b/media/renderers/audio_renderer_impl_unittest.cc
index db031e96..b677889 100644
--- a/media/renderers/audio_renderer_impl_unittest.cc
+++ b/media/renderers/audio_renderer_impl_unittest.cc
@@ -341,16 +341,16 @@
   // buffer. Returns true if and only if all of |requested_frames| were able
   // to be consumed.
   bool ConsumeBufferedData(OutputFrames requested_frames,
-                           uint32_t frames_delayed) {
+                           base::TimeDelta delay) {
     std::unique_ptr<AudioBus> bus =
         AudioBus::Create(kChannels, requested_frames.value);
     int frames_read = 0;
-    EXPECT_TRUE(sink_->Render(bus.get(), frames_delayed, &frames_read));
+    EXPECT_TRUE(sink_->Render(bus.get(), delay, &frames_read));
     return frames_read == requested_frames.value;
   }
 
   bool ConsumeBufferedData(OutputFrames requested_frames) {
-    return ConsumeBufferedData(requested_frames, 0);
+    return ConsumeBufferedData(requested_frames, base::TimeDelta());
   }
 
   base::TimeTicks ConvertMediaTime(base::TimeDelta timestamp,
@@ -812,7 +812,7 @@
   std::unique_ptr<AudioBus> bus = AudioBus::Create(hardware_params_);
   int frames_read = 0;
   for (int i = 0; i < std::floor(kBuffers); ++i) {
-    EXPECT_TRUE(sink_->Render(bus.get(), 0, &frames_read));
+    EXPECT_TRUE(sink_->Render(bus.get(), base::TimeDelta(), &frames_read));
     EXPECT_EQ(frames_read, bus->frames());
     for (int j = 0; j < bus->frames(); ++j)
       ASSERT_FLOAT_EQ(0.0f, bus->channel(0)[j]);
@@ -821,7 +821,7 @@
   }
 
   // Verify the last buffer is half silence and half real data.
-  EXPECT_TRUE(sink_->Render(bus.get(), 0, &frames_read));
+  EXPECT_TRUE(sink_->Render(bus.get(), base::TimeDelta(), &frames_read));
   EXPECT_EQ(frames_read, bus->frames());
   const int zero_frames =
       bus->frames() * (kBuffers - static_cast<int>(kBuffers));
@@ -840,20 +840,20 @@
   // Verify the first buffer is real data.
   int frames_read = 0;
   std::unique_ptr<AudioBus> bus = AudioBus::Create(hardware_params_);
-  EXPECT_TRUE(sink_->Render(bus.get(), 0, &frames_read));
+  EXPECT_TRUE(sink_->Render(bus.get(), base::TimeDelta(), &frames_read));
   EXPECT_NE(0, frames_read);
   for (int i = 0; i < bus->frames(); ++i)
     ASSERT_NE(0.0f, bus->channel(0)[i]);
 
   // Verify after suspend we get silence.
   renderer_->OnSuspend();
-  EXPECT_TRUE(sink_->Render(bus.get(), 0, &frames_read));
+  EXPECT_TRUE(sink_->Render(bus.get(), base::TimeDelta(), &frames_read));
   EXPECT_EQ(0, frames_read);
 
   // Verify after resume we get audio.
   bus->Zero();
   renderer_->OnResume();
-  EXPECT_TRUE(sink_->Render(bus.get(), 0, &frames_read));
+  EXPECT_TRUE(sink_->Render(bus.get(), base::TimeDelta(), &frames_read));
   EXPECT_NE(0, frames_read);
   for (int i = 0; i < bus->frames(); ++i)
     ASSERT_NE(0.0f, bus->channel(0)[i]);
@@ -1042,7 +1042,7 @@
       std::round(delay_frames * kOutputMicrosPerFrame));
 
   frames_to_consume.value = frames_buffered().value / 16;
-  EXPECT_TRUE(ConsumeBufferedData(frames_to_consume, delay_frames));
+  EXPECT_TRUE(ConsumeBufferedData(frames_to_consume, delay_time));
 
   // Verify time is adjusted for the current delay.
   current_time = tick_clock_->NowTicks() + delay_time;
diff --git a/ppapi/shared_impl/ppb_audio_shared.cc b/ppapi/shared_impl/ppb_audio_shared.cc
index 6ae632a..9beed72 100644
--- a/ppapi/shared_impl/ppb_audio_shared.cc
+++ b/ppapi/shared_impl/ppb_audio_shared.cc
@@ -217,21 +217,25 @@
 }
 
 void PPB_Audio_Shared::Run() {
-  int pending_data = 0;
-  while (sizeof(pending_data) ==
-         socket_->Receive(&pending_data, sizeof(pending_data))) {
+  int control_signal = 0;
+  while (sizeof(control_signal) ==
+         socket_->Receive(&control_signal, sizeof(control_signal))) {
     // |buffer_index_| must track the number of Receive() calls.  See the Send()
     // call below for why this is important.
     ++buffer_index_;
-    if (pending_data < 0)
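+    // A negative value signals this thread to exit its run loop.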
+    if (control_signal < 0)
       break;
 
     {
       TRACE_EVENT0("audio", "PPB_Audio_Shared::FireRenderCallback");
-      PP_TimeDelta latency =
-          static_cast<double>(pending_data) / bytes_per_second_;
-      callback_.Run(
-          client_buffer_.get(), client_buffer_size_bytes_, latency, user_data_);
+      media::AudioOutputBuffer* buffer =
+          reinterpret_cast<media::AudioOutputBuffer*>(shared_memory_->memory());
+      base::TimeDelta delay =
+          base::TimeDelta::FromMicroseconds(buffer->params.delay);
+
+      callback_.Run(client_buffer_.get(), client_buffer_size_bytes_,
+                    delay.InSecondsF(), user_data_);
     }
 
     // Deinterleave the audio data into the shared memory as floats.
diff --git a/third_party/WebKit/LayoutTests/editing/selection/extend/go-out-of-readonly-textarea.html b/third_party/WebKit/LayoutTests/editing/selection/extend/go-out-of-readonly-textarea.html
new file mode 100644
index 0000000..72f7e4c
--- /dev/null
+++ b/third_party/WebKit/LayoutTests/editing/selection/extend/go-out-of-readonly-textarea.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<script src="../../../resources/testharness.js"></script>
+<script src="../../../resources/testharnessreport.js"></script>
+<script src="../../assert_selection.js"></script>
+<script>
+test(() => {
+  assert_selection(
+    '<div><textarea readonly="readonly">|foo</textarea></div>',
+    selection => selection.extend(selection.document.querySelector('div'), 0),
+    '<div><textarea readonly="readonly">|foo</textarea></div>');
+});
+</script>
diff --git a/third_party/WebKit/Source/core/editing/spellcheck/SpellChecker.cpp b/third_party/WebKit/Source/core/editing/spellcheck/SpellChecker.cpp
index 9c18fd8..09180c0 100644
--- a/third_party/WebKit/Source/core/editing/spellcheck/SpellChecker.cpp
+++ b/third_party/WebKit/Source/core/editing/spellcheck/SpellChecker.cpp
@@ -72,10 +72,6 @@
   return isHTMLTextAreaElement(textControl);
 }
 
-bool isSelectionInTextFormControl(const VisibleSelection& selection) {
-  return !!enclosingTextControl(selection.start());
-}
-
 static bool isSpellCheckingEnabledFor(const Position& position) {
   if (position.isNull())
     return false;
@@ -900,16 +896,9 @@
 
   VisibleSelection newAdjacentWords;
   const VisibleSelection newSelection = frame().selection().selection();
-  if (isSelectionInTextFormControl(newSelection)) {
-    const Position newStart = newSelection.start();
-    newAdjacentWords.setWithoutValidation(
-        TextControlElement::startOfWord(newStart),
-        TextControlElement::endOfWord(newStart));
-  } else {
-    if (newSelection.isContentEditable()) {
-      newAdjacentWords =
-          createVisibleSelection(selectWord(newSelection.visibleStart()));
-    }
+  if (newSelection.isContentEditable()) {
+    newAdjacentWords =
+        createVisibleSelection(selectWord(newSelection.visibleStart()));
   }
 
   // When typing we check spelling elsewhere, so don't redo it here.
diff --git a/third_party/WebKit/Source/core/html/TextControlElement.h b/third_party/WebKit/Source/core/html/TextControlElement.h
index 180eb09..98247e0 100644
--- a/third_party/WebKit/Source/core/html/TextControlElement.h
+++ b/third_party/WebKit/Source/core/html/TextControlElement.h
@@ -137,6 +137,7 @@
 
   String directionForFormData() const;
 
+  // TODO(yoichio): Remove these functions.
   // These functions don't cause synchronous layout and SpellChecker uses
   // them to improve performance.
   // Passed |Position| must point inside of a text form control.
diff --git a/third_party/WebKit/Source/core/loader/resource/FontResource.cpp b/third_party/WebKit/Source/core/loader/resource/FontResource.cpp
index 15569e19..d17d3a9 100644
--- a/third_party/WebKit/Source/core/loader/resource/FontResource.cpp
+++ b/third_party/WebKit/Source/core/loader/resource/FontResource.cpp
@@ -110,7 +110,7 @@
 }
 
 void FontResource::startLoadLimitTimers() {
-  DCHECK(isLoading());
+  CHECK(isLoading());
   DCHECK_EQ(m_loadLimitState, LoadNotStarted);
   m_loadLimitState = UnderLimit;
   m_fontLoadShortLimitTimer.startOneShot(fontLoadWaitShortLimitSec,
@@ -144,6 +144,7 @@
 }
 
 void FontResource::fontLoadShortLimitCallback(TimerBase*) {
+  CHECK(isLoading());
   if (!isLoading())
     return;
   DCHECK_EQ(m_loadLimitState, UnderLimit);
@@ -154,6 +155,7 @@
 }
 
 void FontResource::fontLoadLongLimitCallback(TimerBase*) {
+  CHECK(isLoading());
   if (!isLoading())
     return;
   DCHECK_EQ(m_loadLimitState, ShortLimitExceeded);
diff --git a/ui/file_manager/video_player/js/media_controls.js b/ui/file_manager/video_player/js/media_controls.js
index 346ddcd8..de0b827 100644
--- a/ui/file_manager/video_player/js/media_controls.js
+++ b/ui/file_manager/video_player/js/media_controls.js
@@ -8,6 +8,70 @@
  */
 
 /**
+ * Models the volume slider and mute switch state and their user interaction.
+ * @constructor
+ * @struct
+ */
+function VolumeModel() {
+  /**
+   * @type {boolean}
+   */
+  this.isMuted_ = false;
+
+  /**
+   * The volume level in [0..1].
+   * @type {number}
+   */
+  this.volume_ = 0.5;
+}
+
+/**
+ * After unmuting, the volume is restored to at least this value so that the
+ * mute button never appears to have no effect.
+ */
+VolumeModel.MIN_VOLUME_AFTER_UNMUTE = 0.01;
+
+/**
+ * @return {number} The volume level to apply to a media element; 0 if muted.
+ */
+VolumeModel.prototype.getMediaVolume = function() {
+  return this.isMuted_ ? 0 : this.volume_;
+};
+
+/**
+ * Handles a change of the volume level slider.
+ * @param {number} value New position of the slider, in [0..1].
+ */
+VolumeModel.prototype.onVolumeChanged = function(value) {
+  if (value == 0) {
+    this.isMuted_ = true;
+  } else {
+    this.isMuted_ = false;
+    this.volume_ = value;
+  }
+};
+
+/**
+ * Toggles the mute state.
+ */
+VolumeModel.prototype.toggleMute = function() {
+  this.isMuted_ = !this.isMuted_;
+  if (!this.isMuted_) {
+    this.volume_ = Math.max(VolumeModel.MIN_VOLUME_AFTER_UNMUTE, this.volume_);
+  }
+};
+
+/**
+ * Sets the state of the model.
+ * @param {number} volume The volume level, in [0..1].
+ * @param {boolean} mute Whether to mute the sound.
+ */
+VolumeModel.prototype.set = function(volume, mute) {
+  this.volume_ = volume;
+  this.isMuted_ = mute;
+};
+
+/**
  * @param {!HTMLElement} containerElement The container for the controls.
  * @param {function(Event)} onMediaError Function to display an error message.
  * @constructor
@@ -24,7 +88,11 @@
   this.onMediaProgressBound_ = this.onMediaProgress_.bind(this);
   this.onMediaError_ = onMediaError || function() {};
 
-  this.savedVolume_ = 1;  // 100% volume.
+  /**
+   * @type {!VolumeModel}
+   * @private
+   */
+  this.volumeModel_ = new VolumeModel();
 
   /**
    * @type {HTMLElement}
@@ -493,6 +561,8 @@
 
 MediaControls.KEY_NORMALIZED_VOLUME =
     MediaControls.STORAGE_PREFIX + 'normalized-volume';
+MediaControls.KEY_MUTED =
+    MediaControls.STORAGE_PREFIX + 'muted';
 
 /**
  * @param {HTMLElement=} opt_parent Parent element for the controls.
@@ -522,18 +592,24 @@
 };
 
 MediaControls.prototype.loadVolumeControlState = function() {
-  chrome.storage.local.get([MediaControls.KEY_NORMALIZED_VOLUME],
+  chrome.storage.local.get([MediaControls.KEY_NORMALIZED_VOLUME,
+                            MediaControls.KEY_MUTED],
       function(retrieved) {
         var normalizedVolume = (MediaControls.KEY_NORMALIZED_VOLUME
                                  in retrieved)
             ? retrieved[MediaControls.KEY_NORMALIZED_VOLUME] : 1;
-        this.volume_.value = this.volume_.max * normalizedVolume;
+        var isMuted = (MediaControls.KEY_MUTED in retrieved)
+            ? retrieved[MediaControls.KEY_MUTED] : false;
+        this.volumeModel_.set(normalizedVolume, isMuted);
+        this.reflectVolumeToUi_();
       }.bind(this));
 };
 
 MediaControls.prototype.saveVolumeControlState = function() {
   var valuesToStore = {};
-  valuesToStore[MediaControls.KEY_NORMALIZED_VOLUME] = this.media_.volume;
+  valuesToStore[MediaControls.KEY_NORMALIZED_VOLUME] =
+      this.volumeModel_.volume_;
+  valuesToStore[MediaControls.KEY_MUTED] = this.volumeModel_.isMuted_;
   chrome.storage.local.set(valuesToStore);
 };
 
@@ -542,17 +618,9 @@
  * @private
  */
 MediaControls.prototype.onSoundButtonClick_ = function() {
-  if (this.media_.volume == 0) {
-    this.volume_.value = (this.savedVolume_ || 1) * this.volume_.max;
-    this.soundButton_.setAttribute('aria-label',
-        str('MEDIA_PLAYER_MUTE_BUTTON_LABEL'));
-  } else {
-    this.savedVolume_ = this.media_.volume;
-    this.volume_.value = 0;
-    this.soundButton_.setAttribute('aria-label',
-        str('MEDIA_PLAYER_UNMUTE_BUTTON_LABEL'));
-  }
-  this.onVolumeChange_(this.volume_.ratio);
+  this.volumeModel_.toggleMute();
+  this.saveVolumeControlState();
+  this.reflectVolumeToUi_();
 };
 
 /**
@@ -568,6 +636,23 @@
 };
 
 /**
+ * Reflects volume model to the UI elements.
+ * @private
+ */
+MediaControls.prototype.reflectVolumeToUi_ = function() {
+  this.soundButton_.setAttribute('level',
+      MediaControls.getVolumeLevel_(this.volumeModel_.getMediaVolume()));
+  this.soundButton_.setAttribute('aria-label', this.volumeModel_.isMuted_
+                                 ? str('MEDIA_PLAYER_UNMUTE_BUTTON_LABEL')
+                                 : str('MEDIA_PLAYER_MUTE_BUTTON_LABEL'));
+  this.volume_.value = this.volumeModel_.getMediaVolume() * this.volume_.max;
+  if (this.media_) {
+    this.media_.volume = this.volumeModel_.getMediaVolume();
+  }
+};
+
+/**
+ * Handles change event of the volume slider.
  * @param {number} value Volume [0..1].
  * @private
  */
@@ -575,12 +660,9 @@
   if (!this.media_)
     return;  // Media is detached.
 
-  this.media_.volume = value;
-  this.soundButton_.setAttribute('level', MediaControls.getVolumeLevel_(value));
-  this.soundButton_.setAttribute('aria-label',
-      value === 0 ? str('MEDIA_PLAYER_UNMUTE_BUTTON_LABEL')
-                  : str('MEDIA_PLAYER_MUTE_BUTTON_LABEL'));
+  this.volumeModel_.onVolumeChanged(value);
   this.saveVolumeControlState();
+  this.reflectVolumeToUi_();
 };
 
 /**
@@ -588,7 +670,7 @@
  */
 MediaControls.prototype.onVolumeDrag_ = function() {
   if (this.media_.volume !== 0) {
-    this.savedVolume_ = this.media_.volume;
+    this.volumeModel_.onVolumeChanged(this.media_.volume);
   }
 };
 
@@ -680,10 +762,10 @@
   this.onMediaDuration_();
   this.onMediaPlay_(this.isPlaying());
   this.onMediaProgress_();
-  if (this.volume_) {
-    /* Copy the user selected volume to the new media element. */
-    this.savedVolume_ = this.media_.volume = this.volume_.ratio;
-  }
+
+  // Reflect the user-specified volume to the media element.
+  this.media_.volume = this.volumeModel_.getMediaVolume();
+
   if (this.media_.textTracks && this.media_.textTracks.length > 0) {
     this.attachTextTrack_(this.media_.textTracks[0]);
   } else {