blob: 58d7faa5a03c688da589466d2c640a268b189e1f [file] [log] [blame]
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import logging

from common import chrome_proxy_metrics as metrics
from telemetry.core import exceptions
# NOTE(review): the extracted source read "from import page_test" (module name
# lost in extraction); telemetry's page_test lived at telemetry.page in this
# era of the codebase — confirm against the checkout.
from telemetry.page import page_test
def WaitForViaHeader(tab, url=""):
  """Wait until responses start coming back with the Chrome Proxy via header.

  Poll |url| in |tab| until the Chrome Proxy via header is present in a
  response.

  This function is useful when testing with the Data Saver API, since Chrome
  won't actually start sending requests to the Data Reduction Proxy until the
  Data Saver API fetch completes. This function can be used to wait for the
  Data Saver API fetch to complete.

  Args:
    tab: The telemetry tab in which to poll for the via header.
    url: The URL to fetch repeatedly until a via header is observed.
  """
  # Load a small data: URL page that defines PollDRPCheck(url, wanted_via).
  # Each call issues an XHR for |url| and records whether a response carried
  # the wanted via header in window.via_header_found.
  # NOTE(review): the extraction dropped several lines of this JS (the HTML
  # wrapper, the xhr open/send calls, and the closing braces); they have been
  # reconstructed to match the surviving lines — verify against upstream.
  tab.Navigate('data:text/html;base64,%s' % base64.b64encode(
      '<html><body><script>'
      'window.via_header_found = false;'
      'function PollDRPCheck(url, wanted_via) {'
      '  if (via_header_found) { return true; }'
      '  try {'
      '    var xmlhttp = new XMLHttpRequest();'
      '    xmlhttp.onload = function(e) {'
      # Store the last response received for debugging, this will be shown
      # in telemetry dumps if the request fails or times out.
      '      window.last_xhr_response_headers ='
      '          xmlhttp.getAllResponseHeaders();'
      '      var via = xmlhttp.getResponseHeader("via");'
      '      if (via && via.indexOf(wanted_via) != -1) {'
      '        window.via_header_found = true;'
      '      }'
      '    };'
      '    xmlhttp.open("GET", url, true);'
      '    xmlhttp.send();'
      '  } catch (err) {'
      '    /* Return normally if the xhr request failed. */'
      '  }'
      '  return false;'
      '}'
      '</script>'
      'Waiting for Chrome to start using the DRP...'
      '</body></html>'))

  # Ensure the page has finished loading before attempting the DRP check.
  tab.WaitForJavaScriptExpression('performance.timing.loadEventEnd', 60)

  expected_via_header = metrics.CHROME_PROXY_VIA_HEADER
  if ChromeProxyValidation.extra_via_header:
    expected_via_header = ChromeProxyValidation.extra_via_header

  # Poll PollDRPCheck() until it reports the via header, or time out after
  # 60 seconds.
  tab.WaitForJavaScriptExpression(
      'PollDRPCheck("%s", "%s")' % (url, expected_via_header), 60)
class ChromeProxyValidation(page_test.PageTest):
  """Base class for all chrome proxy correctness measurements."""

  # Value of the extra via header. |None| if no extra via header is expected.
  extra_via_header = None

  def __init__(self, restart_after_each_page=False, metrics=None,
               clear_cache_before_each_run=True):
    """Initializes the validation.

    Args:
      restart_after_each_page: Whether the browser should be restarted after
          each page (forwarded to the PageTest base class).
      metrics: The chrome proxy metrics object used to Start/Stop measurement
          around each page load. Asserted non-None before use.
      clear_cache_before_each_run: Whether to clear the browser cache before
          each run.
    """
    # NOTE(review): the extraction truncated this signature and the super()
    # call; the keyword names below follow the PageTest API of this era —
    # confirm against the telemetry checkout.
    super(ChromeProxyValidation, self).__init__(
        needs_browser_restart_after_each_page=restart_after_each_page,
        clear_cache_before_each_run=clear_cache_before_each_run)
    self._metrics = metrics
    self._page = None
    # Whether requests should go through the data reduction proxy; tests may
    # turn this off via DisableChromeProxy().
    self._is_chrome_proxy_enabled = True

  def CustomizeBrowserOptions(self, options):
    # Enable the chrome proxy (data reduction proxy).
    # NOTE(review): the extraction dropped the actual browser-arg lines; the
    # flags below are reconstructed from the surviving comments — verify.
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
    self._is_chrome_proxy_enabled = True

    # Disable quic option, otherwise request headers won't be visible.
    options.AppendExtraBrowserArgs('--disable-quic')

  def DisableChromeProxy(self):
    """Stops treating subsequent page loads as proxied."""
    self._is_chrome_proxy_enabled = False

  def WillNavigateToPage(self, page, tab):
    if self._is_chrome_proxy_enabled:
      # Wait until Chrome is actually sending requests through the DRP before
      # starting measurement, so the metrics see proxied traffic.
      WaitForViaHeader(tab)

    if self.clear_cache_before_each_run:
      tab.ClearCache(force=True)

    assert self._metrics
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    self._page = page
    # Wait for the load event.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    assert self._metrics
    self._metrics.Stop(page, tab)
    if ChromeProxyValidation.extra_via_header:
      # NOTE(review): the metrics method name was lost in extraction; only
      # its argument list (tab, results, extra_via_header) survived — verify.
      self._metrics.AddResultsForExtraViaHeader(
          tab, results, ChromeProxyValidation.extra_via_header)
    self.AddResults(tab, results)

  def AddResults(self, tab, results):
    """Subclasses add their proxy-specific results here."""
    raise NotImplementedError

  def StopBrowserAfterPage(self, browser, page):  # pylint: disable=W0613
    # Restart the browser only when the page explicitly asks for it.
    if hasattr(page, 'restart_after') and page.restart_after:
      return True
    return False