# blob: 02f0dfa4839d6d5dd32e171a593c3c9199b0ffeb [file] [log] [blame]
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
# Regex excluding generated-test output directories from isolation, e.g.
# out/ReleaseX64/generated_tests or xcodebuild\Debug\generated_tests.
# Raw string: '\w' in a plain string is an invalid escape sequence
# (SyntaxWarning in modern Python); the pattern value is unchanged.
BLACKLIST = r'^(out|xcodebuild)[/\\](Release|Debug|Product)\w*[/\\]generated_tests'
# TODO(athom): move to third_party when swarming_client.path has a setter
SWARMING_CLIENT_PATH = 'tools/swarming_client'
# Pinned revision of the swarming client checked out under the sdk solution.
SWARMING_CLIENT_REV = '88229872dd17e71658fe96763feaa77915d8cbd6'
# NOTE(review): the scrape dropped the dict headers for these entries; the
# names are reconstructed from the CHROME_PATH_ARGUMENT[...] and
# FIREFOX_PATH_ARGUMENT[...] lookups later in this file -- confirm against
# the original recipe module.
# Maps a builder's system fragment to the test.py flag pointing at the
# pinned Chrome binary installed into [sdk root]/browsers from CIPD.
CHROME_PATH_ARGUMENT = {
  'linux': '--chrome=browsers/chrome/google-chrome',
  'mac': '--chrome=browsers/Google Chrome.app/Contents/MacOS/Google Chrome',
  'win7': '--chrome=browsers\\Chrome\\Application\\chrome.exe',
  'win10': '--chrome=browsers\\Chrome\\Application\\chrome.exe',
  'win': '--chrome=browsers\\Chrome\\Application\\chrome.exe'
}

# Same mapping for the pinned Firefox binary.
FIREFOX_PATH_ARGUMENT = {
  'linux': '--firefox=browsers/firefox/firefox',
  'mac': '--firefox=browsers/Firefox.app/Contents/MacOS/firefox',
  'win7': '--firefox=browsers\\firefox\\firefox.exe',
  'win10': '--firefox=browsers\\firefox\\firefox.exe',
  'win': '--firefox=browsers\\firefox\\firefox.exe'
}
class DartApi(recipe_api.RecipeApi):
"""Recipe module for code commonly used in dart recipes.

Shouldn't be used elsewhere.
"""
def checkout(self, clobber=False):
"""Checks out the dart code and prepares it for building."""
# NOTE(review): this block is truncated in the scrape -- the gclient solution
# lookup, the swarming-client revision assignment, and the clobber command
# are partially missing; confirm against the original recipe module.
sdk =[0]
# Pins the swarming client checkout inside the sdk solution's custom_deps.
sdk.custom_deps['sdk/%s' % SWARMING_CLIENT_PATH] = \
with self.m.context(cwd=self.m.path['cache'].join('builder'),
self.m.bot_update.ensure_checkout(with_branch_heads=True, with_tags=True)
with self.m.context(cwd=self.m.path['checkout']):
if clobber:
self.m.path['checkout'].join('tools', ''))
def get_secret(self, name):
"""Decrypts the specified secret and returns the location of the result"""
# NOTE(review): truncated scrape -- the cipd ensure call is missing its
# receiver line, and the line below fuses the file_name assignment with the
# tail of a gsutil download call; confirm against the original module.
cloudkms_dir = self.m.path['start_dir'].join('cloudkms')
{'infra/tools/luci/cloudkms/${platform}': 'latest'})
with self.m.context(cwd=self.m.path['cleanup']):
file_name = '%s.encrypted' % name'dart-ci-credentials', file_name, file_name)
executable_suffix = '.exe' if == 'win' else ''
secret_key = self.m.path['cleanup'].join('%s.key' % name)
# Decrypts the downloaded blob with the dart-ci KMS key ring.
self.m.step('cloudkms get key',
[cloudkms_dir.join('cloudkms%s' % executable_suffix), 'decrypt',
'-input', file_name,
'-output', secret_key, 'dart-ci/us-central1/dart-ci/dart-ci'])
return secret_key
def kill_tasks(self):
"""Kills leftover tasks from previous runs or steps."""
# Best-effort cleanup of stray browser / VS build processes between steps.
# NOTE(review): the trailing comma suggests a dropped keyword argument
# (presumably ok_ret) -- confirm against the original recipe module.
self.m.python('kill processes',
self.m.path['checkout'].join('tools', ''),
args=['--kill_browsers=True', '--kill_vsbuild=True'],
def build(self, build_args=[], isolate=None, name='build dart'):
"""Builds dart using the specified build_args
and optionally isolates the sdk for testing using the specified isolate.
If an isolate is specified, it returns the hash of the isolated archive.
"""
# NOTE(review): the mutable default build_args=[] is never mutated (a new
# list is built below), so it is safe here. The try: opening the build step
# and the step invocation itself appear truncated in this scrape.
build_args = build_args + ['--no-start-goma', '-j200']
with self.m.context(cwd=self.m.path['checkout']):
with self.m.depot_tools.on_path():
build_exit_status = None
self.m.path['checkout'].join('tools', ''),
timeout=20 * 60)
build_exit_status = 0
except self.m.step.StepTimeout as e:
# Convert a timeout into a regular StepFailure with a clearer message.
raise self.m.step.StepFailure('Step "%s" timed out after 20 minutes' % name)
except self.m.step.StepFailure as e:
build_exit_status = e.retcode
raise e
if isolate is not None:
# Copy the *.isolate files to the sdk root so their relative paths
# resolve, then archive the fileset with the swarming isolate client.
bots_path = self.m.path['checkout'].join('tools', 'bots')
isolate_paths = self.m.file.glob_paths("find isolate files", bots_path, '*.isolate',
for path in isolate_paths:
self.m.file.copy('copy %s to sdk root' % path.pieces[-1],
step_result = self.m.python(
'upload testing isolate',
args= ['archive',
'--ignore_broken_items', # TODO(athom) find a way to avoid that
'-i%s' % self.m.path['checkout'].join('%s.isolate' % isolate),
'-s%s' % self.m.path['checkout'].join('%s.isolated' % isolate)],
# The isolate hash is the first 40 chars of the archive command's stdout.
isolate_hash = step_result.stdout.strip()[:40]
step_result.presentation.step_text = 'isolate hash: %s' % isolate_hash
return isolate_hash
def upload_isolate(self, isolate_fileset):
"""Builds an isolate"""
# NOTE(review): the condition below is corrupted in this scrape (it fuses an
# 'if' with the tail of a properties.get call) -- it probably short-circuited
# when the fileset was inherited from a parent builder; confirm upstream.
if isolate_fileset =='parent_fileset_name', None):
step_result = self.m.python(
'upload testing fileset %s' % isolate_fileset,
args= ['archive',
'--blacklist=%s' % BLACKLIST,
'--ignore_broken_items', # TODO(athom) find a way to avoid that
'-i%s' % self.m.path['checkout'].join('%s' % isolate_fileset),
'-s%s' % self.m.path['checkout'].join('%s.isolated' % isolate_fileset)],
# First 40 chars of stdout are the isolate hash of the archived fileset.
isolate_hash = step_result.stdout.strip()[:40]
step_result.presentation.step_text = 'swarming fileset hash: %s' % isolate_hash
return isolate_hash
def download_parent_isolate(self):
# Downloads the fileset isolated by the parent builder (passed through the
# parent_fileset/parent_fileset_name properties) into the cleanup dir, and
# repoints path['checkout'] there so later steps run against it.
# NOTE(review): the tail of the download call is truncated in this scrape.
self.m.path['checkout'] = self.m.path['cleanup']
isolate_hash =['parent_fileset']
fileset_name =['parent_fileset_name']
with self.m.context(cwd=self.m.path['cleanup']):
step_result = self.m.python(
'downloading fileset %s' % fileset_name,
args= ['download',
'-s%s' % isolate_hash,
def shard(self, title, isolate_hash, test_args, os=None, cpu='x86-64',
pool='dart.tests', num_shards=0, last_shard_is_local=False,
"""Runs in the given isolate, sharded over several swarming tasks.
Requires the 'shards' build property to be set to the number of tasks.
Returns the created task(s), which are meant to be passed into collect().
"""
# NOTE(review): the signature and the trigger call are truncated in this
# scrape (tasks is never appended to in the visible lines).
if 'shards' in
num_shards = int(['shards'])
assert(num_shards > 0)
tasks = []
if os is None:
# Default the swarming os dimension to the current platform.
os =
for shard in range(num_shards):
# TODO(athom) collect all the triggers, and present as a single step
# The last shard may run on this bot instead of swarming.
if last_shard_is_local and shard == num_shards - 1: break
task = self.m.swarming.task("%s_shard_%s" % (title, (shard + 1)),
raw_cmd=test_args +
['--shards=%s' % num_shards,
'--shard=%s' % (shard + 1),
# Translate short os fragments to swarming dimension names.
os_names = {
'win': 'Windows',
'linux': 'Linux',
'mac': 'Mac'
task.dimensions['os'] = os_names.get(os, os)
task.dimensions['cpu'] = cpu
task.dimensions['pool'] = pool
# Do not require a GPU-capable bot.
task.dimensions.pop('gpu', None)
if 'shard_timeout' in
task.hard_timeout = int(['shard_timeout'])
return tasks
def collect(self, tasks):
"""Collects the results of a sharded test run."""
# TODO(mkroghj) remove when all swarming recipes has been converted to neo.
with self.m.step.defer_results():
# TODO(athom) collect all the output, and present as a single step
num_shards = int(['shards'])
for shard in range(num_shards):
task = tasks[shard]
# Leak each shard's output into a per-shard cleanup directory.
path = self.m.path['cleanup'].join(str(shard))
task.task_output_dir = self.m.raw_io.output_dir(leak_to=path, name="results")
collect = self.m.swarming.collect_task(task)
# Surface any result.log produced by the shard on the collect step.
output_dir = self.m.step.active_result.raw_io.output_dir
for filename in output_dir:
if "result.log" in filename: # pragma: no cover
contents = output_dir[filename]
self.m.step.active_result.presentation.logs['result.log'] = [contents]
def collect_all(self, deferred_tasks):
"""Collects the results of a sharded test run."""
with self.m.step.defer_results():
# TODO(athom) collect all the output, and present as a single step
# deferred_tasks are DeferredResults wrapping lists of swarming tasks;
# failed trigger steps are skipped rather than collected.
for index_step,deferred_task in enumerate(deferred_tasks):
if deferred_task.is_ok:
for index_task,task in enumerate(deferred_task.get_result()):
# Leak each task's output into a unique cleanup subdirectory.
path = self.m.path['cleanup'].join(str(index_step) + '_' + str(index_task))
task.task_output_dir = self.m.raw_io.output_dir(leak_to=path, name="results")
collect = self.m.swarming.collect_task(task)
# Surface any result.log produced by the task on the collect step.
output_dir = self.m.step.active_result.raw_io.output_dir
for filename in output_dir:
if "result.log" in filename: # pragma: no cover
contents = output_dir[filename]
self.m.step.active_result.presentation.logs['result.log'] = [contents]
def read_result_file(self, name, log_name, test_data=''):
"""Reads the result.log file
* name (str) - Name of step
* log_name (str) - Name of log
* test_data (str) - Some default data for this step to return when running
under simulation.
Returns (str) - The content of the file.
Raises file.Error
"""
# NOTE(review): the try: opening this block and the handler body after the
# except clause are missing in this scrape -- file.Error was presumably
# swallowed (best-effort read); confirm against the original module.
result_log_path = self.m.path['checkout'].join('logs', 'result.log')
read_data = self.m.file.read_text(
name, result_log_path, test_data)
self.m.step.active_result.presentation.logs[log_name] = [read_data]
# Remove the log so a later step does not re-read stale results.
self.m.file.remove("delete result.log", result_log_path)
except self.m.file.Error: # pragma: no cover
def read_debug_log(self):
"""Reads the debug.log file"""
# Dumps .debug.log to the step log using the platform's native command.
# NOTE(review): the else: between the two branches and the trailing step
# kwargs (likely ok_ret) are missing in this scrape.
if == 'win':
self.m.step('debug log',
['cmd.exe', '/c', 'type', '.debug.log'],
self.m.step('debug log',
['cat', '.debug.log'],
def test(self, test_data):
"""Reads the test-matrix.json file in checkout and performs each step listed
in the file
Raises StepFailure.
"""
# NOTE(review): the json read call is truncated in this scrape, and the
# early return after _run_steps appears to be missing (as written, a matched
# builder would fall through to the StepFailure below).
test_matrix_path = self.m.path['checkout'].join('tools',
read_json =
'read test-matrix.json',
step_test_data=lambda: self.m.json.test_api.output(test_data))
test_matrix = read_json.json.output
builder =['buildername']
# Strip the channel suffix so the base name matches the test matrix.
# NOTE(review): 'dev' has no leading dash unlike the other suffixes (the
# channel map elsewhere uses '-dev') -- confirm whether '-dev' was intended.
if builder.endswith(('-be', '-try', '-stable', 'dev')):
builder = builder[0:builder.rfind('-')]
isolate_hashes = {}
global_config = test_matrix['global']
for config in test_matrix['builder_configurations']:
if builder in config['builders']:
self._run_steps(config, isolate_hashes, builder, global_config)
raise self.m.step.StepFailure(
'Error, could not find builder by name %s in test-matrix' % builder)
def _write_file_sets(self, filesets):
"""Writes the fileset to the root of the sdk to allow for swarming to pick
up the files and isolate the files.
* filesets - Filesets from the test-matrix
"""
# NOTE(review): iteritems() marks this as Python 2 code; the write_text call
# is missing its content argument in this scrape (presumably the dumped
# isolate_fileset dict).
for fileset,files in filesets.iteritems():
isolate_fileset = { 'variables': { 'files': files } }
destination_path = self.m.path['checkout'].join(fileset)
self.m.file.write_text('write fileset %s to sdk root' % fileset,
def _build_isolates(self, config, isolate_hashes):
"""Isolate filesets from all steps in config and returns a dictionary with a
mapping from fileset to isolate_hash.
* config (dict) - Configuration of the builder, including the steps
Returns (dict) - A mapping from fileset to isolate_hashes
for step in config['steps']:
if 'fileset' in step and step['fileset'] not in isolate_hashes:
isolate_hash = self.upload_isolate(step['fileset'])
isolate_hashes[step['fileset']] = isolate_hash
def _get_option(self, builder_fragments, options, default_value):
"""Gets an option from builder_fragments in options, or returns the default
intersection = set(builder_fragments) & set(options)
if len(intersection) == 1:
return intersection.pop()
return default_value
def _get_specific_argument(self, arguments, options):
for arg in arguments:
for option in options:
if arg.startswith(option):
return arg[len(option):]
return None
def _has_specific_argument(self, arguments, options):
return self._get_specific_argument(arguments, options) is not None
def _run_steps(self, config, isolate_hashes, builder_name, global_config):
"""Executes all steps from a json test-matrix builder entry"""
# Find information from the builder name. It should be in the form
# <info>-<os>-<mode>-<arch>-<runtime> or <info>-<os>-<mode>-<arch>.
# NOTE(review): several call sites below are truncated in this scrape (the
# _get_option calls are missing their builder_fragments/default arguments,
# and the 'branch' property condition is cut off).
builder_fragments = builder_name.split('-')
system = self._get_option(
['linux', 'mac', 'win7', 'win8', 'win10', 'win'],
mode = self._get_option(
['debug', 'release', 'product'],
arch = self._get_option(
['ia32', 'x64', 'arm', 'armv6', 'armv5te', 'arm64', 'simarm', 'simarmv6',
'simarmv5te', 'simarm64', 'simdbc', 'simdbc64'],
runtime = self._get_option(
['none', 'd8', 'jsshell', 'ie9', 'ie10', 'ie11', 'ff',
'safari', 'chrome', 'safarimobilesim', 'drt', 'ie10', 'ie11'],
environment = {'system': system,
'mode': mode,
'arch': arch}
if runtime is not None:
environment['runtime'] = runtime
# Pinned browsers are fetched from CIPD before any step needs them.
if runtime == 'chrome' or runtime == 'ff':
self._download_browser(runtime, global_config[runtime])
# Map the git branch to a release channel suffix (default: try).
channel = 'try'
if 'branch' in
channels = {
"refs/heads/master": "be",
"refs/heads/stable": "stable",
"refs/heads/dev": "dev"
channel = channels.get(['branch'], 'try');
test_py_path = 'tools/'
build_py_path = 'tools/'
# Indexes the number of steps.
test_py_index = 0;
tasks = []
with self.m.step.defer_results():
for index,step in enumerate(config['steps']):
step_name = step['name']
# If script is not defined, use
script = step.get('script', test_py_path)
args = step.get('arguments', [])
is_build_step = script.endswith(build_py_path)
is_trigger = 'trigger' in step
is_test_py_step = script.endswith(test_py_path)
script = self.m.path['checkout'].join(*script.split('/'))
isolate_hash = None
shards = step.get('shards', 0)
# Only the final step may run its last shard locally on this bot.
local_shard = shards > 0 and index == len(config['steps']) - 1
if 'fileset' in step:
# We build isolates here, every time we see fileset, to wait for the
# building of Dart, which may be included in the fileset.
self._build_isolates(config, isolate_hashes)
isolate_hash = isolate_hashes[step['fileset']]
environment_variables = step.get('environment', {})
environment_variables['BUILDBOT_BUILDERNAME'] = builder_name + "-%s" % channel
with self.m.context(cwd=self.m.path['checkout'], env=environment_variables):
with self.m.depot_tools.on_path():
if is_build_step:
# Default -m/-a from the builder name unless the step overrides.
if not self._has_specific_argument(args, ['-m', '--mode']):
args = ['-m%s' % mode] + args
if not self._has_specific_argument(args, ['-a', '--arch']):
args = ['-a%s' % arch] + args
deferred_result =, build_args=args)
deferred_result.get_result() # raises build errors
elif is_trigger:
self.run_trigger(step_name, step, isolate_hash)
elif is_test_py_step:
append_logs = test_py_index > 0
self.run_test_py(step_name, append_logs, step,
isolate_hash, shards, local_shard,
environment, tasks, global_config)
if shards == 0 or local_shard:
# Only count indexes that are not sharded, to help with adding
# append-logs.
test_py_index += 1
self.run_script(step_name, script, args, isolate_hash, shards,
local_shard, environment, tasks)
def _copy_property(self, src, dest, key):
if key in src:
dest[key] = src[key]
def _download_browser(self, runtime, version):
  """Install the pinned browser for `runtime` from CIPD into the checkout.

  Downloads CIPD package dart/browsers/<runtime>/${platform} at the given
  version into [sdk root]/browsers.
  Shards must install this CIPD package to the same location -
  there is an argument to the swarming module task creation api for this.
  """
  # The 'ff' runtime fragment maps to the 'firefox' CIPD package name.
  package_runtime = 'firefox' if runtime == 'ff' else runtime
  cache_dir = self.m.path['checkout'].join('browsers')
  self.m.file.ensure_directory('create browser cache', cache_dir)
  self.m.cipd.ensure(
      cache_dir,
      {'dart/browsers/%s/${platform}' % package_runtime:
           'version:%s' % version})
def run_trigger(self, step_name, step, isolate_hash):
# Triggers the builders listed in step['trigger'] via buildbucket, passing
# the parent's revision/fileset through build properties.
# NOTE(review): the buildbucket.put payload is missing its enclosing list
# and dict braces in this scrape, and '.next()' below marks this as
# Python 2 code.
trigger_props = {}
self._copy_property(, trigger_props, 'git_revision')
self._copy_property(, trigger_props, 'revision')
trigger_props['parent_buildername'] =['buildername']
trigger_props['parent_build_id'] ='build_id', '')
if isolate_hash:
trigger_props['parent_fileset'] = isolate_hash
trigger_props['parent_fileset_name'] = step['fileset']
put_result = self.m.buildbucket.put(
'bucket': '',
'parameters': {
'builder_name': builder_name,
'properties': trigger_props,
'changes': [
'author': {
'email': author,
for author in'blamelist', [])
for builder_name in step['trigger']
self.m.step.active_result.presentation.step_text = step_name
# Link each triggered build from this step, keyed by its builder name.
for build in put_result.stdout['results']:
builder_tag = (x for x in build['build']['tags'] if x.startswith('builder:')).next()
builder_name = builder_tag[len('builder:'):]
self.m.step.active_result.presentation.links[builder_name] = build['build']['url']
def run_test_py(self, step_name, append_logs, step, isolate_hash, shards,
local_shard, environment, tasks, global_config):
"""Runs with default arguments, based on configuration from.
* step_name (str) - Name of the step
* append_logs (bool) - Add append_log to arguments
* step (dict) - Test-matrix step
* isolate_hash (String) - Hash of uploadet fileset/isolate if the
process is to be sharded
* shards (int) - The number of shards
* local_shard (bool) - Should the current builder be one of the shards.
* environment (dict) - Environment with runtime, arch, system etc
* tasks ([task]) - placeholder to put all swarming tasks in
* global_config (dict) - The global section from test_matrix.json.
Contains version tags for the pinned browsers Firefox and Chrome.
"""
# NOTE(review): truncated in this scrape -- the initial test_args list is
# cut off, the cipd_packages.append calls after each version_tag are
# missing (version_tag is otherwise unused), and the run_script call is
# missing its cipd_packages argument.
args = step.get('arguments', [])
test_args = ['--progress=buildbot',
if not self._has_specific_argument(args, ['-m', '--mode']):
test_args = ['-m%s' % environment['mode']] + test_args
if not self._has_specific_argument(args, ['-a', '--arch']):
test_args = ['-a%s' % environment['arch']] + test_args
if 'runtime' in environment and not self._has_specific_argument(
args, ['-r', '--runtime']):
test_args = test_args + ['-r%s' % environment['runtime']]
args = test_args + args
if append_logs:
args = args + ['--append_logs']
if environment['system'] in ['win7', 'win8', 'win10']:
args = args + ['--builder-tag=%s' % environment['system']]
# The --chrome flag is added here if the runtime for the bot is
# chrome. This also catches the case where there is a specific
# argument -r or --runtime. It misses the case where
# a recipe calls run_script directly with a command.
# The download of the browser from CIPD should also be moved
# here (perhaps checking if it is already done) so we catch
# specific test steps with runtime chrome in a bot without that
# global runtime.
cipd_packages = []
if any(arg in ['-rchrome', '--runtime=chrome'] for arg in args):
version_tag = 'version:%s' % global_config['chrome']
args = args + [CHROME_PATH_ARGUMENT[environment['system']]]
if any(arg in ['-rff', '--runtime=ff'] for arg in args):
version_tag = 'version:%s' % global_config['ff']
args = args + [FIREFOX_PATH_ARGUMENT[environment['system']]]
if 'exclude_tests' in step:
args = args + ['--exclude_suite=' + ','.join(step['exclude_tests'])]
if 'tests' in step:
args = args + step['tests']
with self.m.step.defer_results():
self.run_script(step_name, 'tools/', args, isolate_hash, shards,
local_shard, environment, tasks,
# Sharded runs produce their logs on the shards; only read result.log
# when the step ran (at least partly) on this bot.
if shards == 0 or local_shard:
self.read_result_file('read results of %s' % step_name, 'result.log')
def run_script(self, step_name, script, args, isolate_hash, shards,
local_shard, environment, tasks, cipd_packages=[]):
"""Runs a specific script with current working directory to be checkout. If
the runtime (passed in environment) is a browser, and the system is linux,
xvfb is used. If an isolate_hash is passed in, it will swarm the command.
* step_name (str) - Name of the step
* script (str) - The script to invoke
* args ([str]) - Additional arguments to
* isolate_hash (str) - The isolate hash if the script should be swarmed
* shards (int) - The number of shards to invoke
* local_shard (bool) - Should the current builder be used as a shard
* environment (dict) - Environment with runtime, arch, system etc
* tasks ([task]) - placeholder to hold swarming tasks
* cipd_packages ([tuple]) - list of 3-tuples specifying a cipd package
to be downloaded
"""
# NOTE(review): the mutable default cipd_packages=[] is only read and
# forwarded here, never mutated. Several lines are truncated in this
# scrape: the xvfb command head, the else: branches pairing the if
# statements below, and the kwargs of both shard(...) calls.
runtime = self._get_specific_argument(args, ['-r', '--runtime'])
if runtime is None:
runtime = environment.get('runtime', None)
# Browser runtimes on linux need a virtual display.
use_xvfb = (runtime in ['drt', 'chrome', 'ff'] and
environment['system'] == 'linux')
with self.m.step.defer_results():
if use_xvfb:
xvfb_cmd = [
'--server-args=-screen 0 1024x768x24']
cmd = xvfb_cmd + ['python', '-u', script] + args
if isolate_hash:
tasks.append(self.shard(step_name, isolate_hash, cmd,
self.m.step(step_name, cmd)
if isolate_hash:
tasks.append(self.shard(step_name, isolate_hash, [script] + args,
elif '.py' in str(script):
self.m.python(step_name, script, args=args)
self.m.step(step_name, [script] + args)
# When the last shard runs locally, recurse once to execute it here with
# the final shard index (no further swarming: isolate_hash=None, shards=0).
if local_shard:
args = args + [
'--shards=%s' % shards,
'--shard=%s' % shards
self.run_script("%s_shard_%s" % (step_name, shards), script,
args, None, 0, False, environment, tasks, cipd_packages)