[V8] Rip out deprecated sanitizer-coverage logic

The sanitizer-coverage build was removed a while back and has been
unmaintained for a long time. Because there's no plan to bring it
back, this removes the recipe-side logic to reduce code complexity.

Bug: 1132088
Change-Id: I563d874a2c036b9b32cb348b1a67c71fb438f1d7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/build/+/2494731
Commit-Queue: Liviu Rau <liviurau@chromium.org>
Reviewed-by: Liviu Rau <liviurau@chromium.org>
Auto-Submit: Michael Achenbach <machenbach@chromium.org>
diff --git a/recipes/recipe_modules/v8/api.py b/recipes/recipe_modules/v8/api.py
index 5f0b4f4..af267e2 100644
--- a/recipes/recipe_modules/v8/api.py
+++ b/recipes/recipe_modules/v8/api.py
@@ -301,8 +301,6 @@
     # Apply additional configs for coverage builders.
     if self.bot_config.get('coverage') == 'gcov':
       self.bot_config['disable_auto_bisect'] = True
-    elif self.bot_config.get('coverage') == 'sanitizer':
-      self.m.gclient.apply_config('llvm_compiler_rt')
 
     if self.bot_config.get('enable_swarming', True):
       self.m.gclient.c.got_revision_reverse_mapping[
@@ -421,22 +419,6 @@
       _, self.revision_number = self.m.commit_position.parse(self.revision_cp)
       self.revision_number = str(self.revision_number)
 
-  def calculate_patch_base_gerrit(self):
-    """Calculates the commit hash a gerrit patch was branched off."""
-    commits, _ = self.m.gitiles.log(
-        url=V8_URL,
-        ref='master..%s' % self.m.tryserver.gerrit_change_fetch_ref,
-        limit=100,
-        step_name='Get patches',
-        step_test_data=self.test_api.example_patch_range,
-    )
-    # There'll be at least one commit with the patch. Maybe more for dependent
-    # CLs.
-    assert len(commits) >= 1
-    # We don't support merges.
-    assert len(commits[-1]['parents']) == 1
-    return commits[-1]['parents'][0]
-
   def set_up_swarming(self):
     if self.bot_config.get('enable_swarming', True):
       self.m.chromium_swarming.check_client_version()
@@ -986,16 +968,6 @@
     result.presentation.links['report'] = (
       'https://storage.googleapis.com/chromium-v8/%s/index.html' % dest)
 
-  @property
-  def generate_sanitizer_coverage(self):
-    return self.bot_config.get('coverage') == 'sanitizer'
-
-  def create_coverage_context(self):
-    if self.generate_sanitizer_coverage:
-      return testing.SanitizerCoverageContext(self.m)
-    else:
-      return testing.NULL_COVERAGE
-
   def create_test(self, test):
     """Wrapper that allows to shortcut common tests with their names.
 
@@ -1038,36 +1010,27 @@
     non_swarming_tests = [t for t in tests if not t.uses_swarming]
     failed_tests = []
 
-    # Creates a coverage context if coverage is tracked. Null object otherwise.
-    coverage_context = self.create_coverage_context()
-
     with self.maybe_nest(swarming_tests, 'trigger tests'):
       # Make sure swarming triggers come first.
       # TODO(machenbach): Port this for rerun for bisection.
       for t in swarming_tests + non_swarming_tests:
         try:
-          t.pre_run(coverage_context=coverage_context)
+          t.pre_run()
         except self.m.step.InfraFailure:  # pragma: no cover
           raise
         except self.m.step.StepFailure:  # pragma: no cover
           failed_tests.append(t)
 
-    # Setup initial zero coverage after all swarming jobs are triggered.
-    coverage_context.setup()
-
     # Make sure non-swarming tests are run before swarming results are
     # collected.
     for t in non_swarming_tests + swarming_tests:
       try:
-        test_results += t.run(coverage_context=coverage_context)
+        test_results += t.run()
       except self.m.step.InfraFailure:  # pragma: no cover
         raise
       except self.m.step.StepFailure:  # pragma: no cover
         failed_tests.append(t)
 
-    # Upload accumulated coverage data.
-    coverage_context.maybe_upload()
-
     if failed_tests:
       failed_tests_names = [t.name for t in failed_tests]
       raise self.m.step.StepFailure(
diff --git a/recipes/recipe_modules/v8/gclient_config.py b/recipes/recipe_modules/v8/gclient_config.py
index eeda6a1..a6e13a1 100644
--- a/recipes/recipe_modules/v8/gclient_config.py
+++ b/recipes/recipe_modules/v8/gclient_config.py
@@ -33,12 +33,6 @@
   c.target_os.add('ios')
 
 
-@CONFIG_CTX(includes=['v8'])
-def llvm_compiler_rt(c):
-  c.solutions[0].custom_deps['v8/third_party/llvm/projects/compiler-rt'] = (
-    ChromiumGitURL(c, 'external', 'llvm.org', 'compiler-rt'))
-
-
 @CONFIG_CTX()
 def node_ci(c):
   soln = c.solutions.add()
diff --git a/recipes/recipe_modules/v8/resources/calculate_patch_base.py b/recipes/recipe_modules/v8/resources/calculate_patch_base.py
deleted file mode 100755
index b669164..0000000
--- a/recipes/recipe_modules/v8/resources/calculate_patch_base.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to calculate the latest fitting git commit hash for a patch.
-
-A commit fits if the hashes of all base files from the patch are equal to
-the hashes of the respective files from the commit. As multiple such commits
-might exist, this returns the latest.
-
-This looks back for a maximum of 1000 commits. Doesn't support windows.
-
-Expects three arguments:
-1) a file containing the raw patch,
-2) folder with the checkout,
-3) a file to write the result hash to.
-"""
-
-import os
-import subprocess
-import sys
-
-# The revision from which v8 coverage is supported.
-# TODO(machenbach): Remove this as soon as it's supported for 1000+ revisions.
-V8_COVERAGE_SUPPORT_COMMIT = 'e7f99c1ed52a5724ffef41c361920786e97f240d'
-
-assert len(sys.argv) == 4
-
-# Read the raw patch.
-with open(sys.argv[1]) as f:
-  patch = f.read()
-
-# Absolute path to checkout folder.
-CHECKOUT = sys.argv[2]
-assert os.path.exists(CHECKOUT) and os.path.isdir(CHECKOUT)
-
-# Parse the patch and extract (file, hsh) tuples. The hsh is the hash of the
-# base file of a given file.
-base_hashes = []
-current_file = None
-for line in patch.splitlines():
-  if line.startswith('Index: '):
-    # "Index" header looks like this:
-    # Index: <file name>
-    current_file = line.split('Index: ')[1]
-  elif line.startswith('copy from '):
-    # If diff considers a file to be a copy of an existing file, the base hash
-    # is from the existing file. In this case, the diff contains a line after
-    # "Index" and before "index" that looks like:
-    # copy from <old file name>
-    current_file = line.split('copy from ')[1]
-  elif line.startswith('rename from '):
-    # Same as above with rename.
-    current_file = line.split('rename from ')[1]
-  elif line.startswith('index '):
-    # "index" header looks like this and comes a few lines after the one above:
-    # index <base hash>..<hash after patch> <mode>
-    assert current_file
-    hsh = line.split(' ')[1].split('..')[0]
-    if len(hsh) * '0' != hsh:
-      # We only care for existing files. New ones have a sequence of zeros as
-      # hash.
-      base_hashes.append((current_file, hsh))
-    current_file = None
-
-# Make sure we found something.
-assert base_hashes
-
-# Iterate over the last 1000 commits.
-for i in xrange(1000):
-  # Translate commit position relative to HEAD to its hash.
-  commit_hsh = subprocess.check_output(
-      ['git', '-C', CHECKOUT, 'log', '-n1', '--format=%H', 'HEAD~%d' % i]
-  ).strip()
-
-  # Iterate over all files of the patch and compare the hashes.
-  for f, file_hsh_from_patch in base_hashes:
-    file_hsh_from_commit = subprocess.check_output(
-        ['git', '-C', CHECKOUT, 'rev-parse', '%s:%s' % (commit_hsh, f)]
-    ).strip()
-    if not file_hsh_from_commit.startswith(file_hsh_from_patch):
-      # Check if file hashes match. The hash from the patch might be an
-      # abbreviation.
-      print 'Skipping %s as file %s has no matching base.' % (commit_hsh, f)
-      break
-  else:
-    # Loop terminated gracefully, all file hashes matched.
-    print 'Found a match: %s' % commit_hsh
-    with open(sys.argv[3], 'w') as out_file:
-      out_file.write(commit_hsh)
-    sys.exit(0)
-
-  if commit_hsh == V8_COVERAGE_SUPPORT_COMMIT:
-    print 'The CL is too old, code coverage is not supported. Please rebase.'
-    sys.exit(1)
-
-print 'Reached commit limit. Couldn\'t find an appropriate commit.'
-sys.exit(1)
diff --git a/recipes/recipe_modules/v8/resources/calculate_patch_base_test.py b/recipes/recipe_modules/v8/resources/calculate_patch_base_test.py
deleted file mode 100644
index ea6bf55..0000000
--- a/recipes/recipe_modules/v8/resources/calculate_patch_base_test.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import unittest
-
-
-LOCATION = os.path.dirname(os.path.abspath(__file__))
-
-
-# This branched off commit 2 below.
-PATCH1 = """
-Index: test2
-index b5d8dd0..257cc56 100644
---- a/test2
-+++ b/test2
-@@ -1 +1 @@
--2_2
-\ No newline at end of file
-+foo
-"""
-
-# This branched off commit 4 below.
-PATCH2 = """
-Index: test1
-index 65824f6..7601807 100644
---- a/test1
-+++ b/test1
-@@ -1 +1 @@
--1_4
-\ No newline at end of file
-+baz
-Index: test3
-index dca23b6..5716ca5 100644
---- a/test3
-+++ b/test3
-@@ -1 +1 @@
--3_4
-\ No newline at end of file
-+bar
-"""
-
-# This branched off commit 2 below and adds a new file.
-PATCH3 = """
-Index: baz
-new file mode 100644
-index 0000000..7601807
---- /dev/null
-+++ b/baz
-@@ -0,0 +1 @@
-+baz
-Index: test2
-index b5d8dd0..257cc56 100644
---- a/test2
-+++ b/test2
-@@ -1 +1 @@
--2_2
-\ No newline at end of file
-+foo
-"""
-
-# This branched off commit 2 below and adds a new file. The diff considers
-# the new file as a copy of "test2".
-PATCH4 = """
-Index: baz
-new file mode 100644
-copy from test2
-copy to baz
-index b5d8dd0..7601807
---- /dev/null
-+++ b/baz
-@@ -0,0 +1 @@
-+baz
-"""
-
-# Same as above with rename.
-PATCH5 = """
-Index: baz
-new file mode 100644
-rename from test2
-rename to baz
-index b5d8dd0..7601807
---- /dev/null
-+++ b/baz
-@@ -0,0 +1 @@
-+baz
-"""
-
-
-class PatchBaseTest(unittest.TestCase):
-  @classmethod
-  def git(cls, *args):
-    return subprocess.check_output(
-      ['git'] + list(args),
-      cwd=cls.repo
-    ).strip()
-
-  @classmethod
-  def write_file(cls, name, content):
-    with open(os.path.join(cls.repo, name), 'w') as f:
-      f.write(content)
-
-  @classmethod
-  def setUpClass(cls):
-    cls.repo = tempfile.mkdtemp()
-    cls.git('init')
-    cls.write_file('test1', '1_1')
-    cls.write_file('test2', '2_1')
-    cls.git('add', 'test1', 'test2')
-    cls.git('commit', '-m', 'Commit1')
-
-    cls.write_file('test1', '1_2')
-    cls.write_file('test2', '2_2')
-    cls.git('commit', '-am', 'Commit2')
-
-    cls.write_file('test1', '1_3')
-    cls.write_file('test2', '2_3')
-    cls.git('commit', '-am', 'Commit3')
-
-    cls.write_file('test1', '1_4')
-    cls.write_file('test2', '2_4')
-    cls.write_file('test3', '3_4')
-    cls.git('add', 'test3')
-    cls.git('commit', '-am', 'Commit4')
-
-    cls.write_file('test1', '1_5')
-    cls.write_file('test2', '2_5')
-    cls.git('commit', '-am', 'Commit5')
-
-  @classmethod
-  def tearDownClass(cls):
-    shutil.rmtree(cls.repo)
-
-  def setUp(self):
-    self.workdir = tempfile.mkdtemp()
-
-  def tearDown(self):
-    shutil.rmtree(self.workdir)
-
-  def calculate_patch_base(self, patch):
-    patch_file = os.path.join(self.workdir, 'patch')
-    result_file = os.path.join(self.workdir, 'result')
-    with open(patch_file, 'w') as f:
-      f.write(patch)
-    subprocess.check_call(
-      [
-        sys.executable, '-u', 'calculate_patch_base.py',
-        patch_file, self.repo, result_file,
-      ],
-      cwd=LOCATION,
-    )
-    with open(result_file) as f:
-      result = f.read().strip()
-      return self.git('log', '-n1', '--format=%s', result)
-
-  def testMatch(self):
-    commit_title = self.calculate_patch_base(PATCH1)
-    self.assertEquals('Commit2', commit_title)
-
-  def testMatchTwoFiles(self):
-    commit_title = self.calculate_patch_base(PATCH2)
-    self.assertEquals('Commit4', commit_title)
-
-  def testFileAdded(self):
-    commit_title = self.calculate_patch_base(PATCH3)
-    self.assertEquals('Commit2', commit_title)
-
-  def testFileCopied(self):
-    commit_title = self.calculate_patch_base(PATCH4)
-    self.assertEquals('Commit2', commit_title)
-
-  def testFileRename(self):
-    commit_title = self.calculate_patch_base(PATCH5)
-    self.assertEquals('Commit2', commit_title)
diff --git a/recipes/recipe_modules/v8/test_api.py b/recipes/recipe_modules/v8/test_api.py
index 04d712b..8a9e3eb 100644
--- a/recipes/recipe_modules/v8/test_api.py
+++ b/recipes/recipe_modules/v8/test_api.py
@@ -364,15 +364,6 @@
       'top100_avg_deps': 1.3,
     })
 
-  def example_patch_range(self):
-    # Gitiles returns changes in the order child -> parent.
-    return self.m.json.output({
-      'log': [
-        {'commit': '[child2 hsh]', 'parents': ['[child1 hsh]']},
-        {'commit': '[child1 hsh]', 'parents': ['[master-branch-point hsh]']},
-      ],
-    })
-
   def example_test_roots(self, *roots):
     """Simulates dynamically optained test-root directories."""
     return self.override_step_data(
diff --git a/recipes/recipe_modules/v8/testing.py b/recipes/recipe_modules/v8/testing.py
index 5d4cd08..9de8d01 100644
--- a/recipes/recipe_modules/v8/testing.py
+++ b/recipes/recipe_modules/v8/testing.py
@@ -210,132 +210,6 @@
 })
 
 
-class NullCoverageContext(object):
-  """Null object to represent testing without collecting coverage."""
-  def get_test_runner_args(self):
-    return []
-
-  def get_swarming_collect_args(self):
-    return []
-
-  def setup(self):
-    pass
-
-  def post_run(self):
-    pass
-
-  def maybe_upload(self):
-    pass
-
-NULL_COVERAGE = NullCoverageContext()
-
-
-class SanitizerCoverageContext(object):
-  """Context during testing to collect coverage data.
-
-  Only testing on swarming is supported.
-  """
-  def __init__(self, api):
-    self.api = api
-    self.coverage_dir = api.path.mkdtemp('coverage_output')
-
-  def get_test_runner_args(self):
-    """Returns the test runner arguments for collecting coverage data."""
-    return ['--sancov-dir', '${ISOLATED_OUTDIR}']
-
-  def get_swarming_collect_args(self):
-    """Returns the swarming collect step's arguments for merging."""
-    return [
-      '--coverage-dir', self.coverage_dir,
-      '--sancov-merger', self.api.path['checkout'].join(
-          'tools', 'sanitizers', 'sancov_merger.py'),
-    ]
-
-  def setup(self):
-    """Build data file with initial zero coverage data.
-
-    To be called before any coverage data from testing is merged in.
-    """
-    self.api.python(
-        'Initialize coverage data',
-        self.api.path['checkout'].join(
-            'tools', 'sanitizers', 'sancov_formatter.py'),
-        [
-          'all',
-          '--json-output', self.coverage_dir.join('data.json'),
-        ],
-    )
-
-  def post_run(self):
-    """Merge coverage data from one test run.
-
-    To be called after every test step. Requires existing initial zero
-    coverage data, obtained by calling setup().
-    """
-    self.api.python(
-        'Merge coverage data',
-        self.api.path['checkout'].join(
-            'tools', 'sanitizers', 'sancov_formatter.py'),
-        [
-          'merge',
-          '--json-input', self.coverage_dir.join('data.json'),
-          '--json-output', self.coverage_dir.join('data.json'),
-          '--coverage-dir', self.coverage_dir,
-        ],
-    )
-
-    self.api.python.inline(
-        'Purge sancov files',
-        """
-        import glob
-        import os
-        for f in glob.glob('%s'):
-          os.remove(f)
-        """ % self.coverage_dir.join('*.sancov'),
-    )
-
-  def maybe_upload(self):
-    """Uploads coverage data to google storage if on tryserver."""
-
-    if self.api.tryserver.gerrit_change:
-      cl = self.api.tryserver.gerrit_change
-      results_path = 'tryserver/sanitizer_coverage/gerrit/%d/%d/%s%d' % (
-        cl.change, cl.patchset, self.api.platform.name, self.api.v8.target_bits)
-
-
-      self.api.gsutil.upload(
-          self.coverage_dir.join('data.json'),
-          'chromium-v8',
-          results_path + '/data.json',
-      )
-
-      data_dir = self.api.path.mkdtemp('coverage_data')
-      self.api.python(
-          'Split coverage data',
-          self.api.path['checkout'].join(
-              'tools', 'sanitizers', 'sancov_formatter.py'),
-          [
-            'split',
-            '--json-input', self.coverage_dir.join('data.json'),
-            '--output-dir', data_dir,
-          ],
-          # Allow to work with older v8 revisions that don't have the split
-          # function in which case the directory will stay empty.
-          # TODO(machenbach): Remove this when v8's passed CP 34834 + 1000.
-          ok_ret='any',
-      )
-
-      self.api.gsutil(
-          [
-            '-m', 'cp', '-a', 'public-read', '-R', data_dir.join('*'),
-            'gs://chromium-v8/%s/' % results_path,
-          ],
-          'coverage data',
-          # Same as in the step above.
-          ok_ret='any',
-      )
-
-
 class BaseTest(object):
   def __init__(self, test_step_config, api):
     self.test_step_config = test_step_config
@@ -371,10 +245,10 @@
     # Run all tests by default.
     return True
 
-  def pre_run(self, test=None, coverage_context=NULL_COVERAGE, **kwargs):
+  def pre_run(self, test=None, **kwargs):
     pass  # pragma: no cover
 
-  def run(self, test=None, coverage_context=NULL_COVERAGE, **kwargs):
+  def run(self, test=None, **kwargs):
     raise NotImplementedError()  # pragma: no cover
 
   def rerun(self, failure_dict, **kwargs):  # pragma: no cover
@@ -394,7 +268,7 @@
       return False
     return True
 
-  def run(self, test=None, coverage_context=NULL_COVERAGE, **kwargs):
+  def run(self, test=None, **kwargs):
     test = test or self.api.v8.test_configs[self.name]
 
     full_args, env = self.api.v8._setup_test_runner(
@@ -421,7 +295,7 @@
     )
     return result_variants == set([True])
 
-  def post_run(self, test, coverage_context=NULL_COVERAGE):
+  def post_run(self, test):
     # The active step was either a local test run or the swarming collect step.
     step_result = self.api.step.active_result
     json_output = step_result.json.output
@@ -464,8 +338,6 @@
     if self.has_only_stress_opt_failures(json_output):
       self.api.step('Found isolated stress failures', cmd=None)
 
-    coverage_context.post_run()
-
     return TestResults(failures, flakes, infra_failures)
 
   def _add_bug_links(self, failures, presentation):
@@ -567,16 +439,18 @@
     """Returns true if the test uses swarming."""
     return True
 
-  def _v8_collect_step(self, task, coverage_context=NULL_COVERAGE, **kwargs):
+  def _v8_collect_step(self, task, **kwargs):
     """Produces a step that collects and processes a result of a v8 task."""
     # Placeholder for the merged json output.
     json_output = self.api.json.output(add_json_log=False)
 
     # Shim script's own arguments.
     args = [
-      '--temp-root-dir', self.api.path['tmp_base'],
-      '--merged-test-output', json_output,
-    ] + coverage_context.get_swarming_collect_args()
+        '--temp-root-dir',
+        self.api.path['tmp_base'],
+        '--merged-test-output',
+        json_output,
+    ]
 
     # Arguments for actual 'collect' command.
     args.append('--')
@@ -594,7 +468,7 @@
           step_test_data=kwargs.pop('step_test_data', None),
           **kwargs)
 
-  def pre_run(self, test=None, coverage_context=NULL_COVERAGE, **kwargs):
+  def pre_run(self, test=None, **kwargs):
     # Set up arguments for test runner.
     self.test = test or self.api.v8.test_configs[self.name]
     extra_args, _ = self.api.v8._setup_test_runner(
@@ -603,10 +477,10 @@
     # Let json results be stored in swarming's output folder. The collect
     # step will copy the folder's contents back to the client.
     extra_args += [
-      '--swarming',
-      '--json-test-results',
-      '${ISOLATED_OUTDIR}/output.json',
-    ] + coverage_context.get_test_runner_args()
+        '--swarming',
+        '--json-test-results',
+        '${ISOLATED_OUTDIR}/output.json',
+    ]
 
     # Initialize number of shards, either per test or per builder.
     shards = 1
@@ -625,12 +499,11 @@
         shards=shards,
         raw_cmd= [command] + extra_args,
     )
-    self.task.collect_step = lambda task, **kw: (
-        self._v8_collect_step(task, coverage_context, **kw))
+    self.task.collect_step = self._v8_collect_step
 
     _trigger_swarming_task(self.api, self.task, self.test_step_config)
 
-  def run(self, coverage_context=NULL_COVERAGE, **kwargs):
+  def run(self, **kwargs):
     # TODO(machenbach): Soften this when softening 'assert isolated_hash'
     # above.
     assert self.task
@@ -645,7 +518,7 @@
     except self.api.step.InfraFailure as e:
       result += TestResults.infra_failure(e)
 
-    return result + self.post_run(self.test, coverage_context)
+    return result + self.post_run(self.test)
 
   def rerun(self, failure_dict, **kwargs):
     self.pre_run(test=self._setup_rerun_config(failure_dict), **kwargs)
diff --git a/recipes/recipes/v8.expected/full_tryserver_v8_v8_foobar_sanitizer_coverage.json b/recipes/recipes/v8.expected/full_tryserver_v8_v8_foobar_sanitizer_coverage.json
deleted file mode 100644
index 3477d43..0000000
--- a/recipes/recipes/v8.expected/full_tryserver_v8_v8_foobar_sanitizer_coverage.json
+++ /dev/null
@@ -1,81 +0,0 @@
-[
-  {
-    "cmd": [
-      "python",
-      "-u",
-      "RECIPE_MODULE[depot_tools::gsutil]/resources/gsutil_smart_retry.py",
-      "--",
-      "RECIPE_REPO[depot_tools]/gsutil.py",
-      "----",
-      "cp",
-      "-a",
-      "public-read",
-      "{\"bot_default\": \"[dummy hash for bot_default]\"}",
-      "gs://chromium-v8/isolated/tryserver.v8/v8_foobar/[master-branch-point hsh].json"
-    ],
-    "infra_step": true,
-    "name": "build.gsutil upload",
-    "~followup_annotations": [
-      "@@@STEP_NEST_LEVEL@1@@@",
-      "@@@STEP_LINK@gsutil.upload@https://storage.cloud.google.com/chromium-v8/isolated/tryserver.v8/v8_foobar/[master-branch-point hsh].json@@@"
-    ]
-  },
-  {
-    "cmd": [
-      "python",
-      "-u",
-      "[CACHE]/builder/v8/tools/sanitizers/sancov_formatter.py",
-      "all",
-      "--json-output",
-      "[CLEANUP]/coverage_output_tmp_1/data.json"
-    ],
-    "name": "Initialize coverage data"
-  },
-  {
-    "cmd": [
-      "python",
-      "-u",
-      "[CACHE]/builder/v8/tools/sanitizers/sancov_formatter.py",
-      "merge",
-      "--json-input",
-      "[CLEANUP]/coverage_output_tmp_1/data.json",
-      "--json-output",
-      "[CLEANUP]/coverage_output_tmp_1/data.json",
-      "--coverage-dir",
-      "[CLEANUP]/coverage_output_tmp_1"
-    ],
-    "name": "Merge coverage data"
-  },
-  {
-    "cmd": [
-      "python",
-      "-u",
-      "[CACHE]/builder/v8/tools/sanitizers/sancov_formatter.py",
-      "split",
-      "--json-input",
-      "[CLEANUP]/coverage_output_tmp_1/data.json",
-      "--output-dir",
-      "[CLEANUP]/coverage_data_tmp_4"
-    ],
-    "name": "Split coverage data"
-  },
-  {
-    "cmd": [
-      "python",
-      "-u",
-      "RECIPE_MODULE[depot_tools::gsutil]/resources/gsutil_smart_retry.py",
-      "--",
-      "RECIPE_REPO[depot_tools]/gsutil.py",
-      "----",
-      "-m",
-      "cp",
-      "-a",
-      "public-read",
-      "-R",
-      "[CLEANUP]/coverage_data_tmp_4/*",
-      "gs://chromium-v8/tryserver/sanitizer_coverage/gerrit/456789/12/linux64/"
-    ],
-    "infra_step": true,
-    "name": "gsutil coverage data"
-  }
-]
\ No newline at end of file
diff --git a/recipes/recipes/v8.py b/recipes/recipes/v8.py
index 200bec2..02fcf97 100644
--- a/recipes/recipes/v8.py
+++ b/recipes/recipes/v8.py
@@ -33,49 +33,49 @@
 ]
 
 PROPERTIES = {
-  # Additional configurations to enable binary size tracking. The mapping
-  # consists of "binary" and "category".
-  'binary_size_tracking': Property(default=None, kind=dict),
-  # Deprecated.
-  'build_config': Property(default=None, kind=str),
-  # Switch to clobber build dir before runhooks.
-  'clobber': Property(default=False, kind=bool),
-  # Switch to clobber build dir before bot_update.
-  'clobber_all': Property(default=False, kind=bool),
-  # Additional configurations set for archiving builds to GS buckets for
-  # clusterfuzz. The mapping consists of "name", "bucket" and optional
-  # "bitness".
-  'clusterfuzz_archive': Property(default=None, kind=dict),
-  # Optional coverage setting. One of gcov|sanitizer.
-  'coverage': Property(default=None, kind=str),
-  # Mapping of custom dependencies to sync (dependency name as in DEPS file ->
-  # deps url).
-  'custom_deps': Property(default=None, kind=dict),
-  # Optional list of default targets. If not specified the implicit "all" target
-  # will be built.
-  'default_targets': Property(default=None, kind=list),
-  # Switch to enable/disable swarming.
-  'enable_swarming': Property(default=None, kind=bool),
-  # Mapping of additional gclient variables to set (map name -> value).
-  'gclient_vars': Property(default=None, kind=dict),
-  # Optional path to a different MB config. The path must be relative to the
-  # V8 checkout and using forward slashes.
-  'mb_config_path': Property(default=None, kind=str),
-  # Name of a gclient custom_var to set to 'True'.
-  # TODO(machenbach): Deprecate single boolean variables and use gclient_vars.
-  'set_gclient_var': Property(default=None, kind=str),
-  # One of intel|arm|mips.
-  'target_arch': Property(default=None, kind=str),
-  # One of android|fuchsia|linux|mac|win.
-  'target_platform': Property(default=None, kind=str),
-  # Weather to track and upload build-dependencies stats.
-  'track_build_dependencies': Property(default=None, kind=bool),
-  # List of tester names to trigger.
-  'triggers': Property(default=None, kind=list),
-  # Weather to trigger the internal trigger proxy.
-  'triggers_proxy': Property(default=False, kind=bool),
-  # Weather to use goma for compilation.
-  'use_goma': Property(default=True, kind=bool),
+    # Additional configurations to enable binary size tracking. The mapping
+    # consists of "binary" and "category".
+    'binary_size_tracking': Property(default=None, kind=dict),
+    # Deprecated.
+    'build_config': Property(default=None, kind=str),
+    # Switch to clobber build dir before runhooks.
+    'clobber': Property(default=False, kind=bool),
+    # Switch to clobber build dir before bot_update.
+    'clobber_all': Property(default=False, kind=bool),
+    # Additional configurations set for archiving builds to GS buckets for
+    # clusterfuzz. The mapping consists of "name", "bucket" and optional
+    # "bitness".
+    'clusterfuzz_archive': Property(default=None, kind=dict),
+    # Optional coverage setting. Set to "gcov" to use.
+    'coverage': Property(default=None, kind=str),
+    # Mapping of custom dependencies to sync (dependency name as in DEPS
+    # file -> deps url).
+    'custom_deps': Property(default=None, kind=dict),
+    # Optional list of default targets. If not specified the implicit "all"
+    # target will be built.
+    'default_targets': Property(default=None, kind=list),
+    # Switch to enable/disable swarming.
+    'enable_swarming': Property(default=None, kind=bool),
+    # Mapping of additional gclient variables to set (map name -> value).
+    'gclient_vars': Property(default=None, kind=dict),
+    # Optional path to a different MB config. The path must be relative to the
+    # V8 checkout and using forward slashes.
+    'mb_config_path': Property(default=None, kind=str),
+    # Name of a gclient custom_var to set to 'True'.
+    # TODO(machenbach): Deprecate single boolean variables, use gclient_vars.
+    'set_gclient_var': Property(default=None, kind=str),
+    # One of intel|arm|mips.
+    'target_arch': Property(default=None, kind=str),
+    # One of android|fuchsia|linux|mac|win.
+    'target_platform': Property(default=None, kind=str),
+    # Whether to track and upload build-dependencies stats.
+    'track_build_dependencies': Property(default=None, kind=bool),
+    # List of tester names to trigger.
+    'triggers': Property(default=None, kind=list),
+    # Whether to trigger the internal trigger proxy.
+    'triggers_proxy': Property(default=False, kind=bool),
+    # Whether to use goma for compilation.
+    'use_goma': Property(default=True, kind=bool),
 }
 
 
@@ -135,15 +135,7 @@
       if api.platform.is_win:
         api.chromium.taskkill()
 
-      if v8.generate_sanitizer_coverage:
-        # When collecting code coverage, we need to sync to the revision that
-        # fits to the patch for the line numbers to match.
-        revision = v8.calculate_patch_base_gerrit()
-        update_step = v8.checkout(
-            revision=revision, suffix='with patch base', clobber=clobber_all)
-      else:
-        update_step = v8.checkout(clobber=clobber_all)
-
+      update_step = v8.checkout(clobber=clobber_all)
       update_properties = update_step.json.output['properties']
 
       if update_properties.get('got_swarming_client_revision'):
@@ -963,28 +955,6 @@
       '""" to _path_/args.gn.\n'
   )
 
-  # Cover running sanitizer coverage.
-  yield (
-    api.v8.test(
-        'tryserver.v8',
-        'v8_foobar',
-        'sanitizer_coverage',
-        coverage='sanitizer',
-    ) +
-    api.step_data(
-        'build.lookup GN args', api.raw_io.stream_output(fake_gn_args_x64)) +
-    api.v8.test_spec_in_checkout(
-        'v8_foobar',
-        '{"tests": [{"name": "v8testing"}]}') +
-    api.post_process(Filter(
-        'Initialize coverage data',
-        'Merge coverage data',
-        'build.gsutil upload',
-        'Split coverage data',
-        'gsutil coverage data',
-    ))
-  )
-
   # Cover running gcov coverage.
   yield (
     api.v8.test(