[Instrumented libraries] Add .style.yapf

This ensures that `git cl format` formats changed Python files.
This CL also manually runs `yapf --style=pep8` on existing files.
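
For reference, the manual pass was roughly equivalent to the invocation
below (illustrative only; the exact command line used for this CL is not
recorded here):

  yapf --in-place --style=pep8 focal/scripts/download_build_install.py \
      scripts/build_and_package.py scripts/unpack_binaries.py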

R=thestig

Change-Id: Ic476e9aa7df39a1754fa40a241a0550ef877eb88
Bug: 1496000
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5009331
Reviewed-by: Lei Zhang <thestig@chromium.org>
Commit-Queue: Thomas Anderson <thomasanderson@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1220731}
NOKEYCHECK=True
GitOrigin-RevId: f522e702d09d4a0d9663624249d3019eb77cc08b
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..557fa7b
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = pep8
diff --git a/focal/scripts/download_build_install.py b/focal/scripts/download_build_install.py
index 9c0e50d..e1d8fdc 100755
--- a/focal/scripts/download_build_install.py
+++ b/focal/scripts/download_build_install.py
@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Downloads, builds (with instrumentation) and installs shared libraries."""
 
 import argparse
@@ -20,594 +19,642 @@
 
 SCRIPT_ABSOLUTE_PATH = os.path.dirname(os.path.abspath(__file__))
 
-def unescape_flags(s):
-  """Un-escapes build flags received from GN.
 
-  GN escapes build flags as if they are to be inserted directly into a command
-  line, wrapping each flag in double quotes. When flags are passed via
-  CFLAGS/LDFLAGS instead, double quotes must be dropped.
-  """
-  if not s:
-    return []
-  try:
-    return ast.literal_eval(s)
-  except (SyntaxError, ValueError):
-    return shlex.split(s)
+def unescape_flags(s):
+    """Un-escapes build flags received from GN.
+
+    GN escapes build flags as if they are to be inserted directly into a command
+    line, wrapping each flag in double quotes. When flags are passed via
+    CFLAGS/LDFLAGS instead, double quotes must be dropped.
+    """
+    if not s:
+        return []
+    try:
+        return ast.literal_eval(s)
+    except (SyntaxError, ValueError):
+        return shlex.split(s)
 
 
 def real_path(path_relative_to_gn):
-  """Returns the absolute path to a file.
+    """Returns the absolute path to a file.
 
-  GN generates paths relative to the build directory, which is one
-  level above the location of this script. This function converts them to
-  absolute paths.
-  """
-  return os.path.realpath(os.path.join(SCRIPT_ABSOLUTE_PATH, '..',
-                                       path_relative_to_gn))
+    GN generates paths relative to the build directory, which is one
+    level above the location of this script. This function converts them to
+    absolute paths.
+    """
+    return os.path.realpath(
+        os.path.join(SCRIPT_ABSOLUTE_PATH, "..", path_relative_to_gn))
 
 
 class InstrumentedPackageBuilder(object):
-  """Checks out and builds a single instrumented package."""
-  def __init__(self, args, clobber):
-    self._cc = args.cc
-    self._cxx = args.cxx
-    self._extra_configure_flags = unescape_flags(args.extra_configure_flags)
-    self._libdir = args.libdir
-    self._package = args.package
-    self._patches = [real_path(patch) for patch in args.patch]
-    self._pre_build = \
-        real_path(args.pre_build) if args.pre_build else None
-    self._verbose = args.verbose
-    self._clobber = clobber
-    self._working_dir = os.path.join(
-        real_path(args.intermediate_dir), self._package, '')
+    """Checks out and builds a single instrumented package."""
+    def __init__(self, args, clobber):
+        self._cc = args.cc
+        self._cxx = args.cxx
+        self._extra_configure_flags = unescape_flags(
+            args.extra_configure_flags)
+        self._libdir = args.libdir
+        self._package = args.package
+        self._patches = [real_path(patch) for patch in args.patch]
+        self._pre_build = real_path(args.pre_build) if args.pre_build else None
+        self._verbose = args.verbose
+        self._clobber = clobber
+        self._working_dir = os.path.join(real_path(args.intermediate_dir),
+                                         self._package, "")
 
-    product_dir = real_path(args.product_dir)
-    self._destdir = os.path.join(
-        product_dir, 'instrumented_libraries')
-    self._source_archives_dir = os.path.join(
-        product_dir, 'instrumented_libraries', 'sources', self._package)
+        product_dir = real_path(args.product_dir)
+        self._destdir = os.path.join(product_dir, "instrumented_libraries")
+        self._source_archives_dir = os.path.join(product_dir,
+                                                 "instrumented_libraries",
+                                                 "sources", self._package)
 
-    self._cflags = unescape_flags(args.cflags)
-    if args.sanitizer_ignorelist:
-      ignorelist_file = real_path(args.sanitizer_ignorelist)
-      self._cflags += ['-fsanitize-blacklist=%s' % ignorelist_file]
+        self._cflags = unescape_flags(args.cflags)
+        if args.sanitizer_ignorelist:
+            ignorelist_file = real_path(args.sanitizer_ignorelist)
+            self._cflags += ["-fsanitize-blacklist=%s" % ignorelist_file]
 
-    self._ldflags = unescape_flags(args.ldflags)
+        self._ldflags = unescape_flags(args.ldflags)
 
-    self.init_build_env(eval(args.env))
+        self.init_build_env(eval(args.env))
 
-    self._git_url = args.git_url
-    self._git_revision = args.git_revision
+        self._git_url = args.git_url
+        self._git_revision = args.git_revision
 
-    self._make_targets = unescape_flags(args.make_targets)
+        self._make_targets = unescape_flags(args.make_targets)
 
-    # Initialized later.
-    self._source_dir = ''
-    self._source_archives = ''
+        # Initialized later.
+        self._source_dir = ""
+        self._source_archives = ""
 
-  def init_build_env(self, args_env):
-    self._build_env = os.environ.copy()
+    def init_build_env(self, args_env):
+        self._build_env = os.environ.copy()
 
-    self._build_env.update(dict(args_env))
+        self._build_env.update(dict(args_env))
 
-    self._build_env['CC'] = self._cc
-    self._build_env['CXX'] = self._cxx
+        self._build_env["CC"] = self._cc
+        self._build_env["CXX"] = self._cxx
 
-    self._build_env['CFLAGS'] = ' '.join(self._cflags)
-    self._build_env['CXXFLAGS'] = ' '.join(self._cflags)
-    self._build_env['LDFLAGS'] = ' '.join(self._ldflags)
+        self._build_env["CFLAGS"] = " ".join(self._cflags)
+        self._build_env["CXXFLAGS"] = " ".join(self._cflags)
+        self._build_env["LDFLAGS"] = " ".join(self._ldflags)
 
-    # libappindicator1 needs this.
-    self._build_env['CSC'] = '/usr/bin/mono-csc'
+        # libappindicator1 needs this.
+        self._build_env["CSC"] = "/usr/bin/mono-csc"
 
-  def shell_call(self, command, env=None, cwd=None, ignore_ret_code=False, shell=False):
-    """Wrapper around subprocess.Popen().
+    def shell_call(self,
+                   command,
+                   env=None,
+                   cwd=None,
+                   ignore_ret_code=False,
+                   shell=False):
+        """Wrapper around subprocess.Popen().
 
-    Calls command with specific environment and verbosity using
-    subprocess.Popen().
-    """
-    child = subprocess.Popen(
-        command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
-        env=env, shell=shell, cwd=cwd)
-    stdout = child.communicate()[0].decode('utf-8')
-    if ignore_ret_code:
-      if self._verbose:
-        print(stdout)
-      return stdout
-    if self._verbose or child.returncode:
-      print(stdout)
-    if child.returncode:
-      raise Exception('Failed to run: %s' % command)
-    return stdout
+        Calls command with specific environment and verbosity using
+        subprocess.Popen().
+        """
+        child = subprocess.Popen(
+            command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            env=env,
+            shell=shell,
+            cwd=cwd,
+        )
+        stdout = child.communicate()[0].decode("utf-8")
+        if ignore_ret_code:
+            if self._verbose:
+                print(stdout)
+            return stdout
+        if self._verbose or child.returncode:
+            print(stdout)
+        if child.returncode:
+            raise Exception("Failed to run: %s" % command)
+        return stdout
 
-  def maybe_download_source(self):
-    """Checks out the source code (if needed).
+    def maybe_download_source(self):
+        """Checks out the source code (if needed).
 
-    Checks out the source code for the package, if required (i.e. unless running
-    in no-clobber mode). Initializes self._source_dir and self._source_archives.
-    """
-    command = ''
-    get_fresh_source = self._clobber or not os.path.exists(self._working_dir)
-    if get_fresh_source:
-      shutil.rmtree(self._working_dir, ignore_errors=True)
-      os.makedirs(self._working_dir)
+        Checks out the source code for the package, if required (i.e. unless running
+        in no-clobber mode). Initializes self._source_dir and self._source_archives.
+        """
+        command = ""
+        get_fresh_source = self._clobber or not os.path.exists(
+            self._working_dir)
+        if get_fresh_source:
+            shutil.rmtree(self._working_dir, ignore_errors=True)
+            os.makedirs(self._working_dir)
 
-      if self._git_url:
-        command = ['git', 'clone', self._git_url]
-        self.shell_call(command, cwd=self._working_dir)
-      else:
-        # Download one source package at a time, otherwise, there will
-        # be connection errors in gnutls_handshake().
-        lock = open('apt-source-lock', 'w')
-        fcntl.flock(lock, fcntl.LOCK_EX)
-        command = ['apt-get', 'source', self._package]
-        self.shell_call(command, cwd=self._working_dir)
-        fcntl.flock(lock, fcntl.LOCK_UN)
+            if self._git_url:
+                command = ["git", "clone", self._git_url]
+                self.shell_call(command, cwd=self._working_dir)
+            else:
+                # Download one source package at a time, otherwise, there will
+                # be connection errors in gnutls_handshake().
+                lock = open("apt-source-lock", "w")
+                fcntl.flock(lock, fcntl.LOCK_EX)
+                command = ["apt-get", "source", self._package]
+                self.shell_call(command, cwd=self._working_dir)
+                fcntl.flock(lock, fcntl.LOCK_UN)
 
-    (dirpath, dirnames, filenames) = next(os.walk(self._working_dir))
+        (dirpath, dirnames, filenames) = next(os.walk(self._working_dir))
 
-    if len(dirnames) != 1:
-      raise Exception( '`%s\' must create exactly one subdirectory.' % command)
-    self._source_component = dirnames[0]
-    self._source_dir = os.path.join(dirpath, self._source_component, '')
-    if self._git_url:
-      self.shell_call(['git', 'checkout', self._git_revision],
-                      cwd=self._source_dir)
-    else:
-      if len(filenames) == 0:
-        raise Exception('Can\'t find source files after `%s\'.' % command)
-      self._source_archives = \
-          [os.path.join(dirpath, filename) for filename in filenames]
+        if len(dirnames) != 1:
+            raise Exception("`%s' must create exactly one subdirectory." %
+                            command)
+        self._source_component = dirnames[0]
+        self._source_dir = os.path.join(dirpath, self._source_component, "")
+        if self._git_url:
+            self.shell_call(["git", "checkout", self._git_revision],
+                            cwd=self._source_dir)
+        else:
+            if len(filenames) == 0:
+                raise Exception("Can't find source files after `%s'." %
+                                command)
+            self._source_archives = [
+                os.path.join(dirpath, filename) for filename in filenames
+            ]
 
-    return get_fresh_source
+        return get_fresh_source
 
-  def patch_source(self):
-    for patch in self._patches:
-      self.shell_call(['patch', '-p1', '-i', patch], cwd=self._source_dir)
-    if self._pre_build:
-      self.shell_call([self._pre_build], cwd=self._source_dir)
+    def patch_source(self):
+        for patch in self._patches:
+            self.shell_call(["patch", "-p1", "-i", patch],
+                            cwd=self._source_dir)
+        if self._pre_build:
+            self.shell_call([self._pre_build], cwd=self._source_dir)
 
-  def copy_source_archives(self):
-    """Copies the downloaded source archives to the output dir.
+    def copy_source_archives(self):
+        """Copies the downloaded source archives to the output dir.
 
-    For license compliance purposes, every Chromium build that includes
-    instrumented libraries must include their full source code.
-    """
-    shutil.rmtree(self._source_archives_dir, ignore_errors=True)
-    os.makedirs(self._source_archives_dir)
-    if self._git_url:
-      dest = os.path.join(self._source_archives_dir, self._source_component)
-      shutil.copytree(self._source_dir, dest)
-    else:
-      for filename in self._source_archives:
-        shutil.copy(filename, self._source_archives_dir)
-    for patch in self._patches:
-      shutil.copy(patch, self._source_archives_dir)
+        For license compliance purposes, every Chromium build that includes
+        instrumented libraries must include their full source code.
+        """
+        shutil.rmtree(self._source_archives_dir, ignore_errors=True)
+        os.makedirs(self._source_archives_dir)
+        if self._git_url:
+            dest = os.path.join(self._source_archives_dir,
+                                self._source_component)
+            shutil.copytree(self._source_dir, dest)
+        else:
+            for filename in self._source_archives:
+                shutil.copy(filename, self._source_archives_dir)
+        for patch in self._patches:
+            shutil.copy(patch, self._source_archives_dir)
 
-  def download_build_install(self):
-    got_fresh_source = self.maybe_download_source()
-    if got_fresh_source:
-      self.patch_source()
-      self.copy_source_archives()
+    def download_build_install(self):
+        got_fresh_source = self.maybe_download_source()
+        if got_fresh_source:
+            self.patch_source()
+            self.copy_source_archives()
 
-    if not os.path.exists(self.dest_libdir()):
-      os.makedirs(self.dest_libdir())
+        if not os.path.exists(self.dest_libdir()):
+            os.makedirs(self.dest_libdir())
 
-    try:
-      self.build_and_install()
-    except Exception as exception:
-      print('ERROR: Failed to build package %s. Have you '
-            'run src/third_party/instrumented_libraries/scripts/'
-            'install-build-deps.sh?' % self._package)
-      raise
+        try:
+            self.build_and_install()
+        except Exception as exception:
+            print("ERROR: Failed to build package %s. Have you "
+                  "run src/third_party/instrumented_libraries/scripts/"
+                  "install-build-deps.sh?" % self._package)
+            raise
 
-    # Touch a text file to indicate package is installed.
-    stamp_file = os.path.join(self._destdir, '%s.txt' % self._package)
-    open(stamp_file, 'w').close()
+        # Touch a text file to indicate package is installed.
+        stamp_file = os.path.join(self._destdir, "%s.txt" % self._package)
+        open(stamp_file, "w").close()
 
-    # Remove downloaded package and generated temporary build files. Failed
-    # builds intentionally skip this step to help debug build failures.
-    if self._clobber:
-      self.shell_call(['rm', '-rf', self._working_dir])
+        # Remove downloaded package and generated temporary build files. Failed
+        # builds intentionally skip this step to help debug build failures.
+        if self._clobber:
+            self.shell_call(["rm", "-rf", self._working_dir])
 
-  def fix_rpaths(self, directory):
-    # TODO(eugenis): reimplement fix_rpaths.sh in Python.
-    script = real_path('scripts/fix_rpaths.sh')
-    self.shell_call([script, directory])
+    def fix_rpaths(self, directory):
+        # TODO(eugenis): reimplement fix_rpaths.sh in Python.
+        script = real_path("scripts/fix_rpaths.sh")
+        self.shell_call([script, directory])
 
-  def temp_dir(self):
-    """Returns the directory which will be passed to `make install'."""
-    return os.path.join(self._source_dir, 'debian', 'instrumented_build')
+    def temp_dir(self):
+        """Returns the directory which will be passed to `make install'."""
+        return os.path.join(self._source_dir, "debian", "instrumented_build")
 
-  def temp_libdir(self):
-    """Returns the directory under temp_dir() containing the DSOs."""
-    return os.path.join(self.temp_dir(), self._libdir)
+    def temp_libdir(self):
+        """Returns the directory under temp_dir() containing the DSOs."""
+        return os.path.join(self.temp_dir(), self._libdir)
 
-  def dest_libdir(self):
-    """Returns the final location of the DSOs."""
-    return os.path.join(self._destdir, self._libdir)
+    def dest_libdir(self):
+        """Returns the final location of the DSOs."""
+        return os.path.join(self._destdir, self._libdir)
 
-  def cleanup_after_install(self):
-    """Removes unneeded files in self.temp_libdir()."""
-    # .la files are not needed, nuke them.
-    # In case --no-static is not supported, nuke any static libraries we built.
-    self.shell_call(
-        'find %s -name *.la -or -name *.a | xargs rm -f' % self.temp_libdir(), shell=True)
-    # .pc files are not needed.
-    self.shell_call(['rm', '-rf', '%s/pkgconfig' % self.temp_libdir()])
+    def cleanup_after_install(self):
+        """Removes unneeded files in self.temp_libdir()."""
+        # .la files are not needed, nuke them.
+        # In case --no-static is not supported, nuke any static libraries we built.
+        self.shell_call(
+            "find %s -name *.la -or -name *.a | xargs rm -f" %
+            self.temp_libdir(),
+            shell=True,
+        )
+        # .pc files are not needed.
+        self.shell_call(["rm", "-rf", "%s/pkgconfig" % self.temp_libdir()])
 
-  def make(self, args, env=None, cwd=None, ignore_ret_code=False):
-    """Invokes `make'.
+    def make(self, args, env=None, cwd=None, ignore_ret_code=False):
+        """Invokes `make'.
 
-    Invokes `make' with the specified args, using self._build_env and
-    self._source_dir by default.
-    """
-    if cwd is None:
-      cwd = self._source_dir
-    if env is None:
-      env = self._build_env
-    self.shell_call(['make'] + args, env=env, cwd=cwd,
-                    ignore_ret_code=ignore_ret_code)
+        Invokes `make' with the specified args, using self._build_env and
+        self._source_dir by default.
+        """
+        if cwd is None:
+            cwd = self._source_dir
+        if env is None:
+            env = self._build_env
+        self.shell_call(["make"] + args,
+                        env=env,
+                        cwd=cwd,
+                        ignore_ret_code=ignore_ret_code)
 
-  def make_install(self, args, **kwargs):
-    """Invokes `make install'."""
-    self.make(['install'] + args, **kwargs)
+    def make_install(self, args, **kwargs):
+        """Invokes `make install'."""
+        self.make(["install"] + args, **kwargs)
 
-  def build_and_install(self):
-    """Builds and installs the DSOs.
+    def build_and_install(self):
+        """Builds and installs the DSOs.
 
-    Builds the package with ./configure + make, installs it to a temporary
-    location, then moves the relevant files to their permanent location.
-    """
-    if os.path.exists(os.path.join(self._source_dir, 'configure')):
-      configure_cmd = [
-        './configure',
-        '--libdir=/%s/' % self._libdir,
-      ] + self._extra_configure_flags
-      self.shell_call(configure_cmd, env=self._build_env, cwd=self._source_dir)
+        Builds the package with ./configure + make, installs it to a temporary
+        location, then moves the relevant files to their permanent location.
+        """
+        if os.path.exists(os.path.join(self._source_dir, "configure")):
+            configure_cmd = [
+                "./configure",
+                "--libdir=/%s/" % self._libdir,
+            ] + self._extra_configure_flags
+            self.shell_call(configure_cmd,
+                            env=self._build_env,
+                            cwd=self._source_dir)
 
-    # Some makefiles use BUILDROOT or INSTALL_ROOT instead of DESTDIR.
-    args = ['DESTDIR', 'BUILDROOT', 'INSTALL_ROOT']
-    make_args = ['%s=%s' % (name, self.temp_dir()) for name in args]
-    self.make(make_args + self._make_targets)
+        # Some makefiles use BUILDROOT or INSTALL_ROOT instead of DESTDIR.
+        args = ["DESTDIR", "BUILDROOT", "INSTALL_ROOT"]
+        make_args = ["%s=%s" % (name, self.temp_dir()) for name in args]
+        self.make(make_args + self._make_targets)
 
-    self.make_install(make_args)
+        self.make_install(make_args)
 
-    self.post_install()
+        self.post_install()
 
-  def post_install(self):
-    self.cleanup_after_install()
+    def post_install(self):
+        self.cleanup_after_install()
 
-    self.fix_rpaths(self.temp_libdir())
+        self.fix_rpaths(self.temp_libdir())
 
-    # Now move the contents of the temporary destdir to their final place.
-    # We only care for the contents of LIBDIR.
-    self.shell_call('cp %s/* %s/ -rdf' % (self.temp_libdir(),
-                                          self.dest_libdir()), shell=True)
+        # Now move the contents of the temporary destdir to their final place.
+        # We only care for the contents of LIBDIR.
+        self.shell_call("cp %s/* %s/ -rdf" %
+                        (self.temp_libdir(), self.dest_libdir()),
+                        shell=True)
 
 
 class DebianBuilder(InstrumentedPackageBuilder):
-  """Builds a package using Debian's build system.
+    """Builds a package using Debian's build system.
 
-  TODO(spang): Probably the rest of the packages should also use this method..
-  """
+    TODO(spang): Probably the rest of the packages should also use this method.
+    """
+    def init_build_env(self, args_env):
+        self._build_env = os.environ.copy()
 
-  def init_build_env(self, args_env):
-    self._build_env = os.environ.copy()
+        self._build_env.update(dict(args_env))
 
-    self._build_env.update(dict(args_env))
+        self._build_env["CC"] = self._cc
+        self._build_env["CXX"] = self._cxx
 
-    self._build_env['CC'] = self._cc
-    self._build_env['CXX'] = self._cxx
+        self._build_env["DEB_CFLAGS_APPEND"] = " ".join(self._cflags)
+        self._build_env["DEB_CXXFLAGS_APPEND"] = " ".join(self._cflags)
+        self._build_env["DEB_LDFLAGS_APPEND"] = " ".join(self._ldflags)
+        self._build_env["DEB_BUILD_OPTIONS"] = (
+            "nocheck notest nodoc nostrip parallel=%d" % os.cpu_count())
 
-    self._build_env['DEB_CFLAGS_APPEND'] = ' '.join(self._cflags)
-    self._build_env['DEB_CXXFLAGS_APPEND'] = ' '.join(self._cflags)
-    self._build_env['DEB_LDFLAGS_APPEND'] = ' '.join(self._ldflags)
-    self._build_env['DEB_BUILD_OPTIONS'] = \
-      'nocheck notest nodoc nostrip parallel=%d' % os.cpu_count()
+    def build_and_install(self):
+        self.build_debian_packages()
+        self.install_packaged_libs()
 
-  def build_and_install(self):
-    self.build_debian_packages()
-    self.install_packaged_libs()
+    def build_debian_packages(self):
+        configure_cmd = ["dpkg-buildpackage", "-B", "-uc"]
+        self.shell_call(configure_cmd,
+                        env=self._build_env,
+                        cwd=self._source_dir)
 
-  def build_debian_packages(self):
-    configure_cmd = ['dpkg-buildpackage', '-B', '-uc']
-    self.shell_call(configure_cmd, env=self._build_env, cwd=self._source_dir)
+    def install_packaged_libs(self):
+        for deb_file in self.get_deb_files():
+            self.shell_call(["dpkg-deb", "-x", deb_file, self.temp_dir()])
 
-  def install_packaged_libs(self):
-    for deb_file in self.get_deb_files():
-      self.shell_call(['dpkg-deb', '-x', deb_file, self.temp_dir()])
+        dpkg_arch_cmd = ["dpkg-architecture", "-qDEB_HOST_MULTIARCH"]
+        dpkg_arch = self.shell_call(dpkg_arch_cmd).strip()
+        lib_dirs = [
+            "usr/lib/%s" % dpkg_arch,
+            "lib/%s" % dpkg_arch,
+        ]
+        lib_paths = [
+            path for lib_dir in lib_dirs for path in glob.glob(
+                os.path.join(self.temp_dir(), lib_dir, "*.so.*"))
+        ]
+        for lib_path in lib_paths:
+            dest_path = os.path.join(self.dest_libdir(),
+                                     os.path.basename(lib_path))
+            try:
+                os.unlink(dest_path)
+            except OSError as exception:
+                if exception.errno != errno.ENOENT:
+                    raise
+            if os.path.islink(lib_path):
+                if self._verbose:
+                    print("linking %s" % os.path.basename(lib_path))
+                os.symlink(os.readlink(lib_path), dest_path)
+            elif os.path.isfile(lib_path):
+                if self._verbose:
+                    print("copying %s" % os.path.basename(lib_path))
+                shutil.copy(lib_path, dest_path)
 
-    dpkg_arch_cmd = ['dpkg-architecture', '-qDEB_HOST_MULTIARCH']
-    dpkg_arch = self.shell_call(dpkg_arch_cmd).strip()
-    lib_dirs = [
-      "usr/lib/%s" % dpkg_arch,
-      "lib/%s" % dpkg_arch,
-    ]
-    lib_paths = [path for lib_dir in lib_dirs for path in
-                 glob.glob(os.path.join(self.temp_dir(), lib_dir, "*.so.*"))]
-    for lib_path in lib_paths:
-      dest_path = os.path.join(self.dest_libdir(), os.path.basename(lib_path))
-      try:
-        os.unlink(dest_path)
-      except OSError as exception:
-        if exception.errno != errno.ENOENT:
-          raise
-      if os.path.islink(lib_path):
-        if self._verbose:
-          print('linking %s' % os.path.basename(lib_path))
-        os.symlink(os.readlink(lib_path), dest_path)
-      elif os.path.isfile(lib_path):
-        if self._verbose:
-          print('copying %s' % os.path.basename(lib_path))
-        shutil.copy(lib_path, dest_path)
+    def get_deb_files(self):
+        deb_files = []
+        files_file = os.path.join(self._source_dir, "debian/files")
 
+        for line in open(files_file, "r").read().splitlines():
+            filename, category, section = line.split(" ")
+            if not filename.endswith(".deb"):
+                continue
+            pathname = os.path.join(self._source_dir, "..", filename)
+            deb_files.append(pathname)
 
-  def get_deb_files(self):
-    deb_files = []
-    files_file = os.path.join(self._source_dir, 'debian/files')
-
-    for line in open(files_file, 'r').read().splitlines():
-      filename, category, section = line.split(' ')
-      if not filename.endswith('.deb'):
-        continue
-      pathname = os.path.join(self._source_dir, '..', filename)
-      deb_files.append(pathname)
-
-    return deb_files
+        return deb_files
 
 
 class LibcapBuilder(InstrumentedPackageBuilder):
-  def build_and_install(self):
-    # libcap2 doesn't have a configure script
-    build_args = ['CC', 'CXX', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS']
-    make_args = [
-        '%s=%s' % (name, self._build_env[name]) for name in build_args
-    ]
-    self.make(make_args)
+    def build_and_install(self):
+        # libcap2 doesn't have a configure script
+        build_args = ["CC", "CXX", "CFLAGS", "CXXFLAGS", "LDFLAGS"]
+        make_args = [
+            "%s=%s" % (name, self._build_env[name]) for name in build_args
+        ]
+        self.make(make_args)
 
-    install_args = [
-        'DESTDIR=%s' % self.temp_dir(),
-        'lib=%s' % self._libdir,
-        # Skip a step that requires sudo.
-        'RAISE_SETFCAP=no'
-    ]
-    self.make_install(install_args)
+        install_args = [
+            "DESTDIR=%s" % self.temp_dir(),
+            "lib=%s" % self._libdir,
+            # Skip a step that requires sudo.
+            "RAISE_SETFCAP=no",
+        ]
+        self.make_install(install_args)
 
-    self.cleanup_after_install()
+        self.cleanup_after_install()
 
-    self.fix_rpaths(self.temp_libdir())
+        self.fix_rpaths(self.temp_libdir())
 
-    # Now move the contents of the temporary destdir to their final place.
-    # We only care for the contents of LIBDIR.
-    self.shell_call('cp %s/* %s/ -rdf' % (self.temp_libdir(),
-                                          self.dest_libdir()), shell=True)
+        # Now move the contents of the temporary destdir to their final place.
+        # We only care for the contents of LIBDIR.
+        self.shell_call("cp %s/* %s/ -rdf" %
+                        (self.temp_libdir(), self.dest_libdir()),
+                        shell=True)
 
 
 class LibcurlBuilder(DebianBuilder):
-  def build_and_install(self):
-    super().build_and_install()
-    # The libcurl packages don't specify a default libcurl.so, but this is
-    # required since libcurl.so is dlopen()ed by crashpad.  Normally,
-    # libcurl.so is installed by one of libcurl-{gnutls,nss,openssl}-dev.
-    # Doing a standalone instrumented build of a dev package is tricky,
-    # so we manually symlink libcurl.so instead.
-    libcurl_so = os.path.join(self.dest_libdir(), 'libcurl.so')
-    if not os.path.exists(libcurl_so):
-      os.symlink('libcurl.so.4', libcurl_so)
+    def build_and_install(self):
+        super().build_and_install()
+        # The libcurl packages don't specify a default libcurl.so, but this is
+        # required since libcurl.so is dlopen()ed by crashpad.  Normally,
+        # libcurl.so is installed by one of libcurl-{gnutls,nss,openssl}-dev.
+        # Doing a standalone instrumented build of a dev package is tricky,
+        # so we manually symlink libcurl.so instead.
+        libcurl_so = os.path.join(self.dest_libdir(), "libcurl.so")
+        if not os.path.exists(libcurl_so):
+            os.symlink("libcurl.so.4", libcurl_so)
 
 
 class Libpci3Builder(InstrumentedPackageBuilder):
-  def package_version(self):
-    """Guesses libpci3 version from source directory name."""
-    dir_name = os.path.split(os.path.normpath(self._source_dir))[-1]
-    match = re.match('pciutils-(\d+\.\d+\.\d+)', dir_name)
-    if match is None:
-      raise Exception(
-          'Unable to guess libpci3 version from directory name: %s' %  dir_name)
-    return match.group(1)
+    def package_version(self):
+        """Guesses libpci3 version from source directory name."""
+        dir_name = os.path.split(os.path.normpath(self._source_dir))[-1]
+        match = re.match(r"pciutils-(\d+\.\d+\.\d+)", dir_name)
+        if match is None:
+            raise Exception(
+                "Unable to guess libpci3 version from directory name: %s" %
+                dir_name)
+        return match.group(1)
 
-  def temp_libdir(self):
-    # DSOs have to be picked up from <source_dir>/lib, since `make install'
-    # doesn't actualy install them anywhere.
-    return os.path.join(self._source_dir, 'lib')
+    def temp_libdir(self):
+        # DSOs have to be picked up from <source_dir>/lib, since `make install'
+        # doesn't actually install them anywhere.
+        return os.path.join(self._source_dir, "lib")
 
-  def build_and_install(self):
-    # pciutils doesn't have a configure script
-    # This build process follows debian/rules.
-    self.shell_call(['mkdir', '-p', '%s-udeb/usr/bin' % self.temp_dir()])
+    def build_and_install(self):
+        # pciutils doesn't have a configure script
+        # This build process follows debian/rules.
+        self.shell_call(["mkdir", "-p", "%s-udeb/usr/bin" % self.temp_dir()])
 
-    build_args = ['CC', 'CXX', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS']
-    make_args = [
-        '%s=%s' % (name, self._build_env[name]) for name in build_args
-    ]
-    make_args += [
-        'LIBDIR=/%s/' % self._libdir,
-        'PREFIX=/usr',
-        'SBINDIR=/usr/bin',
-        'IDSDIR=/usr/share/misc',
-        'SHARED=yes',
-        # pciutils fails to build due to unresolved libkmod symbols. The binary
-        # package has no dependencies on libkmod, so it looks like it was
-        # actually built without libkmod support.
-       'LIBKMOD=no',
-    ]
-    self.make(make_args)
+        build_args = ["CC", "CXX", "CFLAGS", "CXXFLAGS", "LDFLAGS"]
+        make_args = [
+            "%s=%s" % (name, self._build_env[name]) for name in build_args
+        ]
+        make_args += [
+            "LIBDIR=/%s/" % self._libdir,
+            "PREFIX=/usr",
+            "SBINDIR=/usr/bin",
+            "IDSDIR=/usr/share/misc",
+            "SHARED=yes",
+            # pciutils fails to build due to unresolved libkmod symbols. The binary
+            # package has no dependencies on libkmod, so it looks like it was
+            # actually built without libkmod support.
+            "LIBKMOD=no",
+        ]
+        self.make(make_args)
 
-    # `make install' is not needed.
-    self.fix_rpaths(self.temp_libdir())
+        # `make install' is not needed.
+        self.fix_rpaths(self.temp_libdir())
 
-    # Now install the DSOs to their final place.
-    self.shell_call(
-        'install -m 644 %s/libpci.so* %s' % (self.temp_libdir(),
-                                             self.dest_libdir()), shell=True)
-    self.shell_call(
-        'ln -sf libpci.so.%s %s/libpci.so.3' % (self.package_version(),
-                                                self.dest_libdir()), shell=True)
+        # Now install the DSOs to their final place.
+        self.shell_call(
+            "install -m 644 %s/libpci.so* %s" %
+            (self.temp_libdir(), self.dest_libdir()),
+            shell=True,
+        )
+        self.shell_call(
+            "ln -sf libpci.so.%s %s/libpci.so.3" %
+            (self.package_version(), self.dest_libdir()),
+            shell=True,
+        )
 
 
 class MesonBuilder(InstrumentedPackageBuilder):
-  def build_and_install(self):
-    meson_cmd = [
-      'meson',
-      'build',
-      '.',
-      '--prefix', '/',
-      '--libdir', self._libdir,
-      '--sbindir', 'bin',
-      '-Db_lundef=false',
-    ] + self._extra_configure_flags
+    def build_and_install(self):
+        meson_cmd = [
+            "meson",
+            "build",
+            ".",
+            "--prefix",
+            "/",
+            "--libdir",
+            self._libdir,
+            "--sbindir",
+            "bin",
+            "-Db_lundef=false",
+        ] + self._extra_configure_flags
 
-    self.shell_call(meson_cmd,
-                    env=self._build_env, cwd=self._source_dir)
-    self.shell_call(['ninja', '-C', 'build', 'install'],
-                    {**self._build_env, 'DESTDIR': self.temp_dir()},
-                    cwd=self._source_dir)
-    self.post_install()
+        self.shell_call(meson_cmd, env=self._build_env, cwd=self._source_dir)
+        self.shell_call(
+            ["ninja", "-C", "build", "install"],
+            {
+                **self._build_env, "DESTDIR": self.temp_dir()
+            },
+            cwd=self._source_dir,
+        )
+        self.post_install()
 
 
 class CmakeBuilder(InstrumentedPackageBuilder):
-  def build_and_install(self):
-    cmake_cmd = [
-      'cmake',
-      '.',
-      '-DCMAKE_INSTALL_PREFIX=/usr',
-      '-DCMAKE_INSTALL_LIBDIR=/%s/' % self._libdir,
-    ] + self._extra_configure_flags
-    self.shell_call(cmake_cmd, env=self._build_env,
-                    cwd=self._source_dir)
+    def build_and_install(self):
+        cmake_cmd = [
+            "cmake",
+            ".",
+            "-DCMAKE_INSTALL_PREFIX=/usr",
+            "-DCMAKE_INSTALL_LIBDIR=/%s/" % self._libdir,
+        ] + self._extra_configure_flags
+        self.shell_call(cmake_cmd, env=self._build_env, cwd=self._source_dir)
 
-    args = ['DESTDIR', 'BUILDROOT', 'INSTALL_ROOT']
-    make_args = ['%s=%s' % (name, self.temp_dir()) for name in args]
-    self.make(make_args)
-    self.make_install(make_args)
+        args = ["DESTDIR", "BUILDROOT", "INSTALL_ROOT"]
+        make_args = ["%s=%s" % (name, self.temp_dir()) for name in args]
+        self.make(make_args)
+        self.make_install(make_args)
 
-    self.post_install()
+        self.post_install()
 
 
 class NSSBuilder(InstrumentedPackageBuilder):
-  def build_and_install(self):
-    # NSS uses a build system that's different from configure/make/install. All
-    # flags must be passed as arguments to make.
-    make_args = [
-        # Do an optimized build.
-        'BUILD_OPT=1',
-        # CFLAGS/CXXFLAGS should not be used, as doing so overrides the flags in
-        # the makefile completely. The only way to append our flags is to tack
-        # them onto CC/CXX.
-        'CC="%s %s"' % (self._build_env['CC'], self._build_env['CFLAGS']),
-        'CXX="%s %s"' % (self._build_env['CXX'], self._build_env['CXXFLAGS']),
-        # We need to override ZDEFS_FLAG at least to avoid -Wl,-z,defs, which
-        # is not compatible with sanitizers. We also need some way to pass
-        # LDFLAGS without overriding the defaults. Conveniently, ZDEF_FLAG is
-        # always appended to link flags when building NSS on Linux, so we can
-        # just add our LDFLAGS here.
-        'ZDEFS_FLAG="-Wl,-z,nodefs %s"' % self._build_env['LDFLAGS'],
-        'NSPR_INCLUDE_DIR=/usr/include/nspr',
-        'NSPR_LIB_DIR=%s' % self.dest_libdir(),
-        'NSS_ENABLE_ECC=1'
-    ]
-    if platform.architecture()[0] == '64bit':
-      make_args.append('USE_64=1')
+    def build_and_install(self):
+        # NSS uses a build system that's different from configure/make/install. All
+        # flags must be passed as arguments to make.
+        make_args = [
+            # Do an optimized build.
+            "BUILD_OPT=1",
+            # CFLAGS/CXXFLAGS should not be used, as doing so overrides the flags in
+            # the makefile completely. The only way to append our flags is to tack
+            # them onto CC/CXX.
+            'CC="%s %s"' % (self._build_env["CC"], self._build_env["CFLAGS"]),
+            'CXX="%s %s"' %
+            (self._build_env["CXX"], self._build_env["CXXFLAGS"]),
+            # We need to override ZDEFS_FLAG at least to avoid -Wl,-z,defs, which
+            # is not compatible with sanitizers. We also need some way to pass
+            # LDFLAGS without overriding the defaults. Conveniently, ZDEF_FLAG is
+            # always appended to link flags when building NSS on Linux, so we can
+            # just add our LDFLAGS here.
+            'ZDEFS_FLAG="-Wl,-z,nodefs %s"' % self._build_env["LDFLAGS"],
+            "NSPR_INCLUDE_DIR=/usr/include/nspr",
+            "NSPR_LIB_DIR=%s" % self.dest_libdir(),
+            "NSS_ENABLE_ECC=1",
+        ]
+        if platform.architecture()[0] == "64bit":
+            make_args.append("USE_64=1")
 
-    # Make sure we don't override the default flags in the makefile.
-    for variable in ['CFLAGS', 'CXXFLAGS', 'LDFLAGS']:
-      del self._build_env[variable]
+        # Make sure we don't override the default flags in the makefile.
+        for variable in ["CFLAGS", "CXXFLAGS", "LDFLAGS"]:
+            del self._build_env[variable]
 
-    # Hardcoded paths.
-    temp_dir = os.path.join(self._source_dir, 'nss')
-    temp_libdir = os.path.join(temp_dir, 'lib')
+        # Hardcoded paths.
+        temp_dir = os.path.join(self._source_dir, "nss")
+        temp_libdir = os.path.join(temp_dir, "lib")
 
-    # The build happens in <source_dir>/nss.  Building fails after all
-    # the required DSOs have been built, so ignore the error.
-    self.make(make_args, cwd=temp_dir, ignore_ret_code=True)
+        # The build happens in <source_dir>/nss.  Building fails after all
+        # the required DSOs have been built, so ignore the error.
+        self.make(make_args, cwd=temp_dir, ignore_ret_code=True)
 
-    self.fix_rpaths(temp_libdir)
+        self.fix_rpaths(temp_libdir)
 
-    # 'make install' is not supported. Copy the DSOs manually.
-    for (dirpath, dirnames, filenames) in os.walk(temp_libdir):
-      for filename in filenames:
-        if filename.endswith('.so'):
-          full_path = os.path.join(dirpath, filename)
-          if self._verbose:
-            print('download_build_install.py: installing ' + full_path)
-          shutil.copy(full_path, self.dest_libdir())
+        # 'make install' is not supported. Copy the DSOs manually.
+        for (dirpath, dirnames, filenames) in os.walk(temp_libdir):
+            for filename in filenames:
+                if filename.endswith(".so"):
+                    full_path = os.path.join(dirpath, filename)
+                    if self._verbose:
+                        print("download_build_install.py: installing " +
+                              full_path)
+                    shutil.copy(full_path, self.dest_libdir())
 
 
 class StubBuilder(InstrumentedPackageBuilder):
-  def download_build_install(self):
-    self._touch(os.path.join(self._destdir, '%s.txt' % self._package))
-    self.shell_call(['mkdir', '-p', self.dest_libdir()])
-    self._touch(os.path.join(self.dest_libdir(), '%s.so.0' % self._package))
+    def download_build_install(self):
+        self._touch(os.path.join(self._destdir, "%s.txt" % self._package))
+        self.shell_call(["mkdir", "-p", self.dest_libdir()])
+        self._touch(os.path.join(self.dest_libdir(),
+                                 "%s.so.0" % self._package))
 
-  def _touch(self, path):
-    with open(path, 'w'):
-      pass
+    def _touch(self, path):
+        with open(path, "w"):
+            pass
 
 
 def main():
-  parser = argparse.ArgumentParser(
-      description='Download, build and install an instrumented package.')
+    parser = argparse.ArgumentParser(
+        description="Download, build and install an instrumented package.")
 
-  parser.add_argument('-p', '--package', required=True)
-  parser.add_argument(
-      '-i', '--product-dir', default='.',
-      help='Relative path to the directory with chrome binaries')
-  parser.add_argument(
-      '-m', '--intermediate-dir', default='.',
-      help='Relative path to the directory for temporary build files')
-  parser.add_argument('--extra-configure-flags', default='')
-  parser.add_argument('--cflags', default='')
-  parser.add_argument('--ldflags', default='')
-  parser.add_argument('-v', '--verbose', action='store_true')
-  parser.add_argument('--cc')
-  parser.add_argument('--cxx')
-  parser.add_argument('--patch', nargs='*', action='extend', default=[])
-  # This should be a shell script to run before building specific libraries.
-  # This will be run after applying the patches above.
-  parser.add_argument('--pre-build', default='')
-  parser.add_argument('--build-method', default='destdir')
-  parser.add_argument('--sanitizer-ignorelist', default='')
-  # The LIBDIR argument to configure/make.
-  parser.add_argument('--libdir', default='lib')
-  parser.add_argument('--env', default='')
-  parser.add_argument('--git-url', default='')
-  parser.add_argument('--git-revision', default='')
-  parser.add_argument('--make-targets', default='')
+    parser.add_argument("-p", "--package", required=True)
+    parser.add_argument(
+        "-i",
+        "--product-dir",
+        default=".",
+        help="Relative path to the directory with chrome binaries",
+    )
+    parser.add_argument(
+        "-m",
+        "--intermediate-dir",
+        default=".",
+        help="Relative path to the directory for temporary build files",
+    )
+    parser.add_argument("--extra-configure-flags", default="")
+    parser.add_argument("--cflags", default="")
+    parser.add_argument("--ldflags", default="")
+    parser.add_argument("-v", "--verbose", action="store_true")
+    parser.add_argument("--cc")
+    parser.add_argument("--cxx")
+    parser.add_argument("--patch", nargs="*", action="extend", default=[])
+    # This should be a shell script to run before building specific libraries.
+    # This will be run after applying the patches above.
+    parser.add_argument("--pre-build", default="")
+    parser.add_argument("--build-method", default="destdir")
+    parser.add_argument("--sanitizer-ignorelist", default="")
+    # The LIBDIR argument to configure/make.
+    parser.add_argument("--libdir", default="lib")
+    parser.add_argument("--env", default="")
+    parser.add_argument("--git-url", default="")
+    parser.add_argument("--git-revision", default="")
+    parser.add_argument("--make-targets", default="")
 
-  # Ignore all empty arguments because in several cases gn passes them to the
-  # script, but ArgumentParser treats them as positional arguments instead of
-  # ignoring (and doesn't have such options).
-  args = parser.parse_args([arg for arg in sys.argv[1:] if len(arg) != 0])
+    # Ignore all empty arguments because in several cases gn passes them to the
+    # script, but ArgumentParser treats them as positional arguments instead of
+    # ignoring (and doesn't have such options).
+    args = parser.parse_args([arg for arg in sys.argv[1:] if len(arg) != 0])
 
-  # Clobber by default, unless the developer wants to hack on the package's
-  # source code.
-  clobber = \
-        (os.environ.get('INSTRUMENTED_LIBRARIES_NO_CLOBBER', '') != '1')
+    # Clobber by default, unless the developer wants to hack on the package's
+    # source code.
+    clobber = os.environ.get("INSTRUMENTED_LIBRARIES_NO_CLOBBER", "") != "1"
 
-  if args.build_method == 'destdir':
-    builder = InstrumentedPackageBuilder(args, clobber)
-  elif args.build_method == 'custom_nss':
-    builder = NSSBuilder(args, clobber)
-  elif args.build_method == 'custom_libcap':
-    builder = LibcapBuilder(args, clobber)
-  elif args.build_method == 'custom_libcurl':
-    builder = LibcurlBuilder(args, clobber)
-  elif args.build_method == 'custom_libpci3':
-    builder = Libpci3Builder(args, clobber)
-  elif args.build_method == 'debian':
-    builder = DebianBuilder(args, clobber)
-  elif args.build_method == 'meson':
-    builder = MesonBuilder(args, clobber)
-  elif args.build_method == 'cmake':
-    builder = CmakeBuilder(args, clobber)
-  elif args.build_method == 'stub':
-    builder = StubBuilder(args, clobber)
-  else:
-    raise Exception('Unrecognized build method: %s' % args.build_method)
+    if args.build_method == "destdir":
+        builder = InstrumentedPackageBuilder(args, clobber)
+    elif args.build_method == "custom_nss":
+        builder = NSSBuilder(args, clobber)
+    elif args.build_method == "custom_libcap":
+        builder = LibcapBuilder(args, clobber)
+    elif args.build_method == "custom_libcurl":
+        builder = LibcurlBuilder(args, clobber)
+    elif args.build_method == "custom_libpci3":
+        builder = Libpci3Builder(args, clobber)
+    elif args.build_method == "debian":
+        builder = DebianBuilder(args, clobber)
+    elif args.build_method == "meson":
+        builder = MesonBuilder(args, clobber)
+    elif args.build_method == "cmake":
+        builder = CmakeBuilder(args, clobber)
+    elif args.build_method == "stub":
+        builder = StubBuilder(args, clobber)
+    else:
+        raise Exception("Unrecognized build method: %s" % args.build_method)
 
-  builder.download_build_install()
+    builder.download_build_install()
 
-if __name__ == '__main__':
-  main()
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/build_and_package.py b/scripts/build_and_package.py
index fe1d0bf..7229523 100755
--- a/scripts/build_and_package.py
+++ b/scripts/build_and_package.py
@@ -10,125 +10,133 @@
 import subprocess
 import tarfile
 
-
 BUILD_TYPES = {
-    'msan-no-origins': [
-        'is_msan = true',
-        'msan_track_origins = 0',
+    "msan-no-origins": [
+        "is_msan = true",
+        "msan_track_origins = 0",
     ],
-    'msan-chained-origins': [
-        'is_msan = true',
-        'msan_track_origins = 2',
+    "msan-chained-origins": [
+        "is_msan = true",
+        "msan_track_origins = 2",
     ],
 }
 
 
 class Error(Exception):
-  pass
+    pass
 
 
 class IncorrectReleaseError(Error):
-  pass
+    pass
 
 
 def _get_release():
-  return subprocess.check_output(['lsb_release', '-cs']).decode('utf-8').strip()
+    return subprocess.check_output(["lsb_release",
+                                    "-cs"]).decode("utf-8").strip()
 
 
 def _tar_filter(tar_info):
-  if tar_info.name.endswith('.txt'):
-    return None
-  return tar_info
+    if tar_info.name.endswith(".txt"):
+        return None
+    return tar_info
 
 
 def build_libraries(build_type, ubuntu_release, jobs, use_goma):
-  build_dir = 'out/Instrumented-%s' % build_type
-  if not os.path.exists(build_dir):
-    os.makedirs(build_dir)
+    build_dir = "out/Instrumented-%s" % build_type
+    if not os.path.exists(build_dir):
+        os.makedirs(build_dir)
 
-  gn_args = [
-      'is_debug = false',
-      'use_goma = %s' % str(use_goma).lower(),
-      'use_locally_built_instrumented_libraries = true',
-      'instrumented_libraries_release = "%s"' % ubuntu_release,
-  ] + BUILD_TYPES[build_type]
-  with open(os.path.join(build_dir, 'args.gn'), 'w') as f:
-    f.write('\n'.join(gn_args) + '\n')
-  subprocess.check_call(['gn', 'gen', build_dir, '--check'])
-  subprocess.check_call([
-      'ninja',
-      '-j%d' % jobs, '-C', build_dir,
-      'third_party/instrumented_libraries/%s:locally_built' % ubuntu_release
-  ])
-  with tarfile.open('%s.tgz' % build_type, mode='w:gz') as f:
-    f.add(
-        '%s/instrumented_libraries/lib' % build_dir,
-        arcname='lib',
-        filter=_tar_filter)
-    f.add(
-        '%s/instrumented_libraries/sources' % build_dir,
-        arcname='sources',
-        filter=_tar_filter)
+    gn_args = [
+        "is_debug = false",
+        "use_goma = %s" % str(use_goma).lower(),
+        "use_locally_built_instrumented_libraries = true",
+        'instrumented_libraries_release = "%s"' % ubuntu_release,
+    ] + BUILD_TYPES[build_type]
+    with open(os.path.join(build_dir, "args.gn"), "w") as f:
+        f.write("\n".join(gn_args) + "\n")
+    subprocess.check_call(["gn", "gen", build_dir, "--check"])
+    subprocess.check_call([
+        "ninja",
+        "-j%d" % jobs,
+        "-C",
+        build_dir,
+        "third_party/instrumented_libraries/%s:locally_built" % ubuntu_release,
+    ])
+    with tarfile.open("%s.tgz" % build_type, mode="w:gz") as f:
+        f.add(
+            "%s/instrumented_libraries/lib" % build_dir,
+            arcname="lib",
+            filter=_tar_filter,
+        )
+        f.add(
+            "%s/instrumented_libraries/sources" % build_dir,
+            arcname="sources",
+            filter=_tar_filter,
+        )
 
 
 def main():
-  parser = argparse.ArgumentParser(
-      description=__doc__,
-      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-  parser.add_argument(
-      '--jobs',
-      '-j',
-      type=int,
-      default=8,
-      help='the default number of jobs to use when running ninja')
-  parser.add_argument(
-      '--parallel',
-      action='store_true',
-      default=False,
-      help='whether to run all instrumented builds in parallel')
-  parser.add_argument(
-      '--use_goma',
-      action='store_true',
-      default=False,
-      help='whether to use goma to compile')
-  parser.add_argument(
-      'build_type',
-      nargs='*',
-      default='all',
-      choices=list(BUILD_TYPES.keys()) + ['all'],
-      help='the type of instrumented library to build')
-  parser.add_argument(
-      'release', help='the name of the Ubuntu release to build with')
-  args = parser.parse_args()
-  if args.build_type == 'all' or 'all' in args.build_type:
-    args.build_type = BUILD_TYPES.keys()
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument(
+        "--jobs",
+        "-j",
+        type=int,
+        default=8,
+        help="the default number of jobs to use when running ninja",
+    )
+    parser.add_argument(
+        "--parallel",
+        action="store_true",
+        default=False,
+        help="whether to run all instrumented builds in parallel",
+    )
+    parser.add_argument(
+        "--use_goma",
+        action="store_true",
+        default=False,
+        help="whether to use goma to compile",
+    )
+    parser.add_argument(
+        "build_type",
+        nargs="*",
+        default="all",
+        choices=list(BUILD_TYPES.keys()) + ["all"],
+        help="the type of instrumented library to build",
+    )
+    parser.add_argument("release",
+                        help="the name of the Ubuntu release to build with")
+    args = parser.parse_args()
+    if args.build_type == "all" or "all" in args.build_type:
+        args.build_type = BUILD_TYPES.keys()
 
-  if args.release != _get_release():
-    raise IncorrectReleaseError(
-        'trying to build for %s but the current release is %s' %
-        (args.release, _get_release()))
-  build_types = sorted(set(args.build_type))
-  if args.parallel:
-    procs = []
+    if args.release != _get_release():
+        raise IncorrectReleaseError(
+            "trying to build for %s but the current release is %s" %
+            (args.release, _get_release()))
+    build_types = sorted(set(args.build_type))
+    if args.parallel:
+        procs = []
+        for build_type in build_types:
+            proc = multiprocessing.Process(
+                target=build_libraries,
+                args=(build_type, args.release, args.jobs, args.use_goma),
+            )
+            proc.start()
+            procs.append(proc)
+        for proc in procs:
+            proc.join()
+    else:
+        for build_type in build_types:
+            build_libraries(build_type, args.release, args.jobs, args.use_goma)
+    print("To upload, run:")
     for build_type in build_types:
-      proc = multiprocessing.Process(
-          target=build_libraries,
-          args=(build_type, args.release, args.jobs, args.use_goma))
-      proc.start()
-      procs.append(proc)
-    for proc in procs:
-      proc.join()
-  else:
-    for build_type in build_types:
-      build_libraries(build_type, args.release, args.jobs, args.use_goma)
-  print('To upload, run:')
-  for build_type in build_types:
-    print(
-        'upload_to_google_storage.py -b '
-        'chromium-instrumented-libraries %s-%s.tgz' %
-        (build_type, args.release))
-  print('You should then commit the resulting .sha1 files.')
+        print("upload_to_google_storage.py -b "
+              "chromium-instrumented-libraries %s-%s.tgz" %
+              (build_type, args.release))
+    print("You should then commit the resulting .sha1 files.")
 
 
-if __name__ == '__main__':
-  main()
+if __name__ == "__main__":
+    main()
diff --git a/scripts/unpack_binaries.py b/scripts/unpack_binaries.py
index 0f791d1..d485d4b 100755
--- a/scripts/unpack_binaries.py
+++ b/scripts/unpack_binaries.py
@@ -2,7 +2,6 @@
 # Copyright 2015 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unpacks pre-built sanitizer-instrumented third-party libraries."""
 
 import os
@@ -12,29 +11,33 @@
 
 
 def get_archive_name(archive_prefix, release):
-  return '%s-%s.tgz' % (archive_prefix, release)
+    return "%s-%s.tgz" % (archive_prefix, release)
 
 
 def main(archive_prefix, release, archive_dir, target_dir, stamp_dir=None):
-  shutil.rmtree(target_dir, ignore_errors=True)
+    shutil.rmtree(target_dir, ignore_errors=True)
 
-  os.mkdir(target_dir)
-  subprocess.check_call([
-      'tar',
-      '-zxf',
-      os.path.join(archive_dir, get_archive_name(archive_prefix, release)),
-      '-C',
-      target_dir])
-  stamp_file = os.path.join(stamp_dir or target_dir, '%s.txt' % archive_prefix)
-  open(stamp_file, 'w').close()
+    os.mkdir(target_dir)
+    subprocess.check_call([
+        "tar",
+        "-zxf",
+        os.path.join(archive_dir, get_archive_name(archive_prefix, release)),
+        "-C",
+        target_dir,
+    ])
+    stamp_file = os.path.join(stamp_dir or target_dir,
+                              "%s.txt" % archive_prefix)
+    open(stamp_file, "w").close()
 
-  if stamp_dir:
-    with open(os.path.join(stamp_dir, '%s.d' % archive_prefix), 'w') as f:
-      f.write('%s: %s' % (
-          stamp_file, os.path.join(archive_dir,
-                                   get_archive_name(archive_prefix, release))))
-  return 0
+    if stamp_dir:
+        with open(os.path.join(stamp_dir, "%s.d" % archive_prefix), "w") as f:
+            f.write("%s: %s" % (
+                stamp_file,
+                os.path.join(archive_dir,
+                             get_archive_name(archive_prefix, release)),
+            ))
+    return 0
 
 
-if __name__ == '__main__':
-  sys.exit(main(*sys.argv[1:]))
+if __name__ == "__main__":
+    sys.exit(main(*sys.argv[1:]))