Delete buildtools

Also add a README file that points to the new location.

BUG=927867
R=thakis
Change-Id: I7885490e3e24f894bca50aa219708bbdd2f8aa81
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index dea1ab0..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,13 +0,0 @@
-*.pyc
-clang_format/script/
-third_party/libc++/trunk
-third_party/libc++abi/trunk
-third_party/libunwind/trunk
-linux64/clang-format
-linux64/gn
-mac/clang-format
-mac/gn
-win/clang-format.exe
-win/gn.exe
-android/doclava/
-android/doclava.tar.gz
diff --git a/DEPS b/DEPS
deleted file mode 100644
index 99d9740..0000000
--- a/DEPS
+++ /dev/null
@@ -1,26 +0,0 @@
-use_relative_paths = True
-
-vars = {
- "chromium_url": "https://chromium.googlesource.com",
-
- # When changing these, also update the svn revisions in deps_revisions.gni
- "clang_format_revision": "96636aa0e9f047f17447f2d45a094d0b59ed7917",
- "libcxx_revision": "e713cc0acf1ae8b82f451bf58ebef67a46ceddfb",
- "libcxxabi_revision": "307bb62985575b2e3216a8cfd7e122e0574f33a9",
- "libunwind_revision": "69d9b84cca8354117b9fe9705a4430d789ee599b",
-}
-
-deps = {
- "clang_format/script":
- Var("chromium_url") + "/chromium/llvm-project/cfe/tools/clang-format.git@" +
- Var("clang_format_revision"),
- "third_party/libc++/trunk":
- Var("chromium_url") + "/chromium/llvm-project/libcxx.git" + "@" +
- Var("libcxx_revision"),
- "third_party/libc++abi/trunk":
- Var("chromium_url") + "/chromium/llvm-project/libcxxabi.git" + "@" +
- Var("libcxxabi_revision"),
- "third_party/libunwind/trunk":
- Var("chromium_url") + "/external/llvm.org/libunwind.git" + "@" +
- Var("libunwind_revision"),
-}
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 972bb2e..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README b/README
new file mode 100644
index 0000000..33dcd25
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+This repo has moved to https://chromium.googlesource.com/chromium/src/buildtools
diff --git a/README.txt b/README.txt
deleted file mode 100644
index 30325bf..0000000
--- a/README.txt
+++ /dev/null
@@ -1,50 +0,0 @@
-This repository contains hashes of build tools used by Chromium and related
-projects. The actual binaries are pulled from Google Storage, normally as part
-of a gclient hook.
-
-The repository is separate so that the build tools can be shared between the
-various Chromium-related projects without each one needing to maintain its own
-versioning of each binary.
-
-________________________________________
-UPDATING AND ROLLING BUILDTOOLS MANUALLY
-
-When you update buildtools, you should roll the new version into the Chromium
-repository right away. Otherwise, the next person who makes a change will end
-up rolling (and testing) your change. If there are any unresolved problems with
-your change, the next person will be blocked.
-
- - From the buildtools directory, make a branch, edit and upload normally.
-
- - Get your change reviewed and landed. There are no trybots so landing will
- be very fast.
-
- - Get the hash for the commit that commit-bot made. Make a new branch in
- the Chromium repository and paste the hash into the line in //DEPS
- labeled "buildtools_revision".
-
- - You can TBR changes to the DEPS file since the git hashes can't be reviewed
- in any practical way. Submit that patch to the commit queue.
-
- - If this roll identifies a problem with your patch, fix it promptly. If you
- are unable to fix it promptly, it's best to revert your buildtools patch
- to avoid blocking other people that want to make changes.
-
-________________________
-ADDING BINARIES MANUALLY
-
-One uploads new versions of the tools using the 'gsutil' binary from the
-Google Storage SDK:
-
- https://developers.google.com/storage/docs/gsutil
-
-There is a checked-in version of gsutil as part of depot_tools.
-
-To initialize gsutil's credentials:
-
- python ~/depot_tools/third_party/gsutil/gsutil config
-
- That will give a URL which you should log into with your web browser.
-
- Copy the code back to the command-line utility. Ignore the project ID (it's
- OK to just leave it blank when prompted).
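For reference, the roll step described in the removed README.txt above came down
to bumping one pinned revision in Chromium's //DEPS. A minimal sketch of that
entry, assuming the variable is still called "buildtools_revision"; the hash
shown is a placeholder, not a real revision:

```
vars = {
  # ... other pinned revisions ...

  # Paste the hash of the buildtools commit that the commit bot landed.
  "buildtools_revision": "0000000000000000000000000000000000000000",
}
```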
diff --git a/android/doclava.tar.gz.sha1 b/android/doclava.tar.gz.sha1
deleted file mode 100644
index 10ef37e..0000000
--- a/android/doclava.tar.gz.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1931becb8a8e21685f39c62854e9e814d64ccf1a
diff --git a/checkdeps/DEPS b/checkdeps/DEPS
deleted file mode 100644
index 7a57b0b..0000000
--- a/checkdeps/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-skip_child_includes = [
- "testdata",
-]
diff --git a/checkdeps/OWNERS b/checkdeps/OWNERS
deleted file mode 100644
index 06fefbf..0000000
--- a/checkdeps/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-brettw@chromium.org
diff --git a/checkdeps/PRESUBMIT.py b/checkdeps/PRESUBMIT.py
deleted file mode 100644
index 10ef632..0000000
--- a/checkdeps/PRESUBMIT.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Presubmit script for checkdeps tool.
-"""
-
-
-def CheckChange(input_api, output_api):
- results = []
- results.extend(input_api.canned_checks.RunUnitTests(
- input_api, output_api,
- [input_api.os_path.join(input_api.PresubmitLocalPath(),
- 'checkdeps_test.py')]))
- return results
-
-
-# Mandatory entrypoint.
-def CheckChangeOnUpload(input_api, output_api):
- return CheckChange(input_api, output_api)
-
-
-# Mandatory entrypoint.
-def CheckChangeOnCommit(input_api, output_api):
- return CheckChange(input_api, output_api)
diff --git a/checkdeps/README.md b/checkdeps/README.md
deleted file mode 100644
index 12a89cb..0000000
--- a/checkdeps/README.md
+++ /dev/null
@@ -1,87 +0,0 @@
-# DEPS Files
-
-DEPS files specify which files the sources in a directory tree may include.
-
-## File format
-
-First you have the normal module-level deps. These are the ones used by
-gclient. An example would be:
-
-```
-deps = {
- "base":"http://foo.bar/trunk/base"
-}
-```
-
-DEPS files not in the top-level of a module won't need this. Then you have any
-additional include rules. You can add (using `+`) or subtract (using `-`) from
-the previously specified rules (including module-level deps). You can also
-specify a path that is allowed for now but that we intend to remove, using `!`;
-this is treated the same as `+` when `check_deps` is run by our bots, but a
-presubmit step will show a warning if you add a new include of a file that is
-only allowed by `!`.
-
-Note that for .java files, there is currently no difference between `+` and
-`!`, even in the presubmit step.
-
-```
-include_rules = [
- # Code should be able to use base (it's specified in the module-level
- # deps above), but nothing in "base/evil" because it's evil.
- "-base/evil",
-
- # But this one subdirectory of evil is OK.
- "+base/evil/not",
-
- # And it can include files from this other directory even though there is
- # no deps rule for it.
- "+tools/crime_fighter",
-
- # This dependency is allowed for now but work is ongoing to remove it,
- # so you shouldn't add further dependencies on it.
- "!base/evil/ok_for_now.h",
-]
-```
-
-If you have certain include rules that should only be applied for some files
-within this directory and subdirectories, you can write a section named
-`specific_include_rules` that is a hash map of regular expressions to the list
-of rules that should apply to files matching them. Note that such rules will
-always be applied before the rules from `include_rules` have been applied, but
-the order in which rules associated with different regular expressions are
-applied is arbitrary.
-
-```
-specific_include_rules = {
- ".*_(unit|browser|api)test\.cc": [
- "+libraries/testsupport",
- ],
-}
-```
-
-You can optionally ignore the rules inherited from parent directories, similar
-to "set noparent" in OWNERS files. For example, adding `noparent = True` in
-//ash/components/DEPS will cause rules from //ash/DEPS to be ignored, thereby
-forcing each //ash/components/foo to explicitly declare foo's dependencies.
-
-```
-noparent = True
-```
-
-# Directory structure
-
-DEPS files may be placed anywhere in the tree. Each one applies to all
-subdirectories, where there may be more DEPS files that provide additions or
-subtractions for their own sub-trees.
-
-There is an implicit rule for the current directory (where the DEPS file lives)
-and all of its subdirectories. This prevents you from having to explicitly
-allow the current directory everywhere. This implicit rule is applied first, so
-you can modify or remove it using the normal include rules.
-
-The rules are processed in order. This means you can explicitly allow a higher
-directory and then take away permissions from sub-parts, or the reverse.
-
-Note that all directory separators must be `/` slashes (Unix-style) and not
-backslashes. All directories should be relative to the source root and use
-only lowercase.
diff --git a/checkdeps/builddeps.py b/checkdeps/builddeps.py
deleted file mode 100755
index 2dc9351..0000000
--- a/checkdeps/builddeps.py
+++ /dev/null
@@ -1,378 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Traverses the source tree, parses all found DEPS files, and constructs
-a dependency rule table to be used by subclasses.
-
-See README.md for the format of the deps file.
-"""
-
-import copy
-import os.path
-import posixpath
-import subprocess
-
-from rules import Rule, Rules
-
-
-# Variable name used in the DEPS file to add or subtract include files from
-# the module-level deps.
-INCLUDE_RULES_VAR_NAME = 'include_rules'
-
-# Variable name used in the DEPS file to add or subtract include files
-# from module-level deps specific to files whose basename (last
-# component of path) matches a given regular expression.
-SPECIFIC_INCLUDE_RULES_VAR_NAME = 'specific_include_rules'
-
-# Optionally present in the DEPS file to list subdirectories which should not
-# be checked. This allows us to skip third party code, for example.
-SKIP_SUBDIRS_VAR_NAME = 'skip_child_includes'
-
-# Optionally discard rules from parent directories, similar to "noparent" in
-# OWNERS files. For example, if //ash/components has "noparent = True" then
-# it will not inherit rules from //ash/DEPS, forcing each //ash/component/foo
-# to declare all its dependencies.
-NOPARENT_VAR_NAME = 'noparent'
-
-
-class DepsBuilderError(Exception):
- """Base class for exceptions in this module."""
- pass
-
-
-def NormalizePath(path):
- """Returns a path normalized to how we write DEPS rules and compare paths."""
- return os.path.normcase(path).replace(os.path.sep, posixpath.sep)
-
-
-def _GitSourceDirectories(base_directory):
- """Returns set of normalized paths to subdirectories containing sources
- managed by git."""
- base_dir_norm = NormalizePath(base_directory)
- git_source_directories = set([base_dir_norm])
-
- git_cmd = 'git.bat' if os.name == 'nt' else 'git'
- git_ls_files_cmd = [git_cmd, 'ls-files']
- # FIXME: Use a context manager in Python 3.2+
- popen = subprocess.Popen(git_ls_files_cmd,
- stdout=subprocess.PIPE,
- bufsize=1, # line buffering, since read by line
- cwd=base_directory)
- try:
- try:
- for line in popen.stdout:
- dir_path = os.path.join(base_directory, os.path.dirname(line))
- dir_path_norm = NormalizePath(dir_path)
- # Add the directory as well as all the parent directories,
- # stopping once we reach an already-listed directory.
- while dir_path_norm not in git_source_directories:
- git_source_directories.add(dir_path_norm)
- dir_path_norm = posixpath.dirname(dir_path_norm)
- finally:
- popen.stdout.close()
- finally:
- popen.wait()
-
- return git_source_directories
-
-
-class DepsBuilder(object):
- """Parses include_rules from DEPS files."""
-
- def __init__(self,
- base_directory=None,
- extra_repos=[],
- verbose=False,
- being_tested=False,
- ignore_temp_rules=False,
- ignore_specific_rules=False):
- """Creates a new DepsBuilder.
-
- Args:
- base_directory: local path to root of checkout, e.g. C:\chr\src.
- verbose: Set to True for debug output.
- being_tested: Set to True to ignore the DEPS file at tools/checkdeps/DEPS.
- ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
- """
- base_directory = (base_directory or
- os.path.join(os.path.dirname(__file__),
- os.path.pardir, os.path.pardir))
- self.base_directory = os.path.abspath(base_directory) # Local absolute path
- self.extra_repos = extra_repos
- self.verbose = verbose
- self._under_test = being_tested
- self._ignore_temp_rules = ignore_temp_rules
- self._ignore_specific_rules = ignore_specific_rules
- self._git_source_directories = None
-
- if os.path.exists(os.path.join(base_directory, '.git')):
- self.is_git = True
- elif os.path.exists(os.path.join(base_directory, '.svn')):
- self.is_git = False
- else:
- raise DepsBuilderError("%s is not a repository root" % base_directory)
-
- # Map of normalized directory paths to rules to use for those
- # directories, or None for directories that should be skipped.
- # Normalized is: absolute, lowercase, / for separator.
- self.directory_rules = {}
- self._ApplyDirectoryRulesAndSkipSubdirs(Rules(), self.base_directory)
-
- def _ApplyRules(self, existing_rules, includes, specific_includes,
- cur_dir_norm):
- """Applies the given include rules, returning the new rules.
-
- Args:
- existing_rules: A set of existing rules that will be combined.
- includes: The list of rules from the "include_rules" section of DEPS.
- specific_includes: E.g. {'.*_unittest\.cc': ['+foo', '-blat']} rules
- from the "specific_include_rules" section of DEPS.
- cur_dir_norm: The current directory, normalized path. We will create an
- implicit rule that allows inclusion from this directory.
-
- Returns: A new set of rules combining the existing_rules with the other
- arguments.
- """
- rules = copy.deepcopy(existing_rules)
-
- # First apply the implicit "allow" rule for the current directory.
- base_dir_norm = NormalizePath(self.base_directory)
- if not cur_dir_norm.startswith(base_dir_norm):
- raise Exception(
- 'Internal error: base directory is not at the beginning for\n'
- ' %s and base dir\n'
- ' %s' % (cur_dir_norm, base_dir_norm))
- relative_dir = posixpath.relpath(cur_dir_norm, base_dir_norm)
-
- # Make the help string a little more meaningful.
- source = relative_dir or 'top level'
- rules.AddRule('+' + relative_dir,
- relative_dir,
- 'Default rule for ' + source)
-
- def ApplyOneRule(rule_str, dependee_regexp=None):
- """Deduces a sensible description for the rule being added, and
- adds the rule with its description to |rules|.
-
- If we are ignoring temporary rules, this function does nothing
- for rules beginning with the Rule.TEMP_ALLOW character.
- """
- if self._ignore_temp_rules and rule_str.startswith(Rule.TEMP_ALLOW):
- return
-
- rule_block_name = 'include_rules'
- if dependee_regexp:
- rule_block_name = 'specific_include_rules'
- if relative_dir:
- rule_description = relative_dir + "'s %s" % rule_block_name
- else:
- rule_description = 'the top level %s' % rule_block_name
- rules.AddRule(rule_str, relative_dir, rule_description, dependee_regexp)
-
- # Apply the additional explicit rules.
- for rule_str in includes:
- ApplyOneRule(rule_str)
-
- # Finally, apply the specific rules.
- if self._ignore_specific_rules:
- return rules
-
- for regexp, specific_rules in specific_includes.iteritems():
- for rule_str in specific_rules:
- ApplyOneRule(rule_str, regexp)
-
- return rules
-
- def _ApplyDirectoryRules(self, existing_rules, dir_path_local_abs):
- """Combines rules from the existing rules and the new directory.
-
- Any directory can contain a DEPS file. Top-level DEPS files can contain
- module dependencies which are used by gclient. We use these, along with
- additional include rules and implicit rules for the given directory, to
- come up with a combined set of rules to apply for the directory.
-
- Args:
- existing_rules: The rules for the parent directory. We'll add-on to these.
- dir_path_local_abs: The directory path that the DEPS file may live in (if
- it exists). This will also be used to generate the
- implicit rules. This is a local path.
-
- Returns: A 2-tuple of:
- (1) the combined set of rules to apply to the sub-tree,
- (2) a list of all subdirectories that should NOT be checked, as specified
- in the DEPS file (if any).
- Subdirectories are single words, hence no OS dependence.
- """
- dir_path_norm = NormalizePath(dir_path_local_abs)
-
- # Check the DEPS file in this directory.
- if self.verbose:
- print 'Applying rules from', dir_path_local_abs
- def FromImpl(*_):
- pass # NOP function so "From" doesn't fail.
-
- def FileImpl(_):
- pass # NOP function so "File" doesn't fail.
-
- class _VarImpl:
- def __init__(self, local_scope):
- self._local_scope = local_scope
-
- def Lookup(self, var_name):
- """Implements the Var syntax."""
- try:
- return self._local_scope['vars'][var_name]
- except KeyError:
- raise Exception('Var is not defined: %s' % var_name)
-
- local_scope = {}
- global_scope = {
- 'File': FileImpl,
- 'From': FromImpl,
- 'Var': _VarImpl(local_scope).Lookup,
- }
- deps_file_path = os.path.join(dir_path_local_abs, 'DEPS')
-
- # The second conditional here is to disregard the
- # tools/checkdeps/DEPS file while running tests. This DEPS file
- # has a skip_child_includes for 'testdata' which is necessary for
- # running production tests, since there are intentional DEPS
- # violations under the testdata directory. On the other hand when
- # running tests, we absolutely need to verify the contents of that
- # directory to trigger those intended violations and see that they
- # are handled correctly.
- if os.path.isfile(deps_file_path) and not (
- self._under_test and
- os.path.basename(dir_path_local_abs) == 'checkdeps'):
- execfile(deps_file_path, global_scope, local_scope)
- elif self.verbose:
- print ' No deps file found in', dir_path_local_abs
-
- # Even if a DEPS file does not exist we still invoke ApplyRules
- # to apply the implicit "allow" rule for the current directory
- include_rules = local_scope.get(INCLUDE_RULES_VAR_NAME, [])
- specific_include_rules = local_scope.get(SPECIFIC_INCLUDE_RULES_VAR_NAME,
- {})
- skip_subdirs = local_scope.get(SKIP_SUBDIRS_VAR_NAME, [])
- noparent = local_scope.get(NOPARENT_VAR_NAME, False)
- if noparent:
- parent_rules = Rules()
- else:
- parent_rules = existing_rules
-
- return (self._ApplyRules(parent_rules, include_rules,
- specific_include_rules, dir_path_norm),
- skip_subdirs)
-
- def _ApplyDirectoryRulesAndSkipSubdirs(self, parent_rules,
- dir_path_local_abs):
- """Given |parent_rules| and a subdirectory |dir_path_local_abs| of the
- directory that owns the |parent_rules|, add |dir_path_local_abs|'s rules to
- |self.directory_rules|, and add None entries for any of its
- subdirectories that should be skipped.
- """
- directory_rules, excluded_subdirs = self._ApplyDirectoryRules(
- parent_rules, dir_path_local_abs)
- dir_path_norm = NormalizePath(dir_path_local_abs)
- self.directory_rules[dir_path_norm] = directory_rules
- for subdir in excluded_subdirs:
- subdir_path_norm = posixpath.join(dir_path_norm, subdir)
- self.directory_rules[subdir_path_norm] = None
-
- def GetAllRulesAndFiles(self, dir_name=None):
- """Yields (rules, filenames) for each repository directory with DEPS rules.
-
- This walks the directory tree while staying in the repository. Specify
- |dir_name| to walk just one directory and its children; omit |dir_name| to
- walk the entire repository.
-
- Yields:
- Two-element (rules, filenames) tuples. |rules| is a rules.Rules object
- for a directory, and |filenames| is a list of the absolute local paths
- of all files in that directory.
- """
- if self.is_git and self._git_source_directories is None:
- self._git_source_directories = _GitSourceDirectories(self.base_directory)
- for repo in self.extra_repos:
- repo_path = os.path.join(self.base_directory, repo)
- self._git_source_directories.update(_GitSourceDirectories(repo_path))
-
- # Collect a list of all files and directories to check.
- files_to_check = []
- if dir_name and not os.path.isabs(dir_name):
- dir_name = os.path.join(self.base_directory, dir_name)
- dirs_to_check = [dir_name or self.base_directory]
- while dirs_to_check:
- current_dir = dirs_to_check.pop()
-
- # Check that this directory is part of the source repository. This
- # prevents us from descending into third-party code or directories
- # generated by the build system.
- if self.is_git:
- if NormalizePath(current_dir) not in self._git_source_directories:
- continue
- elif not os.path.exists(os.path.join(current_dir, '.svn')):
- continue
-
- current_dir_rules = self.GetDirectoryRules(current_dir)
-
- if not current_dir_rules:
- continue # Handle the 'skip_child_includes' case.
-
- current_dir_contents = sorted(os.listdir(current_dir))
- file_names = []
- sub_dirs = []
- for file_name in current_dir_contents:
- full_name = os.path.join(current_dir, file_name)
- if os.path.isdir(full_name):
- sub_dirs.append(full_name)
- else:
- file_names.append(full_name)
- dirs_to_check.extend(reversed(sub_dirs))
-
- yield (current_dir_rules, file_names)
-
- def GetDirectoryRules(self, dir_path_local):
- """Returns a Rules object to use for the given directory, or None
- if the given directory should be skipped.
-
- Also modifies |self.directory_rules| to store the Rules.
- This takes care of first building rules for parent directories (up to
- |self.base_directory|) if needed, which may add rules for skipped
- subdirectories.
-
- Args:
- dir_path_local: A local path to the directory you want rules for.
- Can be relative and unnormalized. It is the caller's responsibility
- to ensure that this is part of the repository rooted at
- |self.base_directory|.
- """
- if os.path.isabs(dir_path_local):
- dir_path_local_abs = dir_path_local
- else:
- dir_path_local_abs = os.path.join(self.base_directory, dir_path_local)
- dir_path_norm = NormalizePath(dir_path_local_abs)
-
- if dir_path_norm in self.directory_rules:
- return self.directory_rules[dir_path_norm]
-
- parent_dir_local_abs = os.path.dirname(dir_path_local_abs)
- parent_rules = self.GetDirectoryRules(parent_dir_local_abs)
- # We need to check for an entry for our dir_path again, since
- # GetDirectoryRules can modify entries for subdirectories, namely setting
- # to None if they should be skipped, via _ApplyDirectoryRulesAndSkipSubdirs.
- # For example, if dir_path == 'A/B/C' and A/B/DEPS specifies that the C
- # subdirectory be skipped, GetDirectoryRules('A/B') will fill in the entry
- # for 'A/B/C' as None.
- if dir_path_norm in self.directory_rules:
- return self.directory_rules[dir_path_norm]
-
- if parent_rules:
- self._ApplyDirectoryRulesAndSkipSubdirs(parent_rules, dir_path_local_abs)
- else:
- # If the parent directory should be skipped, then the current
- # directory should also be skipped.
- self.directory_rules[dir_path_norm] = None
- return self.directory_rules[dir_path_norm]
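The DEPS evaluation that _ApplyDirectoryRules performs in the deleted
builddeps.py above amounts to executing the DEPS file with a Var() helper that
resolves names from the file's own "vars" dict, then pulling include_rules,
specific_include_rules, skip_child_includes and noparent out of the resulting
scope. A minimal, self-contained sketch of that mechanism (illustration only;
the deleted code uses Python 2's execfile, the sketch uses exec, and deps_text
is a made-up example file):

```
# Stand-in for the contents of a DEPS file (hypothetical example).
deps_text = '''\
vars = {"lib": "third_party/lib"}
include_rules = ["+" + Var("lib"), "-base/evil"]
skip_child_includes = ["testdata"]
'''

local_scope = {}
# Var() looks names up in the vars dict defined by the DEPS file itself,
# mirroring _VarImpl.Lookup above.
global_scope = {"Var": lambda name: local_scope["vars"][name]}
exec(compile(deps_text, "DEPS", "exec"), global_scope, local_scope)

print(local_scope.get("include_rules", []))        # ['+third_party/lib', '-base/evil']
print(local_scope.get("skip_child_includes", []))  # ['testdata']
print(local_scope.get("noparent", False))          # False
```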
diff --git a/checkdeps/checkdeps.py b/checkdeps/checkdeps.py
deleted file mode 100755
index f4777ab..0000000
--- a/checkdeps/checkdeps.py
+++ /dev/null
@@ -1,289 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Makes sure that files include headers from allowed directories.
-
-Checks DEPS files in the source tree for rules, and applies those rules to
-"#include" and "import" directives in the .cpp, .java, and .proto source files.
-Any source file including something not permitted by the DEPS files will fail.
-
-See README.md for a detailed description of the DEPS format.
-"""
-
-import os
-import optparse
-import re
-import sys
-
-import proto_checker
-import cpp_checker
-import java_checker
-import results
-
-from builddeps import DepsBuilder
-from rules import Rule, Rules
-
-
-def _IsTestFile(filename):
- """Does a rudimentary check to try to skip test files; this could be
- improved but is good enough for now.
- """
- return re.match('(test|mock|dummy)_.*|.*_[a-z]*test\.(cc|mm|java)', filename)
-
-
-class DepsChecker(DepsBuilder):
- """Parses include_rules from DEPS files and verifies files in the
- source tree against them.
- """
-
- def __init__(self,
- base_directory=None,
- extra_repos=[],
- verbose=False,
- being_tested=False,
- ignore_temp_rules=False,
- skip_tests=False,
- resolve_dotdot=True):
- """Creates a new DepsChecker.
-
- Args:
- base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
- verbose: Set to true for debug output.
- being_tested: Set to true to ignore the DEPS file at tools/checkdeps/DEPS.
- ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
- """
- DepsBuilder.__init__(
- self, base_directory, extra_repos, verbose, being_tested,
- ignore_temp_rules)
-
- self._skip_tests = skip_tests
- self._resolve_dotdot = resolve_dotdot
- self.results_formatter = results.NormalResultsFormatter(verbose)
-
- def Report(self):
- """Prints a report of results, and returns an exit code for the process."""
- if self.results_formatter.GetResults():
- self.results_formatter.PrintResults()
- return 1
- print '\nSUCCESS\n'
- return 0
-
- def CheckDirectory(self, start_dir):
- """Checks all relevant source files in the specified directory and
- its subdirectories for compliance with DEPS rules throughout the
- tree (starting at |self.base_directory|). |start_dir| must be a
- subdirectory of |self.base_directory|.
-
- On completion, self.results_formatter has the results of
- processing, and calling Report() will print a report of results.
- """
- java = java_checker.JavaChecker(self.base_directory, self.verbose)
- cpp = cpp_checker.CppChecker(
- self.verbose, self._resolve_dotdot, self.base_directory)
- proto = proto_checker.ProtoChecker(
- self.verbose, self._resolve_dotdot, self.base_directory)
- checkers = dict(
- (extension, checker)
- for checker in [java, cpp, proto] for extension in checker.EXTENSIONS)
-
- for rules, file_paths in self.GetAllRulesAndFiles(start_dir):
- for full_name in file_paths:
- if self._skip_tests and _IsTestFile(os.path.basename(full_name)):
- continue
- file_extension = os.path.splitext(full_name)[1]
- if not file_extension in checkers:
- continue
- checker = checkers[file_extension]
- file_status = checker.CheckFile(rules, full_name)
- if file_status.HasViolations():
- self.results_formatter.AddError(file_status)
-
- def CheckIncludesAndImports(self, added_lines, checker):
- """Check new import/#include statements added in the change
- being presubmit checked.
-
- Args:
- added_lines: ((file_path, (changed_line, changed_line, ...), ...)
- checker: CppChecker/JavaChecker/ProtoChecker checker instance
-
- Return:
- A list of tuples, (bad_file_path, rule_type, rule_description)
- where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
- rule_description is human-readable. Empty if no problems.
- """
- problems = []
- for file_path, changed_lines in added_lines:
- if not checker.ShouldCheck(file_path):
- continue
- rules_for_file = self.GetDirectoryRules(os.path.dirname(file_path))
- if not rules_for_file:
- continue
- for line in changed_lines:
- is_include, violation = checker.CheckLine(
- rules_for_file, line, file_path, True)
- if not violation:
- continue
- rule_type = violation.violated_rule.allow
- if rule_type == Rule.ALLOW:
- continue
- violation_text = results.NormalResultsFormatter.FormatViolation(
- violation, self.verbose)
- problems.append((file_path, rule_type, violation_text))
- return problems
-
- def CheckAddedCppIncludes(self, added_includes):
- """This is used from PRESUBMIT.py to check new #include statements added in
- the change being presubmit checked.
-
- Args:
- added_includes: ((file_path, (include_line, include_line, ...), ...)
-
- Return:
- A list of tuples, (bad_file_path, rule_type, rule_description)
- where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
- rule_description is human-readable. Empty if no problems.
- """
- return self.CheckIncludesAndImports(
- added_includes, cpp_checker.CppChecker(self.verbose))
-
- def CheckAddedJavaImports(self, added_imports, allow_multiple_definitions=None):
- """This is used from PRESUBMIT.py to check new import statements added in
- the change being presubmit checked.
-
- Args:
- added_imports: ((file_path, (import_line, import_line, ...), ...)
- allow_multiple_definitions: [file_name, file_name, ...]. List of java file
- names allowed to have multiple definitions in the presubmit check.
-
- Return:
- A list of tuples, (bad_file_path, rule_type, rule_description)
- where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
- rule_description is human-readable. Empty if no problems.
- """
- return self.CheckIncludesAndImports(
- added_imports,
- java_checker.JavaChecker(self.base_directory, self.verbose,
- added_imports, allow_multiple_definitions))
-
- def CheckAddedProtoImports(self, added_imports):
- """This is used from PRESUBMIT.py to check new #import statements added in
- the change being presubmit checked.
-
- Args:
- added_imports : ((file_path, (import_line, import_line, ...), ...)
-
- Return:
- A list of tuples, (bad_file_path, rule_type, rule_description)
- where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
- rule_description is human-readable. Empty if no problems.
- """
- return self.CheckIncludesAndImports(
- added_imports, proto_checker.ProtoChecker(
- verbose=self.verbose, root_dir=self.base_directory))
-
-def PrintUsage():
- print """Usage: python checkdeps.py [--root <root>] [tocheck]
-
- --root ROOT Specifies the repository root. This defaults to "../../.."
- relative to the script file. This will be correct given the
- normal location of the script in "<root>/tools/checkdeps".
-
- --(others) There are a few lesser-used options; run with --help to show them.
-
- tocheck Specifies the directory, relative to root, to check. This defaults
- to "." so it checks everything.
-
-Examples:
- python checkdeps.py
- python checkdeps.py --root c:\\source chrome"""
-
-
-def main():
- option_parser = optparse.OptionParser()
- option_parser.add_option(
- '', '--root',
- default='', dest='base_directory',
- help='Specifies the repository root. This defaults '
- 'to "../../.." relative to the script file, which '
- 'will normally be the repository root.')
- option_parser.add_option(
- '', '--extra-repos',
- action='append', dest='extra_repos', default=[],
- help='Specifies extra repositories relative to root repository.')
- option_parser.add_option(
- '', '--ignore-temp-rules',
- action='store_true', dest='ignore_temp_rules', default=False,
- help='Ignore !-prefixed (temporary) rules.')
- option_parser.add_option(
- '', '--generate-temp-rules',
- action='store_true', dest='generate_temp_rules', default=False,
- help='Print rules to temporarily allow files that fail '
- 'dependency checking.')
- option_parser.add_option(
- '', '--count-violations',
- action='store_true', dest='count_violations', default=False,
- help='Count #includes in violation of intended rules.')
- option_parser.add_option(
- '', '--skip-tests',
- action='store_true', dest='skip_tests', default=False,
- help='Skip checking test files (best effort).')
- option_parser.add_option(
- '-v', '--verbose',
- action='store_true', default=False,
- help='Print debug logging')
- option_parser.add_option(
- '', '--json',
- help='Path to JSON output file')
- option_parser.add_option(
- '', '--no-resolve-dotdot',
- action='store_false', dest='resolve_dotdot', default=True,
- help='resolve leading ../ in include directive paths relative '
- 'to the file performing the inclusion.')
-
- options, args = option_parser.parse_args()
-
- deps_checker = DepsChecker(options.base_directory,
- extra_repos=options.extra_repos,
- verbose=options.verbose,
- ignore_temp_rules=options.ignore_temp_rules,
- skip_tests=options.skip_tests,
- resolve_dotdot=options.resolve_dotdot)
- base_directory = deps_checker.base_directory # Default if needed, normalized
-
- # Figure out which directory we have to check.
- start_dir = base_directory
- if len(args) == 1:
- # Directory specified. Start here. It's supposed to be relative to the
- # base directory.
- start_dir = os.path.abspath(os.path.join(base_directory, args[0]))
- elif len(args) >= 2 or (options.generate_temp_rules and
- options.count_violations):
- # More than one argument, or incompatible flags, we don't handle this.
- PrintUsage()
- return 1
-
- if not start_dir.startswith(deps_checker.base_directory):
- print 'Directory to check must be a subdirectory of the base directory,'
- print 'but %s is not a subdirectory of %s' % (start_dir, base_directory)
- return 1
-
- print 'Using base directory:', base_directory
- print 'Checking:', start_dir
-
- if options.generate_temp_rules:
- deps_checker.results_formatter = results.TemporaryRulesFormatter()
- elif options.count_violations:
- deps_checker.results_formatter = results.CountViolationsFormatter()
-
- if options.json:
- deps_checker.results_formatter = results.JSONResultsFormatter(
- options.json, deps_checker.results_formatter)
-
- deps_checker.CheckDirectory(start_dir)
- return deps_checker.Report()
-
-
-if '__main__' == __name__:
- sys.exit(main())
diff --git a/checkdeps/checkdeps_test.py b/checkdeps/checkdeps_test.py
deleted file mode 100755
index 6442d5b..0000000
--- a/checkdeps/checkdeps_test.py
+++ /dev/null
@@ -1,242 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for checkdeps.
-"""
-
-import os
-import unittest
-
-
-import builddeps
-import checkdeps
-import results
-
-
-class CheckDepsTest(unittest.TestCase):
-
- def setUp(self):
- self.deps_checker = checkdeps.DepsChecker(
- being_tested=True,
- base_directory=os.path.join(os.path.dirname(__file__), os.path.pardir))
-
- def ImplTestRegularCheckDepsRun(self, ignore_temp_rules, skip_tests):
- self.deps_checker._ignore_temp_rules = ignore_temp_rules
- self.deps_checker._skip_tests = skip_tests
- self.deps_checker.CheckDirectory(
- os.path.join(self.deps_checker.base_directory,
- 'checkdeps/testdata'))
-
- problems = self.deps_checker.results_formatter.GetResults()
- if skip_tests:
- self.failUnlessEqual(4, len(problems))
- else:
- self.failUnlessEqual(5, len(problems))
-
- def VerifySubstringsInProblems(key_path, substrings_in_sequence):
- """Finds the problem in |problems| that contains |key_path|,
- then verifies that each of |substrings_in_sequence| occurs in
- that problem, in the order they appear in
- |substrings_in_sequence|.
- """
- found = False
- key_path = os.path.normpath(key_path)
- for problem in problems:
- index = problem.find(key_path)
- if index != -1:
- for substring in substrings_in_sequence:
- index = problem.find(substring, index + 1)
- self.failUnless(index != -1, '%s in %s' % (substring, problem))
- found = True
- break
- if not found:
- self.fail('Found no problem for file %s' % key_path)
-
- if ignore_temp_rules:
- VerifySubstringsInProblems('testdata/allowed/test.h',
- ['-checkdeps/testdata/disallowed',
- 'temporarily_allowed.h',
- '-third_party/explicitly_disallowed',
- 'Because of no rule applying'])
- else:
- VerifySubstringsInProblems('testdata/allowed/test.h',
- ['-checkdeps/testdata/disallowed',
- '-third_party/explicitly_disallowed',
- 'Because of no rule applying'])
-
- VerifySubstringsInProblems('testdata/disallowed/test.h',
- ['-third_party/explicitly_disallowed',
- 'Because of no rule applying',
- 'Because of no rule applying'])
- VerifySubstringsInProblems('disallowed/allowed/test.h',
- ['-third_party/explicitly_disallowed',
- 'Because of no rule applying',
- 'Because of no rule applying'])
- VerifySubstringsInProblems('testdata/noparent/test.h',
- ['allowed/bad.h',
- 'Because of no rule applying'])
-
- if not skip_tests:
- VerifySubstringsInProblems('allowed/not_a_test.cc',
- ['-checkdeps/testdata/disallowed'])
-
- def testRegularCheckDepsRun(self):
- self.ImplTestRegularCheckDepsRun(False, False)
-
- def testRegularCheckDepsRunIgnoringTempRules(self):
- self.ImplTestRegularCheckDepsRun(True, False)
-
- def testRegularCheckDepsRunSkipTests(self):
- self.ImplTestRegularCheckDepsRun(False, True)
-
- def testRegularCheckDepsRunIgnoringTempRulesSkipTests(self):
- self.ImplTestRegularCheckDepsRun(True, True)
-
- def CountViolations(self, ignore_temp_rules):
- self.deps_checker._ignore_temp_rules = ignore_temp_rules
- self.deps_checker.results_formatter = results.CountViolationsFormatter()
- self.deps_checker.CheckDirectory(
- os.path.join(self.deps_checker.base_directory,
- 'checkdeps/testdata'))
- return self.deps_checker.results_formatter.GetResults()
-
- def testCountViolations(self):
- self.failUnlessEqual('11', self.CountViolations(False))
-
- def testCountViolationsIgnoringTempRules(self):
- self.failUnlessEqual('12', self.CountViolations(True))
-
- def testCountViolationsWithRelativePath(self):
- self.deps_checker.results_formatter = results.CountViolationsFormatter()
- self.deps_checker.CheckDirectory(
- os.path.join('checkdeps', 'testdata', 'allowed'))
- self.failUnlessEqual('4', self.deps_checker.results_formatter.GetResults())
-
- def testTempRulesGenerator(self):
- self.deps_checker.results_formatter = results.TemporaryRulesFormatter()
- self.deps_checker.CheckDirectory(
- os.path.join(self.deps_checker.base_directory,
- 'checkdeps/testdata/allowed'))
- temp_rules = self.deps_checker.results_formatter.GetResults()
- expected = [u' "!checkdeps/testdata/disallowed/bad.h",',
- u' "!checkdeps/testdata/disallowed/teststuff/bad.h",',
- u' "!third_party/explicitly_disallowed/bad.h",',
- u' "!third_party/no_rule/bad.h",']
- self.failUnlessEqual(expected, temp_rules)
-
- def testBadBaseDirectoryNotCheckoutRoot(self):
- # This assumes git. It's not a valid test if buildtools is fetched via svn.
- with self.assertRaises(builddeps.DepsBuilderError):
- checkdeps.DepsChecker(being_tested=True,
- base_directory=os.path.dirname(__file__))
-
- def testCheckAddedIncludesAllGood(self):
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/allowed/test.cc',
- ['#include "checkdeps/testdata/allowed/good.h"',
- '#include "checkdeps/testdata/disallowed/allowed/good.h"']
- ]])
- self.failIf(problems)
-
- def testCheckAddedIncludesManyGarbageLines(self):
- garbage_lines = ["My name is Sam%d\n" % num for num in range(50)]
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/allowed/test.cc', garbage_lines]])
- self.failIf(problems)
-
- def testCheckAddedIncludesNoRule(self):
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/allowed/test.cc',
- ['#include "no_rule_for_this/nogood.h"']
- ]])
- self.failUnless(problems)
-
- def testCheckAddedIncludesSkippedDirectory(self):
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/disallowed/allowed/skipped/test.cc',
- ['#include "whatever/whocares.h"']
- ]])
- self.failIf(problems)
-
- def testCheckAddedIncludesTempAllowed(self):
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/allowed/test.cc',
- ['#include "checkdeps/testdata/disallowed/temporarily_allowed.h"']
- ]])
- self.failUnless(problems)
-
- def testCopyIsDeep(self):
- # Regression test for a bug where we were making shallow copies of
- # Rules objects and therefore all Rules objects shared the same
- # dictionary for specific rules.
- #
- # The first pair should bring in a rule from testdata/allowed/DEPS
- # into that global dictionary that allows the
- # temp_allowed_for_tests.h file to be included in files ending
- # with _unittest.cc, and the second pair should completely fail
- # once the bug is fixed, but succeed (with a temporary allowance)
- # if the bug is in place.
- problems = self.deps_checker.CheckAddedCppIncludes(
- [['checkdeps/testdata/allowed/test.cc',
- ['#include "/checkdeps/testdata/disallowed/temporarily_allowed.h"']
- ],
- ['checkdeps/testdata/disallowed/foo_unittest.cc',
- ['#include "checkdeps/testdata/bongo/temp_allowed_for_tests.h"']
- ]])
- # With the bug in place, there would be two problems reported, and
- # the second would be for foo_unittest.cc.
- self.failUnless(len(problems) == 1)
- self.failUnless(problems[0][0].endswith('/test.cc'))
-
- def testTraversalIsOrdered(self):
- dirs_traversed = []
- for rules, filenames in self.deps_checker.GetAllRulesAndFiles():
- self.failUnlessEqual(type(filenames), list)
- self.failUnlessEqual(filenames, sorted(filenames))
- if filenames:
- dir_names = set(os.path.dirname(file) for file in filenames)
- self.failUnlessEqual(1, len(dir_names))
- dirs_traversed.append(dir_names.pop())
- self.failUnlessEqual(dirs_traversed, sorted(dirs_traversed))
-
- def testCheckPartialImportsAreAllowed(self):
- problems = self.deps_checker.CheckAddedProtoImports(
- [['checkdeps/testdata/test.proto',
- ['import "no_rule_for_this/nogood.proto"']
- ]])
- self.failIf(problems)
-
- def testCheckAddedFullPathImportsAllowed(self):
- # NOTE: Base directory is buildtools.
- problems = self.deps_checker.CheckAddedProtoImports(
- [['checkdeps/testdata/test.proto',
- ['import "checkdeps/testdata/allowed/good.proto"',
- 'import "checkdeps/testdata/disallowed/sub_folder/good.proto"']
- ]])
- self.failIf(problems)
-
- def testCheckAddedFullPathImportsDisallowed(self):
- problems = self.deps_checker.CheckAddedProtoImports(
- [['checkdeps/testdata/test.proto',
- ['import "checkdeps/testdata/disallowed/bad.proto"']
- ]])
- self.failUnless(problems)
-
- def testCheckAddedFullPathImportsManyGarbageLines(self):
- garbage_lines = ["My name is Sam%d\n" % num for num in range(50)]
- problems = self.deps_checker.CheckAddedProtoImports(
- [['checkdeps/testdata/test.proto',
- garbage_lines]])
- self.failIf(problems)
-
- def testCheckAddedIncludesNoRuleFullPath(self):
- problems = self.deps_checker.CheckAddedProtoImports(
- [['checkdeps/testdata/test.proto',
- ['import "../tools/some.proto"']
- ]])
- self.failUnless(problems)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/checkdeps/cpp_checker.py b/checkdeps/cpp_checker.py
deleted file mode 100644
index 3efad97..0000000
--- a/checkdeps/cpp_checker.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Checks C++ and Objective-C files for illegal includes."""
-
-import codecs
-import os
-import re
-
-import results
-from rules import Rule, MessageRule
-
-
-class CppChecker(object):
-
- EXTENSIONS = [
- '.h',
- '.cc',
- '.cpp',
- '.m',
- '.mm',
- ]
-
- # The maximum number of non-include lines we can see before giving up.
- _MAX_UNINTERESTING_LINES = 50
-
- # The maximum line length, this is to be efficient in the case of very long
- # lines (which can't be #includes).
- _MAX_LINE_LENGTH = 128
-
- # This regular expression will be used to extract filenames from include
- # statements.
- _EXTRACT_INCLUDE_PATH = re.compile(
- '[ \t]*#[ \t]*(?:include|import)[ \t]+"(.*)"')
-
- def __init__(self, verbose, resolve_dotdot=False, root_dir=''):
- self._verbose = verbose
- self._resolve_dotdot = resolve_dotdot
- self._root_dir = root_dir
-
- def CheckLine(self, rules, line, dependee_path, fail_on_temp_allow=False):
- """Checks the given line with the given rule set.
-
- Returns a tuple (is_include, dependency_violation) where
- is_include is True only if the line is an #include or #import
- statement, and dependency_violation is an instance of
- results.DependencyViolation if the line violates a rule, or None
- if it does not.
- """
- found_item = self._EXTRACT_INCLUDE_PATH.match(line)
- if not found_item:
- return False, None # Not a match
-
- include_path = found_item.group(1)
-
- if '\\' in include_path:
- return True, results.DependencyViolation(
- include_path,
- MessageRule('Include paths may not include backslashes.'),
- rules)
-
- if '/' not in include_path:
- # Don't fail when no directory is specified. We may want to be more
- # strict about this in the future.
- if self._verbose:
- print ' WARNING: include specified with no directory: ' + include_path
- return True, None
-
- if self._resolve_dotdot and '../' in include_path:
- dependee_dir = os.path.dirname(dependee_path)
- include_path = os.path.join(dependee_dir, include_path)
- include_path = os.path.relpath(include_path, self._root_dir)
-
- rule = rules.RuleApplyingTo(include_path, dependee_path)
- if (rule.allow == Rule.DISALLOW or
- (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
- return True, results.DependencyViolation(include_path, rule, rules)
- return True, None
-
- def CheckFile(self, rules, filepath):
- if self._verbose:
- print 'Checking: ' + filepath
-
- dependee_status = results.DependeeStatus(filepath)
- ret_val = '' # We'll collect the error messages in here
- last_include = 0
- with codecs.open(filepath, encoding='utf-8') as f:
- in_if0 = 0
- for line_num, line in enumerate(f):
- if line_num - last_include > self._MAX_UNINTERESTING_LINES:
- break
-
- line = line.strip()
-
- # Check to see if we're at / inside an #if 0 block
- if line.startswith('#if 0'):
- in_if0 += 1
- continue
- if in_if0 > 0:
- if line.startswith('#if'):
- in_if0 += 1
- elif line.startswith('#endif'):
- in_if0 -= 1
- continue
-
- is_include, violation = self.CheckLine(rules, line, filepath)
- if is_include:
- last_include = line_num
- if violation:
- dependee_status.AddViolation(violation)
-
- return dependee_status
-
- @staticmethod
- def IsCppFile(file_path):
- """Returns True iff the given path ends in one of the extensions
- handled by this checker.
- """
- return os.path.splitext(file_path)[1] in CppChecker.EXTENSIONS
-
- def ShouldCheck(self, file_path):
- """Check if the new #include file path should be presubmit checked.
-
- Args:
- file_path: file path to be checked
-
- Return:
- bool: True if the file should be checked; False otherwise.
- """
- return self.IsCppFile(file_path)
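As a quick illustration of the include extraction done by the deleted
CppChecker: its _EXTRACT_INCLUDE_PATH pattern (copied verbatim from the file
above) only matches quoted #include/#import lines, so angle-bracket system
includes are never checked. A small sketch, illustration only:

```
import re

# Same pattern as CppChecker._EXTRACT_INCLUDE_PATH in the deleted file.
extract = re.compile('[ \t]*#[ \t]*(?:include|import)[ \t]+"(.*)"')

print(extract.match('#include "base/logging.h"').group(1))  # base/logging.h
print(extract.match('#import "base/mac/foo.h"').group(1))   # base/mac/foo.h
print(extract.match('#include <vector>'))                    # None: not checked
```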
diff --git a/checkdeps/graphdeps.py b/checkdeps/graphdeps.py
deleted file mode 100755
index aff3c76..0000000
--- a/checkdeps/graphdeps.py
+++ /dev/null
@@ -1,406 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Dumps a graph of allowed and disallowed inter-module dependencies described
-by the DEPS files in the source tree. Supports DOT and PNG as the output format.
-
-Enables filtering and differential highlighting of parts of the graph based on
-the specified criteria. This allows for a much easier visual analysis of the
-dependencies, including answering questions such as "if a new source must
-depend on modules A, B, and C, what valid options among the existing modules
-are there to put it in."
-
-See README.md for a detailed description of the DEPS format.
-"""
-
-import os
-import optparse
-import pipes
-import re
-import sys
-
-from builddeps import DepsBuilder
-from rules import Rule
-
-
-class DepsGrapher(DepsBuilder):
- """Parses include_rules from DEPS files and outputs a DOT graph of the
- allowed and disallowed dependencies between directories and specific file
- regexps. Can generate only a subgraph of the whole dependency graph
- corresponding to the provided inclusion and exclusion regexp filters.
- Also can highlight fanins and/or fanouts of certain nodes matching the
- provided regexp patterns.
- """
-
- def __init__(self,
- base_directory,
- extra_repos,
- verbose,
- being_tested,
- ignore_temp_rules,
- ignore_specific_rules,
- hide_disallowed_deps,
- out_file,
- out_format,
- layout_engine,
- unflatten_graph,
- incl,
- excl,
- hilite_fanins,
- hilite_fanouts):
- """Creates a new DepsGrapher.
-
- Args:
- base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
- verbose: Set to true for debug output.
- being_tested: Set to true to ignore the DEPS file at tools/graphdeps/DEPS.
- ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
- ignore_specific_rules: Ignore rules from specific_include_rules sections.
- hide_disallowed_deps: Hide disallowed dependencies from the output graph.
- out_file: Output file name.
- out_format: Output format (anything GraphViz dot's -T option supports).
- layout_engine: Layout engine for formats other than 'dot'
- (anything that GraphViz dot's -K option supports).
- unflatten_graph: Try to reformat the output graph so it is narrower and
- taller. Helps fight overly flat and wide graphs, but
- sometimes produces a worse result.
- incl: Include only nodes matching this regexp; such nodes' fanin/fanout
- is also included.
- excl: Exclude nodes matching this regexp; such nodes' fanin/fanout is
- processed independently.
- hilite_fanins: Highlight fanins of nodes matching this regexp with a
- different edge and node color.
- hilite_fanouts: Highlight fanouts of nodes matching this regexp with a
- different edge and node color.
- """
- DepsBuilder.__init__(
- self,
- base_directory,
- extra_repos,
- verbose,
- being_tested,
- ignore_temp_rules,
- ignore_specific_rules)
-
- self.ignore_temp_rules = ignore_temp_rules
- self.ignore_specific_rules = ignore_specific_rules
- self.hide_disallowed_deps = hide_disallowed_deps
- self.out_file = out_file
- self.out_format = out_format
- self.layout_engine = layout_engine
- self.unflatten_graph = unflatten_graph
- self.incl = incl
- self.excl = excl
- self.hilite_fanins = hilite_fanins
- self.hilite_fanouts = hilite_fanouts
-
- self.deps = set()
-
- def DumpDependencies(self):
- """ Builds a dependency rule table and dumps the corresponding dependency
- graph to all requested formats."""
- self._BuildDepsGraph()
- self._DumpDependencies()
-
- def _BuildDepsGraph(self):
- """Recursively traverses the source tree starting at the specified directory
- and builds a dependency graph representation in self.deps."""
- for (rules, _) in self.GetAllRulesAndFiles():
- deps = rules.AsDependencyTuples(
- include_general_rules=True,
- include_specific_rules=not self.ignore_specific_rules)
- self.deps.update(deps)
-
- def _DumpDependencies(self):
- """Dumps the built dependency graph to the specified file with specified
- format."""
- if self.out_format == 'dot' and not self.layout_engine:
- if self.unflatten_graph:
- pipe = pipes.Template()
- pipe.append('unflatten -l 2 -c 3', '--')
- out = pipe.open(self.out_file, 'w')
- else:
- out = open(self.out_file, 'w')
- else:
- pipe = pipes.Template()
- if self.unflatten_graph:
- pipe.append('unflatten -l 2 -c 3', '--')
- dot_cmd = 'dot -T' + self.out_format
- if self.layout_engine:
- dot_cmd += ' -K' + self.layout_engine
- pipe.append(dot_cmd, '--')
- out = pipe.open(self.out_file, 'w')
-
- self._DumpDependenciesImpl(self.deps, out)
- out.close()
-
- def _DumpDependenciesImpl(self, deps, out):
- """Computes nodes' and edges' properties for the dependency graph |deps| and
- carries out the actual dumping to a file/pipe |out|."""
- deps_graph = dict()
- deps_srcs = set()
-
- # Pre-initialize the graph with src->(dst, allow) pairs.
- for (allow, src, dst) in deps:
- if allow == Rule.TEMP_ALLOW and self.ignore_temp_rules:
- continue
-
- deps_srcs.add(src)
- if src not in deps_graph:
- deps_graph[src] = []
- deps_graph[src].append((dst, allow))
-
- # Add all hierarchical parents too, in case some of them don't have their
- # own DEPS, and therefore are missing from the list of rules. Those will
- # be recursively populated with their parents' rules in the next block.
- parent_src = os.path.dirname(src)
- while parent_src:
- if parent_src not in deps_graph:
- deps_graph[parent_src] = []
- parent_src = os.path.dirname(parent_src)
-
- # For every node, propagate its rules down to all its children.
- deps_srcs = list(deps_srcs)
- deps_srcs.sort()
- for src in deps_srcs:
- parent_src = os.path.dirname(src)
- if parent_src:
- # We presort the list, so parents are guaranteed to precede children.
- assert parent_src in deps_graph,\
- "src: %s, parent_src: %s" % (src, parent_src)
- for (dst, allow) in deps_graph[parent_src]:
- # Check that this node does not explicitly override a rule from the
- # parent that we're about to add.
- if ((dst, Rule.ALLOW) not in deps_graph[src]) and \
- ((dst, Rule.TEMP_ALLOW) not in deps_graph[src]) and \
- ((dst, Rule.DISALLOW) not in deps_graph[src]):
- deps_graph[src].append((dst, allow))
-
- node_props = {}
- edges = []
-
- # 1) Populate a list of edge specifications in DOT format;
- # 2) Populate a list of computed raw node attributes to be output as node
- # specifications in DOT format later on.
- # Edges and nodes are emphasized with color and line/border weight depending
- # on how many of incl/excl/hilite_fanins/hilite_fanouts filters they hit,
- # and in what way.
- for src in deps_graph.keys():
- for (dst, allow) in deps_graph[src]:
- if allow == Rule.DISALLOW and self.hide_disallowed_deps:
- continue
-
- if allow == Rule.ALLOW and src == dst:
- continue
-
- edge_spec = "%s->%s" % (src, dst)
- if not re.search(self.incl, edge_spec) or \
- re.search(self.excl, edge_spec):
- continue
-
- if src not in node_props:
- node_props[src] = {'hilite': None, 'degree': 0}
- if dst not in node_props:
- node_props[dst] = {'hilite': None, 'degree': 0}
-
- edge_weight = 1
-
- if self.hilite_fanouts and re.search(self.hilite_fanouts, src):
- node_props[src]['hilite'] = 'lightgreen'
- node_props[dst]['hilite'] = 'lightblue'
- node_props[dst]['degree'] += 1
- edge_weight += 1
-
- if self.hilite_fanins and re.search(self.hilite_fanins, dst):
- node_props[src]['hilite'] = 'lightblue'
- node_props[dst]['hilite'] = 'lightgreen'
- node_props[src]['degree'] += 1
- edge_weight += 1
-
- if allow == Rule.ALLOW:
- edge_color = (edge_weight > 1) and 'blue' or 'green'
- edge_style = 'solid'
- elif allow == Rule.TEMP_ALLOW:
- edge_color = (edge_weight > 1) and 'blue' or 'green'
- edge_style = 'dashed'
- else:
- edge_color = 'red'
- edge_style = 'dashed'
- edges.append(' "%s" -> "%s" [style=%s,color=%s,penwidth=%d];' % \
- (src, dst, edge_style, edge_color, edge_weight))
-
- # Reformat the computed raw node attributes into a final DOT representation.
- nodes = []
- for (node, attrs) in node_props.iteritems():
- attr_strs = []
- if attrs['hilite']:
- attr_strs.append('style=filled,fillcolor=%s' % attrs['hilite'])
- attr_strs.append('penwidth=%d' % (attrs['degree'] or 1))
- nodes.append(' "%s" [%s];' % (node, ','.join(attr_strs)))
-
- # Output nodes and edges to |out| (can be a file or a pipe).
- edges.sort()
- nodes.sort()
- out.write('digraph DEPS {\n'
- ' fontsize=8;\n')
- out.write('\n'.join(nodes))
- out.write('\n\n')
- out.write('\n'.join(edges))
- out.write('\n}\n')
- out.close()
-
-
-def PrintUsage():
- print """Usage: python graphdeps.py [--root <root>]
-
- --root ROOT Specifies the repository root. This defaults to "../../.."
- relative to the script file. This will be correct given the
- normal location of the script in "<root>/tools/graphdeps".
-
- --(others) There are a few lesser-used options; run with --help to show them.
-
-Examples:
- Dump the whole dependency graph:
- graphdeps.py
- Find a suitable place for a new source that must depend on /apps and
- /content/browser/renderer_host. Limit potential candidates to /apps,
- /chrome/browser and content/browser, and descendants of those three.
- Generate both DOT and PNG output. The output will highlight the fanins
- of /apps and /content/browser/renderer_host. Overlapping nodes in both fanins
- will be emphasized by a thicker border. Those nodes are the ones that are
- allowed to depend on both targets, therefore they are all legal candidates
- to place the new source in:
- graphdeps.py \
- --root=./src \
- --out=./DEPS.svg \
- --format=svg \
- --incl='^(apps|chrome/browser|content/browser)->.*' \
- --excl='.*->third_party' \
- --fanin='^(apps|content/browser/renderer_host)$' \
- --ignore-specific-rules \
- --ignore-temp-rules"""
-
-
-def main():
- option_parser = optparse.OptionParser()
- option_parser.add_option(
- "", "--root",
- default="", dest="base_directory",
- help="Specifies the repository root. This defaults "
- "to '../../..' relative to the script file, which "
- "will normally be the repository root.")
- option_parser.add_option(
- '', '--extra-repos',
- action='append', dest='extra_repos', default=[],
- help='Specifies extra repositories relative to root repository.')
- option_parser.add_option(
- "-f", "--format",
- dest="out_format", default="dot",
- help="Output file format. "
- "Can be anything that GraphViz dot's -T option supports. "
-           "The most useful ones are: dot (text), svg (image), pdf (image). "
-           "NOTE: dotty has a known problem with fonts when displaying DOT "
- "files on Ubuntu - if labels are unreadable, try other formats.")
- option_parser.add_option(
- "-o", "--out",
- dest="out_file", default="DEPS",
- help="Output file name. If the name does not end in an extension "
- "matching the output format, that extension is automatically "
- "appended.")
- option_parser.add_option(
- "-l", "--layout-engine",
- dest="layout_engine", default="",
- help="Layout rendering engine. "
- "Can be anything that GraphViz dot's -K option supports. "
- "The most useful are in decreasing order: dot, fdp, circo, osage. "
- "NOTE: '-f dot' and '-f dot -l dot' are different: the former "
- "will dump a raw DOT graph and stop; the latter will further "
- "filter it through 'dot -Tdot -Kdot' layout engine.")
- option_parser.add_option(
- "-i", "--incl",
- default="^.*$", dest="incl",
- help="Include only edges of the graph that match the specified regexp. "
- "The regexp is applied to edges of the graph formatted as "
-           "'source_node->target_node', where the '->' part is verbatim. "
- "Therefore, a reliable regexp should look like "
- "'^(chrome|chrome/browser|chrome/common)->content/public/browser$' "
- "or similar, with both source and target node regexps present, "
- "explicit ^ and $, and otherwise being as specific as possible.")
- option_parser.add_option(
- "-e", "--excl",
- default="^$", dest="excl",
-      help="Exclude edges of the graph that match the specified regexp. "
- "See --incl for details on the format.")
- option_parser.add_option(
- "", "--fanin",
- default="", dest="hilite_fanins",
- help="Highlight fanins of nodes matching the specified regexp.")
- option_parser.add_option(
- "", "--fanout",
- default="", dest="hilite_fanouts",
- help="Highlight fanouts of nodes matching the specified regexp.")
- option_parser.add_option(
- "", "--ignore-temp-rules",
- action="store_true", dest="ignore_temp_rules", default=False,
- help="Ignore !-prefixed (temporary) rules in DEPS files.")
- option_parser.add_option(
- "", "--ignore-specific-rules",
- action="store_true", dest="ignore_specific_rules", default=False,
- help="Ignore specific_include_rules section of DEPS files.")
- option_parser.add_option(
- "", "--hide-disallowed-deps",
- action="store_true", dest="hide_disallowed_deps", default=False,
- help="Hide disallowed dependencies in the output graph.")
- option_parser.add_option(
- "", "--unflatten",
- action="store_true", dest="unflatten_graph", default=False,
- help="Try to reformat the output graph so it is narrower and taller. "
- "Helps fight overly flat and wide graphs, but sometimes produces "
- "inferior results.")
- option_parser.add_option(
- "-v", "--verbose",
- action="store_true", default=False,
- help="Print debug logging")
- options, args = option_parser.parse_args()
-
- if not options.out_file.endswith(options.out_format):
- options.out_file += '.' + options.out_format
-
- deps_grapher = DepsGrapher(
- base_directory=options.base_directory,
- extra_repos=options.extra_repos,
- verbose=options.verbose,
- being_tested=False,
-
- ignore_temp_rules=options.ignore_temp_rules,
- ignore_specific_rules=options.ignore_specific_rules,
- hide_disallowed_deps=options.hide_disallowed_deps,
-
- out_file=options.out_file,
- out_format=options.out_format,
- layout_engine=options.layout_engine,
- unflatten_graph=options.unflatten_graph,
-
- incl=options.incl,
- excl=options.excl,
- hilite_fanins=options.hilite_fanins,
- hilite_fanouts=options.hilite_fanouts)
-
- if len(args) > 0:
- PrintUsage()
- return 1
-
- print 'Using base directory: ', deps_grapher.base_directory
- print 'include nodes : ', options.incl
- print 'exclude nodes : ', options.excl
- print 'highlight fanins of : ', options.hilite_fanins
- print 'highlight fanouts of: ', options.hilite_fanouts
-
- deps_grapher.DumpDependencies()
- return 0
-
-
-if '__main__' == __name__:
- sys.exit(main())
diff --git a/checkdeps/java_checker.py b/checkdeps/java_checker.py
deleted file mode 100644
index f59b776..0000000
--- a/checkdeps/java_checker.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Checks Java files for illegal imports."""
-
-import codecs
-import os
-import re
-
-import results
-from rules import Rule
-
-
-class JavaChecker(object):
- """Import checker for Java files.
-
- The CheckFile method uses real filesystem paths, but Java imports work in
- terms of package names. To deal with this, we have an extra "prescan" pass
- that reads all the .java files and builds a mapping of class name -> filepath.
- In CheckFile, we convert each import statement into a real filepath, and check
- that against the rules in the DEPS files.
-
- Note that in Java you can always use classes in the same directory without an
- explicit import statement, so these imports can't be blocked with DEPS files.
- But that shouldn't be a problem, because same-package imports are pretty much
- always correct by definition. (If we find a case where this is *not* correct,
- it probably means the package is too big and needs to be split up.)
-
- Properties:
- _classmap: dict of fully-qualified Java class name -> filepath
- """
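-  # For illustration (hypothetical class and path), the prescan records
-  # entries such as
-  #   self._classmap['org.chromium.foo.Bar'] = 'src/org/chromium/foo/Bar.java'
-  # so that an "import org.chromium.foo.Bar;" statement can be resolved back
-  # to a file path and checked against the DEPS rules for that directory.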
-
- EXTENSIONS = ['.java']
-
- # This regular expression will be used to extract filenames from import
- # statements.
- _EXTRACT_IMPORT_PATH = re.compile('^import\s+(?:static\s+)?([\w\.]+)\s*;')
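-  # e.g. 'import org.chromium.base.Log;' captures 'org.chromium.base.Log'
-  # (hypothetical class name, shown for illustration).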
-
- def __init__(self, base_directory, verbose, added_imports=None,
- allow_multiple_definitions=None):
- self._base_directory = base_directory
- self._verbose = verbose
- self._classmap = {}
- self._allow_multiple_definitions = allow_multiple_definitions or []
- if added_imports:
- added_classset = self._PrescanImportFiles(added_imports)
- self._PrescanFiles(added_classset)
-
- def _GetClassFullName(self, filepath):
- """Get the full class name of a file with package name."""
- if not os.path.isfile(filepath):
- return None
- with codecs.open(filepath, encoding='utf-8') as f:
- short_class_name, _ = os.path.splitext(os.path.basename(filepath))
- for line in f:
- for package in re.findall('^package\s+([\w\.]+);', line):
- return package + '.' + short_class_name
-
- def _IgnoreDir(self, d):
- # Skip hidden directories.
- if d.startswith('.'):
- return True
- # Skip the "out" directory, as dealing with generated files is awkward.
- # We don't want paths like "out/Release/lib.java" in our DEPS files.
- # TODO(husky): We need some way of determining the "real" path to
- # a generated file -- i.e., where it would be in source control if
- # it weren't generated.
- if d.startswith('out') or d in ('xcodebuild',):
- return True
- # Skip third-party directories.
- if d in ('third_party', 'ThirdParty'):
- return True
- return False
-
- def _PrescanFiles(self, added_classset):
- for root, dirs, files in os.walk(self._base_directory):
- # Skip unwanted subdirectories. TODO(husky): it would be better to do
- # this via the skip_child_includes flag in DEPS files. Maybe hoist this
- # prescan logic into checkdeps.py itself?
- dirs[:] = [d for d in dirs if not self._IgnoreDir(d)]
- for f in files:
- if f.endswith('.java'):
- self._PrescanFile(os.path.join(root, f), added_classset)
-
- def _PrescanImportFiles(self, added_imports):
-    """Build a set of fully-qualified classes affected by this patch.
-
-    Prescan imported files and build a classset of full class names (with
-    package name). This covers both changed files and changed imports.
-
- Args:
-      added_imports : ((file_path, (import_line, import_line, ...)), ...)
-
- Return:
- A set of full class names with package name of imported files.
- """
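-    # For example (hypothetical path and import), added_imports could be
-    #   [('src/org/chromium/foo/Bar.java',
-    #     ['import org.chromium.base.Log;'])]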
- classset = set()
- for filepath, changed_lines in (added_imports or []):
- if not self.ShouldCheck(filepath):
- continue
- full_class_name = self._GetClassFullName(filepath)
- if full_class_name:
- classset.add(full_class_name)
- for line in changed_lines:
- found_item = self._EXTRACT_IMPORT_PATH.match(line)
- if found_item:
- classset.add(found_item.group(1))
- return classset
-
- def _PrescanFile(self, filepath, added_classset):
- if self._verbose:
- print 'Prescanning: ' + filepath
- full_class_name = self._GetClassFullName(filepath)
- if full_class_name:
- if full_class_name in self._classmap:
- if self._verbose or full_class_name in added_classset:
- if not any((re.match(i, filepath) for i in self._allow_multiple_definitions)):
- print 'WARNING: multiple definitions of %s:' % full_class_name
- print ' ' + filepath
- print ' ' + self._classmap[full_class_name]
- print
- else:
- self._classmap[full_class_name] = filepath
- elif self._verbose:
- print 'WARNING: no package definition found in %s' % filepath
-
- def CheckLine(self, rules, line, filepath, fail_on_temp_allow=False):
- """Checks the given line with the given rule set.
-
- Returns a tuple (is_import, dependency_violation) where
- is_import is True only if the line is an import
- statement, and dependency_violation is an instance of
- results.DependencyViolation if the line violates a rule, or None
- if it does not.
- """
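-    # For example (hypothetical input), a line 'import android.view.View;'
-    # that maps to no in-tree class returns (True, None), while a non-import
-    # line returns (False, None).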
- found_item = self._EXTRACT_IMPORT_PATH.match(line)
- if not found_item:
- return False, None # Not a match
- clazz = found_item.group(1)
- if clazz not in self._classmap:
- # Importing a class from outside the Chromium tree. That's fine --
- # it's probably a Java or Android system class.
- return True, None
- import_path = os.path.relpath(
- self._classmap[clazz], self._base_directory)
- # Convert Windows paths to Unix style, as used in DEPS files.
- import_path = import_path.replace(os.path.sep, '/')
- rule = rules.RuleApplyingTo(import_path, filepath)
- if (rule.allow == Rule.DISALLOW or
- (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
- return True, results.DependencyViolation(import_path, rule, rules)
- return True, None
-
- def CheckFile(self, rules, filepath):
- if self._verbose:
- print 'Checking: ' + filepath
-
- dependee_status = results.DependeeStatus(filepath)
- with codecs.open(filepath, encoding='utf-8') as f:
- for line in f:
- is_import, violation = self.CheckLine(rules, line, filepath)
- if violation:
- dependee_status.AddViolation(violation)
- if '{' in line:
- # This is code, so we're finished reading imports for this file.
- break
-
- return dependee_status
-
- @staticmethod
- def IsJavaFile(filepath):
-    """Returns True if the given path ends in one of the extensions
-    handled by this checker.
- """
- return os.path.splitext(filepath)[1] in JavaChecker.EXTENSIONS
-
- def ShouldCheck(self, file_path):
- """Check if the new import file path should be presubmit checked.
-
- Args:
- file_path: file path to be checked
-
- Return:
- bool: True if the file should be checked; False otherwise.
- """
- return self.IsJavaFile(file_path)
diff --git a/checkdeps/proto_checker.py b/checkdeps/proto_checker.py
deleted file mode 100644
index a90628a..0000000
--- a/checkdeps/proto_checker.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Checks protobuf files for illegal imports."""
-
-import codecs
-import os
-import re
-
-import results
-from rules import Rule, MessageRule
-
-
-class ProtoChecker(object):
-
- EXTENSIONS = [
- '.proto',
- ]
-
- # The maximum number of non-import lines we can see before giving up.
- _MAX_UNINTERESTING_LINES = 50
-
-  # The maximum line length; this is to be efficient in the case of very long
-  # lines (which can't be imports).
- _MAX_LINE_LENGTH = 128
-
- # This regular expression will be used to extract filenames from import
- # statements.
- _EXTRACT_IMPORT_PATH = re.compile(
- '[ \t]*[ \t]*import[ \t]+"(.*)"')
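-  # e.g. 'import "components/foo/bar.proto";' captures
-  # 'components/foo/bar.proto' (hypothetical path, shown for illustration).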
-
- def __init__(self, verbose, resolve_dotdot=False, root_dir=''):
- self._verbose = verbose
- self._resolve_dotdot = resolve_dotdot
- self._root_dir = root_dir
-
- def IsFullPath(self, import_path):
- """Checks if the given path is a valid path starting from |_root_dir|."""
- match = re.match('(.*)/([^/]*\.proto)', import_path)
- if not match:
- return False
- return os.path.isdir(self._root_dir + "/" + match.group(1))
-
- def CheckLine(self, rules, line, dependee_path, fail_on_temp_allow=False):
- """Checks the given line with the given rule set.
-
- Returns a tuple (is_import, dependency_violation) where
- is_import is True only if the line is an import
- statement, and dependency_violation is an instance of
- results.DependencyViolation if the line violates a rule, or None
- if it does not.
- """
- found_item = self._EXTRACT_IMPORT_PATH.match(line)
- if not found_item:
- return False, None # Not a match
-
- import_path = found_item.group(1)
-
- if '\\' in import_path:
- return True, results.DependencyViolation(
- import_path,
- MessageRule('Import paths may not include backslashes.'),
- rules)
-
- if '/' not in import_path:
- # Don't fail when no directory is specified. We may want to be more
- # strict about this in the future.
- if self._verbose:
- print ' WARNING: import specified with no directory: ' + import_path
- return True, None
-
- if self._resolve_dotdot and '../' in import_path:
- dependee_dir = os.path.dirname(dependee_path)
- import_path = os.path.join(dependee_dir, import_path)
- import_path = os.path.relpath(import_path, self._root_dir)
-
- if not self.IsFullPath(import_path):
- return True, None
-
- rule = rules.RuleApplyingTo(import_path, dependee_path)
-
- if (rule.allow == Rule.DISALLOW or
- (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
- return True, results.DependencyViolation(import_path, rule, rules)
- return True, None
-
- def CheckFile(self, rules, filepath):
- if self._verbose:
- print 'Checking: ' + filepath
-
- dependee_status = results.DependeeStatus(filepath)
- last_import = 0
- with codecs.open(filepath, encoding='utf-8') as f:
- for line_num, line in enumerate(f):
- if line_num - last_import > self._MAX_UNINTERESTING_LINES:
- break
-
- line = line.strip()
-
- is_import, violation = self.CheckLine(rules, line, filepath)
- if is_import:
- last_import = line_num
- if violation:
- dependee_status.AddViolation(violation)
-
- return dependee_status
-
- @staticmethod
- def IsProtoFile(file_path):
- """Returns True iff the given path ends in one of the extensions
- handled by this checker.
- """
- return os.path.splitext(file_path)[1] in ProtoChecker.EXTENSIONS
-
- def ShouldCheck(self, file_path):
-    """Check if the new import file path should be presubmit checked.
-
- Args:
- file_path: file path to be checked
-
- Return:
- bool: True if the file should be checked; False otherwise.
- """
- return self.IsProtoFile(file_path)
diff --git a/checkdeps/results.py b/checkdeps/results.py
deleted file mode 100644
index b52880c..0000000
--- a/checkdeps/results.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Results object and results formatters for checkdeps tool."""
-
-
-import json
-
-
-class DependencyViolation(object):
- """A single dependency violation."""
-
- def __init__(self, include_path, violated_rule, rules):
- # The include or import path that is in violation of a rule.
- self.include_path = include_path
-
- # The violated rule.
- self.violated_rule = violated_rule
-
- # The set of rules containing self.violated_rule.
- self.rules = rules
-
-
-class DependeeStatus(object):
- """Results object for a dependee file."""
-
- def __init__(self, dependee_path):
- # Path of the file whose nonconforming dependencies are listed in
- # self.violations.
- self.dependee_path = dependee_path
-
- # List of DependencyViolation objects that apply to the dependee
- # file. May be empty.
- self.violations = []
-
- def AddViolation(self, violation):
- """Adds a violation."""
- self.violations.append(violation)
-
- def HasViolations(self):
- """Returns True if this dependee is violating one or more rules."""
- return not not self.violations
-
-
-class ResultsFormatter(object):
- """Base class for results formatters."""
-
- def AddError(self, dependee_status):
- """Add a formatted result to |self.results| for |dependee_status|,
- which is guaranteed to return True for
- |dependee_status.HasViolations|.
- """
- raise NotImplementedError()
-
- def GetResults(self):
- """Returns the results. May be overridden e.g. to process the
- results that have been accumulated.
- """
- raise NotImplementedError()
-
- def PrintResults(self):
- """Prints the results to stdout."""
- raise NotImplementedError()
-
-
-class NormalResultsFormatter(ResultsFormatter):
- """A results formatting object that produces the classical,
- detailed, human-readable output of the checkdeps tool.
- """
-
- def __init__(self, verbose):
- self.results = []
- self.verbose = verbose
-
- def AddError(self, dependee_status):
- lines = []
- lines.append('\nERROR in %s' % dependee_status.dependee_path)
- for violation in dependee_status.violations:
- lines.append(self.FormatViolation(violation, self.verbose))
- self.results.append('\n'.join(lines))
-
- @staticmethod
- def FormatViolation(violation, verbose=False):
- lines = []
- if verbose:
- lines.append(' For %s' % violation.rules)
- lines.append(
- ' Illegal include: "%s"\n Because of %s' %
- (violation.include_path, str(violation.violated_rule)))
- return '\n'.join(lines)
-
- def GetResults(self):
- return self.results
-
- def PrintResults(self):
- for result in self.results:
- print result
- if self.results:
- print '\nFAILED\n'
-
-
-class JSONResultsFormatter(ResultsFormatter):
- """A results formatter that outputs results to a file as JSON."""
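-  # The output file holds a JSON list with one entry per offending file, e.g.
-  # (hypothetical paths):
-  #   [{"dependee_path": "chrome/browser/foo.cc",
-  #     "violations": [{"include_path": "content/bar.h",
-  #                     "violated_rule": ["-", "chrome/browser", "content"]}]}]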
-
- def __init__(self, output_path, wrapped_formatter=None):
- self.output_path = output_path
- self.wrapped_formatter = wrapped_formatter
-
- self.results = []
-
- def AddError(self, dependee_status):
- self.results.append({
- 'dependee_path': dependee_status.dependee_path,
- 'violations': [{
- 'include_path': violation.include_path,
- 'violated_rule': violation.violated_rule.AsDependencyTuple(),
- } for violation in dependee_status.violations]
- })
-
- if self.wrapped_formatter:
- self.wrapped_formatter.AddError(dependee_status)
-
- def GetResults(self):
- with open(self.output_path, 'w') as f:
- f.write(json.dumps(self.results))
-
- return self.results
-
- def PrintResults(self):
- if self.wrapped_formatter:
- self.wrapped_formatter.PrintResults()
- return
-
- print self.results
-
-
-class TemporaryRulesFormatter(ResultsFormatter):
- """A results formatter that produces a single line per nonconforming
- include. The combined output is suitable for directly pasting into a
- DEPS file as a list of temporary-allow rules.
- """
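-  # e.g. a nonconforming include of "base/foo.h" (hypothetical path) is
-  # emitted as the line:
-  #   "!base/foo.h",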
-
- def __init__(self):
- self.violations = set()
-
- def AddError(self, dependee_status):
- for violation in dependee_status.violations:
- self.violations.add(violation.include_path)
-
- def GetResults(self):
- return [' "!%s",' % path for path in sorted(self.violations)]
-
- def PrintResults(self):
- for result in self.GetResults():
- print result
-
-
-class CountViolationsFormatter(ResultsFormatter):
- """A results formatter that produces a number, the count of #include
- statements that are in violation of the dependency rules.
-
- Note that you normally want to instantiate DepsChecker with
- ignore_temp_rules=True when you use this formatter.
- """
-
- def __init__(self):
- self.count = 0
-
- def AddError(self, dependee_status):
- self.count += len(dependee_status.violations)
-
- def GetResults(self):
- return '%d' % self.count
-
- def PrintResults(self):
- print self.count
diff --git a/checkdeps/rules.py b/checkdeps/rules.py
deleted file mode 100644
index 199c18f..0000000
--- a/checkdeps/rules.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Base classes to represent dependency rules, used by checkdeps.py"""
-
-
-import os
-import re
-
-
-class Rule(object):
- """Specifies a single rule for an include, which can be one of
- ALLOW, DISALLOW and TEMP_ALLOW.
- """
-
- # These are the prefixes used to indicate each type of rule. These
- # are also used as values for self.allow to indicate which type of
- # rule this is.
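-  # e.g. (hypothetical paths) "+base" allows includes from base/, "-content"
-  # disallows them from content/, and "!net" allows them only temporarily.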
- ALLOW = '+'
- DISALLOW = '-'
- TEMP_ALLOW = '!'
-
- def __init__(self, allow, directory, dependent_directory, source):
- self.allow = allow
- self._dir = directory
- self._dependent_dir = dependent_directory
- self._source = source
-
- def __str__(self):
- return '"%s%s" from %s.' % (self.allow, self._dir, self._source)
-
- def AsDependencyTuple(self):
- """Returns a tuple (allow, dependent dir, dependee dir) for this rule,
- which is fully self-sufficient to answer the question whether the dependent
- is allowed to depend on the dependee, without knowing the external
- context."""
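-    # e.g. a "+base" rule declared in chrome/DEPS (hypothetical locations)
-    # comes back as ('+', 'chrome', 'base').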
- return self.allow, self._dependent_dir or '.', self._dir or '.'
-
- def ParentOrMatch(self, other):
- """Returns true if the input string is an exact match or is a parent
- of the current rule. For example, the input "foo" would match "foo/bar"."""
- return self._dir == other or self._dir.startswith(other + '/')
-
- def ChildOrMatch(self, other):
- """Returns true if the input string would be covered by this rule. For
- example, the input "foo/bar" would match the rule "foo"."""
- return self._dir == other or other.startswith(self._dir + '/')
-
-
-class MessageRule(Rule):
- """A rule that has a simple message as the reason for failing,
- unrelated to directory or source.
- """
-
- def __init__(self, reason):
- super(MessageRule, self).__init__(Rule.DISALLOW, '', '', '')
- self._reason = reason
-
- def __str__(self):
- return self._reason
-
-
-def ParseRuleString(rule_string, source):
- """Returns a tuple of a character indicating what type of rule this
- is, and a string holding the path the rule applies to.
- """
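-  # e.g. ParseRuleString('+base/memory', 'chrome/DEPS') (hypothetical inputs)
-  # returns ('+', 'base/memory').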
- if not rule_string:
- raise Exception('The rule string "%s" is empty\nin %s' %
- (rule_string, source))
-
- if not rule_string[0] in [Rule.ALLOW, Rule.DISALLOW, Rule.TEMP_ALLOW]:
- raise Exception(
- 'The rule string "%s" does not begin with a "+", "-" or "!".' %
- rule_string)
-
- return rule_string[0], rule_string[1:]
-
-
-class Rules(object):
- """Sets of rules for files in a directory.
-
- By default, rules are added to the set of rules applicable to all
- dependee files in the directory. Rules may also be added that apply
- only to dependee files whose filename (last component of their path)
- matches a given regular expression; hence there is one additional
- set of rules per unique regular expression.
- """
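-  # For illustration, a DEPS file containing (hypothetical rules)
-  #   include_rules = [ "+base" ]
-  #   specific_include_rules = { ".*_unittest\.cc": [ "+testing" ] }
-  # yields one general rule plus one specific rule set keyed by the
-  # ".*_unittest\.cc" regular expression.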
-
- def __init__(self):
- """Initializes the current rules with an empty rule list for all
- files.
- """
- # We keep the general rules out of the specific rules dictionary,
- # as we need to always process them last.
- self._general_rules = []
-
- # Keys are regular expression strings, values are arrays of rules
- # that apply to dependee files whose basename matches the regular
- # expression. These are applied before the general rules, but
- # their internal order is arbitrary.
- self._specific_rules = {}
-
- def __str__(self):
- result = ['Rules = {\n (apply to all files): [\n%s\n ],' % '\n'.join(
- ' %s' % x for x in self._general_rules)]
- for regexp, rules in self._specific_rules.iteritems():
- result.append(' (limited to files matching %s): [\n%s\n ]' % (
- regexp, '\n'.join(' %s' % x for x in rules)))
- result.append(' }')
- return '\n'.join(result)
-
- def AsDependencyTuples(self, include_general_rules, include_specific_rules):
- """Returns a list of tuples (allow, dependent dir, dependee dir) for the
- specified rules (general/specific). Currently only general rules are
- supported."""
- def AddDependencyTuplesImpl(deps, rules, extra_dependent_suffix=""):
- for rule in rules:
- (allow, dependent, dependee) = rule.AsDependencyTuple()
- tup = (allow, dependent + extra_dependent_suffix, dependee)
- deps.add(tup)
-
- deps = set()
- if include_general_rules:
- AddDependencyTuplesImpl(deps, self._general_rules)
- if include_specific_rules:
- for regexp, rules in self._specific_rules.iteritems():
- AddDependencyTuplesImpl(deps, rules, "/" + regexp)
- return deps
-
- def AddRule(self, rule_string, dependent_dir, source, dependee_regexp=None):
- """Adds a rule for the given rule string.
-
- Args:
- rule_string: The include_rule string read from the DEPS file to apply.
- source: A string representing the location of that string (filename, etc.)
- so that we can give meaningful errors.
- dependent_dir: The directory to which this rule applies.
- dependee_regexp: The rule will only be applied to dependee files
- whose filename (last component of their path)
- matches the expression. None to match all
- dependee files.
- """
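-    # e.g. AddRule('+base/memory', 'chrome/browser', 'chrome/browser/DEPS')
-    # (hypothetical arguments) allows files under chrome/browser to include
-    # from base/memory.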
- rule_type, rule_dir = ParseRuleString(rule_string, source)
-
- if not dependee_regexp:
- rules_to_update = self._general_rules
- else:
- if dependee_regexp in self._specific_rules:
- rules_to_update = self._specific_rules[dependee_regexp]
- else:
- rules_to_update = []
-
- # Remove any existing rules or sub-rules that apply. For example, if we're
- # passed "foo", we should remove "foo", "foo/bar", but not "foobar".
- rules_to_update = [x for x in rules_to_update
- if not x.ParentOrMatch(rule_dir)]
- rules_to_update.insert(0, Rule(rule_type, rule_dir, dependent_dir, source))
-
- if not dependee_regexp:
- self._general_rules = rules_to_update
- else:
- self._specific_rules[dependee_regexp] = rules_to_update
-
- def RuleApplyingTo(self, include_path, dependee_path):
- """Returns the rule that applies to |include_path| for a dependee
- file located at |dependee_path|.
- """
- dependee_filename = os.path.basename(dependee_path)
- for regexp, specific_rules in self._specific_rules.iteritems():
- if re.match(regexp, dependee_filename):
- for rule in specific_rules:
- if rule.ChildOrMatch(include_path):
- return rule
- for rule in self._general_rules:
- if rule.ChildOrMatch(include_path):
- return rule
- return MessageRule('no rule applying.')
diff --git a/checkdeps/testdata/DEPS b/checkdeps/testdata/DEPS
deleted file mode 100644
index 0b2cb5b..0000000
--- a/checkdeps/testdata/DEPS
+++ /dev/null
@@ -1,8 +0,0 @@
-include_rules = [
- "-checkdeps/testdata/disallowed",
- "+checkdeps/testdata/allowed",
- "-third_party/explicitly_disallowed",
-]
-skip_child_includes = [
- "checkdeps_test",
-]
diff --git a/checkdeps/testdata/allowed/DEPS b/checkdeps/testdata/allowed/DEPS
deleted file mode 100644
index cd47796..0000000
--- a/checkdeps/testdata/allowed/DEPS
+++ /dev/null
@@ -1,12 +0,0 @@
-include_rules = [
- "+checkdeps/testdata/disallowed/allowed",
- "!checkdeps/testdata/disallowed/temporarily_allowed.h",
- "+third_party/allowed_may_use",
-]
-
-specific_include_rules = {
- ".*_unittest\.cc": [
- "+checkdeps/testdata/disallowed/teststuff",
- "!checkdeps/testdata/bongo/temp_allowed_for_tests.h",
- ]
-}
diff --git a/checkdeps/testdata/allowed/foo_unittest.cc b/checkdeps/testdata/allowed/foo_unittest.cc
deleted file mode 100644
index 28a660b..0000000
--- a/checkdeps/testdata/allowed/foo_unittest.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "checkdeps/testdata/disallowed/teststuff/good.h"
diff --git a/checkdeps/testdata/allowed/not_a_test.cc b/checkdeps/testdata/allowed/not_a_test.cc
deleted file mode 100644
index 56f1fef..0000000
--- a/checkdeps/testdata/allowed/not_a_test.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "checkdeps/testdata/disallowed/teststuff/bad.h"
diff --git a/checkdeps/testdata/allowed/test.h b/checkdeps/testdata/allowed/test.h
deleted file mode 100644
index 411b431..0000000
--- a/checkdeps/testdata/allowed/test.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "checkdeps/testdata/allowed/good.h"
-#include "checkdeps/testdata/disallowed/bad.h"
-#include "checkdeps/testdata/disallowed/allowed/good.h"
-#include "checkdeps/testdata/disallowed/temporarily_allowed.h"
-#include "third_party/explicitly_disallowed/bad.h"
-#include "third_party/allowed_may_use/good.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/checkdeps_test/DEPS b/checkdeps/testdata/checkdeps_test/DEPS
deleted file mode 100644
index 91a9b99..0000000
--- a/checkdeps/testdata/checkdeps_test/DEPS
+++ /dev/null
@@ -1,5 +0,0 @@
-include_rules = [
- "-disallowed",
- "+allowed",
- "-third_party/explicitly_disallowed",
-]
diff --git a/checkdeps/testdata/checkdeps_test/allowed/DEPS b/checkdeps/testdata/checkdeps_test/allowed/DEPS
deleted file mode 100644
index 14aa4d4..0000000
--- a/checkdeps/testdata/checkdeps_test/allowed/DEPS
+++ /dev/null
@@ -1,11 +0,0 @@
-include_rules = [
- "+disallowed/allowed",
- "!disallowed/temporarily_allowed.h",
- "+third_party/allowed_may_use",
-]
-
-specific_include_rules = {
- ".*_unittest\.cc": [
- "+disallowed/teststuff",
- ]
-}
diff --git a/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc b/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc
deleted file mode 100644
index 1a507ec..0000000
--- a/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "disallowed/teststuff/good.h"
diff --git a/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc b/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc
deleted file mode 100644
index 4278d64..0000000
--- a/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "disallowed/teststuff/bad.h"
diff --git a/checkdeps/testdata/checkdeps_test/allowed/test.h b/checkdeps/testdata/checkdeps_test/allowed/test.h
deleted file mode 100644
index 2dbd7a3..0000000
--- a/checkdeps/testdata/checkdeps_test/allowed/test.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "allowed/good.h"
-#include "disallowed/bad.h"
-#include "disallowed/allowed/good.h"
-#include "disallowed/temporarily_allowed.h"
-#include "third_party/explicitly_disallowed/bad.h"
-#include "third_party/allowed_may_use/good.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS b/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS
deleted file mode 100644
index 2be72b8..0000000
--- a/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-skip_child_includes = [
- "skipped",
-]
diff --git a/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h b/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h
deleted file mode 100644
index 8010596..0000000
--- a/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "whatever/whocares/ok.h"
diff --git a/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h b/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h
deleted file mode 100644
index aa5013d..0000000
--- a/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "allowed/good.h"
-// Always allowed to include self and parents.
-#include "disallowed/good.h"
-#include "disallowed/allowed/good.h"
-#include "third_party/explicitly_disallowed/bad.h"
-#include "third_party/allowed_may_use/bad.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/checkdeps_test/disallowed/test.h b/checkdeps/testdata/checkdeps_test/disallowed/test.h
deleted file mode 100644
index 5520a68..0000000
--- a/checkdeps/testdata/checkdeps_test/disallowed/test.h
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "allowed/good.h"
-// Always allowed to include self.
-#include "disallowed/good.h"
-#include "disallowed/allowed/good.h"
-#include "third_party/explicitly_disallowed/bad.h"
-// Only allowed for code under allowed/.
-#include "third_party/allowed_may_use/bad.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/disallowed/allowed/DEPS b/checkdeps/testdata/disallowed/allowed/DEPS
deleted file mode 100644
index 2be72b8..0000000
--- a/checkdeps/testdata/disallowed/allowed/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-skip_child_includes = [
- "skipped",
-]
diff --git a/checkdeps/testdata/disallowed/allowed/skipped/test.h b/checkdeps/testdata/disallowed/allowed/skipped/test.h
deleted file mode 100644
index 8010596..0000000
--- a/checkdeps/testdata/disallowed/allowed/skipped/test.h
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "whatever/whocares/ok.h"
diff --git a/checkdeps/testdata/disallowed/allowed/test.h b/checkdeps/testdata/disallowed/allowed/test.h
deleted file mode 100644
index 363db43..0000000
--- a/checkdeps/testdata/disallowed/allowed/test.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "checkdeps/testdata/allowed/good.h"
-// Always allowed to include self and parents.
-#include "checkdeps/testdata/disallowed/good.h"
-#include "checkdeps/testdata/disallowed/allowed/good.h"
-#include "third_party/explicitly_disallowed/bad.h"
-#include "third_party/allowed_may_use/bad.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/disallowed/foo_unittest.cc b/checkdeps/testdata/disallowed/foo_unittest.cc
deleted file mode 100644
index f9ab3c9..0000000
--- a/checkdeps/testdata/disallowed/foo_unittest.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Not allowed for code under disallowed/ but temporarily allowed
-// specifically for test code under allowed/. This regression tests a
-// bug where we were taking shallow copies of rules when generating
-// rules for subdirectories, so all rule objects were getting the same
-// dictionary for specific rules.
-#include "checkdeps/testdata/disallowed/temp_allowed_for_tests.h"
diff --git a/checkdeps/testdata/disallowed/test.h b/checkdeps/testdata/disallowed/test.h
deleted file mode 100644
index 4334a35..0000000
--- a/checkdeps/testdata/disallowed/test.h
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "checkdeps/testdata/allowed/good.h"
-// Always allowed to include self.
-#include "checkdeps/testdata/disallowed/good.h"
-#include "checkdeps/testdata/disallowed/allowed/good.h"
-#include "third_party/explicitly_disallowed/bad.h"
-// Only allowed for code under allowed/.
-#include "third_party/allowed_may_use/bad.h"
-#include "third_party/no_rule/bad.h"
diff --git a/checkdeps/testdata/noparent/DEPS b/checkdeps/testdata/noparent/DEPS
deleted file mode 100644
index 3e0f137..0000000
--- a/checkdeps/testdata/noparent/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-# Removes the rules inherited from ../DEPS. In particular,
-# checkdeps/testdata/allowed is no longer allowed.
-noparent = True
diff --git a/checkdeps/testdata/noparent/test.h b/checkdeps/testdata/noparent/test.h
deleted file mode 100644
index 9245e12..0000000
--- a/checkdeps/testdata/noparent/test.h
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Disallowed because noparent removes the +allowed from the parent dir.
-#include "checkdeps/testdata/allowed/bad.h"
-
-// Same-directory includes are still allowed.
-#include "checkdeps/testdata/noparent/self.h"
diff --git a/clang_format/OWNERS b/clang_format/OWNERS
deleted file mode 100644
index a14b04c..0000000
--- a/clang_format/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-nick@chromium.org
-thakis@chromium.org
diff --git a/clang_format/README.chromium b/clang_format/README.chromium
deleted file mode 100644
index 03f9c92..0000000
--- a/clang_format/README.chromium
+++ /dev/null
@@ -1,15 +0,0 @@
-Name: clang-format
-Short Name: clang-format
-URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/tools/clang-format/
-Version: 8.0.0
-Date: 15 November 2018
-Revision: See DEPS
-License: University of Illinois/NCSA Open Source License
-License File: NOT_SHIPPED
-Security Critical: No
-
-Description:
-A tool for formatting C++ code to style.
-
-Local Modifications:
-None
diff --git a/clang_format/README.txt b/clang_format/README.txt
deleted file mode 100644
index 29b446c..0000000
--- a/clang_format/README.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-This folder contains clang-format scripts. The binaries will be automatically
-downloaded from Google Storage by gclient runhooks for the current platform.
-
-For a walkthrough on how to maintain these binaries:
- https://chromium.googlesource.com/chromium/src/+/master/docs/updating_clang_format_binaries.md
-
-To upload a file:
- python ~/depot_tools/upload_to_google_storage.py -b chromium-clang-format <FILENAME>
-
-On Linux and Mac, check that clang-format has its +x bit set before you run this
-upload command. Don't upload Linux and Mac binaries from Windows, since
-upload_to_google_storage.py will not set the +x bit on google storage when it's
-run from Windows.
-
-To download a file given a .sha1 file:
- python ~/depot_tools/download_from_google_storage.py -b chromium-clang-format -s <FILENAME>.sha1
-
-List the contents of the chromium-clang-format Google Storage bucket:
- python ~/depot_tools/third_party/gsutil/gsutil ls gs://chromium-clang-format/
-
-To initialize gsutil's credentials:
- python ~/depot_tools/third_party/gsutil/gsutil config
-
- That will give a URL which you should log into with your web browser. The
- username should be the one that is on the ACL for the "chromium-clang-format"
- bucket (probably your @google.com address). Contact the build team for help
- getting access if necessary.
-
- Copy the code back to the command line util. Ignore the project ID (it's OK
- to just leave blank when prompted).
-
-gsutil documentation:
- https://developers.google.com/storage/docs/gsutil
diff --git a/codereview.settings b/codereview.settings
deleted file mode 100644
index ac1510a..0000000
--- a/codereview.settings
+++ /dev/null
@@ -1,4 +0,0 @@
-# This file is used by git-cl to get repository specific information.
-GERRIT_HOST: True
-CODE_REVIEW_SERVER: codereview.chromium.org
-PROJECT: buildtools
diff --git a/deps_revisions.gni b/deps_revisions.gni
deleted file mode 100644
index 147aeb0..0000000
--- a/deps_revisions.gni
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
- # The svn revisions that belong to the git hashes in DEPS. Used to cause full
- # rebuilds on libc++ rolls.
- clang_format_svn_revision = "346566"
- libcxx_svn_revision = "349080"
- libcxxabi_svn_revision = "347903"
- libunwind_svn_revision = "348981"
-}
diff --git a/linux64/clang-format.sha1 b/linux64/clang-format.sha1
deleted file mode 100644
index e2b3199..0000000
--- a/linux64/clang-format.sha1
+++ /dev/null
@@ -1 +0,0 @@
-942fc8b1789144b8071d3fc03ff0fcbe1cf81ac8
\ No newline at end of file
diff --git a/linux64/gn.sha1 b/linux64/gn.sha1
deleted file mode 100644
index 862f00a..0000000
--- a/linux64/gn.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3523d50538357829725d4ed74b777a572ce0ac74
\ No newline at end of file
diff --git a/mac/clang-format.sha1 b/mac/clang-format.sha1
deleted file mode 100644
index d32c626..0000000
--- a/mac/clang-format.sha1
+++ /dev/null
@@ -1 +0,0 @@
-025ca7c75f37ef4a40f3a67d81ddd11d7d0cdb9b
\ No newline at end of file
diff --git a/mac/gn.sha1 b/mac/gn.sha1
deleted file mode 100644
index a5b3aaa..0000000
--- a/mac/gn.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d43122f6140d0711518aa909980cb009c4fbce3d
\ No newline at end of file
diff --git a/third_party/eu-strip/OWNERS b/third_party/eu-strip/OWNERS
deleted file mode 100644
index 4644c96..0000000
--- a/third_party/eu-strip/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-dpranke@chromium.org
-thestig@chromium.org
-thomasanderson@chromium.org
diff --git a/third_party/eu-strip/README.chromium b/third_party/eu-strip/README.chromium
deleted file mode 100644
index 5c621b0..0000000
--- a/third_party/eu-strip/README.chromium
+++ /dev/null
@@ -1,12 +0,0 @@
-Name: eu-strip
-URL: https://sourceware.org/elfutils/
-Version: 0.158
-Security Critical: no
-License: LGPL 3
-License File: NOT_SHIPPED
-
-Description:
-
-Patched eu-strip from elfutils.
-
-To build (on Trusty): ./build.sh in this directory.
diff --git a/third_party/eu-strip/bin/eu-strip b/third_party/eu-strip/bin/eu-strip
deleted file mode 100755
index 7dcb3c0..0000000
--- a/third_party/eu-strip/bin/eu-strip
+++ /dev/null
Binary files differ
diff --git a/third_party/eu-strip/build.sh b/third_party/eu-strip/build.sh
deleted file mode 100755
index 86f2b67..0000000
--- a/third_party/eu-strip/build.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh -xe
-
-rm -rf elfutils
-git clone git://sourceware.org/git/elfutils.git
-cd elfutils
-git checkout elfutils-0.170
-autoheader
-aclocal
-autoconf
-automake --add-missing
-patch -p1 < ../fix-elf-size.patch
-mkdir build
-cd build
-../configure --enable-maintainer-mode
-make -j40
-gcc -std=gnu99 -Wall -Wshadow -Wunused -Wextra -fgnu89-inline \
- -Wformat=2 -Werror -g -O2 -Wl,-rpath-link,libelf:libdw -Wl,--build-id=none -o eu-strip \
- src/strip.o libebl/libebl.a libelf/libelf.a lib/libeu.a libdw/libdw.a -ldl -lz
-./eu-strip -o ../../bin/eu-strip eu-strip
diff --git a/third_party/eu-strip/fix-elf-size.patch b/third_party/eu-strip/fix-elf-size.patch
deleted file mode 100644
index e3fdc8a..0000000
--- a/third_party/eu-strip/fix-elf-size.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-diff --git a/libelf/elf32_updatenull.c b/libelf/elf32_updatenull.c
-index d83c0b3f..507e707b 100644
---- a/libelf/elf32_updatenull.c
-+++ b/libelf/elf32_updatenull.c
-@@ -137,7 +137,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
- return -1;
-
- /* At least the ELF header is there. */
-- off_t size = elf_typesize (LIBELFBITS, ELF_T_EHDR, 1);
-+ ElfW2(LIBELFBITS,Off) size = elf_typesize (LIBELFBITS, ELF_T_EHDR, 1);
-
- /* Set the program header position. */
- if (elf->state.ELFW(elf,LIBELFBITS).phdr == NULL)
-@@ -152,7 +152,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
- {
- /* The user is supposed to fill out e_phoff. Use it and
- e_phnum to determine the maximum extend. */
-- size = MAX ((size_t) size,
-+ size = MAX (size,
- ehdr->e_phoff
- + elf_typesize (LIBELFBITS, ELF_T_PHDR, phnum));
- }
-@@ -330,7 +330,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
-
- if (elf->flags & ELF_F_LAYOUT)
- {
-- size = MAX ((GElf_Word) size,
-+ size = MAX (size,
- (shdr->sh_type != SHT_NOBITS
- ? shdr->sh_offset + shdr->sh_size : 0));
-
-@@ -352,9 +352,9 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
- update_if_changed (shdr->sh_addralign, sh_align,
- scn->shdr_flags);
-
-- size = (size + sh_align - 1) & ~(sh_align - 1);
-+ size = (size + sh_align - 1) & ~(ElfW2(LIBELFBITS,Off))(sh_align - 1);
- int offset_changed = 0;
-- update_if_changed (shdr->sh_offset, (GElf_Word) size,
-+ update_if_changed (shdr->sh_offset, size,
- offset_changed);
- changed |= offset_changed;
-
-@@ -416,7 +416,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
- /* The user is supposed to fill out e_shoff. Use it and
- e_shnum (or sh_size of the dummy, first section header)
- to determine the maximum extend. */
-- size = MAX ((GElf_Word) size,
-+ size = MAX (size,
- (ehdr->e_shoff
- + (elf_typesize (LIBELFBITS, ELF_T_SHDR, shnum))));
- }
-@@ -430,7 +430,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
- #define SHDR_ALIGN sizeof (ElfW2(LIBELFBITS,Off))
- size = (size + SHDR_ALIGN - 1) & ~(SHDR_ALIGN - 1);
-
-- update_if_changed (ehdr->e_shoff, (GElf_Word) size, elf->flags);
-+ update_if_changed (ehdr->e_shoff, size, elf->flags);
-
- /* Account for the section header size. */
- size += elf_typesize (LIBELFBITS, ELF_T_SHDR, shnum);
diff --git a/third_party/libc++/BUILD.gn b/third_party/libc++/BUILD.gn
deleted file mode 100644
index 9bcd072..0000000
--- a/third_party/libc++/BUILD.gn
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/c++/c++.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/toolchain/toolchain.gni")
-
-# Used by libc++ and libc++abi.
-config("config") {
- cflags = [ "-fstrict-aliasing" ]
- if (is_win) {
- # libc++ wants to redefine the macros WIN32_LEAN_AND_MEAN and _CRT_RAND_S in its
- # implementation.
- cflags += [ "-Wno-macro-redefined" ]
- } else {
- cflags += [ "-fPIC" ]
- }
-}
-
-if (libcxx_is_shared) {
- _libcxx_target_type = "shared_library"
-} else {
- _libcxx_target_type = "source_set"
-}
-target(_libcxx_target_type, "libc++") {
- if (libcxx_is_shared) {
- no_default_deps = true
- }
- sources = [
- "trunk/src/algorithm.cpp",
- "trunk/src/any.cpp",
- "trunk/src/bind.cpp",
- "trunk/src/chrono.cpp",
- "trunk/src/condition_variable.cpp",
- "trunk/src/debug.cpp",
- "trunk/src/exception.cpp",
- "trunk/src/functional.cpp",
- "trunk/src/future.cpp",
- "trunk/src/hash.cpp",
- "trunk/src/ios.cpp",
- "trunk/src/iostream.cpp",
- "trunk/src/locale.cpp",
- "trunk/src/memory.cpp",
- "trunk/src/mutex.cpp",
- "trunk/src/new.cpp",
- "trunk/src/optional.cpp",
- "trunk/src/random.cpp",
- "trunk/src/regex.cpp",
- "trunk/src/shared_mutex.cpp",
- "trunk/src/stdexcept.cpp",
- "trunk/src/string.cpp",
- "trunk/src/strstream.cpp",
- "trunk/src/system_error.cpp",
- "trunk/src/thread.cpp",
- "trunk/src/typeinfo.cpp",
- "trunk/src/utility.cpp",
- "trunk/src/valarray.cpp",
- "trunk/src/variant.cpp",
- "trunk/src/vector.cpp",
- ]
- if (is_win) {
- sources += [
- "trunk/src/support/win32/locale_win32.cpp",
- "trunk/src/support/win32/support.cpp",
- "trunk/src/support/win32/thread_win32.cpp",
- ]
- }
- configs -= [
- "//build/config/compiler:chromium_code",
- "//build/config/compiler:no_exceptions",
- "//build/config/compiler:no_rtti",
- "//build/config/coverage:default_coverage",
- ]
- if (is_android && libcxx_is_shared) {
- configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
- }
- configs += [
- ":config",
- "//build/config/compiler:no_chromium_code",
- "//build/config/compiler:exceptions",
- "//build/config/compiler:rtti",
- "//build/config/sanitizers:sanitizer_options_link_helper",
- ]
- if (libcxx_is_shared) {
- configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
- configs += [ "//build/config/gcc:symbol_visibility_default" ]
- }
-
- defines = [ "_LIBCPP_BUILDING_LIBRARY" ]
- if (!is_clang && libcxx_is_shared) {
- # This is a temporary workaround to get libc++ builds working with
-    # gcc. It can be removed once either
- # https://reviews.llvm.org/D35326 or
- # https://reviews.llvm.org/D35388 lands.
- defines += [ "_LIBCPP_EXTERN_TEMPLATE_TYPE_VIS=__attribute__((__visibility__(\"default\")))" ]
- }
- if (!libcxx_is_shared) {
- if (is_mac && is_clang) {
- # We want operator new/delete to be private on Mac, but these functions
- # are implicitly created by the compiler for each translation unit, as
- # specified in the C++ spec 3.7.4p2, which makes them always have default
- # visibility. This option is needed to force hidden visibility since
- # -fvisibility=hidden doesn't have the desired effect.
- cflags = [ "-fvisibility-global-new-delete-hidden" ]
- } else {
- defines += [
- # This resets the visibility to default only for the various
- # flavors of operator new and operator delete. These symbols
-        # are weak and get overridden by Chromium-provided ones, but if
-        # these symbols had hidden visibility, this would make the
-        # Chromium symbols hidden too because ELF visibility rules
- # require that linkers use the least visible form when merging,
- # and if this is hidden, then when we merge it with tcmalloc's
- # operator new, hidden visibility would win. However, tcmalloc
- # needs a visible operator new to also override operator new
- # references from system libraries.
- # TODO(lld): Ask lld for a --force-public-visibility flag or
-        # similar that overrides the default ELF merging rules, and
- # make tcmalloc's gn config pass that to all its dependencies,
- # then remove this override here.
- "_LIBCPP_OVERRIDABLE_FUNC_VIS=__attribute__((__visibility__(\"default\")))",
- ]
- }
- }
- if (is_asan || is_tsan || is_msan) {
- # In {a,t,m}san configurations, operator new and operator delete will be
- # provided by the sanitizer runtime library. Since libc++ defines these
- # symbols with weak linkage, and the *san runtime uses strong linkage, it
- # should technically be OK to omit this, but it's added to be explicit.
- defines += [ "_LIBCPP_DISABLE_NEW_DELETE_DEFINITIONS" ]
- }
-
- if (!is_win) {
- defines += [ "LIBCXX_BUILDING_LIBCXXABI" ]
- if (!export_libcxxabi_from_executables) {
- deps = [
- "//buildtools/third_party/libc++abi",
- ]
- }
- }
-}
diff --git a/third_party/libc++/OWNERS b/third_party/libc++/OWNERS
deleted file mode 100644
index 361c2b5..0000000
--- a/third_party/libc++/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-earthdok@chromium.org
-glider@chromium.org
-thakis@chromium.org
-thomasanderson@chromium.org
diff --git a/third_party/libc++/README.chromium b/third_party/libc++/README.chromium
deleted file mode 100644
index 0c6fbee..0000000
--- a/third_party/libc++/README.chromium
+++ /dev/null
@@ -1,11 +0,0 @@
-Name: libcxx
-Short Name: libc++
-URL: http://libcxx.llvm.org/
-Version: 1.0
-License: MIT, University of Illinois/NCSA Open Source License
-License File: trunk/LICENSE.TXT
-Security Critical: yes
-
-Description:
-
-libc++ for Chromium.
diff --git a/third_party/libc++abi/BUILD.gn b/third_party/libc++abi/BUILD.gn
deleted file mode 100644
index 759327c..0000000
--- a/third_party/libc++abi/BUILD.gn
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/c++/c++.gni")
-
-source_set("libc++abi") {
- visibility = [
- "//buildtools/third_party/libc++",
- "//build/config:executable_deps",
- "//build/config:loadable_module_deps",
- "//build/config:shared_library_deps",
- ]
- deps = []
-
- # Fuchsia builds don't link against any libraries that provide stack
- # unwinding symbols, unlike Linux does with glibc. Build and link against
- # libunwind manually to get this functionality.
- # TODO(thomasanderson): Move this to exe_and_shlib_deps.
- if (is_fuchsia) {
- deps += [ "//buildtools/third_party/libunwind" ]
- }
-
- sources = [
- "trunk/src/abort_message.cpp",
- "trunk/src/cxa_aux_runtime.cpp",
- "trunk/src/cxa_default_handlers.cpp",
- "trunk/src/cxa_exception.cpp",
- "trunk/src/cxa_exception_storage.cpp",
- "trunk/src/cxa_handlers.cpp",
-
- # This file is supposed to be used in fno-exception builds of
-    # libc++abi. We build libc++/libc++abi with exceptions enabled.
- #"trunk/src/cxa_noexception.cpp",
- "trunk/src/cxa_personality.cpp",
- "trunk/src/cxa_unexpected.cpp",
- "trunk/src/cxa_vector.cpp",
- "trunk/src/cxa_virtual.cpp",
- "trunk/src/fallback_malloc.cpp",
- "trunk/src/private_typeinfo.cpp",
- "trunk/src/stdlib_exception.cpp",
- "trunk/src/stdlib_stdexcept.cpp",
- "trunk/src/stdlib_typeinfo.cpp",
- ]
-
- if (!is_tsan) {
- sources += [ "trunk/src/cxa_guard.cpp" ]
- }
-
- # See the comment in cxa_demangle_stub.cc for why we don't use LLVM's
- # demangler on android.
- if (is_android) {
- sources += [ "cxa_demangle_stub.cc" ]
- } else {
- sources += [ "trunk/src/cxa_demangle.cpp" ]
- }
-
- # This file should really be included on linux as well, but that
- # would introduce an unwanted glibc 2.18 dependency.
- if (is_fuchsia || (is_posix && !is_mac && !is_linux)) {
- sources += [ "trunk/src/cxa_thread_atexit.cpp" ]
- }
-
- defines = [
- "LIBCXXABI_SILENT_TERMINATE",
- "_LIBCPP_ENABLE_CXX17_REMOVED_UNEXPECTED_FUNCTIONS",
- ]
-
- configs -= [
- "//build/config/compiler:chromium_code",
- "//build/config/compiler:no_exceptions",
- "//build/config/compiler:no_rtti",
- "//build/config/coverage:default_coverage",
- ]
- configs += [
- "//build/config/compiler:no_chromium_code",
- "//build/config/compiler:exceptions",
- "//build/config/compiler:rtti",
- "//buildtools/third_party/libc++:config",
- ]
-
- if (export_libcxxabi_from_executables || libcxx_is_shared) {
- configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
- configs += [ "//build/config/gcc:symbol_visibility_default" ]
- }
-}
diff --git a/third_party/libc++abi/OWNERS b/third_party/libc++abi/OWNERS
deleted file mode 100644
index 361c2b5..0000000
--- a/third_party/libc++abi/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-earthdok@chromium.org
-glider@chromium.org
-thakis@chromium.org
-thomasanderson@chromium.org
diff --git a/third_party/libc++abi/README.chromium b/third_party/libc++abi/README.chromium
deleted file mode 100644
index a57429e..0000000
--- a/third_party/libc++abi/README.chromium
+++ /dev/null
@@ -1,11 +0,0 @@
-Name: libcxxabi
-Short Name: libc++abi
-URL: http://libcxxabi.llvm.org/
-Version: 1.0
-License: MIT, University of Illinois/NCSA Open Source License
-License File: trunk/LICENSE.TXT
-Security Critical: yes
-
-Description:
-
-libc++abi for Chromium.
diff --git a/third_party/libc++abi/cxa_demangle_stub.cc b/third_party/libc++abi/cxa_demangle_stub.cc
deleted file mode 100644
index 5c6560c..0000000
--- a/third_party/libc++abi/cxa_demangle_stub.cc
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stddef.h>
-
-#include <__cxxabi_config.h>
-
-extern "C"
-// LLVM's demangler is large, and we have no need of it. Overriding it with
-// our own stub version here stops a lot of code being pulled in from libc++.
-// More here:
-// https://llvm.org/svn/llvm-project/libcxxabi/trunk/src/cxa_demangle.cpp
-_LIBCXXABI_FUNC_VIS
-// This is a weak symbol to let android_crazy_linker override it in
-// //base/android/linker:chromium_android_linker.
-_LIBCXXABI_WEAK char* __cxa_demangle(const char* mangled_name,
- char* buf,
- size_t* n,
- int* status) {
- static const int kMemoryAllocFailure = -1; // LLVM's memory_alloc_failure.
- if (status)
- *status = kMemoryAllocFailure;
- return nullptr;
-}
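The interface this stub overrides is abi::__cxa_demangle from <cxxabi.h>, which normally returns a malloc()-allocated demangled string, or nullptr plus a failure status. With the stub linked in, every call reports a memory allocation failure, so callers simply keep the mangled name. A usage sketch (the Demangle helper below is illustrative, not a Chromium function):

#include <cxxabi.h>

#include <cstdlib>
#include <string>

// Demangles a symbol name, falling back to the mangled form when the
// demangler is unavailable (e.g. when the stub above always reports a
// memory allocation failure).
std::string Demangle(const char* mangled) {
  int status = 0;
  char* demangled = abi::__cxa_demangle(mangled, nullptr, nullptr, &status);
  if (status != 0 || demangled == nullptr)
    return mangled;  // Stubbed builds land here: status == -1, null result.
  std::string result(demangled);
  std::free(demangled);  // __cxa_demangle allocates with malloc().
  return result;
}

The _LIBCXXABI_WEAK annotation on the stub is what lets the android_crazy_linker's own definition of __cxa_demangle win at link time, as the comment in the file notes.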
diff --git a/third_party/libunwind/BUILD.gn b/third_party/libunwind/BUILD.gn
deleted file mode 100644
index 31bc075..0000000
--- a/third_party/libunwind/BUILD.gn
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/c++/c++.gni")
-
-config("libunwind_config") {
- cflags = [
- "-fstrict-aliasing",
- "-fPIC",
-
- # ValueAsBitPattern in Unwind-EHABI.cpp is only used on Debug builds.
- "-Wno-unused-function",
- ]
-}
-
-source_set("libunwind") {
- visibility = [
- "//buildtools/third_party/libc++abi",
- "//components/tracing",
- ]
-
- if (!is_component_build) {
- defines = [ "_LIBUNWIND_DISABLE_VISIBILITY_ANNOTATIONS" ]
- }
- include_dirs = [ "//buildtools/third_party/libunwind/trunk/include" ]
- sources = [
- # C++ sources
- "trunk/src/Unwind-EHABI.cpp",
- "trunk/src/libunwind.cpp",
-
- # C sources
- "trunk/src/Unwind-sjlj.c",
- "trunk/src/UnwindLevel1-gcc-ext.c",
- "trunk/src/UnwindLevel1.c",
-
- # ASM sources
- "trunk/src/UnwindRegistersRestore.S",
- "trunk/src/UnwindRegistersSave.S",
- ]
- configs -= [
- "//build/config/compiler:chromium_code",
- "//build/config/compiler:no_exceptions",
- "//build/config/compiler:no_rtti",
- "//build/config/coverage:default_coverage",
- ]
- configs += [
- "//build/config/compiler:no_chromium_code",
- "//build/config/compiler:exceptions",
- "//build/config/compiler:rtti",
-
- # Must be after no_chromium_code
- ":libunwind_config",
- ]
-}
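To see why a platform needs these symbols at all: walking the stack goes through the Itanium unwind interface (_Unwind_Backtrace and friends), which some library has to provide. glibc bundles an unwinder on Linux; on Fuchsia it comes from this libunwind target. A small sketch using only the standard interface, not Chromium code:

#include <unwind.h>

#include <cstdint>
#include <cstdio>

// Invoked once per stack frame by _Unwind_Backtrace.
static _Unwind_Reason_Code TraceFrame(struct _Unwind_Context* context,
                                      void* /*arg*/) {
  uintptr_t pc = _Unwind_GetIP(context);
  std::printf("frame pc: 0x%zx\n", static_cast<size_t>(pc));
  return _URC_NO_REASON;  // Keep walking toward the outermost frame.
}

int main() {
  // Walks the current call stack using the unwind tables; the _Unwind_*
  // symbols resolve to libunwind (or the platform unwinder) at link time.
  _Unwind_Backtrace(&TraceFrame, nullptr);
  return 0;
}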
diff --git a/third_party/libunwind/OWNERS b/third_party/libunwind/OWNERS
deleted file mode 100644
index dfaca80..0000000
--- a/third_party/libunwind/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-# You may only be an owner of libunwind if your username begins with 'th'.
-thakis@chromium.org
-thomasanderson@chromium.org
diff --git a/third_party/libunwind/README.chromium b/third_party/libunwind/README.chromium
deleted file mode 100644
index d787e87..0000000
--- a/third_party/libunwind/README.chromium
+++ /dev/null
@@ -1,11 +0,0 @@
-Name: libunwind
-URL: https://llvm.org/svn/llvm-project/libunwind/trunk/
-Version: 1.0
-License: MIT, University of Illinois/NCSA Open Source License
-License File: trunk/LICENSE.TXT
-Security Critical: yes
-
-Description:
-
-libunwind for Chromium.
-This is a dependency of libc++abi on ARM builds.
diff --git a/win/clang-format.exe.sha1 b/win/clang-format.exe.sha1
deleted file mode 100644
index d31c76f..0000000
--- a/win/clang-format.exe.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b5f5d8d5f8a8fcd2edb5b6cae37c0dc3e129c945
\ No newline at end of file
diff --git a/win/gn.exe.sha1 b/win/gn.exe.sha1
deleted file mode 100644
index adb9bc6..0000000
--- a/win/gn.exe.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e20768d93a6b4400de0d03bb8ceb46facdbe3883
\ No newline at end of file