| #!/usr/bin/env python3 |
| # Copyright 2019 The ChromiumOS Authors |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| """Lint our source files.""" |
| |
| import fnmatch |
| import itertools |
| import logging |
| import os |
| from pathlib import Path |
| import sys |
| |
| import libdot |
| |
| |
# Enable all closure-compiler warning classes as errors.
| DEFAULT_CLOSURE_ARGS = ("--jscomp_error=*",) |
| |
| |
| # Options passed when invoking eslint. |
| ESLINT_ARGS = ( |
| "--cache", |
| "--cache-location", |
| libdot.LIBAPPS_DIR / ".eslintcache", |
| ) |
| |
| |
| def kokoro_comments_path(path, tool): |
| """Expand % markers that might exist in |path|.""" |
    if not path:
        return None
    return path.replace("%(tool)", tool)
| |
| |
| def is_generated_path(path): |
| """Return True if |path| is generated. |
| |
| Useful for filtering out comments for files not in the tree. |
| """ |
| return fnmatch.fnmatch(path, "*.concat.js") or fnmatch.fnmatch( |
| path, "*.rollup.js" |
| ) |
| |
| |
| def get_known_files(basedir: Path) -> list[Path]: |
| """Return list of files committed to the tree.""" |
| # -z appends \0 to each path, it doesn't delimit them. So strip off the |
| # trailing \0 to avoid a spurious '' entry at the end. |
| all_files = ( |
| libdot.run( |
| ["git", "ls-tree", "--name-only", "-r", "-z", "HEAD"], |
| cwd=basedir, |
| capture_output=True, |
| encoding="utf-8", |
| ) |
| .stdout[:-1] |
| .split("\0") |
| ) |
| return [basedir / x for x in all_files if (basedir / x).exists()] |
| |
| |
def get_known_sources(basedir: Path) -> list[Path]:
    """Get list of committed files that we could possibly lint."""
    # Skip binary/compressed formats that no linter handles.
    return [
        x
        for x in get_known_files(basedir)
        if x.suffix
        not in {
            ".bz2",
            ".gz",
            ".log",
            ".jpg",
            ".ogg",
            ".patch",
            ".png",
            ".webp",
            ".xz",
        }
    ]
| |
| |
| def lint_whitespace_data(path: Path, data: str) -> bool: |
| """Basic whitespace checks on text files.""" |
| ret = True |
| |
| if "\r" in data: |
| ret = False |
| logging.error(r"%s: \r not allowed in text files", path) |
| |
| if data.startswith("\n"): |
| ret = False |
| logging.error("%s: No leading blank lines allowed", path) |
| |
| if not data.endswith("\n"): |
| ret = False |
| logging.error("%s: Files must end with a newline", path) |
| |
| return ret |
| |
| |
| def lint_whitespace_files(paths: list[Path]) -> bool: |
| """Basic whitespace checks on text files.""" |
| ret = True |
| |
| for path in paths: |
| with open(path, mode="r", encoding="utf-8") as fp: |
| data = fp.read() |
| |
| ret &= lint_whitespace_data(path, data) |
| |
| return ret |
| |
| |
| def lint_html_files(paths: list[Path]) -> bool: |
| """Basic lint checks on HTML files.""" |
| logging.info("Linting HTML files %s", libdot.cmdstr(paths)) |
| ret = True |
| |
| META_CHARSET_TAG = "<meta charset='utf-8'/>" |
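    # NB: this is a plain substring match, so variant spellings (double
    # quotes, extra whitespace, different attribute order) count as missing.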
| |
| for path in paths: |
| with open(path, mode="r", encoding="utf-8") as fp: |
| data = fp.read() |
| |
| if META_CHARSET_TAG not in data: |
| ret = False |
| logging.error("%s: <head>: missing %s tag", path, META_CHARSET_TAG) |
| |
| ret &= lint_whitespace_data(path, data) |
| |
| return ret |
| |
| |
| def lint_text_files(paths: list[Path]) -> bool: |
| """Basic lint checks on HTML files.""" |
| logging.info("Linting text files %s", libdot.cmdstr(paths)) |
| return lint_whitespace_files(paths) |
| |
| |
def _get_default_paths(basedir: Path) -> list[str]:
| """Get list of paths to lint by default.""" |
| most_files = sorted( |
| x for x in get_known_sources(basedir) if x.suffix not in {".js"} |
| ) |
| |
| # All files in js/*.js excluding generated files. |
| # Use relpath for nicer default output. |
| # Sort to ensure lib.js comes before lib_array.js, etc. |
| js_files = sorted( |
| itertools.chain( |
| [libdot.DIR / "index.js"], |
| (libdot.DIR / "js").glob("*.js"), |
| ) |
| ) |
| |
| return [os.path.relpath(x) for x in most_files + js_files] |
| |
| |
| def get_parser(): |
| """Get a command line parser.""" |
| parser = libdot.ArgumentParser(description=__doc__) |
| parser.add_argument( |
| "--version", action="store_true", help="Show linter versions." |
| ) |
| parser.add_argument( |
| "--fix", |
| action="store_true", |
| help="Run linters with --fix setting if possible.", |
| ) |
| parser.add_argument( |
| "--gerrit-comments-file", |
| help="Save errors for posting files to Gerrit.", |
| ) |
| parser.add_argument( |
| "--skip-mkdeps", |
| dest="run_mkdeps", |
| action="store_false", |
| default=True, |
| help="Skip (re)building of dependencies.", |
| ) |
| |
| group = parser.add_argument_group( |
| "File selection/filtering", |
| description="All files are linted by default", |
| ) |
| group.add_argument("--cpp", action="store_true", help="Lint C/C++ files.") |
| group.add_argument( |
| "--js", action="store_true", help="Lint JavaScript files." |
| ) |
| group.add_argument("--json", action="store_true", help="Lint JSON files.") |
| group.add_argument("--md", action="store_true", help="Lint Markdown files.") |
| group.add_argument("--py", action="store_true", help="Lint Python files.") |
| group.add_argument("--txt", action="store_true", help="Lint text files.") |
| |
| parser.add_argument("paths", nargs="*", help="Paths to lint.") |
| |
| return parser |
| |
| |
| def main( |
| argv, |
| get_default_paths=get_known_sources, |
| basedir=None, |
| mkdeps=None, |
| closure_args=DEFAULT_CLOSURE_ARGS, |
| ): |
| """The common main func for all linters. |
| |
| Args: |
| argv: The command line to process. |
| paths: The default set of files to lint. If the user doesn't specify |
| any, we'll use these instead. |
| basedir: The base source tree to search for default set of files to lint. |
| mkdeps: Callback to build dependencies after we've initialized. |
| closure_args: Extra arguments to pass to closure-compiler. |
| """ |
| parser = get_parser() |
| opts = parser.parse_args(argv) |
| libdot.node_and_npm_setup() |
| |
| if opts.version: |
| for func in ( |
| libdot.eslint.run, |
| libdot.closure_compiler.run, |
| libdot.pylint.run, |
| ): |
| func(["--version"]) |
| return 0 |
| |
| if not opts.paths: |
| opts.paths = [str(x) for x in get_default_paths(basedir)] |
| if not opts.paths: |
| print("No files to lint.") |
| return 0 |
| |
| if mkdeps: |
| if opts.run_mkdeps: |
| mkdeps(opts) |
| else: |
| logging.info("Skipping building dependencies due to --skip-mkdeps") |
| |
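    # If no group flags (--cpp, --js, ...) were given, lint every group;
    # otherwise only lint the groups the user explicitly requested.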
| lint_groups = {"cpp", "js", "json", "md", "py", "txt"} |
| lint_all = all(not getattr(opts, x) for x in lint_groups) |
| for group in lint_groups: |
| setattr(opts, group, lint_all or getattr(opts, group)) |
| |
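    # Track the paths that no linter below claims so we can warn about them
    # at the end.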
| tolint_paths = set(opts.paths) |
| kwargs = { |
| "fix": opts.fix, |
| "gerrit_comments_file": opts.gerrit_comments_file, |
| } |
| checks = [] |
| |
| html_files = [ |
| x for x in opts.paths if x.endswith(".html") or x.endswith(".html.in") |
| ] |
| |
| js_files = [x for x in opts.paths if x.endswith(".js")] |
| tolint_paths -= set(js_files) |
| if js_files and opts.js: |
        js_kwargs = {"paths": js_files, **kwargs}
        checks += [
            libdot.eslint.perform(argv=ESLINT_ARGS, **js_kwargs),
            libdot.closure_compiler.perform(argv=closure_args, **js_kwargs),
        ]
| |
| py_files = libdot.pylint.filter_known_files(opts.paths) |
| tolint_paths -= set(py_files) |
| if py_files and opts.py: |
| py_kwargs = {"paths": py_files, **kwargs} |
| checks += [libdot.pylint.perform(**py_kwargs)] |
| |
| cpp_files = libdot.cpplint.filter_known_files(opts.paths) |
| tolint_paths -= set(cpp_files) |
| if cpp_files and opts.cpp: |
| cpp_kwargs = {"paths": cpp_files, **kwargs} |
| checks += [libdot.cpplint.perform(**cpp_kwargs)] |
| |
| tolint_paths -= set(html_files) |
| if html_files and opts.txt: |
| checks += [lint_html_files(html_files)] |
| |
| md_files = libdot.mdlint.filter_known_files(opts.paths) |
| tolint_paths -= set(md_files) |
| if md_files and opts.md: |
| md_kwargs = {"paths": md_files, **kwargs} |
| checks += [ |
| lint_whitespace_files(md_files), |
| libdot.mdlint.perform(**md_kwargs), |
| ] |
| |
| json_files = libdot.jsonlint.filter_known_files(opts.paths) |
| tolint_paths -= set(json_files) |
| if json_files and opts.json: |
| json_kwargs = {"paths": json_files, **kwargs} |
| checks += [ |
| lint_whitespace_files(json_files), |
| libdot.jsonlint.perform(**json_kwargs), |
| ] |
| |
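    # Files that only get the basic whitespace checks, matched either by
    # exact basename or by extension.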
| TEXT_FILENAMES = { |
| ".clang-format", |
| ".gitignore", |
| ".markdownlintrc", |
| ".npmrc", |
| ".pylintrc", |
| "DIR_METADATA", |
| "Dockerfile", |
| "eslint.config.js", |
| "LICENSE", |
| "Makefile", |
| "METADATA", |
| "OWNERS", |
| } |
| TEXT_EXTENSIONS = { |
| ".cfg", |
| ".concat", |
| ".css", |
| ".el", |
| ".sh", |
| ".svg", |
| ".toml", |
| ".txt", |
| ".vim", |
| ".xml", |
| ".yaml", |
| } |
| text_files = [ |
| x |
| for x in opts.paths |
| if ( |
| os.path.basename(x) in TEXT_FILENAMES |
| or os.path.splitext(x)[1] in TEXT_EXTENSIONS |
| ) |
| ] |
| tolint_paths -= set(text_files) |
| if text_files and opts.txt: |
| checks += [lint_text_files(text_files)] |
| |
| if tolint_paths: |
| logging.warning("Linting skipped:\n%s", " ".join(sorted(tolint_paths))) |
| |
| return 0 if all(checks) else 1 |
| |
| |
| if __name__ == "__main__": |
| sys.exit( |
| main( |
| sys.argv[1:], |
| basedir=libdot.DIR, |
| get_default_paths=_get_default_paths, |
| ) |
| ) |