| #!/usr/bin/env python3 |
| # Copyright 2022 The ChromiumOS Authors |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| # Disable pylint noise |
| # pylint: disable=import-error |
| # pylint: disable=redefined-outer-name |
| # pylint: disable=input-builtin |
| # pylint: disable=broad-except |
| # pylint: disable=wrong-import-order |
| # pylint: disable=global-statement |
| # pylint: disable=logging-fstring-interpolation |
| # pylint: disable=encoding-missing |
| # pylint: disable=not-context-manager |
| # pylint: disable=too-many-function-args |
| |
| """Creates a bisection branch between two KCR results.""" |
| |
| import argparse |
| from collections import OrderedDict |
| from datetime import datetime |
| import logging |
| import os |
| import pickle |
| import re |
| import sys |
| |
| from buildhelpers import verify_build |
| from githelpers import apply_patch |
| from githelpers import checkout |
| from githelpers import cherry_pick |
| from githelpers import commit_message |
| from githelpers import commit_subject |
| from githelpers import create_head |
| from githelpers import diff |
| from githelpers import generic_abort |
| from githelpers import generic_continue |
| from githelpers import is_merge |
| from githelpers import is_resolved |
| from githelpers import is_tag |
| from githelpers import list_shas |
| from githelpers import patch_title |
| from githelpers import ref_exists |
| from githelpers import revert |
| from githelpers import sha_in_head |
| from githelpers import show_file_at |
| from rich import print as rprint |
| from rich.highlighter import NullHighlighter |
| from rich.logging import RichHandler |
| from rich.markup import escape |
| from rich.progress import BarColumn |
| from rich.progress import MofNCompleteColumn |
| from rich.progress import Progress |
| from rich.progress import SpinnerColumn |
| from rich.progress import TextColumn |
| from rich.progress import TimeRemainingColumn |
| from rich.prompt import Confirm |
| from rich.prompt import Prompt |
| from rich.table import Table |
| import sh |
| |
| |
| FIXUP_PREFIX = "FIXUP: " |
| FROMLIST_PREFIX = "FROMLIST: " |
| FROMGIT_PREFIX = "FROMGIT: " |
| CHROMIUM_PREFIX = "CHROMIUM: " |
| BACKPORT_PREFIX = "BACKPORT: " |
| BACKPORT_FROMLIST_PREFIX = "BACKPORT: FROMLIST: " |
| BACKPORT_FROMGIT_PREFIX = "BACKPORT: FROMGIT: " |
| KERNELUPSTREAM_BRANCH_PREFIX = "cros/merge/continuous/chromeos-kernelupstream-" |
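# e.g. "cros/merge/continuous/chromeos-kernelupstream-6.1-rc7"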
| |
| LOGFORMAT = "%(message)s" |
| |
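# Commit subjects that are never reverted when building the bisection branch.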
| skip_reverts = ["kernel-rebase: normalization [autogenerated]"] |
| patch_dependencies = { |
| # hardcoded patch dependencies assignment: |
| # [patch_file] => [dependencies...] |
| # example: |
| # "25cf75bc087b35591c1080714a7e0487fd2ebfe3.patch": [ |
| # "8c516b25d6e9c70e6d76627932b14b0ef03a82c4", |
| # "86bdf3ebcfe1ded055282536fecce13001874740", |
| # "eb5618911af0ac069d2313b289d4c19ca3379401", |
| # ], |
| } |
| |
| # silence sh.py |
| logging.getLogger("sh").setLevel(logging.WARNING) |
| |
os.makedirs("log", exist_ok=True)
file_handler = logging.FileHandler(
    f"log/bisect_branch-{datetime.now():%Y-%m-%d-%H_%M_%S}.log"
)
| file_handler_format = logging.Formatter( |
| "%(asctime)s - %(name)s - %(levelname)s - %(message)s" |
| ) |
| file_handler.setFormatter(file_handler_format) |
| |
| logging.basicConfig( |
| level="NOTSET", |
| format="%(message)s", |
| datefmt="[%X]", |
| handlers=[ |
| RichHandler( |
| markup=True, |
| show_path=False, |
| show_level=False, |
| omit_repeated_times=False, |
| highlighter=NullHighlighter(), |
| keywords=None, |
| ), |
| file_handler, |
| ], |
| ) |
| |
| log = logging.getLogger("bisect_branch") |
| |
| dropped = 0 |
| dropped_resolutions = 0 |
| # Setup begin and end versions |
| default_begin = "6.1-rc6" |
| default_end = "6.1-rc7" |
| default_steps = 80 |
| default_board = "volteer-kernelnext" |
| |
| parser = argparse.ArgumentParser() |
| mode_group = parser.add_mutually_exclusive_group(required=True) |
| mode_group.add_argument( |
| "-c", |
| "--create", |
| action="store_true", |
| help="create bisection branch using first strategy", |
| ) |
| mode_group.add_argument( |
| "-s", |
| "--second-strategy", |
| action="store_true", |
| help="create bisection branch using second strategy", |
| ) |
| mode_group.add_argument( |
| "-x", "--verify", action="store_true", help="verify bisection only" |
| ) |
| parser.add_argument( |
| "-f", |
| "--force", |
| action="store_true", |
| help="force execution, delete bisection branch if exists, etc.", |
| ) |
| parser.add_argument( |
| "-k", |
| "--skip-kernel", |
| action="store_true", |
| help="skip kernel build, used to troubleshoot flow issues", |
| ) |
| parser.add_argument( |
| "-N", |
| "--non-interactive", |
| action="store_true", |
| help="completely non-interactive mode, fail fast on any error", |
| ) |
| parser.add_argument( |
| "-t", |
| "--triage", |
| action="store_true", |
    help=(
        "triage mode, drop all conflicting patches which cannot be "
        "resolved automatically and print them at the end"
    ),
| ) |
| parser.add_argument( |
| "-d", |
| "--debug", |
| action="store_true", |
| help="print additional messages, can be noisy", |
| ) |
| parser.add_argument( |
| "-K", |
| "--always-pass-kernel", |
| action="store_true", |
| help="build kernel, but do not exit on error", |
| ) |
| parser.add_argument( |
| "-n", |
| "--steps", |
| type=int, |
| default=default_steps, |
| help=f"number of steps to perform during verification (default: {default_steps})", |
| ) |
| parser.add_argument( |
| "-b", |
| "--board", |
| type=str, |
| default=default_board, |
| help=f"board to use during verification (default: {default_board})", |
| ) |
parser.add_argument(
    "begin",
    type=str,
    nargs="?",
    help=f"first tag/SHA of bisection (default: {default_begin})",
    default=default_begin,
)
parser.add_argument(
    "end",
    type=str,
    nargs="?",
    help=f"last tag/SHA of bisection (default: {default_end})",
    default=default_end,
)
| args = parser.parse_args() |
| |
| |
| def blog(prefix, msg, color="yellow"): |
| """Bisection logger, quick wrapper""" |
| log.info(f"[b][{color}]{prefix}[/{color}][/b] {msg}") |
| |
| |
| def patch_progress(): |
| """Return progressbar to show progress of applying patches.""" |
| |
| return Progress( |
| SpinnerColumn(), |
| TextColumn("[progress.description]{task.description}"), |
| BarColumn(), |
| MofNCompleteColumn(), |
| TimeRemainingColumn(), |
| transient=True, |
| ) |
| |
| |
| def non_interactive_exit(): |
| """Exits early if there's non-interactive mode enabled""" |
| if args.non_interactive: |
| log.error("Exiting early due to non-interactive switch enabled.") |
| sys.exit(-1) |
| |
| |
| def get_patches(ver): |
| """List of patches above upstream tag. |
| |
| Each patch is described as a dictionary with sha, content-hash, |
| commit-title and change-id fields. |
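
    e.g. {"sha": "...", "content-hash": "...",
          "commit-title": "CHROMIUM: ...", "change-id": "I..."}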
| """ |
| |
| patches_cache_file = f".bisect_cache_patches_{ver}.pickle" |
    if os.path.exists(patches_cache_file):
        with open(patches_cache_file, "rb") as f:
            res = pickle.load(f)
        log.info(f"Found cached patches list for {ver}, nice.")
        return res
| |
| branch = f"{KERNELUPSTREAM_BRANCH_PREFIX}{ver}" |
| |
| checkout("kernel-upstream", branch) |
| shas = list_shas("kernel-upstream", f"v{ver}..HEAD") |
| shas_cnt = len(shas) |
| |
| log.info(f"Processing {shas_cnt} patches from {branch}...") |
| |
| res = [] |
| with patch_progress() as p: |
| for sha in p.track(shas, description="Getting patches..."): |
| content_hash = patch_title("kernel-upstream", sha) |
| commit_title = commit_subject("kernel-upstream", sha) |
| |
| commit_msg = commit_message("kernel-upstream", sha).splitlines() |
| change_id_prefix = "Change-Id: " |
| change_id = None |
| for msg_line in commit_msg: |
| line = msg_line.strip() |
| if line.startswith(change_id_prefix): |
| change_id = line[len(change_id_prefix) :] |
| break |
| |
| entry = {} |
| entry["sha"] = sha |
| entry["content-hash"] = content_hash |
| entry["commit-title"] = commit_title |
| entry["change-id"] = change_id |
| res.append(entry) |
| |
| res.reverse() |
| |
    with open(patches_cache_file, "wb") as f:
        pickle.dump(res, f)
    log.info(
        f"Dumped cached patches list for {ver}, to be used in future calls."
    )
| |
| return res |
| |
| |
| def is_same(patch1, patch2): |
| """Decides whether two patches are the same change. |
| |
| The patches might be different in content due to the rebase, |
| hence this function uses Change-Ids for comparison if available, |
| or patch titles if not. |
| """ |
| |
| chid1 = patch1["change-id"] |
| chid2 = patch2["change-id"] |
| title1 = patch1["commit-title"] |
| title2 = patch2["commit-title"] |
| |
| # prioritize change-id for commit comparison if available |
| if chid1 is not None and chid2 is not None: |
| return chid1 == chid2 |
| return title1 == title2 |
| |
| |
def is_patch_upstreamed(patches_upstream, patch):
    """Checks if a patch is included in the upstream."""

    # Compare commit titles with any known ChromeOS prefix removed.
    # str.removeprefix (unlike str.strip, which removes a set of
    # characters from both ends) drops the prefix only when present.
    prefixes = (
        "",
        FROMLIST_PREFIX,
        FROMGIT_PREFIX,
        BACKPORT_FROMGIT_PREFIX,
        CHROMIUM_PREFIX,
    )
    title = patch["commit-title"]
    for entry in patches_upstream:
        if any(
            title.removeprefix(prefix) == entry["commit-title"]
            for prefix in prefixes
        ):
            if args.debug:
                log.info(f"Patch is included in upstream {patch} -> {entry}\n")
            return True

    return False
| |
| |
| def assert_empty(sha, is_commit_expected=0): |
| """Check if commit is empty.""" |
| |
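    # A successful cherry-pick means a commit was created (not empty);
    # a failed pick that leaves nothing to resolve was effectively empty.
    # is_commit_expected flips the expectation for picks that are
    # supposed to produce a commit.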
| try: |
| cherry_pick("kernel-upstream", sha, use_am=False) |
| return bool(is_commit_expected) |
| except Exception: |
| if not is_resolved("kernel-upstream"): |
| generic_abort("kernel-upstream") |
| return False |
| |
| return True |
| |
| |
| def speculative_check_forward(old, fixup_old, new, fixup_new): |
| """Speculative check forward.""" |
| |
    # To verify whether a replace disposition introduces any changes,
    # check out {old} and cherry-pick new, fixup_old and fixup_new in
    # turn, expecting the picks of new and fixup_new to be empty, or
    # check out {fixup_old} and expect a pick of fixup_new to be empty
    # in case the disposition holds a fixup replace only.
| if old is not None: |
| checkout("kernel-upstream", old) |
| else: |
| checkout("kernel-upstream", fixup_old) |
| |
| if old is not None: |
| if not assert_empty(new): |
| return True |
| if fixup_old is not None and not assert_empty(fixup_old, 1): |
| return True |
| if fixup_new is not None and not assert_empty(fixup_new): |
| return True |
| else: |
| if not assert_empty(fixup_new): |
| return True |
| |
| return False |
| |
| |
| def speculative_check_reverse(old, fixup_old, new, fixup_new): |
| """Speculative check reverse.""" |
| |
    # To verify whether the disposition introduces any changes, apply
    # the patches in the reverse direction. The aim is to catch the case
    # where the new/fixup_new patches are a subset of the old/fixup_old
    # patches: check out {new} and cherry-pick old, fixup_new and
    # fixup_old in turn, or check out {fixup_new} and expect a pick of
    # fixup_old to be empty in case the disposition holds a fixup
    # replace only.
| if new is not None: |
| checkout("kernel-upstream", new) |
| else: |
| checkout("kernel-upstream", fixup_new) |
| |
| if new is not None: |
| if not assert_empty(old): |
| return True |
| if fixup_new is not None and not assert_empty(fixup_new, 1): |
| return True |
| if fixup_old is not None and not assert_empty(fixup_old): |
| return True |
| else: |
| if not assert_empty(fixup_old): |
| return True |
| |
| return False |
| |
| |
| def speculative_check(disp): |
| """True=retain, False=skip.""" |
| |
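    # A replace disposition is a no-op when both the forward and the
    # reverse checks below see only empty picks; filter_noops() drops
    # such dispositions without changing the tree.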
| old = disp["old"] |
| new = disp["new"] |
| fixup_old = disp["fixup_old"] |
| fixup_new = disp["fixup_new"] |
| |
| # Validate disposition |
| if old is None and new is not None: |
| raise AssertionError() |
| if old is not None and new is None: |
| raise AssertionError() |
| |
| if old is None and new is None: |
| if fixup_old is None and fixup_new is not None: |
| raise AssertionError() |
| if fixup_old is not None and fixup_new is None: |
| raise AssertionError() |
| |
| if old is None and fixup_old is None: |
| raise AssertionError() |
| if new is None and fixup_new is None: |
| raise AssertionError() |
| |
| if speculative_check_forward(old, fixup_old, new, fixup_new): |
| return True |
| if speculative_check_reverse(old, fixup_old, new, fixup_new): |
| return True |
| |
| return False |
| |
| |
| def filter_noops(disps): |
| """Removes dispositions that don't pass speculative_check(_).""" |
| |
| result = [] |
| i = 1 |
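    # Log a compact per-disposition marker: "." = retained, "X" = dropped.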
| for d in disps: |
| if ( |
| d["disposition"] != "replace" |
| and d["disposition"] != "replace-fixup" |
| ): |
| result.append(d) |
| continue |
| |
| if speculative_check(d): |
| result.append(d) |
| log.info("%s. .", str(i)) |
| else: |
| log.info("%s. X", str(i)) |
| i = i + 1 |
| |
| if args.debug: |
| log.debug("Replace dispositions which passed filter_noops:") |
| for i, d in enumerate(result): |
| if ( |
| d["disposition"] == "replace" |
| or d["disposition"] == "replace-fixup" |
| ): |
| log.debug(f"{i}.{d}\n") |
| |
| return result |
| |
| |
| def dispositions(patches_begin, patches_end, patches_upstream): |
| """List a sequence of ChromeOS patch operations that transform {begin} into {end}.""" |
| |
| # Collect all content-hashes that are duplicated across the two lists. |
| dupe_content_hashes = set() |
| for patch1 in patches_begin: |
| for patch2 in patches_end: |
| if patch1["content-hash"] == patch2["content-hash"]: |
| dupe_content_hashes.add(patch1["content-hash"]) |
| |
| # Remove the duplicates from both lists. |
| # This also removes all empty commits as the content-hash ignores |
| # commit messages, and there are many of them on both branches. |
| diff_patches_begin = [] |
| for patch in patches_begin: |
| if patch["content-hash"] not in dupe_content_hashes: |
| diff_patches_begin.append(patch) |
| |
| diff_patches_end = [] |
| for patch in patches_end: |
| if patch["content-hash"] not in dupe_content_hashes: |
| diff_patches_end.append(patch) |
| |
    # Prepare a sequence of dispositions to transform the private
    # ChromeOS patches state from {begin} into that of {end}
| dispositions_naive = [] |
| for patch in diff_patches_begin: |
| dispositions_naive.append({"disposition": "revert", "patch": patch}) |
| for patch in diff_patches_end: |
| dispositions_naive.append({"disposition": "pick", "patch": patch}) |
| |
| # Look for replacements, i.e. patches different on {begin} and {end} |
| # They will be squashed together |
| to_squash = [] |
| for disp1 in dispositions_naive: |
| for disp2 in dispositions_naive: |
| d1 = disp1["disposition"] |
| d2 = disp2["disposition"] |
| patch1 = disp1["patch"] |
| patch2 = disp2["patch"] |
| |
| is_fixup = patch1["commit-title"].startswith(FIXUP_PREFIX) |
| # squash pairs of revert-apply other than fixups |
| if ( |
| d1 == "revert" |
| and d2 == "pick" |
| and is_same(patch1, patch2) |
| and not is_fixup |
| ): |
| to_squash.append({"revert": patch1, "pick": patch2}) |
| |
    # Rewords the dispositions so that instead of a simple pick/revert a
    # wider array of operations is supported:
| # - Pick: |
| # * fixup_old: a fixup previously applied to the patch, reverted before pick |
| # * fixup_new: a fixup supposed to be applied to the patch, applied after pick |
| # * sha: sha of commit to pick |
| # * title: subject of the patch |
| # - Revert: |
| # * sha: sha of commit to revert |
| # * title: subject of the patch |
| # - Replace: |
| # * fixup_old: a fixup previously applied to the patch, reverted before revert of old |
| # * fixup_new: a fixup supposed to be applied to the patch, applied after pick of new |
| # * old: sha of patch as of {begin} |
| # * new: sha of patch as of {end} |
| # * title: subject of old |
| # - Replace-fixup: |
| # * fixup_old: a fixup to be reverted |
| # * fixup_new: a fixup to be applied |
| # * title: subject of the fixup |
| # |
| # The fixup fields will be populated later |
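    # e.g. {"disposition": "replace", "old": "<sha>", "new": "<sha>",
    #       "fixup_old": None, "fixup_new": None, "title": "..."}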
| dispositions = [] |
| for disp in dispositions_naive: |
| d = disp["disposition"] |
| patch = disp["patch"] |
| if d == "revert": |
| squashed = False |
| upstreamed = False |
| for squash in to_squash: |
                    if (
                        is_same(patch, squash["revert"])
                        and patch["content-hash"]
                        != squash["pick"]["content-hash"]
                    ):
| dispositions.append( |
| { |
| "disposition": "replace", |
| "old": patch["sha"], |
| "new": squash["pick"]["sha"], |
| "fixup_old": None, |
| "fixup_new": None, |
| "title": patch["commit-title"], |
| } |
| ) |
| squashed = True |
| break |
| |
| if not squashed: |
| upstreamed = is_patch_upstreamed(patches_upstream, patch) |
| |
| if not squashed and not upstreamed: |
| dispositions.append( |
| { |
| "disposition": "revert", |
| "sha": patch["sha"], |
| "title": patch["commit-title"], |
| } |
| ) |
| elif d == "pick": |
| skip = False |
| for squash in to_squash: |
| if is_same(patch, squash["pick"]): |
| skip = True |
| break |
| if not skip: |
| dispositions.append( |
| { |
| "disposition": "pick", |
| "sha": patch["sha"], |
| "fixup_old": None, |
| "fixup_new": None, |
| "title": patch["commit-title"], |
| } |
| ) |
| |
| # Populate the fixup_* fields and mark the moved fixups for |
| # removal from individual dispositions. |
| disps_to_skip = [] |
| for d1 in dispositions: |
| disp = d1["disposition"] |
| field = None |
| if disp == "revert": |
| field = "fixup_old" |
| elif disp == "pick": |
| field = "fixup_new" |
| |
| if d1["title"].startswith(FIXUP_PREFIX): |
| title = d1["title"].strip(FIXUP_PREFIX) |
| for d2 in dispositions: |
| if d2["title"] == title: |
| d2[field] = d1["sha"] |
| disps_to_skip.append(d1) |
| if args.debug: |
| log.debug("Fixups to skip:") |
| for i, d in enumerate(disps_to_skip): |
| log.debug(f"{i}.{d}\n") |
| |
| # Remove the fixups identified above. |
| dispositions = [d for d in dispositions if d not in disps_to_skip] |
| |
| # Search for fixup replacements only |
| disps_to_skip.clear() |
| for d1 in dispositions: |
| is_fixup = d1["title"].startswith(FIXUP_PREFIX) |
| for d2 in dispositions: |
| if is_fixup and d1["title"] == d2["title"]: |
| if ( |
| d1["disposition"] == "revert" |
| and d2["disposition"] == "pick" |
| ): |
| dispositions.append( |
| { |
| "disposition": "replace-fixup", |
| "old": None, |
| "new": None, |
| "fixup_old": d1["sha"], |
| "fixup_new": d2["sha"], |
| "title": d1["title"], |
| } |
| ) |
| disps_to_skip.append(d1) |
| disps_to_skip.append(d2) |
| if args.debug: |
| log.debug("========== DEBUG ==========") |
| log.debug("Fixup replacements:") |
| l1 = disps_to_skip[::2] |
| l2 = disps_to_skip[1::2] |
| for i, d in enumerate(l1): |
| dn = l2[i] |
| log.debug(f"{i}.{d} -> {dn}\n") |
| log.debug("===========================") |
| |
| # Remove the dispositions identified above |
| dispositions = [d for d in dispositions if d not in disps_to_skip] |
| |
| return dispositions |
| |
| |
| def upstream_picks(begin, end): |
| """Lists all upstream commits between {begin} and {end}.""" |
| |
| checkout("kernel-upstream", end) |
| shas = list_shas("kernel-upstream", f"{begin}..HEAD") |
| |
| shas.reverse() |
| |
| upstream = [] |
| with patch_progress() as p: |
| for sha in p.track(shas, description="Getting upstream patches.."): |
            # Skip merges; they can't be cherry-picked directly and are
            # always empty on upstream.
            if is_merge("kernel-upstream", sha):
| continue |
| entry = {} |
| entry["sha"] = sha |
| entry["commit-title"] = commit_subject("kernel-upstream", sha) |
| upstream.append(entry) |
| |
    return upstream
| |
| |
| def handle_error(e): |
| """UI for interaction with conflicts and other errors.""" |
| |
| global dropped |
| |
| while True: |
| if args.triage: |
| generic_abort("kernel-upstream") |
| dropped += 1 |
| log.info("Number of dropped commits : %s", str(dropped)) |
| return "d" |
| |
| option = Prompt.ask( |
| ( |
| "[b][red]Conflict occured[/red][/b]. " |
| "[b](c)[/b]ontinue, [b](s)[/b]top, [b](d)[/b]rop, what[b](?)[/b] > " |
| ), |
| choices=["c", "s", "d", "?"], |
| ) |
| |
| try: |
| if option == "c": |
| if not is_resolved("kernel-upstream"): |
| log.warning( |
| "Something still not resolved. Resolve and press c." |
| ) |
| continue |
| generic_continue("kernel-upstream") |
| return "c" |
| if option == "s": |
| sys.exit(-1) |
| if option == "d": |
| generic_abort("kernel-upstream") |
| return "d" |
| if option == "?": |
| log.info(e) |
| except Exception as err: |
| log.error(err) |
            fatal = Confirm.ask("Is the exception fatal?")
| if fatal: |
| sys.exit(-1) |
| |
| |
| def squash(top_patches): |
| """Squashes len(top_patches) commits |
| |
| The commit message is the top_patches list formatted in a human-readable |
| way. |
| This list would best reflect the subjects of the squashed commits, but it |
| can |
| be anything as long as it's a list of strings. |
| """ |
| |
| n = len(top_patches) |
| msg = "Squash: [" |
| for patch in top_patches: |
| msg += patch |
| msg += ", " |
| msg += "]" |
| |
| with sh.pushd("kernel-upstream"): |
| sh.git("reset", f"HEAD~{n}") |
| |
| err = None |
| try: |
| sh.git("add", "-A") |
| sh.git("commit", "-m", msg) |
| except sh.ErrorReturnCode_1 as e: |
| if "nothing to commit" in str(e): |
| log.warning("Replace result is null, proceed without commit") |
| return False |
| err = e |
| except Exception as e: |
| err = e |
| |
        if err is not None:
            handle_error(err)
| |
| return False |
| |
| |
| def revert_sha(sha): |
| """Revert sha.""" |
| |
| try: |
| revert("kernel-upstream", sha) |
| return True |
| except Exception as e: |
| if "nothing to commit" in str(e): |
| log.info("Nothing to revert, skipping") |
| return True |
| choice = handle_error(e) |
| if choice == "c": |
| return True |
| |
| return False |
| |
| |
| def cherry_pick_sha(sha, use_am=False): |
| """Cherry pick sha.""" |
| |
| try: |
| cherry_pick("kernel-upstream", sha, use_am=use_am) |
| return True |
| except Exception as e: |
| if "--allow-empty" in str(e): |
| log.info("Nothing to commit, skipping") |
| return True |
| if "The previous cherry-pick is now empty" in str(e): |
| log.info("cherry-pick empty") |
| return True |
| choice = handle_error(e) |
| if choice == "c": |
| return True |
| |
| return False |
| |
| |
| def calculate_chromeos_dispositions(begin, end, from_upstream): |
| """Calculate patching dispositions.""" |
| |
| # List ChromeOS |
| log.info(f"List ChromeOS patches for {KERNELUPSTREAM_BRANCH_PREFIX}{begin}") |
| begin_patches = get_patches(begin) |
| log.info(f"List ChromeOS patches for {KERNELUPSTREAM_BRANCH_PREFIX}{end}") |
| end_patches = get_patches(end) |
| |
| log.info("Calculating dispositions") |
| disps = dispositions(begin_patches, end_patches, from_upstream) |
| |
| log.info("Removing no-op replaces") |
| disps = filter_noops(disps) |
| |
| reverts = 0 |
| picks = 0 |
| replacements = 0 |
| for disp in disps: |
| d = disp["disposition"] |
| if d == "revert": |
| reverts += 1 |
| elif d == "pick": |
| picks += 1 |
| elif d == "replace": |
| replacements += 1 |
| |
| log.info("Computed dispositions of ChromeOS patches:") |
| log.info(f"Patches to revert: {reverts}") |
| log.info(f"Patches to pick: {picks}") |
| log.info(f"Patches to replace: {replacements}") |
| log.info( |
| f"Patches that entered upstream between {begin} and {end}: {len(from_upstream)}" |
| ) |
| |
| return disps |
| |
| |
| def revert_chromeos_patches(disps): |
| """Revert unneeded ChromeOS patches based on dispositions.""" |
| |
| log.info("Revert unneeded ChromeOS patches") |
| for disp in disps: |
| if disp["disposition"] != "revert": |
| continue |
| if disp["title"] in skip_reverts: |
| continue |
| |
| log.info(f'Revert {disp["sha"]} {disp["title"]}') |
| revert_sha(disp["sha"]) |
| |
| |
| def pick_chromeos_patches(disps): |
| """Pick ChromeOS patches baded on dispositions.""" |
| |
| log.info("Replace changed ChromeOS patches") |
| for disp in disps: |
| if disp["disposition"] != "replace": |
| continue |
| |
| to_revert = disp["old"] |
| to_pick = disp["new"] |
| title = disp["title"] |
| log.info(f"Replace {to_revert} -> {to_pick} {title}") |
| |
| fixup_old = disp["fixup_old"] |
| fixup_new = disp["fixup_new"] |
| |
| applied = [] |
| if fixup_old is not None: |
| title = commit_subject("kernel-upstream", fixup_old) |
| log.info(f"Revert old fixup {fixup_old} {title}") |
| revert_sha(fixup_old) |
| applied.append(commit_subject("kernel-upstream", fixup_old)) |
| |
| title = commit_subject("kernel-upstream", to_revert) |
| log.info(f"Revert commit {to_revert} {title}") |
| revert_sha(to_revert) |
| applied.append(commit_subject("kernel-upstream", to_revert)) |
| |
| title = commit_subject("kernel-upstream", to_pick) |
| log.info(f"Pick commit {to_pick} {title}") |
| merge = is_merge("kernel-upstream", to_pick) |
        # cherry_pick_sha returns False when the pick was dropped; roll
        # back everything applied for this disposition so far.
        if not cherry_pick_sha(to_pick, merge):
            with sh.pushd("kernel-upstream"):
                rollback = len(applied)
                sh.git("reset", "--hard", f"HEAD~{rollback}")
            continue
| applied.append(commit_subject("kernel-upstream", to_pick)) |
| |
| if fixup_new is not None: |
| title = commit_subject("kernel-upstream", fixup_new) |
| log.info(f"Pick new fixup {fixup_new} {title}") |
            if not cherry_pick_sha(fixup_new):
                with sh.pushd("kernel-upstream"):
                    rollback = len(applied)
                    sh.git("reset", "--hard", f"HEAD~{rollback}")
                continue
| applied.append(commit_subject("kernel-upstream", fixup_new)) |
| |
| if len(applied) > 1: |
| squash(applied) |
| |
| log.info("Replace changed ChromeOS fixups") |
| for disp in disps: |
| if disp["disposition"] != "replace-fixup": |
| continue |
| if not disp["title"].startswith(FIXUP_PREFIX): |
| continue |
| |
| fixup_old = disp["fixup_old"] |
| fixup_new = disp["fixup_new"] |
| title = disp["title"] |
| log.info(f"Replace fixup {fixup_old} -> {fixup_new} {title}") |
| |
| revert_sha(fixup_old) |
| cherry_pick_sha(fixup_new) |
| |
| log.info("Pick ChromeOS patches") |
| for disp in disps: |
| if disp["disposition"] != "pick": |
| continue |
| |
| log.info(f'Pick {disp["sha"]} {disp["title"]}') |
| cherry_pick_sha(disp["sha"]) |
| |
| |
| def create_bisect_branch( |
| begin, end, pick_upstream_patches=True, create_final_commit=True |
| ): |
| """Create bisection branch.""" |
| |
| # Resultant branch name format |
| bisect_branch = f"kernelupstream-bisect-{begin}_{end}" |
| |
| log.info("Begin work on constructing a bisection branch...") |
| |
| begin_branch = f"{KERNELUPSTREAM_BRANCH_PREFIX}{begin}" |
| log.info(f"Checkout {begin_branch}") |
| checkout("kernel-upstream", begin_branch) |
| |
| log.info(f"Create branch {bisect_branch}") |
| create_head("kernel-upstream", bisect_branch, force=args.force) |
| |
| # List upstream patches between {begin} and {end} |
| log.info(f"List upstream patches between {begin} and {end}") |
| from_upstream = upstream_picks(f"v{begin}", f"v{end}") |
| log.info(f"Found {len(from_upstream)} upstream patches.") |
| |
| disps = calculate_chromeos_dispositions(begin, end, from_upstream) |
| |
| # Checkout bisection branch |
| checkout("kernel-upstream", bisect_branch) |
| |
| revert_chromeos_patches(disps) |
| |
| if pick_upstream_patches: |
| log.info("Cherry-pick upstream patches") |
| for entry in from_upstream: |
| log.info(f'Pick {entry["sha"]} {entry["commit-title"]}') |
| cherry_pick_sha(entry["sha"]) |
| else: |
| log.warning("NOTE: Not picking upstream patches as intended!") |
| |
| pick_chromeos_patches(disps) |
| |
| if create_final_commit: |
| # Apply a special patch to account for any remaining difference between this bisection |
| # branch and the {end}. |
| log.info( |
| f"Creating a final commit to make the tree exactly the same as {end}" |
| ) |
| end_branch = f"{KERNELUPSTREAM_BRANCH_PREFIX}{end}" |
| rem_diff = diff("kernel-upstream", f"HEAD..{end_branch}") |
| rem_diff_path = "/tmp/kcr_bisect_rem.patch" |
| with open(rem_diff_path, "w") as f: |
| f.write(rem_diff) |
| with sh.pushd("kernel-upstream"): |
| sh.git("apply", rem_diff_path) |
| sh.git("add", "-A") |
| sh.git( |
| "commit", |
| "-m", |
| f"KCR BISECT: Commit all remaining diff from {end}", |
| ) |
| |
| |
| def build_kernel(): |
| """Build kernel image and return true if exitcode was zero.""" |
| |
| if args.skip_kernel: |
| log.warning( |
| "[yellow]Skipping kernel build on purpose (--skip-kernel)[/yellow]" |
| ) |
| return True |
| |
| log.info("Building kernel...") |
| ret = verify_build(None, args.board) |
| if ret["exit_code"] == 0: |
| log.info("Built succesfully.") |
| return True |
| |
| log.info("Error building:") |
| if ret["error_line"] is not None: |
| l = ret["error_line"] |
| reg = re.compile("\x1b\\[[0-9;]*m") |
| err = reg.sub("", "\n".join(ret["output"].split("\n")[l - 7 : l])) |
| log.info(escape(err)) |
| else: |
| log.info("(No error line.)") |
| return False |
| |
| |
| def verify_bisect_branch(begin, end, steps_cnt): |
| """Verify bisection branch.""" |
| |
    # Resultant branch name format; must match create_bisect_branch()
    bisect_branch = f"kernelupstream-bisect-{begin}_{end}"
| |
| log.info(f"Checkout {bisect_branch}") |
| checkout("kernel-upstream", bisect_branch) |
| |
| shas = list_shas( |
| "kernel-upstream", f"{KERNELUPSTREAM_BRANCH_PREFIX}{begin}..HEAD" |
| ) |
| |
    shas_cnt = len(shas)
    idx = 0
    res_dict = OrderedDict()
    step = round(shas_cnt / steps_cnt)
    start_time = datetime.now()
    log.info(
        f"There are {shas_cnt} commits on {bisect_branch} branch which "
        f"will be verified in {steps_cnt} steps (step size {step}) for "
        f"{args.board} board.\n"
    )
| |
| for i in range(1, steps_cnt + 1): |
| start = datetime.now() |
| |
| sha = list_shas("kernel-upstream", "HEAD~1..HEAD") |
| title = commit_subject("kernel-upstream", sha[0]) |
| log.info( |
| f"Step {i} of {steps_cnt}. Verifying build with HEAD set at commit {sha[0]} {title}" |
| ) |
| |
| res_dict[sha[0]] = build_kernel() |
| |
        step_end = datetime.now()
        elapsed = step_end - start
        log.info(f"Build took {elapsed}\n")
| |
| idx += step |
| if idx >= shas_cnt: |
| break |
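        # Step HEAD back {step} commits toward {begin} for the next build.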
| checkout("kernel-upstream", f"HEAD~{step}") |
| |
| end_time = datetime.now() |
| log.info(f"Total verification time {end_time - start_time}") |
| |
| log.info("\nVerification summary:") |
| with open(bisect_branch + ".txt", "a") as f: |
| fail_cnt = 0 |
| f.write(args.board + ":\n") |
| for i, (key, val) in enumerate(res_dict.items()): |
| if val: |
| f.write(f"{key}\n") |
| status = "passed" |
| else: |
| status = "failed" |
| fail_cnt += 1 |
| log.info(f"{i+1}. commit {key} build {status}") |
| log.info(f"\nTotal number of builds : {len(res_dict)}") |
| log.info(f"Number of failed builds : {fail_cnt}") |
| |
| |
| def split_list(alist, splits=2): |
| """Try to split list in equal parts.""" |
| |
| length = len(alist) |
| return [ |
| alist[i * length // splits : (i + 1) * length // splits] |
| for i in range(splits) |
| ] |
| |
| |
| def bresenham_split(m, n): |
| """Find equally divided indices for selected range""" |
| return [i * n // m + n // (2 * m) for i in range(m)] |
| |
| |
| def get_steps_shas(repo, commit_range, steps): |
| """Retrieve SHAs for bisection steps""" |
| shas = list_shas(repo, commit_range) |
| |
| if steps == 1: # for single step, we assume you want to test tip of tree |
| return [shas[0]] |
| |
| shas.reverse() |
| indices = bresenham_split(steps, len(shas)) |
| |
| step_shas = [shas[i] for i in indices] |
| |
| # We should always have first and last SHA in our sequence even at the |
| # expense of having additional steps |
| |
| if step_shas[0] != shas[0]: |
| step_shas.insert(0, shas[0]) |
| |
| if step_shas[-1] != shas[-1]: |
| step_shas.append(shas[-1]) |
| |
| return step_shas |
| |
| |
| def get_headers(msg, header): |
| """Returns header content if any""" |
| contents = [] |
| |
| for line in str(msg).splitlines(): |
| if line.strip().startswith(header + ": "): |
| contents.append(line.strip().removeprefix(header + ": ").strip()) |
| |
| if len(contents) > 0: |
| return contents |
| return None |
| |
| |
def get_patch_hash(patch_text):
    """Returns hash from patch header"""
    # git format-patch output starts with "From <sha> <date>"; fall back
    # to an all-zero SHA when that header is absent.
    lines = patch_text.splitlines()
    if lines and lines[0].startswith("From "):
        return lines[0].split(" ")[1]
    return "0000000000000000000000000000000000000000"
| |
| |
| def sblog(patch, msg, color="yellow"): |
| """Log message wrapper""" |
| |
| blog("DEPS", f'{patch["sha"]} -- {msg}', color) |
| |
| |
| def find_kcr_deps_in_commits(patch, kcr_patch, commits_for_patch): |
| """Find kcr dependencies""" |
| |
| # dict with file->change-id relation |
| kcr_file_to_cid = {} |
| |
    # Remember the newest commit without KCR dependency info and prefer
    # it over older commits that do carry dependency info.
    commit_without_dependencies = None
| |
| kcr_dependencies = OrderedDict() |
| |
| for commit in commits_for_patch: |
| change_id_matches = False |
| |
        # Almost every commit in the patches/ repository has a
        # Kcr-depends-on-sha header, which specifies a dependency for
        # the given patch; there can be more than one dependency.
| commit_msg = commit_message("patches", commit) |
| kcr_depends = get_headers(commit_msg, "Kcr-depends-on-sha") |
| kcr_dependencies[commit] = {} |
| |
| # sometimes there's no Kcr-depends-on-sha (e.g. merge commits) |
| if kcr_depends is None: |
| sblog(patch, f"{commit} does not provide dependency info", "yellow") |
| if not commit_without_dependencies: |
| commit_without_dependencies = commit |
| continue |
| |
        if commit_without_dependencies:
            sblog(
                patch,
                f"{commit} does provide dependency info, "
                f"but using newer {commit_without_dependencies}",
            )
            commit = commit_without_dependencies
| else: |
| sblog(patch, f"{commit} does provide dependency info", "green") |
| |
| # we need to retrieve exact file at exact commit |
| fileat = show_file_at("patches", commit, kcr_patch) |
| |
        # The change-id is a smoke check, as it should always match, but
        # we have to be sure we're applying the fixup/patch for the
        # correct change so the bisection doesn't go badly wrong.
| change_ids = get_headers(fileat, "Change-Id") |
| if patch["change-id"] in change_ids: |
| change_id_matches = True |
| kcr_file_to_cid[kcr_patch] = change_ids |
| |
        # dependencies are a single SHA or in the
        # '<patch from>:<dependency sha>' format
| for depend in kcr_depends: |
| dependency = "" |
| deps = depend.split(":") |
| if len(deps) == 1: |
| dependency = deps[0].strip() |
| else: |
| if change_id_matches and (deps[0] == get_patch_hash(fileat)): |
| dependency = deps[1].strip() |
| # we store every dependency as unresolved by default |
| if dependency != "": |
| kcr_dependencies[commit][dependency] = False |
| |
| return (kcr_file_to_cid, kcr_dependencies) |
| |
| |
| def check_and_cherry_pick_sha( |
| patch, |
| ): # pylint: disable=too-many-return-statements |
| """Check if there is a Kcr-depends-on-sha information and return true if patch matches""" |
| |
| global dropped_resolutions |
| |
    # OrderedDict is needed because we want to store dependencies in
    # historical order for future computations on indexes, such as
    # checking the top commit
| kcr_dependencies = OrderedDict() |
| |
| # check if commit actually needs dependency check, skip when no kcr-patch |
| # header was found, and pick the commit as is |
| msg = commit_message("kernel-upstream", patch["sha"]) |
| kcr_patch = get_headers(msg, "Kcr-patch") |
| |
| if kcr_patch is None: |
| return cherry_pick_sha( |
| patch["sha"] |
| ) # skip patches without replacements |
| |
| if len(kcr_patch) > 1: |
| sblog( |
| patch, |
| "more than one Kcr-patch fields detected, using the last one", |
| "red", |
| ) |
| |
| kcr_patch = kcr_patch[-1] |
| |
    # some patches have an incorrect path that should point to the "fixups/" subdir
| if not os.path.exists(os.path.join("patches/", kcr_patch)): |
| if not os.path.exists(os.path.join("patches/fixups/", kcr_patch)): |
| blog( |
| "FILE", |
| f'replacement or fixup but its file "{kcr_patch}" does not exist', |
| "red", |
| ) |
| non_interactive_exit() |
| return False |
| kcr_patch = f"fixups/{kcr_patch}" |
| |
| sblog(patch, f"replaced/fixed by: {kcr_patch}", "green") |
| |
| # list all commits for given kcr-patch file, there could be older |
| # commits with valid conflict resolutions too |
| commits_for_patch = list_shas("patches", os.path.normcase(kcr_patch)) |
| if len(commits_for_patch) > 1: |
| sblog(patch, f"more than one commit for patch {kcr_patch}") |
| |
| kcr_file_to_cid, kcr_dependencies_from_commits = find_kcr_deps_in_commits( |
| patch, kcr_patch, commits_for_patch |
| ) |
| kcr_dependencies.update(kcr_dependencies_from_commits) |
| |
| # make use of internal patch_dependencies dict |
    if kcr_patch in patch_dependencies:
| kcr_dependencies["INTERNAL"] = {} |
| for v in patch_dependencies[kcr_patch]: |
| kcr_dependencies["INTERNAL"][v] = False |
| |
    # Flatten the dependency tree to check whether it's useless, i.e.
    # there's no dependency info at all despite finding commits matching
    # the Kcr-patch header. This is a rare case where none of the
    # commits carry Kcr-depends-on-sha information.
| kcr_deps_values = [] |
| for kd in kcr_dependencies.values(): |
| for kv in kd.values(): |
| kcr_deps_values.append(kv) |
| |
| if len(kcr_deps_values) == 0: |
| sblog(patch, "no matching dependency info, applying") |
| return cherry_pick_sha( |
| patch["sha"] |
| ) # no dependencies info yet, apply with a good faith |
| |
| # iterate for every commit to actually check dependencies |
| for idx, commit in enumerate(kcr_dependencies.keys()): |
| if len(kcr_dependencies[commit].keys()) > 0: |
| for dep in kcr_dependencies[commit].keys(): |
| # check if given ref exist in repository at all |
| if not ref_exists("kernel-upstream", dep): |
| sblog(patch, f"{commit} {dep} SHA does not exist", "red") |
| non_interactive_exit() |
| kcr_dependencies[commit][dep] = False |
| |
| # check if given ref exists in current branch |
| if sha_in_head("kernel-upstream", dep): |
| sblog(patch, f"{commit} {dep} satisfied") |
| kcr_dependencies[commit][dep] = True |
| else: |
| sblog(patch, f"{commit} {dep} NOT SATISFIED", "red") |
| non_interactive_exit() |
| kcr_dependencies[commit][dep] = False |
| else: |
| sblog(patch, f"{commit} does not provide dependency info") |
| continue |
| |
| if False not in kcr_dependencies[commit].values(): |
| sblog(patch, f"{commit} satisfies all dependencies", "green") |
| if idx == 0: |
                # The resolution is at the top of the history, so we can
                # just pick it. This is why we needed the index into the
                # commits list: anything older is not pulled into the
                # kernel repo by the rebase script and must be applied
                # manually via git-am.
| return cherry_pick_sha(patch["sha"]) |
| |
| # find matching patch for change-id |
| patchpath = "/dev/null" |
| for filepath, cids in kcr_file_to_cid.items(): |
| if patch["change-id"] in cids: |
| sblog( |
| patch, |
| f'{patch["change-id"]} in {filepath} for external patch', |
| ) |
| patchpath = filepath |
| break |
| if patchpath == "/dev/null": |
| sblog( |
| patch, |
| f"{commit} does not provide any conflict resolutions with matching change-ids", |
| "red", |
| ) |
| non_interactive_exit() |
| return False |
| |
| # load patch with git am from external patches/ repository |
| resolution_path = f"/tmp/.kcr-resolution-{commit}" |
| with open(resolution_path, "w") as f: |
| patchfile = show_file_at("patches", commit, patchpath) |
| f.write(str(patchfile)) |
| sblog(patch, f"resolution from history, {commit} picked externally") |
| return apply_patch("kernel-upstream", resolution_path, patch["sha"]) |
| |
    # A single "False" anywhere in the kcr_dependencies tree means some
    # dependencies remain unsatisfied.
| for c in kcr_dependencies.values(): |
| if False in c.values(): |
| sblog(patch, "dependencies not satisfied", "red") |
| dropped_resolutions += 1 |
| non_interactive_exit() |
| return False |
| |
| # when everything else fails |
| sblog(patch, "dependency check failed", "red") |
| non_interactive_exit() |
| return False |
| |
| |
| def alternative_apply_patches(sha, branch_prefix, patches): |
| """Apply patches on specific SHA""" |
| |
| global dropped |
| global dropped_resolutions |
| dropped = 0 |
| dropped_resolutions = 0 |
    log.info(f"Checkout at {sha}: {commit_subject('kernel-upstream', sha)}")
| checkout("kernel-upstream", sha) |
| |
| applying_branch = f"{branch_prefix}{sha}" |
| log.info(f"Creating branch {applying_branch}") |
| create_head("kernel-upstream", applying_branch, force=args.force) |
| |
| log.info(f"Cherry-pick {len(patches)} ChromeOS patches") |
| |
| with patch_progress() as p: |
| for commit in p.track( |
| patches, description="Applying ChromeOS commits..." |
| ): |
| if check_and_cherry_pick_sha(commit): |
| blog( |
| "PICK", |
| f'[green]{commit["sha"]}[/green] ' |
| + f'[dim]{commit["commit-title"]}[/dim]', |
| "green", |
| ) |
| else: |
| dropped += 1 |
| blog( |
| "DROP", |
| f'[red]{commit["sha"]}[/red] {commit["commit-title"]}', |
| "red", |
| ) |
| non_interactive_exit() |
| |
| log.info(f"Dropped patches count: {dropped}") |
| |
| |
| def alternative_bisect_branch(begin, end): |
| """Create bisect branch at begin, pick commits up to end in chunks and verify them.""" |
| |
| if args.steps is None: |
| steps_count = 100 |
| else: |
| steps_count = args.steps |
| |
| if not is_tag("kernel-upstream", f"v{begin}"): |
| log.error(f"Error parsing {begin} as tag. Check if such tag exists.") |
| sys.exit(-1) |
| |
| if not is_tag("kernel-upstream", f"v{end}"): |
| log.error(f"Error parsing {end} as tag. Check if such tag exists.") |
| sys.exit(-1) |
| |
| log.info(f"Get commits between {begin} and {end}") |
| |
| step_shas = get_steps_shas( |
| "kernel-upstream", f"v{begin}...v{end}", steps_count |
| ) |
| log.info( |
| f'Starting at {step_shas[0]}: {commit_subject("kernel-upstream", step_shas[0])}' |
| ) |
| |
| log.info(f"Get chromium patches from {end}") |
| cros_patches = get_patches(end) |
| |
| log.info(f"Checking out {begin}") |
| checkout("kernel-upstream", f"v{begin}") |
| |
| # build kernel for environment check |
| if not build_kernel(): |
| log.info(f"Kernel {begin} failed to build WITHOUT patches.") |
| log.info("Something seems to be wrong in your build environment.") |
| |
| for i, step_sha in enumerate(step_shas): |
| rprint(f"step {i+1}/{len(step_shas)}: {step_sha}") |
| |
| statistics_table = Table( |
| "STEP", |
| "FROM SHA", |
| "DROPPED", |
| "UNSATISFIED", |
| "BUILD", |
| title="Bisection statistics", |
| ) |
| |
| for step, step_sha in enumerate(step_shas): |
| log.info(f"Bisection step: {step+1}/{len(step_shas)}") |
| branch_prefix = f"local-bisect-{begin}_{end}-" |
| alternative_apply_patches(step_sha, branch_prefix, cros_patches) |
| |
| # Actually, this step should upload the kernel image to DUT and test it |
| build_status = build_kernel() |
        if args.skip_kernel:
            build_status_str = "SKIP"
        elif build_status:
            build_status_str = "OK"
        else:
            build_status_str = "FAIL"
| |
| statistics_table.add_row( |
| str(step + 1), |
| step_sha, |
| str(dropped), |
| str(dropped_resolutions), |
| build_status_str, |
| ) |
| if build_status: |
| log.info( |
| f"Kernel at {step_sha} w/ ChromeOS patches builds properly" |
| ) |
| checkout("kernel-upstream", f"v{begin}") |
| else: |
            if not args.always_pass_kernel:
                log.info(
                    f"Kernel failed to build with patches applied "
                    f"on {step_sha}."
                )
                log.info(
                    "Correct build errors, resolve conflicts, upload "
                    "fixes, remove bisection branches and try again."
                )
                sys.exit(-1)
| |
| rprint(statistics_table) |
| checkout("kernel-upstream", f"v{end}") |
| |
| |
| if args.create: |
| create_bisect_branch( |
| args.begin, |
| args.end, |
| pick_upstream_patches=True, |
| create_final_commit=True, |
| ) |
| elif args.verify: |
| verify_bisect_branch(args.begin, args.end, args.steps) |
| elif args.second_strategy: |
| alternative_bisect_branch(args.begin, args.end) |