#!/usr/bin/env python3
# Copyright 2021 The ChromiumOS Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is contrib-quality code: not all functions/classes are
# documented.
# pylint: disable=import-error
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
# pylint: disable=import-outside-toplevel
# pylint: disable=missing-function-docstring
# pylint: disable=redefined-outer-name
# pylint: disable=banned-string-format-function
# pylint: disable=consider-using-f-string
# pylint: disable=import-modules-only
"""Automatic rebase
This script automates much of the continuous rebase, which is a process
designed for carrying patches from the `living` Chrome OS branch (latest LTS)
to newer upstream kernels.
See go/cont-rebase for details
"""
from datetime import datetime
import os
import pickle
import re
import sqlite3
import sys
from buildhelpers import do_on_cros_sdk
from buildhelpers import verify_build
from common import rebasedb
from githelpers import *
from logging_console import LoggingConsole
# The import is not used directly; it is intended for use in the interactive mode.
from mailing import load_and_notify # pylint: disable=unused-import
import rebase_config
from rebase_config import disp_overwrite
import sh
from config import *
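# call_hook() runs per-commit hooks registered in rebase_config.commit_hooks.
# Illustrative shape only (the real entries live in rebase_config.py, and the
# hook names here are made up):
#   commit_hooks = {
#       "*":          {"types": ["pre", "post"], "hook": log_every_commit},
#       "<full sha>": {"types": ["conflict"],    "hook": warn_on_conflict},
#   }
# Each hook is a callable taking (sha, hook_type); hook_type is one of
# "pre", "post", "post_empty", "post_drop" or "conflict".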
def call_hook(sha, hook_type):
if "*" in rebase_config.commit_hooks:
entry = rebase_config.commit_hooks["*"]
if hook_type in entry["types"]:
hook = entry["hook"]
hook(sha, hook_type)
if sha in rebase_config.commit_hooks:
entry = rebase_config.commit_hooks[sha]
if hook_type in entry["types"]:
hook = entry["hook"]
hook(sha, hook_type)
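# normalize() checks out the merged kernelupstream branch and regenerates the
# ChromeOS kernel configs inside the cros SDK: it runs `kernelconfig genconfig`
# in linux-chrome, copies the resulting flavour configs into kernel-upstream,
# re-appends the IWLWIFI options listed below, runs `kernelconfig olddefconfig`
# and commits the result as a "kernel-rebase: normalization" commit.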
def normalize():
kernelupstream_branch = branch_name("kernelupstream", rebase_target, None)
print(f"Checking out {kernelupstream_branch}")
checkout("kernel-upstream", kernelupstream_branch)
def in_linux_chrome(command):
return "cd ./data/repositories/linux-chrome/; " + command
def in_configs(command):
return (
"cd ./data/repositories/linux-chrome/CONFIGS/chromeos; " + command
)
def in_knext(command):
return "cd kernel-upstream; " + command
configs = [
(
"arm64-chromiumos-arm64-generic.flavour.config",
"arm64/chromiumos-arm64-generic.flavour.config",
),
(
"arm64-chromiumos-mediatek.flavour.config",
"arm64/chromiumos-mediatek.flavour.config",
),
(
"arm64-chromiumos-qualcomm.flavour.config",
"arm64/chromiumos-qualcomm.flavour.config",
),
(
"arm64-chromiumos-rockchip64.flavour.config",
"arm64/chromiumos-rockchip64.flavour.config",
),
(
"armel-chromiumos-arm-generic.flavour.config",
"armel/chromiumos-arm-generic.flavour.config",
),
(
"armel-chromiumos-rockchip.flavour.config",
"armel/chromiumos-rockchip.flavour.config",
),
(
"x86_64-chromeos-amd-stoneyridge.flavour.config",
"x86_64/chromeos-amd-stoneyridge.flavour.config",
),
(
"x86_64-chromeos-intel-denverton.flavour.config",
"x86_64/chromeos-intel-denverton.flavour.config",
),
(
"x86_64-chromeos-intel-pineview.flavour.config",
"x86_64/chromeos-intel-pineview.flavour.config",
),
(
"x86_64-chromiumos-x86_64-generic.flavour.config",
"x86_64/chromiumos-x86_64-generic.flavour.config",
),
]
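# The IWLWIFI options below are appended back to the Intel flavour configs
# (presumably so they survive the config regeneration) before olddefconfig runs.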
iwls = [
"CONFIG_IWLWIFI=m",
"CONFIG_IWLDVM=m",
"CONFIG_IWLMVM=m",
"CONFIG_IWLWIFI_DEBUGFS=y",
]
iwl_fix_configs = [
"chromeos/config/chromeos/x86_64/chromeos-intel-denverton.flavour.config",
"chromeos/config/chromeos/x86_64/chromeos-intel-pineview.flavour.config",
]
genconfig = in_linux_chrome("chromeos/scripts/kernelconfig genconfig 2>&1")
commands = (
[
in_configs(
"cp "
+ config[0]
+ " ../../../../../kernel-upstream/chromeos/config/chromeos/"
+ config[1]
+ " 2>&1"
) # pylint: disable=C0301
for config in configs
]
+ [
in_knext('echo "' + iwl + '" >> ' + file)
for iwl in iwls
for file in iwl_fix_configs
]
+ [
in_knext("chromeos/scripts/kernelconfig olddefconfig 2>&1"),
in_knext("git add -A 2>&1"),
in_knext(
'git commit -m "kernel-rebase: normalization [autogenerated]" 2>&1'
),
]
)
while True:
print("genconfig")
res = do_on_cros_sdk(genconfig)
output = res["output"]
ec = res["exit_code"]
print(output, end="")
if ec != 0:
if "this assembler is not supported" in output:
print(
"This failure might be caused be the lack of commit e749464a877aa:"
)
print('"CHROMIUM: kernelconfig sets CROSS_COMPILE"')
return
break
for command in commands:
print("executing", command)
res = do_on_cros_sdk(command)
output = res["output"]
ec = res["exit_code"]
print(output, end="")
if ec != 0:
print("exit code:", ec)
return
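# Rebaser reads the sqlite database at `rebasedb` (maintained by the rest of
# the kernel-rebase tooling). Only these tables/columns are used here
# (shown for orientation, not an authoritative schema):
#   topics:  topic (gid), name
#   commits: sha, dsha, subject, reason, disposition ('pick'/'drop'/'replace'), topic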
class Rebaser:
"""Keeps all automatic rebase data"""
def __init__(self, branch_prefix="test"):
assert not is_dirty(
"kernel-upstream"
), "There's a local diff in kernel repo. Clean it to continue."
self.db = sqlite3.connect(rebasedb)
self.cur = self.db.cursor()
self.branch_prefix = branch_prefix
# Create topic dict (name->gid)
self.topics = {}
self.cur.execute("select topic, name from topics")
t = self.cur.fetchall()
for gid, name in t:
self.topics[name] = gid
print("Topic dict: ", self.topics)
# Handle extended dispositions
self.handle_ext_disposition(self.topics)
self.upstreamed = {
"upstream": 0,
"fromlist": 0,
"fromgit": 0,
"backport": 0,
}
self.total = {"upstream": 0, "fromlist": 0, "fromgit": 0, "backport": 0}
self.cur.execute("select subject, reason from commits")
t = self.cur.fetchall()
for subject, reason in t:
subject_l = subject.lower()
if "fromlist:" in subject_l:
self.total["fromlist"] += 1
if "upstream" in reason:
self.upstreamed["fromlist"] += 1
if "fromgit:" in subject_l:
self.total["fromgit"] += 1
if "upstream" in reason:
self.upstreamed["fromgit"] += 1
if "upstream:" in subject_l:
self.total["upstream"] += 1
if "upstream" in reason:
self.upstreamed["upstream"] += 1
if "backport:" in subject_l:
self.total["backport"] += 1
if "upstream" in reason:
self.upstreamed["backport"] += 1
self.kernel = None
# Pull chromeos branch
print("Fetching cros...")
fetch("kernel-upstream", "cros")
print("Fetching upstream...")
fetch("kernel-upstream", "upstream")
# Checkout to target branch
print("Checkout to", rebase_target, "...")
checkout("kernel-upstream", rebase_target)
def get_topic_dispositions(self, topic_list):
gids = []
for topic in topic_list:
gids.append(self.topics[topic])
gids = str(gids).replace("[", "(").replace("]", ")")
self.cur.execute(
"select disposition,sha,subject,reason,dsha from commits where topic in %s"
% gids
)
dispositions = self.cur.fetchall()
for i in range(len(dispositions)): # pylint: disable=C0200
disp = dispositions[i][0]
sha = dispositions[i][1]
subject = dispositions[i][2]
reason = dispositions[i][3]
dsha = dispositions[i][4]
# Only pick, drop and replace dispositions are recognized
assert disp in [
"pick",
"drop",
"replace",
], "Unrecognized disposition."
# Modify dispositions according to overwrite
if sha in disp_overwrite:
# extended dispositions are handled in handle_ext_disposition
if isinstance(disp_overwrite[sha], list):
continue
dispositions[i] = (
disp_overwrite[sha],
sha,
subject,
reason,
dsha,
)
return dispositions
# Handle extended dispositions (currently only 'move')
# topic_list: known topic names (e.g. the name->gid dict built in __init__)
def handle_ext_disposition(self, topic_list):
print("Handle extended disposition overwrites")
for sha, val in disp_overwrite.items():
# currently we support 'move' extended disposition only
if not isinstance(val, list):
continue
assert val[0] == "move", "Unrecognized extended disposition"
dst = val[1]
assert dst in topic_list, f"Unrecognized destination topic {dst}"
self.topic_move(sha, dst)
# Rebase multiple topic branches, joining them into one topic branch.
# end_name: name of the target branch
# topic_list: list of source topics
# is_triage: if set, skip over commits that require manual resolution
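# Example (interactive; the topic names are illustrative):
#   r = Rebaser()
#   r.rebase_multiple("wifi-and-bt", ["wifi", "bluetooth"], is_triage=True)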
def rebase_multiple(self, end_name, topic_list, is_triage=False):
print("Checkout to", rebase_target, "...")
checkout("kernel-upstream", rebase_target)
if is_triage:
topic_branch = branch_name("triage", rebase_target, end_name)
print("Triage mode on. Using branch %s." % topic_branch)
with sh.pushd("kernel-upstream"):
try:
sh.git("branch", "-D", topic_branch)
except sh.ErrorReturnCode_1:
pass
else:
topic_branch = branch_name(
self.branch_prefix, rebase_target, end_name
)
try:
create_head("kernel-upstream", topic_branch)
except OSError as err:
print(err)
print("Branch already exists?")
return {}
print("Rebasing topics %s, branch %s" % (topic_list, end_name))
if output_patches_for_review:
sh.mkdir("-p", review_path(end_name, "/new"))
sh.mkdir("-p", review_path(end_name, "/original"))
print("Checkout to %s..." % topic_branch)
checkout("kernel-upstream", topic_branch)
dispositions = self.get_topic_dispositions(topic_list)
dropped = 0
noconflicts = 0
autoresolved = 0
manual = 0
fixup_manual = 0
index = 0
dispositions_with_deps = []
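# rebase_config.patch_deps maps a commit sha to shas that must be picked right
# before it, e.g. (illustrative): patch_deps = {"<sha>": ["<dep sha>", ...]}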
for i in dispositions:
sha = i[1]
if sha in rebase_config.patch_deps:
for dep in rebase_config.patch_deps[sha]:
print("Adding dependency", dep, "for patch", sha)
subject = "(fake subject) Dependency of " + sha
dispositions_with_deps.append(
["pick", dep, subject, "", ""]
)
dispositions_with_deps.append(i)
dispositions = dispositions_with_deps
for i in dispositions:
disp = i[0]
sha = i[1]
subject = i[2]
reason = i[3]
dsha = i[4]
if cp_or_am_in_progress("kernel-upstream"):
print(
"cherry-pick or am is currently in progress in kernel-upstream"
)
print("resolve and press enter to continue")
input()
if disp == "drop":
print("Drop commit (%s) %s: %s" % (reason, sha, subject))
# don't count commits dropped because of upstreaming, to be
# consistent with genspreadsheet.py
if reason != "upstream":
dropped += 1
continue
if disp == "replace":
# 'pick' commit from upstream, not from chromium db.
print("INFO: commit disposition is replace")
fsha = dsha
upstream_subject = commit_subject("kernel-upstream", fsha)
print(
'Replace commit %s: "%s" with upstream commit %s: "%s"'
% (sha, subject, fsha, upstream_subject)
)
else:
fsha = sha
print("Pick commit %s: %s" % (fsha, subject))
diff = replacement("kernel-upstream", fsha)
if diff is not None:
print("Patch replaced by previous conflict resolution:", diff)
# Make the path absolute
diff = os.getcwd() + "/" + diff
override_unresolved = False
try:
call_hook(fsha, "pre")
if diff is None:
cherry_pick("kernel-upstream", fsha)
if disp == "replace":
add_prefix_to_commit_subject(
"kernel-upstream", "UPSTREAM", upstream_subject
)
else:
apply_patch(
"kernel-upstream", diff, fsha
) # sha is only used for debugs
add_kcr_patch_tag(diff)
noconflicts += 1
index += 1
save_for_review("kernel-upstream", end_name, index, sha, fsha)
# No conflicts, check rerere and continue
call_hook(fsha, "post")
continue
except Exception as error: # pylint: disable=broad-except
if "could not build fake ancestor" in str(error):
override_unresolved = True
if debug:
sh.mkdir("-p", "debug/rebase/" + fsha)
with open(
"debug/rebase/" + fsha + "/cp_am_err",
"w",
encoding="utf-8",
) as f:
f.write(str(error))
call_hook(fsha, "conflict")
print("Conflicts found.")
# There were conflicts, check if autoresolved
# Autostage in git is assumed
# Patches applied from files shouldn't be autoresolved, so there is no path
# here for handling git-apply conflicts
if is_resolved("kernel-upstream") and not override_unresolved:
print("All resolved automatically.")
autoresolved += 1
try:
with sh.pushd("kernel-upstream"):
sh.git("-c", "core.editor=true", "am", "--continue")
except sh.ErrorReturnCode_128 as e:
am_err = "No changes - did you forget" in str(e.stdout)
if am_err:
print(
"Cherry-pick/am empty due to conflict resolution. Skip."
)
with sh.pushd("kernel-upstream"):
sh.git("-c", "core.editor=true", "am", "--abort")
call_hook(fsha, "post_empty")
continue
raise e
if diff is not None:
add_kcr_patch_tag(diff)
index += 1
save_for_review("kernel-upstream", end_name, index, sha, fsha)
call_hook(fsha, "post")
elif is_triage:
# Conflict requires manual resolution - drop and continue
print("Commit requires manual resolution. Dropping it for now.")
manual += 1
with sh.pushd("kernel-upstream"):
sh.git("am", "--abort")
call_hook(fsha, "post_drop")
continue
print(
"""
Conflict requires manual resolution.
Resolve it in another window, add the changes by git add, then
type \'continue\' (c) here.
Or drop this patch by typing \'drop\' (d). It will be recorded in
rebase_config.py and dropped in subsequent rebases.
Or stop the rebase altogether (while keeping the changes that
were already made) by typing \'stop\' (s).
"""
)
cmd = ""
while cmd not in ["continue", "drop", "stop", "s", "c", "d"]:
cmd = input()
if cmd in ["continue", "c"]:
# Commit the change and continue
while not is_resolved("kernel-upstream"):
print("Something still unresolved. Resolve and hit enter.")
input()
manual += 1
try:
with sh.pushd("kernel-upstream"):
sh.git("-c", "core.editor=true", "am", "--continue")
except Exception as e: # pylint: disable=broad-except
err_s = str(e)
if "did you forget to use 'git add'" in err_s:
with sh.pushd("kernel-upstream"):
sh.git("am", "--skip")
call_hook(fsha, "post_drop")
print("Patch empty due to conflict resolution. Skip.")
else:
print("git am --continue failed:")
print(e)
print("Fatal? [y/n]")
ans = input()
if ans in ["y", "Y"]:
return {}
index += 1
save_for_review("kernel-upstream", end_name, index, sha, fsha)
call_hook(sha, "post")
save_head("kernel-upstream", fsha)
elif cmd in ["drop", "d"]:
dropped += 1
# Drop the commit and record the drop in rebase_config.py
with sh.pushd("kernel-upstream"):
sh.git("am", "--abort")
with open("rebase_config.py", "a", encoding="utf-8") as f:
f.write(
"disp_overwrite['%s'] = '%s' # %s\n"
% (sha, "drop", subject)
)
else:
print(
"Stopped. %s commits dropped, %s applied cleanly, %s resolved"
" automatically, %s needing manual resolution"
% (dropped, noconflicts, autoresolved, manual)
)
with sh.pushd("kernel-upstream"):
sh.git("am", "--abort")
return {}
# Apply global reverts
for fsha in rebase_config.global_reverts:
with sh.pushd("kernel-upstream"):
sh.git("-c", "core.editor=true", "revert", fsha)
for topic in topic_list:
if topic in rebase_config.topic_patches:
# Apply patches and fixups for this particular topic
for name in rebase_config.topic_patches[topic]:
try:
call_hook("[nosha]", "pre")
patch_short = "patches/{}".format(name)
patch = os.getcwd() + "/" + patch_short
ret = apply_patch("kernel-upstream", patch, "[nosha]")
if ret.find("Patch already applied") != -1:
print(
"Patch: '"
+ patch_short
+ "' for "
+ topic
+ " already applied."
)
continue
add_kcr_patch_tag(patch_short, True)
# No conflicts, check rerere and continue
print(
"Applied '"
+ patch_short
+ "' to "
+ topic
+ " topic branch."
)
index += 1
save_for_review(
"kernel-upstream", end_name, index, None, "HEAD"
)
call_hook("[nosha]", "post")
continue
except sh.ErrorReturnCode_128:
print(
"Conflict found: '"
+ patch_short
+ "' for "
+ topic
+ " topic branch."
)
with sh.pushd("kernel-upstream"):
sh.git("am", "--abort")
fixup_manual += 1
call_hook("[nosha]", "post_drop")
print(
"Done. %s commits dropped, %s applied cleanly, %s resolved"
" automatically, %s+%s needing manual resolution"
% (dropped, noconflicts, autoresolved, manual, fixup_manual)
)
return {
"dropped": dropped,
"noconflicts": noconflicts,
"autoresolved": autoresolved,
"manual": manual,
"fixup_manual": fixup_manual,
}
# Shorthand for rebase_multiple
def rebase_one(self, t, is_triage=False):
return self.rebase_multiple(t, [t], is_triage)
# Moves commit into topic dst
# commit - sha string
# dst - topic name string
def topic_move(self, commit, dst):
dst_gid = self.topics[dst]
query = "select subject, topic from commits where sha='%s'" % commit
self.cur.execute(query)
ret = self.cur.fetchall()
if not ret:
print(
f"Warning commit {commit} not found for move to '{dst}' disposition."
"Fix rebase_config.py"
)
return
src_gid = ret[0][1]
src = ""
for topic_name, gid in self.topics.items():
if gid == src_gid:
src = src_gid
src_topic = topic_name
assert src != "", "No such topic?"
query = "update commits set topic=%d where sha='%s'" % (dst_gid, commit)
self.cur.execute(query)
query = "select subject, topic from commits where sha='%s'" % commit
self.cur.execute(query)
ret = self.cur.fetchall()
assert dst_gid == ret[0][1]
print(f"Moved commit {commit} '{ret[0][0]}' from {src_topic} to {dst}")
def topic_list(self, topic):
dst_gid = self.topics[topic]
query = (
"select sha, subject from commits where topic=%d and disposition='pick'"
% dst_gid
)
self.cur.execute(query)
ret = self.cur.fetchall()
for i in ret:
print(i[0], i[1])
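# triage() rebases every topic in triage mode (conflicting commits are dropped),
# build-verifies each triage branch through the executor, stores the build logs
# under log/triage/ and pickles the per-topic stats so mailing.load_and_notify()
# can report them later.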
def triage():
start = datetime.now()
verify_time = datetime.now() - start  # effectively a zero timedelta
# Check that the executor is alive; we'll need it to verify builds
if not do_on_cros_sdk("true", 1):
print("Is executor running?")
return None
r = Rebaser()
topic_stats = r.topics
upstream_stats = r.upstreamed
total_stats = r.total
topic_stderr = {}
for topic in list(topic_stats):
topic_branch = branch_name("triage", rebase_target, topic)
ret = r.rebase_one(topic, is_triage=True)
topic_stats[topic] = [
ret["dropped"]
+ ret["noconflicts"]
+ ret["autoresolved"]
+ ret["manual"]
+ ret["fixup_manual"],
ret["dropped"] + ret["noconflicts"] + ret["autoresolved"],
ret["manual"],
ret["fixup_manual"],
ret["noconflicts"],
False,
]
print("Verifying build...")
verify_one_start = datetime.now()
ret = verify_build(topic_branch)
verify_one_end = datetime.now()
verify_time += verify_one_end - verify_one_start
if ret["exit_code"] == 0:
print("Built %s succesfully." % topic)
topic_stats[topic][5] = True
else:
print("Error building %s:" % topic)
if ret["error_line"] is not None:
l = ret["error_line"]
reg = re.compile("\x1b\\[[0-9;]*m")  # strips ANSI color escape codes
topic_stderr[topic] = reg.sub(
"", "\n".join(ret["output"].split("\n")[l - 7 : l])
)
print(topic_stderr[topic])
else:
print("(No error line.)")
with open(
"log/triage/"
+ topic_branch.replace(".", "_").replace("/", "-")
+ ".txt",
"w",
encoding="utf-8",
) as f:
f.write(ret["output"])
end = datetime.now()
elapsed_total = end - start
applying_time = elapsed_total - verify_time
print(f"Verifying builds took: {verify_time}")
print(f"Applying patches took: {applying_time}")
# Pickle the topic stats. Those can be loaded later by
# mailing.load_and_notify()
with open("topic_stats.bin", "wb") as f:
pickle.dump(topic_stats, f)
with open("topic_stderr.bin", "wb") as f:
pickle.dump(topic_stderr, f)
with open("upstream_stats.bin", "wb") as f:
pickle.dump(upstream_stats, f)
with open("total_stats.bin", "wb") as f:
pickle.dump(total_stats, f)
return (topic_stats, topic_stderr)
def review_path(topic, suffix):
"""Returns path where patches should be saved for review + given suffix at the end"""
return "review/" + topic + "/" + suffix
# Disable broad-exception-caught warning, since we only print the exception here for debugging
# pylint: disable=W0703
def save_for_review(repo, topic, index, src_sha, new_sha):
"""Saves source (@src_sha) and resolved (@HEAD) patches as files for review"""
if not output_patches_for_review:
return
try:
filename = (
"{:04d}-".format(index) + patch_title(repo, new_sha) + ".patch"
)
if src_sha is not None:
path = review_path(topic, "original/") + filename
format_patch(repo, src_sha, path)
path = review_path(topic, "/new/") + filename
format_patch(repo, "HEAD", path)
except Exception as e:
print("Failed to save {} for review: {}".format(new_sha, e))
def save_head_user(prefix="patches"):
print("Current HEAD:")
sha = head_sha("kernel-upstream")
print(commit_message("kernel-upstream", sha))
print("This will record the current HEAD")
name = input("patch name: ")
if "/" in name:
print("patch name can't contains forward slashes!")
return
path = "patches/" + prefix + "/{}.patch".format(name)
if os.path.isfile(path):
print("Path exists!")
yn = input("proceed [y/n]:")
if yn.lower() not in ["y", "yes"]:
print("aborting")
return
save_head("kernel-upstream", sha, path_override=path, add_prefix=True)
def save_as_patch(prefix="patches"):
save_head_user(prefix)
def save_as_fixup(prefix="fixups"):
save_head_user(prefix)
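# merge_topic_branches() merges all per-topic kernelupstream branches into the
# combined kernelupstream branch for rebase_target, honoring
# rebase_config.merge_order_override first, and then applies the patches listed
# in rebase_config.merge_fixups from patches/fixups/.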
def merge_topic_branches():
r = Rebaser()
topic_dict = r.topics
topic_list = list(rebase_config.merge_order_override)  # copy so the config list isn't mutated
for from_config in rebase_config.merge_order_override:
if from_config not in topic_dict:
print(
"merge_order_override contains topics that aren't in line with topiclist"
)
sys.exit()
for topic in topic_dict:
if topic not in topic_list:
topic_list.append(topic)
topic_branches = [
branch_name("kernelupstream", rebase_target, topic)
for topic in topic_list
]
merged_branch = branch_name("kernelupstream", rebase_target, None)
print("checking out to ", rebase_target)
checkout("kernel-upstream", rebase_target)
try:
print("creating head", merged_branch)
create_head("kernel-upstream", merged_branch)
except OSError as err:
print(err)
print("Branch already exists?")
return
print("checking out to ", merged_branch)
checkout("kernel-upstream", merged_branch)
for topic_branch in topic_branches:
print("Merging", topic_branch)
try:
with sh.pushd("kernel-upstream"):
sh.git("merge", "--no-edit", topic_branch)
continue
except sh.ErrorReturnCode_1 as error:
if "not something we can merge" in str(error):
print(
"topic has no corresponding branch ("
+ topic_branch
+ "), skipping"
)
continue
print("Conflict found")
if is_resolved("kernel-upstream"):
print("Resolved automatically")
with sh.pushd("kernel-upstream"):
sh.git("-c", "core.editor=/bin/true", "merge", "--continue")
else:
print("Verify automatic resolution or resolve manually")
print("Enter [s]top to exit or c[ontinue] to proceed")
cmd = ""
while cmd not in ["continue", "stop", "s", "c"]:
cmd = input()
if cmd in ["stop", "s"]:
print("Exiting")
return
for fu in rebase_config.merge_fixups:
print("Applying fixup", fu)
try:
patch = "patches/fixups/{}.patch".format(fu)
patch = os.getcwd() + "/" + patch
apply_patch("kernel-upstream", patch, "[merge]")
except sh.ErrorReturnCode_128:
print("Conflict found")
with sh.pushd("kernel-upstream"):
sh.git("am", "--abort")
call_hook("[nosha]", "post_drop")
except Exception as err: # pylint: disable=broad-except
print("Uknown error occured:")
print(err)
print("Enter [s]top to exit or c[ontinue] to proceed")
# The script only performs basic setup by itself. Specific actions
# are done via an interactive Python shell.
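# Typical interactive session (illustrative; "arch" is a made-up topic name):
#   r = Rebaser()
#   r.rebase_one("arch", is_triage=True)
#   triage()                  # rebase + build-verify every topic
#   merge_topic_branches()
#   normalize()               # regenerate configs on the merged branch
#   load_and_notify()         # mail out the pickled triage stats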
lc = LoggingConsole(local=dict(globals(), **locals()))
lc.interact()