blob: 2994fd3da061f99d1cf8f3987be73fd16870fbed [file] [log] [blame]
# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Git utility."""
from __future__ import print_function
import bisect
import logging
import os
import re
import shutil
import stat
import subprocess
import tempfile
import time
from bisect_kit import cache_util
from bisect_kit import cli
from bisect_kit import util
# Module-level logger, named after this module per the logging convention.
logger = logging.getLogger(__name__)
# Length of a full, non-abbreviated git commit hash (SHA-1, hex digits).
GIT_FULL_COMMIT_ID_LENGTH = 40

# Minimal acceptable length of git commit id.
#
# For chromium, hash collision rate over number of digits:
# - 6 digits: 4.85%
# - 7 digits: 0.32%
# - 8 digits: 0.01%
# As foolproof check, 7 digits should be enough.
GIT_MIN_COMMIT_ID_LENGTH = 7


def is_git_rev(s):
  """Tells whether `s` looks like a git commit hash.

  Shortened hashes are accepted as long as they carry at least
  GIT_MIN_COMMIT_ID_LENGTH (7) lowercase hex digits.
  """
  length_ok = GIT_MIN_COMMIT_ID_LENGTH <= len(s) <= GIT_FULL_COMMIT_ID_LENGTH
  return length_ok and bool(re.match(r'^[0-9a-f]+$', s))
def argtype_git_rev(s):
  """Argument validator: accepts a (possibly shortened) git hash.

  Raises cli.ArgTypeError with a usage example when `s` is not hash-like.
  """
  if is_git_rev(s):
    return s
  raise cli.ArgTypeError(
      'should be git hash, at least %d digits' % GIT_MIN_COMMIT_ID_LENGTH,
      '1a2b3c4d5e')
def is_git_root(path):
  """Returns True if `path` is the top level of a git work tree."""
  # A working tree root is recognized by its '.git' entry (dir or file).
  git_marker = os.path.join(path, '.git')
  return os.path.exists(git_marker)
def is_git_bare_dir(path):
  """Returns True if `path` is inside a .git folder or a bare git checkout."""
  if not os.path.isdir(path):
    return False
  try:
    output = util.check_output(
        'git', 'rev-parse', '--is-bare-repository', cwd=path)
  except subprocess.CalledProcessError:
    # Not a git directory at all.
    return False
  return output == 'true\n'
def clone(git_repo, repo_url, reference=None):
  """Clones `repo_url` into `git_repo`.

  Args:
    git_repo: destination path; created (with parents) if missing.
    repo_url: git repository URL to clone.
    reference: optional local repo to borrow objects from (--reference).
  """
  if not os.path.exists(git_repo):
    os.makedirs(git_repo)
  cmd = ['git', 'clone', repo_url, '.']
  if reference:
    cmd.extend(['--reference', reference])
  util.check_call(*cmd, cwd=git_repo)
def checkout_version(git_repo, rev):
  """Checks out the given revision (quietly, discarding local changes).

  Args:
    git_repo: path of git repo.
    rev: git commit revision to checkout.
  """
  util.check_call('git', 'checkout', '-q', '-f', rev, cwd=git_repo)
def init(git_repo):
  """Runs 'git init' in `git_repo`.

  The directory (and its parents) will be created if they don't exist.

  Args:
    git_repo: path of git repo.
  """
  if not os.path.exists(git_repo):
    os.makedirs(git_repo)
  util.check_call('git', 'init', '-q', cwd=git_repo)
def commit_file(git_repo,
                path,
                message,
                content,
                commit_time=None,
                author_time=None):
  """Writes `content` to `path` and commits it.

  Args:
    git_repo: path of git repo
    path: file path, relative to git_repo
    message: commit message
    content: file content
    commit_time: commit timestamp (git's own clock if not given)
    author_time: author timestamp (defaults to commit_time)
  """
  if author_time is None:
    author_time = commit_time

  # Timestamps are handed to git through its environment variables.
  env = {}
  if author_time:
    env['GIT_AUTHOR_DATE'] = str(author_time)
  if commit_time:
    env['GIT_COMMITTER_DATE'] = str(commit_time)

  full_path = os.path.join(git_repo, path)
  parent_dir = os.path.dirname(full_path)
  if not os.path.exists(parent_dir):
    os.makedirs(parent_dir)
  with open(full_path, 'w') as f:
    f.write(content)

  util.check_call('git', 'add', path, cwd=git_repo)
  util.check_call(
      'git', 'commit', '-q', '-m', message, path, cwd=git_repo, env=env)
def config(git_repo, *args):
  """Thin wrapper of 'git config'.

  Args:
    git_repo: path of git repo.
    args: parameters passed straight through to 'git config'
  """
  util.check_call('git', 'config', *args, cwd=git_repo)
def fetch(git_repo, *args):
  """Wrapper of 'git fetch' that retries transient server errors.

  Only HTTP 5xx responses are retried (with exponential backoff, capped at
  60s); any other failure is raised immediately. Gives up after 5 attempts.

  Args:
    git_repo: path of git repo.
    args: parameters passed to 'git fetch'
  """
  attempt = 0
  while True:
    attempt += 1
    captured = []
    try:
      util.check_call(
          'git',
          'fetch',
          *args,
          cwd=git_repo,
          stderr_callback=captured.append)
      return
    except subprocess.CalledProcessError:
      if attempt >= 5:
        logger.error('git fetch failed too much times')
        raise
      stderr = ''.join(captured)
      # only retry 5xx internal server error
      if 'The requested URL returned error: 5' not in stderr:
        raise
      delay = min(60, 10 * 2**attempt)
      logger.warning('git fetch failed, will retry %s seconds later', delay)
      time.sleep(delay)
def _adjust_timestamp_increasingly(commits):
"""Adjust commit timestamps.
After adjust, the timestamps are increasing.
Args:
commits: list of (timestamp, commit hash)
Returns:
(adjusted count, list of (timestamp, commit hash))
"""
result = []
adjusted = 0
last_timestamp = -1
for timestamp, git_rev in commits:
if timestamp < last_timestamp:
adjusted += 1
timestamp = last_timestamp
else:
last_timestamp = timestamp
result.append((timestamp, git_rev))
return adjusted, result
class FastLookupFailed(Exception):
  """Raised when the cache cannot answer this query.

  The caller should fall back to the original (slow) operation.
  """
class FastLookupEntry:
  """Cached commits of one branch within a given time period.

  With this class, commits can be looked up quickly either by commit hash
  or by timestamp.
  """

  def __init__(self, git_repo, branch):
    self.git_repo = git_repo
    self.branch = branch
    self.optimized_period = None  # (start, end) period covered by the cache
    self.cached = []  # list of (timestamp, rev); timestamps non-decreasing
    self.commit_to_index = {}  # rev -> index into self.cached

  def optimize(self, period):
    assert period[0] <= period[1]
    already_covered = (
        self.optimized_period and self.optimized_period[0] <= period[0] and
        period[1] <= self.optimized_period[1])
    if already_covered:
      return

    self.cached = get_revlist_by_period(self.git_repo, self.branch, period)
    self.optimized_period = period

    # Adjust timestamps, so we can do binary search by timestamp.
    _adjusted, self.cached = _adjust_timestamp_increasingly(self.cached)

    self.commit_to_index = {
        rev: i for i, (_timestamp, rev) in enumerate(self.cached)
    }

  def get_rev_by_time(self, timestamp):
    if not self.optimized_period[0] <= timestamp <= self.optimized_period[1]:
      raise FastLookupFailed

    # Note that, the return value might be different as "git rev-list" if the
    # actual commit timestamps are not fully increasing.
    idx = bisect.bisect_right(self.cached, (timestamp, ''))
    if idx == 0 and timestamp < self.cached[0][0]:
      return None
    if idx == len(self.cached) or self.cached[idx][0] != timestamp:
      idx -= 1
    return self.cached[idx][1]

  def is_containing_commit(self, rev):
    if rev in self.commit_to_index:
      return True
    raise FastLookupFailed
class FastLookup:
  """Collection of FastLookupEntry, keyed by git repo path and branch."""

  def __init__(self):
    self.entries = {}  # git_repo -> {branch -> FastLookupEntry}
    self.target_period = None  # (start, end) period queries should cover

  def optimize(self, period):
    self.target_period = period

  def disable(self):
    self.target_period = None
    self.entries = {}

  def get_rev_by_time(self, git_repo, timestamp, branch):
    if not self.target_period:
      raise FastLookupFailed
    if not self.target_period[0] <= timestamp <= self.target_period[1]:
      raise FastLookupFailed

    # Create the per-repo/per-branch entry lazily.
    per_repo = self.entries.setdefault(git_repo, {})
    if branch not in per_repo:
      per_repo[branch] = FastLookupEntry(git_repo, branch)
    entry = per_repo[branch]
    entry.optimize(self.target_period)
    return entry.get_rev_by_time(timestamp)

  def is_containing_commit(self, git_repo, rev):
    # This function is optimized only after get_rev_by_time() is invoked.
    if git_repo not in self.entries:
      raise FastLookupFailed
    for entry in self.entries[git_repo].values():
      try:
        return entry.is_containing_commit(rev)
      except FastLookupFailed:
        pass
    raise FastLookupFailed


# Module-level singleton used by the free functions below.
fast_lookup = FastLookup()
@cache_util.Cache.default_disabled
def is_containing_commit(git_repo, rev):
  """Determines whether the given commit exists in the repo.

  Args:
    git_repo: path of git repo.
    rev: git commit revision in query.

  Returns:
    True if rev is inside given git repo. If git_repo is not a git folder,
    returns False as well.
  """
  try:
    return fast_lookup.is_containing_commit(git_repo, rev)
  except FastLookupFailed:
    pass

  # Slow path: ask git directly for the object type.
  try:
    object_type = util.check_output('git', 'cat-file', '-t', rev, cwd=git_repo)
  except (subprocess.CalledProcessError, OSError):
    return False
  return object_type == 'commit\n'
@cache_util.Cache.default_disabled
def is_ancestor_commit(git_repo, old, new):
  """Determines whether `old` commit is an ancestor of `new` commit.

  Args:
    git_repo: path of git repo.
    old: the ancestor commit.
    new: the descendant commit.

  Returns:
    True only if `old` is the ancestor of `new`. One commit is not considered
    as ancestor of itself.
  """
  # --ancestry-path restricts output to commits on a path between old and new,
  # so any output at all proves the ancestry relation.
  output = util.check_output(
      'git',
      'rev-list',
      '--ancestry-path',
      '-1',
      '%s..%s' % (old, new),
      cwd=git_repo)
  return output != ''
def _parse_commit_object(s):
meta = {}
header, meta['message'] = s.split('\n\n', 1)
for line in header.splitlines():
m = re.match(r'^tree (\w+)', line)
if m:
meta['tree'] = m.group(1)
continue
m = re.match(r'^parent (\w+)', line)
if m:
meta['parent'] = line.split()[1:]
continue
m = re.match(r'^(author|committer) (.*) (\d+) (\S+)$', line)
if m:
meta[m.group(1)] = m.group(2)
meta['%s_time' % m.group(1)] = int(m.group(3))
continue
return meta
@cache_util.Cache.default_disabled
def get_commit_metadata(git_repo, rev):
  """Gets metadata of given commit.

  Args:
    git_repo: path of git repo.
    rev: git commit revision in query.

  Returns:
    dict of metadata, including (if available):
      tree: hash of git tree object
      parent: list of parent commits; this field is unavailable for the very
          first commit of git repo.
      author: name and email of author
      author_time: author timestamp (without timezone information)
      committer: name and email of committer
      committer_time: commit timestamp (without timezone information)
      message: commit message text
  """
  raw = util.check_output(
      'git', 'cat-file', '-p', rev, cwd=git_repo, log_stdout=False)
  return _parse_commit_object(raw)
def get_batch_commit_metadata(git_repo, revs):
  """Gets metadata of many commits with one "git cat-file --batch" call.

  Args:
    git_repo: path of git repo.
    revs: list of git object names (commit hashes in practice).

  Returns:
    dict mapping each queried object name to its metadata dict (see
    _parse_commit_object), or to None if git reports the object is missing.
  """
  query = '\n'.join(revs)
  logger.debug('get_batch_commit_metadata %r', query)
  with tempfile.NamedTemporaryFile('w+t') as f:
    f.write(query)
    f.flush()
    # util.check_output doesn't support stdin, so use shell
    # redirect instead.
    # binary=True because we need to count size in bytes later.
    data = util.check_output(
        'sh',
        '-c',
        'git cat-file --batch < ' + f.name,
        cwd=git_repo,
        binary=True)

  # Batch output format: a header line "<name> <type> <size>" per object
  # (or "<name> missing"), followed by <size> bytes of content and a newline.
  metas = {}
  while data:
    first_line, data = data.split(b'\n', 1)
    m = re.match(r'^(\w+) (\w+)(?: (\d+))?', first_line.decode('utf8'))
    assert m, repr(first_line)
    object_name, object_type = m.group(1, 2)
    if not m.group(3):
      # No size field means the object was not found ("<name> missing").
      metas[object_name] = None
      continue
    assert object_type == 'commit', 'unsupported object type: %s' % object_type
    object_size = int(m.group(3))
    # Each object's content is terminated by a single newline byte.
    assert data[object_size] == ord(b'\n'), repr(data[object_size])
    obj, data = data[:object_size], data[object_size + 1:]
    metas[object_name] = _parse_commit_object(obj.decode('utf8'))
  return metas
def get_revlist(git_repo, old, new):
  """Enumerates git commits between two revisions (inclusive).

  Args:
    git_repo: path of git repo.
    old: git commit revision.
    new: git commit revision.

  Returns:
    list of git revisions, oldest first. The list contains the input
    revisions, old and new.
  """
  assert old
  assert new
  # "old^..new" includes `old` itself; --reverse yields oldest first.
  return util.check_output(
      'git', 'rev-list', '--reverse', '%s^..%s' % (old, new),
      cwd=git_repo).splitlines()
def get_commit_log(git_repo, rev):
  """Gets the git commit log message.

  Args:
    git_repo: path of git repo.
    rev: git commit revision.

  Returns:
    commit log message (raw body, format %B)
  """
  return util.check_output(
      'git', 'log', '-1', '--format=%B', rev, cwd=git_repo)
def get_commit_hash(git_repo, rev):
  """Resolves `rev` to a full git commit hash.

  Args:
    git_repo: path of git repo.
    rev: could be git tag, branch, or (shortened) commit hash

  Returns:
    full git commit hash

  Raises:
    ValueError: `rev` is not unique or doesn't exist
  """
  try:
    # Use '^{commit}' to restrict search only commits.
    # Use '--' to avoid ambiguity, like matching rev against path name.
    output = util.check_output(
        'git', 'rev-parse', '%s^{commit}' % rev, '--', cwd=git_repo)
  except subprocess.CalledProcessError:
    # Do not use 'git rev-parse --disambiguate' to determine uniqueness
    # because it searches objects other than commits as well.
    raise ValueError('%s is not unique or does not exist' % rev)
  # Drop the trailing '--' separator echoed by rev-parse plus newlines.
  git_rev = output.rstrip('-\n')
  assert is_git_rev(git_rev)
  return git_rev
def get_commit_time(git_repo, rev, path=None):
  """Gets the git commit timestamp.

  Args:
    git_repo: path of git repo
    rev: git commit id, branch name, tag name, or other git object
    path: path, relative to git_repo; restricts history to that path

  Returns:
    timestamp (int)
  """
  cmd = ['git', 'log', '-1', '--format=%ct', rev]
  if path:
    cmd.extend(['--', path])
  return int(util.check_output(*cmd, cwd=git_repo))
def is_symbolic_link(git_repo, rev, path):
  """Checks whether a file is a symbolic link at the given revision.

  Args:
    git_repo: path of git repo
    rev: git commit id
    path: file path

  Returns:
    True if the specified file is a symbolic link in repo.

  Raises:
    ValueError if not found
  """
  # ls-tree line format:
  # 120000 blob 8735a8c1dd96ede39a21d983d5c96792fd15c1a5 default.xml
  # TODO(kcwu): handle escaped path with special characters
  fields = util.check_output(
      'git', 'ls-tree', rev, '--full-name', path, cwd=git_repo).split()
  if len(fields) >= 4 and fields[3] == path:
    # First field is the octal file mode; S_ISLNK recognizes mode 120000.
    return stat.S_ISLNK(int(fields[0], 8))
  raise ValueError('file %s is not found in repo:%s rev:%s' %
                   (path, git_repo, rev))
@cache_util.Cache.default_disabled
def get_file_from_revision(git_repo, rev, path):
  """Gets file content of given revision, resolving symbolic links.

  Args:
    git_repo: path of git repo
    rev: git commit id
    path: file path

  Returns:
    file content (str)
  """
  content = util.check_output(
      'git', 'show', '%s:%s' % (rev, path), cwd=git_repo, log_stdout=False)

  # It might be a symbolic link.
  # In extreme case, it's possible that filenames contain special characters,
  # like newlines. In practice, it should be safe to assume no such cases and
  # reduce disk i/o.
  if '\n' not in content and is_symbolic_link(git_repo, rev, path):
    # A symlink blob's content is its target path; resolve recursively.
    return get_file_from_revision(git_repo, rev, content)
  return content
def list_dir_from_revision(git_repo, rev, path):
  """Lists entries of a directory at the given revision.

  Args:
    git_repo: path of git repo
    rev: git commit id
    path: directory path, relative to git root

  Returns:
    list of names

  Raises:
    subprocess.CalledProcessError: if `path` doesn't exist in `rev`
  """
  output = util.check_output(
      'git',
      'ls-tree',
      '--name-only',
      '%s:%s' % (rev, path),
      cwd=git_repo,
      log_stdout=False)
  return output.splitlines()
def get_rev_by_time(git_repo, timestamp, branch, path=None):
  """Queries the commit at a given time.

  Args:
    git_repo: path of git repo.
    timestamp: timestamp
    branch: only query (first-parent) history of the `branch`. If branch=None,
        it means 'HEAD' (current branch, usually).
    path: only query history of path, relative to git_repo

  Returns:
    git commit hash. None if path didn't exist at the given time.
  """
  branch = branch or 'HEAD'
  if not path:
    # The fast-lookup cache only covers whole-repo queries.
    try:
      return fast_lookup.get_rev_by_time(git_repo, timestamp, branch)
    except FastLookupFailed:
      pass

  cmd = [
      'git',
      'rev-list',
      '--first-parent',
      '-1',
      '--before',
      str(timestamp),
      branch,
  ]
  if path:
    cmd.extend(['--', path])

  result = util.check_output(*cmd, cwd=git_repo).strip()
  return result or None
def get_revlist_by_period(git_repo, branch, period):
  """Lists commits of `branch` inside `period`, plus the one just before it.

  Args:
    git_repo: path of git repo.
    branch: branch or ref to query.
    period: (start, end) timestamp pair, inclusive.

  Returns:
    list of (int timestamp, rev), oldest first.
  """
  # Find the last commit before period[0].
  text = util.check_output(
      'git',
      'rev-list',
      '--timestamp',
      '-1',
      '--before',
      str(period[0] - 1),
      branch,
      cwd=git_repo)

  # Find commits in the period.
  text += util.check_output(
      'git',
      'rev-list',
      '--timestamp',
      '--reverse',
      '--after',
      str(period[0]),
      '--before',
      str(period[1]),
      branch,
      cwd=git_repo)

  # Each line is "<timestamp> <hash>".
  pairs = [line.split() for line in text.splitlines()]
  return [(int(timestamp), commit) for timestamp, commit in pairs]
def reset_hard(git_repo):
  """Restores modified and deleted files via "git reset --hard".

  Args:
    git_repo: path of git repo.
  """
  util.check_call('git', 'reset', '--hard', cwd=git_repo)
def list_untracked(git_repo, excludes=None):
  """Lists untracked files and directories.

  Args:
    git_repo: path of git repo.
    excludes: files and/or directories to ignore, relative to git_repo

  Returns:
    list of paths, relative to git_repo
  """
  exclude_flags = []
  for exclude in excludes or []:
    assert not os.path.isabs(exclude), 'should be relative'
    # Anchor the pattern at the repo root and escape regex-like characters.
    exclude_flags += ['--exclude', '/' + re.escape(exclude)]

  output = util.check_output(
      'git',
      'ls-files',
      '--others',
      '--exclude-standard',
      *exclude_flags,
      cwd=git_repo)
  # Remove the trailing slash, which means directory.
  return [line.rstrip('/') for line in output.splitlines()]
def distclean(git_repo, excludes=None):
  """Cleans up the git repo directory.

  Restores modified and deleted files. Deletes untracked files.

  Args:
    git_repo: path of git repo.
    excludes: files and/or directories to ignore, relative to git_repo
  """
  reset_hard(git_repo)

  # Delete untracked files.
  for untracked in list_untracked(git_repo, excludes=excludes):
    target = os.path.join(git_repo, untracked)
    logger.debug('delete untracked: %s', target)
    # Symlinks must be unlinked, not rmtree'd, even when pointing at a dir.
    if os.path.islink(target) or not os.path.isdir(target):
      os.unlink(target)
    else:
      shutil.rmtree(target)
def get_history(git_repo,
                path=None,
                branch=None,
                after=None,
                before=None,
                padding_begin=False,
                padding_end=False,
                with_subject=False):
  """Get commit history of given path.

  `after` and `before` could be outside of lifetime of `path`. `padding` is
  used to control what to return for such cases.

  Args:
    git_repo: path of git repo.
    path: path to query, relative to git_repo
    branch: branch name or ref name
    after: limit history after given time (inclusive)
    before: limit history before given time (inclusive)
    padding_begin: If True, pads returned result with dummy record at exact
        'after' time, if 'path' existed at that time.
    padding_end: If True, pads returned result with dummy record at exact
        'before' time, if 'path' existed at that time.
    with_subject: If True, return commit subject together

  Returns:
    List of (timestamp, git hash, subject); or (timestamp, git hash) depends
    on with_subject flag. They are all events when `path` was added, removed,
    modified, and start and end time if `padding` is true. If `padding` and
    `with_subject` are both true, 'dummy subject' will be returned as padding
    history's subject.

    For each pair, at `timestamp`, the repo state is `git hash`. In other
    words, `timestamp` is not necessary the commit time of `git hash` for the
    padded entries.
  """
  log_format = '%ct %H' if not with_subject else '%ct %H %s'
  cmd = ['git', 'log', '--reverse', '--first-parent', '--format=' + log_format]
  if after:
    cmd += ['--after', str(after)]
  if before:
    cmd += ['--before', str(before)]
  if branch:
    # `branch` must be a ref name, not a raw commit hash; a hash here would
    # silently change the meaning of the query.
    assert not is_git_rev(branch)
    cmd += [branch]
  if path:
    # '--' is necessary otherwise if `path` is removed in current revision, git
    # will complain it's an ambiguous argument which may be path or something
    # else (like git branch name, tag name, etc.)
    cmd += ['--', path]
  result = []
  for line in util.check_output(*cmd, cwd=git_repo).splitlines():
    # array = [timestamp, git_rev, subject] or [timestamp, git_rev]
    array = line.split(' ', 2)
    array[0] = int(array[0])
    result.append(tuple(array))
  if padding_begin or padding_end:
    # Template record for padded entries; fields 0..1 are filled in below.
    history = [0, '']
    if with_subject:
      history.append('dummy subject')
  if padding_end:
    assert before, 'padding_end=True make no sense if before=None'
    # Pad only if `path` existed at time `before`.
    if get_rev_by_time(git_repo, before, branch, path=path):
      before = int(before)
      if not result or result[-1][0] != before:
        # Use the whole-repo revision at `before` (not the last change of
        # `path`) so the padded entry reflects the repo state at that time.
        git_rev = get_rev_by_time(git_repo, before, branch)
        assert git_rev
        history[0:2] = [before, git_rev]
        result.append(tuple(history))
  if padding_begin:
    assert after, 'padding_begin=True make no sense if after=None'
    # Pad only if `path` existed at time `after`.
    if get_rev_by_time(git_repo, after, branch, path=path):
      after = int(after)
      if not result or result[0][0] != after:
        git_rev = get_rev_by_time(git_repo, after, branch)
        assert git_rev
        history[0:2] = [after, git_rev]
        result.insert(0, tuple(history))
  return result
def get_history_recursively(git_repo,
                            path,
                            after,
                            before,
                            parser_callback,
                            padding_end=True,
                            branch=None):
  """Get commit history of given path and its dependencies.

  In comparison to get_history(), get_history_recursively also takes
  dependencies into consideration. For example, if file A referenced file B,
  get_history_recursively(A) will return commits of B in addition to A. This
  applies recursively, so commits of C will be included if file B referenced
  file C, and so on.

  This function is file type neutral. `parser_callback(filename, content)` will
  be invoked to parse file content and should return list of filename of
  dependencies. If `parser_callback` returns None (usually syntax error), the
  commit is omitted.

  Args:
    git_repo: path of git repo
    path: path to query, relative to git_repo
    after: limit history after given time (inclusive)
    before: limit history before given time (inclusive)
    parser_callback: callback to parse file content. See above comment.
    padding_end: If True, pads returned result with dummy record at exact
        'before' time, if 'path' existed at that time.
    branch: branch name or ref name

  Returns:
    list of (commit timestamp, git hash)
  """
  history = get_history(
      git_repo,
      path,
      after=after,
      before=before,
      padding_begin=True,
      branch=branch)

  # Collect include information of each commit.
  # includes maps dependency name -> set of revs of `path` referencing it.
  includes = {}
  for commit_time, git_rev in history:
    content = get_file_from_revision(git_repo, git_rev, path)
    parse_result = parser_callback(path, content)
    if parse_result is None:
      # Unparsable revision (e.g. syntax error); skip this commit.
      continue
    for include_name in parse_result:
      if include_name not in includes:
        includes[include_name] = set()
      includes[include_name].add(git_rev)

  # Analyze the start time and end time of each include.
  # Scan history in order and close an interval whenever the dependency
  # stops being referenced; a dependency may have several intervals.
  dependencies = []
  for include in includes:
    appeared = None
    for commit_time, git_rev in history:
      if git_rev in includes[include]:
        if not appeared:
          appeared = commit_time
      else:
        if appeared:
          # dependency file exists in time range [appeared, commit_time)
          dependencies.append((include, appeared, commit_time - 1))
          appeared = None
    if appeared is not None:
      # Still referenced at the end of the examined range.
      dependencies.append((include, appeared, before))

  # Recursion and merge.
  result = list(history)
  for include, appeared, disappeared in dependencies:
    # padding_end=False: only the outermost call pads the final record.
    result += get_history_recursively(
        git_repo,
        include,
        appeared,
        disappeared,
        parser_callback,
        padding_end=False,
        branch=branch)

  # Sort and padding.
  result.sort(key=lambda x: x[0])
  if padding_end:
    # Repeat the last known state at time `before`.
    pad = (before,)
    pad += result[-1][1:]
    result.append(pad)

  # Dedup.
  result2 = []
  for x in result:
    if result2 and result2[-1] == x:
      continue
    result2.append(x)
  return result2
def get_branches(git_repo, all_branches=True, commit=None, remote=False):
  """Gets branches of a repository.

  Args:
    git_repo: path of git repo
    all_branches: include remote branches if set to True
    commit: return branches containing this commit if is not None
    remote: only remote tracking branches

  Returns:
    list of branch names (full ref names)
  """
  cmd = ['git', 'branch', '--format=%(refname)']
  if all_branches:
    cmd.append('-a')
  if commit:
    cmd.extend(['--contains', commit])
  if remote:
    cmd.append('--remote')
  return [
      line.strip()
      for line in util.check_output(*cmd, cwd=git_repo).splitlines()
  ]
def list_commits_between_commits(git_repo, old, new):
  """Gets all commits between (old, new].

  Args:
    git_repo: path of git repo.
    old: old commit hash (exclusive)
    new: new commit hash (inclusive)

  Returns:
    list of (timestamp, rev), oldest first; timestamps are adjusted to be
    non-decreasing.
  """
  assert old and new
  if old == new:
    return []

  assert is_ancestor_commit(git_repo, old, new)
  # --first-parent is necessary for Android, see following link for more
  # discussion.
  # https://docs.google.com/document/d/1c8qiq14_ObRRjLT62sk9r5V5cyCGHX66dLYab4MVnks/edit#heading=h.n3i6mt2n6xuu
  output = util.check_output(
      'git',
      'rev-list',
      '--timestamp',
      '--reverse',
      '--first-parent',
      '%s..%s' % (old, new),
      cwd=git_repo)
  commits = []
  for line in output.splitlines():
    timestamp, rev = line.split()
    commits.append((int(timestamp), rev))

  # bisect-kit has a fundamental assumption that commit timestamps are
  # increasing because we sort and bisect the commits by timestamp across git
  # repos. If not increasing, we have to adjust the timestamp as workaround.
  # This might lead to bad bisect result, however the bad probability is low in
  # practice since most machines' clocks are good enough.
  adjusted, commits = _adjust_timestamp_increasingly(commits)
  if adjusted != 0:
    logger.warning('Commit timestamps are not increasing')
    logger.warning('%d timestamps adjusted', adjusted)
  return commits