Merge commits up to v2.11.1

* tag 'v2.11.1':
  launcher: bump version for new release
  Fix bug in git trace2 event Write() function when no config present.
  drop pyversion & is_python3 checking
  strip python2-only coding:utf-8 & print_function settings

Change-Id: Ib769ae4b5f827166bb09c2635d46cb95237eb73f
diff --git a/error.py b/error.py
index 225eb59..9d92e47 100644
--- a/error.py
+++ b/error.py
@@ -133,3 +133,7 @@
 
   The common case is that the file wasn't present when we tried to run it.
   """
+
+class CacheApplyError(Exception):
+  """Thrown when errors happen in 'repo sync' with '--cache-dir' option.
+  """
diff --git a/project.py b/project.py
index 6c6534d..370328d 100644
--- a/project.py
+++ b/project.py
@@ -30,8 +30,9 @@
 from color import Coloring
 from git_command import GitCommand, git_require
 from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \
-    ID_RE
+    ID_RE, RefSpec
 from error import GitError, UploadError, DownloadError
+from error import CacheApplyError
 from error import ManifestInvalidRevisionError, ManifestInvalidPathError
 from error import NoManifestException
 import platform_utils
@@ -1037,6 +1038,68 @@
       _error("Cannot extract archive %s: %s", tarpath, str(e))
     return False
 
+  def CachePopulate(self, cache_dir, url):
+    """Populate cache in the cache_dir.
+
+    Args:
+      cache_dir: Directory to cache git files from Google Storage.
+      url: Git url of current repository.
+
+    Raises:
+      CacheApplyError if it fails to populate the git cache.
+    """
+    cmd = ['cache', 'populate', '--ignore_locks', '-v',
+           '--cache-dir', cache_dir, url]
+
+    if GitCommand(self, cmd, cwd=cache_dir).Wait() != 0:
+      raise CacheApplyError('Failed to populate cache. cache_dir: %s '
+                            'url: %s' % (cache_dir, url))
+
+  def CacheExists(self, cache_dir, url):
+    """Check the existence of the cache files.
+
+    Args:
+      cache_dir: Directory to cache git files.
+      url: Git url of current repository.
+
+    Raises:
+      CacheApplyError if the cache files do not exist.
+    """
+    cmd = ['cache', 'exists', '--quiet', '--cache-dir', cache_dir, url]
+
+    exist = GitCommand(self, cmd, cwd=self.gitdir, capture_stdout=True)
+    if exist.Wait() != 0:
+      raise CacheApplyError('Failed to execute git cache exists cmd. '
+                            'cache_dir: %s url: %s' % (cache_dir, url))
+
+    if not exist.stdout or not exist.stdout.strip():
+      raise CacheApplyError('Failed to find cache. cache_dir: %s '
+                            'url: %s' % (cache_dir, url))
+    return exist.stdout.strip()
+
+  def CacheApply(self, cache_dir):
+    """Apply git cache files populated from Google Storage buckets.
+
+    Args:
+      cache_dir: Directory to cache git files.
+
+    Raises:
+      CacheApplyError if it fails to apply git caches.
+    """
+    remote = self.GetRemote(self.remote.name)
+
+    self.CachePopulate(cache_dir, remote.url)
+
+    mirror_dir = self.CacheExists(cache_dir, remote.url)
+
+    refspec = RefSpec(True, 'refs/heads/*',
+                      'refs/remotes/%s/*' % remote.name)
+
+    fetch_cache_cmd = ['fetch', mirror_dir, str(refspec)]
+    if GitCommand(self, fetch_cache_cmd, bare=True).Wait() != 0:
+      raise CacheApplyError('Failed to fetch refs %s from %s' %
+                            (str(refspec), mirror_dir))
+
   def Sync_NetworkHalf(self,
                        quiet=False,
                        verbose=False,
@@ -1050,7 +1113,8 @@
                        retry_fetches=0,
                        prune=False,
                        submodules=False,
-                       clone_filter=None):
+                       clone_filter=None,
+                       cache_dir=None):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
@@ -1100,7 +1164,22 @@
     else:
       alt_dir = None
 
+    applied_cache = False
+    # If cache_dir is provided and this is a new repository without an
+    # alternate object directory (alt_dir), bootstrap this project's git
+    # repository from the git cache files.
+    if cache_dir is not None and is_new and alt_dir is None:
+      try:
+        self.CacheApply(cache_dir)
+        applied_cache = True
+        is_new = False
+      except CacheApplyError as e:
+        _error('Could not apply git cache: %s', e)
+        _error('Please check your Google Storage (GS) credentials.')
+        _error('Please check that the cache files exist in GS.')
+
     if (clone_bundle
+            and not applied_cache
             and alt_dir is None
             and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)):
       is_new = False
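For reference, the git operations CacheApply drives are roughly the following;
this is a standalone sketch using subprocess in place of repo's GitCommand
wrapper, assuming depot_tools' 'git cache' command is on PATH and with
cache_dir, url, gitdir and remote_name as placeholder arguments:

    import subprocess

    def apply_cache(cache_dir, url, gitdir, remote_name='origin'):
      # 1. Populate (or refresh) the cache entry for this URL.
      subprocess.check_call(
          ['git', 'cache', 'populate', '--ignore_locks', '-v',
           '--cache-dir', cache_dir, url])

      # 2. Resolve the mirror directory backing this URL in the cache.
      mirror_dir = subprocess.check_output(
          ['git', 'cache', 'exists', '--quiet', '--cache-dir', cache_dir, url],
          text=True).strip()

      # 3. Fetch the cached branches into the project's remote-tracking refs;
      #    RefSpec(True, ...) in the diff renders as a forced refspec like
      #    '+refs/heads/*:refs/remotes/origin/*'.
      refspec = '+refs/heads/*:refs/remotes/%s/*' % remote_name
      subprocess.check_call(
          ['git', '--git-dir', gitdir, 'fetch', mirror_dir, refspec])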
diff --git a/repo b/repo
index 8f13015..0162f0e 100755
--- a/repo
+++ b/repo
@@ -141,7 +141,7 @@
 #
 REPO_URL = os.environ.get('REPO_URL', None)
 if not REPO_URL:
-  REPO_URL = 'https://gerrit.googlesource.com/git-repo'
+  REPO_URL = 'https://chromium.googlesource.com/external/repo'
 REPO_REV = os.environ.get('REPO_REV')
 if not REPO_REV:
   REPO_REV = 'stable'
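Note that the REPO_URL environment variable still takes precedence over the
new chromium default; only the fallback changed. A quick standalone
illustration of that resolution order:

    import os

    # Environment first, then the chromium mirror as the fallback default.
    REPO_URL = os.environ.get('REPO_URL', None)
    if not REPO_URL:
      REPO_URL = 'https://chromium.googlesource.com/external/repo'
    print(REPO_URL)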
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 3482946..034c07c 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import glob
 import http.cookiejar as cookielib
 import json
 import netrc
@@ -48,7 +49,7 @@
   multiprocessing = None
 
 import event_log
-from git_command import GIT, git_require
+from git_command import GIT, git_require, GitCommand
 from git_config import GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
 import gitc_utils
@@ -252,6 +253,11 @@
                  help='number of times to retry fetches on transient errors')
     p.add_option('--prune', dest='prune', action='store_true',
                  help='delete refs that no longer exist on the remote')
+    p.add_option('--cache-dir', dest='cache_dir', action='store',
+                 help='use git-cache to populate the project cache in this '
+                      'directory, and bootstrap the local repository from it '
+                      'when a cached copy of the project exists; applies to '
+                      'projects hosted on chromium and chrome-internal')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -329,6 +335,7 @@
             optimized_fetch=opt.optimized_fetch,
             retry_fetches=opt.retry_fetches,
             prune=opt.prune,
+            cache_dir=opt.cache_dir,
             clone_filter=clone_filter)
         self._fetch_times.Set(project, time.time() - start)
 
@@ -766,7 +773,8 @@
                                     optimized_fetch=opt.optimized_fetch,
                                     retry_fetches=opt.retry_fetches,
                                     submodules=self.manifest.HasSubmodules,
-                                    clone_filter=self.manifest.CloneFilter)
+                                    clone_filter=self.manifest.CloneFilter,
+                                    cache_dir=opt.cache_dir)
       finish = time.time()
       self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
                              start, finish, success)
@@ -812,6 +820,30 @@
     opt.quiet = opt.output_mode is False
     opt.verbose = opt.output_mode is True
 
+    cache_dir = opt.cache_dir
+    if cache_dir:
+      if self.manifest.IsMirror or self.manifest.IsArchive:
+        print('fatal: --cache-dir is not supported with mirror or archive '
+              'repositories.', file=sys.stderr)
+        sys.exit(1)
+
+      if os.path.exists(cache_dir):
+        if not os.path.isdir(cache_dir):
+          print('fatal: --cache-dir must be a directory', file=sys.stderr)
+          sys.exit(1)
+        else:
+          # Release any locks left behind in the cache_dir.
+          unlock_cmd = ['cache', 'unlock', '-vv', '--force', '--all',
+                        '--cache-dir', cache_dir]
+          if GitCommand(None, unlock_cmd).Wait() != 0:
+            raise Exception('Failed to unlock cache_dir %s' % cache_dir)
+
+          locks = glob.glob(os.path.join(cache_dir, '*.lock'))
+          if locks:
+            raise Exception('Found lock files %s after cache unlock.' % locks)
+      else:
+        os.makedirs(cache_dir)
+
     if opt.manifest_name:
       self.manifest.Override(opt.manifest_name)
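
The cache_dir pre-flight added above (create the cache directory, or unlock an
existing one and verify no lock files remain) can also be expressed as a small
standalone helper; a minimal sketch using subprocess instead of repo's
GitCommand wrapper, assuming depot_tools' 'git cache' is on PATH
(prepare_cache_dir is an illustrative name, not part of this change):

    import glob
    import os
    import subprocess

    def prepare_cache_dir(cache_dir):
      # Create the cache directory on first use; otherwise release stale locks
      # so later 'git cache populate' calls do not block on them.
      if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
        return
      if not os.path.isdir(cache_dir):
        raise ValueError('--cache-dir must be a directory: %s' % cache_dir)
      subprocess.check_call(
          ['git', 'cache', 'unlock', '-vv', '--force', '--all',
           '--cache-dir', cache_dir])
      locks = glob.glob(os.path.join(cache_dir, '*.lock'))
      if locks:
        raise RuntimeError('Found lock files %s after cache unlock.' % locks)

With the directory prepared, the new flag is simply passed through to
Sync_NetworkHalf, e.g. repo sync --cache-dir /path/to/git-cache.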