Merge tag 'v2.31' into stable

The required fix has been merged upstream and is now in the
upstream/stable history:
commit tools/repo@802cd0c6 "sync: Fix undefined variable in _FetchOne".

repo v2.31

* tag 'v2.31': (54 commits)
  tests: drop old unittest.main logic
  sync: fix manifest sync-j handling
  sync: Fix undefined variable in _FetchOne
  sync: finish marking REPO_AUTO_GC=1 as deprecated.
  test_manifest_config_properties: use assertEqual
  sync: cleanup output when not doing GC
  Revert "sync: save any cruft after calling git gc."
  sync: mark REPO_AUTO_GC=1 as deprecated.
  Fix ManifestProject.partial_clone_exclude property.
  release-process: update to use ./release/sign-tag.py
  wrapper.py: Replacing load_module() with exec_module()
  Fixed wrapper related warnings in tests
  test_capture: allow both Unix and Windows line sep
  test_bad_path_name_checks: allow Windows path sep
  test: Fix char encoding issues on windows
  test: fix path seperator errors on windows
  tox: Allow passing positional arguments
  trace: make test timeout after 2min
  tox: Make all tests timeout after 5min
  Update GH Action test-ci.yml dependencies
  ...

Change-Id: I693caeee6b0a4e7b35c9b9a8c859ee964a8bce4c
diff --git a/.github/workflows/test-ci.yml b/.github/workflows/test-ci.yml
index 1988185..0b9ef72 100644
--- a/.github/workflows/test-ci.yml
+++ b/.github/workflows/test-ci.yml
@@ -14,18 +14,18 @@
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
     runs-on: ${{ matrix.os }}
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        pip install tox tox-gh-actions
+        python -m pip install tox tox-gh-actions
     - name: Test with tox
       run: tox
diff --git a/docs/manifest-format.md b/docs/manifest-format.md
index a2ecd03..729c58d 100644
--- a/docs/manifest-format.md
+++ b/docs/manifest-format.md
@@ -105,6 +105,8 @@
   <!ATTLIST extend-project groups CDATA #IMPLIED>
   <!ATTLIST extend-project revision CDATA #IMPLIED>
   <!ATTLIST extend-project remote CDATA #IMPLIED>
+  <!ATTLIST extend-project dest-branch CDATA #IMPLIED>
+  <!ATTLIST extend-project upstream CDATA #IMPLIED>
 
   <!ELEMENT remove-project EMPTY>
   <!ATTLIST remove-project name  CDATA #REQUIRED>
@@ -425,6 +427,12 @@
 Attribute `remote`: If specified, overrides the remote of the original
 project.  Same syntax as the corresponding element of `project`.
 
+Attribute `dest-branch`: If specified, overrides the dest-branch of the original
+project.  Same syntax as the corresponding element of `project`.
+
+Attribute `upstream`: If specified, overrides the upstream of the original
+project.  Same syntax as the corresponding element of `project`.
+
 ### Element annotation
 
 Zero or more annotation elements may be specified as children of a
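As a minimal sketch (not repo's parser; the project name and values are made up), the new `dest-branch` and `upstream` attributes can be read off an `extend-project` element the same way manifest_xml.py does below, via getAttribute():

```python
import xml.dom.minidom

MANIFEST = """\
<manifest>
  <extend-project name="platform/build"
                  dest-branch="stable"
                  upstream="refs/heads/main" />
</manifest>
"""

doc = xml.dom.minidom.parseString(MANIFEST)
for node in doc.getElementsByTagName('extend-project'):
  # getAttribute() returns '' when an attribute is absent, so empty values
  # mean "no override", matching the `if dest_branch:` checks in manifest_xml.py.
  print(node.getAttribute('name'),
        node.getAttribute('dest-branch') or '(no dest-branch override)',
        node.getAttribute('upstream') or '(no upstream override)')
```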
diff --git a/docs/release-process.md b/docs/release-process.md
index f71a411..21ff1d8 100644
--- a/docs/release-process.md
+++ b/docs/release-process.md
@@ -143,23 +143,14 @@
 ***
 
 ```sh
-# Set the gpg key directory.
-$ export GNUPGHOME=~/.gnupg/repo/
-
-# Verify the listed key is “Repo Maintainer”.
-$ gpg -K
-
-# Pick whatever branch or commit you want to tag.
-$ r=main
-
 # Pick the new version.
-$ t=1.12.10
+$ t=v2.30
 
-# Create the signed tag.
-$ git tag -s v$t -u "Repo Maintainer <repo@android.kernel.org>" -m "repo $t" $r
+# Create a new signed tag with the current HEAD.
+$ ./release/sign-tag.py $t
 
 # Verify the signed tag.
-$ git show v$t
+$ git show $t
 ```
 
 ### Push the new release
@@ -168,11 +159,11 @@
 `stable` branch.
 
 Make sure you never push the tag itself to the stable branch!
-Only push the commit -- notice the use of `$t` and `$r` below.
+Only push the commit -- note the use of `^0` below.
 
 ```sh
-$ git push https://gerrit-review.googlesource.com/git-repo v$t
-$ git push https://gerrit-review.googlesource.com/git-repo $r:stable
+$ git push https://gerrit-review.googlesource.com/git-repo $t
+$ git push https://gerrit-review.googlesource.com/git-repo $t^0:stable
 ```
 
 If something goes horribly wrong, you can force push the previous version to the
@@ -195,7 +186,9 @@
 ```sh
 # If you haven't pushed to the stable branch yet, you can use origin/stable.
 # If you have pushed, change origin/stable to the previous release tag.
-$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
+# This assumes "main" is the current tagged release.  If it's newer, change it
+# to the current release tag too.
+$ git log --format="%h (%aN) %s" --no-merges origin/stable..main
 ```
 
 ## Project References
diff --git a/git_command.py b/git_command.py
index 19100fa..3a3bb34 100644
--- a/git_command.py
+++ b/git_command.py
@@ -16,6 +16,7 @@
 import os
 import sys
 import subprocess
+from typing import Any, Optional
 
 from error import GitError
 from git_refs import HEAD
@@ -157,6 +158,53 @@
   return False
 
 
+def _build_env(
+  _kwargs_only=(),
+  bare: Optional[bool] = False,
+  disable_editor: Optional[bool] = False,
+  ssh_proxy: Optional[Any] = None,
+  gitdir: Optional[str] = None,
+  objdir: Optional[str] = None
+):
+  """Constructs an env dict for command execution."""
+
+  assert _kwargs_only == (), '_build_env only accepts keyword arguments.'
+
+  env = GitCommand._GetBasicEnv()
+
+  if disable_editor:
+    env['GIT_EDITOR'] = ':'
+  if ssh_proxy:
+    env['REPO_SSH_SOCK'] = ssh_proxy.sock()
+    env['GIT_SSH'] = ssh_proxy.proxy
+    env['GIT_SSH_VARIANT'] = 'ssh'
+  if 'http_proxy' in env and 'darwin' == sys.platform:
+    s = "'http.proxy=%s'" % (env['http_proxy'],)
+    p = env.get('GIT_CONFIG_PARAMETERS')
+    if p is not None:
+      s = p + ' ' + s
+    env['GIT_CONFIG_PARAMETERS'] = s
+  if 'GIT_ALLOW_PROTOCOL' not in env:
+    env['GIT_ALLOW_PROTOCOL'] = (
+        'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
+  env['GIT_HTTP_USER_AGENT'] = user_agent.git
+
+  if objdir:
+    # Set to the place we want to save the objects.
+    env['GIT_OBJECT_DIRECTORY'] = objdir
+
+    alt_objects = os.path.join(gitdir, 'objects') if gitdir else None
+    if (alt_objects and
+        os.path.realpath(alt_objects) != os.path.realpath(objdir)):
+      # Allow git to search the original place in case of local or unique refs
+      # that git will attempt to resolve even if we aren't fetching them.
+      env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_objects
+  if bare and gitdir is not None:
+    env[GIT_DIR] = gitdir
+
+  return env
+
+
 class GitCommand(object):
   """Wrapper around a single git invocation."""
 
@@ -173,30 +221,13 @@
                cwd=None,
                gitdir=None,
                objdir=None):
-    env = self._GetBasicEnv()
-
-    if disable_editor:
-      env['GIT_EDITOR'] = ':'
-    if ssh_proxy:
-      env['REPO_SSH_SOCK'] = ssh_proxy.sock()
-      env['GIT_SSH'] = ssh_proxy.proxy
-      env['GIT_SSH_VARIANT'] = 'ssh'
-    if 'http_proxy' in env and 'darwin' == sys.platform:
-      s = "'http.proxy=%s'" % (env['http_proxy'],)
-      p = env.get('GIT_CONFIG_PARAMETERS')
-      if p is not None:
-        s = p + ' ' + s
-      env['GIT_CONFIG_PARAMETERS'] = s
-    if 'GIT_ALLOW_PROTOCOL' not in env:
-      env['GIT_ALLOW_PROTOCOL'] = (
-          'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
-    env['GIT_HTTP_USER_AGENT'] = user_agent.git
 
     if project:
       if not cwd:
         cwd = project.worktree
       if not gitdir:
         gitdir = project.gitdir
+
     # Git on Windows wants its paths only using / for reliability.
     if platform_utils.isWindows():
       if objdir:
@@ -204,18 +235,16 @@
       if gitdir:
         gitdir = gitdir.replace('\\', '/')
 
-    if objdir:
-      # Set to the place we want to save the objects.
-      env['GIT_OBJECT_DIRECTORY'] = objdir
-      if gitdir:
-        # Allow git to search the original place in case of local or unique refs
-        # that git will attempt to resolve even if we aren't fetching them.
-        env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = gitdir + '/objects'
+    env = _build_env(
+      disable_editor=disable_editor,
+      ssh_proxy=ssh_proxy,
+      objdir=objdir,
+      gitdir=gitdir,
+      bare=bare,
+    )
 
     command = [GIT]
     if bare:
-      if gitdir:
-        env[GIT_DIR] = gitdir
       cwd = None
     command.append(cmdv[0])
     # Need to use the --progress flag for fetch/clone so output will be
@@ -230,12 +259,11 @@
     stderr = (subprocess.STDOUT if merge_output else
               (subprocess.PIPE if capture_stderr else None))
 
+    dbg = ''
     if IsTrace():
       global LAST_CWD
       global LAST_GITDIR
 
-      dbg = ''
-
       if cwd and LAST_CWD != cwd:
         if LAST_GITDIR or LAST_CWD:
           dbg += '\n'
@@ -263,31 +291,31 @@
         dbg += ' 2>|'
       elif stderr == subprocess.STDOUT:
         dbg += ' 2>&1'
-      Trace('%s', dbg)
 
-    try:
-      p = subprocess.Popen(command,
-                           cwd=cwd,
-                           env=env,
-                           encoding='utf-8',
-                           errors='backslashreplace',
-                           stdin=stdin,
-                           stdout=stdout,
-                           stderr=stderr)
-    except Exception as e:
-      raise GitError('%s: %s' % (command[1], e))
+    with Trace('git command %s %s with debug: %s', LAST_GITDIR, command, dbg):
+      try:
+        p = subprocess.Popen(command,
+                            cwd=cwd,
+                            env=env,
+                            encoding='utf-8',
+                            errors='backslashreplace',
+                            stdin=stdin,
+                            stdout=stdout,
+                            stderr=stderr)
+      except Exception as e:
+        raise GitError('%s: %s' % (command[1], e))
 
-    if ssh_proxy:
-      ssh_proxy.add_client(p)
-
-    self.process = p
-
-    try:
-      self.stdout, self.stderr = p.communicate(input=input)
-    finally:
       if ssh_proxy:
-        ssh_proxy.remove_client(p)
-    self.rc = p.wait()
+        ssh_proxy.add_client(p)
+
+      self.process = p
+
+      try:
+        self.stdout, self.stderr = p.communicate(input=input)
+      finally:
+        if ssh_proxy:
+          ssh_proxy.remove_client(p)
+      self.rc = p.wait()
 
   @staticmethod
   def _GetBasicEnv():
diff --git a/git_config.py b/git_config.py
index 6f80ae0..af1a101 100644
--- a/git_config.py
+++ b/git_config.py
@@ -22,6 +22,7 @@
 import ssl
 import subprocess
 import sys
+from typing import Union
 import urllib.error
 import urllib.request
 
@@ -117,7 +118,7 @@
       return self.defaults.Has(name, include_defaults=True)
     return False
 
-  def GetInt(self, name):
+  def GetInt(self, name: str) -> Union[int, None]:
     """Returns an integer from the configuration file.
 
     This follows the git config syntax.
@@ -126,7 +127,7 @@
       name: The key to lookup.
 
     Returns:
-      None if the value was not defined, or is not a boolean.
+      None if the value was not defined, or is not an int.
       Otherwise, the number itself.
     """
     v = self.GetString(name)
@@ -152,6 +153,9 @@
     try:
       return int(v, base=base) * mult
     except ValueError:
+      print(
+          f"warning: expected {name} to represent an integer, got {v} instead",
+          file=sys.stderr)
       return None
 
   def DumpConfigDict(self):
@@ -169,7 +173,7 @@
       config_dict[key] = self.GetString(key)
     return config_dict
 
-  def GetBoolean(self, name):
+  def GetBoolean(self, name: str) -> Union[bool, None]:
     """Returns a boolean from the configuration file.
        None : The value was not defined, or is not a boolean.
        True : The value was set to true or yes.
@@ -183,6 +187,8 @@
       return True
     if v in ('false', 'no'):
       return False
+    print(f"warning: expected {name} to represent a boolean, got {v} instead",
+            file=sys.stderr)
     return None
 
   def SetBoolean(self, name, value):
@@ -191,7 +197,7 @@
       value = 'true' if value else 'false'
     self.SetString(name, value)
 
-  def GetString(self, name, all_keys=False):
+  def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
     """Get the first value for a key, or None if it is not defined.
 
        This configuration file is used first, if the key is not
@@ -219,8 +225,8 @@
     """Set the value(s) for a key.
        Only this configuration file is modified.
 
-       The supplied value should be either a string,
-       or a list of strings (to store multiple values).
+       The supplied value should be either a string, or a list of strings (to
+       store multiple values), or None (to delete the key).
     """
     key = _key(name)
 
@@ -349,9 +355,9 @@
     except OSError:
       return None
     try:
-      Trace(': parsing %s', self.file)
-      with open(self._json) as fd:
-        return json.load(fd)
+      with Trace(': parsing %s', self.file):
+        with open(self._json) as fd:
+          return json.load(fd)
     except (IOError, ValueError):
       platform_utils.remove(self._json, missing_ok=True)
       return None
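GetInt() follows git-config integer syntax, and with this change it warns on stderr instead of silently returning None when the value does not parse. A standalone sketch of that syntax (assumed details; repo's real logic lives in git_config.py):

```python
import sys

def parse_git_int(value):
  """Parse a git-config style integer: optional 0x prefix, optional k/m/g suffix."""
  v = value.strip().lower()
  mult = 1
  if v.endswith('k'):
    v, mult = v[:-1], 1024
  elif v.endswith('m'):
    v, mult = v[:-1], 1024 ** 2
  elif v.endswith('g'):
    v, mult = v[:-1], 1024 ** 3
  base = 16 if v.startswith('0x') else 10
  try:
    return int(v, base=base) * mult
  except ValueError:
    print(f'warning: expected an integer, got {value} instead', file=sys.stderr)
    return None

assert parse_git_int('512') == 512
assert parse_git_int('10k') == 10240
assert parse_git_int('bogus') is None
```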
diff --git a/git_refs.py b/git_refs.py
index 2d4a809..300d2b3 100644
--- a/git_refs.py
+++ b/git_refs.py
@@ -67,38 +67,37 @@
       self._LoadAll()
 
   def _NeedUpdate(self):
-    Trace(': scan refs %s', self._gitdir)
-
-    for name, mtime in self._mtime.items():
-      try:
-        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
+    with Trace(': scan refs %s', self._gitdir):
+      for name, mtime in self._mtime.items():
+        try:
+          if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
+            return True
+        except OSError:
           return True
-      except OSError:
-        return True
-    return False
+      return False
 
   def _LoadAll(self):
-    Trace(': load refs %s', self._gitdir)
+    with Trace(': load refs %s', self._gitdir):
 
-    self._phyref = {}
-    self._symref = {}
-    self._mtime = {}
+      self._phyref = {}
+      self._symref = {}
+      self._mtime = {}
 
-    self._ReadPackedRefs()
-    self._ReadLoose('refs/')
-    self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
+      self._ReadPackedRefs()
+      self._ReadLoose('refs/')
+      self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
 
-    scan = self._symref
-    attempts = 0
-    while scan and attempts < 5:
-      scan_next = {}
-      for name, dest in scan.items():
-        if dest in self._phyref:
-          self._phyref[name] = self._phyref[dest]
-        else:
-          scan_next[name] = dest
-      scan = scan_next
-      attempts += 1
+      scan = self._symref
+      attempts = 0
+      while scan and attempts < 5:
+        scan_next = {}
+        for name, dest in scan.items():
+          if dest in self._phyref:
+            self._phyref[name] = self._phyref[dest]
+          else:
+            scan_next[name] = dest
+        scan = scan_next
+        attempts += 1
 
   def _ReadPackedRefs(self):
     path = os.path.join(self._gitdir, 'packed-refs')
diff --git a/git_trace2_event_log.py b/git_trace2_event_log.py
index 7426aba..2edab0e 100644
--- a/git_trace2_event_log.py
+++ b/git_trace2_event_log.py
@@ -110,7 +110,7 @@
     return {
         'event': event_name,
         'sid': self._full_sid,
-        'thread': threading.currentThread().getName(),
+        'thread': threading.current_thread().name,
         'time': datetime.datetime.utcnow().isoformat() + 'Z',
     }
 
diff --git a/main.py b/main.py
index c54f928..f4b6e7a 100755
--- a/main.py
+++ b/main.py
@@ -37,7 +37,7 @@
 
 from color import SetDefaultColoring
 import event_log
-from repo_trace import SetTrace
+from repo_trace import SetTrace, Trace, SetTraceToStderr
 from git_command import user_agent
 from git_config import RepoConfig
 from git_trace2_event_log import EventLog
@@ -109,6 +109,9 @@
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution (REPO_TRACE=1)')
+global_options.add_option('--trace-to-stderr',
+                          dest='trace_to_stderr', action='store_true',
+                          help='trace outputs go to stderr in addition to .repo/TRACE_FILE')
 global_options.add_option('--trace-python',
                           dest='trace_python', action='store_true',
                           help='trace python command execution')
@@ -198,9 +201,6 @@
     """Execute the requested subcommand."""
     result = 0
 
-    if gopts.trace:
-      SetTrace()
-
     # Handle options that terminate quickly first.
     if gopts.help or gopts.help_all:
       self._PrintHelp(short=False, all_commands=gopts.help_all)
@@ -216,6 +216,21 @@
       self._PrintHelp(short=True)
       return 1
 
+    run = lambda: self._RunLong(name, gopts, argv) or 0
+    with Trace('starting new command: %s', ', '.join([name] + argv),
+               first_trace=True):
+      if gopts.trace_python:
+        import trace
+        tracer = trace.Trace(count=False, trace=True, timing=True,
+                             ignoredirs=set(sys.path[1:]))
+        result = tracer.runfunc(run)
+      else:
+        result = run()
+    return result
+
+  def _RunLong(self, name, gopts, argv):
+    """Execute the (longer running) requested subcommand."""
+    result = 0
     SetDefaultColoring(gopts.color)
 
     git_trace2_event_log = EventLog()
@@ -652,17 +667,18 @@
   Version.wrapper_path = opt.wrapper_path
 
   repo = _Repo(opt.repodir)
+
   try:
     init_http()
     name, gopts, argv = repo._ParseArgs(argv)
-    run = lambda: repo._Run(name, gopts, argv) or 0
-    if gopts.trace_python:
-      import trace
-      tracer = trace.Trace(count=False, trace=True, timing=True,
-                           ignoredirs=set(sys.path[1:]))
-      result = tracer.runfunc(run)
-    else:
-      result = run()
+
+    if gopts.trace:
+      SetTrace()
+
+    if gopts.trace_to_stderr:
+      SetTraceToStderr()
+
+    result = repo._Run(name, gopts, argv) or 0
   except KeyboardInterrupt:
     print('aborted by user', file=sys.stderr)
     result = 1
diff --git a/man/repo-gitc-init.1 b/man/repo-gitc-init.1
index d6ef5ab..02581c6 100644
--- a/man/repo-gitc-init.1
+++ b/man/repo-gitc-init.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "August 2022" "repo gitc-init" "Repo Manual"
+.TH REPO "1" "October 2022" "repo gitc-init" "Repo Manual"
 .SH NAME
 repo \- repo gitc-init - manual page for repo gitc-init
 .SH SYNOPSIS
@@ -48,7 +48,7 @@
 .TP
 \fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
 create a shallow clone of the manifest repo with given
-depth; see git clone (default: 1)
+depth (0 for full clone); see git clone (default: 0)
 .SS Manifest (only) checkout options:
 .TP
 \fB\-\-current\-branch\fR
diff --git a/man/repo-init.1 b/man/repo-init.1
index 0d45bf7..cf93de1 100644
--- a/man/repo-init.1
+++ b/man/repo-init.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "August 2022" "repo init" "Repo Manual"
+.TH REPO "1" "October 2022" "repo init" "Repo Manual"
 .SH NAME
 repo \- repo init - manual page for repo init
 .SH SYNOPSIS
@@ -48,7 +48,7 @@
 .TP
 \fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
 create a shallow clone of the manifest repo with given
-depth; see git clone (default: 1)
+depth (0 for full clone); see git clone (default: 0)
 .SS Manifest (only) checkout options:
 .TP
 \fB\-c\fR, \fB\-\-current\-branch\fR
diff --git a/man/repo-manifest.1 b/man/repo-manifest.1
index 382918c..e49836c 100644
--- a/man/repo-manifest.1
+++ b/man/repo-manifest.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "July 2022" "repo manifest" "Repo Manual"
+.TH REPO "1" "October 2022" "repo manifest" "Repo Manual"
 .SH NAME
 repo \- repo manifest - manual page for repo manifest
 .SH SYNOPSIS
@@ -190,6 +190,8 @@
 <!ATTLIST extend\-project groups CDATA #IMPLIED>
 <!ATTLIST extend\-project revision CDATA #IMPLIED>
 <!ATTLIST extend\-project remote CDATA #IMPLIED>
+<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
+<!ATTLIST extend\-project upstream CDATA #IMPLIED>
 .IP
 <!ELEMENT remove\-project EMPTY>
 <!ATTLIST remove\-project name  CDATA #REQUIRED>
@@ -485,6 +487,12 @@
 Attribute `remote`: If specified, overrides the remote of the original project.
 Same syntax as the corresponding element of `project`.
 .PP
+Attribute `dest\-branch`: If specified, overrides the dest\-branch of the original
+project. Same syntax as the corresponding element of `project`.
+.PP
+Attribute `upstream`: If specified, overrides the upstream of the original
+project. Same syntax as the corresponding element of `project`.
+.PP
 Element annotation
 .PP
 Zero or more annotation elements may be specified as children of a project or
@@ -600,7 +608,7 @@
 included manifests carry all parent include groups. Same syntax as the
 corresponding element of `project`.
 .PP
-Local Manifests
+Local Manifests 
 .PP
 Additional remotes and projects may be added through local manifest files stored
 in `$TOP_DIR/.repo/local_manifests/*.xml`.
diff --git a/man/repo-smartsync.1 b/man/repo-smartsync.1
index 8475adf..c1abbb3 100644
--- a/man/repo-smartsync.1
+++ b/man/repo-smartsync.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "August 2022" "repo smartsync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
 .SH NAME
 repo \- repo smartsync - manual page for repo smartsync
 .SH SYNOPSIS
@@ -105,6 +105,13 @@
 .TP
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
+.TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
 .SS Logging options:
 .TP
 \fB\-v\fR, \fB\-\-verbose\fR
diff --git a/man/repo-sync.1 b/man/repo-sync.1
index 9cc528d..f006c03 100644
--- a/man/repo-sync.1
+++ b/man/repo-sync.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "August 2022" "repo sync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
 .SH NAME
 repo \- repo sync - manual page for repo sync
 .SH SYNOPSIS
@@ -106,6 +106,13 @@
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
 .TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
+.TP
 \fB\-s\fR, \fB\-\-smart\-sync\fR
 smart sync using manifest from the latest known good
 build
@@ -200,6 +207,9 @@
 The \fB\-\-prune\fR option can be used to remove any refs that no longer exist on the
 remote.
 .PP
+The \fB\-\-auto\-gc\fR option can be used to trigger garbage collection on all projects.
+By default, repo does not run garbage collection.
+.PP
 SSH Connections
 .PP
 If at least one project remote URL uses an SSH connection (ssh://, git+ssh://,
diff --git a/man/repo.1 b/man/repo.1
index bdb705d..d2693a9 100644
--- a/man/repo.1
+++ b/man/repo.1
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man.
-.TH REPO "1" "July 2022" "repo" "Repo Manual"
+.TH REPO "1" "November 2022" "repo" "Repo Manual"
 .SH NAME
 repo \- repository management tool built on top of git
 .SH SYNOPSIS
@@ -25,6 +25,10 @@
 \fB\-\-trace\fR
 trace git command execution (REPO_TRACE=1)
 .TP
+\fB\-\-trace\-to\-stderr\fR
+trace outputs go to stderr in addition to
+\&.repo/TRACE_FILE
+.TP
 \fB\-\-trace\-python\fR
 trace python command execution
 .TP
diff --git a/manifest_xml.py b/manifest_xml.py
index ea274c7..129eb3f 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -1289,6 +1289,8 @@
           remote = self._default.remote
         else:
           remote = self._get_remote(node)
+        dest_branch = node.getAttribute('dest-branch')
+        upstream = node.getAttribute('upstream')
 
         named_projects = self._projects[name]
         if dest_path and not path and len(named_projects) > 1:
@@ -1304,6 +1306,10 @@
 
           if remote_name:
             p.remote = remote.ToRemoteSpec(name)
+          if dest_branch:
+            p.dest_branch = dest_branch
+          if upstream:
+            p.upstream = upstream
 
           if dest_path:
             del self._paths[p.relpath]
@@ -1940,11 +1946,14 @@
     fromKeys = sorted(fromProjects.keys())
     toKeys = sorted(toProjects.keys())
 
-    diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
+    diff = {'added': [], 'removed': [], 'missing': [], 'changed': [], 'unreachable': []}
 
     for proj in fromKeys:
       if proj not in toKeys:
         diff['removed'].append(fromProjects[proj])
+      elif not fromProjects[proj].Exists:
+        diff['missing'].append(toProjects[proj])
+        toKeys.remove(proj)
       else:
         fromProj = fromProjects[proj]
         toProj = toProjects[proj]
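The diff dict returned by projectsDiff() gains a 'missing' bucket for projects that appear in both manifests but have no checkout on disk. An illustrative sketch of the resulting shape (placeholder values, not real Project objects):

```python
# Keys consumed by `repo diffmanifests`; 'missing' is new in this release.
diff = {
    'added': [],        # only in the "to" manifest
    'removed': [],      # only in the "from" manifest
    'missing': [],      # in both manifests, but Project.Exists is False
    'changed': [],      # (from_project, to_project) pairs with differing revisions
    'unreachable': [],  # revisions that could not be compared
}
for bucket, projects in diff.items():
  print(f'{bucket}: {len(projects)} project(s)')
```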
diff --git a/progress.py b/progress.py
index f9ac53a..526ce6c 100644
--- a/progress.py
+++ b/progress.py
@@ -15,7 +15,7 @@
 import os
 import sys
 from time import time
-from repo_trace import IsTrace
+from repo_trace import IsTraceToStderr
 
 _NOT_TTY = not os.isatty(2)
 
@@ -80,7 +80,7 @@
   def update(self, inc=1, msg=''):
     self._done += inc
 
-    if _NOT_TTY or IsTrace():
+    if _NOT_TTY or IsTraceToStderr():
       return
 
     if not self._show:
@@ -113,7 +113,7 @@
       sys.stderr.flush()
 
   def end(self):
-    if _NOT_TTY or IsTrace() or not self._show:
+    if _NOT_TTY or IsTraceToStderr() or not self._show:
       return
 
     duration = duration_str(time() - self._start)
diff --git a/project.py b/project.py
index 3db23e0..4022d84 100644
--- a/project.py
+++ b/project.py
@@ -26,6 +26,7 @@
 import tarfile
 import tempfile
 import time
+from typing import NamedTuple
 import urllib.parse
 
 from color import Coloring
@@ -40,17 +41,25 @@
 from error import NoManifestException, ManifestParseError
 import platform_utils
 import progress
-from repo_trace import IsTrace, Trace
+from repo_trace import Trace
 
 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
 
 
+class SyncNetworkHalfResult(NamedTuple):
+  """Sync_NetworkHalf return value."""
+  # True if successful.
+  success: bool
+  # Did we query the remote? False when optimized_fetch is True and we have the
+  # commit already present.
+  remote_fetched: bool
+
 # Maximum sleep time allowed during retries.
 MAXIMUM_RETRY_SLEEP_SEC = 3600.0
 # +-10% random jitter is added to each Fetches retry sleep duration.
 RETRY_JITTER_PERCENT = 0.1
 
-# Whether to use alternates.
+# Whether to use alternates.  Switching back and forth is *NOT* supported.
 # TODO(vapier): Remove knob once behavior is verified.
 _ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1'
 
@@ -1133,7 +1142,7 @@
     if archive and not isinstance(self, MetaProject):
       if self.remote.url.startswith(('http://', 'https://')):
         _error("%s: Cannot fetch archives from http/https remotes.", self.name)
-        return False
+        return SyncNetworkHalfResult(False, False)
 
       name = self.relpath.replace('\\', '/')
       name = name.replace('/', '_')
@@ -1144,19 +1153,19 @@
         self._FetchArchive(tarpath, cwd=topdir)
       except GitError as e:
         _error('%s', e)
-        return False
+        return SyncNetworkHalfResult(False, False)
 
       # From now on, we only need absolute tarpath
       tarpath = os.path.join(topdir, tarpath)
 
       if not self._ExtractArchive(tarpath, path=topdir):
-        return False
+        return SyncNetworkHalfResult(False, True)
       try:
         platform_utils.remove(tarpath)
       except OSError as e:
         _warn("Cannot remove archive %s: %s", tarpath, str(e))
       self._CopyAndLinkFiles()
-      return True
+      return SyncNetworkHalfResult(True, True)
 
     # If the shared object dir already exists, don't try to rebootstrap with a
     # clone bundle download.  We should have the majority of objects already.
@@ -1224,9 +1233,11 @@
       depth = repo_depth
 
     # See if we can skip the network fetch entirely.
+    remote_fetched = False
     if not (optimized_fetch and
             (ID_RE.match(self.revisionExpr) and
              self._CheckForImmutableRevision())):
+      remote_fetched = True
       if not self._RemoteFetch(
               initial=is_new,
               quiet=quiet, verbose=verbose, output_redir=output_redir,
@@ -1235,7 +1246,7 @@
               submodules=submodules, force_sync=force_sync,
               ssh_proxy=ssh_proxy,
               clone_filter=clone_filter, retry_fetches=retry_fetches):
-        return False
+        return SyncNetworkHalfResult(False, remote_fetched)
 
     mp = self.manifest.manifestProject
     dissociate = mp.dissociate
@@ -1248,7 +1259,7 @@
         if p.stdout and output_redir:
           output_redir.write(p.stdout)
         if p.Wait() != 0:
-          return False
+          return SyncNetworkHalfResult(False, remote_fetched)
         platform_utils.remove(alternates_file)
 
     if self.worktree:
@@ -1257,7 +1268,7 @@
       self._InitMirrorHead()
       platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'),
                             missing_ok=True)
-    return True
+    return SyncNetworkHalfResult(True, remote_fetched)
 
   def PostRepoUpgrade(self):
     self._InitHooks()
@@ -1455,6 +1466,8 @@
         cnt_mine += 1
 
     if not upstream_gain and cnt_mine == len(local_changes):
+      # The copy/linkfile config may have changed.
+      self._CopyAndLinkFiles()
       return
 
     if self.IsDirty(consider_untracked=False):
@@ -2411,16 +2424,16 @@
         srcUrl = 'http' + srcUrl[len('persistent-http'):]
       cmd += [srcUrl]
 
-      if IsTrace():
-        Trace('%s', ' '.join(cmd))
-      if verbose:
-        print('%s: Downloading bundle: %s' % (self.name, srcUrl))
-      stdout = None if verbose else subprocess.PIPE
-      stderr = None if verbose else subprocess.STDOUT
-      try:
-        proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
-      except OSError:
-        return False
+      proc = None
+      with Trace('Fetching bundle: %s', ' '.join(cmd)):
+        if verbose:
+          print('%s: Downloading bundle: %s' % (self.name, srcUrl))
+        stdout = None if verbose else subprocess.PIPE
+        stderr = None if verbose else subprocess.STDOUT
+        try:
+          proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
+        except OSError:
+          return False
 
       (output, _) = proc.communicate()
       curlret = proc.returncode
@@ -2802,35 +2815,6 @@
         else:
           raise
 
-  def _InitialCheckoutStart(self):
-    """Called when checking out a project for the first time.
-
-    This will use temporary non-visible paths so we can be safely interrupted
-    without leaving incomplete state behind.
-    """
-    paths = [f'{x}.tmp' for x in (self.relpath, self.worktree, self.gitdir, self.objdir)]
-    for p in paths:
-      platform_utils.rmtree(p, ignore_errors=True)
-    self.UpdatePaths(*paths)
-
-  def _InitialCheckoutFinalizeNetworkHalf(self):
-    """Finalize the object dirs after network syncing works."""
-    # Once the network half finishes, we can move the objects into the right
-    # place by removing the ".tmp" suffix on the dirs.
-    platform_utils.rmtree(self.gitdir[:-4], ignore_errors=True)
-    os.rename(self.gitdir, self.gitdir[:-4])
-    self.UpdatePaths(self.relpath, self.worktree, self.gitdir[:-4], self.objdir[:-4])
-
-  def _InitialCheckoutFinalizeLocalHalf(self):
-    """Finalize the initial checkout and make it available."""
-    assert self.gitdir == self.objdir
-    # Once the local half finishes, we can move the manifest dir into the right
-    # place by removing the ".tmp" suffix on the dirs.
-    platform_utils.rmtree(self.worktree[:-4], ignore_errors=True)
-    os.rename(self.worktree, self.worktree[:-4])
-    self.UpdatePaths(
-        self.relpath[:-4], self.worktree[:-4], self.gitdir, self.objdir)
-
   def _InitGitWorktree(self):
     """Init the project using git worktrees."""
     self.bare_git.worktree('prune')
@@ -3529,7 +3513,7 @@
   @property
   def partial_clone_exclude(self):
     """Partial clone exclude string"""
-    return self.config.GetBoolean('repo.partialcloneexclude')
+    return self.config.GetString('repo.partialcloneexclude')
 
   @property
   def manifest_platform(self):
@@ -3717,8 +3701,6 @@
               (GitConfig.ForUser().UrlInsteadOf(manifest_url),),
               file=sys.stderr)
 
-      self._InitialCheckoutStart()
-
       # The manifest project object doesn't keep track of the path on the
       # server where this git is located, so let's save that here.
       mirrored_manifest_git = None
@@ -3875,17 +3857,19 @@
           is_new=is_new, quiet=not verbose, verbose=verbose,
           clone_bundle=clone_bundle, current_branch_only=current_branch_only,
           tags=tags, submodules=submodules, clone_filter=clone_filter,
-          partial_clone_exclude=self.manifest.PartialCloneExclude):
+          partial_clone_exclude=self.manifest.PartialCloneExclude).success:
         r = self.GetRemote()
         print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
+
+        # Better delete the manifest git dir if we created it; otherwise next
+        # time (when user fixes problems) we won't go through the "is_new" logic.
+        if is_new:
+          platform_utils.rmtree(self.gitdir)
         return False
 
       if manifest_branch:
         self.MetaBranchSwitch(submodules=submodules)
 
-      if is_new:
-        self._InitialCheckoutFinalizeNetworkHalf()
-
       syncbuf = SyncBuffer(self.config)
       self.Sync_LocalHalf(syncbuf, submodules=submodules)
       syncbuf.Finish()
@@ -3908,9 +3892,6 @@
       with open(dest, 'wb') as f:
         f.write(manifest_data)
 
-    if is_new:
-      self._InitialCheckoutFinalizeLocalHalf()
-
     try:
       self.manifest.Link(manifest_name)
     except ManifestParseError as e:
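Sync_NetworkHalf() now returns a SyncNetworkHalfResult named tuple instead of a bare bool, so callers check .success explicitly (see the selfupdate.py and sync.py hunks below). A minimal sketch of the new call shape, using a stub in place of a real Project:

```python
from typing import NamedTuple

class SyncNetworkHalfResult(NamedTuple):
  success: bool
  remote_fetched: bool  # False when an optimized fetch skipped the network

def sync_network_half_stub(optimized_fetch=False):
  # Stand-in for project.Sync_NetworkHalf(); pretend the sync succeeded and
  # that the network was only contacted when the fetch was not optimized away.
  return SyncNetworkHalfResult(True, not optimized_fetch)

result = sync_network_half_stub(optimized_fetch=True)
if not result.success:
  raise SystemExit('error: cannot obtain manifest')
print('remote queried:', result.remote_fetched)
```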
diff --git a/release/update-manpages b/release/update-manpages
index ddbce0c..739cedb 100755
--- a/release/update-manpages
+++ b/release/update-manpages
@@ -59,27 +59,30 @@
   version = RepoSourceVersion()
   cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}',
     '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}',
-    '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), TOPDIR.joinpath('repo'),
+    '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo',
     '-h', f'help {cmd}'] for cmd in subcmds.all_commands]
   cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git',
     '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}',
-    '-o', MANDIR.joinpath('repo.1.tmp'), TOPDIR.joinpath('repo'),
+    '-o', MANDIR.joinpath('repo.1.tmp'), './repo',
     '-h', '--help-all'])
 
   with tempfile.TemporaryDirectory() as tempdir:
-    repo_dir = Path(tempdir) / '.repo'
+    tempdir = Path(tempdir)
+    repo_dir = tempdir / '.repo'
     repo_dir.mkdir()
     (repo_dir / 'repo').symlink_to(TOPDIR)
 
+    # Create a repo wrapper using the active Python executable.  We can't pass
+    # this directly to help2man as it's too simple, so insert it via shebang.
+    data = (TOPDIR / 'repo').read_text(encoding='utf-8')
+    tempbin = tempdir / 'repo'
+    tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8')
+    tempbin.chmod(0o755)
+
     # Run all cmd in parallel, and wait for them to finish.
     with multiprocessing.Pool() as pool:
       pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
 
-  regex = (
-      (r'(It was generated by help2man) [0-9.]+', '\g<1>.'),
-      (r'^\.IP\n(.*:)\n', '.SS \g<1>\n'),
-      (r'^\.PP\nDescription', '.SH DETAILS'),
-  )
   for tmp_path in MANDIR.glob('*.1.tmp'):
     path = tmp_path.parent / tmp_path.stem
     old_data = path.read_text() if path.exists() else ''
@@ -87,8 +90,7 @@
     data = tmp_path.read_text()
     tmp_path.unlink()
 
-    for pattern, replacement in regex:
-      data = re.sub(pattern, replacement, data, flags=re.M)
+    data = replace_regex(data)
 
     # If the only thing that changed was the date, don't refresh.  This avoids
     # a lot of noise when only one file actually updates.
@@ -98,5 +100,25 @@
       path.write_text(data)
 
 
+def replace_regex(data):
+  """Replace semantically null regexes in the data.
+
+  Args:
+    data: manpage text.
+
+  Returns:
+    Updated manpage text.
+  """
+  regex = (
+      (r'(It was generated by help2man) [0-9.]+', '\g<1>.'),
+      (r'^\033\[[0-9;]*m([^\033]*)\033\[m', '\g<1>'),
+      (r'^\.IP\n(.*:)\n', '.SS \g<1>\n'),
+      (r'^\.PP\nDescription', '.SH DETAILS'),
+  )
+  for pattern, replacement in regex:
+    data = re.sub(pattern, replacement, data, flags=re.M)
+  return data
+
+
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
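The man-page cleanup regexes now live in a replace_regex() helper and gain a pattern that strips ANSI color sequences from the help2man output. A small standalone demonstration of that pattern (the sample input is made up):

```python
import re

# Same pattern/replacement pair as in replace_regex(); strips a leading
# ESC[...m ... ESC[m color wrapper from a line of generated man-page text.
pattern, replacement = (r'^\033\[[0-9;]*m([^\033]*)\033\[m', r'\g<1>')
sample = '\033[1mUsage: repo COMMAND\033[m'
print(re.sub(pattern, replacement, sample, flags=re.M))  # -> Usage: repo COMMAND
```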
diff --git a/repo b/repo
index c6acc72..9556832 100755
--- a/repo
+++ b/repo
@@ -149,7 +149,7 @@
 BUG_URL = 'https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue'
 
 # increment this whenever we make important changes to this script
-VERSION = (2, 29)
+VERSION = (2, 30)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
@@ -316,9 +316,10 @@
                    help='download the manifest as a static file '
                         'rather then create a git checkout of '
                         'the manifest repo')
-  group.add_option('--manifest-depth', type='int', default=1, metavar='DEPTH',
+  group.add_option('--manifest-depth', type='int', default=0, metavar='DEPTH',
                    help='create a shallow clone of the manifest repo with '
-                        'given depth; see git clone (default: %default)')
+                        'given depth (0 for full clone); see git clone '
+                        '(default: %default)')
 
   # Options that only affect manifest project, and not any of the projects
   # specified in the manifest itself.
diff --git a/repo_trace.py b/repo_trace.py
index 7be0c04..d79408d 100644
--- a/repo_trace.py
+++ b/repo_trace.py
@@ -15,26 +15,133 @@
 """Logic for tracing repo interactions.
 
 Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
+
+Temporary: Tracing is always on. Set `REPO_TRACE=0` to turn it off.
+To also send trace output to stderr, use `repo --trace-to-stderr ...`.
 """
 
 import sys
 import os
+import time
+from contextlib import ContextDecorator
+
+import platform_utils
 
 # Env var to implicitly turn on tracing.
 REPO_TRACE = 'REPO_TRACE'
 
-_TRACE = os.environ.get(REPO_TRACE) == '1'
+# Temporarily set tracing to always on unless the user explicitly sets it to 0.
+_TRACE = os.environ.get(REPO_TRACE) != '0'
+_TRACE_TO_STDERR = False
+_TRACE_FILE = None
+_TRACE_FILE_NAME = 'TRACE_FILE'
+_MAX_SIZE = 70  # in mb
+_NEW_COMMAND_SEP = '+++++++++++++++NEW COMMAND+++++++++++++++++++'
+
+
+def IsTraceToStderr():
+  return _TRACE_TO_STDERR
 
 
 def IsTrace():
   return _TRACE
 
 
+def SetTraceToStderr():
+  global _TRACE_TO_STDERR
+  _TRACE_TO_STDERR = True
+
+
 def SetTrace():
   global _TRACE
   _TRACE = True
 
 
-def Trace(fmt, *args):
-  if IsTrace():
-    print(fmt % args, file=sys.stderr)
+def _SetTraceFile(quiet):
+  global _TRACE_FILE
+  _TRACE_FILE = _GetTraceFile(quiet)
+
+
+class Trace(ContextDecorator):
+
+  def _time(self):
+    """Return the current time in nanoseconds, in a py3.6-safe way."""
+    return int(time.time() * 1e+9)
+
+  def __init__(self, fmt, *args, first_trace=False, quiet=True):
+    """Initialize the object.
+
+    Args:
+      fmt: The format string for the trace.
+      *args: Arguments to pass to formatting.
+      first_trace: Whether this is the first trace of a `repo` invocation.
+      quiet: Whether to suppress notification of trace file location.
+    """
+    if not IsTrace():
+      return
+    self._trace_msg = fmt % args
+
+    if not _TRACE_FILE:
+      _SetTraceFile(quiet)
+
+    if first_trace:
+      _ClearOldTraces()
+      self._trace_msg = f'{_NEW_COMMAND_SEP} {self._trace_msg}'
+
+  def __enter__(self):
+    if not IsTrace():
+      return self
+
+    print_msg = f'PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n'
+
+    with open(_TRACE_FILE, 'a') as f:
+      print(print_msg, file=f)
+
+    if _TRACE_TO_STDERR:
+      print(print_msg, file=sys.stderr)
+
+    return self
+
+  def __exit__(self, *exc):
+    if not IsTrace():
+      return False
+
+    print_msg = f'PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n'
+
+    with open(_TRACE_FILE, 'a') as f:
+      print(print_msg, file=f)
+
+    if _TRACE_TO_STDERR:
+      print(print_msg, file=sys.stderr)
+
+    return False
+
+
+def _GetTraceFile(quiet):
+  """Get the trace file or create one."""
+  # TODO: refactor to pass repodir to Trace.
+  repo_dir = os.path.dirname(os.path.dirname(__file__))
+  trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
+  if not quiet:
+    print(f'Trace outputs in {trace_file}', file=sys.stderr)
+  return trace_file
+
+
+def _ClearOldTraces():
+  """Clear the oldest commands if trace file is too big.
+
+  Note: If the trace file contains output from two `repo`
+        commands that were running at the same time, this
+        will not work precisely.
+  """
+  if os.path.isfile(_TRACE_FILE):
+    while os.path.getsize(_TRACE_FILE) / (1024 * 1024) > _MAX_SIZE:
+      temp_file = _TRACE_FILE + '.tmp'
+      with open(_TRACE_FILE, 'r', errors='ignore') as fin:
+        with open(temp_file, 'w') as tf:
+          trace_lines = fin.readlines()
+          for i, l in enumerate(trace_lines):
+            if 'END:' in l and _NEW_COMMAND_SEP in l:
+              tf.writelines(trace_lines[i + 1:])
+              break
+      platform_utils.rename(temp_file, _TRACE_FILE)
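Trace is now a ContextDecorator rather than a bare print helper, so call sites wrap the traced work in a `with` block (or decorate a function) and get START/END timestamps written to .repo/TRACE_FILE. A self-contained sketch of the pattern (not repo's implementation; messages and paths are examples):

```python
import os
import time
from contextlib import ContextDecorator

class TraceSketch(ContextDecorator):
  """Toy stand-in for repo_trace.Trace: logs START/END around a block."""

  def __init__(self, fmt, *args):
    self._msg = fmt % args

  def __enter__(self):
    print(f'PID: {os.getpid()} START: {int(time.time() * 1e9)} :{self._msg}')
    return self

  def __exit__(self, *exc):
    print(f'PID: {os.getpid()} END: {int(time.time() * 1e9)} :{self._msg}')
    return False  # do not swallow exceptions

with TraceSketch('scan refs %s', '/tmp/example.git'):
  pass  # traced work goes here

@TraceSketch('helper step')  # a ContextDecorator instance also works as a decorator
def helper():
  pass

helper()
```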
diff --git a/run_tests b/run_tests
index 573dd44..7c9ff41 100755
--- a/run_tests
+++ b/run_tests
@@ -20,6 +20,7 @@
 import shutil
 import subprocess
 import sys
+import repo_trace
 
 
 def find_pytest():
diff --git a/ssh.py b/ssh.py
index 450383d..004fdba 100644
--- a/ssh.py
+++ b/ssh.py
@@ -182,28 +182,29 @@
     # be important because we can't tell that that 'git@myhost.com' is the same
     # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
     check_command = command_base + ['-O', 'check']
-    try:
-      Trace(': %s', ' '.join(check_command))
-      check_process = subprocess.Popen(check_command,
-                                       stdout=subprocess.PIPE,
-                                       stderr=subprocess.PIPE)
-      check_process.communicate()  # read output, but ignore it...
-      isnt_running = check_process.wait()
+    with Trace('Call to ssh (check call): %s', ' '.join(check_command)):
+      try:
+        check_process = subprocess.Popen(check_command,
+                                        stdout=subprocess.PIPE,
+                                        stderr=subprocess.PIPE)
+        check_process.communicate()  # read output, but ignore it...
+        isnt_running = check_process.wait()
 
-      if not isnt_running:
-        # Our double-check found that the master _was_ infact running.  Add to
-        # the list of keys.
-        self._master_keys[key] = True
-        return True
-    except Exception:
-      # Ignore excpetions.  We we will fall back to the normal command and print
-      # to the log there.
-      pass
+        if not isnt_running:
+          # Our double-check found that the master _was_ in fact running.  Add to
+          # the list of keys.
+          self._master_keys[key] = True
+          return True
+      except Exception:
+        # Ignore excpetions.  We we will fall back to the normal command and
+        # print to the log there.
+        pass
 
     command = command_base[:1] + ['-M', '-N'] + command_base[1:]
+    p = None
     try:
-      Trace(': %s', ' '.join(command))
-      p = subprocess.Popen(command)
+      with Trace('Call to ssh: %s', ' '.join(command)):
+        p = subprocess.Popen(command)
     except Exception as e:
       self._master_broken.value = True
       print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
diff --git a/subcmds/branches.py b/subcmds/branches.py
index b89cc2f..fcf67ef 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -155,11 +155,11 @@
         if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
           in_type = 'in'
           for b in i.projects:
-            relpath = b.project.relpath
+            relpath = _RelPath(b.project)
             if not i.IsSplitCurrent or b.current:
-              paths.append(_RelPath(b.project))
+              paths.append(relpath)
             else:
-              non_cur_paths.append(_RelPath(b.project))
+              non_cur_paths.append(relpath)
         else:
           fmt = out.notinproject
           in_type = 'not in'
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 0e5f410..4f9f5b0 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -77,33 +77,35 @@
                  metavar='<FORMAT>',
                  help='print the log using a custom git pretty format string')
 
-  def _printRawDiff(self, diff, pretty_format=None):
+  def _printRawDiff(self, diff, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     for project in diff['added']:
-      self.printText("A %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("A %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()
 
     for project in diff['removed']:
-      self.printText("R %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("R %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()
 
     for project, otherProject in diff['changed']:
-      self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()
       self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
 
     for project, otherProject in diff['unreachable']:
-      self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()
 
-  def _printDiff(self, diff, color=True, pretty_format=None):
+  def _printDiff(self, diff, color=True, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     if diff['added']:
       self.out.nl()
       self.printText('added projects : \n')
       self.out.nl()
       for project in diff['added']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -113,7 +115,17 @@
       self.printText('removed projects : \n')
       self.out.nl()
       for project in diff['removed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
+        self.printText(' at revision ')
+        self.printRevision(project.revisionExpr)
+        self.out.nl()
+
+    if diff['missing']:
+      self.out.nl()
+      self.printText('missing projects : \n')
+      self.out.nl()
+      for project in diff['missing']:
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -123,7 +135,7 @@
       self.printText('changed projects : \n')
       self.out.nl()
       for project, otherProject in diff['changed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' changed from ')
         self.printRevision(project.revisionExpr)
         self.printText(' to ')
@@ -138,7 +150,7 @@
       self.printText('projects with unreachable revisions : \n')
       self.out.nl()
       for project, otherProject in diff['unreachable']:
-        self.printProject('\t%s ' % (project.relpath))
+        self.printProject('\t%s ' % (_RelPath(project)))
         self.printRevision(project.revisionExpr)
         self.printText(' or ')
         self.printRevision(otherProject.revisionExpr)
@@ -204,6 +216,8 @@
 
     diff = manifest1.projectsDiff(manifest2)
     if opt.raw:
-      self._printRawDiff(diff, pretty_format=opt.pretty_format)
+      self._printRawDiff(diff, pretty_format=opt.pretty_format,
+                         local=opt.this_manifest_only)
     else:
-      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
+      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format,
+                      local=opt.this_manifest_only)
diff --git a/subcmds/gitc_init.py b/subcmds/gitc_init.py
index 1d81baf..e3a5813 100644
--- a/subcmds/gitc_init.py
+++ b/subcmds/gitc_init.py
@@ -68,7 +68,8 @@
         sys.exit(1)
       manifest_file = opt.manifest_file
 
-    manifest = GitcManifest(self.repodir, gitc_client)
+    manifest = GitcManifest(self.repodir, os.path.join(self.client_dir,
+                                                       '.manifest'))
     manifest.Override(manifest_file)
     gitc_utils.generate_gitc_manifest(None, manifest)
     print('Please run `cd %s` to view your GITC client.' %
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 282f518..898bc3f 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -51,7 +51,7 @@
       _PostRepoUpgrade(self.manifest)
 
     else:
-      if not rp.Sync_NetworkHalf():
+      if not rp.Sync_NetworkHalf().success:
         print("error: can't update repo", file=sys.stderr)
         sys.exit(1)
 
diff --git a/subcmds/sync.py b/subcmds/sync.py
index de4ac3a..e4e7a97 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -25,6 +25,7 @@
 import sys
 import tempfile
 import time
+from typing import NamedTuple, List, Set
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -58,12 +59,70 @@
 import platform_utils
 from project import SyncBuffer
 from progress import Progress
+from repo_trace import Trace
 import ssh
 from wrapper import Wrapper
 from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
 
+# Env var to implicitly turn auto-gc back on.  This was added to allow a user to
+# revert a change in default behavior in v2.29.9.  Remove after 2023-04-01.
+_REPO_AUTO_GC = 'REPO_AUTO_GC'
+_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'
+
+
+class _FetchOneResult(NamedTuple):
+  """_FetchOne return value.
+
+  Attributes:
+    success (bool): True if successful.
+    project (Project): The fetched project.
+    start (float): The starting time.time().
+    finish (float): The ending time.time().
+    remote_fetched (bool): True if the remote was actually queried.
+  """
+  success: bool
+  project: Project
+  start: float
+  finish: float
+  remote_fetched: bool
+
+
+class _FetchResult(NamedTuple):
+  """_Fetch return value.
+
+  Attributes:
+    success (bool): True if successful.
+    projects (Set[str]): The names of the git directories of fetched projects.
+  """
+  success: bool
+  projects: Set[str]
+
+
+class _FetchMainResult(NamedTuple):
+  """_FetchMain return value.
+
+  Attributes:
+    all_projects (List[Project]): The fetched projects.
+  """
+  all_projects: List[Project]
+
+
+class _CheckoutOneResult(NamedTuple):
+  """_CheckoutOne return value.
+
+  Attributes:
+    success (bool): True if successful.
+    project (Project): The project.
+    start (float): The starting time.time().
+    finish (float): The ending time.time().
+  """
+  success: bool
+  project: Project
+  start: float
+  finish: float
+
 
 class Sync(Command, MirrorSafeCommand):
   COMMON = True
@@ -141,6 +200,9 @@
 The --prune option can be used to remove any refs that no longer
 exist on the remote.
 
+The --auto-gc option can be used to trigger garbage collection on all
+projects. By default, repo does not run garbage collection.
+
 # SSH Connections
 
 If at least one project remote URL uses an SSH connection (ssh://,
@@ -250,6 +312,10 @@
                  help='delete refs that no longer exist on the remote (default)')
     p.add_option('--no-prune', dest='prune', action='store_false',
                  help='do not delete refs that no longer exist on the remote')
+    p.add_option('--auto-gc', action='store_true', default=None,
+                 help='run garbage collection on all synced projects')
+    p.add_option('--no-auto-gc', dest='auto_gc', action='store_false',
+                 help='do not run garbage collection on any projects (default)')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -404,9 +470,10 @@
     """
     start = time.time()
     success = False
+    remote_fetched = False
     buf = io.StringIO()
     try:
-      success = project.Sync_NetworkHalf(
+      sync_result = project.Sync_NetworkHalf(
           quiet=opt.quiet,
           verbose=opt.verbose,
           output_redir=buf,
@@ -420,6 +487,8 @@
           ssh_proxy=self.ssh_proxy,
           clone_filter=project.manifest.CloneFilter,
           partial_clone_exclude=project.manifest.PartialCloneExclude)
+      success = sync_result.success
+      remote_fetched = sync_result.remote_fetched
 
       output = buf.getvalue()
       if (opt.verbose or not success) and output:
@@ -437,7 +506,7 @@
       raise
 
     finish = time.time()
-    return (success, project, start, finish)
+    return _FetchOneResult(success, project, start, finish, remote_fetched)
 
   @classmethod
   def _FetchInitChild(cls, ssh_proxy):
@@ -448,6 +517,7 @@
 
     jobs = opt.jobs_network
     fetched = set()
+    remote_fetched = set()
     pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
 
     objdir_project_map = dict()
@@ -458,10 +528,16 @@
     def _ProcessResults(results_sets):
       ret = True
       for results in results_sets:
-        for (success, project, start, finish) in results:
+        for result in results:
+          success = result.success
+          project = result.project
+          start = result.start
+          finish = result.finish
           self._fetch_times.Set(project, finish - start)
           self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
                                  start, finish, success)
+          if result.remote_fetched:
+            remote_fetched.add(project)
           # Check for any errors before running any more tasks.
           # ...we'll let existing jobs finish, though.
           if not success:
@@ -519,7 +595,7 @@
     if not self.outer_client.manifest.IsArchive:
       self._GCProjects(projects, opt, err_event)
 
-    return (ret, fetched)
+    return _FetchResult(ret, fetched)
 
   def _FetchMain(self, opt, args, all_projects, err_event,
                  ssh_proxy, manifest):
@@ -545,7 +621,9 @@
     to_fetch.extend(all_projects)
     to_fetch.sort(key=self._fetch_times.Get, reverse=True)
 
-    success, fetched = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+    result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+    success = result.success
+    fetched = result.projects
     if not success:
       err_event.set()
 
@@ -555,7 +633,7 @@
       if err_event.is_set():
         print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
         sys.exit(1)
-      return
+      return _FetchMainResult([])
 
     # Iteratively fetch missing and/or nested unregistered submodules
     previously_missing_set = set()
@@ -578,12 +656,14 @@
       if previously_missing_set == missing_set:
         break
       previously_missing_set = missing_set
-      success, new_fetched = self._Fetch(missing, opt, err_event, ssh_proxy)
+      result = self._Fetch(missing, opt, err_event, ssh_proxy)
+      success = result.success
+      new_fetched = result.projects
       if not success:
         err_event.set()
       fetched.update(new_fetched)
 
-    return all_projects
+    return _FetchMainResult(all_projects)
 
   def _CheckoutOne(self, detach_head, force_sync, project):
     """Checkout work tree for one project
@@ -615,7 +695,7 @@
     if not success:
       print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
     finish = time.time()
-    return (success, project, start, finish)
+    return _CheckoutOneResult(success, project, start, finish)
 
   def _Checkout(self, all_projects, opt, err_results):
     """Checkout projects listed in all_projects
@@ -630,14 +710,18 @@
 
     def _ProcessResults(pool, pm, results):
       ret = True
-      for (success, project, start, finish) in results:
+      for result in results:
+        success = result.success
+        project = result.project
+        start = result.start
+        finish = result.finish
         self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
                                start, finish, success)
         # Check for any errors before running any more tasks.
         # ...we'll let existing jobs finish, though.
         if not success:
           ret = False
-          err_results.append(project.relpath)
+          err_results.append(project.RelPath(local=opt.this_manifest_only))
           if opt.fail_fast:
             if pool:
               pool.close()
@@ -652,33 +736,99 @@
         callback=_ProcessResults,
         output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
 
+  @staticmethod
+  def _GetPreciousObjectsState(project: Project, opt):
+    """Get the preciousObjects state for the project.
+
+    Args:
+      project (Project): the project to examine, and possibly correct.
+      opt (optparse.Values): options given to sync.
+
+    Returns:
+      Expected state of extensions.preciousObjects:
+        False: Should be disabled (i.e. not set).
+        True: Should be enabled.
+    """
+    if project.use_git_worktrees:
+      return False
+    projects = project.manifest.GetProjectsWithName(project.name,
+                                                    all_manifests=True)
+    if len(projects) == 1:
+      return False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+    if len(projects) > 1:
+      # Objects are potentially shared with another project.
+      # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
+      # - When False, shared projects share (via symlink)
+      #   .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
+      #   directory.  All objects are precious, since there is no project with a
+      #   complete set of refs.
+      # - When True, shared projects share (via info/alternates)
+      #   .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
+      #   which is written only on the first clone of the project, and is not
+      #   written subsequently.  (When Sync_NetworkHalf sees that it exists, it
+      #   makes sure that the alternates file points there, and uses a
+      #   project-local .git/objects directory for all syncs going forward.)
+      # We do not support switching between the options.  The environment
+      # variable is present for testing and migration only.
+      return not project.UseAlternates
+    print(f'\r{relpath}: project not found in manifest.', file=sys.stderr)
+    return False
+
+  def _SetPreciousObjectsState(self, project: Project, opt):
+    """Correct the preciousObjects state for the project.
+
+    Args:
+      project: the project to examine, and possibly correct.
+      opt: options given to sync.
+    """
+    expected = self._GetPreciousObjectsState(project, opt)
+    actual = project.config.GetBoolean('extensions.preciousObjects') or False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+
+    if expected != actual:
+      # If this is unexpected, log it and repair.
+      Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
+      if expected:
+        if not opt.quiet:
+          print('\r%s: Shared project %s found, disabling pruning.' %
+                (relpath, project.name))
+        if git_require((2, 7, 0)):
+          project.EnableRepositoryExtension('preciousObjects')
+        else:
+          # This isn't perfect, but it's the best we can do with old git.
+          print('\r%s: WARNING: shared projects are unreliable when using '
+                'old versions of git; please upgrade to git-2.7.0+.'
+                % (relpath,),
+                file=sys.stderr)
+          project.config.SetString('gc.pruneExpire', 'never')
+      else:
+        if not opt.quiet:
+          print(f'\r{relpath}: not shared, re-enabling pruning.')
+        project.config.SetString('extensions.preciousObjects', None)
+        project.config.SetString('gc.pruneExpire', None)
+
   def _GCProjects(self, projects, opt, err_event):
-    pm = Progress('Garbage collecting', len(projects), delay=False, quiet=opt.quiet)
+    """Perform garbage collection.
+
+    If we are skipping garbage collection (opt.auto_gc not set), we still want
+    to potentially mark objects precious, so that `git gc` does not discard
+    shared objects.
+    """
+    if not opt.auto_gc:
+      # Just repair preciousObjects state, and return.
+      for project in projects:
+        self._SetPreciousObjectsState(project, opt)
+      return
+
+    pm = Progress('Garbage collecting', len(projects), delay=False,
+                  quiet=opt.quiet)
     pm.update(inc=0, msg='prescan')
 
     tidy_dirs = {}
     for project in projects:
-      # Make sure pruning never kicks in with shared projects that do not use
-      # alternates to avoid corruption.
-      if (not project.use_git_worktrees and
-              len(project.manifest.GetProjectsWithName(project.name, all_manifests=True)) > 1):
-        if project.UseAlternates:
-          # Undo logic set by previous versions of repo.
-          project.config.SetString('extensions.preciousObjects', None)
-          project.config.SetString('gc.pruneExpire', None)
-        else:
-          if not opt.quiet:
-            print('\r%s: Shared project %s found, disabling pruning.' %
-                  (project.relpath, project.name))
-          if git_require((2, 7, 0)):
-            project.EnableRepositoryExtension('preciousObjects')
-          else:
-            # This isn't perfect, but it's the best we can do with old git.
-            print('\r%s: WARNING: shared projects are unreliable when using old '
-                  'versions of git; please upgrade to git-2.7.0+.'
-                  % (project.relpath,),
-                  file=sys.stderr)
-            project.config.SetString('gc.pruneExpire', 'never')
+      self._SetPreciousObjectsState(project, opt)
+
       project.config.SetString('gc.autoDetach', 'false')
       # Only call git gc once per objdir, but call pack-refs for the remainder.
       if project.objdir not in tidy_dirs:
@@ -697,6 +847,7 @@
     if jobs < 2:
       for (run_gc, bare_git) in tidy_dirs.values():
         pm.update(msg=bare_git._project.name)
+
         if run_gc:
           bare_git.gc('--auto')
         else:
@@ -1033,6 +1184,51 @@
     if opt.prune is None:
       opt.prune = True
 
+    if opt.auto_gc is None and _AUTO_GC:
+      print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
+            f'{_REPO_AUTO_GC} is deprecated and will be removed in a future',
+            'release.  Use `--auto-gc` instead.', file=sys.stderr)
+      opt.auto_gc = True
+
+  def _ValidateOptionsWithManifest(self, opt, mp):
+    """Like ValidateOptions, but after we've updated the manifest.
+
+    Needed to handle sync-xxx option defaults in the manifest.
+
+    Args:
+      opt: The options to process.
+      mp: The manifest project to pull defaults from.
+    """
+    if not opt.jobs:
+      # If the user hasn't made a choice, use the manifest value.
+      opt.jobs = mp.manifest.default.sync_j
+    if opt.jobs:
+      # If --jobs has a non-default value, propagate it as the default for
+      # --jobs-xxx flags too.
+      if not opt.jobs_network:
+        opt.jobs_network = opt.jobs
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = opt.jobs
+    else:
+      # Neither the user nor the manifest has made a choice, so set up defaults.
+      if not opt.jobs_network:
+        opt.jobs_network = 1
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
+      opt.jobs = os.cpu_count()
+
+    # Try to stay under user rlimit settings.
+    #
+    # Since each worker requires at least 3 file descriptors to run `git fetch`,
+    # use that to scale down the number of jobs.  Unfortunately there isn't an
+    # easy way to determine this reliably as systems change, but it was last
+    # measured by hand in 2011.
+    soft_limit, _ = _rlimit_nofile()
+    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
+    opt.jobs = min(opt.jobs, jobs_soft_limit)
+    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
+    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+
   def Execute(self, opt, args):
     manifest = self.outer_manifest
     if not opt.outer_manifest:
@@ -1083,35 +1279,9 @@
     else:
       print('Skipping update of local manifest project.')
 
-    # Now that the manifests are up-to-date, setup the jobs value.
-    if opt.jobs is None:
-      # User has not made a choice, so use the manifest settings.
-      opt.jobs = mp.default.sync_j
-    if opt.jobs is not None:
-      # Neither user nor manifest have made a choice.
-      if opt.jobs_network is None:
-        opt.jobs_network = opt.jobs
-      if opt.jobs_checkout is None:
-        opt.jobs_checkout = opt.jobs
-    # Setup defaults if jobs==0.
-    if not opt.jobs:
-      if not opt.jobs_network:
-        opt.jobs_network = 1
-      if not opt.jobs_checkout:
-        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
-      opt.jobs = os.cpu_count()
-
-    # Try to stay under user rlimit settings.
-    #
-    # Since each worker requires at 3 file descriptors to run `git fetch`, use
-    # that to scale down the number of jobs.  Unfortunately there isn't an easy
-    # way to determine this reliably as systems change, but it was last measured
-    # by hand in 2011.
-    soft_limit, _ = _rlimit_nofile()
-    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
-    opt.jobs = min(opt.jobs, jobs_soft_limit)
-    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
-    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+    # Now that the manifests are up-to-date, set up options whose defaults
+    # might be in the manifest.
+    self._ValidateOptionsWithManifest(opt, mp)
 
     superproject_logging_data = {}
     self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
@@ -1160,6 +1330,7 @@
 
     err_network_sync = False
     err_update_projects = False
+    err_update_linkfiles = False
 
     self._fetch_times = _FetchTimes(manifest)
     if not opt.local_only:
@@ -1167,8 +1338,9 @@
         with ssh.ProxyManager(manager) as ssh_proxy:
           # Initialize the socket dir once in the parent.
           ssh_proxy.sock()
-          all_projects = self._FetchMain(opt, args, all_projects, err_event,
-                                         ssh_proxy, manifest)
+          result = self._FetchMain(opt, args, all_projects, err_event,
+                                   ssh_proxy, manifest)
+          all_projects = result.all_projects
 
       if opt.network_only:
         return
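
Note: the hunks above replace positional tuple returns with small result
objects.  A minimal sketch of definitions consistent with the attribute names
used here, assuming plain namedtuples (the real definitions are added earlier
in subcmds/sync.py and may differ):

    import collections

    # Illustrative only: field names are inferred from the accesses above.
    _FetchOneResult = collections.namedtuple(
        '_FetchOneResult',
        ['success', 'project', 'start', 'finish', 'remote_fetched'])
    _FetchResult = collections.namedtuple(
        '_FetchResult', ['success', 'projects'])
    _FetchMainResult = collections.namedtuple(
        '_FetchMainResult', ['all_projects'])
    _CheckoutOneResult = collections.namedtuple(
        '_CheckoutOneResult', ['success', 'project', 'start', 'finish'])
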
diff --git a/subcmds/upload.py b/subcmds/upload.py
index d341458..0ad3ce2 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -278,8 +278,9 @@
     script = []
     script.append('# Uncomment the branches to upload:')
     for project, avail in pending:
+      project_path = project.RelPath(local=opt.this_manifest_only)
       script.append('#')
-      script.append('# project %s/:' % project.RelPath(local=opt.this_manifest_only))
+      script.append(f'# project {project_path}/:')
 
       b = {}
       for branch in avail:
@@ -302,8 +303,8 @@
           script.append('#         %s' % commit)
         b[name] = branch
 
-      projects[project.RelPath(local=opt.this_manifest_only)] = project
-      branches[project.name] = b
+      projects[project_path] = project
+      branches[project_path] = b
     script.append('')
 
     script = Editor.EditString("\n".join(script)).split("\n")
@@ -328,9 +329,10 @@
         name = m.group(1)
         if not project:
           _die('project for branch %s not in script', name)
-        branch = branches[project.name].get(name)
+        project_path = project.RelPath(local=opt.this_manifest_only)
+        branch = branches[project_path].get(name)
         if not branch:
-          _die('branch %s not in %s', name, project.RelPath(local=opt.this_manifest_only))
+          _die('branch %s not in %s', name, project_path)
         todo.append(branch)
     if not todo:
       _die("nothing uncommented for upload")
diff --git a/tests/test_git_command.py b/tests/test_git_command.py
index 93300a6..96408a2 100644
--- a/tests/test_git_command.py
+++ b/tests/test_git_command.py
@@ -15,6 +15,7 @@
 """Unittests for the git_command.py module."""
 
 import re
+import os
 import unittest
 
 try:
@@ -26,6 +27,38 @@
 import wrapper
 
 
+class GitCommandTest(unittest.TestCase):
+  """Tests the GitCommand class (via git_command.git)."""
+
+  def setUp(self):
+
+    def realpath_mock(val):
+      return val
+
+    mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start()
+
+  def tearDown(self):
+    mock.patch.stopall()
+
+  def test_alternative_setting_when_matching(self):
+    r = git_command._build_env(
+      objdir = os.path.join('zap', 'objects'),
+      gitdir = 'zap'
+    )
+
+    self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
+
+  def test_alternative_setting_when_different(self):
+    r = git_command._build_env(
+      objdir = os.path.join('wow', 'objects'),
+      gitdir = 'zap'
+    )
+
+    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
+
+
 class GitCallUnitTest(unittest.TestCase):
   """Tests the _GitCall class (via git_command.git)."""
 
@@ -84,7 +117,8 @@
   """Test the git_require helper."""
 
   def setUp(self):
-    ver = wrapper.GitVersion(1, 2, 3, 4)
+    self.wrapper = wrapper.Wrapper()
+    ver = self.wrapper.GitVersion(1, 2, 3, 4)
     mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
 
   def tearDown(self):
diff --git a/tests/test_git_config.py b/tests/test_git_config.py
index a4fad9e..63c148f 100644
--- a/tests/test_git_config.py
+++ b/tests/test_git_config.py
@@ -19,6 +19,7 @@
 import unittest
 
 import git_config
+import repo_trace
 
 
 def fixture(*paths):
@@ -33,9 +34,16 @@
   def setUp(self):
     """Create a GitConfig object using the test.gitconfig fixture.
     """
+
+    self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
+    repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')
+
     config_fixture = fixture('test.gitconfig')
     self.config = git_config.GitConfig(config_fixture)
 
+  def tearDown(self):
+    self.tempdirobj.cleanup()
+
   def test_GetString_with_empty_config_values(self):
     """
     Test config entries with no value.
@@ -109,9 +117,15 @@
   """Read/write tests of the GitConfig class."""
 
   def setUp(self):
+    self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
+    repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')
+
     self.tmpfile = tempfile.NamedTemporaryFile()
     self.config = self.get_config()
 
+  def tearDown(self):
+    self.tempdirobj.cleanup()
+
   def get_config(self):
     """Get a new GitConfig instance."""
     return git_config.GitConfig(self.tmpfile.name)
@@ -186,7 +200,3 @@
     for key, value in TESTS:
       self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
     self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tests/test_git_superproject.py b/tests/test_git_superproject.py
index 0ad9b01..1425f4c 100644
--- a/tests/test_git_superproject.py
+++ b/tests/test_git_superproject.py
@@ -24,6 +24,7 @@
 import git_superproject
 import git_trace2_event_log
 import manifest_xml
+import repo_trace
 from test_manifest_xml import sort_attributes
 
 
@@ -39,6 +40,7 @@
     """Set up superproject every time."""
     self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
     self.tempdir = self.tempdirobj.name
+    repo_trace._TRACE_FILE = os.path.join(self.tempdir, 'TRACE_FILE_from_test')
     self.repodir = os.path.join(self.tempdir, '.repo')
     self.manifest_file = os.path.join(
         self.repodir, manifest_xml.MANIFEST_FILE_NAME)
@@ -364,7 +366,3 @@
               'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
               '<superproject name="superproject"/>'
               '</manifest>')
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tests/test_git_trace2_event_log.py b/tests/test_git_trace2_event_log.py
index 0623d32..7e7dfb7 100644
--- a/tests/test_git_trace2_event_log.py
+++ b/tests/test_git_trace2_event_log.py
@@ -369,7 +369,7 @@
         server_thread.start()
 
         with server_ready:
-          server_ready.wait()
+          server_ready.wait(timeout=120)
 
         self._event_log_module.StartEvent()
         path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
@@ -385,7 +385,3 @@
     # Check for 'start' event specific fields.
     self.assertIn('argv', start_event)
     self.assertIsInstance(start_event['argv'], list)
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tests/test_manifest_xml.py b/tests/test_manifest_xml.py
index 48403c0..c7e814a 100644
--- a/tests/test_manifest_xml.py
+++ b/tests/test_manifest_xml.py
@@ -23,6 +23,7 @@
 
 import error
 import manifest_xml
+import repo_trace
 
 
 # Invalid paths that we don't want in the filesystem.
@@ -93,6 +94,7 @@
   def setUp(self):
     self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
     self.tempdir = self.tempdirobj.name
+    repo_trace._TRACE_FILE = os.path.join(self.tempdir, 'TRACE_FILE_from_test')
     self.repodir = os.path.join(self.tempdir, '.repo')
     self.manifest_dir = os.path.join(self.repodir, 'manifests')
     self.manifest_file = os.path.join(
@@ -115,7 +117,7 @@
 
   def getXmlManifest(self, data):
     """Helper to initialize a manifest for testing."""
-    with open(self.manifest_file, 'w') as fp:
+    with open(self.manifest_file, 'w', encoding="utf-8") as fp:
       fp.write(data)
     return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
 
@@ -262,10 +264,10 @@
         '<project name="r" groups="keep"/>'
         '</manifest>')
     self.assertEqual(
-        manifest.ToXml(omit_local=True).toxml(),
+        sort_attributes(manifest.ToXml(omit_local=True).toxml()),
         '<?xml version="1.0" ?><manifest>'
-        '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
-        '<project name="q"/><project name="r" groups="keep"/></manifest>')
+        '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+        '<project name="q"/><project groups="keep" name="r"/></manifest>')
 
   def test_toxml_with_local(self):
     """Does include local_manifests projects when omit_local=False."""
@@ -277,11 +279,11 @@
         '<project name="r" groups="keep"/>'
         '</manifest>')
     self.assertEqual(
-        manifest.ToXml(omit_local=False).toxml(),
+        sort_attributes(manifest.ToXml(omit_local=False).toxml()),
         '<?xml version="1.0" ?><manifest>'
-        '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
-        '<project name="p" groups="local::me"/>'
-        '<project name="q"/><project name="r" groups="keep"/></manifest>')
+        '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+        '<project groups="local::me" name="p"/>'
+        '<project name="q"/><project groups="keep" name="r"/></manifest>')
 
   def test_repo_hooks(self):
     """Check repo-hooks settings."""
@@ -426,7 +428,7 @@
     def parse(name):
       name = self.encodeXmlAttr(name)
       # Setup target of the include.
-      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w') as fp:
+      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp:
         fp.write(f'<manifest><include name="{name}"/></manifest>')
 
       manifest = self.getXmlManifest("""
@@ -517,22 +519,22 @@
 """)
 
     manifest = parse('a/path/', 'foo')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo/')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo//////')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
   def test_toplevel_path(self):
     """Check handling of path=. specially."""
@@ -549,8 +551,8 @@
 
     for path in ('.', './', './/', './//'):
       manifest = parse('server/path', path)
-      self.assertEqual(manifest.projects[0].gitdir,
-                       os.path.join(self.tempdir, '.repo/projects/..git'))
+      self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                       os.path.join(self.tempdir, '.repo', 'projects', '..git'))
 
   def test_bad_path_name_checks(self):
     """Check handling of bad path & name attributes."""
@@ -576,7 +578,7 @@
       parse('', 'ok')
 
     for path in INVALID_FS_PATHS:
-      if not path or path.endswith('/'):
+      if not path or path.endswith('/') or path.endswith(os.path.sep):
         continue
 
       with self.assertRaises(error.ManifestInvalidPathError):
@@ -874,3 +876,27 @@
     else:
       self.assertEqual(manifest.projects[0].relpath, 'bar')
       self.assertEqual(manifest.projects[1].relpath, 'y')
+
+  def test_extend_project_dest_branch(self):
+    manifest = self.getXmlManifest("""
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" dest-branch="foo" />
+  <project name="myproject" />
+  <extend-project name="myproject" dest-branch="bar" />
+</manifest>
+""")
+    self.assertEqual(len(manifest.projects), 1)
+    self.assertEqual(manifest.projects[0].dest_branch, 'bar')
+
+  def test_extend_project_upstream(self):
+    manifest = self.getXmlManifest("""
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="myproject" />
+  <extend-project name="myproject" upstream="bar" />
+</manifest>
+""")
+    self.assertEqual(len(manifest.projects), 1)
+    self.assertEqual(manifest.projects[0].upstream, 'bar')
diff --git a/tests/test_project.py b/tests/test_project.py
index acd44cc..7ab4498 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -22,10 +22,12 @@
 import unittest
 
 import error
+import manifest_xml
 import git_command
 import git_config
 import platform_utils
 import project
+import repo_trace
 
 
 @contextlib.contextmanager
@@ -64,6 +66,13 @@
 class ReviewableBranchTests(unittest.TestCase):
   """Check ReviewableBranch behavior."""
 
+  def setUp(self):
+    self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
+    repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')
+
+  def tearDown(self):
+    self.tempdirobj.cleanup()
+
   def test_smoke(self):
     """A quick run through everything."""
     with TempGitTree() as tempdir:
@@ -376,7 +385,7 @@
 
       # Make sure the dir was transformed into a symlink.
       self.assertTrue(dotgit.is_symlink())
-      self.assertEqual(os.readlink(dotgit), '../../.repo/projects/src/test.git')
+      self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))
 
       # Make sure files were moved over.
       gitdir = tempdir / '.repo/projects/src/test.git'
@@ -403,3 +412,81 @@
         self.assertTrue((dotgit / name).is_file())
       for name in self._SYMLINKS:
         self.assertTrue((dotgit / name).is_symlink())
+
+
+class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
+  """Ensure properties are fetched properly."""
+
+  def setUpManifest(self, tempdir):
+    repo_trace._TRACE_FILE = os.path.join(tempdir, 'TRACE_FILE_from_test')
+
+    repodir = os.path.join(tempdir, '.repo')
+    manifest_dir = os.path.join(repodir, 'manifests')
+    manifest_file = os.path.join(
+        repodir, manifest_xml.MANIFEST_FILE_NAME)
+    local_manifest_dir = os.path.join(
+        repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
+    os.mkdir(repodir)
+    os.mkdir(manifest_dir)
+    manifest = manifest_xml.XmlManifest(repodir, manifest_file)
+
+    return project.ManifestProject(
+        manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)
+
+  def test_manifest_config_properties(self):
+    """Test we are fetching the manifest config properties correctly."""
+
+    with TempGitTree() as tempdir:
+      fakeproj = self.setUpManifest(tempdir)
+
+      # Set each property using the expected Set method, then ensure
+      # the property functions are using the correct Get methods.
+      fakeproj.config.SetString(
+          'manifest.standalone', 'https://chicken/manifest.git')
+      self.assertEqual(
+          fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')
+
+      fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
+      self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')
+
+      fakeproj.config.SetString('repo.reference', 'mirror/ref')
+      self.assertEqual(fakeproj.reference, 'mirror/ref')
+
+      fakeproj.config.SetBoolean('repo.dissociate', False)
+      self.assertFalse(fakeproj.dissociate)
+
+      fakeproj.config.SetBoolean('repo.archive', False)
+      self.assertFalse(fakeproj.archive)
+
+      fakeproj.config.SetBoolean('repo.mirror', False)
+      self.assertFalse(fakeproj.mirror)
+
+      fakeproj.config.SetBoolean('repo.worktree', False)
+      self.assertFalse(fakeproj.use_worktree)
+
+      fakeproj.config.SetBoolean('repo.clonebundle', False)
+      self.assertFalse(fakeproj.clone_bundle)
+
+      fakeproj.config.SetBoolean('repo.submodules', False)
+      self.assertFalse(fakeproj.submodules)
+
+      fakeproj.config.SetBoolean('repo.git-lfs', False)
+      self.assertFalse(fakeproj.git_lfs)
+
+      fakeproj.config.SetBoolean('repo.superproject', False)
+      self.assertFalse(fakeproj.use_superproject)
+
+      fakeproj.config.SetBoolean('repo.partialclone', False)
+      self.assertFalse(fakeproj.partial_clone)
+
+      fakeproj.config.SetString('repo.depth', '48')
+      self.assertEqual(fakeproj.depth, '48')
+
+      fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
+      self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')
+
+      fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
+      self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')
+
+      fakeproj.config.SetString('manifest.platform', 'auto')
+      self.assertEqual(fakeproj.manifest_platform, 'auto')
diff --git a/tests/test_subcmds_sync.py b/tests/test_subcmds_sync.py
index aad713f..236d54e 100644
--- a/tests/test_subcmds_sync.py
+++ b/tests/test_subcmds_sync.py
@@ -11,27 +11,26 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """Unittests for the subcmds/sync.py module."""
 
+import os
+import unittest
 from unittest import mock
 
 import pytest
 
+import command
 from subcmds import sync
 
 
-@pytest.mark.parametrize(
-  'use_superproject, cli_args, result',
-  [
+@pytest.mark.parametrize('use_superproject, cli_args, result', [
     (True, ['--current-branch'], True),
     (True, ['--no-current-branch'], True),
     (True, [], True),
     (False, ['--current-branch'], True),
     (False, ['--no-current-branch'], False),
     (False, [], None),
-  ]
-)
+])
 def test_get_current_branch_only(use_superproject, cli_args, result):
   """Test Sync._GetCurrentBranchOnly logic.
 
@@ -41,5 +40,94 @@
   cmd = sync.Sync()
   opts, _ = cmd.OptionParser.parse_args(cli_args)
 
-  with mock.patch('git_superproject.UseSuperproject', return_value=use_superproject):
+  with mock.patch('git_superproject.UseSuperproject',
+                  return_value=use_superproject):
     assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
+
+
+# Used to patch os.cpu_count() for reliable results.
+OS_CPU_COUNT = 24
+
+@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
+  # No user or manifest settings.
+  ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
+  # No user settings, so manifest settings control.
+  ([], 3, 3, 3, 3),
+  # User settings, but no manifest.
+  (['--jobs=4'], None, 4, 4, 4),
+  (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
+  (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
+  (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
+  (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
+  (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
+  (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
+  # User settings with manifest settings.
+  (['--jobs=4'], 3, 4, 4, 4),
+  (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
+  (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
+  (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
+  (['--jobs-network=5'], 3, 3, 5, 3),
+  (['--jobs-checkout=6'], 3, 3, 3, 6),
+  (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
+  # Settings that exceed rlimits get capped.
+  (['--jobs=1000000'], None, 83, 83, 83),
+  ([], 1000000, 83, 83, 83),
+])
+def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
+  """Tests --jobs option behavior."""
+  mp = mock.MagicMock()
+  mp.manifest.default.sync_j = jobs_manifest
+
+  cmd = sync.Sync()
+  opts, args = cmd.OptionParser.parse_args(argv)
+  cmd.ValidateOptions(opts, args)
+
+  with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
+    with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
+      cmd._ValidateOptionsWithManifest(opts, mp)
+      assert opts.jobs == jobs
+      assert opts.jobs_network == jobs_net
+      assert opts.jobs_checkout == jobs_check
+
+
+class GetPreciousObjectsState(unittest.TestCase):
+  """Tests for _GetPreciousObjectsState."""
+
+  def setUp(self):
+    """Common setup."""
+    self.cmd = sync.Sync()
+    self.project = p = mock.MagicMock(use_git_worktrees=False,
+                                      UseAlternates=False)
+    p.manifest.GetProjectsWithName.return_value = [p]
+
+    self.opt = mock.Mock(spec_set=['this_manifest_only'])
+    self.opt.this_manifest_only = False
+
+  def test_worktrees(self):
+    """False for worktrees."""
+    self.project.use_git_worktrees = True
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_not_shared(self):
+    """Singleton project."""
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_shared(self):
+    """Shared project."""
+    self.project.manifest.GetProjectsWithName.return_value = [
+        self.project, self.project
+    ]
+    self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_shared_with_alternates(self):
+    """Shared project, with alternates."""
+    self.project.manifest.GetProjectsWithName.return_value = [
+        self.project, self.project
+    ]
+    self.project.UseAlternates = True
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_not_found(self):
+    """Project not found in manifest."""
+    self.project.manifest.GetProjectsWithName.return_value = []
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
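
The 83 in the rlimit-capped cases above is not arbitrary: with _rlimit_nofile
mocked to a soft limit of 256 descriptors, the jobs_soft_limit formula from
_ValidateOptionsWithManifest works out to

    >>> max(1, (256 - 5) // 3)
    83
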
diff --git a/tests/test_update_manpages.py b/tests/test_update_manpages.py
new file mode 100644
index 0000000..f0ef72a
--- /dev/null
+++ b/tests/test_update_manpages.py
@@ -0,0 +1,27 @@
+# Copyright 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for the update_manpages module."""
+
+import unittest
+import tests.update_manpages as um
+
+
+class UpdateManpagesTest(unittest.TestCase):
+  """Tests the update-manpages code."""
+
+  def test_replace_regex(self):
+    """Check that replace_regex works."""
+    data = '\n\033[1mSummary\033[m\n'
+    self.assertEqual(um.replace_regex(data), '\nSummary\n')
diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py
index 8447bec..2a0e542 100644
--- a/tests/test_wrapper.py
+++ b/tests/test_wrapper.py
@@ -59,12 +59,12 @@
   def test_python_constraints(self):
     """The launcher should never require newer than main.py."""
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_SOFT)
+                            self.wrapper.MIN_PYTHON_VERSION_SOFT)
     # Make sure the versions are themselves in sync.
-    self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+    self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT,
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
 
   def test_init_parser(self):
     """Make sure 'init' GetParser works."""
@@ -159,7 +159,9 @@
   def test_capture(self):
     """Check capture_output handling."""
     ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
-    self.assertEqual(ret.stdout, 'hi\n')
+    # The echo command appends the OS-specific line separator, but on
+    # Windows + Git Bash we get the UNIX ending, so allow both.
+    self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n'])
 
   def test_check(self):
     """Check check handling."""
@@ -456,7 +458,7 @@
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')
 
   def test_explicit_tag(self):
@@ -465,7 +467,7 @@
     self.assertEqual('refs/tags/v1.0', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')
 
   def test_branch_name(self):
@@ -500,7 +502,7 @@
 
   def test_unknown(self):
     """Check unknown ref/commit argument."""
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')
 
 
@@ -551,7 +553,3 @@
       rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tests/update_manpages.py b/tests/update_manpages.py
new file mode 120000
index 0000000..e89c5d8
--- /dev/null
+++ b/tests/update_manpages.py
@@ -0,0 +1 @@
+../release/update-manpages
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index aa4e297..9a8b3fc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -15,7 +15,7 @@
 # https://tox.readthedocs.io/
 
 [tox]
-envlist = py36, py37, py38, py39
+envlist = py36, py37, py38, py39, py310
 
 [gh-actions]
 python =
@@ -23,11 +23,17 @@
     3.7: py37
     3.8: py38
     3.9: py39
+    3.10: py310
 
 [testenv]
-deps = pytest
-commands = {envpython} run_tests
+deps =
+    pytest
+    pytest-timeout
+commands = {envpython} run_tests {posargs}
 setenv =
     GIT_AUTHOR_NAME = Repo test author
     GIT_COMMITTER_NAME = Repo test committer
     EMAIL = repo@gerrit.nodomain
+
+[pytest]
+timeout = 300
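
With pytest-timeout added to the test dependencies, the [pytest] timeout = 300
setting caps every test at five minutes.  Individual tests can still override
that cap with the marker the plugin provides, for example:

    import pytest

    @pytest.mark.timeout(120)  # tighter limit for this test only
    def test_something_slow():
      ...
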
diff --git a/wrapper.py b/wrapper.py
index b1aa4c5..65dcf3c 100644
--- a/wrapper.py
+++ b/wrapper.py
@@ -12,12 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-try:
-  from importlib.machinery import SourceFileLoader
-  _loader = lambda *args: SourceFileLoader(*args).load_module()
-except ImportError:
-  import imp
-  _loader = lambda *args: imp.load_source(*args)
+import importlib.machinery
+import importlib.util
 import os
 
 
@@ -31,5 +27,9 @@
 def Wrapper():
   global _wrapper_module
   if not _wrapper_module:
-    _wrapper_module = _loader('wrapper', WrapperPath())
+    modname = 'wrapper'
+    loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
+    spec = importlib.util.spec_from_loader(modname, loader)
+    _wrapper_module = importlib.util.module_from_spec(spec)
+    loader.exec_module(_wrapper_module)
   return _wrapper_module
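
The wrapper change above moves from the deprecated Loader.load_module() API to
the spec/exec_module flow.  A standalone sketch of the same pattern, with a
placeholder helper name rather than anything from the repo source:

    import importlib.machinery
    import importlib.util

    def load_module_from_path(modname, path):
      # Build a module object from a source file and execute it, as Wrapper()
      # now does for the repo launcher script.
      loader = importlib.machinery.SourceFileLoader(modname, path)
      spec = importlib.util.spec_from_loader(modname, loader)
      module = importlib.util.module_from_spec(spec)
      loader.exec_module(module)
      return module
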