Format the codebase with black and check formatting in CQ
Apply the rules set by https://gerrit-review.googlesource.com/c/git-repo/+/362954/ across the codebase, and fix any lingering errors
caught by flake8. Also check black formatting in run_tests (and CQ).
Bug: b/267675342
Change-Id: I972d77649dac351150dcfeb1cd1ad0ea2efc1956
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/363474
Reviewed-by: Mike Frysinger <vapier@google.com>
Tested-by: Gavin Mak <gavinmak@google.com>
Commit-Queue: Gavin Mak <gavinmak@google.com>
diff --git a/.flake8 b/.flake8
index 82453b5..dd7f4d3 100644
--- a/.flake8
+++ b/.flake8
@@ -1,5 +1,8 @@
[flake8]
max-line-length = 80
+per-file-ignores =
+ # E501: line too long
+ tests/test_git_superproject.py: E501
extend-ignore =
# E203: Whitespace before ':'
# See https://github.com/PyCQA/pycodestyle/issues/373
diff --git a/color.py b/color.py
index fdd7253..8f29b59 100644
--- a/color.py
+++ b/color.py
@@ -17,196 +17,200 @@
import pager
-COLORS = {None: -1,
- 'normal': -1,
- 'black': 0,
- 'red': 1,
- 'green': 2,
- 'yellow': 3,
- 'blue': 4,
- 'magenta': 5,
- 'cyan': 6,
- 'white': 7}
+COLORS = {
+ None: -1,
+ "normal": -1,
+ "black": 0,
+ "red": 1,
+ "green": 2,
+ "yellow": 3,
+ "blue": 4,
+ "magenta": 5,
+ "cyan": 6,
+ "white": 7,
+}
-ATTRS = {None: -1,
- 'bold': 1,
- 'dim': 2,
- 'ul': 4,
- 'blink': 5,
- 'reverse': 7}
+ATTRS = {None: -1, "bold": 1, "dim": 2, "ul": 4, "blink": 5, "reverse": 7}
RESET = "\033[m"
def is_color(s):
- return s in COLORS
+ return s in COLORS
def is_attr(s):
- return s in ATTRS
+ return s in ATTRS
def _Color(fg=None, bg=None, attr=None):
- fg = COLORS[fg]
- bg = COLORS[bg]
- attr = ATTRS[attr]
+ fg = COLORS[fg]
+ bg = COLORS[bg]
+ attr = ATTRS[attr]
- if attr >= 0 or fg >= 0 or bg >= 0:
- need_sep = False
- code = "\033["
+ if attr >= 0 or fg >= 0 or bg >= 0:
+ need_sep = False
+ code = "\033["
- if attr >= 0:
- code += chr(ord('0') + attr)
- need_sep = True
+ if attr >= 0:
+ code += chr(ord("0") + attr)
+ need_sep = True
- if fg >= 0:
- if need_sep:
- code += ';'
- need_sep = True
+ if fg >= 0:
+ if need_sep:
+ code += ";"
+ need_sep = True
- if fg < 8:
- code += '3%c' % (ord('0') + fg)
- else:
- code += '38;5;%d' % fg
+ if fg < 8:
+ code += "3%c" % (ord("0") + fg)
+ else:
+ code += "38;5;%d" % fg
- if bg >= 0:
- if need_sep:
- code += ';'
+ if bg >= 0:
+ if need_sep:
+ code += ";"
- if bg < 8:
- code += '4%c' % (ord('0') + bg)
- else:
- code += '48;5;%d' % bg
- code += 'm'
- else:
- code = ''
- return code
+ if bg < 8:
+ code += "4%c" % (ord("0") + bg)
+ else:
+ code += "48;5;%d" % bg
+ code += "m"
+ else:
+ code = ""
+ return code
DEFAULT = None
def SetDefaultColoring(state):
- """Set coloring behavior to |state|.
+ """Set coloring behavior to |state|.
- This is useful for overriding config options via the command line.
- """
- if state is None:
- # Leave it alone -- return quick!
- return
+ This is useful for overriding config options via the command line.
+ """
+ if state is None:
+ # Leave it alone -- return quick!
+ return
- global DEFAULT
- state = state.lower()
- if state in ('auto',):
- DEFAULT = state
- elif state in ('always', 'yes', 'true', True):
- DEFAULT = 'always'
- elif state in ('never', 'no', 'false', False):
- DEFAULT = 'never'
+ global DEFAULT
+ state = state.lower()
+ if state in ("auto",):
+ DEFAULT = state
+ elif state in ("always", "yes", "true", True):
+ DEFAULT = "always"
+ elif state in ("never", "no", "false", False):
+ DEFAULT = "never"
class Coloring(object):
- def __init__(self, config, section_type):
- self._section = 'color.%s' % section_type
- self._config = config
- self._out = sys.stdout
+ def __init__(self, config, section_type):
+ self._section = "color.%s" % section_type
+ self._config = config
+ self._out = sys.stdout
- on = DEFAULT
- if on is None:
- on = self._config.GetString(self._section)
- if on is None:
- on = self._config.GetString('color.ui')
+ on = DEFAULT
+ if on is None:
+ on = self._config.GetString(self._section)
+ if on is None:
+ on = self._config.GetString("color.ui")
- if on == 'auto':
- if pager.active or os.isatty(1):
- self._on = True
- else:
- self._on = False
- elif on in ('true', 'always'):
- self._on = True
- else:
- self._on = False
-
- def redirect(self, out):
- self._out = out
-
- @property
- def is_on(self):
- return self._on
-
- def write(self, fmt, *args):
- self._out.write(fmt % args)
-
- def flush(self):
- self._out.flush()
-
- def nl(self):
- self._out.write('\n')
-
- def printer(self, opt=None, fg=None, bg=None, attr=None):
- s = self
- c = self.colorer(opt, fg, bg, attr)
-
- def f(fmt, *args):
- s._out.write(c(fmt, *args))
- return f
-
- def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
- s = self
- c = self.nofmt_colorer(opt, fg, bg, attr)
-
- def f(fmt):
- s._out.write(c(fmt))
- return f
-
- def colorer(self, opt=None, fg=None, bg=None, attr=None):
- if self._on:
- c = self._parse(opt, fg, bg, attr)
-
- def f(fmt, *args):
- output = fmt % args
- return ''.join([c, output, RESET])
- return f
- else:
-
- def f(fmt, *args):
- return fmt % args
- return f
-
- def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
- if self._on:
- c = self._parse(opt, fg, bg, attr)
-
- def f(fmt):
- return ''.join([c, fmt, RESET])
- return f
- else:
- def f(fmt):
- return fmt
- return f
-
- def _parse(self, opt, fg, bg, attr):
- if not opt:
- return _Color(fg, bg, attr)
-
- v = self._config.GetString('%s.%s' % (self._section, opt))
- if v is None:
- return _Color(fg, bg, attr)
-
- v = v.strip().lower()
- if v == "reset":
- return RESET
- elif v == '':
- return _Color(fg, bg, attr)
-
- have_fg = False
- for a in v.split(' '):
- if is_color(a):
- if have_fg:
- bg = a
+ if on == "auto":
+ if pager.active or os.isatty(1):
+ self._on = True
+ else:
+ self._on = False
+ elif on in ("true", "always"):
+ self._on = True
else:
- fg = a
- elif is_attr(a):
- attr = a
+ self._on = False
- return _Color(fg, bg, attr)
+ def redirect(self, out):
+ self._out = out
+
+ @property
+ def is_on(self):
+ return self._on
+
+ def write(self, fmt, *args):
+ self._out.write(fmt % args)
+
+ def flush(self):
+ self._out.flush()
+
+ def nl(self):
+ self._out.write("\n")
+
+ def printer(self, opt=None, fg=None, bg=None, attr=None):
+ s = self
+ c = self.colorer(opt, fg, bg, attr)
+
+ def f(fmt, *args):
+ s._out.write(c(fmt, *args))
+
+ return f
+
+ def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
+ s = self
+ c = self.nofmt_colorer(opt, fg, bg, attr)
+
+ def f(fmt):
+ s._out.write(c(fmt))
+
+ return f
+
+ def colorer(self, opt=None, fg=None, bg=None, attr=None):
+ if self._on:
+ c = self._parse(opt, fg, bg, attr)
+
+ def f(fmt, *args):
+ output = fmt % args
+ return "".join([c, output, RESET])
+
+ return f
+ else:
+
+ def f(fmt, *args):
+ return fmt % args
+
+ return f
+
+ def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
+ if self._on:
+ c = self._parse(opt, fg, bg, attr)
+
+ def f(fmt):
+ return "".join([c, fmt, RESET])
+
+ return f
+ else:
+
+ def f(fmt):
+ return fmt
+
+ return f
+
+ def _parse(self, opt, fg, bg, attr):
+ if not opt:
+ return _Color(fg, bg, attr)
+
+ v = self._config.GetString("%s.%s" % (self._section, opt))
+ if v is None:
+ return _Color(fg, bg, attr)
+
+ v = v.strip().lower()
+ if v == "reset":
+ return RESET
+ elif v == "":
+ return _Color(fg, bg, attr)
+
+ have_fg = False
+ for a in v.split(" "):
+ if is_color(a):
+ if have_fg:
+ bg = a
+ else:
+ fg = a
+ elif is_attr(a):
+ attr = a
+
+ return _Color(fg, bg, attr)
diff --git a/command.py b/command.py
index 68f36f0..939a463 100644
--- a/command.py
+++ b/command.py
@@ -25,7 +25,7 @@
# Are we generating man-pages?
-GENERATE_MANPAGES = os.environ.get('_REPO_GENERATE_MANPAGES_') == ' indeed! '
+GENERATE_MANPAGES = os.environ.get("_REPO_GENERATE_MANPAGES_") == " indeed! "
# Number of projects to submit to a single worker process at a time.
@@ -43,403 +43,470 @@
class Command(object):
- """Base class for any command line action in repo.
- """
+ """Base class for any command line action in repo."""
- # Singleton for all commands to track overall repo command execution and
- # provide event summary to callers. Only used by sync subcommand currently.
- #
- # NB: This is being replaced by git trace2 events. See git_trace2_event_log.
- event_log = EventLog()
+ # Singleton for all commands to track overall repo command execution and
+ # provide event summary to callers. Only used by sync subcommand currently.
+ #
+ # NB: This is being replaced by git trace2 events. See git_trace2_event_log.
+ event_log = EventLog()
- # Whether this command is a "common" one, i.e. whether the user would commonly
- # use it or it's a more uncommon command. This is used by the help command to
- # show short-vs-full summaries.
- COMMON = False
+ # Whether this command is a "common" one, i.e. whether the user would
+ # commonly use it or it's a more uncommon command. This is used by the help
+ # command to show short-vs-full summaries.
+ COMMON = False
- # Whether this command supports running in parallel. If greater than 0,
- # it is the number of parallel jobs to default to.
- PARALLEL_JOBS = None
+ # Whether this command supports running in parallel. If greater than 0,
+ # it is the number of parallel jobs to default to.
+ PARALLEL_JOBS = None
- # Whether this command supports Multi-manifest. If False, then main.py will
- # iterate over the manifests and invoke the command once per (sub)manifest.
- # This is only checked after calling ValidateOptions, so that partially
- # migrated subcommands can set it to False.
- MULTI_MANIFEST_SUPPORT = True
+ # Whether this command supports Multi-manifest. If False, then main.py will
+ # iterate over the manifests and invoke the command once per (sub)manifest.
+ # This is only checked after calling ValidateOptions, so that partially
+ # migrated subcommands can set it to False.
+ MULTI_MANIFEST_SUPPORT = True
- def __init__(self, repodir=None, client=None, manifest=None, gitc_manifest=None,
- git_event_log=None, outer_client=None, outer_manifest=None):
- self.repodir = repodir
- self.client = client
- self.outer_client = outer_client or client
- self.manifest = manifest
- self.gitc_manifest = gitc_manifest
- self.git_event_log = git_event_log
- self.outer_manifest = outer_manifest
+ def __init__(
+ self,
+ repodir=None,
+ client=None,
+ manifest=None,
+ gitc_manifest=None,
+ git_event_log=None,
+ outer_client=None,
+ outer_manifest=None,
+ ):
+ self.repodir = repodir
+ self.client = client
+ self.outer_client = outer_client or client
+ self.manifest = manifest
+ self.gitc_manifest = gitc_manifest
+ self.git_event_log = git_event_log
+ self.outer_manifest = outer_manifest
- # Cache for the OptionParser property.
- self._optparse = None
+ # Cache for the OptionParser property.
+ self._optparse = None
- def WantPager(self, _opt):
- return False
+ def WantPager(self, _opt):
+ return False
- def ReadEnvironmentOptions(self, opts):
- """ Set options from environment variables. """
+ def ReadEnvironmentOptions(self, opts):
+ """Set options from environment variables."""
- env_options = self._RegisteredEnvironmentOptions()
+ env_options = self._RegisteredEnvironmentOptions()
- for env_key, opt_key in env_options.items():
- # Get the user-set option value if any
- opt_value = getattr(opts, opt_key)
+ for env_key, opt_key in env_options.items():
+ # Get the user-set option value if any
+ opt_value = getattr(opts, opt_key)
- # If the value is set, it means the user has passed it as a command
- # line option, and we should use that. Otherwise we can try to set it
- # with the value from the corresponding environment variable.
- if opt_value is not None:
- continue
+ # If the value is set, it means the user has passed it as a command
+ # line option, and we should use that. Otherwise we can try to set
+ # it with the value from the corresponding environment variable.
+ if opt_value is not None:
+ continue
- env_value = os.environ.get(env_key)
- if env_value is not None:
- setattr(opts, opt_key, env_value)
+ env_value = os.environ.get(env_key)
+ if env_value is not None:
+ setattr(opts, opt_key, env_value)
- return opts
+ return opts
- @property
- def OptionParser(self):
- if self._optparse is None:
- try:
- me = 'repo %s' % self.NAME
- usage = self.helpUsage.strip().replace('%prog', me)
- except AttributeError:
- usage = 'repo %s' % self.NAME
- epilog = 'Run `repo help %s` to view the detailed manual.' % self.NAME
- self._optparse = optparse.OptionParser(usage=usage, epilog=epilog)
- self._CommonOptions(self._optparse)
- self._Options(self._optparse)
- return self._optparse
+ @property
+ def OptionParser(self):
+ if self._optparse is None:
+ try:
+ me = "repo %s" % self.NAME
+ usage = self.helpUsage.strip().replace("%prog", me)
+ except AttributeError:
+ usage = "repo %s" % self.NAME
+ epilog = (
+ "Run `repo help %s` to view the detailed manual." % self.NAME
+ )
+ self._optparse = optparse.OptionParser(usage=usage, epilog=epilog)
+ self._CommonOptions(self._optparse)
+ self._Options(self._optparse)
+ return self._optparse
- def _CommonOptions(self, p, opt_v=True):
- """Initialize the option parser with common options.
+ def _CommonOptions(self, p, opt_v=True):
+ """Initialize the option parser with common options.
- These will show up for *all* subcommands, so use sparingly.
- NB: Keep in sync with repo:InitParser().
- """
- g = p.add_option_group('Logging options')
- opts = ['-v'] if opt_v else []
- g.add_option(*opts, '--verbose',
- dest='output_mode', action='store_true',
- help='show all output')
- g.add_option('-q', '--quiet',
- dest='output_mode', action='store_false',
- help='only show errors')
+ These will show up for *all* subcommands, so use sparingly.
+ NB: Keep in sync with repo:InitParser().
+ """
+ g = p.add_option_group("Logging options")
+ opts = ["-v"] if opt_v else []
+ g.add_option(
+ *opts,
+ "--verbose",
+ dest="output_mode",
+ action="store_true",
+ help="show all output",
+ )
+ g.add_option(
+ "-q",
+ "--quiet",
+ dest="output_mode",
+ action="store_false",
+ help="only show errors",
+ )
- if self.PARALLEL_JOBS is not None:
- default = 'based on number of CPU cores'
- if not GENERATE_MANPAGES:
- # Only include active cpu count if we aren't generating man pages.
- default = f'%default; {default}'
- p.add_option(
- '-j', '--jobs',
- type=int, default=self.PARALLEL_JOBS,
- help=f'number of jobs to run in parallel (default: {default})')
+ if self.PARALLEL_JOBS is not None:
+ default = "based on number of CPU cores"
+ if not GENERATE_MANPAGES:
+ # Only include active cpu count if we aren't generating man
+ # pages.
+ default = f"%default; {default}"
+ p.add_option(
+ "-j",
+ "--jobs",
+ type=int,
+ default=self.PARALLEL_JOBS,
+ help=f"number of jobs to run in parallel (default: {default})",
+ )
- m = p.add_option_group('Multi-manifest options')
- m.add_option('--outer-manifest', action='store_true', default=None,
- help='operate starting at the outermost manifest')
- m.add_option('--no-outer-manifest', dest='outer_manifest',
- action='store_false', help='do not operate on outer manifests')
- m.add_option('--this-manifest-only', action='store_true', default=None,
- help='only operate on this (sub)manifest')
- m.add_option('--no-this-manifest-only', '--all-manifests',
- dest='this_manifest_only', action='store_false',
- help='operate on this manifest and its submanifests')
+ m = p.add_option_group("Multi-manifest options")
+ m.add_option(
+ "--outer-manifest",
+ action="store_true",
+ default=None,
+ help="operate starting at the outermost manifest",
+ )
+ m.add_option(
+ "--no-outer-manifest",
+ dest="outer_manifest",
+ action="store_false",
+ help="do not operate on outer manifests",
+ )
+ m.add_option(
+ "--this-manifest-only",
+ action="store_true",
+ default=None,
+ help="only operate on this (sub)manifest",
+ )
+ m.add_option(
+ "--no-this-manifest-only",
+ "--all-manifests",
+ dest="this_manifest_only",
+ action="store_false",
+ help="operate on this manifest and its submanifests",
+ )
- def _Options(self, p):
- """Initialize the option parser with subcommand-specific options."""
+ def _Options(self, p):
+ """Initialize the option parser with subcommand-specific options."""
- def _RegisteredEnvironmentOptions(self):
- """Get options that can be set from environment variables.
+ def _RegisteredEnvironmentOptions(self):
+ """Get options that can be set from environment variables.
- Return a dictionary mapping environment variable name
- to option key name that it can override.
+ Return a dictionary mapping environment variable name
+ to option key name that it can override.
- Example: {'REPO_MY_OPTION': 'my_option'}
+ Example: {'REPO_MY_OPTION': 'my_option'}
- Will allow the option with key value 'my_option' to be set
- from the value in the environment variable named 'REPO_MY_OPTION'.
+ Will allow the option with key value 'my_option' to be set
+ from the value in the environment variable named 'REPO_MY_OPTION'.
- Note: This does not work properly for options that are explicitly
- set to None by the user, or options that are defined with a
- default value other than None.
+ Note: This does not work properly for options that are explicitly
+ set to None by the user, or options that are defined with a
+ default value other than None.
- """
- return {}
+ """
+ return {}
- def Usage(self):
- """Display usage and terminate.
- """
- self.OptionParser.print_usage()
- sys.exit(1)
+ def Usage(self):
+ """Display usage and terminate."""
+ self.OptionParser.print_usage()
+ sys.exit(1)
- def CommonValidateOptions(self, opt, args):
- """Validate common options."""
- opt.quiet = opt.output_mode is False
- opt.verbose = opt.output_mode is True
- if opt.outer_manifest is None:
- # By default, treat multi-manifest instances as a single manifest from
- # the user's perspective.
- opt.outer_manifest = True
+ def CommonValidateOptions(self, opt, args):
+ """Validate common options."""
+ opt.quiet = opt.output_mode is False
+ opt.verbose = opt.output_mode is True
+ if opt.outer_manifest is None:
+ # By default, treat multi-manifest instances as a single manifest
+ # from the user's perspective.
+ opt.outer_manifest = True
- def ValidateOptions(self, opt, args):
- """Validate the user options & arguments before executing.
+ def ValidateOptions(self, opt, args):
+ """Validate the user options & arguments before executing.
- This is meant to help break the code up into logical steps. Some tips:
- * Use self.OptionParser.error to display CLI related errors.
- * Adjust opt member defaults as makes sense.
- * Adjust the args list, but do so inplace so the caller sees updates.
- * Try to avoid updating self state. Leave that to Execute.
- """
+ This is meant to help break the code up into logical steps. Some tips:
+ * Use self.OptionParser.error to display CLI related errors.
+ * Adjust opt member defaults as makes sense.
+ * Adjust the args list, but do so inplace so the caller sees updates.
+ * Try to avoid updating self state. Leave that to Execute.
+ """
- def Execute(self, opt, args):
- """Perform the action, after option parsing is complete.
- """
- raise NotImplementedError
+ def Execute(self, opt, args):
+ """Perform the action, after option parsing is complete."""
+ raise NotImplementedError
- @staticmethod
- def ExecuteInParallel(jobs, func, inputs, callback, output=None, ordered=False):
- """Helper for managing parallel execution boiler plate.
+ @staticmethod
+ def ExecuteInParallel(
+ jobs, func, inputs, callback, output=None, ordered=False
+ ):
+ """Helper for managing parallel execution boiler plate.
- For subcommands that can easily split their work up.
+ For subcommands that can easily split their work up.
- Args:
- jobs: How many parallel processes to use.
- func: The function to apply to each of the |inputs|. Usually a
- functools.partial for wrapping additional arguments. It will be run
- in a separate process, so it must be pickalable, so nested functions
- won't work. Methods on the subcommand Command class should work.
- inputs: The list of items to process. Must be a list.
- callback: The function to pass the results to for processing. It will be
- executed in the main thread and process the results of |func| as they
- become available. Thus it may be a local nested function. Its return
- value is passed back directly. It takes three arguments:
- - The processing pool (or None with one job).
- - The |output| argument.
- - An iterator for the results.
- output: An output manager. May be progress.Progess or color.Coloring.
- ordered: Whether the jobs should be processed in order.
+ Args:
+ jobs: How many parallel processes to use.
+ func: The function to apply to each of the |inputs|. Usually a
+ functools.partial for wrapping additional arguments. It will be
+ run in a separate process, so it must be pickalable, so nested
+ functions won't work. Methods on the subcommand Command class
+ should work.
+ inputs: The list of items to process. Must be a list.
+ callback: The function to pass the results to for processing. It
+ will be executed in the main thread and process the results of
+ |func| as they become available. Thus it may be a local nested
+ function. Its return value is passed back directly. It takes
+ three arguments:
+ - The processing pool (or None with one job).
+ - The |output| argument.
+ - An iterator for the results.
+ output: An output manager. May be progress.Progess or
+ color.Coloring.
+ ordered: Whether the jobs should be processed in order.
- Returns:
- The |callback| function's results are returned.
- """
- try:
- # NB: Multiprocessing is heavy, so don't spin it up for one job.
- if len(inputs) == 1 or jobs == 1:
- return callback(None, output, (func(x) for x in inputs))
- else:
- with multiprocessing.Pool(jobs) as pool:
- submit = pool.imap if ordered else pool.imap_unordered
- return callback(pool, output, submit(func, inputs, chunksize=WORKER_BATCH_SIZE))
- finally:
- if isinstance(output, progress.Progress):
- output.end()
-
- def _ResetPathToProjectMap(self, projects):
- self._by_path = dict((p.worktree, p) for p in projects)
-
- def _UpdatePathToProjectMap(self, project):
- self._by_path[project.worktree] = project
-
- def _GetProjectByPath(self, manifest, path):
- project = None
- if os.path.exists(path):
- oldpath = None
- while (path and
- path != oldpath and
- path != manifest.topdir):
+ Returns:
+ The |callback| function's results are returned.
+ """
try:
- project = self._by_path[path]
- break
- except KeyError:
- oldpath = path
- path = os.path.dirname(path)
- if not project and path == manifest.topdir:
- try:
- project = self._by_path[path]
- except KeyError:
- pass
- else:
- try:
- project = self._by_path[path]
- except KeyError:
- pass
- return project
+ # NB: Multiprocessing is heavy, so don't spin it up for one job.
+ if len(inputs) == 1 or jobs == 1:
+ return callback(None, output, (func(x) for x in inputs))
+ else:
+ with multiprocessing.Pool(jobs) as pool:
+ submit = pool.imap if ordered else pool.imap_unordered
+ return callback(
+ pool,
+ output,
+ submit(func, inputs, chunksize=WORKER_BATCH_SIZE),
+ )
+ finally:
+ if isinstance(output, progress.Progress):
+ output.end()
- def GetProjects(self, args, manifest=None, groups='', missing_ok=False,
- submodules_ok=False, all_manifests=False):
- """A list of projects that match the arguments.
+ def _ResetPathToProjectMap(self, projects):
+ self._by_path = dict((p.worktree, p) for p in projects)
- Args:
- args: a list of (case-insensitive) strings, projects to search for.
- manifest: an XmlManifest, the manifest to use, or None for default.
- groups: a string, the manifest groups in use.
- missing_ok: a boolean, whether to allow missing projects.
- submodules_ok: a boolean, whether to allow submodules.
- all_manifests: a boolean, if True then all manifests and submanifests are
- used. If False, then only the local (sub)manifest is used.
+ def _UpdatePathToProjectMap(self, project):
+ self._by_path[project.worktree] = project
- Returns:
- A list of matching Project instances.
- """
- if all_manifests:
- if not manifest:
- manifest = self.manifest.outer_client
- all_projects_list = manifest.all_projects
- else:
- if not manifest:
- manifest = self.manifest
- all_projects_list = manifest.projects
- result = []
+ def _GetProjectByPath(self, manifest, path):
+ project = None
+ if os.path.exists(path):
+ oldpath = None
+ while path and path != oldpath and path != manifest.topdir:
+ try:
+ project = self._by_path[path]
+ break
+ except KeyError:
+ oldpath = path
+ path = os.path.dirname(path)
+ if not project and path == manifest.topdir:
+ try:
+ project = self._by_path[path]
+ except KeyError:
+ pass
+ else:
+ try:
+ project = self._by_path[path]
+ except KeyError:
+ pass
+ return project
- if not groups:
- groups = manifest.GetGroupsStr()
- groups = [x for x in re.split(r'[,\s]+', groups) if x]
+ def GetProjects(
+ self,
+ args,
+ manifest=None,
+ groups="",
+ missing_ok=False,
+ submodules_ok=False,
+ all_manifests=False,
+ ):
+ """A list of projects that match the arguments.
- if not args:
- derived_projects = {}
- for project in all_projects_list:
- if submodules_ok or project.sync_s:
- derived_projects.update((p.name, p)
- for p in project.GetDerivedSubprojects())
- all_projects_list.extend(derived_projects.values())
- for project in all_projects_list:
- if (missing_ok or project.Exists) and project.MatchesGroups(groups):
- result.append(project)
- else:
- self._ResetPathToProjectMap(all_projects_list)
+ Args:
+ args: a list of (case-insensitive) strings, projects to search for.
+ manifest: an XmlManifest, the manifest to use, or None for default.
+ groups: a string, the manifest groups in use.
+ missing_ok: a boolean, whether to allow missing projects.
+ submodules_ok: a boolean, whether to allow submodules.
+ all_manifests: a boolean, if True then all manifests and
+ submanifests are used. If False, then only the local
+ (sub)manifest is used.
- for arg in args:
- # We have to filter by manifest groups in case the requested project is
- # checked out multiple times or differently based on them.
- projects = [project
+ Returns:
+ A list of matching Project instances.
+ """
+ if all_manifests:
+ if not manifest:
+ manifest = self.manifest.outer_client
+ all_projects_list = manifest.all_projects
+ else:
+ if not manifest:
+ manifest = self.manifest
+ all_projects_list = manifest.projects
+ result = []
+
+ if not groups:
+ groups = manifest.GetGroupsStr()
+ groups = [x for x in re.split(r"[,\s]+", groups) if x]
+
+ if not args:
+ derived_projects = {}
+ for project in all_projects_list:
+ if submodules_ok or project.sync_s:
+ derived_projects.update(
+ (p.name, p) for p in project.GetDerivedSubprojects()
+ )
+ all_projects_list.extend(derived_projects.values())
+ for project in all_projects_list:
+ if (missing_ok or project.Exists) and project.MatchesGroups(
+ groups
+ ):
+ result.append(project)
+ else:
+ self._ResetPathToProjectMap(all_projects_list)
+
+ for arg in args:
+ # We have to filter by manifest groups in case the requested
+ # project is checked out multiple times or differently based on
+ # them.
+ projects = [
+ project
for project in manifest.GetProjectsWithName(
- arg, all_manifests=all_manifests)
- if project.MatchesGroups(groups)]
+ arg, all_manifests=all_manifests
+ )
+ if project.MatchesGroups(groups)
+ ]
- if not projects:
- path = os.path.abspath(arg).replace('\\', '/')
- tree = manifest
- if all_manifests:
- # Look for the deepest matching submanifest.
- for tree in reversed(list(manifest.all_manifests)):
- if path.startswith(tree.topdir):
- break
- project = self._GetProjectByPath(tree, path)
+ if not projects:
+ path = os.path.abspath(arg).replace("\\", "/")
+ tree = manifest
+ if all_manifests:
+ # Look for the deepest matching submanifest.
+ for tree in reversed(list(manifest.all_manifests)):
+ if path.startswith(tree.topdir):
+ break
+ project = self._GetProjectByPath(tree, path)
- # If it's not a derived project, update path->project mapping and
- # search again, as arg might actually point to a derived subproject.
- if (project and not project.Derived and (submodules_ok or
- project.sync_s)):
- search_again = False
- for subproject in project.GetDerivedSubprojects():
- self._UpdatePathToProjectMap(subproject)
- search_again = True
- if search_again:
- project = self._GetProjectByPath(manifest, path) or project
+ # If it's not a derived project, update path->project
+ # mapping and search again, as arg might actually point to
+ # a derived subproject.
+ if (
+ project
+ and not project.Derived
+ and (submodules_ok or project.sync_s)
+ ):
+ search_again = False
+ for subproject in project.GetDerivedSubprojects():
+ self._UpdatePathToProjectMap(subproject)
+ search_again = True
+ if search_again:
+ project = (
+ self._GetProjectByPath(manifest, path)
+ or project
+ )
- if project:
- projects = [project]
+ if project:
+ projects = [project]
- if not projects:
- raise NoSuchProjectError(arg)
+ if not projects:
+ raise NoSuchProjectError(arg)
- for project in projects:
- if not missing_ok and not project.Exists:
- raise NoSuchProjectError('%s (%s)' % (
- arg, project.RelPath(local=not all_manifests)))
- if not project.MatchesGroups(groups):
- raise InvalidProjectGroupsError(arg)
+ for project in projects:
+ if not missing_ok and not project.Exists:
+ raise NoSuchProjectError(
+ "%s (%s)"
+ % (arg, project.RelPath(local=not all_manifests))
+ )
+ if not project.MatchesGroups(groups):
+ raise InvalidProjectGroupsError(arg)
- result.extend(projects)
+ result.extend(projects)
- def _getpath(x):
- return x.relpath
- result.sort(key=_getpath)
- return result
+ def _getpath(x):
+ return x.relpath
- def FindProjects(self, args, inverse=False, all_manifests=False):
- """Find projects from command line arguments.
+ result.sort(key=_getpath)
+ return result
- Args:
- args: a list of (case-insensitive) strings, projects to search for.
- inverse: a boolean, if True, then projects not matching any |args| are
- returned.
- all_manifests: a boolean, if True then all manifests and submanifests are
- used. If False, then only the local (sub)manifest is used.
- """
- result = []
- patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args]
- for project in self.GetProjects('', all_manifests=all_manifests):
- paths = [project.name, project.RelPath(local=not all_manifests)]
- for pattern in patterns:
- match = any(pattern.search(x) for x in paths)
- if not inverse and match:
- result.append(project)
- break
- if inverse and match:
- break
- else:
- if inverse:
- result.append(project)
- result.sort(key=lambda project: (project.manifest.path_prefix,
- project.relpath))
- return result
+ def FindProjects(self, args, inverse=False, all_manifests=False):
+ """Find projects from command line arguments.
- def ManifestList(self, opt):
- """Yields all of the manifests to traverse.
+ Args:
+ args: a list of (case-insensitive) strings, projects to search for.
+ inverse: a boolean, if True, then projects not matching any |args|
+ are returned.
+ all_manifests: a boolean, if True then all manifests and
+ submanifests are used. If False, then only the local
+ (sub)manifest is used.
+ """
+ result = []
+ patterns = [re.compile(r"%s" % a, re.IGNORECASE) for a in args]
+ for project in self.GetProjects("", all_manifests=all_manifests):
+ paths = [project.name, project.RelPath(local=not all_manifests)]
+ for pattern in patterns:
+ match = any(pattern.search(x) for x in paths)
+ if not inverse and match:
+ result.append(project)
+ break
+ if inverse and match:
+ break
+ else:
+ if inverse:
+ result.append(project)
+ result.sort(
+ key=lambda project: (project.manifest.path_prefix, project.relpath)
+ )
+ return result
- Args:
- opt: The command options.
- """
- top = self.outer_manifest
- if not opt.outer_manifest or opt.this_manifest_only:
- top = self.manifest
- yield top
- if not opt.this_manifest_only:
- for child in top.all_children:
- yield child
+ def ManifestList(self, opt):
+ """Yields all of the manifests to traverse.
+
+ Args:
+ opt: The command options.
+ """
+ top = self.outer_manifest
+ if not opt.outer_manifest or opt.this_manifest_only:
+ top = self.manifest
+ yield top
+ if not opt.this_manifest_only:
+ for child in top.all_children:
+ yield child
class InteractiveCommand(Command):
- """Command which requires user interaction on the tty and
- must not run within a pager, even if the user asks to.
- """
+ """Command which requires user interaction on the tty and must not run
+ within a pager, even if the user asks to.
+ """
- def WantPager(self, _opt):
- return False
+ def WantPager(self, _opt):
+ return False
class PagedCommand(Command):
- """Command which defaults to output in a pager, as its
- display tends to be larger than one screen full.
- """
+ """Command which defaults to output in a pager, as its display tends to be
+ larger than one screen full.
+ """
- def WantPager(self, _opt):
- return True
+ def WantPager(self, _opt):
+ return True
class MirrorSafeCommand(object):
- """Command permits itself to run within a mirror,
- and does not require a working directory.
- """
+ """Command permits itself to run within a mirror, and does not require a
+ working directory.
+ """
class GitcAvailableCommand(object):
- """Command that requires GITC to be available, but does
- not require the local client to be a GITC client.
- """
+ """Command that requires GITC to be available, but does not require the
+ local client to be a GITC client.
+ """
class GitcClientCommand(object):
- """Command that requires the local client to be a GITC
- client.
- """
+ """Command that requires the local client to be a GITC client."""
diff --git a/editor.py b/editor.py
index b84a42d..96835ab 100644
--- a/editor.py
+++ b/editor.py
@@ -23,93 +23,99 @@
class Editor(object):
- """Manages the user's preferred text editor."""
+ """Manages the user's preferred text editor."""
- _editor = None
- globalConfig = None
+ _editor = None
+ globalConfig = None
- @classmethod
- def _GetEditor(cls):
- if cls._editor is None:
- cls._editor = cls._SelectEditor()
- return cls._editor
+ @classmethod
+ def _GetEditor(cls):
+ if cls._editor is None:
+ cls._editor = cls._SelectEditor()
+ return cls._editor
- @classmethod
- def _SelectEditor(cls):
- e = os.getenv('GIT_EDITOR')
- if e:
- return e
+ @classmethod
+ def _SelectEditor(cls):
+ e = os.getenv("GIT_EDITOR")
+ if e:
+ return e
- if cls.globalConfig:
- e = cls.globalConfig.GetString('core.editor')
- if e:
- return e
+ if cls.globalConfig:
+ e = cls.globalConfig.GetString("core.editor")
+ if e:
+ return e
- e = os.getenv('VISUAL')
- if e:
- return e
+ e = os.getenv("VISUAL")
+ if e:
+ return e
- e = os.getenv('EDITOR')
- if e:
- return e
+ e = os.getenv("EDITOR")
+ if e:
+ return e
- if os.getenv('TERM') == 'dumb':
- print(
- """No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
+ if os.getenv("TERM") == "dumb":
+ print(
+ """No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb. Please configure at
-least one of these before using this command.""", file=sys.stderr)
- sys.exit(1)
+least one of these before using this command.""", # noqa: E501
+ file=sys.stderr,
+ )
+ sys.exit(1)
- return 'vi'
+ return "vi"
- @classmethod
- def EditString(cls, data):
- """Opens an editor to edit the given content.
+ @classmethod
+ def EditString(cls, data):
+ """Opens an editor to edit the given content.
- Args:
- data: The text to edit.
+ Args:
+ data: The text to edit.
- Returns:
- New value of edited text.
+ Returns:
+ New value of edited text.
- Raises:
- EditorError: The editor failed to run.
- """
- editor = cls._GetEditor()
- if editor == ':':
- return data
+ Raises:
+ EditorError: The editor failed to run.
+ """
+ editor = cls._GetEditor()
+ if editor == ":":
+ return data
- fd, path = tempfile.mkstemp()
- try:
- os.write(fd, data.encode('utf-8'))
- os.close(fd)
- fd = None
+ fd, path = tempfile.mkstemp()
+ try:
+ os.write(fd, data.encode("utf-8"))
+ os.close(fd)
+ fd = None
- if platform_utils.isWindows():
- # Split on spaces, respecting quoted strings
- import shlex
- args = shlex.split(editor)
- shell = False
- elif re.compile("^.*[$ \t'].*$").match(editor):
- args = [editor + ' "$@"', 'sh']
- shell = True
- else:
- args = [editor]
- shell = False
- args.append(path)
+ if platform_utils.isWindows():
+ # Split on spaces, respecting quoted strings
+ import shlex
- try:
- rc = subprocess.Popen(args, shell=shell).wait()
- except OSError as e:
- raise EditorError('editor failed, %s: %s %s'
- % (str(e), editor, path))
- if rc != 0:
- raise EditorError('editor failed with exit status %d: %s %s'
- % (rc, editor, path))
+ args = shlex.split(editor)
+ shell = False
+ elif re.compile("^.*[$ \t'].*$").match(editor):
+ args = [editor + ' "$@"', "sh"]
+ shell = True
+ else:
+ args = [editor]
+ shell = False
+ args.append(path)
- with open(path, mode='rb') as fd2:
- return fd2.read().decode('utf-8')
- finally:
- if fd:
- os.close(fd)
- platform_utils.remove(path)
+ try:
+ rc = subprocess.Popen(args, shell=shell).wait()
+ except OSError as e:
+ raise EditorError(
+ "editor failed, %s: %s %s" % (str(e), editor, path)
+ )
+ if rc != 0:
+ raise EditorError(
+ "editor failed with exit status %d: %s %s"
+ % (rc, editor, path)
+ )
+
+ with open(path, mode="rb") as fd2:
+ return fd2.read().decode("utf-8")
+ finally:
+ if fd:
+ os.close(fd)
+ platform_utils.remove(path)
diff --git a/error.py b/error.py
index cbefcb7..3cf34d5 100644
--- a/error.py
+++ b/error.py
@@ -14,122 +14,112 @@
class ManifestParseError(Exception):
- """Failed to parse the manifest file.
- """
+ """Failed to parse the manifest file."""
class ManifestInvalidRevisionError(ManifestParseError):
- """The revision value in a project is incorrect.
- """
+ """The revision value in a project is incorrect."""
class ManifestInvalidPathError(ManifestParseError):
- """A path used in <copyfile> or <linkfile> is incorrect.
- """
+ """A path used in <copyfile> or <linkfile> is incorrect."""
class NoManifestException(Exception):
- """The required manifest does not exist.
- """
+ """The required manifest does not exist."""
- def __init__(self, path, reason):
- super().__init__(path, reason)
- self.path = path
- self.reason = reason
+ def __init__(self, path, reason):
+ super().__init__(path, reason)
+ self.path = path
+ self.reason = reason
- def __str__(self):
- return self.reason
+ def __str__(self):
+ return self.reason
class EditorError(Exception):
- """Unspecified error from the user's text editor.
- """
+ """Unspecified error from the user's text editor."""
- def __init__(self, reason):
- super().__init__(reason)
- self.reason = reason
+ def __init__(self, reason):
+ super().__init__(reason)
+ self.reason = reason
- def __str__(self):
- return self.reason
+ def __str__(self):
+ return self.reason
class GitError(Exception):
- """Unspecified internal error from git.
- """
+ """Unspecified internal error from git."""
- def __init__(self, command):
- super().__init__(command)
- self.command = command
+ def __init__(self, command):
+ super().__init__(command)
+ self.command = command
- def __str__(self):
- return self.command
+ def __str__(self):
+ return self.command
class UploadError(Exception):
- """A bundle upload to Gerrit did not succeed.
- """
+ """A bundle upload to Gerrit did not succeed."""
- def __init__(self, reason):
- super().__init__(reason)
- self.reason = reason
+ def __init__(self, reason):
+ super().__init__(reason)
+ self.reason = reason
- def __str__(self):
- return self.reason
+ def __str__(self):
+ return self.reason
class DownloadError(Exception):
- """Cannot download a repository.
- """
+ """Cannot download a repository."""
- def __init__(self, reason):
- super().__init__(reason)
- self.reason = reason
+ def __init__(self, reason):
+ super().__init__(reason)
+ self.reason = reason
- def __str__(self):
- return self.reason
+ def __str__(self):
+ return self.reason
class NoSuchProjectError(Exception):
- """A specified project does not exist in the work tree.
- """
+ """A specified project does not exist in the work tree."""
- def __init__(self, name=None):
- super().__init__(name)
- self.name = name
+ def __init__(self, name=None):
+ super().__init__(name)
+ self.name = name
- def __str__(self):
- if self.name is None:
- return 'in current directory'
- return self.name
+ def __str__(self):
+ if self.name is None:
+ return "in current directory"
+ return self.name
class InvalidProjectGroupsError(Exception):
- """A specified project is not suitable for the specified groups
- """
+ """A specified project is not suitable for the specified groups"""
- def __init__(self, name=None):
- super().__init__(name)
- self.name = name
+ def __init__(self, name=None):
+ super().__init__(name)
+ self.name = name
- def __str__(self):
- if self.name is None:
- return 'in current directory'
- return self.name
+ def __str__(self):
+ if self.name is None:
+ return "in current directory"
+ return self.name
class RepoChangedException(Exception):
- """Thrown if 'repo sync' results in repo updating its internal
- repo or manifest repositories. In this special case we must
- use exec to re-execute repo with the new code and manifest.
- """
+ """Thrown if 'repo sync' results in repo updating its internal
+ repo or manifest repositories. In this special case we must
+ use exec to re-execute repo with the new code and manifest.
+ """
- def __init__(self, extra_args=None):
- super().__init__(extra_args)
- self.extra_args = extra_args or []
+ def __init__(self, extra_args=None):
+ super().__init__(extra_args)
+ self.extra_args = extra_args or []
class HookError(Exception):
- """Thrown if a 'repo-hook' could not be run.
+ """Thrown if a 'repo-hook' could not be run.
- The common case is that the file wasn't present when we tried to run it.
- """
+ The common case is that the file wasn't present when we tried to run it.
+ """
diff --git a/event_log.py b/event_log.py
index c77c564..b1f8bdf 100644
--- a/event_log.py
+++ b/event_log.py
@@ -15,161 +15,169 @@
import json
import multiprocessing
-TASK_COMMAND = 'command'
-TASK_SYNC_NETWORK = 'sync-network'
-TASK_SYNC_LOCAL = 'sync-local'
+TASK_COMMAND = "command"
+TASK_SYNC_NETWORK = "sync-network"
+TASK_SYNC_LOCAL = "sync-local"
class EventLog(object):
- """Event log that records events that occurred during a repo invocation.
+ """Event log that records events that occurred during a repo invocation.
- Events are written to the log as a consecutive JSON entries, one per line.
- Each entry contains the following keys:
- - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
- The ID is only unique for the invocation of the repo command.
- - name: Name of the object being operated upon.
- - task_name: The task that was performed.
- - start: Timestamp of when the operation started.
- - finish: Timestamp of when the operation finished.
- - success: Boolean indicating if the operation was successful.
- - try_count: A counter indicating the try count of this task.
+ Events are written to the log as consecutive JSON entries, one per line.
+ Each entry contains the following keys:
+ - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
+ The ID is only unique for the invocation of the repo command.
+ - name: Name of the object being operated upon.
+ - task_name: The task that was performed.
+ - start: Timestamp of when the operation started.
+ - finish: Timestamp of when the operation finished.
+ - success: Boolean indicating if the operation was successful.
+ - try_count: A counter indicating the try count of this task.
- Optionally:
- - parent: A ('RepoOp', ID) tuple indicating the parent event for nested
- events.
+ Optionally:
+ - parent: A ('RepoOp', ID) tuple indicating the parent event for nested
+ events.
- Valid task_names include:
- - command: The invocation of a subcommand.
- - sync-network: The network component of a sync command.
- - sync-local: The local component of a sync command.
+ Valid task_names include:
+ - command: The invocation of a subcommand.
+ - sync-network: The network component of a sync command.
+ - sync-local: The local component of a sync command.
- Specific tasks may include additional informational properties.
- """
-
- def __init__(self):
- """Initializes the event log."""
- self._log = []
- self._parent = None
-
- def Add(self, name, task_name, start, finish=None, success=None,
- try_count=1, kind='RepoOp'):
- """Add an event to the log.
-
- Args:
- name: Name of the object being operated upon.
- task_name: A sub-task that was performed for name.
- start: Timestamp of when the operation started.
- finish: Timestamp of when the operation finished.
- success: Boolean indicating if the operation was successful.
- try_count: A counter indicating the try count of this task.
- kind: The kind of the object for the unique identifier.
-
- Returns:
- A dictionary of the event added to the log.
+ Specific tasks may include additional informational properties.
"""
- event = {
- 'id': (kind, _NextEventId()),
- 'name': name,
- 'task_name': task_name,
- 'start_time': start,
- 'try': try_count,
- }
- if self._parent:
- event['parent'] = self._parent['id']
+ def __init__(self):
+ """Initializes the event log."""
+ self._log = []
+ self._parent = None
- if success is not None or finish is not None:
- self.FinishEvent(event, finish, success)
+ def Add(
+ self,
+ name,
+ task_name,
+ start,
+ finish=None,
+ success=None,
+ try_count=1,
+ kind="RepoOp",
+ ):
+ """Add an event to the log.
- self._log.append(event)
- return event
+ Args:
+ name: Name of the object being operated upon.
+ task_name: A sub-task that was performed for name.
+ start: Timestamp of when the operation started.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
+ try_count: A counter indicating the try count of this task.
+ kind: The kind of the object for the unique identifier.
- def AddSync(self, project, task_name, start, finish, success):
- """Add a event to the log for a sync command.
+ Returns:
+ A dictionary of the event added to the log.
+ """
+ event = {
+ "id": (kind, _NextEventId()),
+ "name": name,
+ "task_name": task_name,
+ "start_time": start,
+ "try": try_count,
+ }
- Args:
- project: Project being synced.
- task_name: A sub-task that was performed for name.
- One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
- start: Timestamp of when the operation started.
- finish: Timestamp of when the operation finished.
- success: Boolean indicating if the operation was successful.
+ if self._parent:
+ event["parent"] = self._parent["id"]
- Returns:
- A dictionary of the event added to the log.
- """
- event = self.Add(project.relpath, task_name, start, finish, success)
- if event is not None:
- event['project'] = project.name
- if project.revisionExpr:
- event['revision'] = project.revisionExpr
- if project.remote.url:
- event['project_url'] = project.remote.url
- if project.remote.fetchUrl:
- event['remote_url'] = project.remote.fetchUrl
- try:
- event['git_hash'] = project.GetCommitRevisionId()
- except Exception:
- pass
- return event
+ if success is not None or finish is not None:
+ self.FinishEvent(event, finish, success)
- def GetStatusString(self, success):
- """Converst a boolean success to a status string.
+ self._log.append(event)
+ return event
- Args:
- success: Boolean indicating if the operation was successful.
+ def AddSync(self, project, task_name, start, finish, success):
+ """Add an event to the log for a sync command.
- Returns:
- status string.
- """
- return 'pass' if success else 'fail'
+ Args:
+ project: Project being synced.
+ task_name: A sub-task that was performed for name.
+ One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
+ start: Timestamp of when the operation started.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
- def FinishEvent(self, event, finish, success):
- """Finishes an incomplete event.
+ Returns:
+ A dictionary of the event added to the log.
+ """
+ event = self.Add(project.relpath, task_name, start, finish, success)
+ if event is not None:
+ event["project"] = project.name
+ if project.revisionExpr:
+ event["revision"] = project.revisionExpr
+ if project.remote.url:
+ event["project_url"] = project.remote.url
+ if project.remote.fetchUrl:
+ event["remote_url"] = project.remote.fetchUrl
+ try:
+ event["git_hash"] = project.GetCommitRevisionId()
+ except Exception:
+ pass
+ return event
- Args:
- event: An event that has been added to the log.
- finish: Timestamp of when the operation finished.
- success: Boolean indicating if the operation was successful.
+ def GetStatusString(self, success):
+ """Converts a boolean success to a status string.
- Returns:
- A dictionary of the event added to the log.
- """
- event['status'] = self.GetStatusString(success)
- event['finish_time'] = finish
- return event
+ Args:
+ success: Boolean indicating if the operation was successful.
- def SetParent(self, event):
- """Set a parent event for all new entities.
+ Returns:
+ status string.
+ """
+ return "pass" if success else "fail"
- Args:
- event: The event to use as a parent.
- """
- self._parent = event
+ def FinishEvent(self, event, finish, success):
+ """Finishes an incomplete event.
- def Write(self, filename):
- """Writes the log out to a file.
+ Args:
+ event: An event that has been added to the log.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
- Args:
- filename: The file to write the log to.
- """
- with open(filename, 'w+') as f:
- for e in self._log:
- json.dump(e, f, sort_keys=True)
- f.write('\n')
+ Returns:
+ A dictionary of the event added to the log.
+ """
+ event["status"] = self.GetStatusString(success)
+ event["finish_time"] = finish
+ return event
+
+ def SetParent(self, event):
+ """Set a parent event for all new entities.
+
+ Args:
+ event: The event to use as a parent.
+ """
+ self._parent = event
+
+ def Write(self, filename):
+ """Writes the log out to a file.
+
+ Args:
+ filename: The file to write the log to.
+ """
+ with open(filename, "w+") as f:
+ for e in self._log:
+ json.dump(e, f, sort_keys=True)
+ f.write("\n")
# An integer id that is unique across this invocation of the program.
-_EVENT_ID = multiprocessing.Value('i', 1)
+_EVENT_ID = multiprocessing.Value("i", 1)
def _NextEventId():
- """Helper function for grabbing the next unique id.
+ """Helper function for grabbing the next unique id.
- Returns:
- A unique, to this invocation of the program, integer id.
- """
- with _EVENT_ID.get_lock():
- val = _EVENT_ID.value
- _EVENT_ID.value += 1
- return val
+ Returns:
+ A unique, to this invocation of the program, integer id.
+ """
+ with _EVENT_ID.get_lock():
+ val = _EVENT_ID.value
+ _EVENT_ID.value += 1
+ return val
diff --git a/fetch.py b/fetch.py
index c954a9c..31f8152 100644
--- a/fetch.py
+++ b/fetch.py
@@ -21,25 +21,29 @@
def fetch_file(url, verbose=False):
- """Fetch a file from the specified source using the appropriate protocol.
+ """Fetch a file from the specified source using the appropriate protocol.
- Returns:
- The contents of the file as bytes.
- """
- scheme = urlparse(url).scheme
- if scheme == 'gs':
- cmd = ['gsutil', 'cat', url]
- try:
- result = subprocess.run(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- check=True)
- if result.stderr and verbose:
- print('warning: non-fatal error running "gsutil": %s' % result.stderr,
- file=sys.stderr)
- return result.stdout
- except subprocess.CalledProcessError as e:
- print('fatal: error running "gsutil": %s' % e.stderr,
- file=sys.stderr)
- sys.exit(1)
- with urlopen(url) as f:
- return f.read()
+ Returns:
+ The contents of the file as bytes.
+ """
+ scheme = urlparse(url).scheme
+ if scheme == "gs":
+ cmd = ["gsutil", "cat", url]
+ try:
+ result = subprocess.run(
+ cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
+ )
+ if result.stderr and verbose:
+ print(
+ 'warning: non-fatal error running "gsutil": %s'
+ % result.stderr,
+ file=sys.stderr,
+ )
+ return result.stdout
+ except subprocess.CalledProcessError as e:
+ print(
+ 'fatal: error running "gsutil": %s' % e.stderr, file=sys.stderr
+ )
+ sys.exit(1)
+ with urlopen(url) as f:
+ return f.read()
diff --git a/git_command.py b/git_command.py
index d4d4bed..c7245ad 100644
--- a/git_command.py
+++ b/git_command.py
@@ -24,7 +24,7 @@
from repo_trace import REPO_TRACE, IsTrace, Trace
from wrapper import Wrapper
-GIT = 'git'
+GIT = "git"
# NB: These do not need to be kept in sync with the repo launcher script.
# These may be much newer as it allows the repo launcher to roll between
# different repo releases while source versions might require a newer git.
@@ -36,126 +36,138 @@
# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
MIN_GIT_VERSION_SOFT = (1, 9, 1)
MIN_GIT_VERSION_HARD = (1, 7, 2)
-GIT_DIR = 'GIT_DIR'
+GIT_DIR = "GIT_DIR"
LAST_GITDIR = None
LAST_CWD = None
class _GitCall(object):
- @functools.lru_cache(maxsize=None)
- def version_tuple(self):
- ret = Wrapper().ParseGitVersion()
- if ret is None:
- print('fatal: unable to detect git version', file=sys.stderr)
- sys.exit(1)
- return ret
+ @functools.lru_cache(maxsize=None)
+ def version_tuple(self):
+ ret = Wrapper().ParseGitVersion()
+ if ret is None:
+ print("fatal: unable to detect git version", file=sys.stderr)
+ sys.exit(1)
+ return ret
- def __getattr__(self, name):
- name = name.replace('_', '-')
+ def __getattr__(self, name):
+ name = name.replace("_", "-")
- def fun(*cmdv):
- command = [name]
- command.extend(cmdv)
- return GitCommand(None, command).Wait() == 0
- return fun
+ def fun(*cmdv):
+ command = [name]
+ command.extend(cmdv)
+ return GitCommand(None, command).Wait() == 0
+
+ return fun
git = _GitCall()
def RepoSourceVersion():
- """Return the version of the repo.git tree."""
- ver = getattr(RepoSourceVersion, 'version', None)
+ """Return the version of the repo.git tree."""
+ ver = getattr(RepoSourceVersion, "version", None)
- # We avoid GitCommand so we don't run into circular deps -- GitCommand needs
- # to initialize version info we provide.
- if ver is None:
- env = GitCommand._GetBasicEnv()
+ # We avoid GitCommand so we don't run into circular deps -- GitCommand needs
+ # to initialize version info we provide.
+ if ver is None:
+ env = GitCommand._GetBasicEnv()
- proj = os.path.dirname(os.path.abspath(__file__))
- env[GIT_DIR] = os.path.join(proj, '.git')
- result = subprocess.run([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
- stderr=subprocess.DEVNULL, encoding='utf-8',
- env=env, check=False)
- if result.returncode == 0:
- ver = result.stdout.strip()
- if ver.startswith('v'):
- ver = ver[1:]
- else:
- ver = 'unknown'
- setattr(RepoSourceVersion, 'version', ver)
+ proj = os.path.dirname(os.path.abspath(__file__))
+ env[GIT_DIR] = os.path.join(proj, ".git")
+ result = subprocess.run(
+ [GIT, "describe", HEAD],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ encoding="utf-8",
+ env=env,
+ check=False,
+ )
+ if result.returncode == 0:
+ ver = result.stdout.strip()
+ if ver.startswith("v"):
+ ver = ver[1:]
+ else:
+ ver = "unknown"
+ setattr(RepoSourceVersion, "version", ver)
- return ver
+ return ver
class UserAgent(object):
- """Mange User-Agent settings when talking to external services
+ """Manage User-Agent settings when talking to external services
- We follow the style as documented here:
- https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
- """
+ We follow the style as documented here:
+ https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+ """
- _os = None
- _repo_ua = None
- _git_ua = None
+ _os = None
+ _repo_ua = None
+ _git_ua = None
- @property
- def os(self):
- """The operating system name."""
- if self._os is None:
- os_name = sys.platform
- if os_name.lower().startswith('linux'):
- os_name = 'Linux'
- elif os_name == 'win32':
- os_name = 'Win32'
- elif os_name == 'cygwin':
- os_name = 'Cygwin'
- elif os_name == 'darwin':
- os_name = 'Darwin'
- self._os = os_name
+ @property
+ def os(self):
+ """The operating system name."""
+ if self._os is None:
+ os_name = sys.platform
+ if os_name.lower().startswith("linux"):
+ os_name = "Linux"
+ elif os_name == "win32":
+ os_name = "Win32"
+ elif os_name == "cygwin":
+ os_name = "Cygwin"
+ elif os_name == "darwin":
+ os_name = "Darwin"
+ self._os = os_name
- return self._os
+ return self._os
- @property
- def repo(self):
- """The UA when connecting directly from repo."""
- if self._repo_ua is None:
- py_version = sys.version_info
- self._repo_ua = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
- RepoSourceVersion(),
- self.os,
- git.version_tuple().full,
- py_version.major, py_version.minor, py_version.micro)
+ @property
+ def repo(self):
+ """The UA when connecting directly from repo."""
+ if self._repo_ua is None:
+ py_version = sys.version_info
+ self._repo_ua = "git-repo/%s (%s) git/%s Python/%d.%d.%d" % (
+ RepoSourceVersion(),
+ self.os,
+ git.version_tuple().full,
+ py_version.major,
+ py_version.minor,
+ py_version.micro,
+ )
- return self._repo_ua
+ return self._repo_ua
- @property
- def git(self):
- """The UA when running git."""
- if self._git_ua is None:
- self._git_ua = 'git/%s (%s) git-repo/%s' % (
- git.version_tuple().full,
- self.os,
- RepoSourceVersion())
+ @property
+ def git(self):
+ """The UA when running git."""
+ if self._git_ua is None:
+ self._git_ua = "git/%s (%s) git-repo/%s" % (
+ git.version_tuple().full,
+ self.os,
+ RepoSourceVersion(),
+ )
- return self._git_ua
+ return self._git_ua
user_agent = UserAgent()
-def git_require(min_version, fail=False, msg=''):
- git_version = git.version_tuple()
- if min_version <= git_version:
- return True
- if fail:
- need = '.'.join(map(str, min_version))
- if msg:
- msg = ' for ' + msg
- print('fatal: git %s or later required%s' % (need, msg), file=sys.stderr)
- sys.exit(1)
- return False
+def git_require(min_version, fail=False, msg=""):
+ git_version = git.version_tuple()
+ if min_version <= git_version:
+ return True
+ if fail:
+ need = ".".join(map(str, min_version))
+ if msg:
+ msg = " for " + msg
+ print(
+ "fatal: git %s or later required%s" % (need, msg), file=sys.stderr
+ )
+ sys.exit(1)
+ return False
def _build_env(
@@ -164,175 +176,194 @@
disable_editor: Optional[bool] = False,
ssh_proxy: Optional[Any] = None,
gitdir: Optional[str] = None,
- objdir: Optional[str] = None
+ objdir: Optional[str] = None,
):
- """Constucts an env dict for command execution."""
+ """Constructs an env dict for command execution."""
- assert _kwargs_only == (), '_build_env only accepts keyword arguments.'
+ assert _kwargs_only == (), "_build_env only accepts keyword arguments."
- env = GitCommand._GetBasicEnv()
+ env = GitCommand._GetBasicEnv()
- if disable_editor:
- env['GIT_EDITOR'] = ':'
- if ssh_proxy:
- env['REPO_SSH_SOCK'] = ssh_proxy.sock()
- env['GIT_SSH'] = ssh_proxy.proxy
- env['GIT_SSH_VARIANT'] = 'ssh'
- if 'http_proxy' in env and 'darwin' == sys.platform:
- s = "'http.proxy=%s'" % (env['http_proxy'],)
- p = env.get('GIT_CONFIG_PARAMETERS')
- if p is not None:
- s = p + ' ' + s
- env['GIT_CONFIG_PARAMETERS'] = s
- if 'GIT_ALLOW_PROTOCOL' not in env:
- env['GIT_ALLOW_PROTOCOL'] = (
- 'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
- env['GIT_HTTP_USER_AGENT'] = user_agent.git
+ if disable_editor:
+ env["GIT_EDITOR"] = ":"
+ if ssh_proxy:
+ env["REPO_SSH_SOCK"] = ssh_proxy.sock()
+ env["GIT_SSH"] = ssh_proxy.proxy
+ env["GIT_SSH_VARIANT"] = "ssh"
+ if "http_proxy" in env and "darwin" == sys.platform:
+ s = "'http.proxy=%s'" % (env["http_proxy"],)
+ p = env.get("GIT_CONFIG_PARAMETERS")
+ if p is not None:
+ s = p + " " + s
+ env["GIT_CONFIG_PARAMETERS"] = s
+ if "GIT_ALLOW_PROTOCOL" not in env:
+ env[
+ "GIT_ALLOW_PROTOCOL"
+ ] = "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
+ env["GIT_HTTP_USER_AGENT"] = user_agent.git
- if objdir:
- # Set to the place we want to save the objects.
- env['GIT_OBJECT_DIRECTORY'] = objdir
+ if objdir:
+ # Set to the place we want to save the objects.
+ env["GIT_OBJECT_DIRECTORY"] = objdir
- alt_objects = os.path.join(gitdir, 'objects') if gitdir else None
- if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(objdir):
- # Allow git to search the original place in case of local or unique refs
- # that git will attempt to resolve even if we aren't fetching them.
- env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_objects
- if bare and gitdir is not None:
- env[GIT_DIR] = gitdir
+ alt_objects = os.path.join(gitdir, "objects") if gitdir else None
+ if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(
+ objdir
+ ):
+ # Allow git to search the original place in case of local or unique
+ # refs that git will attempt to resolve even if we aren't fetching
+ # them.
+ env["GIT_ALTERNATE_OBJECT_DIRECTORIES"] = alt_objects
+ if bare and gitdir is not None:
+ env[GIT_DIR] = gitdir
- return env
+ return env
class GitCommand(object):
- """Wrapper around a single git invocation."""
+ """Wrapper around a single git invocation."""
- def __init__(self,
- project,
- cmdv,
- bare=False,
- input=None,
- capture_stdout=False,
- capture_stderr=False,
- merge_output=False,
- disable_editor=False,
- ssh_proxy=None,
- cwd=None,
- gitdir=None,
- objdir=None):
+ def __init__(
+ self,
+ project,
+ cmdv,
+ bare=False,
+ input=None,
+ capture_stdout=False,
+ capture_stderr=False,
+ merge_output=False,
+ disable_editor=False,
+ ssh_proxy=None,
+ cwd=None,
+ gitdir=None,
+ objdir=None,
+ ):
+ if project:
+ if not cwd:
+ cwd = project.worktree
+ if not gitdir:
+ gitdir = project.gitdir
- if project:
- if not cwd:
- cwd = project.worktree
- if not gitdir:
- gitdir = project.gitdir
+ # Git on Windows wants its paths only using / for reliability.
+ if platform_utils.isWindows():
+ if objdir:
+ objdir = objdir.replace("\\", "/")
+ if gitdir:
+ gitdir = gitdir.replace("\\", "/")
- # Git on Windows wants its paths only using / for reliability.
- if platform_utils.isWindows():
- if objdir:
- objdir = objdir.replace('\\', '/')
- if gitdir:
- gitdir = gitdir.replace('\\', '/')
+ env = _build_env(
+ disable_editor=disable_editor,
+ ssh_proxy=ssh_proxy,
+ objdir=objdir,
+ gitdir=gitdir,
+ bare=bare,
+ )
- env = _build_env(
- disable_editor=disable_editor,
- ssh_proxy=ssh_proxy,
- objdir=objdir,
- gitdir=gitdir,
- bare=bare,
- )
+ command = [GIT]
+ if bare:
+ cwd = None
+ command.append(cmdv[0])
+ # Need to use the --progress flag for fetch/clone so output will be
+ # displayed as by default git only does progress output if stderr is a
+ # TTY.
+ if sys.stderr.isatty() and cmdv[0] in ("fetch", "clone"):
+ if "--progress" not in cmdv and "--quiet" not in cmdv:
+ command.append("--progress")
+ command.extend(cmdv[1:])
- command = [GIT]
- if bare:
- cwd = None
- command.append(cmdv[0])
- # Need to use the --progress flag for fetch/clone so output will be
- # displayed as by default git only does progress output if stderr is a TTY.
- if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
- if '--progress' not in cmdv and '--quiet' not in cmdv:
- command.append('--progress')
- command.extend(cmdv[1:])
+ stdin = subprocess.PIPE if input else None
+ stdout = subprocess.PIPE if capture_stdout else None
+ stderr = (
+ subprocess.STDOUT
+ if merge_output
+ else (subprocess.PIPE if capture_stderr else None)
+ )
- stdin = subprocess.PIPE if input else None
- stdout = subprocess.PIPE if capture_stdout else None
- stderr = (subprocess.STDOUT if merge_output else
- (subprocess.PIPE if capture_stderr else None))
+ dbg = ""
+ if IsTrace():
+ global LAST_CWD
+ global LAST_GITDIR
- dbg = ''
- if IsTrace():
- global LAST_CWD
- global LAST_GITDIR
+ if cwd and LAST_CWD != cwd:
+ if LAST_GITDIR or LAST_CWD:
+ dbg += "\n"
+ dbg += ": cd %s\n" % cwd
+ LAST_CWD = cwd
- if cwd and LAST_CWD != cwd:
- if LAST_GITDIR or LAST_CWD:
- dbg += '\n'
- dbg += ': cd %s\n' % cwd
- LAST_CWD = cwd
+ if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
+ if LAST_GITDIR or LAST_CWD:
+ dbg += "\n"
+ dbg += ": export GIT_DIR=%s\n" % env[GIT_DIR]
+ LAST_GITDIR = env[GIT_DIR]
- if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
- if LAST_GITDIR or LAST_CWD:
- dbg += '\n'
- dbg += ': export GIT_DIR=%s\n' % env[GIT_DIR]
- LAST_GITDIR = env[GIT_DIR]
+ if "GIT_OBJECT_DIRECTORY" in env:
+ dbg += (
+ ": export GIT_OBJECT_DIRECTORY=%s\n"
+ % env["GIT_OBJECT_DIRECTORY"]
+ )
+ if "GIT_ALTERNATE_OBJECT_DIRECTORIES" in env:
+ dbg += ": export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n" % (
+ env["GIT_ALTERNATE_OBJECT_DIRECTORIES"]
+ )
- if 'GIT_OBJECT_DIRECTORY' in env:
- dbg += ': export GIT_OBJECT_DIRECTORY=%s\n' % env['GIT_OBJECT_DIRECTORY']
- if 'GIT_ALTERNATE_OBJECT_DIRECTORIES' in env:
- dbg += ': export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n' % (
- env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])
+ dbg += ": "
+ dbg += " ".join(command)
+ if stdin == subprocess.PIPE:
+ dbg += " 0<|"
+ if stdout == subprocess.PIPE:
+ dbg += " 1>|"
+ if stderr == subprocess.PIPE:
+ dbg += " 2>|"
+ elif stderr == subprocess.STDOUT:
+ dbg += " 2>&1"
- dbg += ': '
- dbg += ' '.join(command)
- if stdin == subprocess.PIPE:
- dbg += ' 0<|'
- if stdout == subprocess.PIPE:
- dbg += ' 1>|'
- if stderr == subprocess.PIPE:
- dbg += ' 2>|'
- elif stderr == subprocess.STDOUT:
- dbg += ' 2>&1'
+ with Trace(
+ "git command %s %s with debug: %s", LAST_GITDIR, command, dbg
+ ):
+ try:
+ p = subprocess.Popen(
+ command,
+ cwd=cwd,
+ env=env,
+ encoding="utf-8",
+ errors="backslashreplace",
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ )
+ except Exception as e:
+ raise GitError("%s: %s" % (command[1], e))
- with Trace('git command %s %s with debug: %s', LAST_GITDIR, command, dbg):
- try:
- p = subprocess.Popen(command,
- cwd=cwd,
- env=env,
- encoding='utf-8',
- errors='backslashreplace',
- stdin=stdin,
- stdout=stdout,
- stderr=stderr)
- except Exception as e:
- raise GitError('%s: %s' % (command[1], e))
+ if ssh_proxy:
+ ssh_proxy.add_client(p)
- if ssh_proxy:
- ssh_proxy.add_client(p)
+ self.process = p
- self.process = p
+ try:
+ self.stdout, self.stderr = p.communicate(input=input)
+ finally:
+ if ssh_proxy:
+ ssh_proxy.remove_client(p)
+ self.rc = p.wait()
- try:
- self.stdout, self.stderr = p.communicate(input=input)
- finally:
- if ssh_proxy:
- ssh_proxy.remove_client(p)
- self.rc = p.wait()
+ @staticmethod
+ def _GetBasicEnv():
+ """Return a basic env for running git under.
- @staticmethod
- def _GetBasicEnv():
- """Return a basic env for running git under.
+ This is guaranteed to be side-effect free.
+ """
+ env = os.environ.copy()
+ for key in (
+ REPO_TRACE,
+ GIT_DIR,
+ "GIT_ALTERNATE_OBJECT_DIRECTORIES",
+ "GIT_OBJECT_DIRECTORY",
+ "GIT_WORK_TREE",
+ "GIT_GRAFT_FILE",
+ "GIT_INDEX_FILE",
+ ):
+ env.pop(key, None)
+ return env
- This is guaranteed to be side-effect free.
- """
- env = os.environ.copy()
- for key in (REPO_TRACE,
- GIT_DIR,
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES',
- 'GIT_OBJECT_DIRECTORY',
- 'GIT_WORK_TREE',
- 'GIT_GRAFT_FILE',
- 'GIT_INDEX_FILE'):
- env.pop(key, None)
- return env
-
- def Wait(self):
- return self.rc
+ def Wait(self):
+ return self.rc
diff --git a/git_config.py b/git_config.py
index 9ad979a..05b3c1e 100644
--- a/git_config.py
+++ b/git_config.py
@@ -34,23 +34,23 @@
# Prefix that is prepended to all the keys of SyncAnalysisState's data
# that is saved in the config.
-SYNC_STATE_PREFIX = 'repo.syncstate.'
+SYNC_STATE_PREFIX = "repo.syncstate."
-ID_RE = re.compile(r'^[0-9a-f]{40}$')
+ID_RE = re.compile(r"^[0-9a-f]{40}$")
REVIEW_CACHE = dict()
def IsChange(rev):
- return rev.startswith(R_CHANGES)
+ return rev.startswith(R_CHANGES)
def IsId(rev):
- return ID_RE.match(rev)
+ return ID_RE.match(rev)
def IsTag(rev):
- return rev.startswith(R_TAGS)
+ return rev.startswith(R_TAGS)
def IsImmutable(rev):
@@ -58,765 +58,785 @@
def _key(name):
- parts = name.split('.')
- if len(parts) < 2:
- return name.lower()
- parts[0] = parts[0].lower()
- parts[-1] = parts[-1].lower()
- return '.'.join(parts)
+ parts = name.split(".")
+ if len(parts) < 2:
+ return name.lower()
+ parts[0] = parts[0].lower()
+ parts[-1] = parts[-1].lower()
+ return ".".join(parts)
class GitConfig(object):
- _ForUser = None
+ _ForUser = None
- _ForSystem = None
- _SYSTEM_CONFIG = '/etc/gitconfig'
+ _ForSystem = None
+ _SYSTEM_CONFIG = "/etc/gitconfig"
- @classmethod
- def ForSystem(cls):
- if cls._ForSystem is None:
- cls._ForSystem = cls(configfile=cls._SYSTEM_CONFIG)
- return cls._ForSystem
+ @classmethod
+ def ForSystem(cls):
+ if cls._ForSystem is None:
+ cls._ForSystem = cls(configfile=cls._SYSTEM_CONFIG)
+ return cls._ForSystem
- @classmethod
- def ForUser(cls):
- if cls._ForUser is None:
- cls._ForUser = cls(configfile=cls._getUserConfig())
- return cls._ForUser
+ @classmethod
+ def ForUser(cls):
+ if cls._ForUser is None:
+ cls._ForUser = cls(configfile=cls._getUserConfig())
+ return cls._ForUser
- @staticmethod
- def _getUserConfig():
- return os.path.expanduser('~/.gitconfig')
+ @staticmethod
+ def _getUserConfig():
+ return os.path.expanduser("~/.gitconfig")
- @classmethod
- def ForRepository(cls, gitdir, defaults=None):
- return cls(configfile=os.path.join(gitdir, 'config'),
- defaults=defaults)
+ @classmethod
+ def ForRepository(cls, gitdir, defaults=None):
+ return cls(configfile=os.path.join(gitdir, "config"), defaults=defaults)
- def __init__(self, configfile, defaults=None, jsonFile=None):
- self.file = configfile
- self.defaults = defaults
- self._cache_dict = None
- self._section_dict = None
- self._remotes = {}
- self._branches = {}
+ def __init__(self, configfile, defaults=None, jsonFile=None):
+ self.file = configfile
+ self.defaults = defaults
+ self._cache_dict = None
+ self._section_dict = None
+ self._remotes = {}
+ self._branches = {}
- self._json = jsonFile
- if self._json is None:
- self._json = os.path.join(
- os.path.dirname(self.file),
- '.repo_' + os.path.basename(self.file) + '.json')
+ self._json = jsonFile
+ if self._json is None:
+ self._json = os.path.join(
+ os.path.dirname(self.file),
+ ".repo_" + os.path.basename(self.file) + ".json",
+ )
- def ClearCache(self):
- """Clear the in-memory cache of config."""
- self._cache_dict = None
+ def ClearCache(self):
+ """Clear the in-memory cache of config."""
+ self._cache_dict = None
- def Has(self, name, include_defaults=True):
- """Return true if this configuration file has the key.
- """
- if _key(name) in self._cache:
- return True
- if include_defaults and self.defaults:
- return self.defaults.Has(name, include_defaults=True)
- return False
+ def Has(self, name, include_defaults=True):
+ """Return true if this configuration file has the key."""
+ if _key(name) in self._cache:
+ return True
+ if include_defaults and self.defaults:
+ return self.defaults.Has(name, include_defaults=True)
+ return False
- def GetInt(self, name: str) -> Union[int, None]:
- """Returns an integer from the configuration file.
+ def GetInt(self, name: str) -> Union[int, None]:
+ """Returns an integer from the configuration file.
- This follows the git config syntax.
+ This follows the git config syntax.
- Args:
- name: The key to lookup.
+ Args:
+ name: The key to lookup.
- Returns:
- None if the value was not defined, or is not an int.
- Otherwise, the number itself.
- """
- v = self.GetString(name)
- if v is None:
- return None
- v = v.strip()
+ Returns:
+ None if the value was not defined, or is not an int.
+ Otherwise, the number itself.
+ """
+ v = self.GetString(name)
+ if v is None:
+ return None
+ v = v.strip()
- mult = 1
- if v.endswith('k'):
- v = v[:-1]
- mult = 1024
- elif v.endswith('m'):
- v = v[:-1]
- mult = 1024 * 1024
- elif v.endswith('g'):
- v = v[:-1]
- mult = 1024 * 1024 * 1024
+ mult = 1
+ if v.endswith("k"):
+ v = v[:-1]
+ mult = 1024
+ elif v.endswith("m"):
+ v = v[:-1]
+ mult = 1024 * 1024
+ elif v.endswith("g"):
+ v = v[:-1]
+ mult = 1024 * 1024 * 1024
- base = 10
- if v.startswith('0x'):
- base = 16
+ base = 10
+ if v.startswith("0x"):
+ base = 16
- try:
- return int(v, base=base) * mult
- except ValueError:
- print(
- f"warning: expected {name} to represent an integer, got {v} instead",
- file=sys.stderr)
- return None
+ try:
+ return int(v, base=base) * mult
+ except ValueError:
+ print(
+ f"warning: expected {name} to represent an integer, got {v} "
+ "instead",
+ file=sys.stderr,
+ )
+ return None
- def DumpConfigDict(self):
- """Returns the current configuration dict.
+ def DumpConfigDict(self):
+ """Returns the current configuration dict.
- Configuration data is information only (e.g. logging) and
- should not be considered a stable data-source.
+ Configuration data is information only (e.g. logging) and
+ should not be considered a stable data-source.
- Returns:
- dict of {<key>, <value>} for git configuration cache.
- <value> are strings converted by GetString.
- """
- config_dict = {}
- for key in self._cache:
- config_dict[key] = self.GetString(key)
- return config_dict
+ Returns:
+ dict of {<key>, <value>} for git configuration cache.
+ <value> are strings converted by GetString.
+ """
+ config_dict = {}
+ for key in self._cache:
+ config_dict[key] = self.GetString(key)
+ return config_dict
- def GetBoolean(self, name: str) -> Union[str, None]:
- """Returns a boolean from the configuration file.
- None : The value was not defined, or is not a boolean.
- True : The value was set to true or yes.
- False: The value was set to false or no.
- """
- v = self.GetString(name)
- if v is None:
- return None
- v = v.lower()
- if v in ('true', 'yes'):
- return True
- if v in ('false', 'no'):
- return False
- print(f"warning: expected {name} to represent a boolean, got {v} instead",
- file=sys.stderr)
- return None
+ def GetBoolean(self, name: str) -> Union[str, None]:
+ """Returns a boolean from the configuration file.
- def SetBoolean(self, name, value):
- """Set the truthy value for a key."""
- if value is not None:
- value = 'true' if value else 'false'
- self.SetString(name, value)
-
- def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
- """Get the first value for a key, or None if it is not defined.
-
- This configuration file is used first, if the key is not
- defined or all_keys = True then the defaults are also searched.
- """
- try:
- v = self._cache[_key(name)]
- except KeyError:
- if self.defaults:
- return self.defaults.GetString(name, all_keys=all_keys)
- v = []
-
- if not all_keys:
- if v:
- return v[0]
- return None
-
- r = []
- r.extend(v)
- if self.defaults:
- r.extend(self.defaults.GetString(name, all_keys=True))
- return r
-
- def SetString(self, name, value):
- """Set the value(s) for a key.
- Only this configuration file is modified.
-
- The supplied value should be either a string, or a list of strings (to
- store multiple values), or None (to delete the key).
- """
- key = _key(name)
-
- try:
- old = self._cache[key]
- except KeyError:
- old = []
-
- if value is None:
- if old:
- del self._cache[key]
- self._do('--unset-all', name)
-
- elif isinstance(value, list):
- if len(value) == 0:
- self.SetString(name, None)
-
- elif len(value) == 1:
- self.SetString(name, value[0])
-
- elif old != value:
- self._cache[key] = list(value)
- self._do('--replace-all', name, value[0])
- for i in range(1, len(value)):
- self._do('--add', name, value[i])
-
- elif len(old) != 1 or old[0] != value:
- self._cache[key] = [value]
- self._do('--replace-all', name, value)
-
- def GetRemote(self, name):
- """Get the remote.$name.* configuration values as an object.
- """
- try:
- r = self._remotes[name]
- except KeyError:
- r = Remote(self, name)
- self._remotes[r.name] = r
- return r
-
- def GetBranch(self, name):
- """Get the branch.$name.* configuration values as an object.
- """
- try:
- b = self._branches[name]
- except KeyError:
- b = Branch(self, name)
- self._branches[b.name] = b
- return b
-
- def GetSyncAnalysisStateData(self):
- """Returns data to be logged for the analysis of sync performance."""
- return {k: v for k, v in self.DumpConfigDict().items() if k.startswith(SYNC_STATE_PREFIX)}
-
- def UpdateSyncAnalysisState(self, options, superproject_logging_data):
- """Update Config's SYNC_STATE_PREFIX* data with the latest sync data.
-
- Args:
- options: Options passed to sync returned from optparse. See _Options().
- superproject_logging_data: A dictionary of superproject data that is to be logged.
-
- Returns:
- SyncAnalysisState object.
- """
- return SyncAnalysisState(self, options, superproject_logging_data)
-
- def GetSubSections(self, section):
- """List all subsection names matching $section.*.*
- """
- return self._sections.get(section, set())
-
- def HasSection(self, section, subsection=''):
- """Does at least one key in section.subsection exist?
- """
- try:
- return subsection in self._sections[section]
- except KeyError:
- return False
-
- def UrlInsteadOf(self, url):
- """Resolve any url.*.insteadof references.
- """
- for new_url in self.GetSubSections('url'):
- for old_url in self.GetString('url.%s.insteadof' % new_url, True):
- if old_url is not None and url.startswith(old_url):
- return new_url + url[len(old_url):]
- return url
-
- @property
- def _sections(self):
- d = self._section_dict
- if d is None:
- d = {}
- for name in self._cache.keys():
- p = name.split('.')
- if 2 == len(p):
- section = p[0]
- subsect = ''
- else:
- section = p[0]
- subsect = '.'.join(p[1:-1])
- if section not in d:
- d[section] = set()
- d[section].add(subsect)
- self._section_dict = d
- return d
-
- @property
- def _cache(self):
- if self._cache_dict is None:
- self._cache_dict = self._Read()
- return self._cache_dict
-
- def _Read(self):
- d = self._ReadJson()
- if d is None:
- d = self._ReadGit()
- self._SaveJson(d)
- return d
-
- def _ReadJson(self):
- try:
- if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
- platform_utils.remove(self._json)
+ Returns:
+ None: The value was not defined, or is not a boolean.
+ True: The value was set to true or yes.
+ False: The value was set to false or no.
+ """
+ v = self.GetString(name)
+ if v is None:
+ return None
+ v = v.lower()
+ if v in ("true", "yes"):
+ return True
+ if v in ("false", "no"):
+ return False
+ print(
+ f"warning: expected {name} to represent a boolean, got {v} instead",
+ file=sys.stderr,
+ )
return None
- except OSError:
- return None
- try:
- with Trace(': parsing %s', self.file):
- with open(self._json) as fd:
- return json.load(fd)
- except (IOError, ValueError):
- platform_utils.remove(self._json, missing_ok=True)
- return None
- def _SaveJson(self, cache):
- try:
- with open(self._json, 'w') as fd:
- json.dump(cache, fd, indent=2)
- except (IOError, TypeError):
- platform_utils.remove(self._json, missing_ok=True)
+ def SetBoolean(self, name, value):
+ """Set the truthy value for a key."""
+ if value is not None:
+ value = "true" if value else "false"
+ self.SetString(name, value)
- def _ReadGit(self):
- """
- Read configuration data from git.
+ def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
+ """Get the first value for a key, or None if it is not defined.
- This internal method populates the GitConfig cache.
+ This configuration file is used first, if the key is not
+ defined or all_keys = True then the defaults are also searched.
+ """
+ try:
+ v = self._cache[_key(name)]
+ except KeyError:
+ if self.defaults:
+ return self.defaults.GetString(name, all_keys=all_keys)
+ v = []
- """
- c = {}
- if not os.path.exists(self.file):
- return c
+ if not all_keys:
+ if v:
+ return v[0]
+ return None
- d = self._do('--null', '--list')
- for line in d.rstrip('\0').split('\0'):
- if '\n' in line:
- key, val = line.split('\n', 1)
- else:
- key = line
- val = None
+ r = []
+ r.extend(v)
+ if self.defaults:
+ r.extend(self.defaults.GetString(name, all_keys=True))
+ return r
- if key in c:
- c[key].append(val)
- else:
- c[key] = [val]
+ def SetString(self, name, value):
+ """Set the value(s) for a key.
+ Only this configuration file is modified.
- return c
+ The supplied value should be either a string, or a list of strings (to
+ store multiple values), or None (to delete the key).
+ """
+ key = _key(name)
- def _do(self, *args):
- if self.file == self._SYSTEM_CONFIG:
- command = ['config', '--system', '--includes']
- else:
- command = ['config', '--file', self.file, '--includes']
- command.extend(args)
+ try:
+ old = self._cache[key]
+ except KeyError:
+ old = []
- p = GitCommand(None,
- command,
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() == 0:
- return p.stdout
- else:
- raise GitError('git config %s: %s' % (str(args), p.stderr))
+ if value is None:
+ if old:
+ del self._cache[key]
+ self._do("--unset-all", name)
+
+ elif isinstance(value, list):
+ if len(value) == 0:
+ self.SetString(name, None)
+
+ elif len(value) == 1:
+ self.SetString(name, value[0])
+
+ elif old != value:
+ self._cache[key] = list(value)
+ self._do("--replace-all", name, value[0])
+ for i in range(1, len(value)):
+ self._do("--add", name, value[i])
+
+ elif len(old) != 1 or old[0] != value:
+ self._cache[key] = [value]
+ self._do("--replace-all", name, value)
+
+ def GetRemote(self, name):
+ """Get the remote.$name.* configuration values as an object."""
+ try:
+ r = self._remotes[name]
+ except KeyError:
+ r = Remote(self, name)
+ self._remotes[r.name] = r
+ return r
+
+ def GetBranch(self, name):
+ """Get the branch.$name.* configuration values as an object."""
+ try:
+ b = self._branches[name]
+ except KeyError:
+ b = Branch(self, name)
+ self._branches[b.name] = b
+ return b
+
+ def GetSyncAnalysisStateData(self):
+ """Returns data to be logged for the analysis of sync performance."""
+ return {
+ k: v
+ for k, v in self.DumpConfigDict().items()
+ if k.startswith(SYNC_STATE_PREFIX)
+ }
+
+ def UpdateSyncAnalysisState(self, options, superproject_logging_data):
+ """Update Config's SYNC_STATE_PREFIX* data with the latest sync data.
+
+ Args:
+ options: Options passed to sync returned from optparse. See
+ _Options().
+ superproject_logging_data: A dictionary of superproject data that is
+ to be logged.
+
+ Returns:
+ SyncAnalysisState object.
+ """
+ return SyncAnalysisState(self, options, superproject_logging_data)
+
+ def GetSubSections(self, section):
+ """List all subsection names matching $section.*.*"""
+ return self._sections.get(section, set())
+
+ def HasSection(self, section, subsection=""):
+ """Does at least one key in section.subsection exist?"""
+ try:
+ return subsection in self._sections[section]
+ except KeyError:
+ return False
+
+ def UrlInsteadOf(self, url):
+ """Resolve any url.*.insteadof references."""
+ for new_url in self.GetSubSections("url"):
+ for old_url in self.GetString("url.%s.insteadof" % new_url, True):
+ if old_url is not None and url.startswith(old_url):
+ return new_url + url[len(old_url) :]
+ return url
+
+ @property
+ def _sections(self):
+ d = self._section_dict
+ if d is None:
+ d = {}
+ for name in self._cache.keys():
+ p = name.split(".")
+ if 2 == len(p):
+ section = p[0]
+ subsect = ""
+ else:
+ section = p[0]
+ subsect = ".".join(p[1:-1])
+ if section not in d:
+ d[section] = set()
+ d[section].add(subsect)
+ self._section_dict = d
+ return d
+
+ @property
+ def _cache(self):
+ if self._cache_dict is None:
+ self._cache_dict = self._Read()
+ return self._cache_dict
+
+ def _Read(self):
+ d = self._ReadJson()
+ if d is None:
+ d = self._ReadGit()
+ self._SaveJson(d)
+ return d
+
+ def _ReadJson(self):
+ try:
+ if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
+ platform_utils.remove(self._json)
+ return None
+ except OSError:
+ return None
+ try:
+ with Trace(": parsing %s", self.file):
+ with open(self._json) as fd:
+ return json.load(fd)
+ except (IOError, ValueError):
+ platform_utils.remove(self._json, missing_ok=True)
+ return None
+
+ def _SaveJson(self, cache):
+ try:
+ with open(self._json, "w") as fd:
+ json.dump(cache, fd, indent=2)
+ except (IOError, TypeError):
+ platform_utils.remove(self._json, missing_ok=True)
+
+ def _ReadGit(self):
+ """
+ Read configuration data from git.
+
+ This internal method populates the GitConfig cache.
+
+ """
+ c = {}
+ if not os.path.exists(self.file):
+ return c
+
+ d = self._do("--null", "--list")
+ for line in d.rstrip("\0").split("\0"):
+ if "\n" in line:
+ key, val = line.split("\n", 1)
+ else:
+ key = line
+ val = None
+
+ if key in c:
+ c[key].append(val)
+ else:
+ c[key] = [val]
+
+ return c
+
+ def _do(self, *args):
+ if self.file == self._SYSTEM_CONFIG:
+ command = ["config", "--system", "--includes"]
+ else:
+ command = ["config", "--file", self.file, "--includes"]
+ command.extend(args)
+
+ p = GitCommand(None, command, capture_stdout=True, capture_stderr=True)
+ if p.Wait() == 0:
+ return p.stdout
+ else:
+ raise GitError("git config %s: %s" % (str(args), p.stderr))
class RepoConfig(GitConfig):
- """User settings for repo itself."""
+ """User settings for repo itself."""
- @staticmethod
- def _getUserConfig():
- repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
- return os.path.join(repo_config_dir, '.repoconfig/config')
+ @staticmethod
+ def _getUserConfig():
+ repo_config_dir = os.getenv("REPO_CONFIG_DIR", os.path.expanduser("~"))
+ return os.path.join(repo_config_dir, ".repoconfig/config")
class RefSpec(object):
- """A Git refspec line, split into its components:
+ """A Git refspec line, split into its components:
- forced: True if the line starts with '+'
- src: Left side of the line
- dst: Right side of the line
- """
+ forced: True if the line starts with '+'
+ src: Left side of the line
+ dst: Right side of the line
+ """
- @classmethod
- def FromString(cls, rs):
- lhs, rhs = rs.split(':', 2)
- if lhs.startswith('+'):
- lhs = lhs[1:]
- forced = True
- else:
- forced = False
- return cls(forced, lhs, rhs)
+ @classmethod
+ def FromString(cls, rs):
+ lhs, rhs = rs.split(":", 2)
+ if lhs.startswith("+"):
+ lhs = lhs[1:]
+ forced = True
+ else:
+ forced = False
+ return cls(forced, lhs, rhs)
- def __init__(self, forced, lhs, rhs):
- self.forced = forced
- self.src = lhs
- self.dst = rhs
+ def __init__(self, forced, lhs, rhs):
+ self.forced = forced
+ self.src = lhs
+ self.dst = rhs
- def SourceMatches(self, rev):
- if self.src:
- if rev == self.src:
- return True
- if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
- return True
- return False
+ def SourceMatches(self, rev):
+ if self.src:
+ if rev == self.src:
+ return True
+ if self.src.endswith("/*") and rev.startswith(self.src[:-1]):
+ return True
+ return False
- def DestMatches(self, ref):
- if self.dst:
- if ref == self.dst:
- return True
- if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
- return True
- return False
+ def DestMatches(self, ref):
+ if self.dst:
+ if ref == self.dst:
+ return True
+ if self.dst.endswith("/*") and ref.startswith(self.dst[:-1]):
+ return True
+ return False
- def MapSource(self, rev):
- if self.src.endswith('/*'):
- return self.dst[:-1] + rev[len(self.src) - 1:]
- return self.dst
+ def MapSource(self, rev):
+ if self.src.endswith("/*"):
+ return self.dst[:-1] + rev[len(self.src) - 1 :]
+ return self.dst
- def __str__(self):
- s = ''
- if self.forced:
- s += '+'
- if self.src:
- s += self.src
- if self.dst:
- s += ':'
- s += self.dst
- return s
+ def __str__(self):
+ s = ""
+ if self.forced:
+ s += "+"
+ if self.src:
+ s += self.src
+ if self.dst:
+ s += ":"
+ s += self.dst
+ return s
-URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
+URI_ALL = re.compile(r"^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/")
def GetSchemeFromUrl(url):
- m = URI_ALL.match(url)
- if m:
- return m.group(1)
- return None
+ m = URI_ALL.match(url)
+ if m:
+ return m.group(1)
+ return None
@contextlib.contextmanager
def GetUrlCookieFile(url, quiet):
- if url.startswith('persistent-'):
- try:
- p = subprocess.Popen(
- ['git-remote-persistent-https', '-print_config', url],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- try:
- cookieprefix = 'http.cookiefile='
- proxyprefix = 'http.proxy='
- cookiefile = None
- proxy = None
- for line in p.stdout:
- line = line.strip().decode('utf-8')
- if line.startswith(cookieprefix):
- cookiefile = os.path.expanduser(line[len(cookieprefix):])
- if line.startswith(proxyprefix):
- proxy = line[len(proxyprefix):]
- # Leave subprocess open, as cookie file may be transient.
- if cookiefile or proxy:
- yield cookiefile, proxy
- return
- finally:
- p.stdin.close()
- if p.wait():
- err_msg = p.stderr.read().decode('utf-8')
- if ' -print_config' in err_msg:
- pass # Persistent proxy doesn't support -print_config.
- elif not quiet:
- print(err_msg, file=sys.stderr)
- except OSError as e:
- if e.errno == errno.ENOENT:
- pass # No persistent proxy.
- raise
- cookiefile = GitConfig.ForUser().GetString('http.cookiefile')
- if cookiefile:
- cookiefile = os.path.expanduser(cookiefile)
- yield cookiefile, None
+ if url.startswith("persistent-"):
+ try:
+ p = subprocess.Popen(
+ ["git-remote-persistent-https", "-print_config", url],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ try:
+ cookieprefix = "http.cookiefile="
+ proxyprefix = "http.proxy="
+ cookiefile = None
+ proxy = None
+ for line in p.stdout:
+ line = line.strip().decode("utf-8")
+ if line.startswith(cookieprefix):
+ cookiefile = os.path.expanduser(
+ line[len(cookieprefix) :]
+ )
+ if line.startswith(proxyprefix):
+ proxy = line[len(proxyprefix) :]
+ # Leave subprocess open, as cookie file may be transient.
+ if cookiefile or proxy:
+ yield cookiefile, proxy
+ return
+ finally:
+ p.stdin.close()
+ if p.wait():
+ err_msg = p.stderr.read().decode("utf-8")
+ if " -print_config" in err_msg:
+ pass # Persistent proxy doesn't support -print_config.
+ elif not quiet:
+ print(err_msg, file=sys.stderr)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ pass # No persistent proxy.
+ raise
+ cookiefile = GitConfig.ForUser().GetString("http.cookiefile")
+ if cookiefile:
+ cookiefile = os.path.expanduser(cookiefile)
+ yield cookiefile, None
class Remote(object):
- """Configuration options related to a remote.
- """
+ """Configuration options related to a remote."""
- def __init__(self, config, name):
- self._config = config
- self.name = name
- self.url = self._Get('url')
- self.pushUrl = self._Get('pushurl')
- self.review = self._Get('review')
- self.projectname = self._Get('projectname')
- self.fetch = list(map(RefSpec.FromString,
- self._Get('fetch', all_keys=True)))
- self._review_url = None
+ def __init__(self, config, name):
+ self._config = config
+ self.name = name
+ self.url = self._Get("url")
+ self.pushUrl = self._Get("pushurl")
+ self.review = self._Get("review")
+ self.projectname = self._Get("projectname")
+ self.fetch = list(
+ map(RefSpec.FromString, self._Get("fetch", all_keys=True))
+ )
+ self._review_url = None
- def _InsteadOf(self):
- globCfg = GitConfig.ForUser()
- urlList = globCfg.GetSubSections('url')
- longest = ""
- longestUrl = ""
+ def _InsteadOf(self):
+ globCfg = GitConfig.ForUser()
+ urlList = globCfg.GetSubSections("url")
+ longest = ""
+ longestUrl = ""
- for url in urlList:
- key = "url." + url + ".insteadOf"
- insteadOfList = globCfg.GetString(key, all_keys=True)
+ for url in urlList:
+ key = "url." + url + ".insteadOf"
+ insteadOfList = globCfg.GetString(key, all_keys=True)
- for insteadOf in insteadOfList:
- if (self.url.startswith(insteadOf)
- and len(insteadOf) > len(longest)):
- longest = insteadOf
- longestUrl = url
+ for insteadOf in insteadOfList:
+ if self.url.startswith(insteadOf) and len(insteadOf) > len(
+ longest
+ ):
+ longest = insteadOf
+ longestUrl = url
- if len(longest) == 0:
- return self.url
+ if len(longest) == 0:
+ return self.url
- return self.url.replace(longest, longestUrl, 1)
+ return self.url.replace(longest, longestUrl, 1)
- def PreConnectFetch(self, ssh_proxy):
- """Run any setup for this remote before we connect to it.
+ def PreConnectFetch(self, ssh_proxy):
+ """Run any setup for this remote before we connect to it.
- In practice, if the remote is using SSH, we'll attempt to create a new
- SSH master session to it for reuse across projects.
+ In practice, if the remote is using SSH, we'll attempt to create a new
+ SSH master session to it for reuse across projects.
- Args:
- ssh_proxy: The SSH settings for managing master sessions.
+ Args:
+ ssh_proxy: The SSH settings for managing master sessions.
- Returns:
- Whether the preconnect phase for this remote was successful.
- """
- if not ssh_proxy:
- return True
+ Returns:
+ Whether the preconnect phase for this remote was successful.
+ """
+ if not ssh_proxy:
+ return True
- connectionUrl = self._InsteadOf()
- return ssh_proxy.preconnect(connectionUrl)
+ connectionUrl = self._InsteadOf()
+ return ssh_proxy.preconnect(connectionUrl)
- def ReviewUrl(self, userEmail, validate_certs):
- if self._review_url is None:
- if self.review is None:
- return None
+ def ReviewUrl(self, userEmail, validate_certs):
+ if self._review_url is None:
+ if self.review is None:
+ return None
- u = self.review
- if u.startswith('persistent-'):
- u = u[len('persistent-'):]
- if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
- u = 'http://%s' % u
- if u.endswith('/Gerrit'):
- u = u[:len(u) - len('/Gerrit')]
- if u.endswith('/ssh_info'):
- u = u[:len(u) - len('/ssh_info')]
- if not u.endswith('/'):
- u += '/'
- http_url = u
+ u = self.review
+ if u.startswith("persistent-"):
+ u = u[len("persistent-") :]
+ if u.split(":")[0] not in ("http", "https", "sso", "ssh"):
+ u = "http://%s" % u
+ if u.endswith("/Gerrit"):
+ u = u[: len(u) - len("/Gerrit")]
+ if u.endswith("/ssh_info"):
+ u = u[: len(u) - len("/ssh_info")]
+ if not u.endswith("/"):
+ u += "/"
+ http_url = u
- if u in REVIEW_CACHE:
- self._review_url = REVIEW_CACHE[u]
- elif 'REPO_HOST_PORT_INFO' in os.environ:
- host, port = os.environ['REPO_HOST_PORT_INFO'].split()
- self._review_url = self._SshReviewUrl(userEmail, host, port)
- REVIEW_CACHE[u] = self._review_url
- elif u.startswith('sso:') or u.startswith('ssh:'):
- self._review_url = u # Assume it's right
- REVIEW_CACHE[u] = self._review_url
- elif 'REPO_IGNORE_SSH_INFO' in os.environ:
- self._review_url = http_url
- REVIEW_CACHE[u] = self._review_url
- else:
- try:
- info_url = u + 'ssh_info'
- if not validate_certs:
- context = ssl._create_unverified_context()
- info = urllib.request.urlopen(info_url, context=context).read()
- else:
- info = urllib.request.urlopen(info_url).read()
- if info == b'NOT_AVAILABLE' or b'<' in info:
- # If `info` contains '<', we assume the server gave us some sort
- # of HTML response back, like maybe a login page.
- #
- # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
- self._review_url = http_url
- else:
- info = info.decode('utf-8')
- host, port = info.split()
- self._review_url = self._SshReviewUrl(userEmail, host, port)
- except urllib.error.HTTPError as e:
- raise UploadError('%s: %s' % (self.review, str(e)))
- except urllib.error.URLError as e:
- raise UploadError('%s: %s' % (self.review, str(e)))
- except HTTPException as e:
- raise UploadError('%s: %s' % (self.review, e.__class__.__name__))
+ if u in REVIEW_CACHE:
+ self._review_url = REVIEW_CACHE[u]
+ elif "REPO_HOST_PORT_INFO" in os.environ:
+ host, port = os.environ["REPO_HOST_PORT_INFO"].split()
+ self._review_url = self._SshReviewUrl(userEmail, host, port)
+ REVIEW_CACHE[u] = self._review_url
+ elif u.startswith("sso:") or u.startswith("ssh:"):
+ self._review_url = u # Assume it's right
+ REVIEW_CACHE[u] = self._review_url
+ elif "REPO_IGNORE_SSH_INFO" in os.environ:
+ self._review_url = http_url
+ REVIEW_CACHE[u] = self._review_url
+ else:
+ try:
+ info_url = u + "ssh_info"
+ if not validate_certs:
+ context = ssl._create_unverified_context()
+ info = urllib.request.urlopen(
+ info_url, context=context
+ ).read()
+ else:
+ info = urllib.request.urlopen(info_url).read()
+ if info == b"NOT_AVAILABLE" or b"<" in info:
+ # If `info` contains '<', we assume the server gave us
+ # some sort of HTML response back, like maybe a login
+ # page.
+ #
+ # Assume HTTP if SSH is not enabled or ssh_info doesn't
+ # look right.
+ self._review_url = http_url
+ else:
+ info = info.decode("utf-8")
+ host, port = info.split()
+ self._review_url = self._SshReviewUrl(
+ userEmail, host, port
+ )
+ except urllib.error.HTTPError as e:
+ raise UploadError("%s: %s" % (self.review, str(e)))
+ except urllib.error.URLError as e:
+ raise UploadError("%s: %s" % (self.review, str(e)))
+ except HTTPException as e:
+ raise UploadError(
+ "%s: %s" % (self.review, e.__class__.__name__)
+ )
- REVIEW_CACHE[u] = self._review_url
- return self._review_url + self.projectname
+ REVIEW_CACHE[u] = self._review_url
+ return self._review_url + self.projectname
- def _SshReviewUrl(self, userEmail, host, port):
- username = self._config.GetString('review.%s.username' % self.review)
- if username is None:
- username = userEmail.split('@')[0]
- return 'ssh://%s@%s:%s/' % (username, host, port)
+ def _SshReviewUrl(self, userEmail, host, port):
+ username = self._config.GetString("review.%s.username" % self.review)
+ if username is None:
+ username = userEmail.split("@")[0]
+ return "ssh://%s@%s:%s/" % (username, host, port)
- def ToLocal(self, rev):
- """Convert a remote revision string to something we have locally.
- """
- if self.name == '.' or IsId(rev):
- return rev
+ def ToLocal(self, rev):
+ """Convert a remote revision string to something we have locally."""
+ if self.name == "." or IsId(rev):
+ return rev
- if not rev.startswith('refs/'):
- rev = R_HEADS + rev
+ if not rev.startswith("refs/"):
+ rev = R_HEADS + rev
- for spec in self.fetch:
- if spec.SourceMatches(rev):
- return spec.MapSource(rev)
+ for spec in self.fetch:
+ if spec.SourceMatches(rev):
+ return spec.MapSource(rev)
- if not rev.startswith(R_HEADS):
- return rev
+ if not rev.startswith(R_HEADS):
+ return rev
- raise GitError('%s: remote %s does not have %s' %
- (self.projectname, self.name, rev))
+ raise GitError(
+ "%s: remote %s does not have %s"
+ % (self.projectname, self.name, rev)
+ )
- def WritesTo(self, ref):
- """True if the remote stores to the tracking ref.
- """
- for spec in self.fetch:
- if spec.DestMatches(ref):
- return True
- return False
+ def WritesTo(self, ref):
+ """True if the remote stores to the tracking ref."""
+ for spec in self.fetch:
+ if spec.DestMatches(ref):
+ return True
+ return False
- def ResetFetch(self, mirror=False):
- """Set the fetch refspec to its default value.
- """
- if mirror:
- dst = 'refs/heads/*'
- else:
- dst = 'refs/remotes/%s/*' % self.name
- self.fetch = [RefSpec(True, 'refs/heads/*', dst)]
+ def ResetFetch(self, mirror=False):
+ """Set the fetch refspec to its default value."""
+ if mirror:
+ dst = "refs/heads/*"
+ else:
+ dst = "refs/remotes/%s/*" % self.name
+ self.fetch = [RefSpec(True, "refs/heads/*", dst)]
- def Save(self):
- """Save this remote to the configuration.
- """
- self._Set('url', self.url)
- if self.pushUrl is not None:
- self._Set('pushurl', self.pushUrl + '/' + self.projectname)
- else:
- self._Set('pushurl', self.pushUrl)
- self._Set('review', self.review)
- self._Set('projectname', self.projectname)
- self._Set('fetch', list(map(str, self.fetch)))
+ def Save(self):
+ """Save this remote to the configuration."""
+ self._Set("url", self.url)
+ if self.pushUrl is not None:
+ self._Set("pushurl", self.pushUrl + "/" + self.projectname)
+ else:
+ self._Set("pushurl", self.pushUrl)
+ self._Set("review", self.review)
+ self._Set("projectname", self.projectname)
+ self._Set("fetch", list(map(str, self.fetch)))
- def _Set(self, key, value):
- key = 'remote.%s.%s' % (self.name, key)
- return self._config.SetString(key, value)
+ def _Set(self, key, value):
+ key = "remote.%s.%s" % (self.name, key)
+ return self._config.SetString(key, value)
- def _Get(self, key, all_keys=False):
- key = 'remote.%s.%s' % (self.name, key)
- return self._config.GetString(key, all_keys=all_keys)
+ def _Get(self, key, all_keys=False):
+ key = "remote.%s.%s" % (self.name, key)
+ return self._config.GetString(key, all_keys=all_keys)
class Branch(object):
- """Configuration options related to a single branch.
- """
+ """Configuration options related to a single branch."""
- def __init__(self, config, name):
- self._config = config
- self.name = name
- self.merge = self._Get('merge')
+ def __init__(self, config, name):
+ self._config = config
+ self.name = name
+ self.merge = self._Get("merge")
- r = self._Get('remote')
- if r:
- self.remote = self._config.GetRemote(r)
- else:
- self.remote = None
+ r = self._Get("remote")
+ if r:
+ self.remote = self._config.GetRemote(r)
+ else:
+ self.remote = None
- @property
- def LocalMerge(self):
- """Convert the merge spec to a local name.
- """
- if self.remote and self.merge:
- return self.remote.ToLocal(self.merge)
- return None
+ @property
+ def LocalMerge(self):
+ """Convert the merge spec to a local name."""
+ if self.remote and self.merge:
+ return self.remote.ToLocal(self.merge)
+ return None
- def Save(self):
- """Save this branch back into the configuration.
- """
- if self._config.HasSection('branch', self.name):
- if self.remote:
- self._Set('remote', self.remote.name)
- else:
- self._Set('remote', None)
- self._Set('merge', self.merge)
+ def Save(self):
+ """Save this branch back into the configuration."""
+ if self._config.HasSection("branch", self.name):
+ if self.remote:
+ self._Set("remote", self.remote.name)
+ else:
+ self._Set("remote", None)
+ self._Set("merge", self.merge)
- else:
- with open(self._config.file, 'a') as fd:
- fd.write('[branch "%s"]\n' % self.name)
- if self.remote:
- fd.write('\tremote = %s\n' % self.remote.name)
- if self.merge:
- fd.write('\tmerge = %s\n' % self.merge)
+ else:
+ with open(self._config.file, "a") as fd:
+ fd.write('[branch "%s"]\n' % self.name)
+ if self.remote:
+ fd.write("\tremote = %s\n" % self.remote.name)
+ if self.merge:
+ fd.write("\tmerge = %s\n" % self.merge)
- def _Set(self, key, value):
- key = 'branch.%s.%s' % (self.name, key)
- return self._config.SetString(key, value)
+ def _Set(self, key, value):
+ key = "branch.%s.%s" % (self.name, key)
+ return self._config.SetString(key, value)
- def _Get(self, key, all_keys=False):
- key = 'branch.%s.%s' % (self.name, key)
- return self._config.GetString(key, all_keys=all_keys)
+ def _Get(self, key, all_keys=False):
+ key = "branch.%s.%s" % (self.name, key)
+ return self._config.GetString(key, all_keys=all_keys)
class SyncAnalysisState:
- """Configuration options related to logging of sync state for analysis.
+ """Configuration options related to logging of sync state for analysis.
- This object is versioned.
- """
- def __init__(self, config, options, superproject_logging_data):
- """Initializes SyncAnalysisState.
-
- Saves the following data into the |config| object.
- - sys.argv, options, superproject's logging data.
- - repo.*, branch.* and remote.* parameters from config object.
- - Current time as synctime.
- - Version number of the object.
-
- All the keys saved by this object are prepended with SYNC_STATE_PREFIX.
-
- Args:
- config: GitConfig object to store all options.
- options: Options passed to sync returned from optparse. See _Options().
- superproject_logging_data: A dictionary of superproject data that is to be logged.
+ This object is versioned.
"""
- self._config = config
- now = datetime.datetime.utcnow()
- self._Set('main.synctime', now.isoformat() + 'Z')
- self._Set('main.version', '1')
- self._Set('sys.argv', sys.argv)
- for key, value in superproject_logging_data.items():
- self._Set(f'superproject.{key}', value)
- for key, value in options.__dict__.items():
- self._Set(f'options.{key}', value)
- config_items = config.DumpConfigDict().items()
- EXTRACT_NAMESPACES = {'repo', 'branch', 'remote'}
- self._SetDictionary({k: v for k, v in config_items
- if not k.startswith(SYNC_STATE_PREFIX) and
- k.split('.', 1)[0] in EXTRACT_NAMESPACES})
- def _SetDictionary(self, data):
- """Save all key/value pairs of |data| dictionary.
+ def __init__(self, config, options, superproject_logging_data):
+ """Initializes SyncAnalysisState.
- Args:
- data: A dictionary whose key/value are to be saved.
- """
- for key, value in data.items():
- self._Set(key, value)
+ Saves the following data into the |config| object.
+ - sys.argv, options, superproject's logging data.
+ - repo.*, branch.* and remote.* parameters from config object.
+ - Current time as synctime.
+ - Version number of the object.
- def _Set(self, key, value):
- """Set the |value| for a |key| in the |_config| member.
+ All the keys saved by this object are prepended with SYNC_STATE_PREFIX.
- |key| is prepended with the value of SYNC_STATE_PREFIX constant.
+ Args:
+ config: GitConfig object to store all options.
+ options: Options passed to sync returned from optparse. See
+ _Options().
+ superproject_logging_data: A dictionary of superproject data that is
+ to be logged.
+ """
+ self._config = config
+ now = datetime.datetime.utcnow()
+ self._Set("main.synctime", now.isoformat() + "Z")
+ self._Set("main.version", "1")
+ self._Set("sys.argv", sys.argv)
+ for key, value in superproject_logging_data.items():
+ self._Set(f"superproject.{key}", value)
+ for key, value in options.__dict__.items():
+ self._Set(f"options.{key}", value)
+ config_items = config.DumpConfigDict().items()
+ EXTRACT_NAMESPACES = {"repo", "branch", "remote"}
+ self._SetDictionary(
+ {
+ k: v
+ for k, v in config_items
+ if not k.startswith(SYNC_STATE_PREFIX)
+ and k.split(".", 1)[0] in EXTRACT_NAMESPACES
+ }
+ )
- Args:
- key: Name of the key.
- value: |value| could be of any type. If it is 'bool', it will be saved
- as a Boolean and for all other types, it will be saved as a String.
- """
- if value is None:
- return
- sync_key = f'{SYNC_STATE_PREFIX}{key}'
- sync_key = sync_key.replace('_', '')
- if isinstance(value, str):
- self._config.SetString(sync_key, value)
- elif isinstance(value, bool):
- self._config.SetBoolean(sync_key, value)
- else:
- self._config.SetString(sync_key, str(value))
+ def _SetDictionary(self, data):
+ """Save all key/value pairs of |data| dictionary.
+
+ Args:
+ data: A dictionary whose key/value are to be saved.
+ """
+ for key, value in data.items():
+ self._Set(key, value)
+
+ def _Set(self, key, value):
+ """Set the |value| for a |key| in the |_config| member.
+
+ |key| is prepended with the value of SYNC_STATE_PREFIX constant.
+
+ Args:
+ key: Name of the key.
+ value: |value| could be of any type. If it is 'bool', it will be
+ saved as a Boolean and for all other types, it will be saved as
+ a String.
+ """
+ if value is None:
+ return
+ sync_key = f"{SYNC_STATE_PREFIX}{key}"
+ sync_key = sync_key.replace("_", "")
+ if isinstance(value, str):
+ self._config.SetString(sync_key, value)
+ elif isinstance(value, bool):
+ self._config.SetBoolean(sync_key, value)
+ else:
+ self._config.SetString(sync_key, str(value))
diff --git a/git_refs.py b/git_refs.py
index 300d2b3..aca1f90 100644
--- a/git_refs.py
+++ b/git_refs.py
@@ -16,149 +16,150 @@
from repo_trace import Trace
import platform_utils
-HEAD = 'HEAD'
-R_CHANGES = 'refs/changes/'
-R_HEADS = 'refs/heads/'
-R_TAGS = 'refs/tags/'
-R_PUB = 'refs/published/'
-R_WORKTREE = 'refs/worktree/'
-R_WORKTREE_M = R_WORKTREE + 'm/'
-R_M = 'refs/remotes/m/'
+HEAD = "HEAD"
+R_CHANGES = "refs/changes/"
+R_HEADS = "refs/heads/"
+R_TAGS = "refs/tags/"
+R_PUB = "refs/published/"
+R_WORKTREE = "refs/worktree/"
+R_WORKTREE_M = R_WORKTREE + "m/"
+R_M = "refs/remotes/m/"
class GitRefs(object):
- def __init__(self, gitdir):
- self._gitdir = gitdir
- self._phyref = None
- self._symref = None
- self._mtime = {}
+ def __init__(self, gitdir):
+ self._gitdir = gitdir
+ self._phyref = None
+ self._symref = None
+ self._mtime = {}
- @property
- def all(self):
- self._EnsureLoaded()
- return self._phyref
+ @property
+ def all(self):
+ self._EnsureLoaded()
+ return self._phyref
- def get(self, name):
- try:
- return self.all[name]
- except KeyError:
- return ''
-
- def deleted(self, name):
- if self._phyref is not None:
- if name in self._phyref:
- del self._phyref[name]
-
- if name in self._symref:
- del self._symref[name]
-
- if name in self._mtime:
- del self._mtime[name]
-
- def symref(self, name):
- try:
- self._EnsureLoaded()
- return self._symref[name]
- except KeyError:
- return ''
-
- def _EnsureLoaded(self):
- if self._phyref is None or self._NeedUpdate():
- self._LoadAll()
-
- def _NeedUpdate(self):
- with Trace(': scan refs %s', self._gitdir):
- for name, mtime in self._mtime.items():
+ def get(self, name):
try:
- if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
- return True
+ return self.all[name]
+ except KeyError:
+ return ""
+
+ def deleted(self, name):
+ if self._phyref is not None:
+ if name in self._phyref:
+ del self._phyref[name]
+
+ if name in self._symref:
+ del self._symref[name]
+
+ if name in self._mtime:
+ del self._mtime[name]
+
+ def symref(self, name):
+ try:
+ self._EnsureLoaded()
+ return self._symref[name]
+ except KeyError:
+ return ""
+
+ def _EnsureLoaded(self):
+ if self._phyref is None or self._NeedUpdate():
+ self._LoadAll()
+
+ def _NeedUpdate(self):
+ with Trace(": scan refs %s", self._gitdir):
+ for name, mtime in self._mtime.items():
+ try:
+ if mtime != os.path.getmtime(
+ os.path.join(self._gitdir, name)
+ ):
+ return True
+ except OSError:
+ return True
+ return False
+
+ def _LoadAll(self):
+ with Trace(": load refs %s", self._gitdir):
+ self._phyref = {}
+ self._symref = {}
+ self._mtime = {}
+
+ self._ReadPackedRefs()
+ self._ReadLoose("refs/")
+ self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
+
+ scan = self._symref
+ attempts = 0
+ while scan and attempts < 5:
+ scan_next = {}
+ for name, dest in scan.items():
+ if dest in self._phyref:
+ self._phyref[name] = self._phyref[dest]
+ else:
+ scan_next[name] = dest
+ scan = scan_next
+ attempts += 1
+
+ def _ReadPackedRefs(self):
+ path = os.path.join(self._gitdir, "packed-refs")
+ try:
+ fd = open(path, "r")
+ mtime = os.path.getmtime(path)
+ except IOError:
+ return
except OSError:
- return True
- return False
+ return
+ try:
+ for line in fd:
+ line = str(line)
+ if line[0] == "#":
+ continue
+ if line[0] == "^":
+ continue
- def _LoadAll(self):
- with Trace(': load refs %s', self._gitdir):
+ line = line[:-1]
+ p = line.split(" ")
+ ref_id = p[0]
+ name = p[1]
- self._phyref = {}
- self._symref = {}
- self._mtime = {}
+ self._phyref[name] = ref_id
+ finally:
+ fd.close()
+ self._mtime["packed-refs"] = mtime
- self._ReadPackedRefs()
- self._ReadLoose('refs/')
- self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
+ def _ReadLoose(self, prefix):
+ base = os.path.join(self._gitdir, prefix)
+ for name in platform_utils.listdir(base):
+ p = os.path.join(base, name)
+ # We don't implement the full ref validation algorithm, just the
+ # simple rules that would show up in local filesystems.
+ # https://git-scm.com/docs/git-check-ref-format
+ if name.startswith(".") or name.endswith(".lock"):
+ pass
+ elif platform_utils.isdir(p):
+ self._mtime[prefix] = os.path.getmtime(base)
+ self._ReadLoose(prefix + name + "/")
+ else:
+ self._ReadLoose1(p, prefix + name)
- scan = self._symref
- attempts = 0
- while scan and attempts < 5:
- scan_next = {}
- for name, dest in scan.items():
- if dest in self._phyref:
- self._phyref[name] = self._phyref[dest]
- else:
- scan_next[name] = dest
- scan = scan_next
- attempts += 1
+ def _ReadLoose1(self, path, name):
+ try:
+ with open(path) as fd:
+ mtime = os.path.getmtime(path)
+ ref_id = fd.readline()
+ except (OSError, UnicodeError):
+ return
- def _ReadPackedRefs(self):
- path = os.path.join(self._gitdir, 'packed-refs')
- try:
- fd = open(path, 'r')
- mtime = os.path.getmtime(path)
- except IOError:
- return
- except OSError:
- return
- try:
- for line in fd:
- line = str(line)
- if line[0] == '#':
- continue
- if line[0] == '^':
- continue
+ try:
+ ref_id = ref_id.decode()
+ except AttributeError:
+ pass
+ if not ref_id:
+ return
+ ref_id = ref_id[:-1]
- line = line[:-1]
- p = line.split(' ')
- ref_id = p[0]
- name = p[1]
-
- self._phyref[name] = ref_id
- finally:
- fd.close()
- self._mtime['packed-refs'] = mtime
-
- def _ReadLoose(self, prefix):
- base = os.path.join(self._gitdir, prefix)
- for name in platform_utils.listdir(base):
- p = os.path.join(base, name)
- # We don't implement the full ref validation algorithm, just the simple
- # rules that would show up in local filesystems.
- # https://git-scm.com/docs/git-check-ref-format
- if name.startswith('.') or name.endswith('.lock'):
- pass
- elif platform_utils.isdir(p):
- self._mtime[prefix] = os.path.getmtime(base)
- self._ReadLoose(prefix + name + '/')
- else:
- self._ReadLoose1(p, prefix + name)
-
- def _ReadLoose1(self, path, name):
- try:
- with open(path) as fd:
- mtime = os.path.getmtime(path)
- ref_id = fd.readline()
- except (OSError, UnicodeError):
- return
-
- try:
- ref_id = ref_id.decode()
- except AttributeError:
- pass
- if not ref_id:
- return
- ref_id = ref_id[:-1]
-
- if ref_id.startswith('ref: '):
- self._symref[name] = ref_id[5:]
- else:
- self._phyref[name] = ref_id
- self._mtime[name] = mtime
+ if ref_id.startswith("ref: "):
+ self._symref[name] = ref_id[5:]
+ else:
+ self._phyref[name] = ref_id
+ self._mtime[name] = mtime
diff --git a/git_superproject.py b/git_superproject.py
index 69a4d1f..f1b4f23 100644
--- a/git_superproject.py
+++ b/git_superproject.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Provide functionality to get all projects and their commit ids from Superproject.
+"""Provide functionality to get projects and their commit ids from Superproject.
For more information on superproject, check out:
https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
@@ -33,434 +33,524 @@
from git_config import RepoConfig
from git_refs import GitRefs
-_SUPERPROJECT_GIT_NAME = 'superproject.git'
-_SUPERPROJECT_MANIFEST_NAME = 'superproject_override.xml'
+_SUPERPROJECT_GIT_NAME = "superproject.git"
+_SUPERPROJECT_MANIFEST_NAME = "superproject_override.xml"
class SyncResult(NamedTuple):
- """Return the status of sync and whether caller should exit."""
+ """Return the status of sync and whether caller should exit."""
- # Whether the superproject sync was successful.
- success: bool
- # Whether the caller should exit.
- fatal: bool
+ # Whether the superproject sync was successful.
+ success: bool
+ # Whether the caller should exit.
+ fatal: bool
class CommitIdsResult(NamedTuple):
- """Return the commit ids and whether caller should exit."""
+ """Return the commit ids and whether caller should exit."""
- # A dictionary with the projects/commit ids on success, otherwise None.
- commit_ids: dict
- # Whether the caller should exit.
- fatal: bool
+ # A dictionary with the projects/commit ids on success, otherwise None.
+ commit_ids: dict
+ # Whether the caller should exit.
+ fatal: bool
class UpdateProjectsResult(NamedTuple):
- """Return the overriding manifest file and whether caller should exit."""
+ """Return the overriding manifest file and whether caller should exit."""
- # Path name of the overriding manifest file if successful, otherwise None.
- manifest_path: str
- # Whether the caller should exit.
- fatal: bool
+ # Path name of the overriding manifest file if successful, otherwise None.
+ manifest_path: str
+ # Whether the caller should exit.
+ fatal: bool
class Superproject(object):
- """Get commit ids from superproject.
+ """Get commit ids from superproject.
- Initializes a local copy of a superproject for the manifest. This allows
- lookup of commit ids for all projects. It contains _project_commit_ids which
- is a dictionary with project/commit id entries.
- """
- def __init__(self, manifest, name, remote, revision,
- superproject_dir='exp-superproject'):
- """Initializes superproject.
-
- Args:
- manifest: A Manifest object that is to be written to a file.
- name: The unique name of the superproject
- remote: The RemoteSpec for the remote.
- revision: The name of the git branch to track.
- superproject_dir: Relative path under |manifest.subdir| to checkout
- superproject.
+ Initializes a local copy of a superproject for the manifest. This allows
+ lookup of commit ids for all projects. It contains _project_commit_ids which
+ is a dictionary with project/commit id entries.
"""
- self._project_commit_ids = None
- self._manifest = manifest
- self.name = name
- self.remote = remote
- self.revision = self._branch = revision
- self._repodir = manifest.repodir
- self._superproject_dir = superproject_dir
- self._superproject_path = manifest.SubmanifestInfoDir(manifest.path_prefix,
- superproject_dir)
- self._manifest_path = os.path.join(self._superproject_path,
- _SUPERPROJECT_MANIFEST_NAME)
- git_name = hashlib.md5(remote.name.encode('utf8')).hexdigest() + '-'
- self._remote_url = remote.url
- self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
- self._work_git = os.path.join(self._superproject_path, self._work_git_name)
- # The following are command arguemnts, rather than superproject attributes,
- # and were included here originally. They should eventually become
- # arguments that are passed down from the public methods, instead of being
- # treated as attributes.
- self._git_event_log = None
- self._quiet = False
- self._print_messages = False
+ def __init__(
+ self,
+ manifest,
+ name,
+ remote,
+ revision,
+ superproject_dir="exp-superproject",
+ ):
+ """Initializes superproject.
- def SetQuiet(self, value):
- """Set the _quiet attribute."""
- self._quiet = value
+ Args:
+ manifest: A Manifest object that is to be written to a file.
+ name: The unique name of the superproject
+ remote: The RemoteSpec for the remote.
+ revision: The name of the git branch to track.
+ superproject_dir: Relative path under |manifest.subdir| to checkout
+ superproject.
+ """
+ self._project_commit_ids = None
+ self._manifest = manifest
+ self.name = name
+ self.remote = remote
+ self.revision = self._branch = revision
+ self._repodir = manifest.repodir
+ self._superproject_dir = superproject_dir
+ self._superproject_path = manifest.SubmanifestInfoDir(
+ manifest.path_prefix, superproject_dir
+ )
+ self._manifest_path = os.path.join(
+ self._superproject_path, _SUPERPROJECT_MANIFEST_NAME
+ )
+ git_name = hashlib.md5(remote.name.encode("utf8")).hexdigest() + "-"
+ self._remote_url = remote.url
+ self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
+ self._work_git = os.path.join(
+ self._superproject_path, self._work_git_name
+ )
- def SetPrintMessages(self, value):
- """Set the _print_messages attribute."""
- self._print_messages = value
+        # The following are command arguments, rather than superproject
+ # attributes, and were included here originally. They should eventually
+ # become arguments that are passed down from the public methods, instead
+ # of being treated as attributes.
+ self._git_event_log = None
+ self._quiet = False
+ self._print_messages = False
- @property
- def project_commit_ids(self):
- """Returns a dictionary of projects and their commit ids."""
- return self._project_commit_ids
+ def SetQuiet(self, value):
+ """Set the _quiet attribute."""
+ self._quiet = value
- @property
- def manifest_path(self):
- """Returns the manifest path if the path exists or None."""
- return self._manifest_path if os.path.exists(self._manifest_path) else None
+ def SetPrintMessages(self, value):
+ """Set the _print_messages attribute."""
+ self._print_messages = value
- def _LogMessage(self, fmt, *inputs):
- """Logs message to stderr and _git_event_log."""
- message = f'{self._LogMessagePrefix()} {fmt.format(*inputs)}'
- if self._print_messages:
- print(message, file=sys.stderr)
- self._git_event_log.ErrorEvent(message, fmt)
+ @property
+ def project_commit_ids(self):
+ """Returns a dictionary of projects and their commit ids."""
+ return self._project_commit_ids
- def _LogMessagePrefix(self):
- """Returns the prefix string to be logged in each log message"""
- return f'repo superproject branch: {self._branch} url: {self._remote_url}'
+ @property
+ def manifest_path(self):
+ """Returns the manifest path if the path exists or None."""
+ return (
+ self._manifest_path if os.path.exists(self._manifest_path) else None
+ )
- def _LogError(self, fmt, *inputs):
- """Logs error message to stderr and _git_event_log."""
- self._LogMessage(f'error: {fmt}', *inputs)
+ def _LogMessage(self, fmt, *inputs):
+ """Logs message to stderr and _git_event_log."""
+ message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
+ if self._print_messages:
+ print(message, file=sys.stderr)
+ self._git_event_log.ErrorEvent(message, fmt)
- def _LogWarning(self, fmt, *inputs):
- """Logs warning message to stderr and _git_event_log."""
- self._LogMessage(f'warning: {fmt}', *inputs)
+ def _LogMessagePrefix(self):
+ """Returns the prefix string to be logged in each log message"""
+ return (
+ f"repo superproject branch: {self._branch} url: {self._remote_url}"
+ )
- def _Init(self):
- """Sets up a local Git repository to get a copy of a superproject.
+ def _LogError(self, fmt, *inputs):
+ """Logs error message to stderr and _git_event_log."""
+ self._LogMessage(f"error: {fmt}", *inputs)
- Returns:
- True if initialization is successful, or False.
- """
- if not os.path.exists(self._superproject_path):
- os.mkdir(self._superproject_path)
- if not self._quiet and not os.path.exists(self._work_git):
- print('%s: Performing initial setup for superproject; this might take '
- 'several minutes.' % self._work_git)
- cmd = ['init', '--bare', self._work_git_name]
- p = GitCommand(None,
- cmd,
- cwd=self._superproject_path,
- capture_stdout=True,
- capture_stderr=True)
- retval = p.Wait()
- if retval:
- self._LogWarning('git init call failed, command: git {}, '
- 'return code: {}, stderr: {}', cmd, retval, p.stderr)
- return False
- return True
+ def _LogWarning(self, fmt, *inputs):
+ """Logs warning message to stderr and _git_event_log."""
+ self._LogMessage(f"warning: {fmt}", *inputs)
- def _Fetch(self):
- """Fetches a local copy of a superproject for the manifest based on |_remote_url|.
+ def _Init(self):
+ """Sets up a local Git repository to get a copy of a superproject.
- Returns:
- True if fetch is successful, or False.
- """
- if not os.path.exists(self._work_git):
- self._LogWarning('git fetch missing directory: {}', self._work_git)
- return False
- if not git_require((2, 28, 0)):
- self._LogWarning('superproject requires a git version 2.28 or later')
- return False
- cmd = ['fetch', self._remote_url, '--depth', '1', '--force', '--no-tags',
- '--filter', 'blob:none']
+ Returns:
+ True if initialization is successful, or False.
+ """
+ if not os.path.exists(self._superproject_path):
+ os.mkdir(self._superproject_path)
+ if not self._quiet and not os.path.exists(self._work_git):
+ print(
+ "%s: Performing initial setup for superproject; this might "
+ "take several minutes." % self._work_git
+ )
+ cmd = ["init", "--bare", self._work_git_name]
+ p = GitCommand(
+ None,
+ cmd,
+ cwd=self._superproject_path,
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ retval = p.Wait()
+ if retval:
+ self._LogWarning(
+ "git init call failed, command: git {}, "
+ "return code: {}, stderr: {}",
+ cmd,
+ retval,
+ p.stderr,
+ )
+ return False
+ return True
- # Check if there is a local ref that we can pass to --negotiation-tip.
- # If this is the first fetch, it does not exist yet.
- # We use --negotiation-tip to speed up the fetch. Superproject branches do
- # not share commits. So this lets git know it only needs to send commits
- # reachable from the specified local refs.
- rev_commit = GitRefs(self._work_git).get(f'refs/heads/{self.revision}')
- if rev_commit:
- cmd.extend(['--negotiation-tip', rev_commit])
+ def _Fetch(self):
+ """Fetches a superproject for the manifest based on |_remote_url|.
- if self._branch:
- cmd += [self._branch + ':' + self._branch]
- p = GitCommand(None,
- cmd,
- cwd=self._work_git,
- capture_stdout=True,
- capture_stderr=True)
- retval = p.Wait()
- if retval:
- self._LogWarning('git fetch call failed, command: git {}, '
- 'return code: {}, stderr: {}', cmd, retval, p.stderr)
- return False
- return True
+        This runs git fetch which stores a local copy of the superproject.
- def _LsTree(self):
- """Gets the commit ids for all projects.
+ Returns:
+ True if fetch is successful, or False.
+ """
+ if not os.path.exists(self._work_git):
+ self._LogWarning("git fetch missing directory: {}", self._work_git)
+ return False
+ if not git_require((2, 28, 0)):
+ self._LogWarning(
+ "superproject requires a git version 2.28 or later"
+ )
+ return False
+ cmd = [
+ "fetch",
+ self._remote_url,
+ "--depth",
+ "1",
+ "--force",
+ "--no-tags",
+ "--filter",
+ "blob:none",
+ ]
- Works only in git repositories.
+ # Check if there is a local ref that we can pass to --negotiation-tip.
+ # If this is the first fetch, it does not exist yet.
+ # We use --negotiation-tip to speed up the fetch. Superproject branches
+ # do not share commits. So this lets git know it only needs to send
+ # commits reachable from the specified local refs.
+ rev_commit = GitRefs(self._work_git).get(f"refs/heads/{self.revision}")
+ if rev_commit:
+ cmd.extend(["--negotiation-tip", rev_commit])
- Returns:
- data: data returned from 'git ls-tree ...' instead of None.
- """
- if not os.path.exists(self._work_git):
- self._LogWarning('git ls-tree missing directory: {}', self._work_git)
- return None
- data = None
- branch = 'HEAD' if not self._branch else self._branch
- cmd = ['ls-tree', '-z', '-r', branch]
+ if self._branch:
+ cmd += [self._branch + ":" + self._branch]
+ p = GitCommand(
+ None,
+ cmd,
+ cwd=self._work_git,
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ retval = p.Wait()
+ if retval:
+ self._LogWarning(
+ "git fetch call failed, command: git {}, "
+ "return code: {}, stderr: {}",
+ cmd,
+ retval,
+ p.stderr,
+ )
+ return False
+ return True
- p = GitCommand(None,
- cmd,
- cwd=self._work_git,
- capture_stdout=True,
- capture_stderr=True)
- retval = p.Wait()
- if retval == 0:
- data = p.stdout
- else:
- self._LogWarning('git ls-tree call failed, command: git {}, '
- 'return code: {}, stderr: {}', cmd, retval, p.stderr)
- return data
+ def _LsTree(self):
+ """Gets the commit ids for all projects.
- def Sync(self, git_event_log):
- """Gets a local copy of a superproject for the manifest.
+ Works only in git repositories.
- Args:
- git_event_log: an EventLog, for git tracing.
+ Returns:
+ data: data returned from 'git ls-tree ...' instead of None.
+ """
+ if not os.path.exists(self._work_git):
+ self._LogWarning(
+ "git ls-tree missing directory: {}", self._work_git
+ )
+ return None
+ data = None
+ branch = "HEAD" if not self._branch else self._branch
+ cmd = ["ls-tree", "-z", "-r", branch]
- Returns:
- SyncResult
- """
- self._git_event_log = git_event_log
- if not self._manifest.superproject:
- self._LogWarning('superproject tag is not defined in manifest: {}',
- self._manifest.manifestFile)
- return SyncResult(False, False)
+ p = GitCommand(
+ None,
+ cmd,
+ cwd=self._work_git,
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ retval = p.Wait()
+ if retval == 0:
+ data = p.stdout
+ else:
+ self._LogWarning(
+ "git ls-tree call failed, command: git {}, "
+ "return code: {}, stderr: {}",
+ cmd,
+ retval,
+ p.stderr,
+ )
+ return data
- _PrintBetaNotice()
+ def Sync(self, git_event_log):
+ """Gets a local copy of a superproject for the manifest.
- should_exit = True
- if not self._remote_url:
- self._LogWarning('superproject URL is not defined in manifest: {}',
- self._manifest.manifestFile)
- return SyncResult(False, should_exit)
+ Args:
+ git_event_log: an EventLog, for git tracing.
- if not self._Init():
- return SyncResult(False, should_exit)
- if not self._Fetch():
- return SyncResult(False, should_exit)
- if not self._quiet:
- print('%s: Initial setup for superproject completed.' % self._work_git)
- return SyncResult(True, False)
+ Returns:
+ SyncResult
+ """
+ self._git_event_log = git_event_log
+ if not self._manifest.superproject:
+ self._LogWarning(
+ "superproject tag is not defined in manifest: {}",
+ self._manifest.manifestFile,
+ )
+ return SyncResult(False, False)
- def _GetAllProjectsCommitIds(self):
- """Get commit ids for all projects from superproject and save them in _project_commit_ids.
+ _PrintBetaNotice()
- Returns:
- CommitIdsResult
- """
- sync_result = self.Sync(self._git_event_log)
- if not sync_result.success:
- return CommitIdsResult(None, sync_result.fatal)
+ should_exit = True
+ if not self._remote_url:
+ self._LogWarning(
+ "superproject URL is not defined in manifest: {}",
+ self._manifest.manifestFile,
+ )
+ return SyncResult(False, should_exit)
- data = self._LsTree()
- if not data:
- self._LogWarning('git ls-tree failed to return data for manifest: {}',
- self._manifest.manifestFile)
- return CommitIdsResult(None, True)
+ if not self._Init():
+ return SyncResult(False, should_exit)
+ if not self._Fetch():
+ return SyncResult(False, should_exit)
+ if not self._quiet:
+ print(
+ "%s: Initial setup for superproject completed." % self._work_git
+ )
+ return SyncResult(True, False)
- # Parse lines like the following to select lines starting with '160000' and
- # build a dictionary with project path (last element) and its commit id (3rd element).
- #
- # 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
- # 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00
- commit_ids = {}
- for line in data.split('\x00'):
- ls_data = line.split(None, 3)
- if not ls_data:
- break
- if ls_data[0] == '160000':
- commit_ids[ls_data[3]] = ls_data[2]
+ def _GetAllProjectsCommitIds(self):
+ """Get commit ids for all projects from superproject and save them.
- self._project_commit_ids = commit_ids
- return CommitIdsResult(commit_ids, False)
+ Commit ids are saved in _project_commit_ids.
- def _WriteManifestFile(self):
- """Writes manifest to a file.
+ Returns:
+ CommitIdsResult
+ """
+ sync_result = self.Sync(self._git_event_log)
+ if not sync_result.success:
+ return CommitIdsResult(None, sync_result.fatal)
- Returns:
- manifest_path: Path name of the file into which manifest is written instead of None.
- """
- if not os.path.exists(self._superproject_path):
- self._LogWarning('missing superproject directory: {}', self._superproject_path)
- return None
- manifest_str = self._manifest.ToXml(groups=self._manifest.GetGroupsStr(),
- omit_local=True).toxml()
- manifest_path = self._manifest_path
- try:
- with open(manifest_path, 'w', encoding='utf-8') as fp:
- fp.write(manifest_str)
- except IOError as e:
- self._LogError('cannot write manifest to : {} {}',
- manifest_path, e)
- return None
- return manifest_path
+ data = self._LsTree()
+ if not data:
+ self._LogWarning(
+ "git ls-tree failed to return data for manifest: {}",
+ self._manifest.manifestFile,
+ )
+ return CommitIdsResult(None, True)
- def _SkipUpdatingProjectRevisionId(self, project):
- """Checks if a project's revision id needs to be updated or not.
+ # Parse lines like the following to select lines starting with '160000'
+ # and build a dictionary with project path (last element) and its commit
+ # id (3rd element).
+ #
+ # 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
+ # 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00 # noqa: E501
+ commit_ids = {}
+ for line in data.split("\x00"):
+ ls_data = line.split(None, 3)
+ if not ls_data:
+ break
+ if ls_data[0] == "160000":
+ commit_ids[ls_data[3]] = ls_data[2]
- Revision id for projects from local manifest will not be updated.
+ self._project_commit_ids = commit_ids
+ return CommitIdsResult(commit_ids, False)
- Args:
- project: project whose revision id is being updated.
+ def _WriteManifestFile(self):
+ """Writes manifest to a file.
- Returns:
- True if a project's revision id should not be updated, or False,
- """
- path = project.relpath
- if not path:
- return True
- # Skip the project with revisionId.
- if project.revisionId:
- return True
- # Skip the project if it comes from the local manifest.
- return project.manifest.IsFromLocalManifest(project)
+ Returns:
+ manifest_path: Path name of the file into which manifest is written
+ instead of None.
+ """
+ if not os.path.exists(self._superproject_path):
+ self._LogWarning(
+ "missing superproject directory: {}", self._superproject_path
+ )
+ return None
+ manifest_str = self._manifest.ToXml(
+ groups=self._manifest.GetGroupsStr(), omit_local=True
+ ).toxml()
+ manifest_path = self._manifest_path
+ try:
+ with open(manifest_path, "w", encoding="utf-8") as fp:
+ fp.write(manifest_str)
+ except IOError as e:
+ self._LogError("cannot write manifest to : {} {}", manifest_path, e)
+ return None
+ return manifest_path
- def UpdateProjectsRevisionId(self, projects, git_event_log):
- """Update revisionId of every project in projects with the commit id.
+ def _SkipUpdatingProjectRevisionId(self, project):
+ """Checks if a project's revision id needs to be updated or not.
- Args:
- projects: a list of projects whose revisionId needs to be updated.
- git_event_log: an EventLog, for git tracing.
+ Revision id for projects from local manifest will not be updated.
- Returns:
- UpdateProjectsResult
- """
- self._git_event_log = git_event_log
- commit_ids_result = self._GetAllProjectsCommitIds()
- commit_ids = commit_ids_result.commit_ids
- if not commit_ids:
- return UpdateProjectsResult(None, commit_ids_result.fatal)
+ Args:
+ project: project whose revision id is being updated.
- projects_missing_commit_ids = []
- for project in projects:
- if self._SkipUpdatingProjectRevisionId(project):
- continue
- path = project.relpath
- commit_id = commit_ids.get(path)
- if not commit_id:
- projects_missing_commit_ids.append(path)
+ Returns:
+            True if a project's revision id should not be updated, or False.
+ """
+ path = project.relpath
+ if not path:
+ return True
+ # Skip the project with revisionId.
+ if project.revisionId:
+ return True
+ # Skip the project if it comes from the local manifest.
+ return project.manifest.IsFromLocalManifest(project)
- # If superproject doesn't have a commit id for a project, then report an
- # error event and continue as if do not use superproject is specified.
- if projects_missing_commit_ids:
- self._LogWarning('please file a bug using {} to report missing '
- 'commit_ids for: {}', self._manifest.contactinfo.bugurl,
- projects_missing_commit_ids)
- return UpdateProjectsResult(None, False)
+ def UpdateProjectsRevisionId(self, projects, git_event_log):
+ """Update revisionId of every project in projects with the commit id.
- for project in projects:
- if not self._SkipUpdatingProjectRevisionId(project):
- project.SetRevisionId(commit_ids.get(project.relpath))
+ Args:
+ projects: a list of projects whose revisionId needs to be updated.
+ git_event_log: an EventLog, for git tracing.
- manifest_path = self._WriteManifestFile()
- return UpdateProjectsResult(manifest_path, False)
+ Returns:
+ UpdateProjectsResult
+ """
+ self._git_event_log = git_event_log
+ commit_ids_result = self._GetAllProjectsCommitIds()
+ commit_ids = commit_ids_result.commit_ids
+ if not commit_ids:
+ return UpdateProjectsResult(None, commit_ids_result.fatal)
+
+ projects_missing_commit_ids = []
+ for project in projects:
+ if self._SkipUpdatingProjectRevisionId(project):
+ continue
+ path = project.relpath
+ commit_id = commit_ids.get(path)
+ if not commit_id:
+ projects_missing_commit_ids.append(path)
+
+ # If superproject doesn't have a commit id for a project, then report an
+ # error event and continue as if do not use superproject is specified.
+ if projects_missing_commit_ids:
+ self._LogWarning(
+ "please file a bug using {} to report missing "
+ "commit_ids for: {}",
+ self._manifest.contactinfo.bugurl,
+ projects_missing_commit_ids,
+ )
+ return UpdateProjectsResult(None, False)
+
+ for project in projects:
+ if not self._SkipUpdatingProjectRevisionId(project):
+ project.SetRevisionId(commit_ids.get(project.relpath))
+
+ manifest_path = self._WriteManifestFile()
+ return UpdateProjectsResult(manifest_path, False)
@functools.lru_cache(maxsize=10)
def _PrintBetaNotice():
- """Print the notice of beta status."""
- print('NOTICE: --use-superproject is in beta; report any issues to the '
- 'address described in `repo version`', file=sys.stderr)
+ """Print the notice of beta status."""
+ print(
+ "NOTICE: --use-superproject is in beta; report any issues to the "
+ "address described in `repo version`",
+ file=sys.stderr,
+ )
@functools.lru_cache(maxsize=None)
def _UseSuperprojectFromConfiguration():
- """Returns the user choice of whether to use superproject."""
- user_cfg = RepoConfig.ForUser()
- time_now = int(time.time())
+ """Returns the user choice of whether to use superproject."""
+ user_cfg = RepoConfig.ForUser()
+ time_now = int(time.time())
- user_value = user_cfg.GetBoolean('repo.superprojectChoice')
- if user_value is not None:
- user_expiration = user_cfg.GetInt('repo.superprojectChoiceExpire')
- if user_expiration is None or user_expiration <= 0 or user_expiration >= time_now:
- # TODO(b/190688390) - Remove prompt when we are comfortable with the new
- # default value.
- if user_value:
- print(('You are currently enrolled in Git submodules experiment '
- '(go/android-submodules-quickstart). Use --no-use-superproject '
- 'to override.\n'), file=sys.stderr)
- else:
- print(('You are not currently enrolled in Git submodules experiment '
- '(go/android-submodules-quickstart). Use --use-superproject '
- 'to override.\n'), file=sys.stderr)
- return user_value
+ user_value = user_cfg.GetBoolean("repo.superprojectChoice")
+ if user_value is not None:
+ user_expiration = user_cfg.GetInt("repo.superprojectChoiceExpire")
+ if (
+ user_expiration is None
+ or user_expiration <= 0
+ or user_expiration >= time_now
+ ):
+ # TODO(b/190688390) - Remove prompt when we are comfortable with the
+ # new default value.
+ if user_value:
+ print(
+ (
+ "You are currently enrolled in Git submodules "
+ "experiment (go/android-submodules-quickstart). Use "
+ "--no-use-superproject to override.\n"
+ ),
+ file=sys.stderr,
+ )
+ else:
+ print(
+ (
+ "You are not currently enrolled in Git submodules "
+ "experiment (go/android-submodules-quickstart). Use "
+ "--use-superproject to override.\n"
+ ),
+ file=sys.stderr,
+ )
+ return user_value
- # We don't have an unexpired choice, ask for one.
- system_cfg = RepoConfig.ForSystem()
- system_value = system_cfg.GetBoolean('repo.superprojectChoice')
- if system_value:
- # The system configuration is proposing that we should enable the
- # use of superproject. Treat the user as enrolled for two weeks.
- #
- # TODO(b/190688390) - Remove prompt when we are comfortable with the new
- # default value.
- userchoice = True
- time_choiceexpire = time_now + (86400 * 14)
- user_cfg.SetString('repo.superprojectChoiceExpire', str(time_choiceexpire))
- user_cfg.SetBoolean('repo.superprojectChoice', userchoice)
- print('You are automatically enrolled in Git submodules experiment '
- '(go/android-submodules-quickstart) for another two weeks.\n',
- file=sys.stderr)
- return True
+ # We don't have an unexpired choice, ask for one.
+ system_cfg = RepoConfig.ForSystem()
+ system_value = system_cfg.GetBoolean("repo.superprojectChoice")
+ if system_value:
+ # The system configuration is proposing that we should enable the
+ # use of superproject. Treat the user as enrolled for two weeks.
+ #
+ # TODO(b/190688390) - Remove prompt when we are comfortable with the new
+ # default value.
+ userchoice = True
+ time_choiceexpire = time_now + (86400 * 14)
+ user_cfg.SetString(
+ "repo.superprojectChoiceExpire", str(time_choiceexpire)
+ )
+ user_cfg.SetBoolean("repo.superprojectChoice", userchoice)
+ print(
+ "You are automatically enrolled in Git submodules experiment "
+ "(go/android-submodules-quickstart) for another two weeks.\n",
+ file=sys.stderr,
+ )
+ return True
- # For all other cases, we would not use superproject by default.
- return False
+ # For all other cases, we would not use superproject by default.
+ return False
def PrintMessages(use_superproject, manifest):
- """Returns a boolean if error/warning messages are to be printed.
+ """Returns a boolean if error/warning messages are to be printed.
- Args:
- use_superproject: option value from optparse.
- manifest: manifest to use.
- """
- return use_superproject is not None or bool(manifest.superproject)
+ Args:
+ use_superproject: option value from optparse.
+ manifest: manifest to use.
+ """
+ return use_superproject is not None or bool(manifest.superproject)
def UseSuperproject(use_superproject, manifest):
- """Returns a boolean if use-superproject option is enabled.
+ """Returns a boolean if use-superproject option is enabled.
- Args:
- use_superproject: option value from optparse.
- manifest: manifest to use.
+ Args:
+ use_superproject: option value from optparse.
+ manifest: manifest to use.
- Returns:
- Whether the superproject should be used.
- """
+ Returns:
+ Whether the superproject should be used.
+ """
- if not manifest.superproject:
- # This (sub) manifest does not have a superproject definition.
- return False
- elif use_superproject is not None:
- return use_superproject
- else:
- client_value = manifest.manifestProject.use_superproject
- if client_value is not None:
- return client_value
- elif manifest.superproject:
- return _UseSuperprojectFromConfiguration()
+ if not manifest.superproject:
+ # This (sub) manifest does not have a superproject definition.
+ return False
+ elif use_superproject is not None:
+ return use_superproject
else:
- return False
+ client_value = manifest.manifestProject.use_superproject
+ if client_value is not None:
+ return client_value
+ elif manifest.superproject:
+ return _UseSuperprojectFromConfiguration()
+ else:
+ return False
diff --git a/git_trace2_event_log.py b/git_trace2_event_log.py
index 2edab0e..d90e903 100644
--- a/git_trace2_event_log.py
+++ b/git_trace2_event_log.py
@@ -41,291 +41,330 @@
class EventLog(object):
- """Event log that records events that occurred during a repo invocation.
+ """Event log that records events that occurred during a repo invocation.
- Events are written to the log as a consecutive JSON entries, one per line.
- Entries follow the git trace2 EVENT format.
+    Events are written to the log as consecutive JSON entries, one per line.
+ Entries follow the git trace2 EVENT format.
- Each entry contains the following common keys:
- - event: The event name
- - sid: session-id - Unique string to allow process instance to be identified.
- - thread: The thread name.
- - time: is the UTC time of the event.
+ Each entry contains the following common keys:
+ - event: The event name
+ - sid: session-id - Unique string to allow process instance to be
+ identified.
+ - thread: The thread name.
+ - time: is the UTC time of the event.
- Valid 'event' names and event specific fields are documented here:
- https://git-scm.com/docs/api-trace2#_event_format
- """
-
- def __init__(self, env=None):
- """Initializes the event log."""
- self._log = []
- # Try to get session-id (sid) from environment (setup in repo launcher).
- KEY = 'GIT_TRACE2_PARENT_SID'
- if env is None:
- env = os.environ
-
- now = datetime.datetime.utcnow()
-
- # Save both our sid component and the complete sid.
- # We use our sid component (self._sid) as the unique filename prefix and
- # the full sid (self._full_sid) in the log itself.
- self._sid = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
- parent_sid = env.get(KEY)
- # Append our sid component to the parent sid (if it exists).
- if parent_sid is not None:
- self._full_sid = parent_sid + '/' + self._sid
- else:
- self._full_sid = self._sid
-
- # Set/update the environment variable.
- # Environment handling across systems is messy.
- try:
- env[KEY] = self._full_sid
- except UnicodeEncodeError:
- env[KEY] = self._full_sid.encode()
-
- # Add a version event to front of the log.
- self._AddVersionEvent()
-
- @property
- def full_sid(self):
- return self._full_sid
-
- def _AddVersionEvent(self):
- """Adds a 'version' event at the beginning of current log."""
- version_event = self._CreateEventDict('version')
- version_event['evt'] = "2"
- version_event['exe'] = RepoSourceVersion()
- self._log.insert(0, version_event)
-
- def _CreateEventDict(self, event_name):
- """Returns a dictionary with the common keys/values for git trace2 events.
-
- Args:
- event_name: The event name.
-
- Returns:
- Dictionary with the common event fields populated.
- """
- return {
- 'event': event_name,
- 'sid': self._full_sid,
- 'thread': threading.current_thread().name,
- 'time': datetime.datetime.utcnow().isoformat() + 'Z',
- }
-
- def StartEvent(self):
- """Append a 'start' event to the current log."""
- start_event = self._CreateEventDict('start')
- start_event['argv'] = sys.argv
- self._log.append(start_event)
-
- def ExitEvent(self, result):
- """Append an 'exit' event to the current log.
-
- Args:
- result: Exit code of the event
- """
- exit_event = self._CreateEventDict('exit')
-
- # Consider 'None' success (consistent with event_log result handling).
- if result is None:
- result = 0
- exit_event['code'] = result
- self._log.append(exit_event)
-
- def CommandEvent(self, name, subcommands):
- """Append a 'command' event to the current log.
-
- Args:
- name: Name of the primary command (ex: repo, git)
- subcommands: List of the sub-commands (ex: version, init, sync)
- """
- command_event = self._CreateEventDict('command')
- command_event['name'] = name
- command_event['subcommands'] = subcommands
- self._log.append(command_event)
-
- def LogConfigEvents(self, config, event_dict_name):
- """Append a |event_dict_name| event for each config key in |config|.
-
- Args:
- config: Configuration dictionary.
- event_dict_name: Name of the event dictionary for items to be logged under.
- """
- for param, value in config.items():
- event = self._CreateEventDict(event_dict_name)
- event['param'] = param
- event['value'] = value
- self._log.append(event)
-
- def DefParamRepoEvents(self, config):
- """Append a 'def_param' event for each repo.* config key to the current log.
-
- Args:
- config: Repo configuration dictionary
- """
- # Only output the repo.* config parameters.
- repo_config = {k: v for k, v in config.items() if k.startswith('repo.')}
- self.LogConfigEvents(repo_config, 'def_param')
-
- def GetDataEventName(self, value):
- """Returns 'data-json' if the value is an array else returns 'data'."""
- return 'data-json' if value[0] == '[' and value[-1] == ']' else 'data'
-
- def LogDataConfigEvents(self, config, prefix):
- """Append a 'data' event for each config key/value in |config| to the current log.
-
- For each keyX and valueX of the config, "key" field of the event is '|prefix|/keyX'
- and the "value" of the "key" field is valueX.
-
- Args:
- config: Configuration dictionary.
- prefix: Prefix for each key that is logged.
- """
- for key, value in config.items():
- event = self._CreateEventDict(self.GetDataEventName(value))
- event['key'] = f'{prefix}/{key}'
- event['value'] = value
- self._log.append(event)
-
- def ErrorEvent(self, msg, fmt):
- """Append a 'error' event to the current log."""
- error_event = self._CreateEventDict('error')
- error_event['msg'] = msg
- error_event['fmt'] = fmt
- self._log.append(error_event)
-
- def _GetEventTargetPath(self):
- """Get the 'trace2.eventtarget' path from git configuration.
-
- Returns:
- path: git config's 'trace2.eventtarget' path if it exists, or None
- """
- path = None
- cmd = ['config', '--get', 'trace2.eventtarget']
- # TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
- # system git config variables.
- p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
- bare=True)
- retval = p.Wait()
- if retval == 0:
- # Strip trailing carriage-return in path.
- path = p.stdout.rstrip('\n')
- elif retval != 1:
- # `git config --get` is documented to produce an exit status of `1` if
- # the requested variable is not present in the configuration. Report any
- # other return value as an error.
- print("repo: error: 'git config --get' call failed with return code: %r, stderr: %r" % (
- retval, p.stderr), file=sys.stderr)
- return path
-
- def _WriteLog(self, write_fn):
- """Writes the log out using a provided writer function.
-
- Generate compact JSON output for each item in the log, and write it using
- write_fn.
-
- Args:
- write_fn: A function that accepts byts and writes them to a destination.
+ Valid 'event' names and event specific fields are documented here:
+ https://git-scm.com/docs/api-trace2#_event_format
"""
- for e in self._log:
- # Dump in compact encoding mode.
- # See 'Compact encoding' in Python docs:
- # https://docs.python.org/3/library/json.html#module-json
- write_fn(json.dumps(e, indent=None, separators=(',', ':')).encode('utf-8') + b'\n')
+ def __init__(self, env=None):
+ """Initializes the event log."""
+ self._log = []
+ # Try to get session-id (sid) from environment (setup in repo launcher).
+ KEY = "GIT_TRACE2_PARENT_SID"
+ if env is None:
+ env = os.environ
- def Write(self, path=None):
- """Writes the log out to a file or socket.
+ now = datetime.datetime.utcnow()
- Log is only written if 'path' or 'git config --get trace2.eventtarget'
- provide a valid path (or socket) to write logs to.
+ # Save both our sid component and the complete sid.
+ # We use our sid component (self._sid) as the unique filename prefix and
+ # the full sid (self._full_sid) in the log itself.
+ self._sid = "repo-%s-P%08x" % (
+ now.strftime("%Y%m%dT%H%M%SZ"),
+ os.getpid(),
+ )
+ parent_sid = env.get(KEY)
+ # Append our sid component to the parent sid (if it exists).
+ if parent_sid is not None:
+ self._full_sid = parent_sid + "/" + self._sid
+ else:
+ self._full_sid = self._sid
- Logging filename format follows the git trace2 style of being a unique
- (exclusive writable) file.
-
- Args:
- path: Path to where logs should be written. The path may have a prefix of
- the form "af_unix:[{stream|dgram}:]", in which case the path is
- treated as a Unix domain socket. See
- https://git-scm.com/docs/api-trace2#_enabling_a_target for details.
-
- Returns:
- log_path: Path to the log file or socket if log is written, otherwise None
- """
- log_path = None
- # If no logging path is specified, get the path from 'trace2.eventtarget'.
- if path is None:
- path = self._GetEventTargetPath()
-
- # If no logging path is specified, exit.
- if path is None:
- return None
-
- path_is_socket = False
- socket_type = None
- if isinstance(path, str):
- parts = path.split(':', 1)
- if parts[0] == 'af_unix' and len(parts) == 2:
- path_is_socket = True
- path = parts[1]
- parts = path.split(':', 1)
- if parts[0] == 'stream' and len(parts) == 2:
- socket_type = socket.SOCK_STREAM
- path = parts[1]
- elif parts[0] == 'dgram' and len(parts) == 2:
- socket_type = socket.SOCK_DGRAM
- path = parts[1]
- else:
- # Get absolute path.
- path = os.path.abspath(os.path.expanduser(path))
- else:
- raise TypeError('path: str required but got %s.' % type(path))
-
- # Git trace2 requires a directory to write log to.
-
- # TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
- if not (path_is_socket or os.path.isdir(path)):
- return None
-
- if path_is_socket:
- if socket_type == socket.SOCK_STREAM or socket_type is None:
+ # Set/update the environment variable.
+ # Environment handling across systems is messy.
try:
- with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
- sock.connect(path)
- self._WriteLog(sock.sendall)
- return f'af_unix:stream:{path}'
- except OSError as err:
- # If we tried to connect to a DGRAM socket using STREAM, ignore the
- # attempt and continue to DGRAM below. Otherwise, issue a warning.
- if err.errno != errno.EPROTOTYPE:
- print(f'repo: warning: git trace2 logging failed: {err}', file=sys.stderr)
+ env[KEY] = self._full_sid
+ except UnicodeEncodeError:
+ env[KEY] = self._full_sid.encode()
+
+ # Add a version event to front of the log.
+ self._AddVersionEvent()
+
+ @property
+ def full_sid(self):
+ return self._full_sid
+
+ def _AddVersionEvent(self):
+ """Adds a 'version' event at the beginning of current log."""
+ version_event = self._CreateEventDict("version")
+ version_event["evt"] = "2"
+ version_event["exe"] = RepoSourceVersion()
+ self._log.insert(0, version_event)
+
+ def _CreateEventDict(self, event_name):
+ """Returns a dictionary with common keys/values for git trace2 events.
+
+ Args:
+ event_name: The event name.
+
+ Returns:
+ Dictionary with the common event fields populated.
+ """
+ return {
+ "event": event_name,
+ "sid": self._full_sid,
+ "thread": threading.current_thread().name,
+ "time": datetime.datetime.utcnow().isoformat() + "Z",
+ }
+
+ def StartEvent(self):
+ """Append a 'start' event to the current log."""
+ start_event = self._CreateEventDict("start")
+ start_event["argv"] = sys.argv
+ self._log.append(start_event)
+
+ def ExitEvent(self, result):
+ """Append an 'exit' event to the current log.
+
+ Args:
+ result: Exit code of the event
+ """
+ exit_event = self._CreateEventDict("exit")
+
+ # Consider 'None' success (consistent with event_log result handling).
+ if result is None:
+ result = 0
+ exit_event["code"] = result
+ self._log.append(exit_event)
+
+ def CommandEvent(self, name, subcommands):
+ """Append a 'command' event to the current log.
+
+ Args:
+ name: Name of the primary command (ex: repo, git)
+ subcommands: List of the sub-commands (ex: version, init, sync)
+ """
+ command_event = self._CreateEventDict("command")
+ command_event["name"] = name
+ command_event["subcommands"] = subcommands
+ self._log.append(command_event)
+
+ def LogConfigEvents(self, config, event_dict_name):
+ """Append a |event_dict_name| event for each config key in |config|.
+
+ Args:
+ config: Configuration dictionary.
+ event_dict_name: Name of the event dictionary for items to be logged
+ under.
+ """
+ for param, value in config.items():
+ event = self._CreateEventDict(event_dict_name)
+ event["param"] = param
+ event["value"] = value
+ self._log.append(event)
+
+ def DefParamRepoEvents(self, config):
+ """Append 'def_param' events for repo config keys to the current log.
+
+ This appends one event for each repo.* config key.
+
+ Args:
+ config: Repo configuration dictionary
+ """
+ # Only output the repo.* config parameters.
+ repo_config = {k: v for k, v in config.items() if k.startswith("repo.")}
+ self.LogConfigEvents(repo_config, "def_param")
+
+ def GetDataEventName(self, value):
+ """Returns 'data-json' if the value is an array else returns 'data'."""
+ return "data-json" if value[0] == "[" and value[-1] == "]" else "data"
+
+ def LogDataConfigEvents(self, config, prefix):
+ """Append a 'data' event for each entry in |config| to the current log.
+
+ For each keyX and valueX of the config, "key" field of the event is
+ '|prefix|/keyX' and the "value" of the "key" field is valueX.
+
+ Args:
+ config: Configuration dictionary.
+ prefix: Prefix for each key that is logged.
+ """
+ for key, value in config.items():
+ event = self._CreateEventDict(self.GetDataEventName(value))
+ event["key"] = f"{prefix}/{key}"
+ event["value"] = value
+ self._log.append(event)
+
+ def ErrorEvent(self, msg, fmt):
+        """Append an 'error' event to the current log."""
+ error_event = self._CreateEventDict("error")
+ error_event["msg"] = msg
+ error_event["fmt"] = fmt
+ self._log.append(error_event)
+
+ def _GetEventTargetPath(self):
+ """Get the 'trace2.eventtarget' path from git configuration.
+
+ Returns:
+ path: git config's 'trace2.eventtarget' path if it exists, or None
+ """
+ path = None
+ cmd = ["config", "--get", "trace2.eventtarget"]
+ # TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
+ # system git config variables.
+ p = GitCommand(
+ None, cmd, capture_stdout=True, capture_stderr=True, bare=True
+ )
+ retval = p.Wait()
+ if retval == 0:
+ # Strip trailing carriage-return in path.
+ path = p.stdout.rstrip("\n")
+ elif retval != 1:
+ # `git config --get` is documented to produce an exit status of `1`
+ # if the requested variable is not present in the configuration.
+ # Report any other return value as an error.
+ print(
+ "repo: error: 'git config --get' call failed with return code: "
+ "%r, stderr: %r" % (retval, p.stderr),
+ file=sys.stderr,
+ )
+ return path
+
+ def _WriteLog(self, write_fn):
+ """Writes the log out using a provided writer function.
+
+ Generate compact JSON output for each item in the log, and write it
+ using write_fn.
+
+ Args:
+            write_fn: A function that accepts bytes and writes them to a
+ destination.
+ """
+
+ for e in self._log:
+ # Dump in compact encoding mode.
+ # See 'Compact encoding' in Python docs:
+ # https://docs.python.org/3/library/json.html#module-json
+ write_fn(
+ json.dumps(e, indent=None, separators=(",", ":")).encode(
+ "utf-8"
+ )
+ + b"\n"
+ )
+
+ def Write(self, path=None):
+ """Writes the log out to a file or socket.
+
+ Log is only written if 'path' or 'git config --get trace2.eventtarget'
+ provide a valid path (or socket) to write logs to.
+
+ Logging filename format follows the git trace2 style of being a unique
+ (exclusive writable) file.
+
+ Args:
+ path: Path to where logs should be written. The path may have a
+ prefix of the form "af_unix:[{stream|dgram}:]", in which case
+ the path is treated as a Unix domain socket. See
+ https://git-scm.com/docs/api-trace2#_enabling_a_target for
+ details.
+
+ Returns:
+ log_path: Path to the log file or socket if log is written,
+ otherwise None
+ """
+ log_path = None
+ # If no logging path is specified, get the path from
+ # 'trace2.eventtarget'.
+ if path is None:
+ path = self._GetEventTargetPath()
+
+ # If no logging path is specified, exit.
+ if path is None:
return None
- if socket_type == socket.SOCK_DGRAM or socket_type is None:
- try:
- with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sock:
- self._WriteLog(lambda bs: sock.sendto(bs, path))
- return f'af_unix:dgram:{path}'
- except OSError as err:
- print(f'repo: warning: git trace2 logging failed: {err}', file=sys.stderr)
- return None
- # Tried to open a socket but couldn't connect (SOCK_STREAM) or write
- # (SOCK_DGRAM).
- print('repo: warning: git trace2 logging failed: could not write to socket', file=sys.stderr)
- return None
- # Path is an absolute path
- # Use NamedTemporaryFile to generate a unique filename as required by git trace2.
- try:
- with tempfile.NamedTemporaryFile(mode='xb', prefix=self._sid, dir=path,
- delete=False) as f:
- # TODO(https://crbug.com/gerrit/13706): Support writing events as they
- # occur.
- self._WriteLog(f.write)
- log_path = f.name
- except FileExistsError as err:
- print('repo: warning: git trace2 logging failed: %r' % err,
- file=sys.stderr)
- return None
- return log_path
+ path_is_socket = False
+ socket_type = None
+ if isinstance(path, str):
+ parts = path.split(":", 1)
+ if parts[0] == "af_unix" and len(parts) == 2:
+ path_is_socket = True
+ path = parts[1]
+ parts = path.split(":", 1)
+ if parts[0] == "stream" and len(parts) == 2:
+ socket_type = socket.SOCK_STREAM
+ path = parts[1]
+ elif parts[0] == "dgram" and len(parts) == 2:
+ socket_type = socket.SOCK_DGRAM
+ path = parts[1]
+ else:
+ # Get absolute path.
+ path = os.path.abspath(os.path.expanduser(path))
+ else:
+ raise TypeError("path: str required but got %s." % type(path))
+
+ # Git trace2 requires a directory to write log to.
+
+ # TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
+ if not (path_is_socket or os.path.isdir(path)):
+ return None
+
+ if path_is_socket:
+ if socket_type == socket.SOCK_STREAM or socket_type is None:
+ try:
+ with socket.socket(
+ socket.AF_UNIX, socket.SOCK_STREAM
+ ) as sock:
+ sock.connect(path)
+ self._WriteLog(sock.sendall)
+ return f"af_unix:stream:{path}"
+ except OSError as err:
+ # If we tried to connect to a DGRAM socket using STREAM,
+ # ignore the attempt and continue to DGRAM below. Otherwise,
+ # issue a warning.
+ if err.errno != errno.EPROTOTYPE:
+ print(
+ f"repo: warning: git trace2 logging failed: {err}",
+ file=sys.stderr,
+ )
+ return None
+ if socket_type == socket.SOCK_DGRAM or socket_type is None:
+ try:
+ with socket.socket(
+ socket.AF_UNIX, socket.SOCK_DGRAM
+ ) as sock:
+ self._WriteLog(lambda bs: sock.sendto(bs, path))
+ return f"af_unix:dgram:{path}"
+ except OSError as err:
+ print(
+ f"repo: warning: git trace2 logging failed: {err}",
+ file=sys.stderr,
+ )
+ return None
+ # Tried to open a socket but couldn't connect (SOCK_STREAM) or write
+ # (SOCK_DGRAM).
+ print(
+ "repo: warning: git trace2 logging failed: could not write to "
+ "socket",
+ file=sys.stderr,
+ )
+ return None
+
+ # Path is an absolute path
+ # Use NamedTemporaryFile to generate a unique filename as required by
+ # git trace2.
+ try:
+ with tempfile.NamedTemporaryFile(
+ mode="xb", prefix=self._sid, dir=path, delete=False
+ ) as f:
+ # TODO(https://crbug.com/gerrit/13706): Support writing events
+ # as they occur.
+ self._WriteLog(f.write)
+ log_path = f.name
+ except FileExistsError as err:
+ print(
+ "repo: warning: git trace2 logging failed: %r" % err,
+ file=sys.stderr,
+ )
+ return None
+ return log_path
diff --git a/gitc_utils.py b/gitc_utils.py
index dfcfd2a..7b72048 100644
--- a/gitc_utils.py
+++ b/gitc_utils.py
@@ -28,128 +28,139 @@
def get_gitc_manifest_dir():
- return wrapper.Wrapper().get_gitc_manifest_dir()
+ return wrapper.Wrapper().get_gitc_manifest_dir()
def parse_clientdir(gitc_fs_path):
- return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
+ return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
def _get_project_revision(args):
- """Worker for _set_project_revisions to lookup one project remote."""
- (i, url, expr) = args
- gitcmd = git_command.GitCommand(
- None, ['ls-remote', url, expr], capture_stdout=True, cwd='/tmp')
- rc = gitcmd.Wait()
- return (i, rc, gitcmd.stdout.split('\t', 1)[0])
+ """Worker for _set_project_revisions to lookup one project remote."""
+ (i, url, expr) = args
+ gitcmd = git_command.GitCommand(
+ None, ["ls-remote", url, expr], capture_stdout=True, cwd="/tmp"
+ )
+ rc = gitcmd.Wait()
+ return (i, rc, gitcmd.stdout.split("\t", 1)[0])
def _set_project_revisions(projects):
- """Sets the revisionExpr for a list of projects.
+ """Sets the revisionExpr for a list of projects.
- Because of the limit of open file descriptors allowed, length of projects
- should not be overly large. Recommend calling this function multiple times
- with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
+ Because of the limit of open file descriptors allowed, length of projects
+ should not be overly large. Recommend calling this function multiple times
+ with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
- Args:
- projects: List of project objects to set the revionExpr for.
- """
- # Retrieve the commit id for each project based off of it's current
- # revisionExpr and it is not already a commit id.
- with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool:
- results_iter = pool.imap_unordered(
- _get_project_revision,
- ((i, project.remote.url, project.revisionExpr)
- for i, project in enumerate(projects)
- if not git_config.IsId(project.revisionExpr)),
- chunksize=8)
- for (i, rc, revisionExpr) in results_iter:
- project = projects[i]
- if rc:
- print('FATAL: Failed to retrieve revisionExpr for %s' % project.name)
- pool.terminate()
- sys.exit(1)
- if not revisionExpr:
- pool.terminate()
- raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
- (project.remote.url, project.revisionExpr))
- project.revisionExpr = revisionExpr
+ Args:
+        projects: List of project objects to set the revisionExpr for.
+ """
+    # Retrieve the commit id for each project based off of its current
+    # revisionExpr, if it is not already a commit id.
+ with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool:
+ results_iter = pool.imap_unordered(
+ _get_project_revision,
+ (
+ (i, project.remote.url, project.revisionExpr)
+ for i, project in enumerate(projects)
+ if not git_config.IsId(project.revisionExpr)
+ ),
+ chunksize=8,
+ )
+ for i, rc, revisionExpr in results_iter:
+ project = projects[i]
+ if rc:
+ print(
+ "FATAL: Failed to retrieve revisionExpr for %s"
+ % project.name
+ )
+ pool.terminate()
+ sys.exit(1)
+ if not revisionExpr:
+ pool.terminate()
+ raise ManifestParseError(
+ "Invalid SHA-1 revision project %s (%s)"
+ % (project.remote.url, project.revisionExpr)
+ )
+ project.revisionExpr = revisionExpr
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
- """Generate a manifest for shafsd to use for this GITC client.
+ """Generate a manifest for shafsd to use for this GITC client.
- Args:
- gitc_manifest: Current gitc manifest, or None if there isn't one yet.
- manifest: A GitcManifest object loaded with the current repo manifest.
- paths: List of project paths we want to update.
- """
+ Args:
+ gitc_manifest: Current gitc manifest, or None if there isn't one yet.
+ manifest: A GitcManifest object loaded with the current repo manifest.
+ paths: List of project paths we want to update.
+ """
- print('Generating GITC Manifest by fetching revision SHAs for each '
- 'project.')
- if paths is None:
- paths = list(manifest.paths.keys())
+ print(
+ "Generating GITC Manifest by fetching revision SHAs for each "
+ "project."
+ )
+ if paths is None:
+ paths = list(manifest.paths.keys())
- groups = [x for x in re.split(r'[,\s]+', manifest.GetGroupsStr()) if x]
+ groups = [x for x in re.split(r"[,\s]+", manifest.GetGroupsStr()) if x]
- # Convert the paths to projects, and filter them to the matched groups.
- projects = [manifest.paths[p] for p in paths]
- projects = [p for p in projects if p.MatchesGroups(groups)]
+ # Convert the paths to projects, and filter them to the matched groups.
+ projects = [manifest.paths[p] for p in paths]
+ projects = [p for p in projects if p.MatchesGroups(groups)]
- if gitc_manifest is not None:
- for path, proj in manifest.paths.items():
- if not proj.MatchesGroups(groups):
- continue
+ if gitc_manifest is not None:
+ for path, proj in manifest.paths.items():
+ if not proj.MatchesGroups(groups):
+ continue
- if not proj.upstream and not git_config.IsId(proj.revisionExpr):
- proj.upstream = proj.revisionExpr
+ if not proj.upstream and not git_config.IsId(proj.revisionExpr):
+ proj.upstream = proj.revisionExpr
- if path not in gitc_manifest.paths:
- # Any new projects need their first revision, even if we weren't asked
- # for them.
- projects.append(proj)
- elif path not in paths:
- # And copy revisions from the previous manifest if we're not updating
- # them now.
- gitc_proj = gitc_manifest.paths[path]
- if gitc_proj.old_revision:
- proj.revisionExpr = None
- proj.old_revision = gitc_proj.old_revision
- else:
- proj.revisionExpr = gitc_proj.revisionExpr
+ if path not in gitc_manifest.paths:
+ # Any new projects need their first revision, even if we weren't
+ # asked for them.
+ projects.append(proj)
+ elif path not in paths:
+ # And copy revisions from the previous manifest if we're not
+ # updating them now.
+ gitc_proj = gitc_manifest.paths[path]
+ if gitc_proj.old_revision:
+ proj.revisionExpr = None
+ proj.old_revision = gitc_proj.old_revision
+ else:
+ proj.revisionExpr = gitc_proj.revisionExpr
- _set_project_revisions(projects)
+ _set_project_revisions(projects)
- if gitc_manifest is not None:
- for path, proj in gitc_manifest.paths.items():
- if proj.old_revision and path in paths:
- # If we updated a project that has been started, keep the old-revision
- # updated.
- repo_proj = manifest.paths[path]
- repo_proj.old_revision = repo_proj.revisionExpr
- repo_proj.revisionExpr = None
+ if gitc_manifest is not None:
+ for path, proj in gitc_manifest.paths.items():
+ if proj.old_revision and path in paths:
+ # If we updated a project that has been started, keep the
+ # old-revision updated.
+ repo_proj = manifest.paths[path]
+ repo_proj.old_revision = repo_proj.revisionExpr
+ repo_proj.revisionExpr = None
- # Convert URLs from relative to absolute.
- for _name, remote in manifest.remotes.items():
- remote.fetchUrl = remote.resolvedFetchUrl
+ # Convert URLs from relative to absolute.
+ for _name, remote in manifest.remotes.items():
+ remote.fetchUrl = remote.resolvedFetchUrl
- # Save the manifest.
- save_manifest(manifest)
+ # Save the manifest.
+ save_manifest(manifest)
def save_manifest(manifest, client_dir=None):
- """Save the manifest file in the client_dir.
+ """Save the manifest file in the client_dir.
- Args:
- manifest: Manifest object to save.
- client_dir: Client directory to save the manifest in.
- """
- if not client_dir:
- manifest_file = manifest.manifestFile
- else:
- manifest_file = os.path.join(client_dir, '.manifest')
- with open(manifest_file, 'w') as f:
- manifest.Save(f, groups=manifest.GetGroupsStr())
- # TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
- # Give the GITC filesystem time to register the manifest changes.
- time.sleep(3)
+ Args:
+ manifest: Manifest object to save.
+ client_dir: Client directory to save the manifest in.
+ """
+ if not client_dir:
+ manifest_file = manifest.manifestFile
+ else:
+ manifest_file = os.path.join(client_dir, ".manifest")
+ with open(manifest_file, "w") as f:
+ manifest.Save(f, groups=manifest.GetGroupsStr())
+ # TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
+ # Give the GITC filesystem time to register the manifest changes.
+ time.sleep(3)
diff --git a/hooks.py b/hooks.py
index 67c21a2..decf069 100644
--- a/hooks.py
+++ b/hooks.py
@@ -26,271 +26,293 @@
class RepoHook(object):
- """A RepoHook contains information about a script to run as a hook.
+ """A RepoHook contains information about a script to run as a hook.
- Hooks are used to run a python script before running an upload (for instance,
- to run presubmit checks). Eventually, we may have hooks for other actions.
+ Hooks are used to run a python script before running an upload (for
+ instance, to run presubmit checks). Eventually, we may have hooks for other
+ actions.
- This shouldn't be confused with files in the 'repo/hooks' directory. Those
- files are copied into each '.git/hooks' folder for each project. Repo-level
- hooks are associated instead with repo actions.
+ This shouldn't be confused with files in the 'repo/hooks' directory. Those
+ files are copied into each '.git/hooks' folder for each project. Repo-level
+ hooks are associated instead with repo actions.
- Hooks are always python. When a hook is run, we will load the hook into the
- interpreter and execute its main() function.
+ Hooks are always python. When a hook is run, we will load the hook into the
+ interpreter and execute its main() function.
- Combinations of hook option flags:
- - no-verify=False, verify=False (DEFAULT):
- If stdout is a tty, can prompt about running hooks if needed.
- If user denies running hooks, the action is cancelled. If stdout is
- not a tty and we would need to prompt about hooks, action is
- cancelled.
- - no-verify=False, verify=True:
- Always run hooks with no prompt.
- - no-verify=True, verify=False:
- Never run hooks, but run action anyway (AKA bypass hooks).
- - no-verify=True, verify=True:
- Invalid
- """
-
- def __init__(self,
- hook_type,
- hooks_project,
- repo_topdir,
- manifest_url,
- bypass_hooks=False,
- allow_all_hooks=False,
- ignore_hooks=False,
- abort_if_user_denies=False):
- """RepoHook constructor.
-
- Params:
- hook_type: A string representing the type of hook. This is also used
- to figure out the name of the file containing the hook. For
- example: 'pre-upload'.
- hooks_project: The project containing the repo hooks.
- If you have a manifest, this is manifest.repo_hooks_project.
- OK if this is None, which will make the hook a no-op.
- repo_topdir: The top directory of the repo client checkout.
- This is the one containing the .repo directory. Scripts will
- run with CWD as this directory.
- If you have a manifest, this is manifest.topdir.
- manifest_url: The URL to the manifest git repo.
- bypass_hooks: If True, then 'Do not run the hook'.
- allow_all_hooks: If True, then 'Run the hook without prompting'.
- ignore_hooks: If True, then 'Do not abort action if hooks fail'.
- abort_if_user_denies: If True, we'll abort running the hook if the user
- doesn't allow us to run the hook.
+ Combinations of hook option flags:
+ - no-verify=False, verify=False (DEFAULT):
+ If stdout is a tty, can prompt about running hooks if needed.
+ If user denies running hooks, the action is cancelled. If stdout is
+ not a tty and we would need to prompt about hooks, action is
+ cancelled.
+ - no-verify=False, verify=True:
+ Always run hooks with no prompt.
+ - no-verify=True, verify=False:
+ Never run hooks, but run action anyway (AKA bypass hooks).
+ - no-verify=True, verify=True:
+ Invalid
"""
- self._hook_type = hook_type
- self._hooks_project = hooks_project
- self._repo_topdir = repo_topdir
- self._manifest_url = manifest_url
- self._bypass_hooks = bypass_hooks
- self._allow_all_hooks = allow_all_hooks
- self._ignore_hooks = ignore_hooks
- self._abort_if_user_denies = abort_if_user_denies
- # Store the full path to the script for convenience.
- if self._hooks_project:
- self._script_fullpath = os.path.join(self._hooks_project.worktree,
- self._hook_type + '.py')
- else:
- self._script_fullpath = None
+ def __init__(
+ self,
+ hook_type,
+ hooks_project,
+ repo_topdir,
+ manifest_url,
+ bypass_hooks=False,
+ allow_all_hooks=False,
+ ignore_hooks=False,
+ abort_if_user_denies=False,
+ ):
+ """RepoHook constructor.
- def _GetHash(self):
- """Return a hash of the contents of the hooks directory.
+ Params:
+ hook_type: A string representing the type of hook. This is also used
+ to figure out the name of the file containing the hook. For
+ example: 'pre-upload'.
+ hooks_project: The project containing the repo hooks.
+ If you have a manifest, this is manifest.repo_hooks_project.
+ OK if this is None, which will make the hook a no-op.
+ repo_topdir: The top directory of the repo client checkout.
+ This is the one containing the .repo directory. Scripts will
+ run with CWD as this directory.
+ If you have a manifest, this is manifest.topdir.
+ manifest_url: The URL to the manifest git repo.
+ bypass_hooks: If True, then 'Do not run the hook'.
+ allow_all_hooks: If True, then 'Run the hook without prompting'.
+ ignore_hooks: If True, then 'Do not abort action if hooks fail'.
+ abort_if_user_denies: If True, we'll abort running the hook if the
+ user doesn't allow us to run the hook.
+ """
+ self._hook_type = hook_type
+ self._hooks_project = hooks_project
+ self._repo_topdir = repo_topdir
+ self._manifest_url = manifest_url
+ self._bypass_hooks = bypass_hooks
+ self._allow_all_hooks = allow_all_hooks
+ self._ignore_hooks = ignore_hooks
+ self._abort_if_user_denies = abort_if_user_denies
- We'll just use git to do this. This hash has the property that if anything
- changes in the directory we will return a different has.
+ # Store the full path to the script for convenience.
+ if self._hooks_project:
+ self._script_fullpath = os.path.join(
+ self._hooks_project.worktree, self._hook_type + ".py"
+ )
+ else:
+ self._script_fullpath = None
- SECURITY CONSIDERATION:
- This hash only represents the contents of files in the hook directory, not
- any other files imported or called by hooks. Changes to imported files
- can change the script behavior without affecting the hash.
+ def _GetHash(self):
+ """Return a hash of the contents of the hooks directory.
- Returns:
- A string representing the hash. This will always be ASCII so that it can
- be printed to the user easily.
- """
- assert self._hooks_project, "Must have hooks to calculate their hash."
+ We'll just use git to do this. This hash has the property that if
+ anything changes in the directory we will return a different hash.
- # We will use the work_git object rather than just calling GetRevisionId().
- # That gives us a hash of the latest checked in version of the files that
- # the user will actually be executing. Specifically, GetRevisionId()
- # doesn't appear to change even if a user checks out a different version
- # of the hooks repo (via git checkout) nor if a user commits their own revs.
- #
- # NOTE: Local (non-committed) changes will not be factored into this hash.
- # I think this is OK, since we're really only worried about warning the user
- # about upstream changes.
- return self._hooks_project.work_git.rev_parse(HEAD)
+ SECURITY CONSIDERATION:
+ This hash only represents the contents of files in the hook
+ directory, not any other files imported or called by hooks. Changes
+ to imported files can change the script behavior without affecting
+ the hash.
- def _GetMustVerb(self):
- """Return 'must' if the hook is required; 'should' if not."""
- if self._abort_if_user_denies:
- return 'must'
- else:
- return 'should'
+ Returns:
+ A string representing the hash. This will always be ASCII so that
+ it can be printed to the user easily.
+ """
+ assert self._hooks_project, "Must have hooks to calculate their hash."
- def _CheckForHookApproval(self):
- """Check to see whether this hook has been approved.
+ # We will use the work_git object rather than just calling
+ # GetRevisionId(). That gives us a hash of the latest checked in version
+ # of the files that the user will actually be executing. Specifically,
+ # GetRevisionId() doesn't appear to change even if a user checks out a
+ # different version of the hooks repo (via git checkout) nor if a user
+ # commits their own revs.
+ #
+ # NOTE: Local (non-committed) changes will not be factored into this
+ # hash. I think this is OK, since we're really only worried about
+ # warning the user about upstream changes.
+ return self._hooks_project.work_git.rev_parse(HEAD)
- We'll accept approval of manifest URLs if they're using secure transports.
- This way the user can say they trust the manifest hoster. For insecure
- hosts, we fall back to checking the hash of the hooks repo.
+ def _GetMustVerb(self):
+ """Return 'must' if the hook is required; 'should' if not."""
+ if self._abort_if_user_denies:
+ return "must"
+ else:
+ return "should"
- Note that we ask permission for each individual hook even though we use
- the hash of all hooks when detecting changes. We'd like the user to be
- able to approve / deny each hook individually. We only use the hash of all
- hooks because there is no other easy way to detect changes to local imports.
+ def _CheckForHookApproval(self):
+ """Check to see whether this hook has been approved.
- Returns:
- True if this hook is approved to run; False otherwise.
+ We'll accept approval of manifest URLs if they're using secure
+ transports. This way the user can say they trust the manifest hoster.
+ For insecure hosts, we fall back to checking the hash of the hooks repo.
- Raises:
- HookError: Raised if the user doesn't approve and abort_if_user_denies
- was passed to the consturctor.
- """
- if self._ManifestUrlHasSecureScheme():
- return self._CheckForHookApprovalManifest()
- else:
- return self._CheckForHookApprovalHash()
+ Note that we ask permission for each individual hook even though we use
+ the hash of all hooks when detecting changes. We'd like the user to be
+ able to approve / deny each hook individually. We only use the hash of
+ all hooks because there is no other easy way to detect changes to local
+ imports.
- def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
- changed_prompt):
- """Check for approval for a particular attribute and hook.
+ Returns:
+ True if this hook is approved to run; False otherwise.
- Args:
- subkey: The git config key under [repo.hooks.<hook_type>] to store the
- last approved string.
- new_val: The new value to compare against the last approved one.
- main_prompt: Message to display to the user to ask for approval.
- changed_prompt: Message explaining why we're re-asking for approval.
+ Raises:
+ HookError: Raised if the user doesn't approve and
+ abort_if_user_denies was passed to the consturctor.
+ """
+ if self._ManifestUrlHasSecureScheme():
+ return self._CheckForHookApprovalManifest()
+ else:
+ return self._CheckForHookApprovalHash()
- Returns:
- True if this hook is approved to run; False otherwise.
+ def _CheckForHookApprovalHelper(
+ self, subkey, new_val, main_prompt, changed_prompt
+ ):
+ """Check for approval for a particular attribute and hook.
- Raises:
- HookError: Raised if the user doesn't approve and abort_if_user_denies
- was passed to the consturctor.
- """
- hooks_config = self._hooks_project.config
- git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
+ Args:
+ subkey: The git config key under [repo.hooks.<hook_type>] to store
+ the last approved string.
+ new_val: The new value to compare against the last approved one.
+ main_prompt: Message to display to the user to ask for approval.
+ changed_prompt: Message explaining why we're re-asking for approval.
- # Get the last value that the user approved for this hook; may be None.
- old_val = hooks_config.GetString(git_approval_key)
+ Returns:
+ True if this hook is approved to run; False otherwise.
- if old_val is not None:
- # User previously approved hook and asked not to be prompted again.
- if new_val == old_val:
- # Approval matched. We're done.
- return True
- else:
- # Give the user a reason why we're prompting, since they last told
- # us to "never ask again".
- prompt = 'WARNING: %s\n\n' % (changed_prompt,)
- else:
- prompt = ''
+ Raises:
+ HookError: Raised if the user doesn't approve and
+ abort_if_user_denies was passed to the constructor.
+ """
+ hooks_config = self._hooks_project.config
+ git_approval_key = "repo.hooks.%s.%s" % (self._hook_type, subkey)
- # Prompt the user if we're not on a tty; on a tty we'll assume "no".
- if sys.stdout.isatty():
- prompt += main_prompt + ' (yes/always/NO)? '
- response = input(prompt).lower()
- print()
+ # Get the last value that the user approved for this hook; may be None.
+ old_val = hooks_config.GetString(git_approval_key)
- # User is doing a one-time approval.
- if response in ('y', 'yes'):
- return True
- elif response == 'always':
- hooks_config.SetString(git_approval_key, new_val)
- return True
+ if old_val is not None:
+ # User previously approved hook and asked not to be prompted again.
+ if new_val == old_val:
+ # Approval matched. We're done.
+ return True
+ else:
+ # Give the user a reason why we're prompting, since they last
+ # told us to "never ask again".
+ prompt = "WARNING: %s\n\n" % (changed_prompt,)
+ else:
+ prompt = ""
- # For anything else, we'll assume no approval.
- if self._abort_if_user_denies:
- raise HookError('You must allow the %s hook or use --no-verify.' %
- self._hook_type)
+ # Prompt the user if we're not on a tty; on a tty we'll assume "no".
+ if sys.stdout.isatty():
+ prompt += main_prompt + " (yes/always/NO)? "
+ response = input(prompt).lower()
+ print()
- return False
+ # User is doing a one-time approval.
+ if response in ("y", "yes"):
+ return True
+ elif response == "always":
+ hooks_config.SetString(git_approval_key, new_val)
+ return True
- def _ManifestUrlHasSecureScheme(self):
- """Check if the URI for the manifest is a secure transport."""
- secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
- parse_results = urllib.parse.urlparse(self._manifest_url)
- return parse_results.scheme in secure_schemes
+ # For anything else, we'll assume no approval.
+ if self._abort_if_user_denies:
+ raise HookError(
+ "You must allow the %s hook or use --no-verify."
+ % self._hook_type
+ )
- def _CheckForHookApprovalManifest(self):
- """Check whether the user has approved this manifest host.
+ return False
- Returns:
- True if this hook is approved to run; False otherwise.
- """
- return self._CheckForHookApprovalHelper(
- 'approvedmanifest',
- self._manifest_url,
- 'Run hook scripts from %s' % (self._manifest_url,),
- 'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
+ def _ManifestUrlHasSecureScheme(self):
+ """Check if the URI for the manifest is a secure transport."""
+ secure_schemes = (
+ "file",
+ "https",
+ "ssh",
+ "persistent-https",
+ "sso",
+ "rpc",
+ )
+ parse_results = urllib.parse.urlparse(self._manifest_url)
+ return parse_results.scheme in secure_schemes
- def _CheckForHookApprovalHash(self):
- """Check whether the user has approved the hooks repo.
+ def _CheckForHookApprovalManifest(self):
+ """Check whether the user has approved this manifest host.
- Returns:
- True if this hook is approved to run; False otherwise.
- """
- prompt = ('Repo %s run the script:\n'
- ' %s\n'
- '\n'
- 'Do you want to allow this script to run')
- return self._CheckForHookApprovalHelper(
- 'approvedhash',
- self._GetHash(),
- prompt % (self._GetMustVerb(), self._script_fullpath),
- 'Scripts have changed since %s was allowed.' % (self._hook_type,))
+ Returns:
+ True if this hook is approved to run; False otherwise.
+ """
+ return self._CheckForHookApprovalHelper(
+ "approvedmanifest",
+ self._manifest_url,
+ "Run hook scripts from %s" % (self._manifest_url,),
+ "Manifest URL has changed since %s was allowed."
+ % (self._hook_type,),
+ )
- @staticmethod
- def _ExtractInterpFromShebang(data):
- """Extract the interpreter used in the shebang.
+ def _CheckForHookApprovalHash(self):
+ """Check whether the user has approved the hooks repo.
- Try to locate the interpreter the script is using (ignoring `env`).
+ Returns:
+ True if this hook is approved to run; False otherwise.
+ """
+ prompt = (
+ "Repo %s run the script:\n"
+ " %s\n"
+ "\n"
+ "Do you want to allow this script to run"
+ )
+ return self._CheckForHookApprovalHelper(
+ "approvedhash",
+ self._GetHash(),
+ prompt % (self._GetMustVerb(), self._script_fullpath),
+ "Scripts have changed since %s was allowed." % (self._hook_type,),
+ )
- Args:
- data: The file content of the script.
+ @staticmethod
+ def _ExtractInterpFromShebang(data):
+ """Extract the interpreter used in the shebang.
- Returns:
- The basename of the main script interpreter, or None if a shebang is not
- used or could not be parsed out.
- """
- firstline = data.splitlines()[:1]
- if not firstline:
- return None
+ Try to locate the interpreter the script is using (ignoring `env`).
- # The format here can be tricky.
- shebang = firstline[0].strip()
- m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang)
- if not m:
- return None
+ Args:
+ data: The file content of the script.
- # If the using `env`, find the target program.
- interp = m.group(1)
- if os.path.basename(interp) == 'env':
- interp = m.group(2)
+ Returns:
+ The basename of the main script interpreter, or None if a shebang is
+ not used or could not be parsed out.
+ """
+ firstline = data.splitlines()[:1]
+ if not firstline:
+ return None
- return interp
+ # The format here can be tricky.
+ shebang = firstline[0].strip()
+ m = re.match(r"^#!\s*([^\s]+)(?:\s+([^\s]+))?", shebang)
+ if not m:
+ return None
- def _ExecuteHookViaReexec(self, interp, context, **kwargs):
- """Execute the hook script through |interp|.
+ # If using `env`, find the target program.
+ interp = m.group(1)
+ if os.path.basename(interp) == "env":
+ interp = m.group(2)
- Note: Support for this feature should be dropped ~Jun 2021.
+ return interp
- Args:
- interp: The Python program to run.
- context: Basic Python context to execute the hook inside.
- kwargs: Arbitrary arguments to pass to the hook script.
+ def _ExecuteHookViaReexec(self, interp, context, **kwargs):
+ """Execute the hook script through |interp|.
- Raises:
- HookError: When the hooks failed for any reason.
- """
- # This logic needs to be kept in sync with _ExecuteHookViaImport below.
- script = """
+ Note: Support for this feature should be dropped ~Jun 2021.
+
+ Args:
+ interp: The Python program to run.
+ context: Basic Python context to execute the hook inside.
+ kwargs: Arbitrary arguments to pass to the hook script.
+
+ Raises:
+ HookError: When the hooks failed for any reason.
+ """
+ # This logic needs to be kept in sync with _ExecuteHookViaImport below.
+ script = """
import json, os, sys
path = '''%(path)s'''
kwargs = json.loads('''%(kwargs)s''')
@@ -300,210 +322,240 @@
exec(compile(data, path, 'exec'), context)
context['main'](**kwargs)
""" % {
- 'path': self._script_fullpath,
- 'kwargs': json.dumps(kwargs),
- 'context': json.dumps(context),
- }
+ "path": self._script_fullpath,
+ "kwargs": json.dumps(kwargs),
+ "context": json.dumps(context),
+ }
- # We pass the script via stdin to avoid OS argv limits. It also makes
- # unhandled exception tracebacks less verbose/confusing for users.
- cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())']
- proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
- proc.communicate(input=script.encode('utf-8'))
- if proc.returncode:
- raise HookError('Failed to run %s hook.' % (self._hook_type,))
+ # We pass the script via stdin to avoid OS argv limits. It also makes
+ # unhandled exception tracebacks less verbose/confusing for users.
+ cmd = [interp, "-c", "import sys; exec(sys.stdin.read())"]
+ proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
+ proc.communicate(input=script.encode("utf-8"))
+ if proc.returncode:
+ raise HookError("Failed to run %s hook." % (self._hook_type,))
- def _ExecuteHookViaImport(self, data, context, **kwargs):
- """Execute the hook code in |data| directly.
+ def _ExecuteHookViaImport(self, data, context, **kwargs):
+ """Execute the hook code in |data| directly.
- Args:
- data: The code of the hook to execute.
- context: Basic Python context to execute the hook inside.
- kwargs: Arbitrary arguments to pass to the hook script.
+ Args:
+ data: The code of the hook to execute.
+ context: Basic Python context to execute the hook inside.
+ kwargs: Arbitrary arguments to pass to the hook script.
- Raises:
- HookError: When the hooks failed for any reason.
- """
- # Exec, storing global context in the context dict. We catch exceptions
- # and convert to a HookError w/ just the failing traceback.
- try:
- exec(compile(data, self._script_fullpath, 'exec'), context)
- except Exception:
- raise HookError('%s\nFailed to import %s hook; see traceback above.' %
- (traceback.format_exc(), self._hook_type))
-
- # Running the script should have defined a main() function.
- if 'main' not in context:
- raise HookError('Missing main() in: "%s"' % self._script_fullpath)
-
- # Call the main function in the hook. If the hook should cause the
- # build to fail, it will raise an Exception. We'll catch that convert
- # to a HookError w/ just the failing traceback.
- try:
- context['main'](**kwargs)
- except Exception:
- raise HookError('%s\nFailed to run main() for %s hook; see traceback '
- 'above.' % (traceback.format_exc(), self._hook_type))
-
- def _ExecuteHook(self, **kwargs):
- """Actually execute the given hook.
-
- This will run the hook's 'main' function in our python interpreter.
-
- Args:
- kwargs: Keyword arguments to pass to the hook. These are often specific
- to the hook type. For instance, pre-upload hooks will contain
- a project_list.
- """
- # Keep sys.path and CWD stashed away so that we can always restore them
- # upon function exit.
- orig_path = os.getcwd()
- orig_syspath = sys.path
-
- try:
- # Always run hooks with CWD as topdir.
- os.chdir(self._repo_topdir)
-
- # Put the hook dir as the first item of sys.path so hooks can do
- # relative imports. We want to replace the repo dir as [0] so
- # hooks can't import repo files.
- sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
-
- # Initial global context for the hook to run within.
- context = {'__file__': self._script_fullpath}
-
- # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
- # We don't actually want hooks to define their main with this argument--
- # it's there to remind them that their hook should always take **kwargs.
- # For instance, a pre-upload hook should be defined like:
- # def main(project_list, **kwargs):
- #
- # This allows us to later expand the API without breaking old hooks.
- kwargs = kwargs.copy()
- kwargs['hook_should_take_kwargs'] = True
-
- # See what version of python the hook has been written against.
- data = open(self._script_fullpath).read()
- interp = self._ExtractInterpFromShebang(data)
- reexec = False
- if interp:
- prog = os.path.basename(interp)
- if prog.startswith('python2') and sys.version_info.major != 2:
- reexec = True
- elif prog.startswith('python3') and sys.version_info.major == 2:
- reexec = True
-
- # Attempt to execute the hooks through the requested version of Python.
- if reexec:
+ Raises:
+ HookError: When the hooks failed for any reason.
+ """
+ # Exec, storing global context in the context dict. We catch exceptions
+ # and convert to a HookError w/ just the failing traceback.
try:
- self._ExecuteHookViaReexec(interp, context, **kwargs)
- except OSError as e:
- if e.errno == errno.ENOENT:
- # We couldn't find the interpreter, so fallback to importing.
+ exec(compile(data, self._script_fullpath, "exec"), context)
+ except Exception:
+ raise HookError(
+ "%s\nFailed to import %s hook; see traceback above."
+ % (traceback.format_exc(), self._hook_type)
+ )
+
+ # Running the script should have defined a main() function.
+ if "main" not in context:
+ raise HookError('Missing main() in: "%s"' % self._script_fullpath)
+
+ # Call the main function in the hook. If the hook should cause the
+ # build to fail, it will raise an Exception. We'll catch that and convert
+ # to a HookError w/ just the failing traceback.
+ try:
+ context["main"](**kwargs)
+ except Exception:
+ raise HookError(
+ "%s\nFailed to run main() for %s hook; see traceback "
+ "above." % (traceback.format_exc(), self._hook_type)
+ )
+
+ def _ExecuteHook(self, **kwargs):
+ """Actually execute the given hook.
+
+ This will run the hook's 'main' function in our python interpreter.
+
+ Args:
+ kwargs: Keyword arguments to pass to the hook. These are often
+ specific to the hook type. For instance, pre-upload hooks will
+ contain a project_list.
+ """
+ # Keep sys.path and CWD stashed away so that we can always restore them
+ # upon function exit.
+ orig_path = os.getcwd()
+ orig_syspath = sys.path
+
+ try:
+ # Always run hooks with CWD as topdir.
+ os.chdir(self._repo_topdir)
+
+ # Put the hook dir as the first item of sys.path so hooks can do
+ # relative imports. We want to replace the repo dir as [0] so
+ # hooks can't import repo files.
+ sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
+
+ # Initial global context for the hook to run within.
+ context = {"__file__": self._script_fullpath}
+
+ # Add 'hook_should_take_kwargs' to the arguments to be passed to
+ # main. We don't actually want hooks to define their main with this
+ # argument--it's there to remind them that their hook should always
+ # take **kwargs.
+ # For instance, a pre-upload hook should be defined like:
+ # def main(project_list, **kwargs):
+ #
+ # This allows us to later expand the API without breaking old hooks.
+ kwargs = kwargs.copy()
+ kwargs["hook_should_take_kwargs"] = True
+
+ # See what version of python the hook has been written against.
+ data = open(self._script_fullpath).read()
+ interp = self._ExtractInterpFromShebang(data)
reexec = False
- else:
- raise
+ if interp:
+ prog = os.path.basename(interp)
+ if prog.startswith("python2") and sys.version_info.major != 2:
+ reexec = True
+ elif prog.startswith("python3") and sys.version_info.major == 2:
+ reexec = True
- # Run the hook by importing directly.
- if not reexec:
- self._ExecuteHookViaImport(data, context, **kwargs)
- finally:
- # Restore sys.path and CWD.
- sys.path = orig_syspath
- os.chdir(orig_path)
+ # Attempt to execute the hooks through the requested version of
+ # Python.
+ if reexec:
+ try:
+ self._ExecuteHookViaReexec(interp, context, **kwargs)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ # We couldn't find the interpreter, so fallback to
+ # importing.
+ reexec = False
+ else:
+ raise
- def _CheckHook(self):
- # Bail with a nice error if we can't find the hook.
- if not os.path.isfile(self._script_fullpath):
- raise HookError('Couldn\'t find repo hook: %s' % self._script_fullpath)
+ # Run the hook by importing directly.
+ if not reexec:
+ self._ExecuteHookViaImport(data, context, **kwargs)
+ finally:
+ # Restore sys.path and CWD.
+ sys.path = orig_syspath
+ os.chdir(orig_path)
- def Run(self, **kwargs):
- """Run the hook.
+ def _CheckHook(self):
+ # Bail with a nice error if we can't find the hook.
+ if not os.path.isfile(self._script_fullpath):
+ raise HookError(
+ "Couldn't find repo hook: %s" % self._script_fullpath
+ )
- If the hook doesn't exist (because there is no hooks project or because
- this particular hook is not enabled), this is a no-op.
+ def Run(self, **kwargs):
+ """Run the hook.
- Args:
- user_allows_all_hooks: If True, we will never prompt about running the
- hook--we'll just assume it's OK to run it.
- kwargs: Keyword arguments to pass to the hook. These are often specific
- to the hook type. For instance, pre-upload hooks will contain
- a project_list.
+ If the hook doesn't exist (because there is no hooks project or because
+ this particular hook is not enabled), this is a no-op.
- Returns:
- True: On success or ignore hooks by user-request
- False: The hook failed. The caller should respond with aborting the action.
- Some examples in which False is returned:
- * Finding the hook failed while it was enabled, or
- * the user declined to run a required hook (from _CheckForHookApproval)
- In all these cases the user did not pass the proper arguments to
- ignore the result through the option combinations as listed in
- AddHookOptionGroup().
- """
- # Do not do anything in case bypass_hooks is set, or
- # no-op if there is no hooks project or if hook is disabled.
- if (self._bypass_hooks or
- not self._hooks_project or
- self._hook_type not in self._hooks_project.enabled_repo_hooks):
- return True
+ Args:
+ user_allows_all_hooks: If True, we will never prompt about running
+ the hook--we'll just assume it's OK to run it.
+ kwargs: Keyword arguments to pass to the hook. These are often
+ specific to the hook type. For instance, pre-upload hooks will
+ contain a project_list.
- passed = True
- try:
- self._CheckHook()
+ Returns:
+ True: On success or ignore hooks by user-request
+ False: The hook failed. The caller should respond with aborting the
+ action. Some examples in which False is returned:
+ * Finding the hook failed while it was enabled, or
+ * the user declined to run a required hook (from
+ _CheckForHookApproval)
+ In all these cases the user did not pass the proper arguments to
+ ignore the result through the option combinations as listed in
+ AddHookOptionGroup().
+ """
+ # Do not do anything in case bypass_hooks is set, or
+ # no-op if there is no hooks project or if hook is disabled.
+ if (
+ self._bypass_hooks
+ or not self._hooks_project
+ or self._hook_type not in self._hooks_project.enabled_repo_hooks
+ ):
+ return True
- # Make sure the user is OK with running the hook.
- if self._allow_all_hooks or self._CheckForHookApproval():
- # Run the hook with the same version of python we're using.
- self._ExecuteHook(**kwargs)
- except SystemExit as e:
- passed = False
- print('ERROR: %s hooks exited with exit code: %s' % (self._hook_type, str(e)),
- file=sys.stderr)
- except HookError as e:
- passed = False
- print('ERROR: %s' % str(e), file=sys.stderr)
+ passed = True
+ try:
+ self._CheckHook()
- if not passed and self._ignore_hooks:
- print('\nWARNING: %s hooks failed, but continuing anyways.' % self._hook_type,
- file=sys.stderr)
- passed = True
+ # Make sure the user is OK with running the hook.
+ if self._allow_all_hooks or self._CheckForHookApproval():
+ # Run the hook with the same version of python we're using.
+ self._ExecuteHook(**kwargs)
+ except SystemExit as e:
+ passed = False
+ print(
+ "ERROR: %s hooks exited with exit code: %s"
+ % (self._hook_type, str(e)),
+ file=sys.stderr,
+ )
+ except HookError as e:
+ passed = False
+ print("ERROR: %s" % str(e), file=sys.stderr)
- return passed
+ if not passed and self._ignore_hooks:
+ print(
+ "\nWARNING: %s hooks failed, but continuing anyways."
+ % self._hook_type,
+ file=sys.stderr,
+ )
+ passed = True
- @classmethod
- def FromSubcmd(cls, manifest, opt, *args, **kwargs):
- """Method to construct the repo hook class
+ return passed
- Args:
- manifest: The current active manifest for this command from which we
- extract a couple of fields.
- opt: Contains the commandline options for the action of this hook.
- It should contain the options added by AddHookOptionGroup() in which
- we are interested in RepoHook execution.
- """
- for key in ('bypass_hooks', 'allow_all_hooks', 'ignore_hooks'):
- kwargs.setdefault(key, getattr(opt, key))
- kwargs.update({
- 'hooks_project': manifest.repo_hooks_project,
- 'repo_topdir': manifest.topdir,
- 'manifest_url': manifest.manifestProject.GetRemote('origin').url,
- })
- return cls(*args, **kwargs)
+ @classmethod
+ def FromSubcmd(cls, manifest, opt, *args, **kwargs):
+ """Method to construct the repo hook class
- @staticmethod
- def AddOptionGroup(parser, name):
- """Help options relating to the various hooks."""
+ Args:
+ manifest: The current active manifest for this command from which we
+ extract a couple of fields.
+ opt: Contains the commandline options for the action of this hook.
+ It should contain the options added by AddHookOptionGroup() in
+ which we are interested in RepoHook execution.
+ """
+ for key in ("bypass_hooks", "allow_all_hooks", "ignore_hooks"):
+ kwargs.setdefault(key, getattr(opt, key))
+ kwargs.update(
+ {
+ "hooks_project": manifest.repo_hooks_project,
+ "repo_topdir": manifest.topdir,
+ "manifest_url": manifest.manifestProject.GetRemote(
+ "origin"
+ ).url,
+ }
+ )
+ return cls(*args, **kwargs)
- # Note that verify and no-verify are NOT opposites of each other, which
- # is why they store to different locations. We are using them to match
- # 'git commit' syntax.
- group = parser.add_option_group(name + ' hooks')
- group.add_option('--no-verify',
- dest='bypass_hooks', action='store_true',
- help='Do not run the %s hook.' % name)
- group.add_option('--verify',
- dest='allow_all_hooks', action='store_true',
- help='Run the %s hook without prompting.' % name)
- group.add_option('--ignore-hooks',
- action='store_true',
- help='Do not abort if %s hooks fail.' % name)
+ @staticmethod
+ def AddOptionGroup(parser, name):
+ """Help options relating to the various hooks."""
+
+ # Note that verify and no-verify are NOT opposites of each other, which
+ # is why they store to different locations. We are using them to match
+ # 'git commit' syntax.
+ group = parser.add_option_group(name + " hooks")
+ group.add_option(
+ "--no-verify",
+ dest="bypass_hooks",
+ action="store_true",
+ help="Do not run the %s hook." % name,
+ )
+ group.add_option(
+ "--verify",
+ dest="allow_all_hooks",
+ action="store_true",
+ help="Run the %s hook without prompting." % name,
+ )
+ group.add_option(
+ "--ignore-hooks",
+ action="store_true",
+ help="Do not abort if %s hooks fail." % name,
+ )
diff --git a/main.py b/main.py
index f4b6e7a..6dcb66f 100755
--- a/main.py
+++ b/main.py
@@ -31,9 +31,9 @@
import urllib.request
try:
- import kerberos
+ import kerberos
except ImportError:
- kerberos = None
+ kerberos = None
from color import SetDefaultColoring
import event_log
@@ -74,347 +74,442 @@
MIN_PYTHON_VERSION_HARD = (3, 6)
if sys.version_info.major < 3:
- print('repo: error: Python 2 is no longer supported; '
- 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
- file=sys.stderr)
- sys.exit(1)
-else:
- if sys.version_info < MIN_PYTHON_VERSION_HARD:
- print('repo: error: Python 3 version is too old; '
- 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
- file=sys.stderr)
+ print(
+ "repo: error: Python 2 is no longer supported; "
+ "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
+ file=sys.stderr,
+ )
sys.exit(1)
- elif sys.version_info < MIN_PYTHON_VERSION_SOFT:
- print('repo: warning: your Python 3 version is no longer supported; '
- 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
- file=sys.stderr)
+else:
+ if sys.version_info < MIN_PYTHON_VERSION_HARD:
+ print(
+ "repo: error: Python 3 version is too old; "
+ "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ elif sys.version_info < MIN_PYTHON_VERSION_SOFT:
+ print(
+ "repo: warning: your Python 3 version is no longer supported; "
+ "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
+ file=sys.stderr,
+ )
global_options = optparse.OptionParser(
- usage='repo [-p|--paginate|--no-pager] COMMAND [ARGS]',
- add_help_option=False)
-global_options.add_option('-h', '--help', action='store_true',
- help='show this help message and exit')
-global_options.add_option('--help-all', action='store_true',
- help='show this help message with all subcommands and exit')
-global_options.add_option('-p', '--paginate',
- dest='pager', action='store_true',
- help='display command output in the pager')
-global_options.add_option('--no-pager',
- dest='pager', action='store_false',
- help='disable the pager')
-global_options.add_option('--color',
- choices=('auto', 'always', 'never'), default=None,
- help='control color usage: auto, always, never')
-global_options.add_option('--trace',
- dest='trace', action='store_true',
- help='trace git command execution (REPO_TRACE=1)')
-global_options.add_option('--trace-to-stderr',
- dest='trace_to_stderr', action='store_true',
- help='trace outputs go to stderr in addition to .repo/TRACE_FILE')
-global_options.add_option('--trace-python',
- dest='trace_python', action='store_true',
- help='trace python command execution')
-global_options.add_option('--time',
- dest='time', action='store_true',
- help='time repo command execution')
-global_options.add_option('--version',
- dest='show_version', action='store_true',
- help='display this version of repo')
-global_options.add_option('--show-toplevel',
- action='store_true',
- help='display the path of the top-level directory of '
- 'the repo client checkout')
-global_options.add_option('--event-log',
- dest='event_log', action='store',
- help='filename of event log to append timeline to')
-global_options.add_option('--git-trace2-event-log', action='store',
- help='directory to write git trace2 event log to')
-global_options.add_option('--submanifest-path', action='store',
- metavar='REL_PATH', help='submanifest path')
+ usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]",
+ add_help_option=False,
+)
+global_options.add_option(
+ "-h", "--help", action="store_true", help="show this help message and exit"
+)
+global_options.add_option(
+ "--help-all",
+ action="store_true",
+ help="show this help message with all subcommands and exit",
+)
+global_options.add_option(
+ "-p",
+ "--paginate",
+ dest="pager",
+ action="store_true",
+ help="display command output in the pager",
+)
+global_options.add_option(
+ "--no-pager", dest="pager", action="store_false", help="disable the pager"
+)
+global_options.add_option(
+ "--color",
+ choices=("auto", "always", "never"),
+ default=None,
+ help="control color usage: auto, always, never",
+)
+global_options.add_option(
+ "--trace",
+ dest="trace",
+ action="store_true",
+ help="trace git command execution (REPO_TRACE=1)",
+)
+global_options.add_option(
+ "--trace-to-stderr",
+ dest="trace_to_stderr",
+ action="store_true",
+ help="trace outputs go to stderr in addition to .repo/TRACE_FILE",
+)
+global_options.add_option(
+ "--trace-python",
+ dest="trace_python",
+ action="store_true",
+ help="trace python command execution",
+)
+global_options.add_option(
+ "--time",
+ dest="time",
+ action="store_true",
+ help="time repo command execution",
+)
+global_options.add_option(
+ "--version",
+ dest="show_version",
+ action="store_true",
+ help="display this version of repo",
+)
+global_options.add_option(
+ "--show-toplevel",
+ action="store_true",
+ help="display the path of the top-level directory of "
+ "the repo client checkout",
+)
+global_options.add_option(
+ "--event-log",
+ dest="event_log",
+ action="store",
+ help="filename of event log to append timeline to",
+)
+global_options.add_option(
+ "--git-trace2-event-log",
+ action="store",
+ help="directory to write git trace2 event log to",
+)
+global_options.add_option(
+ "--submanifest-path",
+ action="store",
+ metavar="REL_PATH",
+ help="submanifest path",
+)
class _Repo(object):
- def __init__(self, repodir):
- self.repodir = repodir
- self.commands = all_commands
+ def __init__(self, repodir):
+ self.repodir = repodir
+ self.commands = all_commands
- def _PrintHelp(self, short: bool = False, all_commands: bool = False):
- """Show --help screen."""
- global_options.print_help()
- print()
- if short:
- commands = ' '.join(sorted(self.commands))
- wrapped_commands = textwrap.wrap(commands, width=77)
- print('Available commands:\n %s' % ('\n '.join(wrapped_commands),))
- print('\nRun `repo help <command>` for command-specific details.')
- print('Bug reports:', Wrapper().BUG_URL)
- else:
- cmd = self.commands['help']()
- if all_commands:
- cmd.PrintAllCommandsBody()
- else:
- cmd.PrintCommonCommandsBody()
-
- def _ParseArgs(self, argv):
- """Parse the main `repo` command line options."""
- for i, arg in enumerate(argv):
- if not arg.startswith('-'):
- name = arg
- glob = argv[:i]
- argv = argv[i + 1:]
- break
- else:
- name = None
- glob = argv
- argv = []
- gopts, _gargs = global_options.parse_args(glob)
-
- if name:
- name, alias_args = self._ExpandAlias(name)
- argv = alias_args + argv
-
- return (name, gopts, argv)
-
- def _ExpandAlias(self, name):
- """Look up user registered aliases."""
- # We don't resolve aliases for existing subcommands. This matches git.
- if name in self.commands:
- return name, []
-
- key = 'alias.%s' % (name,)
- alias = RepoConfig.ForRepository(self.repodir).GetString(key)
- if alias is None:
- alias = RepoConfig.ForUser().GetString(key)
- if alias is None:
- return name, []
-
- args = alias.strip().split(' ', 1)
- name = args[0]
- if len(args) == 2:
- args = shlex.split(args[1])
- else:
- args = []
- return name, args
-
- def _Run(self, name, gopts, argv):
- """Execute the requested subcommand."""
- result = 0
-
- # Handle options that terminate quickly first.
- if gopts.help or gopts.help_all:
- self._PrintHelp(short=False, all_commands=gopts.help_all)
- return 0
- elif gopts.show_version:
- # Always allow global --version regardless of subcommand validity.
- name = 'version'
- elif gopts.show_toplevel:
- print(os.path.dirname(self.repodir))
- return 0
- elif not name:
- # No subcommand specified, so show the help/subcommand.
- self._PrintHelp(short=True)
- return 1
-
- run = lambda: self._RunLong(name, gopts, argv) or 0
- with Trace('starting new command: %s', ', '.join([name] + argv),
- first_trace=True):
- if gopts.trace_python:
- import trace
- tracer = trace.Trace(count=False, trace=True, timing=True,
- ignoredirs=set(sys.path[1:]))
- result = tracer.runfunc(run)
- else:
- result = run()
- return result
-
- def _RunLong(self, name, gopts, argv):
- """Execute the (longer running) requested subcommand."""
- result = 0
- SetDefaultColoring(gopts.color)
-
- git_trace2_event_log = EventLog()
- outer_client = RepoClient(self.repodir)
- repo_client = outer_client
- if gopts.submanifest_path:
- repo_client = RepoClient(self.repodir,
- submanifest_path=gopts.submanifest_path,
- outer_client=outer_client)
- gitc_manifest = None
- gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
- if gitc_client_name:
- gitc_manifest = GitcClient(self.repodir, gitc_client_name)
- repo_client.isGitcClient = True
-
- try:
- cmd = self.commands[name](
- repodir=self.repodir,
- client=repo_client,
- manifest=repo_client.manifest,
- outer_client=outer_client,
- outer_manifest=outer_client.manifest,
- gitc_manifest=gitc_manifest,
- git_event_log=git_trace2_event_log)
- except KeyError:
- print("repo: '%s' is not a repo command. See 'repo help'." % name,
- file=sys.stderr)
- return 1
-
- Editor.globalConfig = cmd.client.globalConfig
-
- if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
- print("fatal: '%s' requires a working directory" % name,
- file=sys.stderr)
- return 1
-
- if isinstance(cmd, GitcAvailableCommand) and not gitc_utils.get_gitc_manifest_dir():
- print("fatal: '%s' requires GITC to be available" % name,
- file=sys.stderr)
- return 1
-
- if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
- print("fatal: '%s' requires a GITC client" % name,
- file=sys.stderr)
- return 1
-
- try:
- copts, cargs = cmd.OptionParser.parse_args(argv)
- copts = cmd.ReadEnvironmentOptions(copts)
- except NoManifestException as e:
- print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
- file=sys.stderr)
- print('error: manifest missing or unreadable -- please run init',
- file=sys.stderr)
- return 1
-
- if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
- config = cmd.client.globalConfig
- if gopts.pager:
- use_pager = True
- else:
- use_pager = config.GetBoolean('pager.%s' % name)
- if use_pager is None:
- use_pager = cmd.WantPager(copts)
- if use_pager:
- RunPager(config)
-
- start = time.time()
- cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
- cmd.event_log.SetParent(cmd_event)
- git_trace2_event_log.StartEvent()
- git_trace2_event_log.CommandEvent(name='repo', subcommands=[name])
-
- try:
- cmd.CommonValidateOptions(copts, cargs)
- cmd.ValidateOptions(copts, cargs)
-
- this_manifest_only = copts.this_manifest_only
- outer_manifest = copts.outer_manifest
- if cmd.MULTI_MANIFEST_SUPPORT or this_manifest_only:
- result = cmd.Execute(copts, cargs)
- elif outer_manifest and repo_client.manifest.is_submanifest:
- # The command does not support multi-manifest, we are using a
- # submanifest, and the command line is for the outermost manifest.
- # Re-run using the outermost manifest, which will recurse through the
- # submanifests.
- gopts.submanifest_path = ''
- result = self._Run(name, gopts, argv)
- else:
- # No multi-manifest support. Run the command in the current
- # (sub)manifest, and then any child submanifests.
- result = cmd.Execute(copts, cargs)
- for submanifest in repo_client.manifest.submanifests.values():
- spec = submanifest.ToSubmanifestSpec()
- gopts.submanifest_path = submanifest.repo_client.path_prefix
- child_argv = argv[:]
- child_argv.append('--no-outer-manifest')
- # Not all subcommands support the 3 manifest options, so only add them
- # if the original command includes them.
- if hasattr(copts, 'manifest_url'):
- child_argv.extend(['--manifest-url', spec.manifestUrl])
- if hasattr(copts, 'manifest_name'):
- child_argv.extend(['--manifest-name', spec.manifestName])
- if hasattr(copts, 'manifest_branch'):
- child_argv.extend(['--manifest-branch', spec.revision])
- result = self._Run(name, gopts, child_argv) or result
- except (DownloadError, ManifestInvalidRevisionError,
- NoManifestException) as e:
- print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
- file=sys.stderr)
- if isinstance(e, NoManifestException):
- print('error: manifest missing or unreadable -- please run init',
- file=sys.stderr)
- result = 1
- except NoSuchProjectError as e:
- if e.name:
- print('error: project %s not found' % e.name, file=sys.stderr)
- else:
- print('error: no project in current directory', file=sys.stderr)
- result = 1
- except InvalidProjectGroupsError as e:
- if e.name:
- print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
- else:
- print('error: project group must be enabled for the project in the current directory',
- file=sys.stderr)
- result = 1
- except SystemExit as e:
- if e.code:
- result = e.code
- raise
- finally:
- finish = time.time()
- elapsed = finish - start
- hours, remainder = divmod(elapsed, 3600)
- minutes, seconds = divmod(remainder, 60)
- if gopts.time:
- if hours == 0:
- print('real\t%dm%.3fs' % (minutes, seconds), file=sys.stderr)
+ def _PrintHelp(self, short: bool = False, all_commands: bool = False):
+ """Show --help screen."""
+ global_options.print_help()
+ print()
+ if short:
+ commands = " ".join(sorted(self.commands))
+ wrapped_commands = textwrap.wrap(commands, width=77)
+ print(
+ "Available commands:\n %s" % ("\n ".join(wrapped_commands),)
+ )
+ print("\nRun `repo help <command>` for command-specific details.")
+ print("Bug reports:", Wrapper().BUG_URL)
else:
- print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
- file=sys.stderr)
+ cmd = self.commands["help"]()
+ if all_commands:
+ cmd.PrintAllCommandsBody()
+ else:
+ cmd.PrintCommonCommandsBody()
- cmd.event_log.FinishEvent(cmd_event, finish,
- result is None or result == 0)
- git_trace2_event_log.DefParamRepoEvents(
- cmd.manifest.manifestProject.config.DumpConfigDict())
- git_trace2_event_log.ExitEvent(result)
+ def _ParseArgs(self, argv):
+ """Parse the main `repo` command line options."""
+ for i, arg in enumerate(argv):
+ if not arg.startswith("-"):
+ name = arg
+ glob = argv[:i]
+ argv = argv[i + 1 :]
+ break
+ else:
+ name = None
+ glob = argv
+ argv = []
+ gopts, _gargs = global_options.parse_args(glob)
- if gopts.event_log:
- cmd.event_log.Write(os.path.abspath(
- os.path.expanduser(gopts.event_log)))
+ if name:
+ name, alias_args = self._ExpandAlias(name)
+ argv = alias_args + argv
- git_trace2_event_log.Write(gopts.git_trace2_event_log)
- return result
+ return (name, gopts, argv)
+
+ def _ExpandAlias(self, name):
+ """Look up user registered aliases."""
+ # We don't resolve aliases for existing subcommands. This matches git.
+ if name in self.commands:
+ return name, []
+
+ key = "alias.%s" % (name,)
+ alias = RepoConfig.ForRepository(self.repodir).GetString(key)
+ if alias is None:
+ alias = RepoConfig.ForUser().GetString(key)
+ if alias is None:
+ return name, []
+
+ args = alias.strip().split(" ", 1)
+ name = args[0]
+ if len(args) == 2:
+ args = shlex.split(args[1])
+ else:
+ args = []
+ return name, args
+
+ def _Run(self, name, gopts, argv):
+ """Execute the requested subcommand."""
+ result = 0
+
+ # Handle options that terminate quickly first.
+ if gopts.help or gopts.help_all:
+ self._PrintHelp(short=False, all_commands=gopts.help_all)
+ return 0
+ elif gopts.show_version:
+ # Always allow global --version regardless of subcommand validity.
+ name = "version"
+ elif gopts.show_toplevel:
+ print(os.path.dirname(self.repodir))
+ return 0
+ elif not name:
+ # No subcommand specified, so show the help/subcommand.
+ self._PrintHelp(short=True)
+ return 1
+
+ run = lambda: self._RunLong(name, gopts, argv) or 0
+ with Trace(
+ "starting new command: %s",
+ ", ".join([name] + argv),
+ first_trace=True,
+ ):
+ if gopts.trace_python:
+ import trace
+
+ tracer = trace.Trace(
+ count=False,
+ trace=True,
+ timing=True,
+ ignoredirs=set(sys.path[1:]),
+ )
+ result = tracer.runfunc(run)
+ else:
+ result = run()
+ return result
+
+ def _RunLong(self, name, gopts, argv):
+ """Execute the (longer running) requested subcommand."""
+ result = 0
+ SetDefaultColoring(gopts.color)
+
+ git_trace2_event_log = EventLog()
+ outer_client = RepoClient(self.repodir)
+ repo_client = outer_client
+ if gopts.submanifest_path:
+ repo_client = RepoClient(
+ self.repodir,
+ submanifest_path=gopts.submanifest_path,
+ outer_client=outer_client,
+ )
+ gitc_manifest = None
+ gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
+ if gitc_client_name:
+ gitc_manifest = GitcClient(self.repodir, gitc_client_name)
+ repo_client.isGitcClient = True
+
+ try:
+ cmd = self.commands[name](
+ repodir=self.repodir,
+ client=repo_client,
+ manifest=repo_client.manifest,
+ outer_client=outer_client,
+ outer_manifest=outer_client.manifest,
+ gitc_manifest=gitc_manifest,
+ git_event_log=git_trace2_event_log,
+ )
+ except KeyError:
+ print(
+ "repo: '%s' is not a repo command. See 'repo help'." % name,
+ file=sys.stderr,
+ )
+ return 1
+
+ Editor.globalConfig = cmd.client.globalConfig
+
+ if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
+ print(
+ "fatal: '%s' requires a working directory" % name,
+ file=sys.stderr,
+ )
+ return 1
+
+ if (
+ isinstance(cmd, GitcAvailableCommand)
+ and not gitc_utils.get_gitc_manifest_dir()
+ ):
+ print(
+ "fatal: '%s' requires GITC to be available" % name,
+ file=sys.stderr,
+ )
+ return 1
+
+ if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
+ print("fatal: '%s' requires a GITC client" % name, file=sys.stderr)
+ return 1
+
+ try:
+ copts, cargs = cmd.OptionParser.parse_args(argv)
+ copts = cmd.ReadEnvironmentOptions(copts)
+ except NoManifestException as e:
+ print(
+ "error: in `%s`: %s" % (" ".join([name] + argv), str(e)),
+ file=sys.stderr,
+ )
+ print(
+ "error: manifest missing or unreadable -- please run init",
+ file=sys.stderr,
+ )
+ return 1
+
+ if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
+ config = cmd.client.globalConfig
+ if gopts.pager:
+ use_pager = True
+ else:
+ use_pager = config.GetBoolean("pager.%s" % name)
+ if use_pager is None:
+ use_pager = cmd.WantPager(copts)
+ if use_pager:
+ RunPager(config)
+
+ start = time.time()
+ cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
+ cmd.event_log.SetParent(cmd_event)
+ git_trace2_event_log.StartEvent()
+ git_trace2_event_log.CommandEvent(name="repo", subcommands=[name])
+
+ try:
+ cmd.CommonValidateOptions(copts, cargs)
+ cmd.ValidateOptions(copts, cargs)
+
+ this_manifest_only = copts.this_manifest_only
+ outer_manifest = copts.outer_manifest
+ if cmd.MULTI_MANIFEST_SUPPORT or this_manifest_only:
+ result = cmd.Execute(copts, cargs)
+ elif outer_manifest and repo_client.manifest.is_submanifest:
+ # The command does not support multi-manifest, we are using a
+ # submanifest, and the command line is for the outermost
+ # manifest. Re-run using the outermost manifest, which will
+ # recurse through the submanifests.
+ gopts.submanifest_path = ""
+ result = self._Run(name, gopts, argv)
+ else:
+ # No multi-manifest support. Run the command in the current
+ # (sub)manifest, and then any child submanifests.
+ result = cmd.Execute(copts, cargs)
+ for submanifest in repo_client.manifest.submanifests.values():
+ spec = submanifest.ToSubmanifestSpec()
+ gopts.submanifest_path = submanifest.repo_client.path_prefix
+ child_argv = argv[:]
+ child_argv.append("--no-outer-manifest")
+ # Not all subcommands support the 3 manifest options, so
+ # only add them if the original command includes them.
+ if hasattr(copts, "manifest_url"):
+ child_argv.extend(["--manifest-url", spec.manifestUrl])
+ if hasattr(copts, "manifest_name"):
+ child_argv.extend(
+ ["--manifest-name", spec.manifestName]
+ )
+ if hasattr(copts, "manifest_branch"):
+ child_argv.extend(["--manifest-branch", spec.revision])
+ result = self._Run(name, gopts, child_argv) or result
+ except (
+ DownloadError,
+ ManifestInvalidRevisionError,
+ NoManifestException,
+ ) as e:
+ print(
+ "error: in `%s`: %s" % (" ".join([name] + argv), str(e)),
+ file=sys.stderr,
+ )
+ if isinstance(e, NoManifestException):
+ print(
+ "error: manifest missing or unreadable -- please run init",
+ file=sys.stderr,
+ )
+ result = 1
+ except NoSuchProjectError as e:
+ if e.name:
+ print("error: project %s not found" % e.name, file=sys.stderr)
+ else:
+ print("error: no project in current directory", file=sys.stderr)
+ result = 1
+ except InvalidProjectGroupsError as e:
+ if e.name:
+ print(
+ "error: project group must be enabled for project %s"
+ % e.name,
+ file=sys.stderr,
+ )
+ else:
+ print(
+ "error: project group must be enabled for the project in "
+ "the current directory",
+ file=sys.stderr,
+ )
+ result = 1
+ except SystemExit as e:
+ if e.code:
+ result = e.code
+ raise
+ finally:
+ finish = time.time()
+ elapsed = finish - start
+ hours, remainder = divmod(elapsed, 3600)
+ minutes, seconds = divmod(remainder, 60)
+ if gopts.time:
+ if hours == 0:
+ print(
+ "real\t%dm%.3fs" % (minutes, seconds), file=sys.stderr
+ )
+ else:
+ print(
+ "real\t%dh%dm%.3fs" % (hours, minutes, seconds),
+ file=sys.stderr,
+ )
+
+ cmd.event_log.FinishEvent(
+ cmd_event, finish, result is None or result == 0
+ )
+ git_trace2_event_log.DefParamRepoEvents(
+ cmd.manifest.manifestProject.config.DumpConfigDict()
+ )
+ git_trace2_event_log.ExitEvent(result)
+
+ if gopts.event_log:
+ cmd.event_log.Write(
+ os.path.abspath(os.path.expanduser(gopts.event_log))
+ )
+
+ git_trace2_event_log.Write(gopts.git_trace2_event_log)
+ return result
def _CheckWrapperVersion(ver_str, repo_path):
- """Verify the repo launcher is new enough for this checkout.
+ """Verify the repo launcher is new enough for this checkout.
- Args:
- ver_str: The version string passed from the repo launcher when it ran us.
- repo_path: The path to the repo launcher that loaded us.
- """
- # Refuse to work with really old wrapper versions. We don't test these,
- # so might as well require a somewhat recent sane version.
- # v1.15 of the repo launcher was released in ~Mar 2012.
- MIN_REPO_VERSION = (1, 15)
- min_str = '.'.join(str(x) for x in MIN_REPO_VERSION)
+ Args:
+ ver_str: The version string passed from the repo launcher when it ran
+ us.
+ repo_path: The path to the repo launcher that loaded us.
+ """
+ # Refuse to work with really old wrapper versions. We don't test these,
+ # so might as well require a somewhat recent sane version.
+ # v1.15 of the repo launcher was released in ~Mar 2012.
+ MIN_REPO_VERSION = (1, 15)
+ min_str = ".".join(str(x) for x in MIN_REPO_VERSION)
- if not repo_path:
- repo_path = '~/bin/repo'
+ if not repo_path:
+ repo_path = "~/bin/repo"
- if not ver_str:
- print('no --wrapper-version argument', file=sys.stderr)
- sys.exit(1)
+ if not ver_str:
+ print("no --wrapper-version argument", file=sys.stderr)
+ sys.exit(1)
- # Pull out the version of the repo launcher we know about to compare.
- exp = Wrapper().VERSION
- ver = tuple(map(int, ver_str.split('.')))
+ # Pull out the version of the repo launcher we know about to compare.
+ exp = Wrapper().VERSION
+ ver = tuple(map(int, ver_str.split(".")))
- exp_str = '.'.join(map(str, exp))
- if ver < MIN_REPO_VERSION:
- print("""
+ exp_str = ".".join(map(str, exp))
+ if ver < MIN_REPO_VERSION:
+ print(
+ """
repo: error:
!!! Your version of repo %s is too old.
!!! We need at least version %s.
@@ -422,284 +517,321 @@
!!! You must upgrade before you can continue:
cp %s %s
-""" % (ver_str, min_str, exp_str, WrapperPath(), repo_path), file=sys.stderr)
- sys.exit(1)
+"""
+ % (ver_str, min_str, exp_str, WrapperPath(), repo_path),
+ file=sys.stderr,
+ )
+ sys.exit(1)
- if exp > ver:
- print('\n... A new version of repo (%s) is available.' % (exp_str,),
- file=sys.stderr)
- if os.access(repo_path, os.W_OK):
- print("""\
+ if exp > ver:
+ print(
+ "\n... A new version of repo (%s) is available." % (exp_str,),
+ file=sys.stderr,
+ )
+ if os.access(repo_path, os.W_OK):
+ print(
+ """\
... You should upgrade soon:
cp %s %s
-""" % (WrapperPath(), repo_path), file=sys.stderr)
- else:
- print("""\
+"""
+ % (WrapperPath(), repo_path),
+ file=sys.stderr,
+ )
+ else:
+ print(
+ """\
... New version is available at: %s
... The launcher is run from: %s
!!! The launcher is not writable. Please talk to your sysadmin or distro
!!! to get an update installed.
-""" % (WrapperPath(), repo_path), file=sys.stderr)
+"""
+ % (WrapperPath(), repo_path),
+ file=sys.stderr,
+ )
def _CheckRepoDir(repo_dir):
- if not repo_dir:
- print('no --repo-dir argument', file=sys.stderr)
- sys.exit(1)
+ if not repo_dir:
+ print("no --repo-dir argument", file=sys.stderr)
+ sys.exit(1)
def _PruneOptions(argv, opt):
- i = 0
- while i < len(argv):
- a = argv[i]
- if a == '--':
- break
- if a.startswith('--'):
- eq = a.find('=')
- if eq > 0:
- a = a[0:eq]
- if not opt.has_option(a):
- del argv[i]
- continue
- i += 1
+ i = 0
+ while i < len(argv):
+ a = argv[i]
+ if a == "--":
+ break
+ if a.startswith("--"):
+ eq = a.find("=")
+ if eq > 0:
+ a = a[0:eq]
+ if not opt.has_option(a):
+ del argv[i]
+ continue
+ i += 1
class _UserAgentHandler(urllib.request.BaseHandler):
- def http_request(self, req):
- req.add_header('User-Agent', user_agent.repo)
- return req
+ def http_request(self, req):
+ req.add_header("User-Agent", user_agent.repo)
+ return req
- def https_request(self, req):
- req.add_header('User-Agent', user_agent.repo)
- return req
+ def https_request(self, req):
+ req.add_header("User-Agent", user_agent.repo)
+ return req
def _AddPasswordFromUserInput(handler, msg, req):
- # If repo could not find auth info from netrc, try to get it from user input
- url = req.get_full_url()
- user, password = handler.passwd.find_user_password(None, url)
- if user is None:
- print(msg)
- try:
- user = input('User: ')
- password = getpass.getpass()
- except KeyboardInterrupt:
- return
- handler.passwd.add_password(None, url, user, password)
+ # If repo could not find auth info from netrc, try to get it from user input
+ url = req.get_full_url()
+ user, password = handler.passwd.find_user_password(None, url)
+ if user is None:
+ print(msg)
+ try:
+ user = input("User: ")
+ password = getpass.getpass()
+ except KeyboardInterrupt:
+ return
+ handler.passwd.add_password(None, url, user, password)
class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
- def http_error_401(self, req, fp, code, msg, headers):
- _AddPasswordFromUserInput(self, msg, req)
- return urllib.request.HTTPBasicAuthHandler.http_error_401(
- self, req, fp, code, msg, headers)
+ def http_error_401(self, req, fp, code, msg, headers):
+ _AddPasswordFromUserInput(self, msg, req)
+ return urllib.request.HTTPBasicAuthHandler.http_error_401(
+ self, req, fp, code, msg, headers
+ )
- def http_error_auth_reqed(self, authreq, host, req, headers):
- try:
- old_add_header = req.add_header
+ def http_error_auth_reqed(self, authreq, host, req, headers):
+ try:
+ old_add_header = req.add_header
- def _add_header(name, val):
- val = val.replace('\n', '')
- old_add_header(name, val)
- req.add_header = _add_header
- return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
- self, authreq, host, req, headers)
- except Exception:
- reset = getattr(self, 'reset_retry_count', None)
- if reset is not None:
- reset()
- elif getattr(self, 'retried', None):
- self.retried = 0
- raise
+ def _add_header(name, val):
+ val = val.replace("\n", "")
+ old_add_header(name, val)
+
+ req.add_header = _add_header
+ return (
+ urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
+ self, authreq, host, req, headers
+ )
+ )
+ except Exception:
+ reset = getattr(self, "reset_retry_count", None)
+ if reset is not None:
+ reset()
+ elif getattr(self, "retried", None):
+ self.retried = 0
+ raise
class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
- def http_error_401(self, req, fp, code, msg, headers):
- _AddPasswordFromUserInput(self, msg, req)
- return urllib.request.HTTPDigestAuthHandler.http_error_401(
- self, req, fp, code, msg, headers)
+ def http_error_401(self, req, fp, code, msg, headers):
+ _AddPasswordFromUserInput(self, msg, req)
+ return urllib.request.HTTPDigestAuthHandler.http_error_401(
+ self, req, fp, code, msg, headers
+ )
- def http_error_auth_reqed(self, auth_header, host, req, headers):
- try:
- old_add_header = req.add_header
+ def http_error_auth_reqed(self, auth_header, host, req, headers):
+ try:
+ old_add_header = req.add_header
- def _add_header(name, val):
- val = val.replace('\n', '')
- old_add_header(name, val)
- req.add_header = _add_header
- return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
- self, auth_header, host, req, headers)
- except Exception:
- reset = getattr(self, 'reset_retry_count', None)
- if reset is not None:
- reset()
- elif getattr(self, 'retried', None):
- self.retried = 0
- raise
+ def _add_header(name, val):
+ val = val.replace("\n", "")
+ old_add_header(name, val)
+
+ req.add_header = _add_header
+ return (
+ urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
+ self, auth_header, host, req, headers
+ )
+ )
+ except Exception:
+ reset = getattr(self, "reset_retry_count", None)
+ if reset is not None:
+ reset()
+ elif getattr(self, "retried", None):
+ self.retried = 0
+ raise
class _KerberosAuthHandler(urllib.request.BaseHandler):
- def __init__(self):
- self.retried = 0
- self.context = None
- self.handler_order = urllib.request.BaseHandler.handler_order - 50
+ def __init__(self):
+ self.retried = 0
+ self.context = None
+ self.handler_order = urllib.request.BaseHandler.handler_order - 50
- def http_error_401(self, req, fp, code, msg, headers):
- host = req.get_host()
- retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
- return retry
+ def http_error_401(self, req, fp, code, msg, headers):
+ host = req.get_host()
+ retry = self.http_error_auth_reqed(
+ "www-authenticate", host, req, headers
+ )
+ return retry
- def http_error_auth_reqed(self, auth_header, host, req, headers):
- try:
- spn = "HTTP@%s" % host
- authdata = self._negotiate_get_authdata(auth_header, headers)
+ def http_error_auth_reqed(self, auth_header, host, req, headers):
+ try:
+ spn = "HTTP@%s" % host
+ authdata = self._negotiate_get_authdata(auth_header, headers)
- if self.retried > 3:
- raise urllib.request.HTTPError(req.get_full_url(), 401,
- "Negotiate auth failed", headers, None)
- else:
- self.retried += 1
+ if self.retried > 3:
+ raise urllib.request.HTTPError(
+ req.get_full_url(),
+ 401,
+ "Negotiate auth failed",
+ headers,
+ None,
+ )
+ else:
+ self.retried += 1
- neghdr = self._negotiate_get_svctk(spn, authdata)
- if neghdr is None:
+ neghdr = self._negotiate_get_svctk(spn, authdata)
+ if neghdr is None:
+ return None
+
+ req.add_unredirected_header("Authorization", neghdr)
+ response = self.parent.open(req)
+
+ srvauth = self._negotiate_get_authdata(auth_header, response.info())
+ if self._validate_response(srvauth):
+ return response
+ except kerberos.GSSError:
+ return None
+ except Exception:
+ self.reset_retry_count()
+ raise
+ finally:
+ self._clean_context()
+
+ def reset_retry_count(self):
+ self.retried = 0
+
+ def _negotiate_get_authdata(self, auth_header, headers):
+ authhdr = headers.get(auth_header, None)
+ if authhdr is not None:
+ for mech_tuple in authhdr.split(","):
+ mech, __, authdata = mech_tuple.strip().partition(" ")
+ if mech.lower() == "negotiate":
+ return authdata.strip()
return None
- req.add_unredirected_header('Authorization', neghdr)
- response = self.parent.open(req)
+ def _negotiate_get_svctk(self, spn, authdata):
+ if authdata is None:
+ return None
- srvauth = self._negotiate_get_authdata(auth_header, response.info())
- if self._validate_response(srvauth):
- return response
- except kerberos.GSSError:
- return None
- except Exception:
- self.reset_retry_count()
- raise
- finally:
- self._clean_context()
+ result, self.context = kerberos.authGSSClientInit(spn)
+ if result < kerberos.AUTH_GSS_COMPLETE:
+ return None
- def reset_retry_count(self):
- self.retried = 0
+ result = kerberos.authGSSClientStep(self.context, authdata)
+ if result < kerberos.AUTH_GSS_CONTINUE:
+ return None
- def _negotiate_get_authdata(self, auth_header, headers):
- authhdr = headers.get(auth_header, None)
- if authhdr is not None:
- for mech_tuple in authhdr.split(","):
- mech, __, authdata = mech_tuple.strip().partition(" ")
- if mech.lower() == "negotiate":
- return authdata.strip()
- return None
+ response = kerberos.authGSSClientResponse(self.context)
+ return "Negotiate %s" % response
- def _negotiate_get_svctk(self, spn, authdata):
- if authdata is None:
- return None
+ def _validate_response(self, authdata):
+ if authdata is None:
+ return None
+ result = kerberos.authGSSClientStep(self.context, authdata)
+ if result == kerberos.AUTH_GSS_COMPLETE:
+ return True
+ return None
- result, self.context = kerberos.authGSSClientInit(spn)
- if result < kerberos.AUTH_GSS_COMPLETE:
- return None
-
- result = kerberos.authGSSClientStep(self.context, authdata)
- if result < kerberos.AUTH_GSS_CONTINUE:
- return None
-
- response = kerberos.authGSSClientResponse(self.context)
- return "Negotiate %s" % response
-
- def _validate_response(self, authdata):
- if authdata is None:
- return None
- result = kerberos.authGSSClientStep(self.context, authdata)
- if result == kerberos.AUTH_GSS_COMPLETE:
- return True
- return None
-
- def _clean_context(self):
- if self.context is not None:
- kerberos.authGSSClientClean(self.context)
- self.context = None
+ def _clean_context(self):
+ if self.context is not None:
+ kerberos.authGSSClientClean(self.context)
+ self.context = None
def init_http():
- handlers = [_UserAgentHandler()]
+ handlers = [_UserAgentHandler()]
- mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
- try:
- n = netrc.netrc()
- for host in n.hosts:
- p = n.hosts[host]
- mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
- mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
- except netrc.NetrcParseError:
- pass
- except IOError:
- pass
- handlers.append(_BasicAuthHandler(mgr))
- handlers.append(_DigestAuthHandler(mgr))
- if kerberos:
- handlers.append(_KerberosAuthHandler())
+ mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
+ try:
+ n = netrc.netrc()
+ for host in n.hosts:
+ p = n.hosts[host]
+ mgr.add_password(p[1], "http://%s/" % host, p[0], p[2])
+ mgr.add_password(p[1], "https://%s/" % host, p[0], p[2])
+ except netrc.NetrcParseError:
+ pass
+ except IOError:
+ pass
+ handlers.append(_BasicAuthHandler(mgr))
+ handlers.append(_DigestAuthHandler(mgr))
+ if kerberos:
+ handlers.append(_KerberosAuthHandler())
- if 'http_proxy' in os.environ:
- url = os.environ['http_proxy']
- handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
- if 'REPO_CURL_VERBOSE' in os.environ:
- handlers.append(urllib.request.HTTPHandler(debuglevel=1))
- handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
- urllib.request.install_opener(urllib.request.build_opener(*handlers))
+ if "http_proxy" in os.environ:
+ url = os.environ["http_proxy"]
+ handlers.append(
+ urllib.request.ProxyHandler({"http": url, "https": url})
+ )
+ if "REPO_CURL_VERBOSE" in os.environ:
+ handlers.append(urllib.request.HTTPHandler(debuglevel=1))
+ handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
+ urllib.request.install_opener(urllib.request.build_opener(*handlers))
def _Main(argv):
- result = 0
+ result = 0
- opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
- opt.add_option("--repo-dir", dest="repodir",
- help="path to .repo/")
- opt.add_option("--wrapper-version", dest="wrapper_version",
- help="version of the wrapper script")
- opt.add_option("--wrapper-path", dest="wrapper_path",
- help="location of the wrapper script")
- _PruneOptions(argv, opt)
- opt, argv = opt.parse_args(argv)
+ opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
+ opt.add_option("--repo-dir", dest="repodir", help="path to .repo/")
+ opt.add_option(
+ "--wrapper-version",
+ dest="wrapper_version",
+ help="version of the wrapper script",
+ )
+ opt.add_option(
+ "--wrapper-path",
+ dest="wrapper_path",
+ help="location of the wrapper script",
+ )
+ _PruneOptions(argv, opt)
+ opt, argv = opt.parse_args(argv)
- _CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
- _CheckRepoDir(opt.repodir)
+ _CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
+ _CheckRepoDir(opt.repodir)
- Version.wrapper_version = opt.wrapper_version
- Version.wrapper_path = opt.wrapper_path
+ Version.wrapper_version = opt.wrapper_version
+ Version.wrapper_path = opt.wrapper_path
- repo = _Repo(opt.repodir)
+ repo = _Repo(opt.repodir)
- try:
- init_http()
- name, gopts, argv = repo._ParseArgs(argv)
-
- if gopts.trace:
- SetTrace()
-
- if gopts.trace_to_stderr:
- SetTraceToStderr()
-
- result = repo._Run(name, gopts, argv) or 0
- except KeyboardInterrupt:
- print('aborted by user', file=sys.stderr)
- result = 1
- except ManifestParseError as mpe:
- print('fatal: %s' % mpe, file=sys.stderr)
- result = 1
- except RepoChangedException as rce:
- # If repo changed, re-exec ourselves.
- #
- argv = list(sys.argv)
- argv.extend(rce.extra_args)
try:
- os.execv(sys.executable, [__file__] + argv)
- except OSError as e:
- print('fatal: cannot restart repo after upgrade', file=sys.stderr)
- print('fatal: %s' % e, file=sys.stderr)
- result = 128
+ init_http()
+ name, gopts, argv = repo._ParseArgs(argv)
- TerminatePager()
- sys.exit(result)
+ if gopts.trace:
+ SetTrace()
+
+ if gopts.trace_to_stderr:
+ SetTraceToStderr()
+
+ result = repo._Run(name, gopts, argv) or 0
+ except KeyboardInterrupt:
+ print("aborted by user", file=sys.stderr)
+ result = 1
+ except ManifestParseError as mpe:
+ print("fatal: %s" % mpe, file=sys.stderr)
+ result = 1
+ except RepoChangedException as rce:
+ # If repo changed, re-exec ourselves.
+ #
+ argv = list(sys.argv)
+ argv.extend(rce.extra_args)
+ try:
+ os.execv(sys.executable, [__file__] + argv)
+ except OSError as e:
+ print("fatal: cannot restart repo after upgrade", file=sys.stderr)
+ print("fatal: %s" % e, file=sys.stderr)
+ result = 128
+
+ TerminatePager()
+ sys.exit(result)
-if __name__ == '__main__':
- _Main(sys.argv[1:])
+if __name__ == "__main__":
+ _Main(sys.argv[1:])
diff --git a/manifest_xml.py b/manifest_xml.py
index 5b83f36..9603906 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -26,415 +26,452 @@
from git_refs import R_HEADS, HEAD
from git_superproject import Superproject
import platform_utils
-from project import (Annotation, RemoteSpec, Project, RepoProject,
- ManifestProject)
-from error import (ManifestParseError, ManifestInvalidPathError,
- ManifestInvalidRevisionError)
+from project import (
+ Annotation,
+ RemoteSpec,
+ Project,
+ RepoProject,
+ ManifestProject,
+)
+from error import (
+ ManifestParseError,
+ ManifestInvalidPathError,
+ ManifestInvalidRevisionError,
+)
from wrapper import Wrapper
-MANIFEST_FILE_NAME = 'manifest.xml'
-LOCAL_MANIFEST_NAME = 'local_manifest.xml'
-LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
-SUBMANIFEST_DIR = 'submanifests'
+MANIFEST_FILE_NAME = "manifest.xml"
+LOCAL_MANIFEST_NAME = "local_manifest.xml"
+LOCAL_MANIFESTS_DIR_NAME = "local_manifests"
+SUBMANIFEST_DIR = "submanifests"
# Limit submanifests to an arbitrary depth for loop detection.
MAX_SUBMANIFEST_DEPTH = 8
# Add all projects from sub manifest into a group.
-SUBMANIFEST_GROUP_PREFIX = 'submanifest:'
+SUBMANIFEST_GROUP_PREFIX = "submanifest:"
# Add all projects from local manifest into a group.
-LOCAL_MANIFEST_GROUP_PREFIX = 'local:'
+LOCAL_MANIFEST_GROUP_PREFIX = "local:"
# ContactInfo has the self-registered bug url, supplied by the manifest authors.
-ContactInfo = collections.namedtuple('ContactInfo', 'bugurl')
+ContactInfo = collections.namedtuple("ContactInfo", "bugurl")
# urljoin gets confused if the scheme is not known.
-urllib.parse.uses_relative.extend([
- 'ssh',
- 'git',
- 'persistent-https',
- 'sso',
- 'rpc'])
-urllib.parse.uses_netloc.extend([
- 'ssh',
- 'git',
- 'persistent-https',
- 'sso',
- 'rpc'])
+urllib.parse.uses_relative.extend(
+ ["ssh", "git", "persistent-https", "sso", "rpc"]
+)
+urllib.parse.uses_netloc.extend(
+ ["ssh", "git", "persistent-https", "sso", "rpc"]
+)
def XmlBool(node, attr, default=None):
- """Determine boolean value of |node|'s |attr|.
+ """Determine boolean value of |node|'s |attr|.
- Invalid values will issue a non-fatal warning.
+ Invalid values will issue a non-fatal warning.
- Args:
- node: XML node whose attributes we access.
- attr: The attribute to access.
- default: If the attribute is not set (value is empty), then use this.
+ Args:
+ node: XML node whose attributes we access.
+ attr: The attribute to access.
+ default: If the attribute is not set (value is empty), then use this.
- Returns:
- True if the attribute is a valid string representing true.
- False if the attribute is a valid string representing false.
- |default| otherwise.
- """
- value = node.getAttribute(attr)
- s = value.lower()
- if s == '':
- return default
- elif s in {'yes', 'true', '1'}:
- return True
- elif s in {'no', 'false', '0'}:
- return False
- else:
- print('warning: manifest: %s="%s": ignoring invalid XML boolean' %
- (attr, value), file=sys.stderr)
- return default
+ Returns:
+ True if the attribute is a valid string representing true.
+ False if the attribute is a valid string representing false.
+ |default| otherwise.
+ """
+ value = node.getAttribute(attr)
+ s = value.lower()
+ if s == "":
+ return default
+ elif s in {"yes", "true", "1"}:
+ return True
+ elif s in {"no", "false", "0"}:
+ return False
+ else:
+ print(
+ 'warning: manifest: %s="%s": ignoring invalid XML boolean'
+ % (attr, value),
+ file=sys.stderr,
+ )
+ return default
def XmlInt(node, attr, default=None):
- """Determine integer value of |node|'s |attr|.
+ """Determine integer value of |node|'s |attr|.
- Args:
- node: XML node whose attributes we access.
- attr: The attribute to access.
- default: If the attribute is not set (value is empty), then use this.
+ Args:
+ node: XML node whose attributes we access.
+ attr: The attribute to access.
+ default: If the attribute is not set (value is empty), then use this.
- Returns:
- The number if the attribute is a valid number.
+ Returns:
+ The number if the attribute is a valid number.
- Raises:
- ManifestParseError: The number is invalid.
- """
- value = node.getAttribute(attr)
- if not value:
- return default
+ Raises:
+ ManifestParseError: The number is invalid.
+ """
+ value = node.getAttribute(attr)
+ if not value:
+ return default
- try:
- return int(value)
- except ValueError:
- raise ManifestParseError('manifest: invalid %s="%s" integer' %
- (attr, value))
+ try:
+ return int(value)
+ except ValueError:
+ raise ManifestParseError(
+ 'manifest: invalid %s="%s" integer' % (attr, value)
+ )
class _Default(object):
- """Project defaults within the manifest."""
+ """Project defaults within the manifest."""
- revisionExpr = None
- destBranchExpr = None
- upstreamExpr = None
- remote = None
- sync_j = None
- sync_c = False
- sync_s = False
- sync_tags = True
+ revisionExpr = None
+ destBranchExpr = None
+ upstreamExpr = None
+ remote = None
+ sync_j = None
+ sync_c = False
+ sync_s = False
+ sync_tags = True
- def __eq__(self, other):
- if not isinstance(other, _Default):
- return False
- return self.__dict__ == other.__dict__
+ def __eq__(self, other):
+ if not isinstance(other, _Default):
+ return False
+ return self.__dict__ == other.__dict__
- def __ne__(self, other):
- if not isinstance(other, _Default):
- return True
- return self.__dict__ != other.__dict__
+ def __ne__(self, other):
+ if not isinstance(other, _Default):
+ return True
+ return self.__dict__ != other.__dict__
class _XmlRemote(object):
- def __init__(self,
- name,
- alias=None,
- fetch=None,
- pushUrl=None,
- manifestUrl=None,
- review=None,
- revision=None):
- self.name = name
- self.fetchUrl = fetch
- self.pushUrl = pushUrl
- self.manifestUrl = manifestUrl
- self.remoteAlias = alias
- self.reviewUrl = review
- self.revision = revision
- self.resolvedFetchUrl = self._resolveFetchUrl()
- self.annotations = []
+ def __init__(
+ self,
+ name,
+ alias=None,
+ fetch=None,
+ pushUrl=None,
+ manifestUrl=None,
+ review=None,
+ revision=None,
+ ):
+ self.name = name
+ self.fetchUrl = fetch
+ self.pushUrl = pushUrl
+ self.manifestUrl = manifestUrl
+ self.remoteAlias = alias
+ self.reviewUrl = review
+ self.revision = revision
+ self.resolvedFetchUrl = self._resolveFetchUrl()
+ self.annotations = []
- def __eq__(self, other):
- if not isinstance(other, _XmlRemote):
- return False
- return (sorted(self.annotations) == sorted(other.annotations) and
- self.name == other.name and self.fetchUrl == other.fetchUrl and
- self.pushUrl == other.pushUrl and self.remoteAlias == other.remoteAlias
- and self.reviewUrl == other.reviewUrl and self.revision == other.revision)
+ def __eq__(self, other):
+ if not isinstance(other, _XmlRemote):
+ return False
+ return (
+ sorted(self.annotations) == sorted(other.annotations)
+ and self.name == other.name
+ and self.fetchUrl == other.fetchUrl
+ and self.pushUrl == other.pushUrl
+ and self.remoteAlias == other.remoteAlias
+ and self.reviewUrl == other.reviewUrl
+ and self.revision == other.revision
+ )
- def __ne__(self, other):
- return not self.__eq__(other)
+ def __ne__(self, other):
+ return not self.__eq__(other)
- def _resolveFetchUrl(self):
- if self.fetchUrl is None:
- return ''
- url = self.fetchUrl.rstrip('/')
- manifestUrl = self.manifestUrl.rstrip('/')
- # urljoin will gets confused over quite a few things. The ones we care
- # about here are:
- # * no scheme in the base url, like <hostname:port>
- # We handle no scheme by replacing it with an obscure protocol, gopher
- # and then replacing it with the original when we are done.
+ def _resolveFetchUrl(self):
+ if self.fetchUrl is None:
+ return ""
+ url = self.fetchUrl.rstrip("/")
+ manifestUrl = self.manifestUrl.rstrip("/")
+        # urljoin will get confused over quite a few things. The ones we care
+ # about here are:
+ # * no scheme in the base url, like <hostname:port>
+ # We handle no scheme by replacing it with an obscure protocol, gopher
+ # and then replacing it with the original when we are done.
- if manifestUrl.find(':') != manifestUrl.find('/') - 1:
- url = urllib.parse.urljoin('gopher://' + manifestUrl, url)
- url = re.sub(r'^gopher://', '', url)
- else:
- url = urllib.parse.urljoin(manifestUrl, url)
- return url
+ if manifestUrl.find(":") != manifestUrl.find("/") - 1:
+ url = urllib.parse.urljoin("gopher://" + manifestUrl, url)
+ url = re.sub(r"^gopher://", "", url)
+ else:
+ url = urllib.parse.urljoin(manifestUrl, url)
+ return url
- def ToRemoteSpec(self, projectName):
- fetchUrl = self.resolvedFetchUrl.rstrip('/')
- url = fetchUrl + '/' + projectName
- remoteName = self.name
- if self.remoteAlias:
- remoteName = self.remoteAlias
- return RemoteSpec(remoteName,
- url=url,
- pushUrl=self.pushUrl,
- review=self.reviewUrl,
- orig_name=self.name,
- fetchUrl=self.fetchUrl)
+ def ToRemoteSpec(self, projectName):
+ fetchUrl = self.resolvedFetchUrl.rstrip("/")
+ url = fetchUrl + "/" + projectName
+ remoteName = self.name
+ if self.remoteAlias:
+ remoteName = self.remoteAlias
+ return RemoteSpec(
+ remoteName,
+ url=url,
+ pushUrl=self.pushUrl,
+ review=self.reviewUrl,
+ orig_name=self.name,
+ fetchUrl=self.fetchUrl,
+ )
- def AddAnnotation(self, name, value, keep):
- self.annotations.append(Annotation(name, value, keep))
+ def AddAnnotation(self, name, value, keep):
+ self.annotations.append(Annotation(name, value, keep))
class _XmlSubmanifest:
- """Manage the <submanifest> element specified in the manifest.
+ """Manage the <submanifest> element specified in the manifest.
- Attributes:
- name: a string, the name for this submanifest.
- remote: a string, the remote.name for this submanifest.
- project: a string, the name of the manifest project.
- revision: a string, the commitish.
- manifestName: a string, the submanifest file name.
- groups: a list of strings, the groups to add to all projects in the submanifest.
- default_groups: a list of strings, the default groups to sync.
- path: a string, the relative path for the submanifest checkout.
- parent: an XmlManifest, the parent manifest.
- annotations: (derived) a list of annotations.
- present: (derived) a boolean, whether the sub manifest file is present.
- """
- def __init__(self,
- name,
- remote=None,
- project=None,
- revision=None,
- manifestName=None,
- groups=None,
- default_groups=None,
- path=None,
- parent=None):
- self.name = name
- self.remote = remote
- self.project = project
- self.revision = revision
- self.manifestName = manifestName
- self.groups = groups
- self.default_groups = default_groups
- self.path = path
- self.parent = parent
- self.annotations = []
- outer_client = parent._outer_client or parent
- if self.remote and not self.project:
- raise ManifestParseError(
- f'Submanifest {name}: must specify project when remote is given.')
- # Construct the absolute path to the manifest file using the parent's
- # method, so that we can correctly create our repo_client.
- manifestFile = parent.SubmanifestInfoDir(
- os.path.join(parent.path_prefix, self.relpath),
- os.path.join('manifests', manifestName or 'default.xml'))
- linkFile = parent.SubmanifestInfoDir(
- os.path.join(parent.path_prefix, self.relpath), MANIFEST_FILE_NAME)
- rc = self.repo_client = RepoClient(
- parent.repodir, linkFile, parent_groups=','.join(groups) or '',
- submanifest_path=self.relpath, outer_client=outer_client,
- default_groups=default_groups)
+ Attributes:
+ name: a string, the name for this submanifest.
+ remote: a string, the remote.name for this submanifest.
+ project: a string, the name of the manifest project.
+ revision: a string, the commitish.
+ manifestName: a string, the submanifest file name.
+ groups: a list of strings, the groups to add to all projects in the
+ submanifest.
+ default_groups: a list of strings, the default groups to sync.
+ path: a string, the relative path for the submanifest checkout.
+ parent: an XmlManifest, the parent manifest.
+ annotations: (derived) a list of annotations.
+ present: (derived) a boolean, whether the sub manifest file is present.
+ """
- self.present = os.path.exists(manifestFile)
+ def __init__(
+ self,
+ name,
+ remote=None,
+ project=None,
+ revision=None,
+ manifestName=None,
+ groups=None,
+ default_groups=None,
+ path=None,
+ parent=None,
+ ):
+ self.name = name
+ self.remote = remote
+ self.project = project
+ self.revision = revision
+ self.manifestName = manifestName
+ self.groups = groups
+ self.default_groups = default_groups
+ self.path = path
+ self.parent = parent
+ self.annotations = []
+ outer_client = parent._outer_client or parent
+ if self.remote and not self.project:
+ raise ManifestParseError(
+ f"Submanifest {name}: must specify project when remote is "
+ "given."
+ )
+ # Construct the absolute path to the manifest file using the parent's
+ # method, so that we can correctly create our repo_client.
+ manifestFile = parent.SubmanifestInfoDir(
+ os.path.join(parent.path_prefix, self.relpath),
+ os.path.join("manifests", manifestName or "default.xml"),
+ )
+ linkFile = parent.SubmanifestInfoDir(
+ os.path.join(parent.path_prefix, self.relpath), MANIFEST_FILE_NAME
+ )
+ self.repo_client = RepoClient(
+ parent.repodir,
+ linkFile,
+ parent_groups=",".join(groups) or "",
+ submanifest_path=self.relpath,
+ outer_client=outer_client,
+ default_groups=default_groups,
+ )
- def __eq__(self, other):
- if not isinstance(other, _XmlSubmanifest):
- return False
- return (
- self.name == other.name and
- self.remote == other.remote and
- self.project == other.project and
- self.revision == other.revision and
- self.manifestName == other.manifestName and
- self.groups == other.groups and
- self.default_groups == other.default_groups and
- self.path == other.path and
- sorted(self.annotations) == sorted(other.annotations))
+ self.present = os.path.exists(manifestFile)
- def __ne__(self, other):
- return not self.__eq__(other)
+ def __eq__(self, other):
+ if not isinstance(other, _XmlSubmanifest):
+ return False
+ return (
+ self.name == other.name
+ and self.remote == other.remote
+ and self.project == other.project
+ and self.revision == other.revision
+ and self.manifestName == other.manifestName
+ and self.groups == other.groups
+ and self.default_groups == other.default_groups
+ and self.path == other.path
+ and sorted(self.annotations) == sorted(other.annotations)
+ )
- def ToSubmanifestSpec(self):
- """Return a SubmanifestSpec object, populating attributes"""
- mp = self.parent.manifestProject
- remote = self.parent.remotes[self.remote or self.parent.default.remote.name]
- # If a project was given, generate the url from the remote and project.
- # If not, use this manifestProject's url.
- if self.project:
- manifestUrl = remote.ToRemoteSpec(self.project).url
- else:
- manifestUrl = mp.GetRemote().url
- manifestName = self.manifestName or 'default.xml'
- revision = self.revision or self.name
- path = self.path or revision.split('/')[-1]
- groups = self.groups or []
- default_groups = self.default_groups or []
+ def __ne__(self, other):
+ return not self.__eq__(other)
- return SubmanifestSpec(self.name, manifestUrl, manifestName, revision, path,
- groups)
+ def ToSubmanifestSpec(self):
+ """Return a SubmanifestSpec object, populating attributes"""
+ mp = self.parent.manifestProject
+ remote = self.parent.remotes[
+ self.remote or self.parent.default.remote.name
+ ]
+ # If a project was given, generate the url from the remote and project.
+ # If not, use this manifestProject's url.
+ if self.project:
+ manifestUrl = remote.ToRemoteSpec(self.project).url
+ else:
+ manifestUrl = mp.GetRemote().url
+ manifestName = self.manifestName or "default.xml"
+ revision = self.revision or self.name
+ path = self.path or revision.split("/")[-1]
+ groups = self.groups or []
- @property
- def relpath(self):
- """The path of this submanifest relative to the parent manifest."""
- revision = self.revision or self.name
- return self.path or revision.split('/')[-1]
+ return SubmanifestSpec(
+ self.name, manifestUrl, manifestName, revision, path, groups
+ )
- def GetGroupsStr(self):
- """Returns the `groups` given for this submanifest."""
- if self.groups:
- return ','.join(self.groups)
- return ''
+ @property
+ def relpath(self):
+ """The path of this submanifest relative to the parent manifest."""
+ revision = self.revision or self.name
+ return self.path or revision.split("/")[-1]
- def GetDefaultGroupsStr(self):
- """Returns the `default-groups` given for this submanifest."""
- return ','.join(self.default_groups or [])
+ def GetGroupsStr(self):
+ """Returns the `groups` given for this submanifest."""
+ if self.groups:
+ return ",".join(self.groups)
+ return ""
- def AddAnnotation(self, name, value, keep):
- """Add annotations to the submanifest."""
- self.annotations.append(Annotation(name, value, keep))
+ def GetDefaultGroupsStr(self):
+ """Returns the `default-groups` given for this submanifest."""
+ return ",".join(self.default_groups or [])
+
+ def AddAnnotation(self, name, value, keep):
+ """Add annotations to the submanifest."""
+ self.annotations.append(Annotation(name, value, keep))
class SubmanifestSpec:
- """The submanifest element, with all fields expanded."""
+ """The submanifest element, with all fields expanded."""
- def __init__(self,
- name,
- manifestUrl,
- manifestName,
- revision,
- path,
- groups):
- self.name = name
- self.manifestUrl = manifestUrl
- self.manifestName = manifestName
- self.revision = revision
- self.path = path
- self.groups = groups or []
+ def __init__(self, name, manifestUrl, manifestName, revision, path, groups):
+ self.name = name
+ self.manifestUrl = manifestUrl
+ self.manifestName = manifestName
+ self.revision = revision
+ self.path = path
+ self.groups = groups or []
class XmlManifest(object):
- """manages the repo configuration file"""
+ """manages the repo configuration file"""
- def __init__(self, repodir, manifest_file, local_manifests=None,
- outer_client=None, parent_groups='', submanifest_path='',
- default_groups=None):
- """Initialize.
+ def __init__(
+ self,
+ repodir,
+ manifest_file,
+ local_manifests=None,
+ outer_client=None,
+ parent_groups="",
+ submanifest_path="",
+ default_groups=None,
+ ):
+ """Initialize.
- Args:
- repodir: Path to the .repo/ dir for holding all internal checkout state.
- It must be in the top directory of the repo client checkout.
- manifest_file: Full path to the manifest file to parse. This will usually
- be |repodir|/|MANIFEST_FILE_NAME|.
- local_manifests: Full path to the directory of local override manifests.
- This will usually be |repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
- outer_client: RepoClient of the outer manifest.
- parent_groups: a string, the groups to apply to this projects.
- submanifest_path: The submanifest root relative to the repo root.
- default_groups: a string, the default manifest groups to use.
- """
- # TODO(vapier): Move this out of this class.
- self.globalConfig = GitConfig.ForUser()
+ Args:
+ repodir: Path to the .repo/ dir for holding all internal checkout
+ state. It must be in the top directory of the repo client
+ checkout.
+ manifest_file: Full path to the manifest file to parse. This will
+ usually be |repodir|/|MANIFEST_FILE_NAME|.
+ local_manifests: Full path to the directory of local override
+ manifests. This will usually be
+ |repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
+ outer_client: RepoClient of the outer manifest.
+            parent_groups: a string, the groups to apply to these projects.
+ submanifest_path: The submanifest root relative to the repo root.
+ default_groups: a string, the default manifest groups to use.
+ """
+ # TODO(vapier): Move this out of this class.
+ self.globalConfig = GitConfig.ForUser()
- self.repodir = os.path.abspath(repodir)
- self._CheckLocalPath(submanifest_path)
- self.topdir = os.path.dirname(self.repodir)
- if submanifest_path:
- # This avoids a trailing os.path.sep when submanifest_path is empty.
- self.topdir = os.path.join(self.topdir, submanifest_path)
- if manifest_file != os.path.abspath(manifest_file):
- raise ManifestParseError('manifest_file must be abspath')
- self.manifestFile = manifest_file
- if not outer_client or outer_client == self:
- # manifestFileOverrides only exists in the outer_client's manifest, since
- # that is the only instance left when Unload() is called on the outer
- # manifest.
- self.manifestFileOverrides = {}
- self.local_manifests = local_manifests
- self._load_local_manifests = True
- self.parent_groups = parent_groups
- self.default_groups = default_groups
+ self.repodir = os.path.abspath(repodir)
+ self._CheckLocalPath(submanifest_path)
+ self.topdir = os.path.dirname(self.repodir)
+ if submanifest_path:
+ # This avoids a trailing os.path.sep when submanifest_path is empty.
+ self.topdir = os.path.join(self.topdir, submanifest_path)
+ if manifest_file != os.path.abspath(manifest_file):
+ raise ManifestParseError("manifest_file must be abspath")
+ self.manifestFile = manifest_file
+ if not outer_client or outer_client == self:
+ # manifestFileOverrides only exists in the outer_client's manifest,
+ # since that is the only instance left when Unload() is called on
+ # the outer manifest.
+ self.manifestFileOverrides = {}
+ self.local_manifests = local_manifests
+ self._load_local_manifests = True
+ self.parent_groups = parent_groups
+ self.default_groups = default_groups
- if outer_client and self.isGitcClient:
- raise ManifestParseError('Multi-manifest is incompatible with `gitc-init`')
+ if outer_client and self.isGitcClient:
+ raise ManifestParseError(
+ "Multi-manifest is incompatible with `gitc-init`"
+ )
- if submanifest_path and not outer_client:
- # If passing a submanifest_path, there must be an outer_client.
- raise ManifestParseError(f'Bad call to {self.__class__.__name__}')
+ if submanifest_path and not outer_client:
+ # If passing a submanifest_path, there must be an outer_client.
+ raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
- # If self._outer_client is None, this is not a checkout that supports
- # multi-tree.
- self._outer_client = outer_client or self
+ # If self._outer_client is None, this is not a checkout that supports
+ # multi-tree.
+ self._outer_client = outer_client or self
- self.repoProject = RepoProject(self, 'repo',
- gitdir=os.path.join(repodir, 'repo/.git'),
- worktree=os.path.join(repodir, 'repo'))
+ self.repoProject = RepoProject(
+ self,
+ "repo",
+ gitdir=os.path.join(repodir, "repo/.git"),
+ worktree=os.path.join(repodir, "repo"),
+ )
- mp = self.SubmanifestProject(self.path_prefix)
- self.manifestProject = mp
+ mp = self.SubmanifestProject(self.path_prefix)
+ self.manifestProject = mp
- # This is a bit hacky, but we're in a chicken & egg situation: all the
- # normal repo settings live in the manifestProject which we just setup
- # above, so we couldn't easily query before that. We assume Project()
- # init doesn't care if this changes afterwards.
- if os.path.exists(mp.gitdir) and mp.use_worktree:
- mp.use_git_worktrees = True
+ # This is a bit hacky, but we're in a chicken & egg situation: all the
+ # normal repo settings live in the manifestProject which we just setup
+ # above, so we couldn't easily query before that. We assume Project()
+ # init doesn't care if this changes afterwards.
+ if os.path.exists(mp.gitdir) and mp.use_worktree:
+ mp.use_git_worktrees = True
- self.Unload()
+ self.Unload()
- def Override(self, name, load_local_manifests=True):
- """Use a different manifest, just for the current instantiation.
- """
- path = None
+ def Override(self, name, load_local_manifests=True):
+ """Use a different manifest, just for the current instantiation."""
+ path = None
- # Look for a manifest by path in the filesystem (including the cwd).
- if not load_local_manifests:
- local_path = os.path.abspath(name)
- if os.path.isfile(local_path):
- path = local_path
+ # Look for a manifest by path in the filesystem (including the cwd).
+ if not load_local_manifests:
+ local_path = os.path.abspath(name)
+ if os.path.isfile(local_path):
+ path = local_path
- # Look for manifests by name from the manifests repo.
- if path is None:
- path = os.path.join(self.manifestProject.worktree, name)
- if not os.path.isfile(path):
- raise ManifestParseError('manifest %s not found' % name)
+ # Look for manifests by name from the manifests repo.
+ if path is None:
+ path = os.path.join(self.manifestProject.worktree, name)
+ if not os.path.isfile(path):
+ raise ManifestParseError("manifest %s not found" % name)
- self._load_local_manifests = load_local_manifests
- self._outer_client.manifestFileOverrides[self.path_prefix] = path
- self.Unload()
- self._Load()
+ self._load_local_manifests = load_local_manifests
+ self._outer_client.manifestFileOverrides[self.path_prefix] = path
+ self.Unload()
+ self._Load()
- def Link(self, name):
- """Update the repo metadata to use a different manifest.
- """
- self.Override(name)
+ def Link(self, name):
+ """Update the repo metadata to use a different manifest."""
+ self.Override(name)
- # Old versions of repo would generate symlinks we need to clean up.
- platform_utils.remove(self.manifestFile, missing_ok=True)
- # This file is interpreted as if it existed inside the manifest repo.
- # That allows us to use <include> with the relative file name.
- with open(self.manifestFile, 'w') as fp:
- fp.write("""<?xml version="1.0" encoding="UTF-8"?>
+ # Old versions of repo would generate symlinks we need to clean up.
+ platform_utils.remove(self.manifestFile, missing_ok=True)
+ # This file is interpreted as if it existed inside the manifest repo.
+ # That allows us to use <include> with the relative file name.
+ with open(self.manifestFile, "w") as fp:
+ fp.write(
+ """<?xml version="1.0" encoding="UTF-8"?>
<!--
DO NOT EDIT THIS FILE! It is generated by repo and changes will be discarded.
If you want to use a different manifest, use `repo init -m <file>` instead.
@@ -448,1591 +485,1803 @@
<manifest>
<include name="%s" />
</manifest>
-""" % (name,))
+"""
+ % (name,)
+ )
- def _RemoteToXml(self, r, doc, root):
- e = doc.createElement('remote')
- root.appendChild(e)
- e.setAttribute('name', r.name)
- e.setAttribute('fetch', r.fetchUrl)
- if r.pushUrl is not None:
- e.setAttribute('pushurl', r.pushUrl)
- if r.remoteAlias is not None:
- e.setAttribute('alias', r.remoteAlias)
- if r.reviewUrl is not None:
- e.setAttribute('review', r.reviewUrl)
- if r.revision is not None:
- e.setAttribute('revision', r.revision)
+ def _RemoteToXml(self, r, doc, root):
+ e = doc.createElement("remote")
+ root.appendChild(e)
+ e.setAttribute("name", r.name)
+ e.setAttribute("fetch", r.fetchUrl)
+ if r.pushUrl is not None:
+ e.setAttribute("pushurl", r.pushUrl)
+ if r.remoteAlias is not None:
+ e.setAttribute("alias", r.remoteAlias)
+ if r.reviewUrl is not None:
+ e.setAttribute("review", r.reviewUrl)
+ if r.revision is not None:
+ e.setAttribute("revision", r.revision)
- for a in r.annotations:
- if a.keep == 'true':
- ae = doc.createElement('annotation')
- ae.setAttribute('name', a.name)
- ae.setAttribute('value', a.value)
- e.appendChild(ae)
+ for a in r.annotations:
+ if a.keep == "true":
+ ae = doc.createElement("annotation")
+ ae.setAttribute("name", a.name)
+ ae.setAttribute("value", a.value)
+ e.appendChild(ae)
- def _SubmanifestToXml(self, r, doc, root):
- """Generate XML <submanifest/> node."""
- e = doc.createElement('submanifest')
- root.appendChild(e)
- e.setAttribute('name', r.name)
- if r.remote is not None:
- e.setAttribute('remote', r.remote)
- if r.project is not None:
- e.setAttribute('project', r.project)
- if r.manifestName is not None:
- e.setAttribute('manifest-name', r.manifestName)
- if r.revision is not None:
- e.setAttribute('revision', r.revision)
- if r.path is not None:
- e.setAttribute('path', r.path)
- if r.groups:
- e.setAttribute('groups', r.GetGroupsStr())
- if r.default_groups:
- e.setAttribute('default-groups', r.GetDefaultGroupsStr())
+ def _SubmanifestToXml(self, r, doc, root):
+ """Generate XML <submanifest/> node."""
+ e = doc.createElement("submanifest")
+ root.appendChild(e)
+ e.setAttribute("name", r.name)
+ if r.remote is not None:
+ e.setAttribute("remote", r.remote)
+ if r.project is not None:
+ e.setAttribute("project", r.project)
+ if r.manifestName is not None:
+ e.setAttribute("manifest-name", r.manifestName)
+ if r.revision is not None:
+ e.setAttribute("revision", r.revision)
+ if r.path is not None:
+ e.setAttribute("path", r.path)
+ if r.groups:
+ e.setAttribute("groups", r.GetGroupsStr())
+ if r.default_groups:
+ e.setAttribute("default-groups", r.GetDefaultGroupsStr())
- for a in r.annotations:
- if a.keep == 'true':
- ae = doc.createElement('annotation')
- ae.setAttribute('name', a.name)
- ae.setAttribute('value', a.value)
- e.appendChild(ae)
+ for a in r.annotations:
+ if a.keep == "true":
+ ae = doc.createElement("annotation")
+ ae.setAttribute("name", a.name)
+ ae.setAttribute("value", a.value)
+ e.appendChild(ae)
- def _ParseList(self, field):
- """Parse fields that contain flattened lists.
+ def _ParseList(self, field):
+ """Parse fields that contain flattened lists.
- These are whitespace & comma separated. Empty elements will be discarded.
- """
- return [x for x in re.split(r'[,\s]+', field) if x]
+ These are whitespace & comma separated. Empty elements will be
+ discarded.
+ """
+ return [x for x in re.split(r"[,\s]+", field) if x]
- def ToXml(self, peg_rev=False, peg_rev_upstream=True,
- peg_rev_dest_branch=True, groups=None, omit_local=False):
- """Return the current manifest XML."""
- mp = self.manifestProject
+ def ToXml(
+ self,
+ peg_rev=False,
+ peg_rev_upstream=True,
+ peg_rev_dest_branch=True,
+ groups=None,
+ omit_local=False,
+ ):
+ """Return the current manifest XML."""
+ mp = self.manifestProject
- if groups is None:
- groups = mp.manifest_groups
- if groups:
- groups = self._ParseList(groups)
+ if groups is None:
+ groups = mp.manifest_groups
+ if groups:
+ groups = self._ParseList(groups)
- doc = xml.dom.minidom.Document()
- root = doc.createElement('manifest')
- if self.is_submanifest:
- root.setAttribute('path', self.path_prefix)
- doc.appendChild(root)
+ doc = xml.dom.minidom.Document()
+ root = doc.createElement("manifest")
+ if self.is_submanifest:
+ root.setAttribute("path", self.path_prefix)
+ doc.appendChild(root)
- # Save out the notice. There's a little bit of work here to give it the
- # right whitespace, which assumes that the notice is automatically indented
- # by 4 by minidom.
- if self.notice:
- notice_element = root.appendChild(doc.createElement('notice'))
- notice_lines = self.notice.splitlines()
- indented_notice = ('\n'.join(" " * 4 + line for line in notice_lines))[4:]
- notice_element.appendChild(doc.createTextNode(indented_notice))
+ # Save out the notice. There's a little bit of work here to give it the
+ # right whitespace, which assumes that the notice is automatically
+ # indented by 4 by minidom.
+ if self.notice:
+ notice_element = root.appendChild(doc.createElement("notice"))
+ notice_lines = self.notice.splitlines()
+ indented_notice = (
+ "\n".join(" " * 4 + line for line in notice_lines)
+ )[4:]
+ notice_element.appendChild(doc.createTextNode(indented_notice))
- d = self.default
+ d = self.default
- for r in sorted(self.remotes):
- self._RemoteToXml(self.remotes[r], doc, root)
- if self.remotes:
- root.appendChild(doc.createTextNode(''))
+ for r in sorted(self.remotes):
+ self._RemoteToXml(self.remotes[r], doc, root)
+ if self.remotes:
+ root.appendChild(doc.createTextNode(""))
- have_default = False
- e = doc.createElement('default')
- if d.remote:
- have_default = True
- e.setAttribute('remote', d.remote.name)
- if d.revisionExpr:
- have_default = True
- e.setAttribute('revision', d.revisionExpr)
- if d.destBranchExpr:
- have_default = True
- e.setAttribute('dest-branch', d.destBranchExpr)
- if d.upstreamExpr:
- have_default = True
- e.setAttribute('upstream', d.upstreamExpr)
- if d.sync_j is not None:
- have_default = True
- e.setAttribute('sync-j', '%d' % d.sync_j)
- if d.sync_c:
- have_default = True
- e.setAttribute('sync-c', 'true')
- if d.sync_s:
- have_default = True
- e.setAttribute('sync-s', 'true')
- if not d.sync_tags:
- have_default = True
- e.setAttribute('sync-tags', 'false')
- if have_default:
- root.appendChild(e)
- root.appendChild(doc.createTextNode(''))
+ have_default = False
+ e = doc.createElement("default")
+ if d.remote:
+ have_default = True
+ e.setAttribute("remote", d.remote.name)
+ if d.revisionExpr:
+ have_default = True
+ e.setAttribute("revision", d.revisionExpr)
+ if d.destBranchExpr:
+ have_default = True
+ e.setAttribute("dest-branch", d.destBranchExpr)
+ if d.upstreamExpr:
+ have_default = True
+ e.setAttribute("upstream", d.upstreamExpr)
+ if d.sync_j is not None:
+ have_default = True
+ e.setAttribute("sync-j", "%d" % d.sync_j)
+ if d.sync_c:
+ have_default = True
+ e.setAttribute("sync-c", "true")
+ if d.sync_s:
+ have_default = True
+ e.setAttribute("sync-s", "true")
+ if not d.sync_tags:
+ have_default = True
+ e.setAttribute("sync-tags", "false")
+ if have_default:
+ root.appendChild(e)
+ root.appendChild(doc.createTextNode(""))
- if self._manifest_server:
- e = doc.createElement('manifest-server')
- e.setAttribute('url', self._manifest_server)
- root.appendChild(e)
- root.appendChild(doc.createTextNode(''))
+ if self._manifest_server:
+ e = doc.createElement("manifest-server")
+ e.setAttribute("url", self._manifest_server)
+ root.appendChild(e)
+ root.appendChild(doc.createTextNode(""))
- for r in sorted(self.submanifests):
- self._SubmanifestToXml(self.submanifests[r], doc, root)
- if self.submanifests:
- root.appendChild(doc.createTextNode(''))
+ for r in sorted(self.submanifests):
+ self._SubmanifestToXml(self.submanifests[r], doc, root)
+ if self.submanifests:
+ root.appendChild(doc.createTextNode(""))
- def output_projects(parent, parent_node, projects):
- for project_name in projects:
- for project in self._projects[project_name]:
- output_project(parent, parent_node, project)
+ def output_projects(parent, parent_node, projects):
+ for project_name in projects:
+ for project in self._projects[project_name]:
+ output_project(parent, parent_node, project)
- def output_project(parent, parent_node, p):
- if not p.MatchesGroups(groups):
- return
+ def output_project(parent, parent_node, p):
+ if not p.MatchesGroups(groups):
+ return
- if omit_local and self.IsFromLocalManifest(p):
- return
+ if omit_local and self.IsFromLocalManifest(p):
+ return
- name = p.name
- relpath = p.relpath
- if parent:
- name = self._UnjoinName(parent.name, name)
- relpath = self._UnjoinRelpath(parent.relpath, relpath)
+ name = p.name
+ relpath = p.relpath
+ if parent:
+ name = self._UnjoinName(parent.name, name)
+ relpath = self._UnjoinRelpath(parent.relpath, relpath)
- e = doc.createElement('project')
- parent_node.appendChild(e)
- e.setAttribute('name', name)
- if relpath != name:
- e.setAttribute('path', relpath)
- remoteName = None
- if d.remote:
- remoteName = d.remote.name
- if not d.remote or p.remote.orig_name != remoteName:
- remoteName = p.remote.orig_name
- e.setAttribute('remote', remoteName)
- if peg_rev:
- if self.IsMirror:
- value = p.bare_git.rev_parse(p.revisionExpr + '^0')
+ e = doc.createElement("project")
+ parent_node.appendChild(e)
+ e.setAttribute("name", name)
+ if relpath != name:
+ e.setAttribute("path", relpath)
+ remoteName = None
+ if d.remote:
+ remoteName = d.remote.name
+ if not d.remote or p.remote.orig_name != remoteName:
+ remoteName = p.remote.orig_name
+ e.setAttribute("remote", remoteName)
+ if peg_rev:
+ if self.IsMirror:
+ value = p.bare_git.rev_parse(p.revisionExpr + "^0")
+ else:
+ value = p.work_git.rev_parse(HEAD + "^0")
+ e.setAttribute("revision", value)
+ if peg_rev_upstream:
+ if p.upstream:
+ e.setAttribute("upstream", p.upstream)
+ elif value != p.revisionExpr:
+ # Only save the origin if the origin is not a sha1, and
+ # the default isn't our value
+ e.setAttribute("upstream", p.revisionExpr)
+
+ if peg_rev_dest_branch:
+ if p.dest_branch:
+ e.setAttribute("dest-branch", p.dest_branch)
+ elif value != p.revisionExpr:
+ e.setAttribute("dest-branch", p.revisionExpr)
+
+ else:
+ revision = (
+ self.remotes[p.remote.orig_name].revision or d.revisionExpr
+ )
+ if not revision or revision != p.revisionExpr:
+ e.setAttribute("revision", p.revisionExpr)
+ elif p.revisionId:
+ e.setAttribute("revision", p.revisionId)
+ if p.upstream and (
+ p.upstream != p.revisionExpr or p.upstream != d.upstreamExpr
+ ):
+ e.setAttribute("upstream", p.upstream)
+
+ if p.dest_branch and p.dest_branch != d.destBranchExpr:
+ e.setAttribute("dest-branch", p.dest_branch)
+
+ for c in p.copyfiles:
+ ce = doc.createElement("copyfile")
+ ce.setAttribute("src", c.src)
+ ce.setAttribute("dest", c.dest)
+ e.appendChild(ce)
+
+ for lf in p.linkfiles:
+ le = doc.createElement("linkfile")
+ le.setAttribute("src", lf.src)
+ le.setAttribute("dest", lf.dest)
+ e.appendChild(le)
+
+ default_groups = ["all", "name:%s" % p.name, "path:%s" % p.relpath]
+ egroups = [g for g in p.groups if g not in default_groups]
+ if egroups:
+ e.setAttribute("groups", ",".join(egroups))
+
+ for a in p.annotations:
+ if a.keep == "true":
+ ae = doc.createElement("annotation")
+ ae.setAttribute("name", a.name)
+ ae.setAttribute("value", a.value)
+ e.appendChild(ae)
+
+ if p.sync_c:
+ e.setAttribute("sync-c", "true")
+
+ if p.sync_s:
+ e.setAttribute("sync-s", "true")
+
+ if not p.sync_tags:
+ e.setAttribute("sync-tags", "false")
+
+ if p.clone_depth:
+ e.setAttribute("clone-depth", str(p.clone_depth))
+
+ self._output_manifest_project_extras(p, e)
+
+ if p.subprojects:
+ subprojects = set(subp.name for subp in p.subprojects)
+ output_projects(p, e, list(sorted(subprojects)))
+
+ projects = set(p.name for p in self._paths.values() if not p.parent)
+ output_projects(None, root, list(sorted(projects)))
+
+ if self._repo_hooks_project:
+ root.appendChild(doc.createTextNode(""))
+ e = doc.createElement("repo-hooks")
+ e.setAttribute("in-project", self._repo_hooks_project.name)
+ e.setAttribute(
+ "enabled-list",
+ " ".join(self._repo_hooks_project.enabled_repo_hooks),
+ )
+ root.appendChild(e)
+
+ if self._superproject:
+ root.appendChild(doc.createTextNode(""))
+ e = doc.createElement("superproject")
+ e.setAttribute("name", self._superproject.name)
+ remoteName = None
+ if d.remote:
+ remoteName = d.remote.name
+ remote = self._superproject.remote
+ if not d.remote or remote.orig_name != remoteName:
+ remoteName = remote.orig_name
+ e.setAttribute("remote", remoteName)
+ revision = remote.revision or d.revisionExpr
+ if not revision or revision != self._superproject.revision:
+ e.setAttribute("revision", self._superproject.revision)
+ root.appendChild(e)
+
+ if self._contactinfo.bugurl != Wrapper().BUG_URL:
+ root.appendChild(doc.createTextNode(""))
+ e = doc.createElement("contactinfo")
+ e.setAttribute("bugurl", self._contactinfo.bugurl)
+ root.appendChild(e)
+
+ return doc
+
+ def ToDict(self, **kwargs):
+ """Return the current manifest as a dictionary."""
+ # Elements that may only appear once.
+ SINGLE_ELEMENTS = {
+ "notice",
+ "default",
+ "manifest-server",
+ "repo-hooks",
+ "superproject",
+ "contactinfo",
+ }
+ # Elements that may be repeated.
+ MULTI_ELEMENTS = {
+ "remote",
+ "remove-project",
+ "project",
+ "extend-project",
+ "include",
+ "submanifest",
+ # These are children of 'project' nodes.
+ "annotation",
+ "project",
+ "copyfile",
+ "linkfile",
+ }
+
+ doc = self.ToXml(**kwargs)
+ ret = {}
+
+ def append_children(ret, node):
+ for child in node.childNodes:
+ if child.nodeType == xml.dom.Node.ELEMENT_NODE:
+ attrs = child.attributes
+ element = dict(
+ (attrs.item(i).localName, attrs.item(i).value)
+ for i in range(attrs.length)
+ )
+ if child.nodeName in SINGLE_ELEMENTS:
+ ret[child.nodeName] = element
+ elif child.nodeName in MULTI_ELEMENTS:
+ ret.setdefault(child.nodeName, []).append(element)
+ else:
+ raise ManifestParseError(
+ 'Unhandled element "%s"' % (child.nodeName,)
+ )
+
+ append_children(element, child)
+
+ append_children(ret, doc.firstChild)
+
+ return ret
+
+ def Save(self, fd, **kwargs):
+ """Write the current manifest out to the given file descriptor."""
+ doc = self.ToXml(**kwargs)
+ doc.writexml(fd, "", " ", "\n", "UTF-8")
+
+ def _output_manifest_project_extras(self, p, e):
+ """Manifests can modify e if they support extra project attributes."""
+
+ @property
+ def is_multimanifest(self):
+ """Whether this is a multimanifest checkout.
+
+ This is safe to use as long as the outermost manifest XML has been
+ parsed.
+ """
+ return bool(self._outer_client._submanifests)
+
+ @property
+ def is_submanifest(self):
+ """Whether this manifest is a submanifest.
+
+ This is safe to use as long as the outermost manifest XML has been
+ parsed.
+ """
+ return self._outer_client and self._outer_client != self
+
+ @property
+ def outer_client(self):
+ """The instance of the outermost manifest client."""
+ self._Load()
+ return self._outer_client
+
+ @property
+ def all_manifests(self):
+ """Generator yielding all (sub)manifests, in depth-first order."""
+ self._Load()
+ outer = self._outer_client
+ yield outer
+ for tree in outer.all_children:
+ yield tree
+
+ @property
+ def all_children(self):
+ """Generator yielding all (present) child submanifests."""
+ self._Load()
+ for child in self._submanifests.values():
+ if child.repo_client:
+ yield child.repo_client
+ for tree in child.repo_client.all_children:
+ yield tree
+
+ @property
+ def path_prefix(self):
+ """The path of this submanifest, relative to the outermost manifest."""
+ if not self._outer_client or self == self._outer_client:
+ return ""
+ return os.path.relpath(self.topdir, self._outer_client.topdir)
+
+ @property
+ def all_paths(self):
+ """All project paths for all (sub)manifests.
+
+ See also `paths`.
+
+ Returns:
+ A dictionary of {path: Project()}. `path` is relative to the outer
+ manifest.
+ """
+ ret = {}
+ for tree in self.all_manifests:
+ prefix = tree.path_prefix
+ ret.update(
+ {os.path.join(prefix, k): v for k, v in tree.paths.items()}
+ )
+ return ret
+
+ @property
+ def all_projects(self):
+ """All projects for all (sub)manifests. See `projects`."""
+ return list(
+ itertools.chain.from_iterable(
+ x._paths.values() for x in self.all_manifests
+ )
+ )
+
+ @property
+ def paths(self):
+ """Return all paths for this manifest.
+
+ Returns:
+ A dictionary of {path: Project()}. `path` is relative to this
+ manifest.
+ """
+ self._Load()
+ return self._paths
+
+ @property
+ def projects(self):
+ """Return a list of all Projects in this manifest."""
+ self._Load()
+ return list(self._paths.values())
+
+ @property
+ def remotes(self):
+ """Return a list of remotes for this manifest."""
+ self._Load()
+ return self._remotes
+
+ @property
+ def default(self):
+ """Return default values for this manifest."""
+ self._Load()
+ return self._default
+
+ @property
+ def submanifests(self):
+ """All submanifests in this manifest."""
+ self._Load()
+ return self._submanifests
+
+ @property
+ def repo_hooks_project(self):
+ self._Load()
+ return self._repo_hooks_project
+
+ @property
+ def superproject(self):
+ self._Load()
+ return self._superproject
+
+ @property
+ def contactinfo(self):
+ self._Load()
+ return self._contactinfo
+
+ @property
+ def notice(self):
+ self._Load()
+ return self._notice
+
+ @property
+ def manifest_server(self):
+ self._Load()
+ return self._manifest_server
+
+ @property
+ def CloneBundle(self):
+ clone_bundle = self.manifestProject.clone_bundle
+ if clone_bundle is None:
+ return False if self.manifestProject.partial_clone else True
else:
- value = p.work_git.rev_parse(HEAD + '^0')
- e.setAttribute('revision', value)
- if peg_rev_upstream:
- if p.upstream:
- e.setAttribute('upstream', p.upstream)
- elif value != p.revisionExpr:
- # Only save the origin if the origin is not a sha1, and the default
- # isn't our value
- e.setAttribute('upstream', p.revisionExpr)
+ return clone_bundle
- if peg_rev_dest_branch:
- if p.dest_branch:
- e.setAttribute('dest-branch', p.dest_branch)
- elif value != p.revisionExpr:
- e.setAttribute('dest-branch', p.revisionExpr)
+ @property
+ def CloneFilter(self):
+ if self.manifestProject.partial_clone:
+ return self.manifestProject.clone_filter
+ return None
- else:
- revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
- if not revision or revision != p.revisionExpr:
- e.setAttribute('revision', p.revisionExpr)
- elif p.revisionId:
- e.setAttribute('revision', p.revisionId)
- if (p.upstream and (p.upstream != p.revisionExpr or
- p.upstream != d.upstreamExpr)):
- e.setAttribute('upstream', p.upstream)
+ @property
+ def PartialCloneExclude(self):
+ exclude = self.manifest.manifestProject.partial_clone_exclude or ""
+ return set(x.strip() for x in exclude.split(","))
- if p.dest_branch and p.dest_branch != d.destBranchExpr:
- e.setAttribute('dest-branch', p.dest_branch)
+ def SetManifestOverride(self, path):
+ """Override manifestFile. The caller must call Unload()"""
+ self._outer_client.manifest.manifestFileOverrides[
+ self.path_prefix
+ ] = path
- for c in p.copyfiles:
- ce = doc.createElement('copyfile')
- ce.setAttribute('src', c.src)
- ce.setAttribute('dest', c.dest)
- e.appendChild(ce)
+ @property
+ def UseLocalManifests(self):
+ return self._load_local_manifests
- for l in p.linkfiles:
- le = doc.createElement('linkfile')
- le.setAttribute('src', l.src)
- le.setAttribute('dest', l.dest)
- e.appendChild(le)
+ def SetUseLocalManifests(self, value):
+ self._load_local_manifests = value
- default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
- egroups = [g for g in p.groups if g not in default_groups]
- if egroups:
- e.setAttribute('groups', ','.join(egroups))
+ @property
+ def HasLocalManifests(self):
+ return self._load_local_manifests and self.local_manifests
- for a in p.annotations:
- if a.keep == "true":
- ae = doc.createElement('annotation')
- ae.setAttribute('name', a.name)
- ae.setAttribute('value', a.value)
- e.appendChild(ae)
+ def IsFromLocalManifest(self, project):
+ """Is the project from a local manifest?"""
+ return any(
+ x.startswith(LOCAL_MANIFEST_GROUP_PREFIX) for x in project.groups
+ )
- if p.sync_c:
- e.setAttribute('sync-c', 'true')
+ @property
+ def IsMirror(self):
+ return self.manifestProject.mirror
- if p.sync_s:
- e.setAttribute('sync-s', 'true')
+ @property
+ def UseGitWorktrees(self):
+ return self.manifestProject.use_worktree
- if not p.sync_tags:
- e.setAttribute('sync-tags', 'false')
+ @property
+ def IsArchive(self):
+ return self.manifestProject.archive
- if p.clone_depth:
- e.setAttribute('clone-depth', str(p.clone_depth))
+ @property
+ def HasSubmodules(self):
+ return self.manifestProject.submodules
- self._output_manifest_project_extras(p, e)
+ @property
+ def EnableGitLfs(self):
+ return self.manifestProject.git_lfs
- if p.subprojects:
- subprojects = set(subp.name for subp in p.subprojects)
- output_projects(p, e, list(sorted(subprojects)))
+ def FindManifestByPath(self, path):
+ """Returns the manifest containing path."""
+ path = os.path.abspath(path)
+ manifest = self._outer_client or self
+ old = None
+ while manifest._submanifests and manifest != old:
+ old = manifest
+ for name in manifest._submanifests:
+ tree = manifest._submanifests[name]
+ if path.startswith(tree.repo_client.manifest.topdir):
+ manifest = tree.repo_client
+ break
+ return manifest
- projects = set(p.name for p in self._paths.values() if not p.parent)
- output_projects(None, root, list(sorted(projects)))
+ @property
+ def subdir(self):
+ """Returns the path for per-submanifest objects for this manifest."""
+ return self.SubmanifestInfoDir(self.path_prefix)
- if self._repo_hooks_project:
- root.appendChild(doc.createTextNode(''))
- e = doc.createElement('repo-hooks')
- e.setAttribute('in-project', self._repo_hooks_project.name)
- e.setAttribute('enabled-list',
- ' '.join(self._repo_hooks_project.enabled_repo_hooks))
- root.appendChild(e)
+ def SubmanifestInfoDir(self, submanifest_path, object_path=""):
+ """Return the path to submanifest-specific info for a submanifest.
- if self._superproject:
- root.appendChild(doc.createTextNode(''))
- e = doc.createElement('superproject')
- e.setAttribute('name', self._superproject.name)
- remoteName = None
- if d.remote:
- remoteName = d.remote.name
- remote = self._superproject.remote
- if not d.remote or remote.orig_name != remoteName:
- remoteName = remote.orig_name
- e.setAttribute('remote', remoteName)
- revision = remote.revision or d.revisionExpr
- if not revision or revision != self._superproject.revision:
- e.setAttribute('revision', self._superproject.revision)
- root.appendChild(e)
+ Return the full path of the directory in which to put per-manifest
+ objects.
- if self._contactinfo.bugurl != Wrapper().BUG_URL:
- root.appendChild(doc.createTextNode(''))
- e = doc.createElement('contactinfo')
- e.setAttribute('bugurl', self._contactinfo.bugurl)
- root.appendChild(e)
+ Args:
+ submanifest_path: a string, the path of the submanifest, relative to
+ the outermost topdir. If empty, then repodir is returned.
+ object_path: a string, relative path to append to the submanifest
+ info directory path.
+ """
+ if submanifest_path:
+ return os.path.join(
+ self.repodir, SUBMANIFEST_DIR, submanifest_path, object_path
+ )
+ else:
+ return os.path.join(self.repodir, object_path)
- return doc
+ def SubmanifestProject(self, submanifest_path):
+ """Return a manifestProject for a submanifest."""
+ subdir = self.SubmanifestInfoDir(submanifest_path)
+ mp = ManifestProject(
+ self,
+ "manifests",
+ gitdir=os.path.join(subdir, "manifests.git"),
+ worktree=os.path.join(subdir, "manifests"),
+ )
+ return mp
- def ToDict(self, **kwargs):
- """Return the current manifest as a dictionary."""
- # Elements that may only appear once.
- SINGLE_ELEMENTS = {
- 'notice',
- 'default',
- 'manifest-server',
- 'repo-hooks',
- 'superproject',
- 'contactinfo',
- }
- # Elements that may be repeated.
- MULTI_ELEMENTS = {
- 'remote',
- 'remove-project',
- 'project',
- 'extend-project',
- 'include',
- 'submanifest',
- # These are children of 'project' nodes.
- 'annotation',
- 'project',
- 'copyfile',
- 'linkfile',
- }
+ def GetDefaultGroupsStr(self, with_platform=True):
+ """Returns the default group string to use.
- doc = self.ToXml(**kwargs)
- ret = {}
+ Args:
+ with_platform: a boolean, whether to include the group for the
+ underlying platform.
+ """
+ groups = ",".join(self.default_groups or ["default"])
+ if with_platform:
+ groups += f",platform-{platform.system().lower()}"
+ return groups
- def append_children(ret, node):
- for child in node.childNodes:
- if child.nodeType == xml.dom.Node.ELEMENT_NODE:
- attrs = child.attributes
- element = dict((attrs.item(i).localName, attrs.item(i).value)
- for i in range(attrs.length))
- if child.nodeName in SINGLE_ELEMENTS:
- ret[child.nodeName] = element
- elif child.nodeName in MULTI_ELEMENTS:
- ret.setdefault(child.nodeName, []).append(element)
- else:
- raise ManifestParseError('Unhandled element "%s"' % (child.nodeName,))
+ def GetGroupsStr(self):
+ """Returns the manifest group string that should be synced."""
+ return (
+ self.manifestProject.manifest_groups or self.GetDefaultGroupsStr()
+ )
- append_children(element, child)
+ def Unload(self):
+ """Unload the manifest.
- append_children(ret, doc.firstChild)
+ If the manifest files have been changed since Load() was called, this
+ will cause the new/updated manifest to be used.
- return ret
+ """
+ self._loaded = False
+ self._projects = {}
+ self._paths = {}
+ self._remotes = {}
+ self._default = None
+ self._submanifests = {}
+ self._repo_hooks_project = None
+ self._superproject = None
+ self._contactinfo = ContactInfo(Wrapper().BUG_URL)
+ self._notice = None
+ self.branch = None
+ self._manifest_server = None
- def Save(self, fd, **kwargs):
- """Write the current manifest out to the given file descriptor."""
- doc = self.ToXml(**kwargs)
- doc.writexml(fd, '', ' ', '\n', 'UTF-8')
+ def Load(self):
+ """Read the manifest into memory."""
+ # Do not expose internal arguments.
+ self._Load()
- def _output_manifest_project_extras(self, p, e):
- """Manifests can modify e if they support extra project attributes."""
+ def _Load(self, initial_client=None, submanifest_depth=0):
+ if submanifest_depth > MAX_SUBMANIFEST_DEPTH:
+ raise ManifestParseError(
+ "maximum submanifest depth %d exceeded." % MAX_SUBMANIFEST_DEPTH
+ )
+ if not self._loaded:
+ if self._outer_client and self._outer_client != self:
+ # This will load all clients.
+ self._outer_client._Load(initial_client=self)
- @property
- def is_multimanifest(self):
- """Whether this is a multimanifest checkout.
+ savedManifestFile = self.manifestFile
+ override = self._outer_client.manifestFileOverrides.get(
+ self.path_prefix
+ )
+ if override:
+ self.manifestFile = override
- This is safe to use as long as the outermost manifest XML has been parsed.
- """
- return bool(self._outer_client._submanifests)
+ try:
+ m = self.manifestProject
+ b = m.GetBranch(m.CurrentBranch).merge
+ if b is not None and b.startswith(R_HEADS):
+ b = b[len(R_HEADS) :]
+ self.branch = b
- @property
- def is_submanifest(self):
- """Whether this manifest is a submanifest.
+ parent_groups = self.parent_groups
+ if self.path_prefix:
+ parent_groups = (
+ f"{SUBMANIFEST_GROUP_PREFIX}:path:"
+ f"{self.path_prefix},{parent_groups}"
+ )
- This is safe to use as long as the outermost manifest XML has been parsed.
- """
- return self._outer_client and self._outer_client != self
+ # The manifestFile was specified by the user which is why we
+ # allow include paths to point anywhere.
+ nodes = []
+ nodes.append(
+ self._ParseManifestXml(
+ self.manifestFile,
+ self.manifestProject.worktree,
+ parent_groups=parent_groups,
+ restrict_includes=False,
+ )
+ )
- @property
- def outer_client(self):
- """The instance of the outermost manifest client."""
- self._Load()
- return self._outer_client
-
- @property
- def all_manifests(self):
- """Generator yielding all (sub)manifests, in depth-first order."""
- self._Load()
- outer = self._outer_client
- yield outer
- for tree in outer.all_children:
- yield tree
-
- @property
- def all_children(self):
- """Generator yielding all (present) child submanifests."""
- self._Load()
- for child in self._submanifests.values():
- if child.repo_client:
- yield child.repo_client
- for tree in child.repo_client.all_children:
- yield tree
-
- @property
- def path_prefix(self):
- """The path of this submanifest, relative to the outermost manifest."""
- if not self._outer_client or self == self._outer_client:
- return ''
- return os.path.relpath(self.topdir, self._outer_client.topdir)
-
- @property
- def all_paths(self):
- """All project paths for all (sub)manifests.
-
- See also `paths`.
-
- Returns:
- A dictionary of {path: Project()}. `path` is relative to the outer
- manifest.
- """
- ret = {}
- for tree in self.all_manifests:
- prefix = tree.path_prefix
- ret.update({os.path.join(prefix, k): v for k, v in tree.paths.items()})
- return ret
-
- @property
- def all_projects(self):
- """All projects for all (sub)manifests. See `projects`."""
- return list(itertools.chain.from_iterable(x._paths.values() for x in self.all_manifests))
-
- @property
- def paths(self):
- """Return all paths for this manifest.
-
- Returns:
- A dictionary of {path: Project()}. `path` is relative to this manifest.
- """
- self._Load()
- return self._paths
-
- @property
- def projects(self):
- """Return a list of all Projects in this manifest."""
- self._Load()
- return list(self._paths.values())
-
- @property
- def remotes(self):
- """Return a list of remotes for this manifest."""
- self._Load()
- return self._remotes
-
- @property
- def default(self):
- """Return default values for this manifest."""
- self._Load()
- return self._default
-
- @property
- def submanifests(self):
- """All submanifests in this manifest."""
- self._Load()
- return self._submanifests
-
- @property
- def repo_hooks_project(self):
- self._Load()
- return self._repo_hooks_project
-
- @property
- def superproject(self):
- self._Load()
- return self._superproject
-
- @property
- def contactinfo(self):
- self._Load()
- return self._contactinfo
-
- @property
- def notice(self):
- self._Load()
- return self._notice
-
- @property
- def manifest_server(self):
- self._Load()
- return self._manifest_server
-
- @property
- def CloneBundle(self):
- clone_bundle = self.manifestProject.clone_bundle
- if clone_bundle is None:
- return False if self.manifestProject.partial_clone else True
- else:
- return clone_bundle
-
- @property
- def CloneFilter(self):
- if self.manifestProject.partial_clone:
- return self.manifestProject.clone_filter
- return None
-
- @property
- def PartialCloneExclude(self):
- exclude = self.manifest.manifestProject.partial_clone_exclude or ''
- return set(x.strip() for x in exclude.split(','))
-
- def SetManifestOverride(self, path):
- """Override manifestFile. The caller must call Unload()"""
- self._outer_client.manifest.manifestFileOverrides[self.path_prefix] = path
-
- @property
- def UseLocalManifests(self):
- return self._load_local_manifests
-
- def SetUseLocalManifests(self, value):
- self._load_local_manifests = value
-
- @property
- def HasLocalManifests(self):
- return self._load_local_manifests and self.local_manifests
-
- def IsFromLocalManifest(self, project):
- """Is the project from a local manifest?"""
- return any(x.startswith(LOCAL_MANIFEST_GROUP_PREFIX)
- for x in project.groups)
-
- @property
- def IsMirror(self):
- return self.manifestProject.mirror
-
- @property
- def UseGitWorktrees(self):
- return self.manifestProject.use_worktree
-
- @property
- def IsArchive(self):
- return self.manifestProject.archive
-
- @property
- def HasSubmodules(self):
- return self.manifestProject.submodules
-
- @property
- def EnableGitLfs(self):
- return self.manifestProject.git_lfs
-
- def FindManifestByPath(self, path):
- """Returns the manifest containing path."""
- path = os.path.abspath(path)
- manifest = self._outer_client or self
- old = None
- while manifest._submanifests and manifest != old:
- old = manifest
- for name in manifest._submanifests:
- tree = manifest._submanifests[name]
- if path.startswith(tree.repo_client.manifest.topdir):
- manifest = tree.repo_client
- break
- return manifest
-
- @property
- def subdir(self):
- """Returns the path for per-submanifest objects for this manifest."""
- return self.SubmanifestInfoDir(self.path_prefix)
-
- def SubmanifestInfoDir(self, submanifest_path, object_path=''):
- """Return the path to submanifest-specific info for a submanifest.
-
- Return the full path of the directory in which to put per-manifest objects.
-
- Args:
- submanifest_path: a string, the path of the submanifest, relative to the
- outermost topdir. If empty, then repodir is returned.
- object_path: a string, relative path to append to the submanifest info
- directory path.
- """
- if submanifest_path:
- return os.path.join(self.repodir, SUBMANIFEST_DIR, submanifest_path,
- object_path)
- else:
- return os.path.join(self.repodir, object_path)
-
- def SubmanifestProject(self, submanifest_path):
- """Return a manifestProject for a submanifest."""
- subdir = self.SubmanifestInfoDir(submanifest_path)
- mp = ManifestProject(self, 'manifests',
- gitdir=os.path.join(subdir, 'manifests.git'),
- worktree=os.path.join(subdir, 'manifests'))
- return mp
-
- def GetDefaultGroupsStr(self, with_platform=True):
- """Returns the default group string to use.
-
- Args:
- with_platform: a boolean, whether to include the group for the
- underlying platform.
- """
- groups = ','.join(self.default_groups or ['default'])
- if with_platform:
- groups += f',platform-{platform.system().lower()}'
- return groups
+ if self._load_local_manifests and self.local_manifests:
+ try:
+ for local_file in sorted(
+ platform_utils.listdir(self.local_manifests)
+ ):
+ if local_file.endswith(".xml"):
+ local = os.path.join(
+ self.local_manifests, local_file
+ )
+ # Since local manifests are entirely managed by
+ # the user, allow them to point anywhere the
+ # user wants.
+ local_group = (
+ f"{LOCAL_MANIFEST_GROUP_PREFIX}:"
+ f"{local_file[:-4]}"
+ )
+ nodes.append(
+ self._ParseManifestXml(
+ local,
+ self.subdir,
+ parent_groups=(
+ f"{local_group},{parent_groups}"
+ ),
+ restrict_includes=False,
+ )
+ )
+ except OSError:
+ pass
- def GetGroupsStr(self):
- """Returns the manifest group string that should be synced."""
- return self.manifestProject.manifest_groups or self.GetDefaultGroupsStr()
+ try:
+ self._ParseManifest(nodes)
+ except ManifestParseError as e:
+ # There was a problem parsing, unload ourselves in case they
+ # catch this error and try again later, we will show the
+ # correct error
+ self.Unload()
+ raise e
- def Unload(self):
- """Unload the manifest.
+ if self.IsMirror:
+ self._AddMetaProjectMirror(self.repoProject)
+ self._AddMetaProjectMirror(self.manifestProject)
- If the manifest files have been changed since Load() was called, this will
- cause the new/updated manifest to be used.
+ self._loaded = True
+ finally:
+ if override:
+ self.manifestFile = savedManifestFile
- """
- self._loaded = False
- self._projects = {}
- self._paths = {}
- self._remotes = {}
- self._default = None
- self._submanifests = {}
- self._repo_hooks_project = None
- self._superproject = None
- self._contactinfo = ContactInfo(Wrapper().BUG_URL)
- self._notice = None
- self.branch = None
- self._manifest_server = None
+ # Now that we have loaded this manifest, load any submanifests as
+ # well. We need to do this after self._loaded is set to avoid
+ # looping.
+ for name in self._submanifests:
+ tree = self._submanifests[name]
+ tree.ToSubmanifestSpec()
+ present = os.path.exists(
+ os.path.join(self.subdir, MANIFEST_FILE_NAME)
+ )
+ if present and tree.present and not tree.repo_client:
+ if initial_client and initial_client.topdir == self.topdir:
+ tree.repo_client = self
+ tree.present = present
+ elif not os.path.exists(self.subdir):
+ tree.present = False
+ if present and tree.present:
+ tree.repo_client._Load(
+ initial_client=initial_client,
+ submanifest_depth=submanifest_depth + 1,
+ )
- def Load(self):
- """Read the manifest into memory."""
- # Do not expose internal arguments.
- self._Load()
+ def _ParseManifestXml(
+ self, path, include_root, parent_groups="", restrict_includes=True
+ ):
+ """Parse a manifest XML and return the computed nodes.
- def _Load(self, initial_client=None, submanifest_depth=0):
- if submanifest_depth > MAX_SUBMANIFEST_DEPTH:
- raise ManifestParseError('maximum submanifest depth %d exceeded.' %
- MAX_SUBMANIFEST_DEPTH)
- if not self._loaded:
- if self._outer_client and self._outer_client != self:
- # This will load all clients.
- self._outer_client._Load(initial_client=self)
+ Args:
+ path: The XML file to read & parse.
+ include_root: The path to interpret include "name"s relative to.
+ parent_groups: The groups to apply to this projects.
+ restrict_includes: Whether to constrain the "name" attribute of
+ includes.
- savedManifestFile = self.manifestFile
- override = self._outer_client.manifestFileOverrides.get(self.path_prefix)
- if override:
- self.manifestFile = override
+ Returns:
+ List of XML nodes.
+ """
+ try:
+ root = xml.dom.minidom.parse(path)
+ except (OSError, xml.parsers.expat.ExpatError) as e:
+ raise ManifestParseError(
+ "error parsing manifest %s: %s" % (path, e)
+ )
- try:
- m = self.manifestProject
- b = m.GetBranch(m.CurrentBranch).merge
- if b is not None and b.startswith(R_HEADS):
- b = b[len(R_HEADS):]
- self.branch = b
+ if not root or not root.childNodes:
+ raise ManifestParseError("no root node in %s" % (path,))
- parent_groups = self.parent_groups
- if self.path_prefix:
- parent_groups = f'{SUBMANIFEST_GROUP_PREFIX}:path:{self.path_prefix},{parent_groups}'
+ for manifest in root.childNodes:
+ if manifest.nodeName == "manifest":
+ break
+ else:
+ raise ManifestParseError("no <manifest> in %s" % (path,))
- # The manifestFile was specified by the user which is why we allow include
- # paths to point anywhere.
nodes = []
- nodes.append(self._ParseManifestXml(
- self.manifestFile, self.manifestProject.worktree,
- parent_groups=parent_groups, restrict_includes=False))
+ for node in manifest.childNodes:
+ if node.nodeName == "include":
+ name = self._reqatt(node, "name")
+ if restrict_includes:
+ msg = self._CheckLocalPath(name)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<include> invalid "name": %s: %s' % (name, msg)
+ )
+ include_groups = ""
+ if parent_groups:
+ include_groups = parent_groups
+ if node.hasAttribute("groups"):
+ include_groups = (
+ node.getAttribute("groups") + "," + include_groups
+ )
+ fp = os.path.join(include_root, name)
+ if not os.path.isfile(fp):
+ raise ManifestParseError(
+ "include [%s/]%s doesn't exist or isn't a file"
+ % (include_root, name)
+ )
+ try:
+ nodes.extend(
+ self._ParseManifestXml(fp, include_root, include_groups)
+ )
+ # should isolate this to the exact exception, but that's
+ # tricky. actual parsing implementation may vary.
+ except (
+ KeyboardInterrupt,
+ RuntimeError,
+ SystemExit,
+ ManifestParseError,
+ ):
+ raise
+ except Exception as e:
+ raise ManifestParseError(
+ "failed parsing included manifest %s: %s" % (name, e)
+ )
+ else:
+ if parent_groups and node.nodeName == "project":
+ nodeGroups = parent_groups
+ if node.hasAttribute("groups"):
+ nodeGroups = (
+ node.getAttribute("groups") + "," + nodeGroups
+ )
+ node.setAttribute("groups", nodeGroups)
+ nodes.append(node)
+ return nodes
- if self._load_local_manifests and self.local_manifests:
- try:
- for local_file in sorted(platform_utils.listdir(self.local_manifests)):
- if local_file.endswith('.xml'):
- local = os.path.join(self.local_manifests, local_file)
- # Since local manifests are entirely managed by the user, allow
- # them to point anywhere the user wants.
- local_group = f'{LOCAL_MANIFEST_GROUP_PREFIX}:{local_file[:-4]}'
- nodes.append(self._ParseManifestXml(
- local, self.subdir,
- parent_groups=f'{local_group},{parent_groups}',
- restrict_includes=False))
- except OSError:
- pass
+ def _ParseManifest(self, node_list):
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "remote":
+ remote = self._ParseRemote(node)
+ if remote:
+ if remote.name in self._remotes:
+ if remote != self._remotes[remote.name]:
+ raise ManifestParseError(
+ "remote %s already exists with different "
+ "attributes" % (remote.name)
+ )
+ else:
+ self._remotes[remote.name] = remote
- try:
- self._ParseManifest(nodes)
- except ManifestParseError as e:
- # There was a problem parsing, unload ourselves in case they catch
- # this error and try again later, we will show the correct error
- self.Unload()
- raise e
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "default":
+ new_default = self._ParseDefault(node)
+ emptyDefault = (
+ not node.hasAttributes() and not node.hasChildNodes()
+ )
+ if self._default is None:
+ self._default = new_default
+ elif not emptyDefault and new_default != self._default:
+ raise ManifestParseError(
+ "duplicate default in %s" % (self.manifestFile)
+ )
- if self.IsMirror:
- self._AddMetaProjectMirror(self.repoProject)
- self._AddMetaProjectMirror(self.manifestProject)
-
- self._loaded = True
- finally:
- if override:
- self.manifestFile = savedManifestFile
-
- # Now that we have loaded this manifest, load any submanifests as well.
- # We need to do this after self._loaded is set to avoid looping.
- for name in self._submanifests:
- tree = self._submanifests[name]
- spec = tree.ToSubmanifestSpec()
- present = os.path.exists(os.path.join(self.subdir, MANIFEST_FILE_NAME))
- if present and tree.present and not tree.repo_client:
- if initial_client and initial_client.topdir == self.topdir:
- tree.repo_client = self
- tree.present = present
- elif not os.path.exists(self.subdir):
- tree.present = False
- if present and tree.present:
- tree.repo_client._Load(initial_client=initial_client,
- submanifest_depth=submanifest_depth + 1)
-
- def _ParseManifestXml(self, path, include_root, parent_groups='',
- restrict_includes=True):
- """Parse a manifest XML and return the computed nodes.
-
- Args:
- path: The XML file to read & parse.
- include_root: The path to interpret include "name"s relative to.
- parent_groups: The groups to apply to this projects.
- restrict_includes: Whether to constrain the "name" attribute of includes.
-
- Returns:
- List of XML nodes.
- """
- try:
- root = xml.dom.minidom.parse(path)
- except (OSError, xml.parsers.expat.ExpatError) as e:
- raise ManifestParseError("error parsing manifest %s: %s" % (path, e))
-
- if not root or not root.childNodes:
- raise ManifestParseError("no root node in %s" % (path,))
-
- for manifest in root.childNodes:
- if manifest.nodeName == 'manifest':
- break
- else:
- raise ManifestParseError("no <manifest> in %s" % (path,))
-
- nodes = []
- for node in manifest.childNodes:
- if node.nodeName == 'include':
- name = self._reqatt(node, 'name')
- if restrict_includes:
- msg = self._CheckLocalPath(name)
- if msg:
- raise ManifestInvalidPathError(
- '<include> invalid "name": %s: %s' % (name, msg))
- include_groups = ''
- if parent_groups:
- include_groups = parent_groups
- if node.hasAttribute('groups'):
- include_groups = node.getAttribute('groups') + ',' + include_groups
- fp = os.path.join(include_root, name)
- if not os.path.isfile(fp):
- raise ManifestParseError("include [%s/]%s doesn't exist or isn't a file"
- % (include_root, name))
- try:
- nodes.extend(self._ParseManifestXml(fp, include_root, include_groups))
- # should isolate this to the exact exception, but that's
- # tricky. actual parsing implementation may vary.
- except (KeyboardInterrupt, RuntimeError, SystemExit, ManifestParseError):
- raise
- except Exception as e:
- raise ManifestParseError(
- "failed parsing included manifest %s: %s" % (name, e))
- else:
- if parent_groups and node.nodeName == 'project':
- nodeGroups = parent_groups
- if node.hasAttribute('groups'):
- nodeGroups = node.getAttribute('groups') + ',' + nodeGroups
- node.setAttribute('groups', nodeGroups)
- nodes.append(node)
- return nodes
-
- def _ParseManifest(self, node_list):
- for node in itertools.chain(*node_list):
- if node.nodeName == 'remote':
- remote = self._ParseRemote(node)
- if remote:
- if remote.name in self._remotes:
- if remote != self._remotes[remote.name]:
- raise ManifestParseError(
- 'remote %s already exists with different attributes' %
- (remote.name))
- else:
- self._remotes[remote.name] = remote
-
- for node in itertools.chain(*node_list):
- if node.nodeName == 'default':
- new_default = self._ParseDefault(node)
- emptyDefault = not node.hasAttributes() and not node.hasChildNodes()
if self._default is None:
- self._default = new_default
- elif not emptyDefault and new_default != self._default:
- raise ManifestParseError('duplicate default in %s' %
- (self.manifestFile))
+ self._default = _Default()
- if self._default is None:
- self._default = _Default()
+ submanifest_paths = set()
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "submanifest":
+ submanifest = self._ParseSubmanifest(node)
+ if submanifest:
+ if submanifest.name in self._submanifests:
+ if submanifest != self._submanifests[submanifest.name]:
+ raise ManifestParseError(
+ "submanifest %s already exists with different "
+ "attributes" % (submanifest.name)
+ )
+ else:
+ self._submanifests[submanifest.name] = submanifest
+ submanifest_paths.add(submanifest.relpath)
- submanifest_paths = set()
- for node in itertools.chain(*node_list):
- if node.nodeName == 'submanifest':
- submanifest = self._ParseSubmanifest(node)
- if submanifest:
- if submanifest.name in self._submanifests:
- if submanifest != self._submanifests[submanifest.name]:
- raise ManifestParseError(
- 'submanifest %s already exists with different attributes' %
- (submanifest.name))
- else:
- self._submanifests[submanifest.name] = submanifest
- submanifest_paths.add(submanifest.relpath)
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "notice":
+ if self._notice is not None:
+ raise ManifestParseError(
+ "duplicate notice in %s" % (self.manifestFile)
+ )
+ self._notice = self._ParseNotice(node)
- for node in itertools.chain(*node_list):
- if node.nodeName == 'notice':
- if self._notice is not None:
- raise ManifestParseError(
- 'duplicate notice in %s' %
- (self.manifestFile))
- self._notice = self._ParseNotice(node)
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "manifest-server":
+ url = self._reqatt(node, "url")
+ if self._manifest_server is not None:
+ raise ManifestParseError(
+ "duplicate manifest-server in %s" % (self.manifestFile)
+ )
+ self._manifest_server = url
- for node in itertools.chain(*node_list):
- if node.nodeName == 'manifest-server':
- url = self._reqatt(node, 'url')
- if self._manifest_server is not None:
- raise ManifestParseError(
- 'duplicate manifest-server in %s' %
- (self.manifestFile))
- self._manifest_server = url
+ def recursively_add_projects(project):
+ projects = self._projects.setdefault(project.name, [])
+ if project.relpath is None:
+ raise ManifestParseError(
+ "missing path for %s in %s"
+ % (project.name, self.manifestFile)
+ )
+ if project.relpath in self._paths:
+ raise ManifestParseError(
+ "duplicate path %s in %s"
+ % (project.relpath, self.manifestFile)
+ )
+ for tree in submanifest_paths:
+ if project.relpath.startswith(tree):
+ raise ManifestParseError(
+ "project %s conflicts with submanifest path %s"
+ % (project.relpath, tree)
+ )
+ self._paths[project.relpath] = project
+ projects.append(project)
+ for subproject in project.subprojects:
+ recursively_add_projects(subproject)
- def recursively_add_projects(project):
- projects = self._projects.setdefault(project.name, [])
- if project.relpath is None:
- raise ManifestParseError(
- 'missing path for %s in %s' %
- (project.name, self.manifestFile))
- if project.relpath in self._paths:
- raise ManifestParseError(
- 'duplicate path %s in %s' %
- (project.relpath, self.manifestFile))
- for tree in submanifest_paths:
- if project.relpath.startswith(tree):
- raise ManifestParseError(
- 'project %s conflicts with submanifest path %s' %
- (project.relpath, tree))
- self._paths[project.relpath] = project
- projects.append(project)
- for subproject in project.subprojects:
- recursively_add_projects(subproject)
+ repo_hooks_project = None
+ enabled_repo_hooks = None
+ for node in itertools.chain(*node_list):
+ if node.nodeName == "project":
+ project = self._ParseProject(node)
+ recursively_add_projects(project)
+ if node.nodeName == "extend-project":
+ name = self._reqatt(node, "name")
- repo_hooks_project = None
- enabled_repo_hooks = None
- for node in itertools.chain(*node_list):
- if node.nodeName == 'project':
- project = self._ParseProject(node)
- recursively_add_projects(project)
- if node.nodeName == 'extend-project':
- name = self._reqatt(node, 'name')
+ if name not in self._projects:
+ raise ManifestParseError(
+ "extend-project element specifies non-existent "
+ "project: %s" % name
+ )
+
+ path = node.getAttribute("path")
+ dest_path = node.getAttribute("dest-path")
+ groups = node.getAttribute("groups")
+ if groups:
+ groups = self._ParseList(groups)
+ revision = node.getAttribute("revision")
+ remote_name = node.getAttribute("remote")
+ if not remote_name:
+ remote = self._default.remote
+ else:
+ remote = self._get_remote(node)
+ dest_branch = node.getAttribute("dest-branch")
+ upstream = node.getAttribute("upstream")
+
+ named_projects = self._projects[name]
+ if dest_path and not path and len(named_projects) > 1:
+ raise ManifestParseError(
+ "extend-project cannot use dest-path when "
+ "matching multiple projects: %s" % name
+ )
+ for p in self._projects[name]:
+ if path and p.relpath != path:
+ continue
+ if groups:
+ p.groups.extend(groups)
+ if revision:
+ p.SetRevision(revision)
+
+ if remote_name:
+ p.remote = remote.ToRemoteSpec(name)
+ if dest_branch:
+ p.dest_branch = dest_branch
+ if upstream:
+ p.upstream = upstream
+
+ if dest_path:
+ del self._paths[p.relpath]
+ (
+ relpath,
+ worktree,
+ gitdir,
+ objdir,
+ _,
+ ) = self.GetProjectPaths(name, dest_path, remote.name)
+ p.UpdatePaths(relpath, worktree, gitdir, objdir)
+ self._paths[p.relpath] = p
+
+ if node.nodeName == "repo-hooks":
+ # Only one project can be the hooks project
+ if repo_hooks_project is not None:
+ raise ManifestParseError(
+ "duplicate repo-hooks in %s" % (self.manifestFile)
+ )
+
+ # Get the name of the project and the (space-separated) list of
+ # enabled.
+ repo_hooks_project = self._reqatt(node, "in-project")
+ enabled_repo_hooks = self._ParseList(
+ self._reqatt(node, "enabled-list")
+ )
+ if node.nodeName == "superproject":
+ name = self._reqatt(node, "name")
+ # There can only be one superproject.
+ if self._superproject:
+ raise ManifestParseError(
+ "duplicate superproject in %s" % (self.manifestFile)
+ )
+ remote_name = node.getAttribute("remote")
+ if not remote_name:
+ remote = self._default.remote
+ else:
+ remote = self._get_remote(node)
+ if remote is None:
+ raise ManifestParseError(
+ "no remote for superproject %s within %s"
+ % (name, self.manifestFile)
+ )
+ revision = node.getAttribute("revision") or remote.revision
+ if not revision:
+ revision = self._default.revisionExpr
+ if not revision:
+ raise ManifestParseError(
+ "no revision for superproject %s within %s"
+ % (name, self.manifestFile)
+ )
+ self._superproject = Superproject(
+ self,
+ name=name,
+ remote=remote.ToRemoteSpec(name),
+ revision=revision,
+ )
+ if node.nodeName == "contactinfo":
+ bugurl = self._reqatt(node, "bugurl")
+ # This element can be repeated, later entries will clobber
+ # earlier ones.
+ self._contactinfo = ContactInfo(bugurl)
+
+ if node.nodeName == "remove-project":
+ name = self._reqatt(node, "name")
+
+ if name in self._projects:
+ for p in self._projects[name]:
+ del self._paths[p.relpath]
+ del self._projects[name]
+
+ # If the manifest removes the hooks project, treat it as if
+ # it deleted
+ # the repo-hooks element too.
+ if repo_hooks_project == name:
+ repo_hooks_project = None
+ elif not XmlBool(node, "optional", False):
+ raise ManifestParseError(
+ "remove-project element specifies non-existent "
+ "project: %s" % name
+ )
+
+ # Store repo hooks project information.
+ if repo_hooks_project:
+ # Store a reference to the Project.
+ try:
+ repo_hooks_projects = self._projects[repo_hooks_project]
+ except KeyError:
+ raise ManifestParseError(
+ "project %s not found for repo-hooks" % (repo_hooks_project)
+ )
+
+ if len(repo_hooks_projects) != 1:
+ raise ManifestParseError(
+ "internal error parsing repo-hooks in %s"
+ % (self.manifestFile)
+ )
+ self._repo_hooks_project = repo_hooks_projects[0]
+ # Store the enabled hooks in the Project object.
+ self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
+
+ def _AddMetaProjectMirror(self, m):
+ name = None
+ m_url = m.GetRemote().url
+ if m_url.endswith("/.git"):
+ raise ManifestParseError("refusing to mirror %s" % m_url)
+
+ if self._default and self._default.remote:
+ url = self._default.remote.resolvedFetchUrl
+ if not url.endswith("/"):
+ url += "/"
+ if m_url.startswith(url):
+ remote = self._default.remote
+ name = m_url[len(url) :]
+
+ if name is None:
+ s = m_url.rindex("/") + 1
+ manifestUrl = self.manifestProject.config.GetString(
+ "remote.origin.url"
+ )
+ remote = _XmlRemote(
+ "origin", fetch=m_url[:s], manifestUrl=manifestUrl
+ )
+ name = m_url[s:]
+
+ if name.endswith(".git"):
+ name = name[:-4]
if name not in self._projects:
- raise ManifestParseError('extend-project element specifies non-existent '
- 'project: %s' % name)
+ m.PreSync()
+ gitdir = os.path.join(self.topdir, "%s.git" % name)
+ project = Project(
+ manifest=self,
+ name=name,
+ remote=remote.ToRemoteSpec(name),
+ gitdir=gitdir,
+ objdir=gitdir,
+ worktree=None,
+ relpath=name or None,
+ revisionExpr=m.revisionExpr,
+ revisionId=None,
+ )
+ self._projects[project.name] = [project]
+ self._paths[project.relpath] = project
- path = node.getAttribute('path')
- dest_path = node.getAttribute('dest-path')
- groups = node.getAttribute('groups')
- if groups:
- groups = self._ParseList(groups)
- revision = node.getAttribute('revision')
- remote_name = node.getAttribute('remote')
- if not remote_name:
- remote = self._default.remote
+ def _ParseRemote(self, node):
+ """
+ reads a <remote> element from the manifest file
+ """
+ name = self._reqatt(node, "name")
+ alias = node.getAttribute("alias")
+ if alias == "":
+ alias = None
+ fetch = self._reqatt(node, "fetch")
+ pushUrl = node.getAttribute("pushurl")
+ if pushUrl == "":
+ pushUrl = None
+ review = node.getAttribute("review")
+ if review == "":
+ review = None
+ revision = node.getAttribute("revision")
+ if revision == "":
+ revision = None
+ manifestUrl = self.manifestProject.config.GetString("remote.origin.url")
+
+ remote = _XmlRemote(
+ name, alias, fetch, pushUrl, manifestUrl, review, revision
+ )
+
+ for n in node.childNodes:
+ if n.nodeName == "annotation":
+ self._ParseAnnotation(remote, n)
+
+ return remote
+
+ def _ParseDefault(self, node):
+ """
+ reads a <default> element from the manifest file
+ """
+ d = _Default()
+ d.remote = self._get_remote(node)
+ d.revisionExpr = node.getAttribute("revision")
+ if d.revisionExpr == "":
+ d.revisionExpr = None
+
+ d.destBranchExpr = node.getAttribute("dest-branch") or None
+ d.upstreamExpr = node.getAttribute("upstream") or None
+
+ d.sync_j = XmlInt(node, "sync-j", None)
+ if d.sync_j is not None and d.sync_j <= 0:
+ raise ManifestParseError(
+ '%s: sync-j must be greater than 0, not "%s"'
+ % (self.manifestFile, d.sync_j)
+ )
+
+ d.sync_c = XmlBool(node, "sync-c", False)
+ d.sync_s = XmlBool(node, "sync-s", False)
+ d.sync_tags = XmlBool(node, "sync-tags", True)
+ return d
+
+ def _ParseNotice(self, node):
+ """
+ reads a <notice> element from the manifest file
+
+ The <notice> element is distinct from other tags in the XML in that the
+ data is conveyed between the start and end tag (it's not an
+ empty-element tag).
+
+ The white space (carriage returns, indentation) for the notice element
+ is relevant and is parsed in a way that is based on how python
+ docstrings work. In fact, the code is remarkably similar to here:
+ http://www.python.org/dev/peps/pep-0257/
+ """
+ # Get the data out of the node...
+ notice = node.childNodes[0].data
+
+ # Figure out minimum indentation, skipping the first line (the same line
+ # as the <notice> tag)...
+ minIndent = sys.maxsize
+ lines = notice.splitlines()
+ for line in lines[1:]:
+ lstrippedLine = line.lstrip()
+ if lstrippedLine:
+ indent = len(line) - len(lstrippedLine)
+ minIndent = min(indent, minIndent)
+
+ # Strip leading / trailing blank lines and also indentation.
+ cleanLines = [lines[0].strip()]
+ for line in lines[1:]:
+ cleanLines.append(line[minIndent:].rstrip())
+
+ # Clear completely blank lines from front and back...
+ while cleanLines and not cleanLines[0]:
+ del cleanLines[0]
+ while cleanLines and not cleanLines[-1]:
+ del cleanLines[-1]
+
+ return "\n".join(cleanLines)
+
+ def _ParseSubmanifest(self, node):
+ """Reads a <submanifest> element from the manifest file."""
+ name = self._reqatt(node, "name")
+ remote = node.getAttribute("remote")
+ if remote == "":
+ remote = None
+ project = node.getAttribute("project")
+ if project == "":
+ project = None
+ revision = node.getAttribute("revision")
+ if revision == "":
+ revision = None
+ manifestName = node.getAttribute("manifest-name")
+ if manifestName == "":
+ manifestName = None
+ groups = ""
+ if node.hasAttribute("groups"):
+ groups = node.getAttribute("groups")
+ groups = self._ParseList(groups)
+ default_groups = self._ParseList(node.getAttribute("default-groups"))
+ path = node.getAttribute("path")
+ if path == "":
+ path = None
+ if revision:
+ msg = self._CheckLocalPath(revision.split("/")[-1])
+ if msg:
+ raise ManifestInvalidPathError(
+ '<submanifest> invalid "revision": %s: %s'
+ % (revision, msg)
+ )
+ else:
+ msg = self._CheckLocalPath(name)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<submanifest> invalid "name": %s: %s' % (name, msg)
+ )
else:
- remote = self._get_remote(node)
- dest_branch = node.getAttribute('dest-branch')
- upstream = node.getAttribute('upstream')
+ msg = self._CheckLocalPath(path)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<submanifest> invalid "path": %s: %s' % (path, msg)
+ )
- named_projects = self._projects[name]
- if dest_path and not path and len(named_projects) > 1:
- raise ManifestParseError('extend-project cannot use dest-path when '
- 'matching multiple projects: %s' % name)
- for p in self._projects[name]:
- if path and p.relpath != path:
- continue
- if groups:
- p.groups.extend(groups)
- if revision:
- p.SetRevision(revision)
+ submanifest = _XmlSubmanifest(
+ name,
+ remote,
+ project,
+ revision,
+ manifestName,
+ groups,
+ default_groups,
+ path,
+ self,
+ )
- if remote_name:
- p.remote = remote.ToRemoteSpec(name)
- if dest_branch:
- p.dest_branch = dest_branch
- if upstream:
- p.upstream = upstream
+ for n in node.childNodes:
+ if n.nodeName == "annotation":
+ self._ParseAnnotation(submanifest, n)
- if dest_path:
- del self._paths[p.relpath]
- relpath, worktree, gitdir, objdir, _ = self.GetProjectPaths(
- name, dest_path, remote.name)
- p.UpdatePaths(relpath, worktree, gitdir, objdir)
- self._paths[p.relpath] = p
+ return submanifest
- if node.nodeName == 'repo-hooks':
- # Only one project can be the hooks project
- if repo_hooks_project is not None:
- raise ManifestParseError(
- 'duplicate repo-hooks in %s' %
- (self.manifestFile))
+ def _JoinName(self, parent_name, name):
+ return os.path.join(parent_name, name)
- # Get the name of the project and the (space-separated) list of enabled.
- repo_hooks_project = self._reqatt(node, 'in-project')
- enabled_repo_hooks = self._ParseList(self._reqatt(node, 'enabled-list'))
- if node.nodeName == 'superproject':
- name = self._reqatt(node, 'name')
- # There can only be one superproject.
- if self._superproject:
- raise ManifestParseError(
- 'duplicate superproject in %s' %
- (self.manifestFile))
- remote_name = node.getAttribute('remote')
- if not remote_name:
- remote = self._default.remote
- else:
- remote = self._get_remote(node)
+ def _UnjoinName(self, parent_name, name):
+ return os.path.relpath(name, parent_name)
+
+ def _ParseProject(self, node, parent=None, **extra_proj_attrs):
+ """
+ reads a <project> element from the manifest file
+ """
+ name = self._reqatt(node, "name")
+ msg = self._CheckLocalPath(name, dir_ok=True)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<project> invalid "name": %s: %s' % (name, msg)
+ )
+ if parent:
+ name = self._JoinName(parent.name, name)
+
+ remote = self._get_remote(node)
if remote is None:
- raise ManifestParseError("no remote for superproject %s within %s" %
- (name, self.manifestFile))
- revision = node.getAttribute('revision') or remote.revision
- if not revision:
- revision = self._default.revisionExpr
- if not revision:
- raise ManifestParseError('no revision for superproject %s within %s' %
- (name, self.manifestFile))
- self._superproject = Superproject(self,
- name=name,
- remote=remote.ToRemoteSpec(name),
- revision=revision)
- if node.nodeName == 'contactinfo':
- bugurl = self._reqatt(node, 'bugurl')
- # This element can be repeated, later entries will clobber earlier ones.
- self._contactinfo = ContactInfo(bugurl)
+ remote = self._default.remote
+ if remote is None:
+ raise ManifestParseError(
+ "no remote for project %s within %s" % (name, self.manifestFile)
+ )
- if node.nodeName == 'remove-project':
- name = self._reqatt(node, 'name')
+ revisionExpr = node.getAttribute("revision") or remote.revision
+ if not revisionExpr:
+ revisionExpr = self._default.revisionExpr
+ if not revisionExpr:
+ raise ManifestParseError(
+ "no revision for project %s within %s"
+ % (name, self.manifestFile)
+ )
- if name in self._projects:
- for p in self._projects[name]:
- del self._paths[p.relpath]
- del self._projects[name]
-
- # If the manifest removes the hooks project, treat it as if it deleted
- # the repo-hooks element too.
- if repo_hooks_project == name:
- repo_hooks_project = None
- elif not XmlBool(node, 'optional', False):
- raise ManifestParseError('remove-project element specifies non-existent '
- 'project: %s' % name)
-
- # Store repo hooks project information.
- if repo_hooks_project:
- # Store a reference to the Project.
- try:
- repo_hooks_projects = self._projects[repo_hooks_project]
- except KeyError:
- raise ManifestParseError(
- 'project %s not found for repo-hooks' %
- (repo_hooks_project))
-
- if len(repo_hooks_projects) != 1:
- raise ManifestParseError(
- 'internal error parsing repo-hooks in %s' %
- (self.manifestFile))
- self._repo_hooks_project = repo_hooks_projects[0]
- # Store the enabled hooks in the Project object.
- self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
-
- def _AddMetaProjectMirror(self, m):
- name = None
- m_url = m.GetRemote().url
- if m_url.endswith('/.git'):
- raise ManifestParseError('refusing to mirror %s' % m_url)
-
- if self._default and self._default.remote:
- url = self._default.remote.resolvedFetchUrl
- if not url.endswith('/'):
- url += '/'
- if m_url.startswith(url):
- remote = self._default.remote
- name = m_url[len(url):]
-
- if name is None:
- s = m_url.rindex('/') + 1
- manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
- remote = _XmlRemote('origin', fetch=m_url[:s], manifestUrl=manifestUrl)
- name = m_url[s:]
-
- if name.endswith('.git'):
- name = name[:-4]
-
- if name not in self._projects:
- m.PreSync()
- gitdir = os.path.join(self.topdir, '%s.git' % name)
- project = Project(manifest=self,
- name=name,
- remote=remote.ToRemoteSpec(name),
- gitdir=gitdir,
- objdir=gitdir,
- worktree=None,
- relpath=name or None,
- revisionExpr=m.revisionExpr,
- revisionId=None)
- self._projects[project.name] = [project]
- self._paths[project.relpath] = project
-
- def _ParseRemote(self, node):
- """
- reads a <remote> element from the manifest file
- """
- name = self._reqatt(node, 'name')
- alias = node.getAttribute('alias')
- if alias == '':
- alias = None
- fetch = self._reqatt(node, 'fetch')
- pushUrl = node.getAttribute('pushurl')
- if pushUrl == '':
- pushUrl = None
- review = node.getAttribute('review')
- if review == '':
- review = None
- revision = node.getAttribute('revision')
- if revision == '':
- revision = None
- manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-
- remote = _XmlRemote(name, alias, fetch, pushUrl, manifestUrl, review, revision)
-
- for n in node.childNodes:
- if n.nodeName == 'annotation':
- self._ParseAnnotation(remote, n)
-
- return remote
-
- def _ParseDefault(self, node):
- """
- reads a <default> element from the manifest file
- """
- d = _Default()
- d.remote = self._get_remote(node)
- d.revisionExpr = node.getAttribute('revision')
- if d.revisionExpr == '':
- d.revisionExpr = None
-
- d.destBranchExpr = node.getAttribute('dest-branch') or None
- d.upstreamExpr = node.getAttribute('upstream') or None
-
- d.sync_j = XmlInt(node, 'sync-j', None)
- if d.sync_j is not None and d.sync_j <= 0:
- raise ManifestParseError('%s: sync-j must be greater than 0, not "%s"' %
- (self.manifestFile, d.sync_j))
-
- d.sync_c = XmlBool(node, 'sync-c', False)
- d.sync_s = XmlBool(node, 'sync-s', False)
- d.sync_tags = XmlBool(node, 'sync-tags', True)
- return d
-
- def _ParseNotice(self, node):
- """
- reads a <notice> element from the manifest file
-
- The <notice> element is distinct from other tags in the XML in that the
- data is conveyed between the start and end tag (it's not an empty-element
- tag).
-
- The white space (carriage returns, indentation) for the notice element is
- relevant and is parsed in a way that is based on how python docstrings work.
- In fact, the code is remarkably similar to here:
- http://www.python.org/dev/peps/pep-0257/
- """
- # Get the data out of the node...
- notice = node.childNodes[0].data
-
- # Figure out minimum indentation, skipping the first line (the same line
- # as the <notice> tag)...
- minIndent = sys.maxsize
- lines = notice.splitlines()
- for line in lines[1:]:
- lstrippedLine = line.lstrip()
- if lstrippedLine:
- indent = len(line) - len(lstrippedLine)
- minIndent = min(indent, minIndent)
-
- # Strip leading / trailing blank lines and also indentation.
- cleanLines = [lines[0].strip()]
- for line in lines[1:]:
- cleanLines.append(line[minIndent:].rstrip())
-
- # Clear completely blank lines from front and back...
- while cleanLines and not cleanLines[0]:
- del cleanLines[0]
- while cleanLines and not cleanLines[-1]:
- del cleanLines[-1]
-
- return '\n'.join(cleanLines)
-
- def _ParseSubmanifest(self, node):
- """Reads a <submanifest> element from the manifest file."""
- name = self._reqatt(node, 'name')
- remote = node.getAttribute('remote')
- if remote == '':
- remote = None
- project = node.getAttribute('project')
- if project == '':
- project = None
- revision = node.getAttribute('revision')
- if revision == '':
- revision = None
- manifestName = node.getAttribute('manifest-name')
- if manifestName == '':
- manifestName = None
- groups = ''
- if node.hasAttribute('groups'):
- groups = node.getAttribute('groups')
- groups = self._ParseList(groups)
- default_groups = self._ParseList(node.getAttribute('default-groups'))
- path = node.getAttribute('path')
- if path == '':
- path = None
- if revision:
- msg = self._CheckLocalPath(revision.split('/')[-1])
- if msg:
- raise ManifestInvalidPathError(
- '<submanifest> invalid "revision": %s: %s' % (revision, msg))
- else:
- msg = self._CheckLocalPath(name)
- if msg:
- raise ManifestInvalidPathError(
- '<submanifest> invalid "name": %s: %s' % (name, msg))
- else:
- msg = self._CheckLocalPath(path)
- if msg:
- raise ManifestInvalidPathError(
- '<submanifest> invalid "path": %s: %s' % (path, msg))
-
- submanifest = _XmlSubmanifest(name, remote, project, revision, manifestName,
- groups, default_groups, path, self)
-
- for n in node.childNodes:
- if n.nodeName == 'annotation':
- self._ParseAnnotation(submanifest, n)
-
- return submanifest
-
- def _JoinName(self, parent_name, name):
- return os.path.join(parent_name, name)
-
- def _UnjoinName(self, parent_name, name):
- return os.path.relpath(name, parent_name)
-
- def _ParseProject(self, node, parent=None, **extra_proj_attrs):
- """
- reads a <project> element from the manifest file
- """
- name = self._reqatt(node, 'name')
- msg = self._CheckLocalPath(name, dir_ok=True)
- if msg:
- raise ManifestInvalidPathError(
- '<project> invalid "name": %s: %s' % (name, msg))
- if parent:
- name = self._JoinName(parent.name, name)
-
- remote = self._get_remote(node)
- if remote is None:
- remote = self._default.remote
- if remote is None:
- raise ManifestParseError("no remote for project %s within %s" %
- (name, self.manifestFile))
-
- revisionExpr = node.getAttribute('revision') or remote.revision
- if not revisionExpr:
- revisionExpr = self._default.revisionExpr
- if not revisionExpr:
- raise ManifestParseError("no revision for project %s within %s" %
- (name, self.manifestFile))
-
- path = node.getAttribute('path')
- if not path:
- path = name
- else:
- # NB: The "." project is handled specially in Project.Sync_LocalHalf.
- msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
- if msg:
- raise ManifestInvalidPathError(
- '<project> invalid "path": %s: %s' % (path, msg))
-
- rebase = XmlBool(node, 'rebase', True)
- sync_c = XmlBool(node, 'sync-c', False)
- sync_s = XmlBool(node, 'sync-s', self._default.sync_s)
- sync_tags = XmlBool(node, 'sync-tags', self._default.sync_tags)
-
- clone_depth = XmlInt(node, 'clone-depth')
- if clone_depth is not None and clone_depth <= 0:
- raise ManifestParseError('%s: clone-depth must be greater than 0, not "%s"' %
- (self.manifestFile, clone_depth))
-
- dest_branch = node.getAttribute('dest-branch') or self._default.destBranchExpr
-
- upstream = node.getAttribute('upstream') or self._default.upstreamExpr
-
- groups = ''
- if node.hasAttribute('groups'):
- groups = node.getAttribute('groups')
- groups = self._ParseList(groups)
-
- if parent is None:
- relpath, worktree, gitdir, objdir, use_git_worktrees = \
- self.GetProjectPaths(name, path, remote.name)
- else:
- use_git_worktrees = False
- relpath, worktree, gitdir, objdir = \
- self.GetSubprojectPaths(parent, name, path)
-
- default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
- groups.extend(set(default_groups).difference(groups))
-
- if self.IsMirror and node.hasAttribute('force-path'):
- if XmlBool(node, 'force-path', False):
- gitdir = os.path.join(self.topdir, '%s.git' % path)
-
- project = Project(manifest=self,
- name=name,
- remote=remote.ToRemoteSpec(name),
- gitdir=gitdir,
- objdir=objdir,
- worktree=worktree,
- relpath=relpath,
- revisionExpr=revisionExpr,
- revisionId=None,
- rebase=rebase,
- groups=groups,
- sync_c=sync_c,
- sync_s=sync_s,
- sync_tags=sync_tags,
- clone_depth=clone_depth,
- upstream=upstream,
- parent=parent,
- dest_branch=dest_branch,
- use_git_worktrees=use_git_worktrees,
- **extra_proj_attrs)
-
- for n in node.childNodes:
- if n.nodeName == 'copyfile':
- self._ParseCopyFile(project, n)
- if n.nodeName == 'linkfile':
- self._ParseLinkFile(project, n)
- if n.nodeName == 'annotation':
- self._ParseAnnotation(project, n)
- if n.nodeName == 'project':
- project.subprojects.append(self._ParseProject(n, parent=project))
-
- return project
-
- def GetProjectPaths(self, name, path, remote):
- """Return the paths for a project.
-
- Args:
- name: a string, the name of the project.
- path: a string, the path of the project.
- remote: a string, the remote.name of the project.
-
- Returns:
- A tuple of (relpath, worktree, gitdir, objdir, use_git_worktrees) for the
- project with |name| and |path|.
- """
- # The manifest entries might have trailing slashes. Normalize them to avoid
- # unexpected filesystem behavior since we do string concatenation below.
- path = path.rstrip('/')
- name = name.rstrip('/')
- remote = remote.rstrip('/')
- use_git_worktrees = False
- use_remote_name = self.is_multimanifest
- relpath = path
- if self.IsMirror:
- worktree = None
- gitdir = os.path.join(self.topdir, '%s.git' % name)
- objdir = gitdir
- else:
- if use_remote_name:
- namepath = os.path.join(remote, f'{name}.git')
- else:
- namepath = f'{name}.git'
- worktree = os.path.join(self.topdir, path).replace('\\', '/')
- gitdir = os.path.join(self.subdir, 'projects', '%s.git' % path)
- # We allow people to mix git worktrees & non-git worktrees for now.
- # This allows for in situ migration of repo clients.
- if os.path.exists(gitdir) or not self.UseGitWorktrees:
- objdir = os.path.join(self.repodir, 'project-objects', namepath)
- else:
- use_git_worktrees = True
- gitdir = os.path.join(self.repodir, 'worktrees', namepath)
- objdir = gitdir
- return relpath, worktree, gitdir, objdir, use_git_worktrees
-
- def GetProjectsWithName(self, name, all_manifests=False):
- """All projects with |name|.
-
- Args:
- name: a string, the name of the project.
- all_manifests: a boolean, if True, then all manifests are searched. If
- False, then only this manifest is searched.
-
- Returns:
- A list of Project instances with name |name|.
- """
- if all_manifests:
- return list(itertools.chain.from_iterable(
- x._projects.get(name, []) for x in self.all_manifests))
- return self._projects.get(name, [])
-
- def GetSubprojectName(self, parent, submodule_path):
- return os.path.join(parent.name, submodule_path)
-
- def _JoinRelpath(self, parent_relpath, relpath):
- return os.path.join(parent_relpath, relpath)
-
- def _UnjoinRelpath(self, parent_relpath, relpath):
- return os.path.relpath(relpath, parent_relpath)
-
- def GetSubprojectPaths(self, parent, name, path):
- # The manifest entries might have trailing slashes. Normalize them to avoid
- # unexpected filesystem behavior since we do string concatenation below.
- path = path.rstrip('/')
- name = name.rstrip('/')
- relpath = self._JoinRelpath(parent.relpath, path)
- gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
- objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
- if self.IsMirror:
- worktree = None
- else:
- worktree = os.path.join(parent.worktree, path).replace('\\', '/')
- return relpath, worktree, gitdir, objdir
-
- @staticmethod
- def _CheckLocalPath(path, dir_ok=False, cwd_dot_ok=False):
- """Verify |path| is reasonable for use in filesystem paths.
-
- Used with <copyfile> & <linkfile> & <project> elements.
-
- This only validates the |path| in isolation: it does not check against the
- current filesystem state. Thus it is suitable as a first-past in a parser.
-
- It enforces a number of constraints:
- * No empty paths.
- * No "~" in paths.
- * No Unicode codepoints that filesystems might elide when normalizing.
- * No relative path components like "." or "..".
- * No absolute paths.
- * No ".git" or ".repo*" path components.
-
- Args:
- path: The path name to validate.
- dir_ok: Whether |path| may force a directory (e.g. end in a /).
- cwd_dot_ok: Whether |path| may be just ".".
-
- Returns:
- None if |path| is OK, a failure message otherwise.
- """
- if not path:
- return 'empty paths not allowed'
-
- if '~' in path:
- return '~ not allowed (due to 8.3 filenames on Windows filesystems)'
-
- path_codepoints = set(path)
-
- # Some filesystems (like Apple's HFS+) try to normalize Unicode codepoints
- # which means there are alternative names for ".git". Reject paths with
- # these in it as there shouldn't be any reasonable need for them here.
- # The set of codepoints here was cribbed from jgit's implementation:
- # https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
- BAD_CODEPOINTS = {
- u'\u200C', # ZERO WIDTH NON-JOINER
- u'\u200D', # ZERO WIDTH JOINER
- u'\u200E', # LEFT-TO-RIGHT MARK
- u'\u200F', # RIGHT-TO-LEFT MARK
- u'\u202A', # LEFT-TO-RIGHT EMBEDDING
- u'\u202B', # RIGHT-TO-LEFT EMBEDDING
- u'\u202C', # POP DIRECTIONAL FORMATTING
- u'\u202D', # LEFT-TO-RIGHT OVERRIDE
- u'\u202E', # RIGHT-TO-LEFT OVERRIDE
- u'\u206A', # INHIBIT SYMMETRIC SWAPPING
- u'\u206B', # ACTIVATE SYMMETRIC SWAPPING
- u'\u206C', # INHIBIT ARABIC FORM SHAPING
- u'\u206D', # ACTIVATE ARABIC FORM SHAPING
- u'\u206E', # NATIONAL DIGIT SHAPES
- u'\u206F', # NOMINAL DIGIT SHAPES
- u'\uFEFF', # ZERO WIDTH NO-BREAK SPACE
- }
- if BAD_CODEPOINTS & path_codepoints:
- # This message is more expansive than reality, but should be fine.
- return 'Unicode combining characters not allowed'
-
- # Reject newlines as there shouldn't be any legitmate use for them, they'll
- # be confusing to users, and they can easily break tools that expect to be
- # able to iterate over newline delimited lists. This even applies to our
- # own code like .repo/project.list.
- if {'\r', '\n'} & path_codepoints:
- return 'Newlines not allowed'
-
- # Assume paths might be used on case-insensitive filesystems.
- path = path.lower()
-
- # Split up the path by its components. We can't use os.path.sep exclusively
- # as some platforms (like Windows) will convert / to \ and that bypasses all
- # our constructed logic here. Especially since manifest authors only use
- # / in their paths.
- resep = re.compile(r'[/%s]' % re.escape(os.path.sep))
- # Strip off trailing slashes as those only produce '' elements, and we use
- # parts to look for individual bad components.
- parts = resep.split(path.rstrip('/'))
-
- # Some people use src="." to create stable links to projects. Lets allow
- # that but reject all other uses of "." to keep things simple.
- if not cwd_dot_ok or parts != ['.']:
- for part in set(parts):
- if part in {'.', '..', '.git'} or part.startswith('.repo'):
- return 'bad component: %s' % (part,)
-
- if not dir_ok and resep.match(path[-1]):
- return 'dirs not allowed'
-
- # NB: The two abspath checks here are to handle platforms with multiple
- # filesystem path styles (e.g. Windows).
- norm = os.path.normpath(path)
- if (norm == '..' or
- (len(norm) >= 3 and norm.startswith('..') and resep.match(norm[0])) or
- os.path.isabs(norm) or
- norm.startswith('/')):
- return 'path cannot be outside'
-
- @classmethod
- def _ValidateFilePaths(cls, element, src, dest):
- """Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
-
- We verify the path independent of any filesystem state as we won't have a
- checkout available to compare to. i.e. This is for parsing validation
- purposes only.
-
- We'll do full/live sanity checking before we do the actual filesystem
- modifications in _CopyFile/_LinkFile/etc...
- """
- # |dest| is the file we write to or symlink we create.
- # It is relative to the top of the repo client checkout.
- msg = cls._CheckLocalPath(dest)
- if msg:
- raise ManifestInvalidPathError(
- '<%s> invalid "dest": %s: %s' % (element, dest, msg))
-
- # |src| is the file we read from or path we point to for symlinks.
- # It is relative to the top of the git project checkout.
- is_linkfile = element == 'linkfile'
- msg = cls._CheckLocalPath(src, dir_ok=is_linkfile, cwd_dot_ok=is_linkfile)
- if msg:
- raise ManifestInvalidPathError(
- '<%s> invalid "src": %s: %s' % (element, src, msg))
-
- def _ParseCopyFile(self, project, node):
- src = self._reqatt(node, 'src')
- dest = self._reqatt(node, 'dest')
- if not self.IsMirror:
- # src is project relative;
- # dest is relative to the top of the tree.
- # We only validate paths if we actually plan to process them.
- self._ValidateFilePaths('copyfile', src, dest)
- project.AddCopyFile(src, dest, self.topdir)
-
- def _ParseLinkFile(self, project, node):
- src = self._reqatt(node, 'src')
- dest = self._reqatt(node, 'dest')
- if not self.IsMirror:
- # src is project relative;
- # dest is relative to the top of the tree.
- # We only validate paths if we actually plan to process them.
- self._ValidateFilePaths('linkfile', src, dest)
- project.AddLinkFile(src, dest, self.topdir)
-
- def _ParseAnnotation(self, element, node):
- name = self._reqatt(node, 'name')
- value = self._reqatt(node, 'value')
- try:
- keep = self._reqatt(node, 'keep').lower()
- except ManifestParseError:
- keep = "true"
- if keep != "true" and keep != "false":
- raise ManifestParseError('optional "keep" attribute must be '
- '"true" or "false"')
- element.AddAnnotation(name, value, keep)
-
- def _get_remote(self, node):
- name = node.getAttribute('remote')
- if not name:
- return None
-
- v = self._remotes.get(name)
- if not v:
- raise ManifestParseError("remote %s not defined in %s" %
- (name, self.manifestFile))
- return v
-
- def _reqatt(self, node, attname):
- """
- reads a required attribute from the node.
- """
- v = node.getAttribute(attname)
- if not v:
- raise ManifestParseError("no %s in <%s> within %s" %
- (attname, node.nodeName, self.manifestFile))
- return v
-
- def projectsDiff(self, manifest):
- """return the projects differences between two manifests.
-
- The diff will be from self to given manifest.
-
- """
- fromProjects = self.paths
- toProjects = manifest.paths
-
- fromKeys = sorted(fromProjects.keys())
- toKeys = sorted(toProjects.keys())
-
- diff = {'added': [], 'removed': [], 'missing': [], 'changed': [], 'unreachable': []}
-
- for proj in fromKeys:
- if proj not in toKeys:
- diff['removed'].append(fromProjects[proj])
- elif not fromProjects[proj].Exists:
- diff['missing'].append(toProjects[proj])
- toKeys.remove(proj)
- else:
- fromProj = fromProjects[proj]
- toProj = toProjects[proj]
- try:
- fromRevId = fromProj.GetCommitRevisionId()
- toRevId = toProj.GetCommitRevisionId()
- except ManifestInvalidRevisionError:
- diff['unreachable'].append((fromProj, toProj))
+ path = node.getAttribute("path")
+ if not path:
+ path = name
else:
- if fromRevId != toRevId:
- diff['changed'].append((fromProj, toProj))
- toKeys.remove(proj)
+ # NB: The "." project is handled specially in
+ # Project.Sync_LocalHalf.
+ msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<project> invalid "path": %s: %s' % (path, msg)
+ )
- for proj in toKeys:
- diff['added'].append(toProjects[proj])
+ rebase = XmlBool(node, "rebase", True)
+ sync_c = XmlBool(node, "sync-c", False)
+ sync_s = XmlBool(node, "sync-s", self._default.sync_s)
+ sync_tags = XmlBool(node, "sync-tags", self._default.sync_tags)
- return diff
+ clone_depth = XmlInt(node, "clone-depth")
+ if clone_depth is not None and clone_depth <= 0:
+ raise ManifestParseError(
+ '%s: clone-depth must be greater than 0, not "%s"'
+ % (self.manifestFile, clone_depth)
+ )
+
+ dest_branch = (
+ node.getAttribute("dest-branch") or self._default.destBranchExpr
+ )
+
+ upstream = node.getAttribute("upstream") or self._default.upstreamExpr
+
+ groups = ""
+ if node.hasAttribute("groups"):
+ groups = node.getAttribute("groups")
+ groups = self._ParseList(groups)
+
+ if parent is None:
+ (
+ relpath,
+ worktree,
+ gitdir,
+ objdir,
+ use_git_worktrees,
+ ) = self.GetProjectPaths(name, path, remote.name)
+ else:
+ use_git_worktrees = False
+ relpath, worktree, gitdir, objdir = self.GetSubprojectPaths(
+ parent, name, path
+ )
+
+ default_groups = ["all", "name:%s" % name, "path:%s" % relpath]
+ groups.extend(set(default_groups).difference(groups))
+
+ if self.IsMirror and node.hasAttribute("force-path"):
+ if XmlBool(node, "force-path", False):
+ gitdir = os.path.join(self.topdir, "%s.git" % path)
+
+ project = Project(
+ manifest=self,
+ name=name,
+ remote=remote.ToRemoteSpec(name),
+ gitdir=gitdir,
+ objdir=objdir,
+ worktree=worktree,
+ relpath=relpath,
+ revisionExpr=revisionExpr,
+ revisionId=None,
+ rebase=rebase,
+ groups=groups,
+ sync_c=sync_c,
+ sync_s=sync_s,
+ sync_tags=sync_tags,
+ clone_depth=clone_depth,
+ upstream=upstream,
+ parent=parent,
+ dest_branch=dest_branch,
+ use_git_worktrees=use_git_worktrees,
+ **extra_proj_attrs,
+ )
+
+ for n in node.childNodes:
+ if n.nodeName == "copyfile":
+ self._ParseCopyFile(project, n)
+ if n.nodeName == "linkfile":
+ self._ParseLinkFile(project, n)
+ if n.nodeName == "annotation":
+ self._ParseAnnotation(project, n)
+ if n.nodeName == "project":
+ project.subprojects.append(
+ self._ParseProject(n, parent=project)
+ )
+
+ return project
+
+ def GetProjectPaths(self, name, path, remote):
+ """Return the paths for a project.
+
+ Args:
+ name: a string, the name of the project.
+ path: a string, the path of the project.
+ remote: a string, the remote.name of the project.
+
+ Returns:
+ A tuple of (relpath, worktree, gitdir, objdir, use_git_worktrees)
+ for the project with |name| and |path|.
+ """
+ # The manifest entries might have trailing slashes. Normalize them to
+ # avoid unexpected filesystem behavior since we do string concatenation
+ # below.
+ path = path.rstrip("/")
+ name = name.rstrip("/")
+ remote = remote.rstrip("/")
+ use_git_worktrees = False
+ use_remote_name = self.is_multimanifest
+ relpath = path
+ if self.IsMirror:
+ worktree = None
+ gitdir = os.path.join(self.topdir, "%s.git" % name)
+ objdir = gitdir
+ else:
+ if use_remote_name:
+ namepath = os.path.join(remote, f"{name}.git")
+ else:
+ namepath = f"{name}.git"
+ worktree = os.path.join(self.topdir, path).replace("\\", "/")
+ gitdir = os.path.join(self.subdir, "projects", "%s.git" % path)
+ # We allow people to mix git worktrees & non-git worktrees for now.
+ # This allows for in situ migration of repo clients.
+ if os.path.exists(gitdir) or not self.UseGitWorktrees:
+ objdir = os.path.join(self.repodir, "project-objects", namepath)
+ else:
+ use_git_worktrees = True
+ gitdir = os.path.join(self.repodir, "worktrees", namepath)
+ objdir = gitdir
+ return relpath, worktree, gitdir, objdir, use_git_worktrees
+
+ def GetProjectsWithName(self, name, all_manifests=False):
+ """All projects with |name|.
+
+ Args:
+ name: a string, the name of the project.
+ all_manifests: a boolean, if True, then all manifests are searched.
+ If False, then only this manifest is searched.
+
+ Returns:
+ A list of Project instances with name |name|.
+ """
+ if all_manifests:
+ return list(
+ itertools.chain.from_iterable(
+ x._projects.get(name, []) for x in self.all_manifests
+ )
+ )
+ return self._projects.get(name, [])
+
+ def GetSubprojectName(self, parent, submodule_path):
+ return os.path.join(parent.name, submodule_path)
+
+ def _JoinRelpath(self, parent_relpath, relpath):
+ return os.path.join(parent_relpath, relpath)
+
+ def _UnjoinRelpath(self, parent_relpath, relpath):
+ return os.path.relpath(relpath, parent_relpath)
+
+ def GetSubprojectPaths(self, parent, name, path):
+ # The manifest entries might have trailing slashes. Normalize them to
+ # avoid unexpected filesystem behavior since we do string concatenation
+ # below.
+ path = path.rstrip("/")
+ name = name.rstrip("/")
+ relpath = self._JoinRelpath(parent.relpath, path)
+ gitdir = os.path.join(parent.gitdir, "subprojects", "%s.git" % path)
+ objdir = os.path.join(
+ parent.gitdir, "subproject-objects", "%s.git" % name
+ )
+ if self.IsMirror:
+ worktree = None
+ else:
+ worktree = os.path.join(parent.worktree, path).replace("\\", "/")
+ return relpath, worktree, gitdir, objdir
+
+ @staticmethod
+ def _CheckLocalPath(path, dir_ok=False, cwd_dot_ok=False):
+ """Verify |path| is reasonable for use in filesystem paths.
+
+ Used with <copyfile> & <linkfile> & <project> elements.
+
+ This only validates the |path| in isolation: it does not check against
+        the current filesystem state. Thus it is suitable as a first-pass in a
+ parser.
+
+ It enforces a number of constraints:
+ * No empty paths.
+ * No "~" in paths.
+ * No Unicode codepoints that filesystems might elide when normalizing.
+ * No relative path components like "." or "..".
+ * No absolute paths.
+ * No ".git" or ".repo*" path components.
+
+ Args:
+ path: The path name to validate.
+ dir_ok: Whether |path| may force a directory (e.g. end in a /).
+ cwd_dot_ok: Whether |path| may be just ".".
+
+ Returns:
+ None if |path| is OK, a failure message otherwise.
+ """
+ if not path:
+ return "empty paths not allowed"
+
+ if "~" in path:
+ return "~ not allowed (due to 8.3 filenames on Windows filesystems)"
+
+ path_codepoints = set(path)
+
+ # Some filesystems (like Apple's HFS+) try to normalize Unicode
+ # codepoints which means there are alternative names for ".git". Reject
+ # paths with these in it as there shouldn't be any reasonable need for
+ # them here. The set of codepoints here was cribbed from jgit's
+ # implementation:
+ # https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
+ BAD_CODEPOINTS = {
+ "\u200C", # ZERO WIDTH NON-JOINER
+ "\u200D", # ZERO WIDTH JOINER
+ "\u200E", # LEFT-TO-RIGHT MARK
+ "\u200F", # RIGHT-TO-LEFT MARK
+ "\u202A", # LEFT-TO-RIGHT EMBEDDING
+ "\u202B", # RIGHT-TO-LEFT EMBEDDING
+ "\u202C", # POP DIRECTIONAL FORMATTING
+ "\u202D", # LEFT-TO-RIGHT OVERRIDE
+ "\u202E", # RIGHT-TO-LEFT OVERRIDE
+ "\u206A", # INHIBIT SYMMETRIC SWAPPING
+ "\u206B", # ACTIVATE SYMMETRIC SWAPPING
+ "\u206C", # INHIBIT ARABIC FORM SHAPING
+ "\u206D", # ACTIVATE ARABIC FORM SHAPING
+ "\u206E", # NATIONAL DIGIT SHAPES
+ "\u206F", # NOMINAL DIGIT SHAPES
+ "\uFEFF", # ZERO WIDTH NO-BREAK SPACE
+ }
+ if BAD_CODEPOINTS & path_codepoints:
+ # This message is more expansive than reality, but should be fine.
+ return "Unicode combining characters not allowed"
+
+        # Reject newlines as there shouldn't be any legitimate use for them,
+ # they'll be confusing to users, and they can easily break tools that
+ # expect to be able to iterate over newline delimited lists. This even
+ # applies to our own code like .repo/project.list.
+ if {"\r", "\n"} & path_codepoints:
+ return "Newlines not allowed"
+
+ # Assume paths might be used on case-insensitive filesystems.
+ path = path.lower()
+
+ # Split up the path by its components. We can't use os.path.sep
+ # exclusively as some platforms (like Windows) will convert / to \ and
+ # that bypasses all our constructed logic here. Especially since
+ # manifest authors only use / in their paths.
+ resep = re.compile(r"[/%s]" % re.escape(os.path.sep))
+ # Strip off trailing slashes as those only produce '' elements, and we
+ # use parts to look for individual bad components.
+ parts = resep.split(path.rstrip("/"))
+
+        # Some people use src="." to create stable links to projects. Let's
+ # allow that but reject all other uses of "." to keep things simple.
+ if not cwd_dot_ok or parts != ["."]:
+ for part in set(parts):
+ if part in {".", "..", ".git"} or part.startswith(".repo"):
+ return "bad component: %s" % (part,)
+
+ if not dir_ok and resep.match(path[-1]):
+ return "dirs not allowed"
+
+ # NB: The two abspath checks here are to handle platforms with multiple
+ # filesystem path styles (e.g. Windows).
+ norm = os.path.normpath(path)
+ if (
+ norm == ".."
+ or (
+ len(norm) >= 3
+ and norm.startswith("..")
+ and resep.match(norm[0])
+ )
+ or os.path.isabs(norm)
+ or norm.startswith("/")
+ ):
+ return "path cannot be outside"
+
+ @classmethod
+ def _ValidateFilePaths(cls, element, src, dest):
+ """Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
+
+ We verify the path independent of any filesystem state as we won't have
+ a checkout available to compare to. i.e. This is for parsing validation
+ purposes only.
+
+ We'll do full/live sanity checking before we do the actual filesystem
+ modifications in _CopyFile/_LinkFile/etc...
+ """
+ # |dest| is the file we write to or symlink we create.
+ # It is relative to the top of the repo client checkout.
+ msg = cls._CheckLocalPath(dest)
+ if msg:
+ raise ManifestInvalidPathError(
+ '<%s> invalid "dest": %s: %s' % (element, dest, msg)
+ )
+
+ # |src| is the file we read from or path we point to for symlinks.
+ # It is relative to the top of the git project checkout.
+ is_linkfile = element == "linkfile"
+ msg = cls._CheckLocalPath(
+ src, dir_ok=is_linkfile, cwd_dot_ok=is_linkfile
+ )
+ if msg:
+ raise ManifestInvalidPathError(
+ '<%s> invalid "src": %s: %s' % (element, src, msg)
+ )
+
+ def _ParseCopyFile(self, project, node):
+ src = self._reqatt(node, "src")
+ dest = self._reqatt(node, "dest")
+ if not self.IsMirror:
+ # src is project relative;
+ # dest is relative to the top of the tree.
+ # We only validate paths if we actually plan to process them.
+ self._ValidateFilePaths("copyfile", src, dest)
+ project.AddCopyFile(src, dest, self.topdir)
+
+ def _ParseLinkFile(self, project, node):
+ src = self._reqatt(node, "src")
+ dest = self._reqatt(node, "dest")
+ if not self.IsMirror:
+ # src is project relative;
+ # dest is relative to the top of the tree.
+ # We only validate paths if we actually plan to process them.
+ self._ValidateFilePaths("linkfile", src, dest)
+ project.AddLinkFile(src, dest, self.topdir)
+
+ def _ParseAnnotation(self, element, node):
+ name = self._reqatt(node, "name")
+ value = self._reqatt(node, "value")
+ try:
+ keep = self._reqatt(node, "keep").lower()
+ except ManifestParseError:
+ keep = "true"
+ if keep != "true" and keep != "false":
+ raise ManifestParseError(
+ 'optional "keep" attribute must be ' '"true" or "false"'
+ )
+ element.AddAnnotation(name, value, keep)
+
+ def _get_remote(self, node):
+ name = node.getAttribute("remote")
+ if not name:
+ return None
+
+ v = self._remotes.get(name)
+ if not v:
+ raise ManifestParseError(
+ "remote %s not defined in %s" % (name, self.manifestFile)
+ )
+ return v
+
+ def _reqatt(self, node, attname):
+ """
+ reads a required attribute from the node.
+ """
+ v = node.getAttribute(attname)
+ if not v:
+ raise ManifestParseError(
+ "no %s in <%s> within %s"
+ % (attname, node.nodeName, self.manifestFile)
+ )
+ return v
+
+ def projectsDiff(self, manifest):
+ """return the projects differences between two manifests.
+
+ The diff will be from self to given manifest.
+
+ """
+ fromProjects = self.paths
+ toProjects = manifest.paths
+
+ fromKeys = sorted(fromProjects.keys())
+ toKeys = sorted(toProjects.keys())
+
+ diff = {
+ "added": [],
+ "removed": [],
+ "missing": [],
+ "changed": [],
+ "unreachable": [],
+ }
+
+ for proj in fromKeys:
+ if proj not in toKeys:
+ diff["removed"].append(fromProjects[proj])
+ elif not fromProjects[proj].Exists:
+ diff["missing"].append(toProjects[proj])
+ toKeys.remove(proj)
+ else:
+ fromProj = fromProjects[proj]
+ toProj = toProjects[proj]
+ try:
+ fromRevId = fromProj.GetCommitRevisionId()
+ toRevId = toProj.GetCommitRevisionId()
+ except ManifestInvalidRevisionError:
+ diff["unreachable"].append((fromProj, toProj))
+ else:
+ if fromRevId != toRevId:
+ diff["changed"].append((fromProj, toProj))
+ toKeys.remove(proj)
+
+ for proj in toKeys:
+ diff["added"].append(toProjects[proj])
+
+ return diff
class GitcManifest(XmlManifest):
- """Parser for GitC (git-in-the-cloud) manifests."""
+ """Parser for GitC (git-in-the-cloud) manifests."""
- def _ParseProject(self, node, parent=None):
- """Override _ParseProject and add support for GITC specific attributes."""
- return super()._ParseProject(
- node, parent=parent, old_revision=node.getAttribute('old-revision'))
+ def _ParseProject(self, node, parent=None):
+ """Override _ParseProject and add support for GITC specific attributes.""" # noqa: E501
+ return super()._ParseProject(
+ node, parent=parent, old_revision=node.getAttribute("old-revision")
+ )
- def _output_manifest_project_extras(self, p, e):
- """Output GITC Specific Project attributes"""
- if p.old_revision:
- e.setAttribute('old-revision', str(p.old_revision))
+ def _output_manifest_project_extras(self, p, e):
+ """Output GITC Specific Project attributes"""
+ if p.old_revision:
+ e.setAttribute("old-revision", str(p.old_revision))
class RepoClient(XmlManifest):
- """Manages a repo client checkout."""
+ """Manages a repo client checkout."""
- def __init__(self, repodir, manifest_file=None, submanifest_path='', **kwargs):
- """Initialize.
+ def __init__(
+ self, repodir, manifest_file=None, submanifest_path="", **kwargs
+ ):
+ """Initialize.
- Args:
- repodir: Path to the .repo/ dir for holding all internal checkout state.
- It must be in the top directory of the repo client checkout.
- manifest_file: Full path to the manifest file to parse. This will usually
- be |repodir|/|MANIFEST_FILE_NAME|.
- submanifest_path: The submanifest root relative to the repo root.
- **kwargs: Additional keyword arguments, passed to XmlManifest.
- """
- self.isGitcClient = False
- submanifest_path = submanifest_path or ''
- if submanifest_path:
- self._CheckLocalPath(submanifest_path)
- prefix = os.path.join(repodir, SUBMANIFEST_DIR, submanifest_path)
- else:
- prefix = repodir
+ Args:
+ repodir: Path to the .repo/ dir for holding all internal checkout
+ state. It must be in the top directory of the repo client
+ checkout.
+ manifest_file: Full path to the manifest file to parse. This will
+ usually be |repodir|/|MANIFEST_FILE_NAME|.
+ submanifest_path: The submanifest root relative to the repo root.
+ **kwargs: Additional keyword arguments, passed to XmlManifest.
+ """
+ self.isGitcClient = False
+ submanifest_path = submanifest_path or ""
+ if submanifest_path:
+ self._CheckLocalPath(submanifest_path)
+ prefix = os.path.join(repodir, SUBMANIFEST_DIR, submanifest_path)
+ else:
+ prefix = repodir
- if os.path.exists(os.path.join(prefix, LOCAL_MANIFEST_NAME)):
- print('error: %s is not supported; put local manifests in `%s` instead' %
- (LOCAL_MANIFEST_NAME, os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME)),
- file=sys.stderr)
- sys.exit(1)
+ if os.path.exists(os.path.join(prefix, LOCAL_MANIFEST_NAME)):
+ print(
+ "error: %s is not supported; put local manifests in `%s` "
+ "instead"
+ % (
+ LOCAL_MANIFEST_NAME,
+ os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME),
+ ),
+ file=sys.stderr,
+ )
+ sys.exit(1)
- if manifest_file is None:
- manifest_file = os.path.join(prefix, MANIFEST_FILE_NAME)
- local_manifests = os.path.abspath(os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME))
- super().__init__(repodir, manifest_file, local_manifests,
- submanifest_path=submanifest_path, **kwargs)
+ if manifest_file is None:
+ manifest_file = os.path.join(prefix, MANIFEST_FILE_NAME)
+ local_manifests = os.path.abspath(
+ os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME)
+ )
+ super().__init__(
+ repodir,
+ manifest_file,
+ local_manifests,
+ submanifest_path=submanifest_path,
+ **kwargs,
+ )
- # TODO: Completely separate manifest logic out of the client.
- self.manifest = self
+ # TODO: Completely separate manifest logic out of the client.
+ self.manifest = self
class GitcClient(RepoClient, GitcManifest):
- """Manages a GitC client checkout."""
+ """Manages a GitC client checkout."""
- def __init__(self, repodir, gitc_client_name):
- """Initialize the GitcManifest object."""
- self.gitc_client_name = gitc_client_name
- self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
- gitc_client_name)
+ def __init__(self, repodir, gitc_client_name):
+ """Initialize the GitcManifest object."""
+ self.gitc_client_name = gitc_client_name
+ self.gitc_client_dir = os.path.join(
+ gitc_utils.get_gitc_manifest_dir(), gitc_client_name
+ )
- super().__init__(repodir, os.path.join(self.gitc_client_dir, '.manifest'))
- self.isGitcClient = True
+ super().__init__(
+ repodir, os.path.join(self.gitc_client_dir, ".manifest")
+ )
+ self.isGitcClient = True
diff --git a/pager.py b/pager.py
index 438597e..dbd5ae8 100644
--- a/pager.py
+++ b/pager.py
@@ -26,102 +26,101 @@
def RunPager(globalConfig):
- if not os.isatty(0) or not os.isatty(1):
- return
- pager = _SelectPager(globalConfig)
- if pager == '' or pager == 'cat':
- return
+ if not os.isatty(0) or not os.isatty(1):
+ return
+ pager = _SelectPager(globalConfig)
+ if pager == "" or pager == "cat":
+ return
- if platform_utils.isWindows():
- _PipePager(pager)
- else:
- _ForkPager(pager)
+ if platform_utils.isWindows():
+ _PipePager(pager)
+ else:
+ _ForkPager(pager)
def TerminatePager():
- global pager_process, old_stdout, old_stderr
- if pager_process:
- sys.stdout.flush()
- sys.stderr.flush()
- pager_process.stdin.close()
- pager_process.wait()
- pager_process = None
- # Restore initial stdout/err in case there is more output in this process
- # after shutting down the pager process
- sys.stdout = old_stdout
- sys.stderr = old_stderr
+ global pager_process, old_stdout, old_stderr
+ if pager_process:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ pager_process.stdin.close()
+ pager_process.wait()
+ pager_process = None
+ # Restore initial stdout/err in case there is more output in this
+ # process after shutting down the pager process.
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
def _PipePager(pager):
- global pager_process, old_stdout, old_stderr
- assert pager_process is None, "Only one active pager process at a time"
- # Create pager process, piping stdout/err into its stdin
- try:
- pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout,
- stderr=sys.stderr)
- except FileNotFoundError:
- sys.exit(f'fatal: cannot start pager "{pager}"')
- old_stdout = sys.stdout
- old_stderr = sys.stderr
- sys.stdout = pager_process.stdin
- sys.stderr = pager_process.stdin
+ global pager_process, old_stdout, old_stderr
+ assert pager_process is None, "Only one active pager process at a time"
+ # Create pager process, piping stdout/err into its stdin.
+ try:
+ pager_process = subprocess.Popen(
+ [pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr
+ )
+ except FileNotFoundError:
+ sys.exit(f'fatal: cannot start pager "{pager}"')
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ sys.stdout = pager_process.stdin
+ sys.stderr = pager_process.stdin
def _ForkPager(pager):
- global active
- # This process turns into the pager; a child it forks will
- # do the real processing and output back to the pager. This
- # is necessary to keep the pager in control of the tty.
- #
- try:
- r, w = os.pipe()
- pid = os.fork()
- if not pid:
- os.dup2(w, 1)
- os.dup2(w, 2)
- os.close(r)
- os.close(w)
- active = True
- return
+ global active
+ # This process turns into the pager; a child it forks will
+ # do the real processing and output back to the pager. This
+ # is necessary to keep the pager in control of the tty.
+ try:
+ r, w = os.pipe()
+ pid = os.fork()
+ if not pid:
+ os.dup2(w, 1)
+ os.dup2(w, 2)
+ os.close(r)
+ os.close(w)
+ active = True
+ return
- os.dup2(r, 0)
- os.close(r)
- os.close(w)
+ os.dup2(r, 0)
+ os.close(r)
+ os.close(w)
- _BecomePager(pager)
- except Exception:
- print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
- sys.exit(255)
+ _BecomePager(pager)
+ except Exception:
+ print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
+ sys.exit(255)
def _SelectPager(globalConfig):
- try:
- return os.environ['GIT_PAGER']
- except KeyError:
- pass
+ try:
+ return os.environ["GIT_PAGER"]
+ except KeyError:
+ pass
- pager = globalConfig.GetString('core.pager')
- if pager:
- return pager
+ pager = globalConfig.GetString("core.pager")
+ if pager:
+ return pager
- try:
- return os.environ['PAGER']
- except KeyError:
- pass
+ try:
+ return os.environ["PAGER"]
+ except KeyError:
+ pass
- return 'less'
+ return "less"
def _BecomePager(pager):
- # Delaying execution of the pager until we have output
- # ready works around a long-standing bug in popularly
- # available versions of 'less', a better 'more'.
- #
- _a, _b, _c = select.select([0], [], [0])
+ # Delaying execution of the pager until we have output
+ # ready works around a long-standing bug in popularly
+ # available versions of 'less', a better 'more'.
+ _a, _b, _c = select.select([0], [], [0])
- os.environ['LESS'] = 'FRSX'
+ os.environ["LESS"] = "FRSX"
- try:
- os.execvp(pager, [pager])
- except OSError:
- os.execv('/bin/sh', ['sh', '-c', pager])
+ try:
+ os.execvp(pager, [pager])
+ except OSError:
+ os.execv("/bin/sh", ["sh", "-c", pager])
diff --git a/platform_utils.py b/platform_utils.py
index 0203249..2c48e62 100644
--- a/platform_utils.py
+++ b/platform_utils.py
@@ -20,246 +20,264 @@
def isWindows():
- """ Returns True when running with the native port of Python for Windows,
- False when running on any other platform (including the Cygwin port of
- Python).
- """
- # Note: The cygwin port of Python returns "CYGWIN_NT_xxx"
- return platform.system() == "Windows"
+ """Returns True when running with the native port of Python for Windows,
+ False when running on any other platform (including the Cygwin port of
+ Python).
+ """
+ # Note: The cygwin port of Python returns "CYGWIN_NT_xxx"
+ return platform.system() == "Windows"
def symlink(source, link_name):
- """Creates a symbolic link pointing to source named link_name.
- Note: On Windows, source must exist on disk, as the implementation needs
- to know whether to create a "File" or a "Directory" symbolic link.
- """
- if isWindows():
- import platform_utils_win32
- source = _validate_winpath(source)
- link_name = _validate_winpath(link_name)
- target = os.path.join(os.path.dirname(link_name), source)
- if isdir(target):
- platform_utils_win32.create_dirsymlink(_makelongpath(source), link_name)
+ """Creates a symbolic link pointing to source named link_name.
+
+ Note: On Windows, source must exist on disk, as the implementation needs
+ to know whether to create a "File" or a "Directory" symbolic link.
+ """
+ if isWindows():
+ import platform_utils_win32
+
+ source = _validate_winpath(source)
+ link_name = _validate_winpath(link_name)
+ target = os.path.join(os.path.dirname(link_name), source)
+ if isdir(target):
+ platform_utils_win32.create_dirsymlink(
+ _makelongpath(source), link_name
+ )
+ else:
+ platform_utils_win32.create_filesymlink(
+ _makelongpath(source), link_name
+ )
else:
- platform_utils_win32.create_filesymlink(_makelongpath(source), link_name)
- else:
- return os.symlink(source, link_name)
+ return os.symlink(source, link_name)
def _validate_winpath(path):
- path = os.path.normpath(path)
- if _winpath_is_valid(path):
- return path
- raise ValueError("Path \"%s\" must be a relative path or an absolute "
- "path starting with a drive letter".format(path))
+ path = os.path.normpath(path)
+ if _winpath_is_valid(path):
+ return path
+ raise ValueError(
+ 'Path "{}" must be a relative path or an absolute '
+ "path starting with a drive letter".format(path)
+ )
def _winpath_is_valid(path):
- """Windows only: returns True if path is relative (e.g. ".\\foo") or is
- absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
- is ambiguous (e.g. "x:foo" or "\\foo").
- """
- assert isWindows()
- path = os.path.normpath(path)
- drive, tail = os.path.splitdrive(path)
- if tail:
- if not drive:
- return tail[0] != os.sep # "\\foo" is invalid
+ """Windows only: returns True if path is relative (e.g. ".\\foo") or is
+ absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
+ is ambiguous (e.g. "x:foo" or "\\foo").
+ """
+ assert isWindows()
+ path = os.path.normpath(path)
+ drive, tail = os.path.splitdrive(path)
+ if tail:
+ if not drive:
+ return tail[0] != os.sep # "\\foo" is invalid
+ else:
+ return tail[0] == os.sep # "x:foo" is invalid
else:
- return tail[0] == os.sep # "x:foo" is invalid
- else:
- return not drive # "x:" is invalid
+ return not drive # "x:" is invalid
def _makelongpath(path):
- """Return the input path normalized to support the Windows long path syntax
- ("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the
- MAX_PATH limit.
- """
- if isWindows():
- # Note: MAX_PATH is 260, but, for directories, the maximum value is actually 246.
- if len(path) < 246:
- return path
- if path.startswith(u"\\\\?\\"):
- return path
- if not os.path.isabs(path):
- return path
- # Append prefix and ensure unicode so that the special longpath syntax
- # is supported by underlying Win32 API calls
- return u"\\\\?\\" + os.path.normpath(path)
- else:
- return path
+ """Return the input path normalized to support the Windows long path syntax
+ ("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the
+ MAX_PATH limit.
+ """
+ if isWindows():
+ # Note: MAX_PATH is 260, but, for directories, the maximum value is
+ # actually 246.
+ if len(path) < 246:
+ return path
+ if path.startswith("\\\\?\\"):
+ return path
+ if not os.path.isabs(path):
+ return path
+ # Append prefix and ensure unicode so that the special longpath syntax
+ # is supported by underlying Win32 API calls
+ return "\\\\?\\" + os.path.normpath(path)
+ else:
+ return path
def rmtree(path, ignore_errors=False):
- """shutil.rmtree(path) wrapper with support for long paths on Windows.
+ """shutil.rmtree(path) wrapper with support for long paths on Windows.
- Availability: Unix, Windows."""
- onerror = None
- if isWindows():
- path = _makelongpath(path)
- onerror = handle_rmtree_error
- shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)
+ Availability: Unix, Windows.
+ """
+ onerror = None
+ if isWindows():
+ path = _makelongpath(path)
+ onerror = handle_rmtree_error
+ shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)
def handle_rmtree_error(function, path, excinfo):
- # Allow deleting read-only files
- os.chmod(path, stat.S_IWRITE)
- function(path)
+ # Allow deleting read-only files.
+ os.chmod(path, stat.S_IWRITE)
+ function(path)
def rename(src, dst):
- """os.rename(src, dst) wrapper with support for long paths on Windows.
+ """os.rename(src, dst) wrapper with support for long paths on Windows.
- Availability: Unix, Windows."""
- if isWindows():
- # On Windows, rename fails if destination exists, see
- # https://docs.python.org/2/library/os.html#os.rename
- try:
- os.rename(_makelongpath(src), _makelongpath(dst))
- except OSError as e:
- if e.errno == errno.EEXIST:
- os.remove(_makelongpath(dst))
- os.rename(_makelongpath(src), _makelongpath(dst))
- else:
- raise
- else:
- shutil.move(src, dst)
+ Availability: Unix, Windows.
+ """
+ if isWindows():
+ # On Windows, rename fails if destination exists, see
+ # https://docs.python.org/2/library/os.html#os.rename
+ try:
+ os.rename(_makelongpath(src), _makelongpath(dst))
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ os.remove(_makelongpath(dst))
+ os.rename(_makelongpath(src), _makelongpath(dst))
+ else:
+ raise
+ else:
+ shutil.move(src, dst)
def remove(path, missing_ok=False):
- """Remove (delete) the file path. This is a replacement for os.remove that
- allows deleting read-only files on Windows, with support for long paths and
- for deleting directory symbolic links.
+ """Remove (delete) the file path. This is a replacement for os.remove that
+ allows deleting read-only files on Windows, with support for long paths and
+ for deleting directory symbolic links.
- Availability: Unix, Windows."""
- longpath = _makelongpath(path) if isWindows() else path
- try:
- os.remove(longpath)
- except OSError as e:
- if e.errno == errno.EACCES:
- os.chmod(longpath, stat.S_IWRITE)
- # Directory symbolic links must be deleted with 'rmdir'.
- if islink(longpath) and isdir(longpath):
- os.rmdir(longpath)
- else:
+ Availability: Unix, Windows.
+ """
+ longpath = _makelongpath(path) if isWindows() else path
+ try:
os.remove(longpath)
- elif missing_ok and e.errno == errno.ENOENT:
- pass
- else:
- raise
+ except OSError as e:
+ if e.errno == errno.EACCES:
+ os.chmod(longpath, stat.S_IWRITE)
+ # Directory symbolic links must be deleted with 'rmdir'.
+ if islink(longpath) and isdir(longpath):
+ os.rmdir(longpath)
+ else:
+ os.remove(longpath)
+ elif missing_ok and e.errno == errno.ENOENT:
+ pass
+ else:
+ raise
def walk(top, topdown=True, onerror=None, followlinks=False):
- """os.walk(path) wrapper with support for long paths on Windows.
+ """os.walk(path) wrapper with support for long paths on Windows.
- Availability: Windows, Unix.
- """
- if isWindows():
- return _walk_windows_impl(top, topdown, onerror, followlinks)
- else:
- return os.walk(top, topdown, onerror, followlinks)
+ Availability: Windows, Unix.
+ """
+ if isWindows():
+ return _walk_windows_impl(top, topdown, onerror, followlinks)
+ else:
+ return os.walk(top, topdown, onerror, followlinks)
def _walk_windows_impl(top, topdown, onerror, followlinks):
- try:
- names = listdir(top)
- except Exception as err:
- if onerror is not None:
- onerror(err)
- return
+ try:
+ names = listdir(top)
+ except Exception as err:
+ if onerror is not None:
+ onerror(err)
+ return
- dirs, nondirs = [], []
- for name in names:
- if isdir(os.path.join(top, name)):
- dirs.append(name)
- else:
- nondirs.append(name)
+ dirs, nondirs = [], []
+ for name in names:
+ if isdir(os.path.join(top, name)):
+ dirs.append(name)
+ else:
+ nondirs.append(name)
- if topdown:
- yield top, dirs, nondirs
- for name in dirs:
- new_path = os.path.join(top, name)
- if followlinks or not islink(new_path):
- for x in _walk_windows_impl(new_path, topdown, onerror, followlinks):
- yield x
- if not topdown:
- yield top, dirs, nondirs
+ if topdown:
+ yield top, dirs, nondirs
+ for name in dirs:
+ new_path = os.path.join(top, name)
+ if followlinks or not islink(new_path):
+ for x in _walk_windows_impl(
+ new_path, topdown, onerror, followlinks
+ ):
+ yield x
+ if not topdown:
+ yield top, dirs, nondirs
def listdir(path):
- """os.listdir(path) wrapper with support for long paths on Windows.
+ """os.listdir(path) wrapper with support for long paths on Windows.
- Availability: Windows, Unix.
- """
- return os.listdir(_makelongpath(path))
+ Availability: Windows, Unix.
+ """
+ return os.listdir(_makelongpath(path))
def rmdir(path):
- """os.rmdir(path) wrapper with support for long paths on Windows.
+ """os.rmdir(path) wrapper with support for long paths on Windows.
- Availability: Windows, Unix.
- """
- os.rmdir(_makelongpath(path))
+ Availability: Windows, Unix.
+ """
+ os.rmdir(_makelongpath(path))
def isdir(path):
- """os.path.isdir(path) wrapper with support for long paths on Windows.
+ """os.path.isdir(path) wrapper with support for long paths on Windows.
- Availability: Windows, Unix.
- """
- return os.path.isdir(_makelongpath(path))
+ Availability: Windows, Unix.
+ """
+ return os.path.isdir(_makelongpath(path))
def islink(path):
- """os.path.islink(path) wrapper with support for long paths on Windows.
+ """os.path.islink(path) wrapper with support for long paths on Windows.
- Availability: Windows, Unix.
- """
- if isWindows():
- import platform_utils_win32
- return platform_utils_win32.islink(_makelongpath(path))
- else:
- return os.path.islink(path)
+ Availability: Windows, Unix.
+ """
+ if isWindows():
+ import platform_utils_win32
+
+ return platform_utils_win32.islink(_makelongpath(path))
+ else:
+ return os.path.islink(path)
def readlink(path):
- """Return a string representing the path to which the symbolic link
- points. The result may be either an absolute or relative pathname;
- if it is relative, it may be converted to an absolute pathname using
- os.path.join(os.path.dirname(path), result).
+ """Return a string representing the path to which the symbolic link
+ points. The result may be either an absolute or relative pathname;
+ if it is relative, it may be converted to an absolute pathname using
+ os.path.join(os.path.dirname(path), result).
- Availability: Windows, Unix.
- """
- if isWindows():
- import platform_utils_win32
- return platform_utils_win32.readlink(_makelongpath(path))
- else:
- return os.readlink(path)
+ Availability: Windows, Unix.
+ """
+ if isWindows():
+ import platform_utils_win32
+
+ return platform_utils_win32.readlink(_makelongpath(path))
+ else:
+ return os.readlink(path)
def realpath(path):
- """Return the canonical path of the specified filename, eliminating
- any symbolic links encountered in the path.
+ """Return the canonical path of the specified filename, eliminating
+ any symbolic links encountered in the path.
- Availability: Windows, Unix.
- """
- if isWindows():
- current_path = os.path.abspath(path)
- path_tail = []
- for c in range(0, 100): # Avoid cycles
- if islink(current_path):
- target = readlink(current_path)
- current_path = os.path.join(os.path.dirname(current_path), target)
- else:
- basename = os.path.basename(current_path)
- if basename == '':
- path_tail.append(current_path)
- break
- path_tail.append(basename)
- current_path = os.path.dirname(current_path)
- path_tail.reverse()
- result = os.path.normpath(os.path.join(*path_tail))
- return result
- else:
- return os.path.realpath(path)
+ Availability: Windows, Unix.
+ """
+ if isWindows():
+ current_path = os.path.abspath(path)
+ path_tail = []
+ for c in range(0, 100): # Avoid cycles
+ if islink(current_path):
+ target = readlink(current_path)
+ current_path = os.path.join(
+ os.path.dirname(current_path), target
+ )
+ else:
+ basename = os.path.basename(current_path)
+ if basename == "":
+ path_tail.append(current_path)
+ break
+ path_tail.append(basename)
+ current_path = os.path.dirname(current_path)
+ path_tail.reverse()
+ result = os.path.normpath(os.path.join(*path_tail))
+ return result
+ else:
+ return os.path.realpath(path)
diff --git a/platform_utils_win32.py b/platform_utils_win32.py
index bf916d4..e9b15f4 100644
--- a/platform_utils_win32.py
+++ b/platform_utils_win32.py
@@ -19,7 +19,7 @@
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG, LPDWORD
-kernel32 = WinDLL('kernel32', use_last_error=True)
+kernel32 = WinDLL("kernel32", use_last_error=True)
UCHAR = c_ubyte
@@ -31,14 +31,17 @@
# Win32 API entry points
CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
CreateSymbolicLinkW.restype = BOOLEAN
-CreateSymbolicLinkW.argtypes = (LPCWSTR, # lpSymlinkFileName In
- LPCWSTR, # lpTargetFileName In
- DWORD) # dwFlags In
+CreateSymbolicLinkW.argtypes = (
+ LPCWSTR, # lpSymlinkFileName In
+ LPCWSTR, # lpTargetFileName In
+ DWORD, # dwFlags In
+)
# Symbolic link creation flags
SYMBOLIC_LINK_FLAG_FILE = 0x00
SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01
-# symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972)
+# symlink support for CreateSymbolicLink() starting with Windows 10 (1703,
+# v10.0.14972)
SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02
GetFileAttributesW = kernel32.GetFileAttributesW
@@ -50,13 +53,15 @@
CreateFileW = kernel32.CreateFileW
CreateFileW.restype = HANDLE
-CreateFileW.argtypes = (LPCWSTR, # lpFileName In
- DWORD, # dwDesiredAccess In
- DWORD, # dwShareMode In
- LPVOID, # lpSecurityAttributes In_opt
- DWORD, # dwCreationDisposition In
- DWORD, # dwFlagsAndAttributes In
- HANDLE) # hTemplateFile In_opt
+CreateFileW.argtypes = (
+ LPCWSTR, # lpFileName In
+ DWORD, # dwDesiredAccess In
+ DWORD, # dwShareMode In
+ LPVOID, # lpSecurityAttributes In_opt
+ DWORD, # dwCreationDisposition In
+ DWORD, # dwFlagsAndAttributes In
+ HANDLE, # hTemplateFile In_opt
+)
CloseHandle = kernel32.CloseHandle
CloseHandle.restype = BOOL
@@ -69,14 +74,16 @@
DeviceIoControl = kernel32.DeviceIoControl
DeviceIoControl.restype = BOOL
-DeviceIoControl.argtypes = (HANDLE, # hDevice In
- DWORD, # dwIoControlCode In
- LPVOID, # lpInBuffer In_opt
- DWORD, # nInBufferSize In
- LPVOID, # lpOutBuffer Out_opt
- DWORD, # nOutBufferSize In
- LPDWORD, # lpBytesReturned Out_opt
- LPVOID) # lpOverlapped Inout_opt
+DeviceIoControl.argtypes = (
+ HANDLE, # hDevice In
+ DWORD, # dwIoControlCode In
+ LPVOID, # lpInBuffer In_opt
+ DWORD, # nInBufferSize In
+ LPVOID, # lpOutBuffer Out_opt
+ DWORD, # nOutBufferSize In
+ LPDWORD, # lpBytesReturned Out_opt
+ LPVOID, # lpOverlapped Inout_opt
+)
# Device I/O control flags and options
FSCTL_GET_REPARSE_POINT = 0x000900A8
@@ -86,124 +93,138 @@
class GENERIC_REPARSE_BUFFER(Structure):
- _fields_ = (('DataBuffer', UCHAR * 1),)
+ _fields_ = (("DataBuffer", UCHAR * 1),)
class SYMBOLIC_LINK_REPARSE_BUFFER(Structure):
- _fields_ = (('SubstituteNameOffset', USHORT),
- ('SubstituteNameLength', USHORT),
- ('PrintNameOffset', USHORT),
- ('PrintNameLength', USHORT),
- ('Flags', ULONG),
- ('PathBuffer', WCHAR * 1))
+ _fields_ = (
+ ("SubstituteNameOffset", USHORT),
+ ("SubstituteNameLength", USHORT),
+ ("PrintNameOffset", USHORT),
+ ("PrintNameLength", USHORT),
+ ("Flags", ULONG),
+ ("PathBuffer", WCHAR * 1),
+ )
- @property
- def PrintName(self):
- arrayt = WCHAR * (self.PrintNameLength // 2)
- offset = type(self).PathBuffer.offset + self.PrintNameOffset
- return arrayt.from_address(addressof(self) + offset).value
+ @property
+ def PrintName(self):
+ arrayt = WCHAR * (self.PrintNameLength // 2)
+ offset = type(self).PathBuffer.offset + self.PrintNameOffset
+ return arrayt.from_address(addressof(self) + offset).value
class MOUNT_POINT_REPARSE_BUFFER(Structure):
- _fields_ = (('SubstituteNameOffset', USHORT),
- ('SubstituteNameLength', USHORT),
- ('PrintNameOffset', USHORT),
- ('PrintNameLength', USHORT),
- ('PathBuffer', WCHAR * 1))
+ _fields_ = (
+ ("SubstituteNameOffset", USHORT),
+ ("SubstituteNameLength", USHORT),
+ ("PrintNameOffset", USHORT),
+ ("PrintNameLength", USHORT),
+ ("PathBuffer", WCHAR * 1),
+ )
- @property
- def PrintName(self):
- arrayt = WCHAR * (self.PrintNameLength // 2)
- offset = type(self).PathBuffer.offset + self.PrintNameOffset
- return arrayt.from_address(addressof(self) + offset).value
+ @property
+ def PrintName(self):
+ arrayt = WCHAR * (self.PrintNameLength // 2)
+ offset = type(self).PathBuffer.offset + self.PrintNameOffset
+ return arrayt.from_address(addressof(self) + offset).value
class REPARSE_DATA_BUFFER(Structure):
- class REPARSE_BUFFER(Union):
- _fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER),
- ('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER),
- ('GenericReparseBuffer', GENERIC_REPARSE_BUFFER))
- _fields_ = (('ReparseTag', ULONG),
- ('ReparseDataLength', USHORT),
- ('Reserved', USHORT),
- ('ReparseBuffer', REPARSE_BUFFER))
- _anonymous_ = ('ReparseBuffer',)
+ class REPARSE_BUFFER(Union):
+ _fields_ = (
+ ("SymbolicLinkReparseBuffer", SYMBOLIC_LINK_REPARSE_BUFFER),
+ ("MountPointReparseBuffer", MOUNT_POINT_REPARSE_BUFFER),
+ ("GenericReparseBuffer", GENERIC_REPARSE_BUFFER),
+ )
+
+ _fields_ = (
+ ("ReparseTag", ULONG),
+ ("ReparseDataLength", USHORT),
+ ("Reserved", USHORT),
+ ("ReparseBuffer", REPARSE_BUFFER),
+ )
+ _anonymous_ = ("ReparseBuffer",)
def create_filesymlink(source, link_name):
- """Creates a Windows file symbolic link source pointing to link_name."""
- _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
+ """Creates a Windows file symbolic link source pointing to link_name."""
+ _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
def create_dirsymlink(source, link_name):
- """Creates a Windows directory symbolic link source pointing to link_name.
- """
- _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
+ """Creates a Windows directory symbolic link source pointing to link_name.""" # noqa: E501
+ _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
def _create_symlink(source, link_name, dwFlags):
- if not CreateSymbolicLinkW(link_name, source,
- dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
- # See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0
- # "the unprivileged create flag is unsupported below Windows 10 (1703, v10.0.14972).
- # retry without it."
- if not CreateSymbolicLinkW(link_name, source, dwFlags):
- code = get_last_error()
- error_desc = FormatError(code).strip()
- if code == ERROR_PRIVILEGE_NOT_HELD:
- raise OSError(errno.EPERM, error_desc, link_name)
- _raise_winerror(
- code,
- 'Error creating symbolic link \"%s\"'.format(link_name))
+ if not CreateSymbolicLinkW(
+ link_name,
+ source,
+ dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE,
+ ):
+ # See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0 # noqa: E501
+ # "the unprivileged create flag is unsupported below Windows 10 (1703,
+ # v10.0.14972). retry without it."
+ if not CreateSymbolicLinkW(link_name, source, dwFlags):
+ code = get_last_error()
+ error_desc = FormatError(code).strip()
+ if code == ERROR_PRIVILEGE_NOT_HELD:
+ raise OSError(errno.EPERM, error_desc, link_name)
+ _raise_winerror(
+ code, 'Error creating symbolic link "{}"'.format(link_name)
+ )
def islink(path):
- result = GetFileAttributesW(path)
- if result == INVALID_FILE_ATTRIBUTES:
- return False
- return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
+ result = GetFileAttributesW(path)
+ if result == INVALID_FILE_ATTRIBUTES:
+ return False
+ return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
def readlink(path):
- reparse_point_handle = CreateFileW(path,
- 0,
- 0,
- None,
- OPEN_EXISTING,
- FILE_FLAG_OPEN_REPARSE_POINT |
- FILE_FLAG_BACKUP_SEMANTICS,
- None)
- if reparse_point_handle == INVALID_HANDLE_VALUE:
+ reparse_point_handle = CreateFileW(
+ path,
+ 0,
+ 0,
+ None,
+ OPEN_EXISTING,
+ FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
+ None,
+ )
+ if reparse_point_handle == INVALID_HANDLE_VALUE:
+ _raise_winerror(
+ get_last_error(), 'Error opening symbolic link "{}"'.format(path)
+ )
+ target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
+ n_bytes_returned = DWORD()
+ io_result = DeviceIoControl(
+ reparse_point_handle,
+ FSCTL_GET_REPARSE_POINT,
+ None,
+ 0,
+ target_buffer,
+ len(target_buffer),
+ byref(n_bytes_returned),
+ None,
+ )
+ CloseHandle(reparse_point_handle)
+ if not io_result:
+ _raise_winerror(
+ get_last_error(), 'Error reading symbolic link "{}"'.format(path)
+ )
+ rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
+ if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
+ return rdb.SymbolicLinkReparseBuffer.PrintName
+ elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
+ return rdb.MountPointReparseBuffer.PrintName
+ # Unsupported reparse point type.
_raise_winerror(
- get_last_error(),
- 'Error opening symbolic link \"%s\"'.format(path))
- target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
- n_bytes_returned = DWORD()
- io_result = DeviceIoControl(reparse_point_handle,
- FSCTL_GET_REPARSE_POINT,
- None,
- 0,
- target_buffer,
- len(target_buffer),
- byref(n_bytes_returned),
- None)
- CloseHandle(reparse_point_handle)
- if not io_result:
- _raise_winerror(
- get_last_error(),
- 'Error reading symbolic link \"%s\"'.format(path))
- rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
- if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
- return rdb.SymbolicLinkReparseBuffer.PrintName
- elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
- return rdb.MountPointReparseBuffer.PrintName
- # Unsupported reparse point type
- _raise_winerror(
- ERROR_NOT_SUPPORTED,
- 'Error reading symbolic link \"%s\"'.format(path))
+ ERROR_NOT_SUPPORTED, 'Error reading symbolic link "{}"'.format(path)
+ )
def _raise_winerror(code, error_desc):
- win_error_desc = FormatError(code).strip()
- error_desc = "%s: %s".format(error_desc, win_error_desc)
- raise WinError(code, error_desc)
+ win_error_desc = FormatError(code).strip()
+ error_desc = "{0}: {1}".format(error_desc, win_error_desc)
+ raise WinError(code, error_desc)
diff --git a/progress.py b/progress.py
index 526ce6c..d1a7c54 100644
--- a/progress.py
+++ b/progress.py
@@ -22,115 +22,136 @@
# This will erase all content in the current line (wherever the cursor is).
# It does not move the cursor, so this is usually followed by \r to move to
# column 0.
-CSI_ERASE_LINE = '\x1b[2K'
+CSI_ERASE_LINE = "\x1b[2K"
# This will erase all content in the current line after the cursor. This is
# useful for partial updates & progress messages as the terminal can display
# it better.
-CSI_ERASE_LINE_AFTER = '\x1b[K'
+CSI_ERASE_LINE_AFTER = "\x1b[K"
def duration_str(total):
- """A less noisy timedelta.__str__.
+ """A less noisy timedelta.__str__.
- The default timedelta stringification contains a lot of leading zeros and
- uses microsecond resolution. This makes for noisy output.
- """
- hours, rem = divmod(total, 3600)
- mins, secs = divmod(rem, 60)
- ret = '%.3fs' % (secs,)
- if mins:
- ret = '%im%s' % (mins, ret)
- if hours:
- ret = '%ih%s' % (hours, ret)
- return ret
+ The default timedelta stringification contains a lot of leading zeros and
+ uses microsecond resolution. This makes for noisy output.
+ """
+ hours, rem = divmod(total, 3600)
+ mins, secs = divmod(rem, 60)
+ ret = "%.3fs" % (secs,)
+ if mins:
+ ret = "%im%s" % (mins, ret)
+ if hours:
+ ret = "%ih%s" % (hours, ret)
+ return ret
class Progress(object):
- def __init__(self, title, total=0, units='', print_newline=False, delay=True,
- quiet=False):
- self._title = title
- self._total = total
- self._done = 0
- self._start = time()
- self._show = not delay
- self._units = units
- self._print_newline = print_newline
- # Only show the active jobs section if we run more than one in parallel.
- self._show_jobs = False
- self._active = 0
+ def __init__(
+ self,
+ title,
+ total=0,
+ units="",
+ print_newline=False,
+ delay=True,
+ quiet=False,
+ ):
+ self._title = title
+ self._total = total
+ self._done = 0
+ self._start = time()
+ self._show = not delay
+ self._units = units
+ self._print_newline = print_newline
+ # Only show the active jobs section if we run more than one in parallel.
+ self._show_jobs = False
+ self._active = 0
- # When quiet, never show any output. It's a bit hacky, but reusing the
- # existing logic that delays initial output keeps the rest of the class
- # clean. Basically we set the start time to years in the future.
- if quiet:
- self._show = False
- self._start += 2**32
+ # When quiet, never show any output. It's a bit hacky, but reusing the
+ # existing logic that delays initial output keeps the rest of the class
+ # clean. Basically we set the start time to years in the future.
+ if quiet:
+ self._show = False
+ self._start += 2**32
- def start(self, name):
- self._active += 1
- if not self._show_jobs:
- self._show_jobs = self._active > 1
- self.update(inc=0, msg='started ' + name)
+ def start(self, name):
+ self._active += 1
+ if not self._show_jobs:
+ self._show_jobs = self._active > 1
+ self.update(inc=0, msg="started " + name)
- def finish(self, name):
- self.update(msg='finished ' + name)
- self._active -= 1
+ def finish(self, name):
+ self.update(msg="finished " + name)
+ self._active -= 1
- def update(self, inc=1, msg=''):
- self._done += inc
+ def update(self, inc=1, msg=""):
+ self._done += inc
- if _NOT_TTY or IsTraceToStderr():
- return
+ if _NOT_TTY or IsTraceToStderr():
+ return
- if not self._show:
- if 0.5 <= time() - self._start:
- self._show = True
- else:
- return
+ if not self._show:
+ if 0.5 <= time() - self._start:
+ self._show = True
+ else:
+ return
- if self._total <= 0:
- sys.stderr.write('\r%s: %d,%s' % (
- self._title,
- self._done,
- CSI_ERASE_LINE_AFTER))
- sys.stderr.flush()
- else:
- p = (100 * self._done) / self._total
- if self._show_jobs:
- jobs = '[%d job%s] ' % (self._active, 's' if self._active > 1 else '')
- else:
- jobs = ''
- sys.stderr.write('\r%s: %2d%% %s(%d%s/%d%s)%s%s%s%s' % (
- self._title,
- p,
- jobs,
- self._done, self._units,
- self._total, self._units,
- ' ' if msg else '', msg,
- CSI_ERASE_LINE_AFTER,
- '\n' if self._print_newline else ''))
- sys.stderr.flush()
+ if self._total <= 0:
+ sys.stderr.write(
+ "\r%s: %d,%s" % (self._title, self._done, CSI_ERASE_LINE_AFTER)
+ )
+ sys.stderr.flush()
+ else:
+ p = (100 * self._done) / self._total
+ if self._show_jobs:
+ jobs = "[%d job%s] " % (
+ self._active,
+ "s" if self._active > 1 else "",
+ )
+ else:
+ jobs = ""
+ sys.stderr.write(
+ "\r%s: %2d%% %s(%d%s/%d%s)%s%s%s%s"
+ % (
+ self._title,
+ p,
+ jobs,
+ self._done,
+ self._units,
+ self._total,
+ self._units,
+ " " if msg else "",
+ msg,
+ CSI_ERASE_LINE_AFTER,
+ "\n" if self._print_newline else "",
+ )
+ )
+ sys.stderr.flush()
- def end(self):
- if _NOT_TTY or IsTraceToStderr() or not self._show:
- return
+ def end(self):
+ if _NOT_TTY or IsTraceToStderr() or not self._show:
+ return
- duration = duration_str(time() - self._start)
- if self._total <= 0:
- sys.stderr.write('\r%s: %d, done in %s%s\n' % (
- self._title,
- self._done,
- duration,
- CSI_ERASE_LINE_AFTER))
- sys.stderr.flush()
- else:
- p = (100 * self._done) / self._total
- sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done in %s%s\n' % (
- self._title,
- p,
- self._done, self._units,
- self._total, self._units,
- duration,
- CSI_ERASE_LINE_AFTER))
- sys.stderr.flush()
+ duration = duration_str(time() - self._start)
+ if self._total <= 0:
+ sys.stderr.write(
+ "\r%s: %d, done in %s%s\n"
+ % (self._title, self._done, duration, CSI_ERASE_LINE_AFTER)
+ )
+ sys.stderr.flush()
+ else:
+ p = (100 * self._done) / self._total
+ sys.stderr.write(
+ "\r%s: %3d%% (%d%s/%d%s), done in %s%s\n"
+ % (
+ self._title,
+ p,
+ self._done,
+ self._units,
+ self._total,
+ self._units,
+ duration,
+ CSI_ERASE_LINE_AFTER,
+ )
+ )
+ sys.stderr.flush()
diff --git a/project.py b/project.py
index 3ccfd14..887fe83 100644
--- a/project.py
+++ b/project.py
@@ -32,8 +32,13 @@
from color import Coloring
import fetch
from git_command import GitCommand, git_require
-from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \
- ID_RE
+from git_config import (
+ GitConfig,
+ IsId,
+ GetSchemeFromUrl,
+ GetUrlCookieFile,
+ ID_RE,
+)
import git_superproject
from git_trace2_event_log import EventLog
from error import GitError, UploadError, DownloadError
@@ -47,12 +52,13 @@
class SyncNetworkHalfResult(NamedTuple):
- """Sync_NetworkHalf return value."""
- # True if successful.
- success: bool
- # Did we query the remote? False when optimized_fetch is True and we have the
- # commit already present.
- remote_fetched: bool
+ """Sync_NetworkHalf return value."""
+
+ # True if successful.
+ success: bool
+ # Did we query the remote? False when optimized_fetch is True and we have
+ # the commit already present.
+ remote_fetched: bool
# Maximum sleep time allowed during retries.
@@ -62,3904 +68,4362 @@
# Whether to use alternates. Switching back and forth is *NOT* supported.
# TODO(vapier): Remove knob once behavior is verified.
-_ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1'
+_ALTERNATES = os.environ.get("REPO_USE_ALTERNATES") == "1"
def _lwrite(path, content):
- lock = '%s.lock' % path
+ lock = "%s.lock" % path
- # Maintain Unix line endings on all OS's to match git behavior.
- with open(lock, 'w', newline='\n') as fd:
- fd.write(content)
+ # Maintain Unix line endings on all OS's to match git behavior.
+ with open(lock, "w", newline="\n") as fd:
+ fd.write(content)
- try:
- platform_utils.rename(lock, path)
- except OSError:
- platform_utils.remove(lock)
- raise
+ try:
+ platform_utils.rename(lock, path)
+ except OSError:
+ platform_utils.remove(lock)
+ raise
def _error(fmt, *args):
- msg = fmt % args
- print('error: %s' % msg, file=sys.stderr)
+ msg = fmt % args
+ print("error: %s" % msg, file=sys.stderr)
def _warn(fmt, *args):
- msg = fmt % args
- print('warn: %s' % msg, file=sys.stderr)
+ msg = fmt % args
+ print("warn: %s" % msg, file=sys.stderr)
def not_rev(r):
- return '^' + r
+ return "^" + r
def sq(r):
- return "'" + r.replace("'", "'\''") + "'"
+ return "'" + r.replace("'", "'''") + "'"
_project_hook_list = None
def _ProjectHooks():
- """List the hooks present in the 'hooks' directory.
+ """List the hooks present in the 'hooks' directory.
- These hooks are project hooks and are copied to the '.git/hooks' directory
- of all subprojects.
+ These hooks are project hooks and are copied to the '.git/hooks' directory
+ of all subprojects.
- This function caches the list of hooks (based on the contents of the
- 'repo/hooks' directory) on the first call.
+ This function caches the list of hooks (based on the contents of the
+ 'repo/hooks' directory) on the first call.
- Returns:
- A list of absolute paths to all of the files in the hooks directory.
- """
- global _project_hook_list
- if _project_hook_list is None:
- d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__)))
- d = os.path.join(d, 'hooks')
- _project_hook_list = [os.path.join(d, x) for x in platform_utils.listdir(d)]
- return _project_hook_list
+ Returns:
+ A list of absolute paths to all of the files in the hooks directory.
+ """
+ global _project_hook_list
+ if _project_hook_list is None:
+ d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__)))
+ d = os.path.join(d, "hooks")
+ _project_hook_list = [
+ os.path.join(d, x) for x in platform_utils.listdir(d)
+ ]
+ return _project_hook_list
class DownloadedChange(object):
- _commit_cache = None
+ _commit_cache = None
- def __init__(self, project, base, change_id, ps_id, commit):
- self.project = project
- self.base = base
- self.change_id = change_id
- self.ps_id = ps_id
- self.commit = commit
+ def __init__(self, project, base, change_id, ps_id, commit):
+ self.project = project
+ self.base = base
+ self.change_id = change_id
+ self.ps_id = ps_id
+ self.commit = commit
- @property
- def commits(self):
- if self._commit_cache is None:
- self._commit_cache = self.project.bare_git.rev_list('--abbrev=8',
- '--abbrev-commit',
- '--pretty=oneline',
- '--reverse',
- '--date-order',
- not_rev(self.base),
- self.commit,
- '--')
- return self._commit_cache
+ @property
+ def commits(self):
+ if self._commit_cache is None:
+ self._commit_cache = self.project.bare_git.rev_list(
+ "--abbrev=8",
+ "--abbrev-commit",
+ "--pretty=oneline",
+ "--reverse",
+ "--date-order",
+ not_rev(self.base),
+ self.commit,
+ "--",
+ )
+ return self._commit_cache
class ReviewableBranch(object):
- _commit_cache = None
- _base_exists = None
+ _commit_cache = None
+ _base_exists = None
- def __init__(self, project, branch, base):
- self.project = project
- self.branch = branch
- self.base = base
+ def __init__(self, project, branch, base):
+ self.project = project
+ self.branch = branch
+ self.base = base
- @property
- def name(self):
- return self.branch.name
+ @property
+ def name(self):
+ return self.branch.name
- @property
- def commits(self):
- if self._commit_cache is None:
- args = ('--abbrev=8', '--abbrev-commit', '--pretty=oneline', '--reverse',
- '--date-order', not_rev(self.base), R_HEADS + self.name, '--')
- try:
- self._commit_cache = self.project.bare_git.rev_list(*args)
- except GitError:
- # We weren't able to probe the commits for this branch. Was it tracking
- # a branch that no longer exists? If so, return no commits. Otherwise,
- # rethrow the error as we don't know what's going on.
- if self.base_exists:
- raise
+ @property
+ def commits(self):
+ if self._commit_cache is None:
+ args = (
+ "--abbrev=8",
+ "--abbrev-commit",
+ "--pretty=oneline",
+ "--reverse",
+ "--date-order",
+ not_rev(self.base),
+ R_HEADS + self.name,
+ "--",
+ )
+ try:
+ self._commit_cache = self.project.bare_git.rev_list(*args)
+ except GitError:
+ # We weren't able to probe the commits for this branch. Was it
+ # tracking a branch that no longer exists? If so, return no
+ # commits. Otherwise, rethrow the error as we don't know what's
+ # going on.
+ if self.base_exists:
+ raise
- self._commit_cache = []
+ self._commit_cache = []
- return self._commit_cache
+ return self._commit_cache
- @property
- def unabbrev_commits(self):
- r = dict()
- for commit in self.project.bare_git.rev_list(not_rev(self.base),
- R_HEADS + self.name,
- '--'):
- r[commit[0:8]] = commit
- return r
+ @property
+ def unabbrev_commits(self):
+ r = dict()
+ for commit in self.project.bare_git.rev_list(
+ not_rev(self.base), R_HEADS + self.name, "--"
+ ):
+ r[commit[0:8]] = commit
+ return r
- @property
- def date(self):
- return self.project.bare_git.log('--pretty=format:%cd',
- '-n', '1',
- R_HEADS + self.name,
- '--')
+ @property
+ def date(self):
+ return self.project.bare_git.log(
+ "--pretty=format:%cd", "-n", "1", R_HEADS + self.name, "--"
+ )
- @property
- def base_exists(self):
- """Whether the branch we're tracking exists.
+ @property
+ def base_exists(self):
+ """Whether the branch we're tracking exists.
- Normally it should, but sometimes branches we track can get deleted.
- """
- if self._base_exists is None:
- try:
- self.project.bare_git.rev_parse('--verify', not_rev(self.base))
- # If we're still here, the base branch exists.
- self._base_exists = True
- except GitError:
- # If we failed to verify, the base branch doesn't exist.
- self._base_exists = False
+ Normally it should, but sometimes branches we track can get deleted.
+ """
+ if self._base_exists is None:
+ try:
+ self.project.bare_git.rev_parse("--verify", not_rev(self.base))
+ # If we're still here, the base branch exists.
+ self._base_exists = True
+ except GitError:
+ # If we failed to verify, the base branch doesn't exist.
+ self._base_exists = False
- return self._base_exists
+ return self._base_exists
- def UploadForReview(self, people,
- dryrun=False,
- auto_topic=False,
- hashtags=(),
- labels=(),
- private=False,
- notify=None,
- wip=False,
- ready=False,
- dest_branch=None,
- validate_certs=True,
- push_options=None):
- self.project.UploadForReview(branch=self.name,
- people=people,
- dryrun=dryrun,
- auto_topic=auto_topic,
- hashtags=hashtags,
- labels=labels,
- private=private,
- notify=notify,
- wip=wip,
- ready=ready,
- dest_branch=dest_branch,
- validate_certs=validate_certs,
- push_options=push_options)
+ def UploadForReview(
+ self,
+ people,
+ dryrun=False,
+ auto_topic=False,
+ hashtags=(),
+ labels=(),
+ private=False,
+ notify=None,
+ wip=False,
+ ready=False,
+ dest_branch=None,
+ validate_certs=True,
+ push_options=None,
+ ):
+ self.project.UploadForReview(
+ branch=self.name,
+ people=people,
+ dryrun=dryrun,
+ auto_topic=auto_topic,
+ hashtags=hashtags,
+ labels=labels,
+ private=private,
+ notify=notify,
+ wip=wip,
+ ready=ready,
+ dest_branch=dest_branch,
+ validate_certs=validate_certs,
+ push_options=push_options,
+ )
- def GetPublishedRefs(self):
- refs = {}
- output = self.project.bare_git.ls_remote(
- self.branch.remote.SshReviewUrl(self.project.UserEmail),
- 'refs/changes/*')
- for line in output.split('\n'):
- try:
- (sha, ref) = line.split()
- refs[sha] = ref
- except ValueError:
- pass
+ def GetPublishedRefs(self):
+ refs = {}
+ output = self.project.bare_git.ls_remote(
+ self.branch.remote.SshReviewUrl(self.project.UserEmail),
+ "refs/changes/*",
+ )
+ for line in output.split("\n"):
+ try:
+ (sha, ref) = line.split()
+ refs[sha] = ref
+ except ValueError:
+ pass
- return refs
+ return refs
class StatusColoring(Coloring):
+ def __init__(self, config):
+ super().__init__(config, "status")
+ self.project = self.printer("header", attr="bold")
+ self.branch = self.printer("header", attr="bold")
+ self.nobranch = self.printer("nobranch", fg="red")
+ self.important = self.printer("important", fg="red")
- def __init__(self, config):
- super().__init__(config, 'status')
- self.project = self.printer('header', attr='bold')
- self.branch = self.printer('header', attr='bold')
- self.nobranch = self.printer('nobranch', fg='red')
- self.important = self.printer('important', fg='red')
-
- self.added = self.printer('added', fg='green')
- self.changed = self.printer('changed', fg='red')
- self.untracked = self.printer('untracked', fg='red')
+ self.added = self.printer("added", fg="green")
+ self.changed = self.printer("changed", fg="red")
+ self.untracked = self.printer("untracked", fg="red")
class DiffColoring(Coloring):
-
- def __init__(self, config):
- super().__init__(config, 'diff')
- self.project = self.printer('header', attr='bold')
- self.fail = self.printer('fail', fg='red')
+ def __init__(self, config):
+ super().__init__(config, "diff")
+ self.project = self.printer("header", attr="bold")
+ self.fail = self.printer("fail", fg="red")
class Annotation(object):
+ def __init__(self, name, value, keep):
+ self.name = name
+ self.value = value
+ self.keep = keep
- def __init__(self, name, value, keep):
- self.name = name
- self.value = value
- self.keep = keep
+ def __eq__(self, other):
+ if not isinstance(other, Annotation):
+ return False
+ return self.__dict__ == other.__dict__
- def __eq__(self, other):
- if not isinstance(other, Annotation):
- return False
- return self.__dict__ == other.__dict__
-
- def __lt__(self, other):
- # This exists just so that lists of Annotation objects can be sorted, for
- # use in comparisons.
- if not isinstance(other, Annotation):
- raise ValueError('comparison is not between two Annotation objects')
- if self.name == other.name:
- if self.value == other.value:
- return self.keep < other.keep
- return self.value < other.value
- return self.name < other.name
+ def __lt__(self, other):
+ # This exists just so that lists of Annotation objects can be sorted,
+ # for use in comparisons.
+ if not isinstance(other, Annotation):
+ raise ValueError("comparison is not between two Annotation objects")
+ if self.name == other.name:
+ if self.value == other.value:
+ return self.keep < other.keep
+ return self.value < other.value
+ return self.name < other.name
def _SafeExpandPath(base, subpath, skipfinal=False):
- """Make sure |subpath| is completely safe under |base|.
+ """Make sure |subpath| is completely safe under |base|.
- We make sure no intermediate symlinks are traversed, and that the final path
- is not a special file (e.g. not a socket or fifo).
+ We make sure no intermediate symlinks are traversed, and that the final path
+ is not a special file (e.g. not a socket or fifo).
- NB: We rely on a number of paths already being filtered out while parsing the
- manifest. See the validation logic in manifest_xml.py for more details.
- """
- # Split up the path by its components. We can't use os.path.sep exclusively
- # as some platforms (like Windows) will convert / to \ and that bypasses all
- # our constructed logic here. Especially since manifest authors only use
- # / in their paths.
- resep = re.compile(r'[/%s]' % re.escape(os.path.sep))
- components = resep.split(subpath)
- if skipfinal:
- # Whether the caller handles the final component itself.
- finalpart = components.pop()
+ NB: We rely on a number of paths already being filtered out while parsing
+ the manifest. See the validation logic in manifest_xml.py for more details.
+ """
+ # Split up the path by its components. We can't use os.path.sep exclusively
+ # as some platforms (like Windows) will convert / to \ and that bypasses all
+ # our constructed logic here. Especially since manifest authors only use
+ # / in their paths.
+ resep = re.compile(r"[/%s]" % re.escape(os.path.sep))
+ components = resep.split(subpath)
+ if skipfinal:
+ # Whether the caller handles the final component itself.
+ finalpart = components.pop()
- path = base
- for part in components:
- if part in {'.', '..'}:
- raise ManifestInvalidPathError(
- '%s: "%s" not allowed in paths' % (subpath, part))
+ path = base
+ for part in components:
+ if part in {".", ".."}:
+ raise ManifestInvalidPathError(
+ '%s: "%s" not allowed in paths' % (subpath, part)
+ )
- path = os.path.join(path, part)
- if platform_utils.islink(path):
- raise ManifestInvalidPathError(
- '%s: traversing symlinks not allow' % (path,))
+ path = os.path.join(path, part)
+ if platform_utils.islink(path):
+ raise ManifestInvalidPathError(
+ "%s: traversing symlinks not allow" % (path,)
+ )
- if os.path.exists(path):
- if not os.path.isfile(path) and not platform_utils.isdir(path):
- raise ManifestInvalidPathError(
- '%s: only regular files & directories allowed' % (path,))
+ if os.path.exists(path):
+ if not os.path.isfile(path) and not platform_utils.isdir(path):
+ raise ManifestInvalidPathError(
+ "%s: only regular files & directories allowed" % (path,)
+ )
- if skipfinal:
- path = os.path.join(path, finalpart)
+ if skipfinal:
+ path = os.path.join(path, finalpart)
- return path
+ return path
class _CopyFile(object):
- """Container for <copyfile> manifest element."""
+ """Container for <copyfile> manifest element."""
- def __init__(self, git_worktree, src, topdir, dest):
- """Register a <copyfile> request.
+ def __init__(self, git_worktree, src, topdir, dest):
+ """Register a <copyfile> request.
- Args:
- git_worktree: Absolute path to the git project checkout.
- src: Relative path under |git_worktree| of file to read.
- topdir: Absolute path to the top of the repo client checkout.
- dest: Relative path under |topdir| of file to write.
- """
- self.git_worktree = git_worktree
- self.topdir = topdir
- self.src = src
- self.dest = dest
+ Args:
+ git_worktree: Absolute path to the git project checkout.
+ src: Relative path under |git_worktree| of file to read.
+ topdir: Absolute path to the top of the repo client checkout.
+ dest: Relative path under |topdir| of file to write.
+ """
+ self.git_worktree = git_worktree
+ self.topdir = topdir
+ self.src = src
+ self.dest = dest
- def _Copy(self):
- src = _SafeExpandPath(self.git_worktree, self.src)
- dest = _SafeExpandPath(self.topdir, self.dest)
+ def _Copy(self):
+ src = _SafeExpandPath(self.git_worktree, self.src)
+ dest = _SafeExpandPath(self.topdir, self.dest)
- if platform_utils.isdir(src):
- raise ManifestInvalidPathError(
- '%s: copying from directory not supported' % (self.src,))
- if platform_utils.isdir(dest):
- raise ManifestInvalidPathError(
- '%s: copying to directory not allowed' % (self.dest,))
+ if platform_utils.isdir(src):
+ raise ManifestInvalidPathError(
+ "%s: copying from directory not supported" % (self.src,)
+ )
+ if platform_utils.isdir(dest):
+ raise ManifestInvalidPathError(
+ "%s: copying to directory not allowed" % (self.dest,)
+ )
- # copy file if it does not exist or is out of date
- if not os.path.exists(dest) or not filecmp.cmp(src, dest):
- try:
- # remove existing file first, since it might be read-only
- if os.path.exists(dest):
- platform_utils.remove(dest)
- else:
- dest_dir = os.path.dirname(dest)
- if not platform_utils.isdir(dest_dir):
- os.makedirs(dest_dir)
- shutil.copy(src, dest)
- # make the file read-only
- mode = os.stat(dest)[stat.ST_MODE]
- mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
- os.chmod(dest, mode)
- except IOError:
- _error('Cannot copy file %s to %s', src, dest)
+ # Copy file if it does not exist or is out of date.
+ if not os.path.exists(dest) or not filecmp.cmp(src, dest):
+ try:
+ # Remove existing file first, since it might be read-only.
+ if os.path.exists(dest):
+ platform_utils.remove(dest)
+ else:
+ dest_dir = os.path.dirname(dest)
+ if not platform_utils.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ shutil.copy(src, dest)
+ # Make the file read-only.
+ mode = os.stat(dest)[stat.ST_MODE]
+ mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
+ os.chmod(dest, mode)
+ except IOError:
+ _error("Cannot copy file %s to %s", src, dest)
class _LinkFile(object):
- """Container for <linkfile> manifest element."""
+ """Container for <linkfile> manifest element."""
- def __init__(self, git_worktree, src, topdir, dest):
- """Register a <linkfile> request.
+ def __init__(self, git_worktree, src, topdir, dest):
+ """Register a <linkfile> request.
- Args:
- git_worktree: Absolute path to the git project checkout.
- src: Target of symlink relative to path under |git_worktree|.
- topdir: Absolute path to the top of the repo client checkout.
- dest: Relative path under |topdir| of symlink to create.
- """
- self.git_worktree = git_worktree
- self.topdir = topdir
- self.src = src
- self.dest = dest
+ Args:
+ git_worktree: Absolute path to the git project checkout.
+ src: Target of symlink relative to path under |git_worktree|.
+ topdir: Absolute path to the top of the repo client checkout.
+ dest: Relative path under |topdir| of symlink to create.
+ """
+ self.git_worktree = git_worktree
+ self.topdir = topdir
+ self.src = src
+ self.dest = dest
- def __linkIt(self, relSrc, absDest):
- # link file if it does not exist or is out of date
- if not platform_utils.islink(absDest) or (platform_utils.readlink(absDest) != relSrc):
- try:
- # remove existing file first, since it might be read-only
- if os.path.lexists(absDest):
- platform_utils.remove(absDest)
+ def __linkIt(self, relSrc, absDest):
+ # Link file if it does not exist or is out of date.
+ if not platform_utils.islink(absDest) or (
+ platform_utils.readlink(absDest) != relSrc
+ ):
+ try:
+ # Remove existing file first, since it might be read-only.
+ if os.path.lexists(absDest):
+ platform_utils.remove(absDest)
+ else:
+ dest_dir = os.path.dirname(absDest)
+ if not platform_utils.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ platform_utils.symlink(relSrc, absDest)
+ except IOError:
+ _error("Cannot link file %s to %s", relSrc, absDest)
+
+ def _Link(self):
+ """Link the self.src & self.dest paths.
+
+ Handles wild cards on the src linking all of the files in the source in
+ to the destination directory.
+ """
+ # Some people use src="." to create stable links to projects. Let's
+ # allow that but reject all other uses of "." to keep things simple.
+ if self.src == ".":
+ src = self.git_worktree
else:
- dest_dir = os.path.dirname(absDest)
- if not platform_utils.isdir(dest_dir):
- os.makedirs(dest_dir)
- platform_utils.symlink(relSrc, absDest)
- except IOError:
- _error('Cannot link file %s to %s', relSrc, absDest)
+ src = _SafeExpandPath(self.git_worktree, self.src)
- def _Link(self):
- """Link the self.src & self.dest paths.
+ if not glob.has_magic(src):
+ # Entity does not contain a wild card so just a simple one to one
+ # link operation.
+ dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
+ # dest & src are absolute paths at this point. Make sure the target
+ # of the symlink is relative in the context of the repo client
+ # checkout.
+ relpath = os.path.relpath(src, os.path.dirname(dest))
+ self.__linkIt(relpath, dest)
+ else:
+ dest = _SafeExpandPath(self.topdir, self.dest)
+ # Entity contains a wild card.
+ if os.path.exists(dest) and not platform_utils.isdir(dest):
+ _error(
+ "Link error: src with wildcard, %s must be a directory",
+ dest,
+ )
+ else:
+ for absSrcFile in glob.glob(src):
+                    # Create a relative path from source dir to destination
+                    # dir.
+ absSrcDir = os.path.dirname(absSrcFile)
+ relSrcDir = os.path.relpath(absSrcDir, dest)
- Handles wild cards on the src linking all of the files in the source in to
- the destination directory.
- """
- # Some people use src="." to create stable links to projects. Lets allow
- # that but reject all other uses of "." to keep things simple.
- if self.src == '.':
- src = self.git_worktree
- else:
- src = _SafeExpandPath(self.git_worktree, self.src)
+ # Get the source file name.
+ srcFile = os.path.basename(absSrcFile)
- if not glob.has_magic(src):
- # Entity does not contain a wild card so just a simple one to one link operation.
- dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
- # dest & src are absolute paths at this point. Make sure the target of
- # the symlink is relative in the context of the repo client checkout.
- relpath = os.path.relpath(src, os.path.dirname(dest))
- self.__linkIt(relpath, dest)
- else:
- dest = _SafeExpandPath(self.topdir, self.dest)
- # Entity contains a wild card.
- if os.path.exists(dest) and not platform_utils.isdir(dest):
- _error('Link error: src with wildcard, %s must be a directory', dest)
- else:
- for absSrcFile in glob.glob(src):
- # Create a releative path from source dir to destination dir
- absSrcDir = os.path.dirname(absSrcFile)
- relSrcDir = os.path.relpath(absSrcDir, dest)
-
- # Get the source file name
- srcFile = os.path.basename(absSrcFile)
-
- # Now form the final full paths to srcFile. They will be
- # absolute for the desintaiton and relative for the srouce.
- absDest = os.path.join(dest, srcFile)
- relSrc = os.path.join(relSrcDir, srcFile)
- self.__linkIt(relSrc, absDest)
+ # Now form the final full paths to srcFile. They will be
+                # absolute for the destination and relative for the source.
+ absDest = os.path.join(dest, srcFile)
+ relSrc = os.path.join(relSrcDir, srcFile)
+ self.__linkIt(relSrc, absDest)
class RemoteSpec(object):
-
- def __init__(self,
- name,
- url=None,
- pushUrl=None,
- review=None,
- revision=None,
- orig_name=None,
- fetchUrl=None):
- self.name = name
- self.url = url
- self.pushUrl = pushUrl
- self.review = review
- self.revision = revision
- self.orig_name = orig_name
- self.fetchUrl = fetchUrl
+ def __init__(
+ self,
+ name,
+ url=None,
+ pushUrl=None,
+ review=None,
+ revision=None,
+ orig_name=None,
+ fetchUrl=None,
+ ):
+ self.name = name
+ self.url = url
+ self.pushUrl = pushUrl
+ self.review = review
+ self.revision = revision
+ self.orig_name = orig_name
+ self.fetchUrl = fetchUrl
class Project(object):
- # These objects can be shared between several working trees.
- @property
- def shareable_dirs(self):
- """Return the shareable directories"""
- if self.UseAlternates:
- return ['hooks', 'rr-cache']
- else:
- return ['hooks', 'objects', 'rr-cache']
+ # These objects can be shared between several working trees.
+ @property
+ def shareable_dirs(self):
+ """Return the shareable directories"""
+ if self.UseAlternates:
+ return ["hooks", "rr-cache"]
+ else:
+ return ["hooks", "objects", "rr-cache"]
- def __init__(self,
- manifest,
- name,
- remote,
- gitdir,
- objdir,
- worktree,
- relpath,
- revisionExpr,
- revisionId,
- rebase=True,
- groups=None,
- sync_c=False,
- sync_s=False,
- sync_tags=True,
- clone_depth=None,
- upstream=None,
- parent=None,
- use_git_worktrees=False,
- is_derived=False,
- dest_branch=None,
- optimized_fetch=False,
- retry_fetches=0,
- old_revision=None):
- """Init a Project object.
-
- Args:
- manifest: The XmlManifest object.
- name: The `name` attribute of manifest.xml's project element.
- remote: RemoteSpec object specifying its remote's properties.
- gitdir: Absolute path of git directory.
- objdir: Absolute path of directory to store git objects.
- worktree: Absolute path of git working tree.
- relpath: Relative path of git working tree to repo's top directory.
- revisionExpr: The `revision` attribute of manifest.xml's project element.
- revisionId: git commit id for checking out.
- rebase: The `rebase` attribute of manifest.xml's project element.
- groups: The `groups` attribute of manifest.xml's project element.
- sync_c: The `sync-c` attribute of manifest.xml's project element.
- sync_s: The `sync-s` attribute of manifest.xml's project element.
- sync_tags: The `sync-tags` attribute of manifest.xml's project element.
- upstream: The `upstream` attribute of manifest.xml's project element.
- parent: The parent Project object.
- use_git_worktrees: Whether to use `git worktree` for this project.
- is_derived: False if the project was explicitly defined in the manifest;
- True if the project is a discovered submodule.
- dest_branch: The branch to which to push changes for review by default.
- optimized_fetch: If True, when a project is set to a sha1 revision, only
- fetch from the remote if the sha1 is not present locally.
- retry_fetches: Retry remote fetches n times upon receiving transient error
- with exponential backoff and jitter.
- old_revision: saved git commit id for open GITC projects.
- """
- self.client = self.manifest = manifest
- self.name = name
- self.remote = remote
- self.UpdatePaths(relpath, worktree, gitdir, objdir)
- self.SetRevision(revisionExpr, revisionId=revisionId)
-
- self.rebase = rebase
- self.groups = groups
- self.sync_c = sync_c
- self.sync_s = sync_s
- self.sync_tags = sync_tags
- self.clone_depth = clone_depth
- self.upstream = upstream
- self.parent = parent
- # NB: Do not use this setting in __init__ to change behavior so that the
- # manifest.git checkout can inspect & change it after instantiating. See
- # the XmlManifest init code for more info.
- self.use_git_worktrees = use_git_worktrees
- self.is_derived = is_derived
- self.optimized_fetch = optimized_fetch
- self.retry_fetches = max(0, retry_fetches)
- self.subprojects = []
-
- self.snapshots = {}
- self.copyfiles = []
- self.linkfiles = []
- self.annotations = []
- self.dest_branch = dest_branch
- self.old_revision = old_revision
-
- # This will be filled in if a project is later identified to be the
- # project containing repo hooks.
- self.enabled_repo_hooks = []
-
- def RelPath(self, local=True):
- """Return the path for the project relative to a manifest.
-
- Args:
- local: a boolean, if True, the path is relative to the local
- (sub)manifest. If false, the path is relative to the
- outermost manifest.
- """
- if local:
- return self.relpath
- return os.path.join(self.manifest.path_prefix, self.relpath)
-
- def SetRevision(self, revisionExpr, revisionId=None):
- """Set revisionId based on revision expression and id"""
- self.revisionExpr = revisionExpr
- if revisionId is None and revisionExpr and IsId(revisionExpr):
- self.revisionId = self.revisionExpr
- else:
- self.revisionId = revisionId
-
- def UpdatePaths(self, relpath, worktree, gitdir, objdir):
- """Update paths used by this project"""
- self.gitdir = gitdir.replace('\\', '/')
- self.objdir = objdir.replace('\\', '/')
- if worktree:
- self.worktree = os.path.normpath(worktree).replace('\\', '/')
- else:
- self.worktree = None
- self.relpath = relpath
-
- self.config = GitConfig.ForRepository(gitdir=self.gitdir,
- defaults=self.manifest.globalConfig)
-
- if self.worktree:
- self.work_git = self._GitGetByExec(self, bare=False, gitdir=self.gitdir)
- else:
- self.work_git = None
- self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir)
- self.bare_ref = GitRefs(self.gitdir)
- self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=self.objdir)
-
- @property
- def UseAlternates(self):
- """Whether git alternates are in use.
-
- This will be removed once migration to alternates is complete.
- """
- return _ALTERNATES or self.manifest.is_multimanifest
-
- @property
- def Derived(self):
- return self.is_derived
-
- @property
- def Exists(self):
- return platform_utils.isdir(self.gitdir) and platform_utils.isdir(self.objdir)
-
- @property
- def CurrentBranch(self):
- """Obtain the name of the currently checked out branch.
-
- The branch name omits the 'refs/heads/' prefix.
- None is returned if the project is on a detached HEAD, or if the work_git is
- otheriwse inaccessible (e.g. an incomplete sync).
- """
- try:
- b = self.work_git.GetHead()
- except NoManifestException:
- # If the local checkout is in a bad state, don't barf. Let the callers
- # process this like the head is unreadable.
- return None
- if b.startswith(R_HEADS):
- return b[len(R_HEADS):]
- return None
-
- def IsRebaseInProgress(self):
- return (os.path.exists(self.work_git.GetDotgitPath('rebase-apply')) or
- os.path.exists(self.work_git.GetDotgitPath('rebase-merge')) or
- os.path.exists(os.path.join(self.worktree, '.dotest')))
-
- def IsDirty(self, consider_untracked=True):
- """Is the working directory modified in some way?
- """
- self.work_git.update_index('-q',
- '--unmerged',
- '--ignore-missing',
- '--refresh')
- if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD):
- return True
- if self.work_git.DiffZ('diff-files'):
- return True
- if consider_untracked and self.UntrackedFiles():
- return True
- return False
-
- _userident_name = None
- _userident_email = None
-
- @property
- def UserName(self):
- """Obtain the user's personal name.
- """
- if self._userident_name is None:
- self._LoadUserIdentity()
- return self._userident_name
-
- @property
- def UserEmail(self):
- """Obtain the user's email address. This is very likely
- to be their Gerrit login.
- """
- if self._userident_email is None:
- self._LoadUserIdentity()
- return self._userident_email
-
- def _LoadUserIdentity(self):
- u = self.bare_git.var('GIT_COMMITTER_IDENT')
- m = re.compile("^(.*) <([^>]*)> ").match(u)
- if m:
- self._userident_name = m.group(1)
- self._userident_email = m.group(2)
- else:
- self._userident_name = ''
- self._userident_email = ''
-
- def GetRemote(self, name=None):
- """Get the configuration for a single remote.
-
- Defaults to the current project's remote.
- """
- if name is None:
- name = self.remote.name
- return self.config.GetRemote(name)
-
- def GetBranch(self, name):
- """Get the configuration for a single branch.
- """
- return self.config.GetBranch(name)
-
- def GetBranches(self):
- """Get all existing local branches.
- """
- current = self.CurrentBranch
- all_refs = self._allrefs
- heads = {}
-
- for name, ref_id in all_refs.items():
- if name.startswith(R_HEADS):
- name = name[len(R_HEADS):]
- b = self.GetBranch(name)
- b.current = name == current
- b.published = None
- b.revision = ref_id
- heads[name] = b
-
- for name, ref_id in all_refs.items():
- if name.startswith(R_PUB):
- name = name[len(R_PUB):]
- b = heads.get(name)
- if b:
- b.published = ref_id
-
- return heads
-
- def MatchesGroups(self, manifest_groups):
- """Returns true if the manifest groups specified at init should cause
- this project to be synced.
- Prefixing a manifest group with "-" inverts the meaning of a group.
- All projects are implicitly labelled with "all".
-
- labels are resolved in order. In the example case of
- project_groups: "all,group1,group2"
- manifest_groups: "-group1,group2"
- the project will be matched.
-
- The special manifest group "default" will match any project that
- does not have the special project group "notdefault"
- """
- default_groups = self.manifest.default_groups or ['default']
- expanded_manifest_groups = manifest_groups or default_groups
- expanded_project_groups = ['all'] + (self.groups or [])
- if 'notdefault' not in expanded_project_groups:
- expanded_project_groups += ['default']
-
- matched = False
- for group in expanded_manifest_groups:
- if group.startswith('-') and group[1:] in expanded_project_groups:
- matched = False
- elif group in expanded_project_groups:
- matched = True
-
- return matched
-
-# Status Display ##
- def UncommitedFiles(self, get_all=True):
- """Returns a list of strings, uncommitted files in the git tree.
-
- Args:
- get_all: a boolean, if True - get information about all different
- uncommitted files. If False - return as soon as any kind of
- uncommitted files is detected.
- """
- details = []
- self.work_git.update_index('-q',
- '--unmerged',
- '--ignore-missing',
- '--refresh')
- if self.IsRebaseInProgress():
- details.append("rebase in progress")
- if not get_all:
- return details
-
- changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys()
- if changes:
- details.extend(changes)
- if not get_all:
- return details
-
- changes = self.work_git.DiffZ('diff-files').keys()
- if changes:
- details.extend(changes)
- if not get_all:
- return details
-
- changes = self.UntrackedFiles()
- if changes:
- details.extend(changes)
-
- return details
-
- def UntrackedFiles(self):
- """Returns a list of strings, untracked files in the git tree."""
- return self.work_git.LsOthers()
-
- def HasChanges(self):
- """Returns true if there are uncommitted changes.
- """
- return bool(self.UncommitedFiles(get_all=False))
-
- def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False):
- """Prints the status of the repository to stdout.
-
- Args:
- output_redir: If specified, redirect the output to this object.
- quiet: If True then only print the project name. Do not print
- the modified files, branch name, etc.
- local: a boolean, if True, the path is relative to the local
- (sub)manifest. If false, the path is relative to the
- outermost manifest.
- """
- if not platform_utils.isdir(self.worktree):
- if output_redir is None:
- output_redir = sys.stdout
- print(file=output_redir)
- print('project %s/' % self.RelPath(local), file=output_redir)
- print(' missing (run "repo sync")', file=output_redir)
- return
-
- self.work_git.update_index('-q',
- '--unmerged',
- '--ignore-missing',
- '--refresh')
- rb = self.IsRebaseInProgress()
- di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
- df = self.work_git.DiffZ('diff-files')
- do = self.work_git.LsOthers()
- if not rb and not di and not df and not do and not self.CurrentBranch:
- return 'CLEAN'
-
- out = StatusColoring(self.config)
- if output_redir is not None:
- out.redirect(output_redir)
- out.project('project %-40s', self.RelPath(local) + '/ ')
-
- if quiet:
- out.nl()
- return 'DIRTY'
-
- branch = self.CurrentBranch
- if branch is None:
- out.nobranch('(*** NO BRANCH ***)')
- else:
- out.branch('branch %s', branch)
- out.nl()
-
- if rb:
- out.important('prior sync failed; rebase still in progress')
- out.nl()
-
- paths = list()
- paths.extend(di.keys())
- paths.extend(df.keys())
- paths.extend(do)
-
- for p in sorted(set(paths)):
- try:
- i = di[p]
- except KeyError:
- i = None
-
- try:
- f = df[p]
- except KeyError:
- f = None
-
- if i:
- i_status = i.status.upper()
- else:
- i_status = '-'
-
- if f:
- f_status = f.status.lower()
- else:
- f_status = '-'
-
- if i and i.src_path:
- line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
- i.src_path, p, i.level)
- else:
- line = ' %s%s\t%s' % (i_status, f_status, p)
-
- if i and not f:
- out.added('%s', line)
- elif (i and f) or (not i and f):
- out.changed('%s', line)
- elif not i and not f:
- out.untracked('%s', line)
- else:
- out.write('%s', line)
- out.nl()
-
- return 'DIRTY'
-
- def PrintWorkTreeDiff(self, absolute_paths=False, output_redir=None,
- local=False):
- """Prints the status of the repository to stdout.
- """
- out = DiffColoring(self.config)
- if output_redir:
- out.redirect(output_redir)
- cmd = ['diff']
- if out.is_on:
- cmd.append('--color')
- cmd.append(HEAD)
- if absolute_paths:
- cmd.append('--src-prefix=a/%s/' % self.RelPath(local))
- cmd.append('--dst-prefix=b/%s/' % self.RelPath(local))
- cmd.append('--')
- try:
- p = GitCommand(self,
- cmd,
- capture_stdout=True,
- capture_stderr=True)
- p.Wait()
- except GitError as e:
- out.nl()
- out.project('project %s/' % self.RelPath(local))
- out.nl()
- out.fail('%s', str(e))
- out.nl()
- return False
- if p.stdout:
- out.nl()
- out.project('project %s/' % self.RelPath(local))
- out.nl()
- out.write('%s', p.stdout)
- return p.Wait() == 0
-
-# Publish / Upload ##
- def WasPublished(self, branch, all_refs=None):
- """Was the branch published (uploaded) for code review?
- If so, returns the SHA-1 hash of the last published
- state for the branch.
- """
- key = R_PUB + branch
- if all_refs is None:
- try:
- return self.bare_git.rev_parse(key)
- except GitError:
- return None
- else:
- try:
- return all_refs[key]
- except KeyError:
- return None
-
- def CleanPublishedCache(self, all_refs=None):
- """Prunes any stale published refs.
- """
- if all_refs is None:
- all_refs = self._allrefs
- heads = set()
- canrm = {}
- for name, ref_id in all_refs.items():
- if name.startswith(R_HEADS):
- heads.add(name)
- elif name.startswith(R_PUB):
- canrm[name] = ref_id
-
- for name, ref_id in canrm.items():
- n = name[len(R_PUB):]
- if R_HEADS + n not in heads:
- self.bare_git.DeleteRef(name, ref_id)
-
- def GetUploadableBranches(self, selected_branch=None):
- """List any branches which can be uploaded for review.
- """
- heads = {}
- pubed = {}
-
- for name, ref_id in self._allrefs.items():
- if name.startswith(R_HEADS):
- heads[name[len(R_HEADS):]] = ref_id
- elif name.startswith(R_PUB):
- pubed[name[len(R_PUB):]] = ref_id
-
- ready = []
- for branch, ref_id in heads.items():
- if branch in pubed and pubed[branch] == ref_id:
- continue
- if selected_branch and branch != selected_branch:
- continue
-
- rb = self.GetUploadableBranch(branch)
- if rb:
- ready.append(rb)
- return ready
-
- def GetUploadableBranch(self, branch_name):
- """Get a single uploadable branch, or None.
- """
- branch = self.GetBranch(branch_name)
- base = branch.LocalMerge
- if branch.LocalMerge:
- rb = ReviewableBranch(self, branch, base)
- if rb.commits:
- return rb
- return None
-
- def UploadForReview(self, branch=None,
- people=([], []),
- dryrun=False,
- auto_topic=False,
- hashtags=(),
- labels=(),
- private=False,
- notify=None,
- wip=False,
- ready=False,
- dest_branch=None,
- validate_certs=True,
- push_options=None):
- """Uploads the named branch for code review.
- """
- if branch is None:
- branch = self.CurrentBranch
- if branch is None:
- raise GitError('not currently on a branch')
-
- branch = self.GetBranch(branch)
- if not branch.LocalMerge:
- raise GitError('branch %s does not track a remote' % branch.name)
- if not branch.remote.review:
- raise GitError('remote %s has no review url' % branch.remote.name)
-
- # Basic validity check on label syntax.
- for label in labels:
- if not re.match(r'^.+[+-][0-9]+$', label):
- raise UploadError(
- f'invalid label syntax "{label}": labels use forms like '
- 'CodeReview+1 or Verified-1')
-
- if dest_branch is None:
- dest_branch = self.dest_branch
- if dest_branch is None:
- dest_branch = branch.merge
- if not dest_branch.startswith(R_HEADS):
- dest_branch = R_HEADS + dest_branch
-
- if not branch.remote.projectname:
- branch.remote.projectname = self.name
- branch.remote.Save()
-
- url = branch.remote.ReviewUrl(self.UserEmail, validate_certs)
- if url is None:
- raise UploadError('review not configured')
- cmd = ['push']
- if dryrun:
- cmd.append('-n')
-
- if url.startswith('ssh://'):
- cmd.append('--receive-pack=gerrit receive-pack')
-
- for push_option in (push_options or []):
- cmd.append('-o')
- cmd.append(push_option)
-
- cmd.append(url)
-
- if dest_branch.startswith(R_HEADS):
- dest_branch = dest_branch[len(R_HEADS):]
-
- ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
- opts = []
- if auto_topic:
- opts += ['topic=' + branch.name]
- opts += ['t=%s' % p for p in hashtags]
- # NB: No need to encode labels as they've been validated above.
- opts += ['l=%s' % p for p in labels]
-
- opts += ['r=%s' % p for p in people[0]]
- opts += ['cc=%s' % p for p in people[1]]
- if notify:
- opts += ['notify=' + notify]
- if private:
- opts += ['private']
- if wip:
- opts += ['wip']
- if ready:
- opts += ['ready']
- if opts:
- ref_spec = ref_spec + '%' + ','.join(opts)
- cmd.append(ref_spec)
-
- if GitCommand(self, cmd, bare=True).Wait() != 0:
- raise UploadError('Upload failed')
-
- if not dryrun:
- msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
- self.bare_git.UpdateRef(R_PUB + branch.name,
- R_HEADS + branch.name,
- message=msg)
-
-# Sync ##
- def _ExtractArchive(self, tarpath, path=None):
- """Extract the given tar on its current location
-
- Args:
- - tarpath: The path to the actual tar file
-
- """
- try:
- with tarfile.open(tarpath, 'r') as tar:
- tar.extractall(path=path)
- return True
- except (IOError, tarfile.TarError) as e:
- _error("Cannot extract archive %s: %s", tarpath, str(e))
- return False
-
- def Sync_NetworkHalf(self,
- quiet=False,
- verbose=False,
- output_redir=None,
- is_new=None,
- current_branch_only=None,
- force_sync=False,
- clone_bundle=True,
- tags=None,
- archive=False,
- optimized_fetch=False,
- retry_fetches=0,
- prune=False,
- submodules=False,
- ssh_proxy=None,
- clone_filter=None,
- partial_clone_exclude=set()):
- """Perform only the network IO portion of the sync process.
- Local working directory/branch state is not affected.
- """
- if archive and not isinstance(self, MetaProject):
- if self.remote.url.startswith(('http://', 'https://')):
- _error("%s: Cannot fetch archives from http/https remotes.", self.name)
- return SyncNetworkHalfResult(False, False)
-
- name = self.relpath.replace('\\', '/')
- name = name.replace('/', '_')
- tarpath = '%s.tar' % name
- topdir = self.manifest.topdir
-
- try:
- self._FetchArchive(tarpath, cwd=topdir)
- except GitError as e:
- _error('%s', e)
- return SyncNetworkHalfResult(False, False)
-
- # From now on, we only need absolute tarpath
- tarpath = os.path.join(topdir, tarpath)
-
- if not self._ExtractArchive(tarpath, path=topdir):
- return SyncNetworkHalfResult(False, True)
- try:
- platform_utils.remove(tarpath)
- except OSError as e:
- _warn("Cannot remove archive %s: %s", tarpath, str(e))
- self._CopyAndLinkFiles()
- return SyncNetworkHalfResult(True, True)
-
- # If the shared object dir already exists, don't try to rebootstrap with a
- # clone bundle download. We should have the majority of objects already.
- if clone_bundle and os.path.exists(self.objdir):
- clone_bundle = False
-
- if self.name in partial_clone_exclude:
- clone_bundle = True
- clone_filter = None
-
- if is_new is None:
- is_new = not self.Exists
- if is_new:
- self._InitGitDir(force_sync=force_sync, quiet=quiet)
- else:
- self._UpdateHooks(quiet=quiet)
- self._InitRemote()
-
- if self.UseAlternates:
- # If gitdir/objects is a symlink, migrate it from the old layout.
- gitdir_objects = os.path.join(self.gitdir, 'objects')
- if platform_utils.islink(gitdir_objects):
- platform_utils.remove(gitdir_objects, missing_ok=True)
- gitdir_alt = os.path.join(self.gitdir, 'objects/info/alternates')
- if not os.path.exists(gitdir_alt):
- os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True)
- _lwrite(gitdir_alt, os.path.join(
- os.path.relpath(self.objdir, gitdir_objects), 'objects') + '\n')
-
- if is_new:
- alt = os.path.join(self.objdir, 'objects/info/alternates')
- try:
- with open(alt) as fd:
- # This works for both absolute and relative alternate directories.
- alt_dir = os.path.join(self.objdir, 'objects', fd.readline().rstrip())
- except IOError:
- alt_dir = None
- else:
- alt_dir = None
-
- if (clone_bundle
- and alt_dir is None
- and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)):
- is_new = False
-
- if current_branch_only is None:
- if self.sync_c:
- current_branch_only = True
- elif not self.manifest._loaded:
- # Manifest cannot check defaults until it syncs.
- current_branch_only = False
- elif self.manifest.default.sync_c:
- current_branch_only = True
-
- if tags is None:
- tags = self.sync_tags
-
- if self.clone_depth:
- depth = self.clone_depth
- else:
- depth = self.manifest.manifestProject.depth
-
- # See if we can skip the network fetch entirely.
- remote_fetched = False
- if not (optimized_fetch and
- (ID_RE.match(self.revisionExpr) and
- self._CheckForImmutableRevision())):
- remote_fetched = True
- if not self._RemoteFetch(
- initial=is_new,
- quiet=quiet, verbose=verbose, output_redir=output_redir,
- alt_dir=alt_dir, current_branch_only=current_branch_only,
- tags=tags, prune=prune, depth=depth,
- submodules=submodules, force_sync=force_sync,
- ssh_proxy=ssh_proxy,
- clone_filter=clone_filter, retry_fetches=retry_fetches):
- return SyncNetworkHalfResult(False, remote_fetched)
-
- mp = self.manifest.manifestProject
- dissociate = mp.dissociate
- if dissociate:
- alternates_file = os.path.join(self.objdir, 'objects/info/alternates')
- if os.path.exists(alternates_file):
- cmd = ['repack', '-a', '-d']
- p = GitCommand(self, cmd, bare=True, capture_stdout=bool(output_redir),
- merge_output=bool(output_redir))
- if p.stdout and output_redir:
- output_redir.write(p.stdout)
- if p.Wait() != 0:
- return SyncNetworkHalfResult(False, remote_fetched)
- platform_utils.remove(alternates_file)
-
- if self.worktree:
- self._InitMRef()
- else:
- self._InitMirrorHead()
- platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'),
- missing_ok=True)
- return SyncNetworkHalfResult(True, remote_fetched)
-
- def PostRepoUpgrade(self):
- self._InitHooks()
-
- def _CopyAndLinkFiles(self):
- if self.client.isGitcClient:
- return
- for copyfile in self.copyfiles:
- copyfile._Copy()
- for linkfile in self.linkfiles:
- linkfile._Link()
-
- def GetCommitRevisionId(self):
- """Get revisionId of a commit.
-
- Use this method instead of GetRevisionId to get the id of the commit rather
- than the id of the current git object (for example, a tag)
-
- """
- if not self.revisionExpr.startswith(R_TAGS):
- return self.GetRevisionId(self._allrefs)
-
- try:
- return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
- except GitError:
- raise ManifestInvalidRevisionError('revision %s in %s not found' %
- (self.revisionExpr, self.name))
-
- def GetRevisionId(self, all_refs=None):
- if self.revisionId:
- return self.revisionId
-
- rem = self.GetRemote()
- rev = rem.ToLocal(self.revisionExpr)
-
- if all_refs is not None and rev in all_refs:
- return all_refs[rev]
-
- try:
- return self.bare_git.rev_parse('--verify', '%s^0' % rev)
- except GitError:
- raise ManifestInvalidRevisionError('revision %s in %s not found' %
- (self.revisionExpr, self.name))
-
- def SetRevisionId(self, revisionId):
- if self.revisionExpr:
- self.upstream = self.revisionExpr
-
- self.revisionId = revisionId
-
- def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
- """Perform only the local IO portion of the sync process.
- Network access is not required.
- """
- if not os.path.exists(self.gitdir):
- syncbuf.fail(self,
- 'Cannot checkout %s due to missing network sync; Run '
- '`repo sync -n %s` first.' %
- (self.name, self.name))
- return
-
- self._InitWorkTree(force_sync=force_sync, submodules=submodules)
- all_refs = self.bare_ref.all
- self.CleanPublishedCache(all_refs)
- revid = self.GetRevisionId(all_refs)
-
- # Special case the root of the repo client checkout. Make sure it doesn't
- # contain files being checked out to dirs we don't allow.
- if self.relpath == '.':
- PROTECTED_PATHS = {'.repo'}
- paths = set(self.work_git.ls_tree('-z', '--name-only', '--', revid).split('\0'))
- bad_paths = paths & PROTECTED_PATHS
- if bad_paths:
- syncbuf.fail(self,
- 'Refusing to checkout project that writes to protected '
- 'paths: %s' % (', '.join(bad_paths),))
- return
-
- def _doff():
- self._FastForward(revid)
- self._CopyAndLinkFiles()
-
- def _dosubmodules():
- self._SyncSubmodules(quiet=True)
-
- head = self.work_git.GetHead()
- if head.startswith(R_HEADS):
- branch = head[len(R_HEADS):]
- try:
- head = all_refs[head]
- except KeyError:
- head = None
- else:
- branch = None
-
- if branch is None or syncbuf.detach_head:
- # Currently on a detached HEAD. The user is assumed to
- # not have any local modifications worth worrying about.
- #
- if self.IsRebaseInProgress():
- syncbuf.fail(self, _PriorSyncFailedError())
- return
-
- if head == revid:
- # No changes; don't do anything further.
- # Except if the head needs to be detached
- #
- if not syncbuf.detach_head:
- # The copy/linkfile config may have changed.
- self._CopyAndLinkFiles()
- return
- else:
- lost = self._revlist(not_rev(revid), HEAD)
- if lost:
- syncbuf.info(self, "discarding %d commits", len(lost))
-
- try:
- self._Checkout(revid, quiet=True)
- if submodules:
- self._SyncSubmodules(quiet=True)
- except GitError as e:
- syncbuf.fail(self, e)
- return
- self._CopyAndLinkFiles()
- return
-
- if head == revid:
- # No changes; don't do anything further.
- #
- # The copy/linkfile config may have changed.
- self._CopyAndLinkFiles()
- return
-
- branch = self.GetBranch(branch)
-
- if not branch.LocalMerge:
- # The current branch has no tracking configuration.
- # Jump off it to a detached HEAD.
- #
- syncbuf.info(self,
- "leaving %s; does not track upstream",
- branch.name)
- try:
- self._Checkout(revid, quiet=True)
- if submodules:
- self._SyncSubmodules(quiet=True)
- except GitError as e:
- syncbuf.fail(self, e)
- return
- self._CopyAndLinkFiles()
- return
-
- upstream_gain = self._revlist(not_rev(HEAD), revid)
-
- # See if we can perform a fast forward merge. This can happen if our
- # branch isn't in the exact same state as we last published.
- try:
- self.work_git.merge_base('--is-ancestor', HEAD, revid)
- # Skip the published logic.
- pub = False
- except GitError:
- pub = self.WasPublished(branch.name, all_refs)
-
- if pub:
- not_merged = self._revlist(not_rev(revid), pub)
- if not_merged:
- if upstream_gain:
- # The user has published this branch and some of those
- # commits are not yet merged upstream. We do not want
- # to rewrite the published commits so we punt.
- #
- syncbuf.fail(self,
- "branch %s is published (but not merged) and is now "
- "%d commits behind" % (branch.name, len(upstream_gain)))
- return
- elif pub == head:
- # All published commits are merged, and thus we are a
- # strict subset. We can fast-forward safely.
- #
- syncbuf.later1(self, _doff)
- if submodules:
- syncbuf.later1(self, _dosubmodules)
- return
-
- # Examine the local commits not in the remote. Find the
- # last one attributed to this user, if any.
- #
- local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
- last_mine = None
- cnt_mine = 0
- for commit in local_changes:
- commit_id, committer_email = commit.split(' ', 1)
- if committer_email == self.UserEmail:
- last_mine = commit_id
- cnt_mine += 1
-
- if not upstream_gain and cnt_mine == len(local_changes):
- # The copy/linkfile config may have changed.
- self._CopyAndLinkFiles()
- return
-
- if self.IsDirty(consider_untracked=False):
- syncbuf.fail(self, _DirtyError())
- return
-
- # If the upstream switched on us, warn the user.
- #
- if branch.merge != self.revisionExpr:
- if branch.merge and self.revisionExpr:
- syncbuf.info(self,
- 'manifest switched %s...%s',
- branch.merge,
- self.revisionExpr)
- elif branch.merge:
- syncbuf.info(self,
- 'manifest no longer tracks %s',
- branch.merge)
-
- if cnt_mine < len(local_changes):
- # Upstream rebased. Not everything in HEAD
- # was created by this user.
- #
- syncbuf.info(self,
- "discarding %d commits removed from upstream",
- len(local_changes) - cnt_mine)
-
- branch.remote = self.GetRemote()
- if not ID_RE.match(self.revisionExpr):
- # in case of manifest sync the revisionExpr might be a SHA1
- branch.merge = self.revisionExpr
- if not branch.merge.startswith('refs/'):
- branch.merge = R_HEADS + branch.merge
- branch.Save()
-
- if cnt_mine > 0 and self.rebase:
- def _docopyandlink():
- self._CopyAndLinkFiles()
-
- def _dorebase():
- self._Rebase(upstream='%s^1' % last_mine, onto=revid)
- syncbuf.later2(self, _dorebase)
- if submodules:
- syncbuf.later2(self, _dosubmodules)
- syncbuf.later2(self, _docopyandlink)
- elif local_changes:
- try:
- self._ResetHard(revid)
- if submodules:
- self._SyncSubmodules(quiet=True)
- self._CopyAndLinkFiles()
- except GitError as e:
- syncbuf.fail(self, e)
- return
- else:
- syncbuf.later1(self, _doff)
- if submodules:
- syncbuf.later1(self, _dosubmodules)
-
- def AddCopyFile(self, src, dest, topdir):
- """Mark |src| for copying to |dest| (relative to |topdir|).
-
- No filesystem changes occur here. Actual copying happens later on.
-
- Paths should have basic validation run on them before being queued.
- Further checking will be handled when the actual copy happens.
- """
- self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest))
-
- def AddLinkFile(self, src, dest, topdir):
- """Mark |dest| to create a symlink (relative to |topdir|) pointing to |src|.
-
- No filesystem changes occur here. Actual linking happens later on.
-
- Paths should have basic validation run on them before being queued.
- Further checking will be handled when the actual link happens.
- """
- self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest))
-
- def AddAnnotation(self, name, value, keep):
- self.annotations.append(Annotation(name, value, keep))
-
- def DownloadPatchSet(self, change_id, patch_id):
- """Download a single patch set of a single change to FETCH_HEAD.
- """
- remote = self.GetRemote()
-
- cmd = ['fetch', remote.name]
- cmd.append('refs/changes/%2.2d/%d/%d'
- % (change_id % 100, change_id, patch_id))
- if GitCommand(self, cmd, bare=True).Wait() != 0:
- return None
- return DownloadedChange(self,
- self.GetRevisionId(),
- change_id,
- patch_id,
- self.bare_git.rev_parse('FETCH_HEAD'))
-
- def DeleteWorktree(self, quiet=False, force=False):
- """Delete the source checkout and any other housekeeping tasks.
-
- This currently leaves behind the internal .repo/ cache state. This helps
- when switching branches or manifest changes get reverted as we don't have
- to redownload all the git objects. But we should do some GC at some point.
-
- Args:
- quiet: Whether to hide normal messages.
- force: Always delete tree even if dirty.
-
- Returns:
- True if the worktree was completely cleaned out.
- """
- if self.IsDirty():
- if force:
- print('warning: %s: Removing dirty project: uncommitted changes lost.' %
- (self.RelPath(local=False),), file=sys.stderr)
- else:
- print('error: %s: Cannot remove project: uncommitted changes are '
- 'present.\n' % (self.RelPath(local=False),), file=sys.stderr)
- return False
-
- if not quiet:
- print('%s: Deleting obsolete checkout.' % (self.RelPath(local=False),))
-
- # Unlock and delink from the main worktree. We don't use git's worktree
- # remove because it will recursively delete projects -- we handle that
- # ourselves below. https://crbug.com/git/48
- if self.use_git_worktrees:
- needle = platform_utils.realpath(self.gitdir)
- # Find the git worktree commondir under .repo/worktrees/.
- output = self.bare_git.worktree('list', '--porcelain').splitlines()[0]
- assert output.startswith('worktree '), output
- commondir = output[9:]
- # Walk each of the git worktrees to see where they point.
- configs = os.path.join(commondir, 'worktrees')
- for name in os.listdir(configs):
- gitdir = os.path.join(configs, name, 'gitdir')
- with open(gitdir) as fp:
- relpath = fp.read().strip()
- # Resolve the checkout path and see if it matches this project.
- fullpath = platform_utils.realpath(os.path.join(configs, name, relpath))
- if fullpath == needle:
- platform_utils.rmtree(os.path.join(configs, name))
-
- # Delete the .git directory first, so we're less likely to have a partially
- # working git repository around. There shouldn't be any git projects here,
- # so rmtree works.
-
- # Try to remove plain files first in case of git worktrees. If this fails
- # for any reason, we'll fall back to rmtree, and that'll display errors if
- # it can't remove things either.
- try:
- platform_utils.remove(self.gitdir)
- except OSError:
- pass
- try:
- platform_utils.rmtree(self.gitdir)
- except OSError as e:
- if e.errno != errno.ENOENT:
- print('error: %s: %s' % (self.gitdir, e), file=sys.stderr)
- print('error: %s: Failed to delete obsolete checkout; remove manually, '
- 'then run `repo sync -l`.' % (self.RelPath(local=False),),
- file=sys.stderr)
- return False
-
- # Delete everything under the worktree, except for directories that contain
- # another git project.
- dirs_to_remove = []
- failed = False
- for root, dirs, files in platform_utils.walk(self.worktree):
- for f in files:
- path = os.path.join(root, f)
- try:
- platform_utils.remove(path)
- except OSError as e:
- if e.errno != errno.ENOENT:
- print('error: %s: Failed to remove: %s' % (path, e), file=sys.stderr)
- failed = True
- dirs[:] = [d for d in dirs
- if not os.path.lexists(os.path.join(root, d, '.git'))]
- dirs_to_remove += [os.path.join(root, d) for d in dirs
- if os.path.join(root, d) not in dirs_to_remove]
- for d in reversed(dirs_to_remove):
- if platform_utils.islink(d):
- try:
- platform_utils.remove(d)
- except OSError as e:
- if e.errno != errno.ENOENT:
- print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr)
- failed = True
- elif not platform_utils.listdir(d):
- try:
- platform_utils.rmdir(d)
- except OSError as e:
- if e.errno != errno.ENOENT:
- print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr)
- failed = True
- if failed:
- print('error: %s: Failed to delete obsolete checkout.' % (self.RelPath(local=False),),
- file=sys.stderr)
- print(' Remove manually, then run `repo sync -l`.', file=sys.stderr)
- return False
-
- # Try deleting parent dirs if they are empty.
- path = self.worktree
- while path != self.manifest.topdir:
- try:
- platform_utils.rmdir(path)
- except OSError as e:
- if e.errno != errno.ENOENT:
- break
- path = os.path.dirname(path)
-
- return True
-
-# Branch Management ##
- def StartBranch(self, name, branch_merge='', revision=None):
- """Create a new branch off the manifest's revision.
- """
- if not branch_merge:
- branch_merge = self.revisionExpr
- head = self.work_git.GetHead()
- if head == (R_HEADS + name):
- return True
-
- all_refs = self.bare_ref.all
- if R_HEADS + name in all_refs:
- return GitCommand(self, ['checkout', '-q', name, '--']).Wait() == 0
-
- branch = self.GetBranch(name)
- branch.remote = self.GetRemote()
- branch.merge = branch_merge
- if not branch.merge.startswith('refs/') and not ID_RE.match(branch_merge):
- branch.merge = R_HEADS + branch_merge
-
- if revision is None:
- revid = self.GetRevisionId(all_refs)
- else:
- revid = self.work_git.rev_parse(revision)
-
- if head.startswith(R_HEADS):
- try:
- head = all_refs[head]
- except KeyError:
- head = None
- if revid and head and revid == head:
- ref = R_HEADS + name
- self.work_git.update_ref(ref, revid)
- self.work_git.symbolic_ref(HEAD, ref)
- branch.Save()
- return True
-
- if GitCommand(self, ['checkout', '-q', '-b', branch.name, revid]).Wait() == 0:
- branch.Save()
- return True
- return False
-
- def CheckoutBranch(self, name):
- """Checkout a local topic branch.
+ def __init__(
+ self,
+ manifest,
+ name,
+ remote,
+ gitdir,
+ objdir,
+ worktree,
+ relpath,
+ revisionExpr,
+ revisionId,
+ rebase=True,
+ groups=None,
+ sync_c=False,
+ sync_s=False,
+ sync_tags=True,
+ clone_depth=None,
+ upstream=None,
+ parent=None,
+ use_git_worktrees=False,
+ is_derived=False,
+ dest_branch=None,
+ optimized_fetch=False,
+ retry_fetches=0,
+ old_revision=None,
+ ):
+ """Init a Project object.
Args:
- name: The name of the branch to checkout.
+ manifest: The XmlManifest object.
+ name: The `name` attribute of manifest.xml's project element.
+ remote: RemoteSpec object specifying its remote's properties.
+ gitdir: Absolute path of git directory.
+ objdir: Absolute path of directory to store git objects.
+ worktree: Absolute path of git working tree.
+ relpath: Relative path of git working tree to repo's top directory.
+ revisionExpr: The `revision` attribute of manifest.xml's project
+ element.
+ revisionId: git commit id for checking out.
+ rebase: The `rebase` attribute of manifest.xml's project element.
+ groups: The `groups` attribute of manifest.xml's project element.
+ sync_c: The `sync-c` attribute of manifest.xml's project element.
+ sync_s: The `sync-s` attribute of manifest.xml's project element.
+ sync_tags: The `sync-tags` attribute of manifest.xml's project
+ element.
+ upstream: The `upstream` attribute of manifest.xml's project
+ element.
+ parent: The parent Project object.
+ use_git_worktrees: Whether to use `git worktree` for this project.
+ is_derived: False if the project was explicitly defined in the
+ manifest; True if the project is a discovered submodule.
+ dest_branch: The branch to which to push changes for review by
+ default.
+ optimized_fetch: If True, when a project is set to a sha1 revision,
+ only fetch from the remote if the sha1 is not present locally.
+ retry_fetches: Retry remote fetches n times upon receiving transient
+ error with exponential backoff and jitter.
+ old_revision: saved git commit id for open GITC projects.
+ """
+ self.client = self.manifest = manifest
+ self.name = name
+ self.remote = remote
+ self.UpdatePaths(relpath, worktree, gitdir, objdir)
+ self.SetRevision(revisionExpr, revisionId=revisionId)
+
+ self.rebase = rebase
+ self.groups = groups
+ self.sync_c = sync_c
+ self.sync_s = sync_s
+ self.sync_tags = sync_tags
+ self.clone_depth = clone_depth
+ self.upstream = upstream
+ self.parent = parent
+ # NB: Do not use this setting in __init__ to change behavior so that the
+ # manifest.git checkout can inspect & change it after instantiating.
+ # See the XmlManifest init code for more info.
+ self.use_git_worktrees = use_git_worktrees
+ self.is_derived = is_derived
+ self.optimized_fetch = optimized_fetch
+ self.retry_fetches = max(0, retry_fetches)
+ self.subprojects = []
+
+ self.snapshots = {}
+ self.copyfiles = []
+ self.linkfiles = []
+ self.annotations = []
+ self.dest_branch = dest_branch
+ self.old_revision = old_revision
+
+ # This will be filled in if a project is later identified to be the
+ # project containing repo hooks.
+ self.enabled_repo_hooks = []
+
+ def RelPath(self, local=True):
+ """Return the path for the project relative to a manifest.
+
+ Args:
+ local: a boolean, if True, the path is relative to the local
+ (sub)manifest. If false, the path is relative to the outermost
+ manifest.
+ """
+ if local:
+ return self.relpath
+ return os.path.join(self.manifest.path_prefix, self.relpath)
+
+ def SetRevision(self, revisionExpr, revisionId=None):
+ """Set revisionId based on revision expression and id"""
+ self.revisionExpr = revisionExpr
+ if revisionId is None and revisionExpr and IsId(revisionExpr):
+ self.revisionId = self.revisionExpr
+ else:
+ self.revisionId = revisionId
+
+ def UpdatePaths(self, relpath, worktree, gitdir, objdir):
+ """Update paths used by this project"""
+ self.gitdir = gitdir.replace("\\", "/")
+ self.objdir = objdir.replace("\\", "/")
+ if worktree:
+ self.worktree = os.path.normpath(worktree).replace("\\", "/")
+ else:
+ self.worktree = None
+ self.relpath = relpath
+
+ self.config = GitConfig.ForRepository(
+ gitdir=self.gitdir, defaults=self.manifest.globalConfig
+ )
+
+ if self.worktree:
+ self.work_git = self._GitGetByExec(
+ self, bare=False, gitdir=self.gitdir
+ )
+ else:
+ self.work_git = None
+ self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir)
+ self.bare_ref = GitRefs(self.gitdir)
+ self.bare_objdir = self._GitGetByExec(
+ self, bare=True, gitdir=self.objdir
+ )
+
+ @property
+ def UseAlternates(self):
+ """Whether git alternates are in use.
+
+ This will be removed once migration to alternates is complete.
+ """
+ return _ALTERNATES or self.manifest.is_multimanifest
+
+ @property
+ def Derived(self):
+ return self.is_derived
+
+ @property
+ def Exists(self):
+ return platform_utils.isdir(self.gitdir) and platform_utils.isdir(
+ self.objdir
+ )
+
+ @property
+ def CurrentBranch(self):
+ """Obtain the name of the currently checked out branch.
+
+ The branch name omits the 'refs/heads/' prefix.
+ None is returned if the project is on a detached HEAD, or if the
+        work_git is otherwise inaccessible (e.g. an incomplete sync).
+ """
+ try:
+ b = self.work_git.GetHead()
+ except NoManifestException:
+ # If the local checkout is in a bad state, don't barf. Let the
+ # callers process this like the head is unreadable.
+ return None
+ if b.startswith(R_HEADS):
+ return b[len(R_HEADS) :]
+ return None
+
+ def IsRebaseInProgress(self):
+ return (
+ os.path.exists(self.work_git.GetDotgitPath("rebase-apply"))
+ or os.path.exists(self.work_git.GetDotgitPath("rebase-merge"))
+ or os.path.exists(os.path.join(self.worktree, ".dotest"))
+ )
+
+ def IsDirty(self, consider_untracked=True):
+ """Is the working directory modified in some way?"""
+ self.work_git.update_index(
+ "-q", "--unmerged", "--ignore-missing", "--refresh"
+ )
+ if self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD):
+ return True
+ if self.work_git.DiffZ("diff-files"):
+ return True
+ if consider_untracked and self.UntrackedFiles():
+ return True
+ return False
+
+ _userident_name = None
+ _userident_email = None
+
+ @property
+ def UserName(self):
+ """Obtain the user's personal name."""
+ if self._userident_name is None:
+ self._LoadUserIdentity()
+ return self._userident_name
+
+ @property
+ def UserEmail(self):
+ """Obtain the user's email address. This is very likely
+ to be their Gerrit login.
+ """
+ if self._userident_email is None:
+ self._LoadUserIdentity()
+ return self._userident_email
+
+ def _LoadUserIdentity(self):
+ u = self.bare_git.var("GIT_COMMITTER_IDENT")
+ m = re.compile("^(.*) <([^>]*)> ").match(u)
+ if m:
+ self._userident_name = m.group(1)
+ self._userident_email = m.group(2)
+ else:
+ self._userident_name = ""
+ self._userident_email = ""
+
+ def GetRemote(self, name=None):
+ """Get the configuration for a single remote.
+
+ Defaults to the current project's remote.
+ """
+ if name is None:
+ name = self.remote.name
+ return self.config.GetRemote(name)
+
+ def GetBranch(self, name):
+ """Get the configuration for a single branch."""
+ return self.config.GetBranch(name)
+
+ def GetBranches(self):
+ """Get all existing local branches."""
+ current = self.CurrentBranch
+ all_refs = self._allrefs
+ heads = {}
+
+ for name, ref_id in all_refs.items():
+ if name.startswith(R_HEADS):
+ name = name[len(R_HEADS) :]
+ b = self.GetBranch(name)
+ b.current = name == current
+ b.published = None
+ b.revision = ref_id
+ heads[name] = b
+
+ for name, ref_id in all_refs.items():
+ if name.startswith(R_PUB):
+ name = name[len(R_PUB) :]
+ b = heads.get(name)
+ if b:
+ b.published = ref_id
+
+ return heads
+
+ def MatchesGroups(self, manifest_groups):
+ """Returns true if the manifest groups specified at init should cause
+ this project to be synced.
+ Prefixing a manifest group with "-" inverts the meaning of a group.
+ All projects are implicitly labelled with "all".
+
+ labels are resolved in order. In the example case of
+ project_groups: "all,group1,group2"
+ manifest_groups: "-group1,group2"
+ the project will be matched.
+
+ The special manifest group "default" will match any project that
+ does not have the special project group "notdefault"
+ """
+ default_groups = self.manifest.default_groups or ["default"]
+ expanded_manifest_groups = manifest_groups or default_groups
+ expanded_project_groups = ["all"] + (self.groups or [])
+ if "notdefault" not in expanded_project_groups:
+ expanded_project_groups += ["default"]
+
+ matched = False
+ for group in expanded_manifest_groups:
+ if group.startswith("-") and group[1:] in expanded_project_groups:
+ matched = False
+ elif group in expanded_project_groups:
+ matched = True
+
+ return matched
+
+ def UncommitedFiles(self, get_all=True):
+ """Returns a list of strings, uncommitted files in the git tree.
+
+ Args:
+ get_all: a boolean, if True - get information about all different
+ uncommitted files. If False - return as soon as any kind of
+                uncommitted files are detected.
+ """
+ details = []
+ self.work_git.update_index(
+ "-q", "--unmerged", "--ignore-missing", "--refresh"
+ )
+ if self.IsRebaseInProgress():
+ details.append("rebase in progress")
+ if not get_all:
+ return details
+
+ changes = self.work_git.DiffZ("diff-index", "--cached", HEAD).keys()
+ if changes:
+ details.extend(changes)
+ if not get_all:
+ return details
+
+ changes = self.work_git.DiffZ("diff-files").keys()
+ if changes:
+ details.extend(changes)
+ if not get_all:
+ return details
+
+ changes = self.UntrackedFiles()
+ if changes:
+ details.extend(changes)
+
+ return details
+
+ def UntrackedFiles(self):
+ """Returns a list of strings, untracked files in the git tree."""
+ return self.work_git.LsOthers()
+
+ def HasChanges(self):
+ """Returns true if there are uncommitted changes."""
+ return bool(self.UncommitedFiles(get_all=False))
+
+ def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False):
+ """Prints the status of the repository to stdout.
+
+ Args:
+ output_redir: If specified, redirect the output to this object.
+ quiet: If True then only print the project name. Do not print
+ the modified files, branch name, etc.
+ local: a boolean, if True, the path is relative to the local
+ (sub)manifest. If false, the path is relative to the outermost
+ manifest.
+ """
+ if not platform_utils.isdir(self.worktree):
+ if output_redir is None:
+ output_redir = sys.stdout
+ print(file=output_redir)
+ print("project %s/" % self.RelPath(local), file=output_redir)
+ print(' missing (run "repo sync")', file=output_redir)
+ return
+
+ self.work_git.update_index(
+ "-q", "--unmerged", "--ignore-missing", "--refresh"
+ )
+ rb = self.IsRebaseInProgress()
+ di = self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD)
+ df = self.work_git.DiffZ("diff-files")
+ do = self.work_git.LsOthers()
+ if not rb and not di and not df and not do and not self.CurrentBranch:
+ return "CLEAN"
+
+ out = StatusColoring(self.config)
+ if output_redir is not None:
+ out.redirect(output_redir)
+ out.project("project %-40s", self.RelPath(local) + "/ ")
+
+ if quiet:
+ out.nl()
+ return "DIRTY"
+
+ branch = self.CurrentBranch
+ if branch is None:
+ out.nobranch("(*** NO BRANCH ***)")
+ else:
+ out.branch("branch %s", branch)
+ out.nl()
+
+ if rb:
+ out.important("prior sync failed; rebase still in progress")
+ out.nl()
+
+ paths = list()
+ paths.extend(di.keys())
+ paths.extend(df.keys())
+ paths.extend(do)
+
+ for p in sorted(set(paths)):
+ try:
+ i = di[p]
+ except KeyError:
+ i = None
+
+ try:
+ f = df[p]
+ except KeyError:
+ f = None
+
+ if i:
+ i_status = i.status.upper()
+ else:
+ i_status = "-"
+
+ if f:
+ f_status = f.status.lower()
+ else:
+ f_status = "-"
+
+ if i and i.src_path:
+ line = " %s%s\t%s => %s (%s%%)" % (
+ i_status,
+ f_status,
+ i.src_path,
+ p,
+ i.level,
+ )
+ else:
+ line = " %s%s\t%s" % (i_status, f_status, p)
+
+ if i and not f:
+ out.added("%s", line)
+ elif (i and f) or (not i and f):
+ out.changed("%s", line)
+ elif not i and not f:
+ out.untracked("%s", line)
+ else:
+ out.write("%s", line)
+ out.nl()
+
+ return "DIRTY"
+
+ def PrintWorkTreeDiff(
+ self, absolute_paths=False, output_redir=None, local=False
+ ):
+ """Prints the status of the repository to stdout."""
+ out = DiffColoring(self.config)
+ if output_redir:
+ out.redirect(output_redir)
+ cmd = ["diff"]
+ if out.is_on:
+ cmd.append("--color")
+ cmd.append(HEAD)
+ if absolute_paths:
+ cmd.append("--src-prefix=a/%s/" % self.RelPath(local))
+ cmd.append("--dst-prefix=b/%s/" % self.RelPath(local))
+ cmd.append("--")
+ try:
+ p = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
+ p.Wait()
+ except GitError as e:
+ out.nl()
+ out.project("project %s/" % self.RelPath(local))
+ out.nl()
+ out.fail("%s", str(e))
+ out.nl()
+ return False
+ if p.stdout:
+ out.nl()
+ out.project("project %s/" % self.RelPath(local))
+ out.nl()
+ out.write("%s", p.stdout)
+ return p.Wait() == 0
+
+ def WasPublished(self, branch, all_refs=None):
+ """Was the branch published (uploaded) for code review?
+ If so, returns the SHA-1 hash of the last published
+ state for the branch.
+ """
+ key = R_PUB + branch
+ if all_refs is None:
+ try:
+ return self.bare_git.rev_parse(key)
+ except GitError:
+ return None
+ else:
+ try:
+ return all_refs[key]
+ except KeyError:
+ return None
+
+ def CleanPublishedCache(self, all_refs=None):
+ """Prunes any stale published refs."""
+ if all_refs is None:
+ all_refs = self._allrefs
+ heads = set()
+ canrm = {}
+ for name, ref_id in all_refs.items():
+ if name.startswith(R_HEADS):
+ heads.add(name)
+ elif name.startswith(R_PUB):
+ canrm[name] = ref_id
+
+ for name, ref_id in canrm.items():
+ n = name[len(R_PUB) :]
+ if R_HEADS + n not in heads:
+ self.bare_git.DeleteRef(name, ref_id)
+
+ def GetUploadableBranches(self, selected_branch=None):
+ """List any branches which can be uploaded for review."""
+ heads = {}
+ pubed = {}
+
+ for name, ref_id in self._allrefs.items():
+ if name.startswith(R_HEADS):
+ heads[name[len(R_HEADS) :]] = ref_id
+ elif name.startswith(R_PUB):
+ pubed[name[len(R_PUB) :]] = ref_id
+
+ ready = []
+ for branch, ref_id in heads.items():
+ if branch in pubed and pubed[branch] == ref_id:
+ continue
+ if selected_branch and branch != selected_branch:
+ continue
+
+ rb = self.GetUploadableBranch(branch)
+ if rb:
+ ready.append(rb)
+ return ready
+
+ def GetUploadableBranch(self, branch_name):
+ """Get a single uploadable branch, or None."""
+ branch = self.GetBranch(branch_name)
+ base = branch.LocalMerge
+ if branch.LocalMerge:
+ rb = ReviewableBranch(self, branch, base)
+ if rb.commits:
+ return rb
+ return None
+
+ def UploadForReview(
+ self,
+ branch=None,
+ people=([], []),
+ dryrun=False,
+ auto_topic=False,
+ hashtags=(),
+ labels=(),
+ private=False,
+ notify=None,
+ wip=False,
+ ready=False,
+ dest_branch=None,
+ validate_certs=True,
+ push_options=None,
+ ):
+ """Uploads the named branch for code review."""
+ if branch is None:
+ branch = self.CurrentBranch
+ if branch is None:
+ raise GitError("not currently on a branch")
+
+ branch = self.GetBranch(branch)
+ if not branch.LocalMerge:
+ raise GitError("branch %s does not track a remote" % branch.name)
+ if not branch.remote.review:
+ raise GitError("remote %s has no review url" % branch.remote.name)
+
+ # Basic validity check on label syntax.
+ for label in labels:
+ if not re.match(r"^.+[+-][0-9]+$", label):
+ raise UploadError(
+ f'invalid label syntax "{label}": labels use forms like '
+ "CodeReview+1 or Verified-1"
+ )
+
+ if dest_branch is None:
+ dest_branch = self.dest_branch
+ if dest_branch is None:
+ dest_branch = branch.merge
+ if not dest_branch.startswith(R_HEADS):
+ dest_branch = R_HEADS + dest_branch
+
+ if not branch.remote.projectname:
+ branch.remote.projectname = self.name
+ branch.remote.Save()
+
+ url = branch.remote.ReviewUrl(self.UserEmail, validate_certs)
+ if url is None:
+ raise UploadError("review not configured")
+ cmd = ["push"]
+ if dryrun:
+ cmd.append("-n")
+
+ if url.startswith("ssh://"):
+ cmd.append("--receive-pack=gerrit receive-pack")
+
+ for push_option in push_options or []:
+ cmd.append("-o")
+ cmd.append(push_option)
+
+ cmd.append(url)
+
+ if dest_branch.startswith(R_HEADS):
+ dest_branch = dest_branch[len(R_HEADS) :]
+
+ ref_spec = "%s:refs/for/%s" % (R_HEADS + branch.name, dest_branch)
+ opts = []
+ if auto_topic:
+ opts += ["topic=" + branch.name]
+ opts += ["t=%s" % p for p in hashtags]
+ # NB: No need to encode labels as they've been validated above.
+ opts += ["l=%s" % p for p in labels]
+
+ opts += ["r=%s" % p for p in people[0]]
+ opts += ["cc=%s" % p for p in people[1]]
+ if notify:
+ opts += ["notify=" + notify]
+ if private:
+ opts += ["private"]
+ if wip:
+ opts += ["wip"]
+ if ready:
+ opts += ["ready"]
+ if opts:
+ ref_spec = ref_spec + "%" + ",".join(opts)
+ cmd.append(ref_spec)
+
+ if GitCommand(self, cmd, bare=True).Wait() != 0:
+ raise UploadError("Upload failed")
+
+ if not dryrun:
+ msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
+ self.bare_git.UpdateRef(
+ R_PUB + branch.name, R_HEADS + branch.name, message=msg
+ )
+
+ def _ExtractArchive(self, tarpath, path=None):
+ """Extract the given tar on its current location
+
+ Args:
+ tarpath: The path to the actual tar file
+
+ """
+ try:
+ with tarfile.open(tarpath, "r") as tar:
+ tar.extractall(path=path)
+ return True
+ except (IOError, tarfile.TarError) as e:
+ _error("Cannot extract archive %s: %s", tarpath, str(e))
+ return False
+
+ def Sync_NetworkHalf(
+ self,
+ quiet=False,
+ verbose=False,
+ output_redir=None,
+ is_new=None,
+ current_branch_only=None,
+ force_sync=False,
+ clone_bundle=True,
+ tags=None,
+ archive=False,
+ optimized_fetch=False,
+ retry_fetches=0,
+ prune=False,
+ submodules=False,
+ ssh_proxy=None,
+ clone_filter=None,
+ partial_clone_exclude=set(),
+ ):
+ """Perform only the network IO portion of the sync process.
+ Local working directory/branch state is not affected.
+ """
+ if archive and not isinstance(self, MetaProject):
+ if self.remote.url.startswith(("http://", "https://")):
+ _error(
+ "%s: Cannot fetch archives from http/https remotes.",
+ self.name,
+ )
+ return SyncNetworkHalfResult(False, False)
+
+ name = self.relpath.replace("\\", "/")
+ name = name.replace("/", "_")
+ tarpath = "%s.tar" % name
+ topdir = self.manifest.topdir
+
+ try:
+ self._FetchArchive(tarpath, cwd=topdir)
+ except GitError as e:
+ _error("%s", e)
+ return SyncNetworkHalfResult(False, False)
+
+ # From now on, we only need absolute tarpath.
+ tarpath = os.path.join(topdir, tarpath)
+
+ if not self._ExtractArchive(tarpath, path=topdir):
+ return SyncNetworkHalfResult(False, True)
+ try:
+ platform_utils.remove(tarpath)
+ except OSError as e:
+ _warn("Cannot remove archive %s: %s", tarpath, str(e))
+ self._CopyAndLinkFiles()
+ return SyncNetworkHalfResult(True, True)
+
+ # If the shared object dir already exists, don't try to rebootstrap with
+ # a clone bundle download. We should have the majority of objects
+ # already.
+ if clone_bundle and os.path.exists(self.objdir):
+ clone_bundle = False
+
+ if self.name in partial_clone_exclude:
+ clone_bundle = True
+ clone_filter = None
+
+ if is_new is None:
+ is_new = not self.Exists
+ if is_new:
+ self._InitGitDir(force_sync=force_sync, quiet=quiet)
+ else:
+ self._UpdateHooks(quiet=quiet)
+ self._InitRemote()
+
+ if self.UseAlternates:
+ # If gitdir/objects is a symlink, migrate it from the old layout.
+ gitdir_objects = os.path.join(self.gitdir, "objects")
+ if platform_utils.islink(gitdir_objects):
+ platform_utils.remove(gitdir_objects, missing_ok=True)
+ gitdir_alt = os.path.join(self.gitdir, "objects/info/alternates")
+ if not os.path.exists(gitdir_alt):
+ os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True)
+ _lwrite(
+ gitdir_alt,
+ os.path.join(
+ os.path.relpath(self.objdir, gitdir_objects), "objects"
+ )
+ + "\n",
+ )
+
+ if is_new:
+ alt = os.path.join(self.objdir, "objects/info/alternates")
+ try:
+ with open(alt) as fd:
+ # This works for both absolute and relative alternate
+ # directories.
+ alt_dir = os.path.join(
+ self.objdir, "objects", fd.readline().rstrip()
+ )
+ except IOError:
+ alt_dir = None
+ else:
+ alt_dir = None
+
+ if (
+ clone_bundle
+ and alt_dir is None
+ and self._ApplyCloneBundle(
+ initial=is_new, quiet=quiet, verbose=verbose
+ )
+ ):
+ is_new = False
+
+ if current_branch_only is None:
+ if self.sync_c:
+ current_branch_only = True
+ elif not self.manifest._loaded:
+ # Manifest cannot check defaults until it syncs.
+ current_branch_only = False
+ elif self.manifest.default.sync_c:
+ current_branch_only = True
+
+ if tags is None:
+ tags = self.sync_tags
+
+ if self.clone_depth:
+ depth = self.clone_depth
+ else:
+ depth = self.manifest.manifestProject.depth
+
+ # See if we can skip the network fetch entirely.
+ remote_fetched = False
+ if not (
+ optimized_fetch
+ and (
+ ID_RE.match(self.revisionExpr)
+ and self._CheckForImmutableRevision()
+ )
+ ):
+ remote_fetched = True
+ if not self._RemoteFetch(
+ initial=is_new,
+ quiet=quiet,
+ verbose=verbose,
+ output_redir=output_redir,
+ alt_dir=alt_dir,
+ current_branch_only=current_branch_only,
+ tags=tags,
+ prune=prune,
+ depth=depth,
+ submodules=submodules,
+ force_sync=force_sync,
+ ssh_proxy=ssh_proxy,
+ clone_filter=clone_filter,
+ retry_fetches=retry_fetches,
+ ):
+ return SyncNetworkHalfResult(False, remote_fetched)
+
+ mp = self.manifest.manifestProject
+ dissociate = mp.dissociate
+ if dissociate:
+ alternates_file = os.path.join(
+ self.objdir, "objects/info/alternates"
+ )
+ if os.path.exists(alternates_file):
+ cmd = ["repack", "-a", "-d"]
+ p = GitCommand(
+ self,
+ cmd,
+ bare=True,
+ capture_stdout=bool(output_redir),
+ merge_output=bool(output_redir),
+ )
+ if p.stdout and output_redir:
+ output_redir.write(p.stdout)
+ if p.Wait() != 0:
+ return SyncNetworkHalfResult(False, remote_fetched)
+ platform_utils.remove(alternates_file)
+
+ if self.worktree:
+ self._InitMRef()
+ else:
+ self._InitMirrorHead()
+ platform_utils.remove(
+ os.path.join(self.gitdir, "FETCH_HEAD"), missing_ok=True
+ )
+ return SyncNetworkHalfResult(True, remote_fetched)
+
+ def PostRepoUpgrade(self):
+ self._InitHooks()
+
+ def _CopyAndLinkFiles(self):
+ if self.client.isGitcClient:
+ return
+ for copyfile in self.copyfiles:
+ copyfile._Copy()
+ for linkfile in self.linkfiles:
+ linkfile._Link()
+
+ def GetCommitRevisionId(self):
+ """Get revisionId of a commit.
+
+ Use this method instead of GetRevisionId to get the id of the commit
+ rather than the id of the current git object (for example, a tag)
+
+ """
+ if not self.revisionExpr.startswith(R_TAGS):
+ return self.GetRevisionId(self._allrefs)
+
+ try:
+ return self.bare_git.rev_list(self.revisionExpr, "-1")[0]
+ except GitError:
+ raise ManifestInvalidRevisionError(
+ "revision %s in %s not found" % (self.revisionExpr, self.name)
+ )
+
+ def GetRevisionId(self, all_refs=None):
+ if self.revisionId:
+ return self.revisionId
+
+ rem = self.GetRemote()
+ rev = rem.ToLocal(self.revisionExpr)
+
+ if all_refs is not None and rev in all_refs:
+ return all_refs[rev]
+
+ try:
+ return self.bare_git.rev_parse("--verify", "%s^0" % rev)
+ except GitError:
+ raise ManifestInvalidRevisionError(
+ "revision %s in %s not found" % (self.revisionExpr, self.name)
+ )
+
+ def SetRevisionId(self, revisionId):
+ if self.revisionExpr:
+ self.upstream = self.revisionExpr
+
+ self.revisionId = revisionId
+
+ def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
+ """Perform only the local IO portion of the sync process.
+
+ Network access is not required.
+ """
+ if not os.path.exists(self.gitdir):
+ syncbuf.fail(
+ self,
+ "Cannot checkout %s due to missing network sync; Run "
+ "`repo sync -n %s` first." % (self.name, self.name),
+ )
+ return
+
+ self._InitWorkTree(force_sync=force_sync, submodules=submodules)
+ all_refs = self.bare_ref.all
+ self.CleanPublishedCache(all_refs)
+ revid = self.GetRevisionId(all_refs)
+
+ # Special case the root of the repo client checkout. Make sure it
+ # doesn't contain files being checked out to dirs we don't allow.
+ if self.relpath == ".":
+ PROTECTED_PATHS = {".repo"}
+ paths = set(
+ self.work_git.ls_tree("-z", "--name-only", "--", revid).split(
+ "\0"
+ )
+ )
+ bad_paths = paths & PROTECTED_PATHS
+ if bad_paths:
+ syncbuf.fail(
+ self,
+ "Refusing to checkout project that writes to protected "
+ "paths: %s" % (", ".join(bad_paths),),
+ )
+ return
+
+ def _doff():
+ self._FastForward(revid)
+ self._CopyAndLinkFiles()
+
+ def _dosubmodules():
+ self._SyncSubmodules(quiet=True)
+
+ head = self.work_git.GetHead()
+ if head.startswith(R_HEADS):
+ branch = head[len(R_HEADS) :]
+ try:
+ head = all_refs[head]
+ except KeyError:
+ head = None
+ else:
+ branch = None
+
+ if branch is None or syncbuf.detach_head:
+ # Currently on a detached HEAD. The user is assumed to
+ # not have any local modifications worth worrying about.
+ if self.IsRebaseInProgress():
+ syncbuf.fail(self, _PriorSyncFailedError())
+ return
+
+ if head == revid:
+ # No changes; don't do anything further.
+ # Except if the head needs to be detached.
+ if not syncbuf.detach_head:
+ # The copy/linkfile config may have changed.
+ self._CopyAndLinkFiles()
+ return
+ else:
+ lost = self._revlist(not_rev(revid), HEAD)
+ if lost:
+ syncbuf.info(self, "discarding %d commits", len(lost))
+
+ try:
+ self._Checkout(revid, quiet=True)
+ if submodules:
+ self._SyncSubmodules(quiet=True)
+ except GitError as e:
+ syncbuf.fail(self, e)
+ return
+ self._CopyAndLinkFiles()
+ return
+
+ if head == revid:
+ # No changes; don't do anything further.
+ #
+ # The copy/linkfile config may have changed.
+ self._CopyAndLinkFiles()
+ return
+
+ branch = self.GetBranch(branch)
+
+ if not branch.LocalMerge:
+ # The current branch has no tracking configuration.
+ # Jump off it to a detached HEAD.
+ syncbuf.info(
+ self, "leaving %s; does not track upstream", branch.name
+ )
+ try:
+ self._Checkout(revid, quiet=True)
+ if submodules:
+ self._SyncSubmodules(quiet=True)
+ except GitError as e:
+ syncbuf.fail(self, e)
+ return
+ self._CopyAndLinkFiles()
+ return
+
+ upstream_gain = self._revlist(not_rev(HEAD), revid)
+
+ # See if we can perform a fast forward merge. This can happen if our
+ # branch isn't in the exact same state as we last published.
+ try:
+ self.work_git.merge_base("--is-ancestor", HEAD, revid)
+ # Skip the published logic.
+ pub = False
+ except GitError:
+ pub = self.WasPublished(branch.name, all_refs)
+
+ if pub:
+ not_merged = self._revlist(not_rev(revid), pub)
+ if not_merged:
+ if upstream_gain:
+ # The user has published this branch and some of those
+ # commits are not yet merged upstream. We do not want
+ # to rewrite the published commits so we punt.
+ syncbuf.fail(
+ self,
+ "branch %s is published (but not merged) and is now "
+ "%d commits behind" % (branch.name, len(upstream_gain)),
+ )
+ return
+ elif pub == head:
+ # All published commits are merged, and thus we are a
+ # strict subset. We can fast-forward safely.
+ syncbuf.later1(self, _doff)
+ if submodules:
+ syncbuf.later1(self, _dosubmodules)
+ return
+
+ # Examine the local commits not in the remote. Find the
+ # last one attributed to this user, if any.
+ local_changes = self._revlist(not_rev(revid), HEAD, format="%H %ce")
+ last_mine = None
+ cnt_mine = 0
+ for commit in local_changes:
+ commit_id, committer_email = commit.split(" ", 1)
+ if committer_email == self.UserEmail:
+ last_mine = commit_id
+ cnt_mine += 1
+
+ if not upstream_gain and cnt_mine == len(local_changes):
+ # The copy/linkfile config may have changed.
+ self._CopyAndLinkFiles()
+ return
+
+ if self.IsDirty(consider_untracked=False):
+ syncbuf.fail(self, _DirtyError())
+ return
+
+ # If the upstream switched on us, warn the user.
+ if branch.merge != self.revisionExpr:
+ if branch.merge and self.revisionExpr:
+ syncbuf.info(
+ self,
+ "manifest switched %s...%s",
+ branch.merge,
+ self.revisionExpr,
+ )
+ elif branch.merge:
+ syncbuf.info(self, "manifest no longer tracks %s", branch.merge)
+
+ if cnt_mine < len(local_changes):
+ # Upstream rebased. Not everything in HEAD was created by this user.
+ syncbuf.info(
+ self,
+ "discarding %d commits removed from upstream",
+ len(local_changes) - cnt_mine,
+ )
+
+ branch.remote = self.GetRemote()
+ if not ID_RE.match(self.revisionExpr):
+ # In case of manifest sync the revisionExpr might be a SHA1.
+ branch.merge = self.revisionExpr
+ if not branch.merge.startswith("refs/"):
+ branch.merge = R_HEADS + branch.merge
+ branch.Save()
+
+ if cnt_mine > 0 and self.rebase:
+
+ def _docopyandlink():
+ self._CopyAndLinkFiles()
+
+ def _dorebase():
+ self._Rebase(upstream="%s^1" % last_mine, onto=revid)
+
+ syncbuf.later2(self, _dorebase)
+ if submodules:
+ syncbuf.later2(self, _dosubmodules)
+ syncbuf.later2(self, _docopyandlink)
+ elif local_changes:
+ try:
+ self._ResetHard(revid)
+ if submodules:
+ self._SyncSubmodules(quiet=True)
+ self._CopyAndLinkFiles()
+ except GitError as e:
+ syncbuf.fail(self, e)
+ return
+ else:
+ syncbuf.later1(self, _doff)
+ if submodules:
+ syncbuf.later1(self, _dosubmodules)
+
+ def AddCopyFile(self, src, dest, topdir):
+ """Mark |src| for copying to |dest| (relative to |topdir|).
+
+ No filesystem changes occur here. Actual copying happens later on.
+
+ Paths should have basic validation run on them before being queued.
+ Further checking will be handled when the actual copy happens.
+ """
+ self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest))
+
+ def AddLinkFile(self, src, dest, topdir):
+ """Mark |dest| to create a symlink (relative to |topdir|) pointing to
+ |src|.
+
+ No filesystem changes occur here. Actual linking happens later on.
+
+ Paths should have basic validation run on them before being queued.
+ Further checking will be handled when the actual link happens.
+ """
+ self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest))
+
+ def AddAnnotation(self, name, value, keep):
+ self.annotations.append(Annotation(name, value, keep))
+
+ def DownloadPatchSet(self, change_id, patch_id):
+ """Download a single patch set of a single change to FETCH_HEAD."""
+ remote = self.GetRemote()
+
+ cmd = ["fetch", remote.name]
+ cmd.append(
+ "refs/changes/%2.2d/%d/%d" % (change_id % 100, change_id, patch_id)
+ )
+ if GitCommand(self, cmd, bare=True).Wait() != 0:
+ return None
+ return DownloadedChange(
+ self,
+ self.GetRevisionId(),
+ change_id,
+ patch_id,
+ self.bare_git.rev_parse("FETCH_HEAD"),
+ )
+
+ def DeleteWorktree(self, quiet=False, force=False):
+ """Delete the source checkout and any other housekeeping tasks.
+
+ This currently leaves behind the internal .repo/ cache state. This
+ helps when switching branches or manifest changes get reverted as we
+ don't have to redownload all the git objects. But we should do some GC
+ at some point.
+
+ Args:
+ quiet: Whether to hide normal messages.
+ force: Always delete tree even if dirty.
Returns:
- True if the checkout succeeded; False if it didn't; None if the branch
- didn't exist.
- """
- rev = R_HEADS + name
- head = self.work_git.GetHead()
- if head == rev:
- # Already on the branch
- #
- return True
+ True if the worktree was completely cleaned out.
+ """
+ if self.IsDirty():
+ if force:
+ print(
+ "warning: %s: Removing dirty project: uncommitted changes "
+ "lost." % (self.RelPath(local=False),),
+ file=sys.stderr,
+ )
+ else:
+ print(
+ "error: %s: Cannot remove project: uncommitted changes are "
+ "present.\n" % (self.RelPath(local=False),),
+ file=sys.stderr,
+ )
+ return False
- all_refs = self.bare_ref.all
- try:
- revid = all_refs[rev]
- except KeyError:
- # Branch does not exist in this project
- #
- return None
+ if not quiet:
+ print(
+ "%s: Deleting obsolete checkout." % (self.RelPath(local=False),)
+ )
- if head.startswith(R_HEADS):
- try:
- head = all_refs[head]
- except KeyError:
- head = None
+ # Unlock and delink from the main worktree. We don't use git's worktree
+ # remove because it will recursively delete projects -- we handle that
+ # ourselves below. https://crbug.com/git/48
+ if self.use_git_worktrees:
+ needle = platform_utils.realpath(self.gitdir)
+ # Find the git worktree commondir under .repo/worktrees/.
+ output = self.bare_git.worktree("list", "--porcelain").splitlines()[
+ 0
+ ]
+ assert output.startswith("worktree "), output
+ commondir = output[9:]
+ # Walk each of the git worktrees to see where they point.
+ configs = os.path.join(commondir, "worktrees")
+ for name in os.listdir(configs):
+ gitdir = os.path.join(configs, name, "gitdir")
+ with open(gitdir) as fp:
+ relpath = fp.read().strip()
+ # Resolve the checkout path and see if it matches this project.
+ fullpath = platform_utils.realpath(
+ os.path.join(configs, name, relpath)
+ )
+ if fullpath == needle:
+ platform_utils.rmtree(os.path.join(configs, name))
- if head == revid:
- # Same revision; just update HEAD to point to the new
- # target branch, but otherwise take no other action.
- #
- _lwrite(self.work_git.GetDotgitPath(subpath=HEAD),
- 'ref: %s%s\n' % (R_HEADS, name))
- return True
+ # Delete the .git directory first, so we're less likely to have a
+ # partially working git repository around. There shouldn't be any git
+ # projects here, so rmtree works.
- return GitCommand(self,
- ['checkout', name, '--'],
- capture_stdout=True,
- capture_stderr=True).Wait() == 0
-
- def AbandonBranch(self, name):
- """Destroy a local topic branch.
-
- Args:
- name: The name of the branch to abandon.
-
- Returns:
- True if the abandon succeeded; False if it didn't; None if the branch
- didn't exist.
- """
- rev = R_HEADS + name
- all_refs = self.bare_ref.all
- if rev not in all_refs:
- # Doesn't exist
- return None
-
- head = self.work_git.GetHead()
- if head == rev:
- # We can't destroy the branch while we are sitting
- # on it. Switch to a detached HEAD.
- #
- head = all_refs[head]
-
- revid = self.GetRevisionId(all_refs)
- if head == revid:
- _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), '%s\n' % revid)
- else:
- self._Checkout(revid, quiet=True)
-
- return GitCommand(self,
- ['branch', '-D', name],
- capture_stdout=True,
- capture_stderr=True).Wait() == 0
-
- def PruneHeads(self):
- """Prune any topic branches already merged into upstream.
- """
- cb = self.CurrentBranch
- kill = []
- left = self._allrefs
- for name in left.keys():
- if name.startswith(R_HEADS):
- name = name[len(R_HEADS):]
- if cb is None or name != cb:
- kill.append(name)
-
- # Minor optimization: If there's nothing to prune, then don't try to read
- # any project state.
- if not kill and not cb:
- return []
-
- rev = self.GetRevisionId(left)
- if cb is not None \
- and not self._revlist(HEAD + '...' + rev) \
- and not self.IsDirty(consider_untracked=False):
- self.work_git.DetachHead(HEAD)
- kill.append(cb)
-
- if kill:
- old = self.bare_git.GetHead()
-
- try:
- self.bare_git.DetachHead(rev)
-
- b = ['branch', '-d']
- b.extend(kill)
- b = GitCommand(self, b, bare=True,
- capture_stdout=True,
- capture_stderr=True)
- b.Wait()
- finally:
- if ID_RE.match(old):
- self.bare_git.DetachHead(old)
- else:
- self.bare_git.SetHead(old)
- left = self._allrefs
-
- for branch in kill:
- if (R_HEADS + branch) not in left:
- self.CleanPublishedCache()
- break
-
- if cb and cb not in kill:
- kill.append(cb)
- kill.sort()
-
- kept = []
- for branch in kill:
- if R_HEADS + branch in left:
- branch = self.GetBranch(branch)
- base = branch.LocalMerge
- if not base:
- base = rev
- kept.append(ReviewableBranch(self, branch, base))
- return kept
-
-# Submodule Management ##
- def GetRegisteredSubprojects(self):
- result = []
-
- def rec(subprojects):
- if not subprojects:
- return
- result.extend(subprojects)
- for p in subprojects:
- rec(p.subprojects)
- rec(self.subprojects)
- return result
-
- def _GetSubmodules(self):
- # Unfortunately we cannot call `git submodule status --recursive` here
- # because the working tree might not exist yet, and it cannot be used
- # without a working tree in its current implementation.
-
- def get_submodules(gitdir, rev):
- # Parse .gitmodules for submodule sub_paths and sub_urls
- sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
- if not sub_paths:
- return []
- # Run `git ls-tree` to read SHAs of submodule object, which happen to be
- # revision of submodule repository
- sub_revs = git_ls_tree(gitdir, rev, sub_paths)
- submodules = []
- for sub_path, sub_url in zip(sub_paths, sub_urls):
+ # Try to remove plain files first in case of git worktrees. If this
+ # fails for any reason, we'll fall back to rmtree, and that'll display
+ # errors if it can't remove things either.
try:
- sub_rev = sub_revs[sub_path]
- except KeyError:
- # Ignore non-exist submodules
- continue
- submodules.append((sub_rev, sub_path, sub_url))
- return submodules
+ platform_utils.remove(self.gitdir)
+ except OSError:
+ pass
+ try:
+ platform_utils.rmtree(self.gitdir)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ print("error: %s: %s" % (self.gitdir, e), file=sys.stderr)
+ print(
+ "error: %s: Failed to delete obsolete checkout; remove "
+ "manually, then run `repo sync -l`."
+ % (self.RelPath(local=False),),
+ file=sys.stderr,
+ )
+ return False
- re_path = re.compile(r'^submodule\.(.+)\.path=(.*)$')
- re_url = re.compile(r'^submodule\.(.+)\.url=(.*)$')
+ # Delete everything under the worktree, except for directories that
+ # contain another git project.
+ dirs_to_remove = []
+ failed = False
+ for root, dirs, files in platform_utils.walk(self.worktree):
+ for f in files:
+ path = os.path.join(root, f)
+ try:
+ platform_utils.remove(path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ print(
+ "error: %s: Failed to remove: %s" % (path, e),
+ file=sys.stderr,
+ )
+ failed = True
+ dirs[:] = [
+ d
+ for d in dirs
+ if not os.path.lexists(os.path.join(root, d, ".git"))
+ ]
+ dirs_to_remove += [
+ os.path.join(root, d)
+ for d in dirs
+ if os.path.join(root, d) not in dirs_to_remove
+ ]
+ for d in reversed(dirs_to_remove):
+ if platform_utils.islink(d):
+ try:
+ platform_utils.remove(d)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ print(
+ "error: %s: Failed to remove: %s" % (d, e),
+ file=sys.stderr,
+ )
+ failed = True
+ elif not platform_utils.listdir(d):
+ try:
+ platform_utils.rmdir(d)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ print(
+ "error: %s: Failed to remove: %s" % (d, e),
+ file=sys.stderr,
+ )
+ failed = True
+ if failed:
+ print(
+ "error: %s: Failed to delete obsolete checkout."
+ % (self.RelPath(local=False),),
+ file=sys.stderr,
+ )
+ print(
+ " Remove manually, then run `repo sync -l`.",
+ file=sys.stderr,
+ )
+ return False
- def parse_gitmodules(gitdir, rev):
- cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
- try:
- p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
- bare=True, gitdir=gitdir)
- except GitError:
- return [], []
- if p.Wait() != 0:
- return [], []
+ # Try deleting parent dirs if they are empty.
+ path = self.worktree
+ while path != self.manifest.topdir:
+ try:
+ platform_utils.rmdir(path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ break
+ path = os.path.dirname(path)
- gitmodules_lines = []
- fd, temp_gitmodules_path = tempfile.mkstemp()
- try:
- os.write(fd, p.stdout.encode('utf-8'))
- os.close(fd)
- cmd = ['config', '--file', temp_gitmodules_path, '--list']
- p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
- bare=True, gitdir=gitdir)
- if p.Wait() != 0:
- return [], []
- gitmodules_lines = p.stdout.split('\n')
- except GitError:
- return [], []
- finally:
- platform_utils.remove(temp_gitmodules_path)
+ return True
- names = set()
- paths = {}
- urls = {}
- for line in gitmodules_lines:
- if not line:
- continue
- m = re_path.match(line)
- if m:
- names.add(m.group(1))
- paths[m.group(1)] = m.group(2)
- continue
- m = re_url.match(line)
- if m:
- names.add(m.group(1))
- urls[m.group(1)] = m.group(2)
- continue
- names = sorted(names)
- return ([paths.get(name, '') for name in names],
- [urls.get(name, '') for name in names])
-
- def git_ls_tree(gitdir, rev, paths):
- cmd = ['ls-tree', rev, '--']
- cmd.extend(paths)
- try:
- p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
- bare=True, gitdir=gitdir)
- except GitError:
- return []
- if p.Wait() != 0:
- return []
- objects = {}
- for line in p.stdout.split('\n'):
- if not line.strip():
- continue
- object_rev, object_path = line.split()[2:4]
- objects[object_path] = object_rev
- return objects
-
- try:
- rev = self.GetRevisionId()
- except GitError:
- return []
- return get_submodules(self.gitdir, rev)
-
- def GetDerivedSubprojects(self):
- result = []
- if not self.Exists:
- # If git repo does not exist yet, querying its submodules will
- # mess up its states; so return here.
- return result
- for rev, path, url in self._GetSubmodules():
- name = self.manifest.GetSubprojectName(self, path)
- relpath, worktree, gitdir, objdir = \
- self.manifest.GetSubprojectPaths(self, name, path)
- project = self.manifest.paths.get(relpath)
- if project:
- result.extend(project.GetDerivedSubprojects())
- continue
-
- if url.startswith('..'):
- url = urllib.parse.urljoin("%s/" % self.remote.url, url)
- remote = RemoteSpec(self.remote.name,
- url=url,
- pushUrl=self.remote.pushUrl,
- review=self.remote.review,
- revision=self.remote.revision)
- subproject = Project(manifest=self.manifest,
- name=name,
- remote=remote,
- gitdir=gitdir,
- objdir=objdir,
- worktree=worktree,
- relpath=relpath,
- revisionExpr=rev,
- revisionId=rev,
- rebase=self.rebase,
- groups=self.groups,
- sync_c=self.sync_c,
- sync_s=self.sync_s,
- sync_tags=self.sync_tags,
- parent=self,
- is_derived=True)
- result.append(subproject)
- result.extend(subproject.GetDerivedSubprojects())
- return result
-
-# Direct Git Commands ##
- def EnableRepositoryExtension(self, key, value='true', version=1):
- """Enable git repository extension |key| with |value|.
-
- Args:
- key: The extension to enabled. Omit the "extensions." prefix.
- value: The value to use for the extension.
- version: The minimum git repository version needed.
- """
- # Make sure the git repo version is new enough already.
- found_version = self.config.GetInt('core.repositoryFormatVersion')
- if found_version is None:
- found_version = 0
- if found_version < version:
- self.config.SetString('core.repositoryFormatVersion', str(version))
-
- # Enable the extension!
- self.config.SetString('extensions.%s' % (key,), value)
-
- def ResolveRemoteHead(self, name=None):
- """Find out what the default branch (HEAD) points to.
-
- Normally this points to refs/heads/master, but projects are moving to main.
- Support whatever the server uses rather than hardcoding "master" ourselves.
- """
- if name is None:
- name = self.remote.name
-
- # The output will look like (NB: tabs are separators):
- # ref: refs/heads/master HEAD
- # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD
- output = self.bare_git.ls_remote('-q', '--symref', '--exit-code', name, 'HEAD')
-
- for line in output.splitlines():
- lhs, rhs = line.split('\t', 1)
- if rhs == 'HEAD' and lhs.startswith('ref:'):
- return lhs[4:].strip()
-
- return None
-
- def _CheckForImmutableRevision(self):
- try:
- # if revision (sha or tag) is not present then following function
- # throws an error.
- self.bare_git.rev_list('-1', '--missing=allow-any',
- '%s^0' % self.revisionExpr, '--')
- if self.upstream:
- rev = self.GetRemote().ToLocal(self.upstream)
- self.bare_git.rev_list('-1', '--missing=allow-any',
- '%s^0' % rev, '--')
- self.bare_git.merge_base('--is-ancestor', self.revisionExpr, rev)
- return True
- except GitError:
- # There is no such persistent revision. We have to fetch it.
- return False
-
- def _FetchArchive(self, tarpath, cwd=None):
- cmd = ['archive', '-v', '-o', tarpath]
- cmd.append('--remote=%s' % self.remote.url)
- cmd.append('--prefix=%s/' % self.RelPath(local=False))
- cmd.append(self.revisionExpr)
-
- command = GitCommand(self, cmd, cwd=cwd,
- capture_stdout=True,
- capture_stderr=True)
-
- if command.Wait() != 0:
- raise GitError('git archive %s: %s' % (self.name, command.stderr))
-
- def _RemoteFetch(self, name=None,
- current_branch_only=False,
- initial=False,
- quiet=False,
- verbose=False,
- output_redir=None,
- alt_dir=None,
- tags=True,
- prune=False,
- depth=None,
- submodules=False,
- ssh_proxy=None,
- force_sync=False,
- clone_filter=None,
- retry_fetches=2,
- retry_sleep_initial_sec=4.0,
- retry_exp_factor=2.0):
- is_sha1 = False
- tag_name = None
- # The depth should not be used when fetching to a mirror because
- # it will result in a shallow repository that cannot be cloned or
- # fetched from.
- # The repo project should also never be synced with partial depth.
- if self.manifest.IsMirror or self.relpath == '.repo/repo':
- depth = None
-
- if depth:
- current_branch_only = True
-
- if ID_RE.match(self.revisionExpr) is not None:
- is_sha1 = True
-
- if current_branch_only:
- if self.revisionExpr.startswith(R_TAGS):
- # This is a tag and its commit id should never change.
- tag_name = self.revisionExpr[len(R_TAGS):]
- elif self.upstream and self.upstream.startswith(R_TAGS):
- # This is a tag and its commit id should never change.
- tag_name = self.upstream[len(R_TAGS):]
-
- if is_sha1 or tag_name is not None:
- if self._CheckForImmutableRevision():
- if verbose:
- print('Skipped fetching project %s (already have persistent ref)'
- % self.name)
- return True
- if is_sha1 and not depth:
- # When syncing a specific commit and --depth is not set:
- # * if upstream is explicitly specified and is not a sha1, fetch only
- # upstream as users expect only upstream to be fetch.
- # Note: The commit might not be in upstream in which case the sync
- # will fail.
- # * otherwise, fetch all branches to make sure we end up with the
- # specific commit.
- if self.upstream:
- current_branch_only = not ID_RE.match(self.upstream)
- else:
- current_branch_only = False
-
- if not name:
- name = self.remote.name
-
- remote = self.GetRemote(name)
- if not remote.PreConnectFetch(ssh_proxy):
- ssh_proxy = None
-
- if initial:
- if alt_dir and 'objects' == os.path.basename(alt_dir):
- ref_dir = os.path.dirname(alt_dir)
- packed_refs = os.path.join(self.gitdir, 'packed-refs')
+ def StartBranch(self, name, branch_merge="", revision=None):
+ """Create a new branch off the manifest's revision."""
+ if not branch_merge:
+ branch_merge = self.revisionExpr
+ head = self.work_git.GetHead()
+ if head == (R_HEADS + name):
+ return True
all_refs = self.bare_ref.all
- ids = set(all_refs.values())
- tmp = set()
+ if R_HEADS + name in all_refs:
+ return GitCommand(self, ["checkout", "-q", name, "--"]).Wait() == 0
- for r, ref_id in GitRefs(ref_dir).all.items():
- if r not in all_refs:
- if r.startswith(R_TAGS) or remote.WritesTo(r):
- all_refs[r] = ref_id
- ids.add(ref_id)
- continue
+ branch = self.GetBranch(name)
+ branch.remote = self.GetRemote()
+ branch.merge = branch_merge
+ if not branch.merge.startswith("refs/") and not ID_RE.match(
+ branch_merge
+ ):
+ branch.merge = R_HEADS + branch_merge
- if ref_id in ids:
- continue
-
- r = 'refs/_alt/%s' % ref_id
- all_refs[r] = ref_id
- ids.add(ref_id)
- tmp.add(r)
-
- tmp_packed_lines = []
- old_packed_lines = []
-
- for r in sorted(all_refs):
- line = '%s %s\n' % (all_refs[r], r)
- tmp_packed_lines.append(line)
- if r not in tmp:
- old_packed_lines.append(line)
-
- tmp_packed = ''.join(tmp_packed_lines)
- old_packed = ''.join(old_packed_lines)
- _lwrite(packed_refs, tmp_packed)
- else:
- alt_dir = None
-
- cmd = ['fetch']
-
- if clone_filter:
- git_require((2, 19, 0), fail=True, msg='partial clones')
- cmd.append('--filter=%s' % clone_filter)
- self.EnableRepositoryExtension('partialclone', self.remote.name)
-
- if depth:
- cmd.append('--depth=%s' % depth)
- else:
- # If this repo has shallow objects, then we don't know which refs have
- # shallow objects or not. Tell git to unshallow all fetched refs. Don't
- # do this with projects that don't have shallow objects, since it is less
- # efficient.
- if os.path.exists(os.path.join(self.gitdir, 'shallow')):
- cmd.append('--depth=2147483647')
-
- if not verbose:
- cmd.append('--quiet')
- if not quiet and sys.stdout.isatty():
- cmd.append('--progress')
- if not self.worktree:
- cmd.append('--update-head-ok')
- cmd.append(name)
-
- if force_sync:
- cmd.append('--force')
-
- if prune:
- cmd.append('--prune')
-
- # Always pass something for --recurse-submodules, git with GIT_DIR behaves
- # incorrectly when not given `--recurse-submodules=no`. (b/218891912)
- cmd.append(f'--recurse-submodules={"on-demand" if submodules else "no"}')
-
- spec = []
- if not current_branch_only:
- # Fetch whole repo
- spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
- elif tag_name is not None:
- spec.append('tag')
- spec.append(tag_name)
-
- if self.manifest.IsMirror and not current_branch_only:
- branch = None
- else:
- branch = self.revisionExpr
- if (not self.manifest.IsMirror and is_sha1 and depth
- and git_require((1, 8, 3))):
- # Shallow checkout of a specific commit, fetch from that commit and not
- # the heads only as the commit might be deeper in the history.
- spec.append(branch)
- if self.upstream:
- spec.append(self.upstream)
- else:
- if is_sha1:
- branch = self.upstream
- if branch is not None and branch.strip():
- if not branch.startswith('refs/'):
- branch = R_HEADS + branch
- spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
-
- # If mirroring repo and we cannot deduce the tag or branch to fetch, fetch
- # whole repo.
- if self.manifest.IsMirror and not spec:
- spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
-
- # If using depth then we should not get all the tags since they may
- # be outside of the depth.
- if not tags or depth:
- cmd.append('--no-tags')
- else:
- cmd.append('--tags')
- spec.append(str((u'+refs/tags/*:') + remote.ToLocal('refs/tags/*')))
-
- cmd.extend(spec)
-
- # At least one retry minimum due to git remote prune.
- retry_fetches = max(retry_fetches, 2)
- retry_cur_sleep = retry_sleep_initial_sec
- ok = prune_tried = False
- for try_n in range(retry_fetches):
- gitcmd = GitCommand(
- self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects'),
- ssh_proxy=ssh_proxy,
- merge_output=True, capture_stdout=quiet or bool(output_redir))
- if gitcmd.stdout and not quiet and output_redir:
- output_redir.write(gitcmd.stdout)
- ret = gitcmd.Wait()
- if ret == 0:
- ok = True
- break
-
- # Retry later due to HTTP 429 Too Many Requests.
- elif (gitcmd.stdout and
- 'error:' in gitcmd.stdout and
- 'HTTP 429' in gitcmd.stdout):
- # Fallthru to sleep+retry logic at the bottom.
- pass
-
- # Try to prune remote branches once in case there are conflicts.
- # For example, if the remote had refs/heads/upstream, but deleted that and
- # now has refs/heads/upstream/foo.
- elif (gitcmd.stdout and
- 'error:' in gitcmd.stdout and
- 'git remote prune' in gitcmd.stdout and
- not prune_tried):
- prune_tried = True
- prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
- ssh_proxy=ssh_proxy)
- ret = prunecmd.Wait()
- if ret:
- break
- print('retrying fetch after pruning remote branches', file=output_redir)
- # Continue right away so we don't sleep as we shouldn't need to.
- continue
- elif current_branch_only and is_sha1 and ret == 128:
- # Exit code 128 means "couldn't find the ref you asked for"; if we're
- # in sha1 mode, we just tried sync'ing from the upstream field; it
- # doesn't exist, thus abort the optimization attempt and do a full sync.
- break
- elif ret < 0:
- # Git died with a signal, exit immediately
- break
-
- # Figure out how long to sleep before the next attempt, if there is one.
- if not verbose and gitcmd.stdout:
- print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir)
- if try_n < retry_fetches - 1:
- print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep),
- file=output_redir)
- time.sleep(retry_cur_sleep)
- retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
- MAXIMUM_RETRY_SLEEP_SEC)
- retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
- RETRY_JITTER_PERCENT))
-
- if initial:
- if alt_dir:
- if old_packed != '':
- _lwrite(packed_refs, old_packed)
+ if revision is None:
+ revid = self.GetRevisionId(all_refs)
else:
- platform_utils.remove(packed_refs)
- self.bare_git.pack_refs('--all', '--prune')
+ revid = self.work_git.rev_parse(revision)
- if is_sha1 and current_branch_only:
- # We just synced the upstream given branch; verify we
- # got what we wanted, else trigger a second run of all
- # refs.
- if not self._CheckForImmutableRevision():
- # Sync the current branch only with depth set to None.
- # We always pass depth=None down to avoid infinite recursion.
- return self._RemoteFetch(
- name=name, quiet=quiet, verbose=verbose, output_redir=output_redir,
- current_branch_only=current_branch_only and depth,
- initial=False, alt_dir=alt_dir,
- depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter)
-
- return ok
-
- def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False):
- if initial and (self.manifest.manifestProject.depth or self.clone_depth):
- return False
-
- remote = self.GetRemote()
- bundle_url = remote.url + '/clone.bundle'
- bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
- if GetSchemeFromUrl(bundle_url) not in ('http', 'https',
- 'persistent-http',
- 'persistent-https'):
- return False
-
- bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
- bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')
-
- exist_dst = os.path.exists(bundle_dst)
- exist_tmp = os.path.exists(bundle_tmp)
-
- if not initial and not exist_dst and not exist_tmp:
- return False
-
- if not exist_dst:
- exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet,
- verbose)
- if not exist_dst:
- return False
-
- cmd = ['fetch']
- if not verbose:
- cmd.append('--quiet')
- if not quiet and sys.stdout.isatty():
- cmd.append('--progress')
- if not self.worktree:
- cmd.append('--update-head-ok')
- cmd.append(bundle_dst)
- for f in remote.fetch:
- cmd.append(str(f))
- cmd.append('+refs/tags/*:refs/tags/*')
-
- ok = GitCommand(
- self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects')).Wait() == 0
- platform_utils.remove(bundle_dst, missing_ok=True)
- platform_utils.remove(bundle_tmp, missing_ok=True)
- return ok
-
- def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
- platform_utils.remove(dstPath, missing_ok=True)
-
- cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location']
- if quiet:
- cmd += ['--silent', '--show-error']
- if os.path.exists(tmpPath):
- size = os.stat(tmpPath).st_size
- if size >= 1024:
- cmd += ['--continue-at', '%d' % (size,)]
- else:
- platform_utils.remove(tmpPath)
- with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
- if cookiefile:
- cmd += ['--cookie', cookiefile]
- if proxy:
- cmd += ['--proxy', proxy]
- elif 'http_proxy' in os.environ and 'darwin' == sys.platform:
- cmd += ['--proxy', os.environ['http_proxy']]
- if srcUrl.startswith('persistent-https'):
- srcUrl = 'http' + srcUrl[len('persistent-https'):]
- elif srcUrl.startswith('persistent-http'):
- srcUrl = 'http' + srcUrl[len('persistent-http'):]
- cmd += [srcUrl]
-
- proc = None
- with Trace('Fetching bundle: %s', ' '.join(cmd)):
- if verbose:
- print('%s: Downloading bundle: %s' % (self.name, srcUrl))
- stdout = None if verbose else subprocess.PIPE
- stderr = None if verbose else subprocess.STDOUT
- try:
- proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
- except OSError:
- return False
-
- (output, _) = proc.communicate()
- curlret = proc.returncode
-
- if curlret == 22:
- # From curl man page:
- # 22: HTTP page not retrieved. The requested url was not found or
- # returned another error with the HTTP error code being 400 or above.
- # This return code only appears if -f, --fail is used.
- if verbose:
- print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name)
- if output:
- print('Curl output:\n%s' % output)
- return False
- elif curlret and not verbose and output:
- print('%s' % output, file=sys.stderr)
-
- if os.path.exists(tmpPath):
- if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
- platform_utils.rename(tmpPath, dstPath)
- return True
- else:
- platform_utils.remove(tmpPath)
- return False
- else:
- return False
-
- def _IsValidBundle(self, path, quiet):
- try:
- with open(path, 'rb') as f:
- if f.read(16) == b'# v2 git bundle\n':
- return True
- else:
- if not quiet:
- print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
- return False
- except OSError:
- return False
-
- def _Checkout(self, rev, quiet=False):
- cmd = ['checkout']
- if quiet:
- cmd.append('-q')
- cmd.append(rev)
- cmd.append('--')
- if GitCommand(self, cmd).Wait() != 0:
- if self._allrefs:
- raise GitError('%s checkout %s ' % (self.name, rev))
-
- def _CherryPick(self, rev, ffonly=False, record_origin=False):
- cmd = ['cherry-pick']
- if ffonly:
- cmd.append('--ff')
- if record_origin:
- cmd.append('-x')
- cmd.append(rev)
- cmd.append('--')
- if GitCommand(self, cmd).Wait() != 0:
- if self._allrefs:
- raise GitError('%s cherry-pick %s ' % (self.name, rev))
-
- def _LsRemote(self, refs):
- cmd = ['ls-remote', self.remote.name, refs]
- p = GitCommand(self, cmd, capture_stdout=True)
- if p.Wait() == 0:
- return p.stdout
- return None
-
- def _Revert(self, rev):
- cmd = ['revert']
- cmd.append('--no-edit')
- cmd.append(rev)
- cmd.append('--')
- if GitCommand(self, cmd).Wait() != 0:
- if self._allrefs:
- raise GitError('%s revert %s ' % (self.name, rev))
-
- def _ResetHard(self, rev, quiet=True):
- cmd = ['reset', '--hard']
- if quiet:
- cmd.append('-q')
- cmd.append(rev)
- if GitCommand(self, cmd).Wait() != 0:
- raise GitError('%s reset --hard %s ' % (self.name, rev))
-
- def _SyncSubmodules(self, quiet=True):
- cmd = ['submodule', 'update', '--init', '--recursive']
- if quiet:
- cmd.append('-q')
- if GitCommand(self, cmd).Wait() != 0:
- raise GitError('%s submodule update --init --recursive ' % self.name)
-
- def _Rebase(self, upstream, onto=None):
- cmd = ['rebase']
- if onto is not None:
- cmd.extend(['--onto', onto])
- cmd.append(upstream)
- if GitCommand(self, cmd).Wait() != 0:
- raise GitError('%s rebase %s ' % (self.name, upstream))
-
- def _FastForward(self, head, ffonly=False):
- cmd = ['merge', '--no-stat', head]
- if ffonly:
- cmd.append("--ff-only")
- if GitCommand(self, cmd).Wait() != 0:
- raise GitError('%s merge %s ' % (self.name, head))
-
- def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
- init_git_dir = not os.path.exists(self.gitdir)
- init_obj_dir = not os.path.exists(self.objdir)
- try:
- # Initialize the bare repository, which contains all of the objects.
- if init_obj_dir:
- os.makedirs(self.objdir)
- self.bare_objdir.init()
-
- self._UpdateHooks(quiet=quiet)
-
- if self.use_git_worktrees:
- # Enable per-worktree config file support if possible. This is more a
- # nice-to-have feature for users rather than a hard requirement.
- if git_require((2, 20, 0)):
- self.EnableRepositoryExtension('worktreeConfig')
-
- # If we have a separate directory to hold refs, initialize it as well.
- if self.objdir != self.gitdir:
- if init_git_dir:
- os.makedirs(self.gitdir)
-
- if init_obj_dir or init_git_dir:
- self._ReferenceGitDir(self.objdir, self.gitdir, copy_all=True)
- try:
- self._CheckDirReference(self.objdir, self.gitdir)
- except GitError as e:
- if force_sync:
- print("Retrying clone after deleting %s" %
- self.gitdir, file=sys.stderr)
+ if head.startswith(R_HEADS):
try:
- platform_utils.rmtree(platform_utils.realpath(self.gitdir))
- if self.worktree and os.path.exists(platform_utils.realpath
- (self.worktree)):
- platform_utils.rmtree(platform_utils.realpath(self.worktree))
- return self._InitGitDir(mirror_git=mirror_git, force_sync=False,
- quiet=quiet)
- except Exception:
- raise e
- raise e
+ head = all_refs[head]
+ except KeyError:
+ head = None
+ if revid and head and revid == head:
+ ref = R_HEADS + name
+ self.work_git.update_ref(ref, revid)
+ self.work_git.symbolic_ref(HEAD, ref)
+ branch.Save()
+ return True
- if init_git_dir:
- mp = self.manifest.manifestProject
- ref_dir = mp.reference or ''
+ if (
+ GitCommand(
+ self, ["checkout", "-q", "-b", branch.name, revid]
+ ).Wait()
+ == 0
+ ):
+ branch.Save()
+ return True
+ return False
- def _expanded_ref_dirs():
- """Iterate through the possible git reference directory paths."""
- name = self.name + '.git'
- yield mirror_git or os.path.join(ref_dir, name)
- for prefix in '', self.remote.name:
- yield os.path.join(ref_dir, '.repo', 'project-objects', prefix, name)
- yield os.path.join(ref_dir, '.repo', 'worktrees', prefix, name)
+ def CheckoutBranch(self, name):
+ """Checkout a local topic branch.
- if ref_dir or mirror_git:
- found_ref_dir = None
- for path in _expanded_ref_dirs():
- if os.path.exists(path):
- found_ref_dir = path
- break
- ref_dir = found_ref_dir
+ Args:
+ name: The name of the branch to checkout.
- if ref_dir:
- if not os.path.isabs(ref_dir):
- # The alternate directory is relative to the object database.
- ref_dir = os.path.relpath(ref_dir,
- os.path.join(self.objdir, 'objects'))
- _lwrite(os.path.join(self.objdir, 'objects/info/alternates'),
- os.path.join(ref_dir, 'objects') + '\n')
+ Returns:
+ True if the checkout succeeded; False if it didn't; None if the
+ branch didn't exist.
+ """
+ rev = R_HEADS + name
+ head = self.work_git.GetHead()
+ if head == rev:
+ # Already on the branch.
+ return True
- m = self.manifest.manifestProject.config
- for key in ['user.name', 'user.email']:
- if m.Has(key, include_defaults=False):
- self.config.SetString(key, m.GetString(key))
- if not self.manifest.EnableGitLfs:
- self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f')
- self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip')
- self.config.SetBoolean('core.bare', True if self.manifest.IsMirror else None)
- except Exception:
- if init_obj_dir and os.path.exists(self.objdir):
- platform_utils.rmtree(self.objdir)
- if init_git_dir and os.path.exists(self.gitdir):
- platform_utils.rmtree(self.gitdir)
- raise
+ all_refs = self.bare_ref.all
+ try:
+ revid = all_refs[rev]
+ except KeyError:
+ # Branch does not exist in this project.
+ return None
- def _UpdateHooks(self, quiet=False):
- if os.path.exists(self.objdir):
- self._InitHooks(quiet=quiet)
+ if head.startswith(R_HEADS):
+ try:
+ head = all_refs[head]
+ except KeyError:
+ head = None
- def _InitHooks(self, quiet=False):
- hooks = platform_utils.realpath(os.path.join(self.objdir, 'hooks'))
- if not os.path.exists(hooks):
- os.makedirs(hooks)
+ if head == revid:
+ # Same revision; just update HEAD to point to the new
+ # target branch, but otherwise take no other action.
+ _lwrite(
+ self.work_git.GetDotgitPath(subpath=HEAD),
+ "ref: %s%s\n" % (R_HEADS, name),
+ )
+ return True
- # Delete sample hooks. They're noise.
- for hook in glob.glob(os.path.join(hooks, '*.sample')):
- try:
- platform_utils.remove(hook, missing_ok=True)
- except PermissionError:
- pass
+ return (
+ GitCommand(
+ self,
+ ["checkout", name, "--"],
+ capture_stdout=True,
+ capture_stderr=True,
+ ).Wait()
+ == 0
+ )
- for stock_hook in _ProjectHooks():
- name = os.path.basename(stock_hook)
+ def AbandonBranch(self, name):
+ """Destroy a local topic branch.
- if name in ('commit-msg',) and not self.remote.review \
- and self is not self.manifest.manifestProject:
- # Don't install a Gerrit Code Review hook if this
- # project does not appear to use it for reviews.
- #
- # Since the manifest project is one of those, but also
- # managed through gerrit, it's excluded
- continue
+ Args:
+ name: The name of the branch to abandon.
- dst = os.path.join(hooks, name)
- if platform_utils.islink(dst):
- continue
- if os.path.exists(dst):
- # If the files are the same, we'll leave it alone. We create symlinks
- # below by default but fallback to hardlinks if the OS blocks them.
- # So if we're here, it's probably because we made a hardlink below.
- if not filecmp.cmp(stock_hook, dst, shallow=False):
- if not quiet:
- _warn("%s: Not replacing locally modified %s hook",
- self.RelPath(local=False), name)
- continue
- try:
- platform_utils.symlink(
- os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
- except OSError as e:
- if e.errno == errno.EPERM:
- try:
- os.link(stock_hook, dst)
- except OSError:
- raise GitError(self._get_symlink_error_message())
+ Returns:
+ True if the abandon succeeded; False if it didn't; None if the
+ branch didn't exist.
+ """
+ rev = R_HEADS + name
+ all_refs = self.bare_ref.all
+ if rev not in all_refs:
+ # Doesn't exist
+ return None
+
+ head = self.work_git.GetHead()
+ if head == rev:
+ # We can't destroy the branch while we are sitting
+ # on it. Switch to a detached HEAD.
+ head = all_refs[head]
+
+ revid = self.GetRevisionId(all_refs)
+ if head == revid:
+ _lwrite(
+ self.work_git.GetDotgitPath(subpath=HEAD), "%s\n" % revid
+ )
+ else:
+ self._Checkout(revid, quiet=True)
+
+ return (
+ GitCommand(
+ self,
+ ["branch", "-D", name],
+ capture_stdout=True,
+ capture_stderr=True,
+ ).Wait()
+ == 0
+ )
+
+ def PruneHeads(self):
+ """Prune any topic branches already merged into upstream."""
+ cb = self.CurrentBranch
+ kill = []
+ left = self._allrefs
+ for name in left.keys():
+ if name.startswith(R_HEADS):
+ name = name[len(R_HEADS) :]
+ if cb is None or name != cb:
+ kill.append(name)
+
+ # Minor optimization: If there's nothing to prune, then don't try to
+ # read any project state.
+ if not kill and not cb:
+ return []
+
+ rev = self.GetRevisionId(left)
+ if (
+ cb is not None
+ and not self._revlist(HEAD + "..." + rev)
+ and not self.IsDirty(consider_untracked=False)
+ ):
+ self.work_git.DetachHead(HEAD)
+ kill.append(cb)
+
+ if kill:
+ old = self.bare_git.GetHead()
+
+ try:
+ self.bare_git.DetachHead(rev)
+
+ b = ["branch", "-d"]
+ b.extend(kill)
+ b = GitCommand(
+ self, b, bare=True, capture_stdout=True, capture_stderr=True
+ )
+ b.Wait()
+ finally:
+ if ID_RE.match(old):
+ self.bare_git.DetachHead(old)
+ else:
+ self.bare_git.SetHead(old)
+ left = self._allrefs
+
+ for branch in kill:
+ if (R_HEADS + branch) not in left:
+ self.CleanPublishedCache()
+ break
+
+ if cb and cb not in kill:
+ kill.append(cb)
+ kill.sort()
+
+ kept = []
+ for branch in kill:
+ if R_HEADS + branch in left:
+ branch = self.GetBranch(branch)
+ base = branch.LocalMerge
+ if not base:
+ base = rev
+ kept.append(ReviewableBranch(self, branch, base))
+ return kept
+
+ def GetRegisteredSubprojects(self):
+ result = []
+
+ def rec(subprojects):
+ if not subprojects:
+ return
+ result.extend(subprojects)
+ for p in subprojects:
+ rec(p.subprojects)
+
+ rec(self.subprojects)
+ return result
+
+ def _GetSubmodules(self):
+ # Unfortunately we cannot call `git submodule status --recursive` here
+ # because the working tree might not exist yet, and it cannot be used
+ # without a working tree in its current implementation.
+
+ def get_submodules(gitdir, rev):
+ # Parse .gitmodules for submodule sub_paths and sub_urls.
+ sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
+ if not sub_paths:
+ return []
+ # Run `git ls-tree` to read SHAs of submodule object, which happen
+ # to be revision of submodule repository.
+ sub_revs = git_ls_tree(gitdir, rev, sub_paths)
+ submodules = []
+ for sub_path, sub_url in zip(sub_paths, sub_urls):
+ try:
+ sub_rev = sub_revs[sub_path]
+ except KeyError:
+ # Ignore non-exist submodules.
+ continue
+ submodules.append((sub_rev, sub_path, sub_url))
+ return submodules
+
+ re_path = re.compile(r"^submodule\.(.+)\.path=(.*)$")
+ re_url = re.compile(r"^submodule\.(.+)\.url=(.*)$")
+
+ def parse_gitmodules(gitdir, rev):
+ cmd = ["cat-file", "blob", "%s:.gitmodules" % rev]
+ try:
+ p = GitCommand(
+ None,
+ cmd,
+ capture_stdout=True,
+ capture_stderr=True,
+ bare=True,
+ gitdir=gitdir,
+ )
+ except GitError:
+ return [], []
+ if p.Wait() != 0:
+ return [], []
+
+ gitmodules_lines = []
+ fd, temp_gitmodules_path = tempfile.mkstemp()
+ try:
+ os.write(fd, p.stdout.encode("utf-8"))
+ os.close(fd)
+ cmd = ["config", "--file", temp_gitmodules_path, "--list"]
+ p = GitCommand(
+ None,
+ cmd,
+ capture_stdout=True,
+ capture_stderr=True,
+ bare=True,
+ gitdir=gitdir,
+ )
+ if p.Wait() != 0:
+ return [], []
+ gitmodules_lines = p.stdout.split("\n")
+ except GitError:
+ return [], []
+ finally:
+ platform_utils.remove(temp_gitmodules_path)
+
+ names = set()
+ paths = {}
+ urls = {}
+ for line in gitmodules_lines:
+ if not line:
+ continue
+ m = re_path.match(line)
+ if m:
+ names.add(m.group(1))
+ paths[m.group(1)] = m.group(2)
+ continue
+ m = re_url.match(line)
+ if m:
+ names.add(m.group(1))
+ urls[m.group(1)] = m.group(2)
+ continue
+ names = sorted(names)
+ return (
+ [paths.get(name, "") for name in names],
+ [urls.get(name, "") for name in names],
+ )
+
+ def git_ls_tree(gitdir, rev, paths):
+ cmd = ["ls-tree", rev, "--"]
+ cmd.extend(paths)
+ try:
+ p = GitCommand(
+ None,
+ cmd,
+ capture_stdout=True,
+ capture_stderr=True,
+ bare=True,
+ gitdir=gitdir,
+ )
+ except GitError:
+ return []
+ if p.Wait() != 0:
+ return []
+ objects = {}
+ for line in p.stdout.split("\n"):
+ if not line.strip():
+ continue
+ object_rev, object_path = line.split()[2:4]
+ objects[object_path] = object_rev
+ return objects
+
+ try:
+ rev = self.GetRevisionId()
+ except GitError:
+ return []
+ return get_submodules(self.gitdir, rev)
+
+ def GetDerivedSubprojects(self):
+ result = []
+ if not self.Exists:
+ # If git repo does not exist yet, querying its submodules will
+ # mess up its states; so return here.
+ return result
+ for rev, path, url in self._GetSubmodules():
+ name = self.manifest.GetSubprojectName(self, path)
+ (
+ relpath,
+ worktree,
+ gitdir,
+ objdir,
+ ) = self.manifest.GetSubprojectPaths(self, name, path)
+ project = self.manifest.paths.get(relpath)
+ if project:
+ result.extend(project.GetDerivedSubprojects())
+ continue
+
+ if url.startswith(".."):
+ url = urllib.parse.urljoin("%s/" % self.remote.url, url)
+ remote = RemoteSpec(
+ self.remote.name,
+ url=url,
+ pushUrl=self.remote.pushUrl,
+ review=self.remote.review,
+ revision=self.remote.revision,
+ )
+ subproject = Project(
+ manifest=self.manifest,
+ name=name,
+ remote=remote,
+ gitdir=gitdir,
+ objdir=objdir,
+ worktree=worktree,
+ relpath=relpath,
+ revisionExpr=rev,
+ revisionId=rev,
+ rebase=self.rebase,
+ groups=self.groups,
+ sync_c=self.sync_c,
+ sync_s=self.sync_s,
+ sync_tags=self.sync_tags,
+ parent=self,
+ is_derived=True,
+ )
+ result.append(subproject)
+ result.extend(subproject.GetDerivedSubprojects())
+ return result
+
+ def EnableRepositoryExtension(self, key, value="true", version=1):
+ """Enable git repository extension |key| with |value|.
+
+ Args:
+ key: The extension to enabled. Omit the "extensions." prefix.
+ value: The value to use for the extension.
+ version: The minimum git repository version needed.
+ """
+ # Make sure the git repo version is new enough already.
+ found_version = self.config.GetInt("core.repositoryFormatVersion")
+ if found_version is None:
+ found_version = 0
+ if found_version < version:
+ self.config.SetString("core.repositoryFormatVersion", str(version))
+
+ # Enable the extension!
+ self.config.SetString("extensions.%s" % (key,), value)
+
+ def ResolveRemoteHead(self, name=None):
+ """Find out what the default branch (HEAD) points to.
+
+ Normally this points to refs/heads/master, but projects are moving to
+ main. Support whatever the server uses rather than hardcoding "master"
+ ourselves.
+ """
+ if name is None:
+ name = self.remote.name
+
+ # The output will look like (NB: tabs are separators):
+ # ref: refs/heads/master HEAD
+ # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD
+ output = self.bare_git.ls_remote(
+ "-q", "--symref", "--exit-code", name, "HEAD"
+ )
+
+ for line in output.splitlines():
+ lhs, rhs = line.split("\t", 1)
+ if rhs == "HEAD" and lhs.startswith("ref:"):
+ return lhs[4:].strip()
+
+ return None
+
+ def _CheckForImmutableRevision(self):
+ try:
+ # if revision (sha or tag) is not present then following function
+ # throws an error.
+ self.bare_git.rev_list(
+ "-1", "--missing=allow-any", "%s^0" % self.revisionExpr, "--"
+ )
+ if self.upstream:
+ rev = self.GetRemote().ToLocal(self.upstream)
+ self.bare_git.rev_list(
+ "-1", "--missing=allow-any", "%s^0" % rev, "--"
+ )
+ self.bare_git.merge_base(
+ "--is-ancestor", self.revisionExpr, rev
+ )
+ return True
+ except GitError:
+ # There is no such persistent revision. We have to fetch it.
+ return False
+
+ def _FetchArchive(self, tarpath, cwd=None):
+ cmd = ["archive", "-v", "-o", tarpath]
+ cmd.append("--remote=%s" % self.remote.url)
+ cmd.append("--prefix=%s/" % self.RelPath(local=False))
+ cmd.append(self.revisionExpr)
+
+ command = GitCommand(
+ self, cmd, cwd=cwd, capture_stdout=True, capture_stderr=True
+ )
+
+ if command.Wait() != 0:
+ raise GitError("git archive %s: %s" % (self.name, command.stderr))
+
+ def _RemoteFetch(
+ self,
+ name=None,
+ current_branch_only=False,
+ initial=False,
+ quiet=False,
+ verbose=False,
+ output_redir=None,
+ alt_dir=None,
+ tags=True,
+ prune=False,
+ depth=None,
+ submodules=False,
+ ssh_proxy=None,
+ force_sync=False,
+ clone_filter=None,
+ retry_fetches=2,
+ retry_sleep_initial_sec=4.0,
+ retry_exp_factor=2.0,
+ ):
+ is_sha1 = False
+ tag_name = None
+ # The depth should not be used when fetching to a mirror because
+ # it will result in a shallow repository that cannot be cloned or
+ # fetched from.
+ # The repo project should also never be synced with partial depth.
+ if self.manifest.IsMirror or self.relpath == ".repo/repo":
+ depth = None
+
+ if depth:
+ current_branch_only = True
+
+ if ID_RE.match(self.revisionExpr) is not None:
+ is_sha1 = True
+
+ if current_branch_only:
+ if self.revisionExpr.startswith(R_TAGS):
+ # This is a tag and its commit id should never change.
+ tag_name = self.revisionExpr[len(R_TAGS) :]
+ elif self.upstream and self.upstream.startswith(R_TAGS):
+ # This is a tag and its commit id should never change.
+ tag_name = self.upstream[len(R_TAGS) :]
+
+ if is_sha1 or tag_name is not None:
+ if self._CheckForImmutableRevision():
+ if verbose:
+ print(
+ "Skipped fetching project %s (already have "
+ "persistent ref)" % self.name
+ )
+ return True
+ if is_sha1 and not depth:
+ # When syncing a specific commit and --depth is not set:
+ # * if upstream is explicitly specified and is not a sha1, fetch
+ # only upstream as users expect only upstream to be fetch.
+ # Note: The commit might not be in upstream in which case the
+ # sync will fail.
+ # * otherwise, fetch all branches to make sure we end up with
+ # the specific commit.
+ if self.upstream:
+ current_branch_only = not ID_RE.match(self.upstream)
+ else:
+ current_branch_only = False
+
+ if not name:
+ name = self.remote.name
+
+ remote = self.GetRemote(name)
+ if not remote.PreConnectFetch(ssh_proxy):
+ ssh_proxy = None
+
+ if initial:
+ if alt_dir and "objects" == os.path.basename(alt_dir):
+ ref_dir = os.path.dirname(alt_dir)
+ packed_refs = os.path.join(self.gitdir, "packed-refs")
+
+ all_refs = self.bare_ref.all
+ ids = set(all_refs.values())
+ tmp = set()
+
+ for r, ref_id in GitRefs(ref_dir).all.items():
+ if r not in all_refs:
+ if r.startswith(R_TAGS) or remote.WritesTo(r):
+ all_refs[r] = ref_id
+ ids.add(ref_id)
+ continue
+
+ if ref_id in ids:
+ continue
+
+ r = "refs/_alt/%s" % ref_id
+ all_refs[r] = ref_id
+ ids.add(ref_id)
+ tmp.add(r)
+
+ tmp_packed_lines = []
+ old_packed_lines = []
+
+ for r in sorted(all_refs):
+ line = "%s %s\n" % (all_refs[r], r)
+ tmp_packed_lines.append(line)
+ if r not in tmp:
+ old_packed_lines.append(line)
+
+ tmp_packed = "".join(tmp_packed_lines)
+ old_packed = "".join(old_packed_lines)
+ _lwrite(packed_refs, tmp_packed)
+ else:
+ alt_dir = None
+
+ cmd = ["fetch"]
+
+ if clone_filter:
+ git_require((2, 19, 0), fail=True, msg="partial clones")
+ cmd.append("--filter=%s" % clone_filter)
+ self.EnableRepositoryExtension("partialclone", self.remote.name)
+
+ if depth:
+ cmd.append("--depth=%s" % depth)
else:
- raise
+ # If this repo has shallow objects, then we don't know which refs
+ # have shallow objects or not. Tell git to unshallow all fetched
+ # refs. Don't do this with projects that don't have shallow
+ # objects, since it is less efficient.
+ if os.path.exists(os.path.join(self.gitdir, "shallow")):
+ cmd.append("--depth=2147483647")
- def _InitRemote(self):
- if self.remote.url:
- remote = self.GetRemote()
- remote.url = self.remote.url
- remote.pushUrl = self.remote.pushUrl
- remote.review = self.remote.review
- remote.projectname = self.name
+ if not verbose:
+ cmd.append("--quiet")
+ if not quiet and sys.stdout.isatty():
+ cmd.append("--progress")
+ if not self.worktree:
+ cmd.append("--update-head-ok")
+ cmd.append(name)
- if self.worktree:
- remote.ResetFetch(mirror=False)
- else:
- remote.ResetFetch(mirror=True)
- remote.Save()
+ if force_sync:
+ cmd.append("--force")
- def _InitMRef(self):
- """Initialize the pseudo m/<manifest branch> ref."""
- if self.manifest.branch:
- if self.use_git_worktrees:
- # Set up the m/ space to point to the worktree-specific ref space.
- # We'll update the worktree-specific ref space on each checkout.
- ref = R_M + self.manifest.branch
- if not self.bare_ref.symref(ref):
- self.bare_git.symbolic_ref(
- '-m', 'redirecting to worktree scope',
- ref, R_WORKTREE_M + self.manifest.branch)
+ if prune:
+ cmd.append("--prune")
- # We can't update this ref with git worktrees until it exists.
- # We'll wait until the initial checkout to set it.
- if not os.path.exists(self.worktree):
- return
+ # Always pass something for --recurse-submodules, git with GIT_DIR
+ # behaves incorrectly when not given `--recurse-submodules=no`.
+ # (b/218891912)
+ cmd.append(
+ f'--recurse-submodules={"on-demand" if submodules else "no"}'
+ )
- base = R_WORKTREE_M
- active_git = self.work_git
+ spec = []
+ if not current_branch_only:
+ # Fetch whole repo.
+ spec.append(
+ str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*"))
+ )
+ elif tag_name is not None:
+ spec.append("tag")
+ spec.append(tag_name)
- self._InitAnyMRef(HEAD, self.bare_git, detach=True)
- else:
- base = R_M
- active_git = self.bare_git
-
- self._InitAnyMRef(base + self.manifest.branch, active_git)
-
- def _InitMirrorHead(self):
- self._InitAnyMRef(HEAD, self.bare_git)
-
- def _InitAnyMRef(self, ref, active_git, detach=False):
- """Initialize |ref| in |active_git| to the value in the manifest.
-
- This points |ref| to the <project> setting in the manifest.
-
- Args:
- ref: The branch to update.
- active_git: The git repository to make updates in.
- detach: Whether to update target of symbolic refs, or overwrite the ref
- directly (and thus make it non-symbolic).
- """
- cur = self.bare_ref.symref(ref)
-
- if self.revisionId:
- if cur != '' or self.bare_ref.get(ref) != self.revisionId:
- msg = 'manifest set to %s' % self.revisionId
- dst = self.revisionId + '^0'
- active_git.UpdateRef(ref, dst, message=msg, detach=True)
- else:
- remote = self.GetRemote()
- dst = remote.ToLocal(self.revisionExpr)
- if cur != dst:
- msg = 'manifest set to %s' % self.revisionExpr
- if detach:
- active_git.UpdateRef(ref, dst, message=msg, detach=True)
+ if self.manifest.IsMirror and not current_branch_only:
+ branch = None
else:
- active_git.symbolic_ref('-m', msg, ref, dst)
-
- def _CheckDirReference(self, srcdir, destdir):
- # Git worktrees don't use symlinks to share at all.
- if self.use_git_worktrees:
- return
-
- for name in self.shareable_dirs:
- # Try to self-heal a bit in simple cases.
- dst_path = os.path.join(destdir, name)
- src_path = os.path.join(srcdir, name)
-
- dst = platform_utils.realpath(dst_path)
- if os.path.lexists(dst):
- src = platform_utils.realpath(src_path)
- # Fail if the links are pointing to the wrong place
- if src != dst:
- _error('%s is different in %s vs %s', name, destdir, srcdir)
- raise GitError('--force-sync not enabled; cannot overwrite a local '
- 'work tree. If you\'re comfortable with the '
- 'possibility of losing the work tree\'s git metadata,'
- ' use `repo sync --force-sync {0}` to '
- 'proceed.'.format(self.RelPath(local=False)))
-
- def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
- """Update |dotgit| to reference |gitdir|, using symlinks where possible.
-
- Args:
- gitdir: The bare git repository. Must already be initialized.
- dotgit: The repository you would like to initialize.
- copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
- This saves you the effort of initializing |dotgit| yourself.
- """
- symlink_dirs = self.shareable_dirs[:]
- to_symlink = symlink_dirs
-
- to_copy = []
- if copy_all:
- to_copy = platform_utils.listdir(gitdir)
-
- dotgit = platform_utils.realpath(dotgit)
- for name in set(to_copy).union(to_symlink):
- try:
- src = platform_utils.realpath(os.path.join(gitdir, name))
- dst = os.path.join(dotgit, name)
-
- if os.path.lexists(dst):
- continue
-
- # If the source dir doesn't exist, create an empty dir.
- if name in symlink_dirs and not os.path.lexists(src):
- os.makedirs(src)
-
- if name in to_symlink:
- platform_utils.symlink(
- os.path.relpath(src, os.path.dirname(dst)), dst)
- elif copy_all and not platform_utils.islink(dst):
- if platform_utils.isdir(src):
- shutil.copytree(src, dst)
- elif os.path.isfile(src):
- shutil.copy(src, dst)
-
- except OSError as e:
- if e.errno == errno.EPERM:
- raise DownloadError(self._get_symlink_error_message())
+ branch = self.revisionExpr
+ if (
+ not self.manifest.IsMirror
+ and is_sha1
+ and depth
+ and git_require((1, 8, 3))
+ ):
+ # Shallow checkout of a specific commit, fetch from that commit and
+ # not the heads only as the commit might be deeper in the history.
+ spec.append(branch)
+ if self.upstream:
+ spec.append(self.upstream)
else:
- raise
+ if is_sha1:
+ branch = self.upstream
+ if branch is not None and branch.strip():
+ if not branch.startswith("refs/"):
+ branch = R_HEADS + branch
+ spec.append(str(("+%s:" % branch) + remote.ToLocal(branch)))
- def _InitGitWorktree(self):
- """Init the project using git worktrees."""
- self.bare_git.worktree('prune')
- self.bare_git.worktree('add', '-ff', '--checkout', '--detach', '--lock',
- self.worktree, self.GetRevisionId())
+ # If mirroring repo and we cannot deduce the tag or branch to fetch,
+ # fetch whole repo.
+ if self.manifest.IsMirror and not spec:
+ spec.append(
+ str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*"))
+ )
- # Rewrite the internal state files to use relative paths between the
- # checkouts & worktrees.
- dotgit = os.path.join(self.worktree, '.git')
- with open(dotgit, 'r') as fp:
- # Figure out the checkout->worktree path.
- setting = fp.read()
- assert setting.startswith('gitdir:')
- git_worktree_path = setting.split(':', 1)[1].strip()
- # Some platforms (e.g. Windows) won't let us update dotgit in situ because
- # of file permissions. Delete it and recreate it from scratch to avoid.
- platform_utils.remove(dotgit)
- # Use relative path from checkout->worktree & maintain Unix line endings
- # on all OS's to match git behavior.
- with open(dotgit, 'w', newline='\n') as fp:
- print('gitdir:', os.path.relpath(git_worktree_path, self.worktree),
- file=fp)
- # Use relative path from worktree->checkout & maintain Unix line endings
- # on all OS's to match git behavior.
- with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp:
- print(os.path.relpath(dotgit, git_worktree_path), file=fp)
+ # If using depth then we should not get all the tags since they may
+ # be outside of the depth.
+ if not tags or depth:
+ cmd.append("--no-tags")
+ else:
+ cmd.append("--tags")
+ spec.append(str(("+refs/tags/*:") + remote.ToLocal("refs/tags/*")))
- self._InitMRef()
+ cmd.extend(spec)
- def _InitWorkTree(self, force_sync=False, submodules=False):
- """Setup the worktree .git path.
+ # At least one retry minimum due to git remote prune.
+ retry_fetches = max(retry_fetches, 2)
+ retry_cur_sleep = retry_sleep_initial_sec
+ ok = prune_tried = False
+ for try_n in range(retry_fetches):
+ gitcmd = GitCommand(
+ self,
+ cmd,
+ bare=True,
+ objdir=os.path.join(self.objdir, "objects"),
+ ssh_proxy=ssh_proxy,
+ merge_output=True,
+ capture_stdout=quiet or bool(output_redir),
+ )
+ if gitcmd.stdout and not quiet and output_redir:
+ output_redir.write(gitcmd.stdout)
+ ret = gitcmd.Wait()
+ if ret == 0:
+ ok = True
+ break
- This is the user-visible path like src/foo/.git/.
+ # Retry later due to HTTP 429 Too Many Requests.
+ elif (
+ gitcmd.stdout
+ and "error:" in gitcmd.stdout
+ and "HTTP 429" in gitcmd.stdout
+ ):
+ # Fallthru to sleep+retry logic at the bottom.
+ pass
- With non-git-worktrees, this will be a symlink to the .repo/projects/ path.
- With git-worktrees, this will be a .git file using "gitdir: ..." syntax.
+ # Try to prune remote branches once in case there are conflicts.
+ # For example, if the remote had refs/heads/upstream, but deleted
+ # that and now has refs/heads/upstream/foo.
+ elif (
+ gitcmd.stdout
+ and "error:" in gitcmd.stdout
+ and "git remote prune" in gitcmd.stdout
+ and not prune_tried
+ ):
+ prune_tried = True
+ prunecmd = GitCommand(
+ self,
+ ["remote", "prune", name],
+ bare=True,
+ ssh_proxy=ssh_proxy,
+ )
+ ret = prunecmd.Wait()
+ if ret:
+ break
+ print(
+ "retrying fetch after pruning remote branches",
+ file=output_redir,
+ )
+ # Continue right away so we don't sleep as we shouldn't need to.
+ continue
+ elif current_branch_only and is_sha1 and ret == 128:
+ # Exit code 128 means "couldn't find the ref you asked for"; if
+ # we're in sha1 mode, we just tried sync'ing from the upstream
+ # field; it doesn't exist, thus abort the optimization attempt
+ # and do a full sync.
+ break
+ elif ret < 0:
+ # Git died with a signal, exit immediately.
+ break
- Older checkouts had .git/ directories. If we see that, migrate it.
+ # Figure out how long to sleep before the next attempt, if there is
+ # one.
+ if not verbose and gitcmd.stdout:
+ print(
+ "\n%s:\n%s" % (self.name, gitcmd.stdout),
+ end="",
+ file=output_redir,
+ )
+ if try_n < retry_fetches - 1:
+ print(
+ "%s: sleeping %s seconds before retrying"
+ % (self.name, retry_cur_sleep),
+ file=output_redir,
+ )
+ time.sleep(retry_cur_sleep)
+ retry_cur_sleep = min(
+ retry_exp_factor * retry_cur_sleep, MAXIMUM_RETRY_SLEEP_SEC
+ )
+ retry_cur_sleep *= 1 - random.uniform(
+ -RETRY_JITTER_PERCENT, RETRY_JITTER_PERCENT
+ )
- This also handles changes in the manifest. Maybe this project was backed
- by "foo/bar" on the server, but now it's "new/foo/bar". We have to update
- the path we point to under .repo/projects/ to match.
- """
- dotgit = os.path.join(self.worktree, '.git')
+ if initial:
+ if alt_dir:
+ if old_packed != "":
+ _lwrite(packed_refs, old_packed)
+ else:
+ platform_utils.remove(packed_refs)
+ self.bare_git.pack_refs("--all", "--prune")
- # If using an old layout style (a directory), migrate it.
- if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
- self._MigrateOldWorkTreeGitDir(dotgit)
+ if is_sha1 and current_branch_only:
+ # We just synced the upstream given branch; verify we
+ # got what we wanted, else trigger a second run of all
+ # refs.
+ if not self._CheckForImmutableRevision():
+ # Sync the current branch only with depth set to None.
+ # We always pass depth=None down to avoid infinite recursion.
+ return self._RemoteFetch(
+ name=name,
+ quiet=quiet,
+ verbose=verbose,
+ output_redir=output_redir,
+ current_branch_only=current_branch_only and depth,
+ initial=False,
+ alt_dir=alt_dir,
+ depth=None,
+ ssh_proxy=ssh_proxy,
+ clone_filter=clone_filter,
+ )
- init_dotgit = not os.path.exists(dotgit)
- if self.use_git_worktrees:
- if init_dotgit:
- self._InitGitWorktree()
- self._CopyAndLinkFiles()
- else:
- if not init_dotgit:
- # See if the project has changed.
- if platform_utils.realpath(self.gitdir) != platform_utils.realpath(dotgit):
- platform_utils.remove(dotgit)
+ return ok
- if init_dotgit or not os.path.exists(dotgit):
- os.makedirs(self.worktree, exist_ok=True)
- platform_utils.symlink(os.path.relpath(self.gitdir, self.worktree), dotgit)
+ def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False):
+ if initial and (
+ self.manifest.manifestProject.depth or self.clone_depth
+ ):
+ return False
- if init_dotgit:
- _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
+ remote = self.GetRemote()
+ bundle_url = remote.url + "/clone.bundle"
+ bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
+ if GetSchemeFromUrl(bundle_url) not in (
+ "http",
+ "https",
+ "persistent-http",
+ "persistent-https",
+ ):
+ return False
- # Finish checking out the worktree.
- cmd = ['read-tree', '--reset', '-u', '-v', HEAD]
+ bundle_dst = os.path.join(self.gitdir, "clone.bundle")
+ bundle_tmp = os.path.join(self.gitdir, "clone.bundle.tmp")
+
+ exist_dst = os.path.exists(bundle_dst)
+ exist_tmp = os.path.exists(bundle_tmp)
+
+ if not initial and not exist_dst and not exist_tmp:
+ return False
+
+ if not exist_dst:
+ exist_dst = self._FetchBundle(
+ bundle_url, bundle_tmp, bundle_dst, quiet, verbose
+ )
+ if not exist_dst:
+ return False
+
+ cmd = ["fetch"]
+ if not verbose:
+ cmd.append("--quiet")
+ if not quiet and sys.stdout.isatty():
+ cmd.append("--progress")
+ if not self.worktree:
+ cmd.append("--update-head-ok")
+ cmd.append(bundle_dst)
+ for f in remote.fetch:
+ cmd.append(str(f))
+ cmd.append("+refs/tags/*:refs/tags/*")
+
+ ok = (
+ GitCommand(
+ self,
+ cmd,
+ bare=True,
+ objdir=os.path.join(self.objdir, "objects"),
+ ).Wait()
+ == 0
+ )
+ platform_utils.remove(bundle_dst, missing_ok=True)
+ platform_utils.remove(bundle_tmp, missing_ok=True)
+ return ok
+
    def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
        """Download a clone.bundle with curl and install it at |dstPath|.

        Args:
            srcUrl: URL of the clone.bundle; "persistent-http(s)" schemes
                are rewritten to plain "http(s)" before fetching.
            tmpPath: Staging path for the in-progress download; an existing
                partial file of >= 1024 bytes is resumed via --continue-at.
            dstPath: Final destination for a verified bundle.
            quiet: Run curl with --silent --show-error.
            verbose: Stream curl output and print progress messages.

        Returns:
            True if a valid bundle was downloaded and renamed into place;
            False otherwise.
        """
        platform_utils.remove(dstPath, missing_ok=True)

        cmd = ["curl", "--fail", "--output", tmpPath, "--netrc", "--location"]
        if quiet:
            cmd += ["--silent", "--show-error"]
        if os.path.exists(tmpPath):
            size = os.stat(tmpPath).st_size
            # Only resume partial downloads big enough to bother with;
            # smaller leftovers are discarded and fetched from scratch.
            if size >= 1024:
                cmd += ["--continue-at", "%d" % (size,)]
            else:
                platform_utils.remove(tmpPath)
        with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
            if cookiefile:
                cmd += ["--cookie", cookiefile]
            if proxy:
                cmd += ["--proxy", proxy]
            elif "http_proxy" in os.environ and "darwin" == sys.platform:
                cmd += ["--proxy", os.environ["http_proxy"]]
            if srcUrl.startswith("persistent-https"):
                srcUrl = "http" + srcUrl[len("persistent-https") :]
            elif srcUrl.startswith("persistent-http"):
                srcUrl = "http" + srcUrl[len("persistent-http") :]
            cmd += [srcUrl]

        proc = None
        with Trace("Fetching bundle: %s", " ".join(cmd)):
            if verbose:
                print("%s: Downloading bundle: %s" % (self.name, srcUrl))
            # In verbose mode, let curl write straight to our stdout/stderr.
            stdout = None if verbose else subprocess.PIPE
            stderr = None if verbose else subprocess.STDOUT
            try:
                proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
            except OSError:
                # curl binary missing/not runnable; treat as "no bundle".
                return False

        (output, _) = proc.communicate()
        curlret = proc.returncode

        if curlret == 22:
            # From curl man page:
            # 22: HTTP page not retrieved. The requested url was not found
            # or returned another error with the HTTP error code being 400
            # or above. This return code only appears if -f, --fail is used.
            if verbose:
                print(
                    "%s: Unable to retrieve clone.bundle; ignoring."
                    % self.name
                )
                if output:
                    print("Curl output:\n%s" % output)
            return False
        elif curlret and not verbose and output:
            print("%s" % output, file=sys.stderr)

        if os.path.exists(tmpPath):
            if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
                platform_utils.rename(tmpPath, dstPath)
                return True
            else:
                platform_utils.remove(tmpPath)
                return False
        else:
            return False
+
+ def _IsValidBundle(self, path, quiet):
+ try:
+ with open(path, "rb") as f:
+ if f.read(16) == b"# v2 git bundle\n":
+ return True
+ else:
+ if not quiet:
+ print(
+ "Invalid clone.bundle file; ignoring.",
+ file=sys.stderr,
+ )
+ return False
+ except OSError:
+ return False
+
+ def _Checkout(self, rev, quiet=False):
+ cmd = ["checkout"]
+ if quiet:
+ cmd.append("-q")
+ cmd.append(rev)
+ cmd.append("--")
if GitCommand(self, cmd).Wait() != 0:
- raise GitError('Cannot initialize work tree for ' + self.name)
+ if self._allrefs:
+ raise GitError("%s checkout %s " % (self.name, rev))
- if submodules:
- self._SyncSubmodules(quiet=True)
- self._CopyAndLinkFiles()
+ def _CherryPick(self, rev, ffonly=False, record_origin=False):
+ cmd = ["cherry-pick"]
+ if ffonly:
+ cmd.append("--ff")
+ if record_origin:
+ cmd.append("-x")
+ cmd.append(rev)
+ cmd.append("--")
+ if GitCommand(self, cmd).Wait() != 0:
+ if self._allrefs:
+ raise GitError("%s cherry-pick %s " % (self.name, rev))
- @classmethod
- def _MigrateOldWorkTreeGitDir(cls, dotgit):
- """Migrate the old worktree .git/ dir style to a symlink.
+ def _LsRemote(self, refs):
+ cmd = ["ls-remote", self.remote.name, refs]
+ p = GitCommand(self, cmd, capture_stdout=True)
+ if p.Wait() == 0:
+ return p.stdout
+ return None
- This logic specifically only uses state from |dotgit| to figure out where to
- move content and not |self|. This way if the backing project also changed
- places, we only do the .git/ dir to .git symlink migration here. The path
- updates will happen independently.
- """
- # Figure out where in .repo/projects/ it's pointing to.
- if not os.path.islink(os.path.join(dotgit, 'refs')):
- raise GitError(f'{dotgit}: unsupported checkout state')
- gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, 'refs')))
+ def _Revert(self, rev):
+ cmd = ["revert"]
+ cmd.append("--no-edit")
+ cmd.append(rev)
+ cmd.append("--")
+ if GitCommand(self, cmd).Wait() != 0:
+ if self._allrefs:
+ raise GitError("%s revert %s " % (self.name, rev))
- # Remove known symlink paths that exist in .repo/projects/.
- KNOWN_LINKS = {
- 'config', 'description', 'hooks', 'info', 'logs', 'objects',
- 'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn',
- }
- # Paths that we know will be in both, but are safe to clobber in .repo/projects/.
- SAFE_TO_CLOBBER = {
- 'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'gc.log', 'gitk.cache', 'index',
- 'ORIG_HEAD',
- }
+ def _ResetHard(self, rev, quiet=True):
+ cmd = ["reset", "--hard"]
+ if quiet:
+ cmd.append("-q")
+ cmd.append(rev)
+ if GitCommand(self, cmd).Wait() != 0:
+ raise GitError("%s reset --hard %s " % (self.name, rev))
- # First see if we'd succeed before starting the migration.
- unknown_paths = []
- for name in platform_utils.listdir(dotgit):
- # Ignore all temporary/backup names. These are common with vim & emacs.
- if name.endswith('~') or (name[0] == '#' and name[-1] == '#'):
- continue
+ def _SyncSubmodules(self, quiet=True):
+ cmd = ["submodule", "update", "--init", "--recursive"]
+ if quiet:
+ cmd.append("-q")
+ if GitCommand(self, cmd).Wait() != 0:
+ raise GitError(
+ "%s submodule update --init --recursive " % self.name
+ )
- dotgit_path = os.path.join(dotgit, name)
- if name in KNOWN_LINKS:
- if not platform_utils.islink(dotgit_path):
- unknown_paths.append(f'{dotgit_path}: should be a symlink')
- else:
- gitdir_path = os.path.join(gitdir, name)
- if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path):
- unknown_paths.append(f'{dotgit_path}: unknown file; please file a bug')
- if unknown_paths:
- raise GitError('Aborting migration: ' + '\n'.join(unknown_paths))
+ def _Rebase(self, upstream, onto=None):
+ cmd = ["rebase"]
+ if onto is not None:
+ cmd.extend(["--onto", onto])
+ cmd.append(upstream)
+ if GitCommand(self, cmd).Wait() != 0:
+ raise GitError("%s rebase %s " % (self.name, upstream))
- # Now walk the paths and sync the .git/ to .repo/projects/.
- for name in platform_utils.listdir(dotgit):
- dotgit_path = os.path.join(dotgit, name)
+ def _FastForward(self, head, ffonly=False):
+ cmd = ["merge", "--no-stat", head]
+ if ffonly:
+ cmd.append("--ff-only")
+ if GitCommand(self, cmd).Wait() != 0:
+ raise GitError("%s merge %s " % (self.name, head))
- # Ignore all temporary/backup names. These are common with vim & emacs.
- if name.endswith('~') or (name[0] == '#' and name[-1] == '#'):
- platform_utils.remove(dotgit_path)
- elif name in KNOWN_LINKS:
- platform_utils.remove(dotgit_path)
- else:
- gitdir_path = os.path.join(gitdir, name)
- platform_utils.remove(gitdir_path, missing_ok=True)
- platform_utils.rename(dotgit_path, gitdir_path)
+ def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
+ init_git_dir = not os.path.exists(self.gitdir)
+ init_obj_dir = not os.path.exists(self.objdir)
+ try:
+ # Initialize the bare repository, which contains all of the objects.
+ if init_obj_dir:
+ os.makedirs(self.objdir)
+ self.bare_objdir.init()
- # Now that the dir should be empty, clear it out, and symlink it over.
- platform_utils.rmdir(dotgit)
- platform_utils.symlink(os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit)
+ self._UpdateHooks(quiet=quiet)
- def _get_symlink_error_message(self):
- if platform_utils.isWindows():
- return ('Unable to create symbolic link. Please re-run the command as '
- 'Administrator, or see '
- 'https://github.com/git-for-windows/git/wiki/Symbolic-Links '
- 'for other options.')
- return 'filesystem must support symlinks'
+ if self.use_git_worktrees:
+ # Enable per-worktree config file support if possible. This
+ # is more a nice-to-have feature for users rather than a
+ # hard requirement.
+ if git_require((2, 20, 0)):
+ self.EnableRepositoryExtension("worktreeConfig")
- def _revlist(self, *args, **kw):
- a = []
- a.extend(args)
- a.append('--')
- return self.work_git.rev_list(*a, **kw)
+ # If we have a separate directory to hold refs, initialize it as
+ # well.
+ if self.objdir != self.gitdir:
+ if init_git_dir:
+ os.makedirs(self.gitdir)
- @property
- def _allrefs(self):
- return self.bare_ref.all
+ if init_obj_dir or init_git_dir:
+ self._ReferenceGitDir(
+ self.objdir, self.gitdir, copy_all=True
+ )
+ try:
+ self._CheckDirReference(self.objdir, self.gitdir)
+ except GitError as e:
+ if force_sync:
+ print(
+ "Retrying clone after deleting %s" % self.gitdir,
+ file=sys.stderr,
+ )
+ try:
+ platform_utils.rmtree(
+ platform_utils.realpath(self.gitdir)
+ )
+ if self.worktree and os.path.exists(
+ platform_utils.realpath(self.worktree)
+ ):
+ platform_utils.rmtree(
+ platform_utils.realpath(self.worktree)
+ )
+ return self._InitGitDir(
+ mirror_git=mirror_git,
+ force_sync=False,
+ quiet=quiet,
+ )
+ except Exception:
+ raise e
+ raise e
- def _getLogs(self, rev1, rev2, oneline=False, color=True, pretty_format=None):
- """Get logs between two revisions of this project."""
- comp = '..'
- if rev1:
- revs = [rev1]
- if rev2:
- revs.extend([comp, rev2])
- cmd = ['log', ''.join(revs)]
- out = DiffColoring(self.config)
- if out.is_on and color:
- cmd.append('--color')
- if pretty_format is not None:
- cmd.append('--pretty=format:%s' % pretty_format)
- if oneline:
- cmd.append('--oneline')
+ if init_git_dir:
+ mp = self.manifest.manifestProject
+ ref_dir = mp.reference or ""
- try:
- log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
- if log.Wait() == 0:
- return log.stdout
- except GitError:
- # worktree may not exist if groups changed for example. In that case,
- # try in gitdir instead.
- if not os.path.exists(self.worktree):
- return self.bare_git.log(*cmd[1:])
+ def _expanded_ref_dirs():
+ """Iterate through possible git reference dir paths."""
+ name = self.name + ".git"
+ yield mirror_git or os.path.join(ref_dir, name)
+ for prefix in "", self.remote.name:
+ yield os.path.join(
+ ref_dir, ".repo", "project-objects", prefix, name
+ )
+ yield os.path.join(
+ ref_dir, ".repo", "worktrees", prefix, name
+ )
+
+ if ref_dir or mirror_git:
+ found_ref_dir = None
+ for path in _expanded_ref_dirs():
+ if os.path.exists(path):
+ found_ref_dir = path
+ break
+ ref_dir = found_ref_dir
+
+ if ref_dir:
+ if not os.path.isabs(ref_dir):
+ # The alternate directory is relative to the object
+ # database.
+ ref_dir = os.path.relpath(
+ ref_dir, os.path.join(self.objdir, "objects")
+ )
+ _lwrite(
+ os.path.join(
+ self.objdir, "objects/info/alternates"
+ ),
+ os.path.join(ref_dir, "objects") + "\n",
+ )
+
+ m = self.manifest.manifestProject.config
+ for key in ["user.name", "user.email"]:
+ if m.Has(key, include_defaults=False):
+ self.config.SetString(key, m.GetString(key))
+ if not self.manifest.EnableGitLfs:
+ self.config.SetString(
+ "filter.lfs.smudge", "git-lfs smudge --skip -- %f"
+ )
+ self.config.SetString(
+ "filter.lfs.process", "git-lfs filter-process --skip"
+ )
+ self.config.SetBoolean(
+ "core.bare", True if self.manifest.IsMirror else None
+ )
+ except Exception:
+ if init_obj_dir and os.path.exists(self.objdir):
+ platform_utils.rmtree(self.objdir)
+ if init_git_dir and os.path.exists(self.gitdir):
+ platform_utils.rmtree(self.gitdir)
+ raise
+
+ def _UpdateHooks(self, quiet=False):
+ if os.path.exists(self.objdir):
+ self._InitHooks(quiet=quiet)
+
    def _InitHooks(self, quiet=False):
        """Install repo's stock git hooks into this project's hooks dir.

        Sample hooks are deleted; locally modified hooks are left in place
        (with a warning unless |quiet|).  The Gerrit commit-msg hook is only
        installed for projects that appear to use Gerrit reviews.
        """
        hooks = platform_utils.realpath(os.path.join(self.objdir, "hooks"))
        if not os.path.exists(hooks):
            os.makedirs(hooks)

        # Delete sample hooks. They're noise.
        for hook in glob.glob(os.path.join(hooks, "*.sample")):
            try:
                platform_utils.remove(hook, missing_ok=True)
            except PermissionError:
                pass

        for stock_hook in _ProjectHooks():
            name = os.path.basename(stock_hook)

            if (
                name in ("commit-msg",)
                and not self.remote.review
                and self is not self.manifest.manifestProject
            ):
                # Don't install a Gerrit Code Review hook if this
                # project does not appear to use it for reviews.
                #
                # Since the manifest project is one of those, but also
                # managed through gerrit, it's excluded.
                continue

            dst = os.path.join(hooks, name)
            if platform_utils.islink(dst):
                continue
            if os.path.exists(dst):
                # If the files are the same, we'll leave it alone. We create
                # symlinks below by default but fallback to hardlinks if the OS
                # blocks them. So if we're here, it's probably because we made a
                # hardlink below.
                if not filecmp.cmp(stock_hook, dst, shallow=False):
                    if not quiet:
                        _warn(
                            "%s: Not replacing locally modified %s hook",
                            self.RelPath(local=False),
                            name,
                        )
                    continue
            try:
                # Prefer a relative symlink; fall back to a hardlink when the
                # OS denies symlink creation (EPERM).
                platform_utils.symlink(
                    os.path.relpath(stock_hook, os.path.dirname(dst)), dst
                )
            except OSError as e:
                if e.errno == errno.EPERM:
                    try:
                        os.link(stock_hook, dst)
                    except OSError:
                        raise GitError(self._get_symlink_error_message())
                else:
                    raise
+
+ def _InitRemote(self):
+ if self.remote.url:
+ remote = self.GetRemote()
+ remote.url = self.remote.url
+ remote.pushUrl = self.remote.pushUrl
+ remote.review = self.remote.review
+ remote.projectname = self.name
+
+ if self.worktree:
+ remote.ResetFetch(mirror=False)
+ else:
+ remote.ResetFetch(mirror=True)
+ remote.Save()
+
    def _InitMRef(self):
        """Initialize the pseudo m/<manifest branch> ref."""
        if self.manifest.branch:
            if self.use_git_worktrees:
                # Set up the m/ space to point to the worktree-specific ref
                # space. We'll update the worktree-specific ref space on each
                # checkout.
                ref = R_M + self.manifest.branch
                if not self.bare_ref.symref(ref):
                    self.bare_git.symbolic_ref(
                        "-m",
                        "redirecting to worktree scope",
                        ref,
                        R_WORKTREE_M + self.manifest.branch,
                    )

                # We can't update this ref with git worktrees until it exists.
                # We'll wait until the initial checkout to set it.
                if not os.path.exists(self.worktree):
                    return

                base = R_WORKTREE_M
                active_git = self.work_git

                # Pin HEAD directly (detach=True -> non-symbolic update).
                self._InitAnyMRef(HEAD, self.bare_git, detach=True)
            else:
                base = R_M
                active_git = self.bare_git

            self._InitAnyMRef(base + self.manifest.branch, active_git)
+
    def _InitMirrorHead(self):
        """Point the pseudo m/ ref at HEAD in the bare repository."""
        self._InitAnyMRef(HEAD, self.bare_git)
+
+ def _InitAnyMRef(self, ref, active_git, detach=False):
+ """Initialize |ref| in |active_git| to the value in the manifest.
+
+ This points |ref| to the <project> setting in the manifest.
+
+ Args:
+ ref: The branch to update.
+ active_git: The git repository to make updates in.
+ detach: Whether to update target of symbolic refs, or overwrite the
+ ref directly (and thus make it non-symbolic).
+ """
+ cur = self.bare_ref.symref(ref)
+
+ if self.revisionId:
+ if cur != "" or self.bare_ref.get(ref) != self.revisionId:
+ msg = "manifest set to %s" % self.revisionId
+ dst = self.revisionId + "^0"
+ active_git.UpdateRef(ref, dst, message=msg, detach=True)
else:
- raise
- return None
+ remote = self.GetRemote()
+ dst = remote.ToLocal(self.revisionExpr)
+ if cur != dst:
+ msg = "manifest set to %s" % self.revisionExpr
+ if detach:
+ active_git.UpdateRef(ref, dst, message=msg, detach=True)
+ else:
+ active_git.symbolic_ref("-m", msg, ref, dst)
- def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True,
- pretty_format=None):
- """Get the list of logs from this revision to given revisionId"""
- logs = {}
- selfId = self.GetRevisionId(self._allrefs)
- toId = toProject.GetRevisionId(toProject._allrefs)
+ def _CheckDirReference(self, srcdir, destdir):
+ # Git worktrees don't use symlinks to share at all.
+ if self.use_git_worktrees:
+ return
- logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color,
- pretty_format=pretty_format)
- logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color,
- pretty_format=pretty_format)
- return logs
+ for name in self.shareable_dirs:
+ # Try to self-heal a bit in simple cases.
+ dst_path = os.path.join(destdir, name)
+ src_path = os.path.join(srcdir, name)
- class _GitGetByExec(object):
+ dst = platform_utils.realpath(dst_path)
+ if os.path.lexists(dst):
+ src = platform_utils.realpath(src_path)
+ # Fail if the links are pointing to the wrong place.
+ if src != dst:
+ _error("%s is different in %s vs %s", name, destdir, srcdir)
+ raise GitError(
+ "--force-sync not enabled; cannot overwrite a local "
+ "work tree. If you're comfortable with the "
+ "possibility of losing the work tree's git metadata,"
+ " use `repo sync --force-sync {0}` to "
+ "proceed.".format(self.RelPath(local=False))
+ )
- def __init__(self, project, bare, gitdir):
- self._project = project
- self._bare = bare
- self._gitdir = gitdir
+ def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
+ """Update |dotgit| to reference |gitdir|, using symlinks where possible.
- # __getstate__ and __setstate__ are required for pickling because __getattr__ exists.
- def __getstate__(self):
- return (self._project, self._bare, self._gitdir)
+ Args:
+ gitdir: The bare git repository. Must already be initialized.
+ dotgit: The repository you would like to initialize.
+ copy_all: If true, copy all remaining files from |gitdir| ->
+ |dotgit|. This saves you the effort of initializing |dotgit|
+ yourself.
+ """
+ symlink_dirs = self.shareable_dirs[:]
+ to_symlink = symlink_dirs
- def __setstate__(self, state):
- self._project, self._bare, self._gitdir = state
+ to_copy = []
+ if copy_all:
+ to_copy = platform_utils.listdir(gitdir)
- def LsOthers(self):
- p = GitCommand(self._project,
- ['ls-files',
- '-z',
- '--others',
- '--exclude-standard'],
- bare=False,
- gitdir=self._gitdir,
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() == 0:
- out = p.stdout
- if out:
- # Backslash is not anomalous
- return out[:-1].split('\0')
- return []
+ dotgit = platform_utils.realpath(dotgit)
+ for name in set(to_copy).union(to_symlink):
+ try:
+ src = platform_utils.realpath(os.path.join(gitdir, name))
+ dst = os.path.join(dotgit, name)
- def DiffZ(self, name, *args):
- cmd = [name]
- cmd.append('-z')
- cmd.append('--ignore-submodules')
- cmd.extend(args)
- p = GitCommand(self._project,
- cmd,
- gitdir=self._gitdir,
- bare=False,
- capture_stdout=True,
- capture_stderr=True)
- p.Wait()
- r = {}
- out = p.stdout
- if out:
- out = iter(out[:-1].split('\0'))
- while out:
- try:
- info = next(out)
- path = next(out)
- except StopIteration:
- break
+ if os.path.lexists(dst):
+ continue
- class _Info(object):
+ # If the source dir doesn't exist, create an empty dir.
+ if name in symlink_dirs and not os.path.lexists(src):
+ os.makedirs(src)
- def __init__(self, path, omode, nmode, oid, nid, state):
- self.path = path
- self.src_path = None
- self.old_mode = omode
- self.new_mode = nmode
- self.old_id = oid
- self.new_id = nid
+ if name in to_symlink:
+ platform_utils.symlink(
+ os.path.relpath(src, os.path.dirname(dst)), dst
+ )
+ elif copy_all and not platform_utils.islink(dst):
+ if platform_utils.isdir(src):
+ shutil.copytree(src, dst)
+ elif os.path.isfile(src):
+ shutil.copy(src, dst)
- if len(state) == 1:
- self.status = state
- self.level = None
- else:
- self.status = state[:1]
- self.level = state[1:]
- while self.level.startswith('0'):
- self.level = self.level[1:]
+ except OSError as e:
+ if e.errno == errno.EPERM:
+ raise DownloadError(self._get_symlink_error_message())
+ else:
+ raise
- info = info[1:].split(' ')
- info = _Info(path, *info)
- if info.status in ('R', 'C'):
- info.src_path = info.path
- info.path = next(out)
- r[info.path] = info
- return r
+ def _InitGitWorktree(self):
+ """Init the project using git worktrees."""
+ self.bare_git.worktree("prune")
+ self.bare_git.worktree(
+ "add",
+ "-ff",
+ "--checkout",
+ "--detach",
+ "--lock",
+ self.worktree,
+ self.GetRevisionId(),
+ )
- def GetDotgitPath(self, subpath=None):
- """Return the full path to the .git dir.
-
- As a convenience, append |subpath| if provided.
- """
- if self._bare:
- dotgit = self._gitdir
- else:
- dotgit = os.path.join(self._project.worktree, '.git')
- if os.path.isfile(dotgit):
- # Git worktrees use a "gitdir:" syntax to point to the scratch space.
- with open(dotgit) as fp:
+ # Rewrite the internal state files to use relative paths between the
+ # checkouts & worktrees.
+ dotgit = os.path.join(self.worktree, ".git")
+ with open(dotgit, "r") as fp:
+ # Figure out the checkout->worktree path.
setting = fp.read()
- assert setting.startswith('gitdir:')
- gitdir = setting.split(':', 1)[1].strip()
- dotgit = os.path.normpath(os.path.join(self._project.worktree, gitdir))
+ assert setting.startswith("gitdir:")
+ git_worktree_path = setting.split(":", 1)[1].strip()
+ # Some platforms (e.g. Windows) won't let us update dotgit in situ
+ # because of file permissions. Delete it and recreate it from scratch
+ # to avoid.
+ platform_utils.remove(dotgit)
+ # Use relative path from checkout->worktree & maintain Unix line endings
+ # on all OS's to match git behavior.
+ with open(dotgit, "w", newline="\n") as fp:
+ print(
+ "gitdir:",
+ os.path.relpath(git_worktree_path, self.worktree),
+ file=fp,
+ )
+ # Use relative path from worktree->checkout & maintain Unix line endings
+ # on all OS's to match git behavior.
+ with open(
+ os.path.join(git_worktree_path, "gitdir"), "w", newline="\n"
+ ) as fp:
+ print(os.path.relpath(dotgit, git_worktree_path), file=fp)
- return dotgit if subpath is None else os.path.join(dotgit, subpath)
+ self._InitMRef()
- def GetHead(self):
- """Return the ref that HEAD points to."""
- path = self.GetDotgitPath(subpath=HEAD)
- try:
- with open(path) as fd:
- line = fd.readline()
- except IOError as e:
- raise NoManifestException(path, str(e))
- try:
- line = line.decode()
- except AttributeError:
- pass
- if line.startswith('ref: '):
- return line[5:-1]
- return line[:-1]
+ def _InitWorkTree(self, force_sync=False, submodules=False):
+ """Setup the worktree .git path.
- def SetHead(self, ref, message=None):
- cmdv = []
- if message is not None:
- cmdv.extend(['-m', message])
- cmdv.append(HEAD)
- cmdv.append(ref)
- self.symbolic_ref(*cmdv)
+ This is the user-visible path like src/foo/.git/.
- def DetachHead(self, new, message=None):
- cmdv = ['--no-deref']
- if message is not None:
- cmdv.extend(['-m', message])
- cmdv.append(HEAD)
- cmdv.append(new)
- self.update_ref(*cmdv)
+ With non-git-worktrees, this will be a symlink to the .repo/projects/
+ path. With git-worktrees, this will be a .git file using "gitdir: ..."
+ syntax.
- def UpdateRef(self, name, new, old=None,
- message=None,
- detach=False):
- cmdv = []
- if message is not None:
- cmdv.extend(['-m', message])
- if detach:
- cmdv.append('--no-deref')
- cmdv.append(name)
- cmdv.append(new)
- if old is not None:
- cmdv.append(old)
- self.update_ref(*cmdv)
+ Older checkouts had .git/ directories. If we see that, migrate it.
- def DeleteRef(self, name, old=None):
- if not old:
- old = self.rev_parse(name)
- self.update_ref('-d', name, old)
- self._project.bare_ref.deleted(name)
+ This also handles changes in the manifest. Maybe this project was
+ backed by "foo/bar" on the server, but now it's "new/foo/bar". We have
+ to update the path we point to under .repo/projects/ to match.
+ """
+ dotgit = os.path.join(self.worktree, ".git")
- def rev_list(self, *args, **kw):
- if 'format' in kw:
- cmdv = ['log', '--pretty=format:%s' % kw['format']]
- else:
- cmdv = ['rev-list']
- cmdv.extend(args)
- p = GitCommand(self._project,
- cmdv,
- bare=self._bare,
- gitdir=self._gitdir,
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() != 0:
- raise GitError('%s rev-list %s: %s' %
- (self._project.name, str(args), p.stderr))
- return p.stdout.splitlines()
+ # If using an old layout style (a directory), migrate it.
+ if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
+ self._MigrateOldWorkTreeGitDir(dotgit)
- def __getattr__(self, name):
- """Allow arbitrary git commands using pythonic syntax.
+ init_dotgit = not os.path.exists(dotgit)
+ if self.use_git_worktrees:
+ if init_dotgit:
+ self._InitGitWorktree()
+ self._CopyAndLinkFiles()
+ else:
+ if not init_dotgit:
+ # See if the project has changed.
+ if platform_utils.realpath(
+ self.gitdir
+ ) != platform_utils.realpath(dotgit):
+ platform_utils.remove(dotgit)
- This allows you to do things like:
- git_obj.rev_parse('HEAD')
+ if init_dotgit or not os.path.exists(dotgit):
+ os.makedirs(self.worktree, exist_ok=True)
+ platform_utils.symlink(
+ os.path.relpath(self.gitdir, self.worktree), dotgit
+ )
- Since we don't have a 'rev_parse' method defined, the __getattr__ will
- run. We'll replace the '_' with a '-' and try to run a git command.
- Any other positional arguments will be passed to the git command, and the
- following keyword arguments are supported:
- config: An optional dict of git config options to be passed with '-c'.
+ if init_dotgit:
+ _lwrite(
+ os.path.join(dotgit, HEAD), "%s\n" % self.GetRevisionId()
+ )
- Args:
- name: The name of the git command to call. Any '_' characters will
- be replaced with '-'.
+ # Finish checking out the worktree.
+ cmd = ["read-tree", "--reset", "-u", "-v", HEAD]
+ if GitCommand(self, cmd).Wait() != 0:
+ raise GitError(
+ "Cannot initialize work tree for " + self.name
+ )
- Returns:
- A callable object that will try to call git with the named command.
- """
- name = name.replace('_', '-')
+ if submodules:
+ self._SyncSubmodules(quiet=True)
+ self._CopyAndLinkFiles()
- def runner(*args, **kwargs):
- cmdv = []
- config = kwargs.pop('config', None)
- for k in kwargs:
- raise TypeError('%s() got an unexpected keyword argument %r'
- % (name, k))
- if config is not None:
- for k, v in config.items():
- cmdv.append('-c')
- cmdv.append('%s=%s' % (k, v))
- cmdv.append(name)
- cmdv.extend(args)
- p = GitCommand(self._project,
- cmdv,
- bare=self._bare,
- gitdir=self._gitdir,
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() != 0:
- raise GitError('%s %s: %s' %
- (self._project.name, name, p.stderr))
- r = p.stdout
- if r.endswith('\n') and r.index('\n') == len(r) - 1:
- return r[:-1]
- return r
- return runner
    @classmethod
    def _MigrateOldWorkTreeGitDir(cls, dotgit):
        """Migrate the old worktree .git/ dir style to a symlink.

        This logic specifically only uses state from |dotgit| to figure out
        where to move content and not |self|. This way if the backing project
        also changed places, we only do the .git/ dir to .git symlink migration
        here. The path updates will happen independently.

        Args:
            dotgit: Path to the old-style .git/ directory to migrate.

        Raises:
            GitError: If the checkout state is unsupported, or unknown files
                would block a safe migration.
        """
        # Figure out where in .repo/projects/ it's pointing to.
        if not os.path.islink(os.path.join(dotgit, "refs")):
            raise GitError(f"{dotgit}: unsupported checkout state")
        gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, "refs")))

        # Remove known symlink paths that exist in .repo/projects/.
        KNOWN_LINKS = {
            "config",
            "description",
            "hooks",
            "info",
            "logs",
            "objects",
            "packed-refs",
            "refs",
            "rr-cache",
            "shallow",
            "svn",
        }
        # Paths that we know will be in both, but are safe to clobber in
        # .repo/projects/.
        SAFE_TO_CLOBBER = {
            "COMMIT_EDITMSG",
            "FETCH_HEAD",
            "HEAD",
            "gc.log",
            "gitk.cache",
            "index",
            "ORIG_HEAD",
        }

        # First see if we'd succeed before starting the migration.
        unknown_paths = []
        for name in platform_utils.listdir(dotgit):
            # Ignore all temporary/backup names. These are common with vim &
            # emacs.
            if name.endswith("~") or (name[0] == "#" and name[-1] == "#"):
                continue

            dotgit_path = os.path.join(dotgit, name)
            if name in KNOWN_LINKS:
                if not platform_utils.islink(dotgit_path):
                    unknown_paths.append(f"{dotgit_path}: should be a symlink")
            else:
                gitdir_path = os.path.join(gitdir, name)
                if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path):
                    unknown_paths.append(
                        f"{dotgit_path}: unknown file; please file a bug"
                    )
        if unknown_paths:
            raise GitError("Aborting migration: " + "\n".join(unknown_paths))

        # Now walk the paths and sync the .git/ to .repo/projects/.
        for name in platform_utils.listdir(dotgit):
            dotgit_path = os.path.join(dotgit, name)

            # Ignore all temporary/backup names. These are common with vim &
            # emacs.
            if name.endswith("~") or (name[0] == "#" and name[-1] == "#"):
                platform_utils.remove(dotgit_path)
            elif name in KNOWN_LINKS:
                platform_utils.remove(dotgit_path)
            else:
                gitdir_path = os.path.join(gitdir, name)
                platform_utils.remove(gitdir_path, missing_ok=True)
                platform_utils.rename(dotgit_path, gitdir_path)

        # Now that the dir should be empty, clear it out, and symlink it over.
        platform_utils.rmdir(dotgit)
        platform_utils.symlink(
            os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit
        )
+
+ def _get_symlink_error_message(self):
+ if platform_utils.isWindows():
+ return (
+ "Unable to create symbolic link. Please re-run the command as "
+ "Administrator, or see "
+ "https://github.com/git-for-windows/git/wiki/Symbolic-Links "
+ "for other options."
+ )
+ return "filesystem must support symlinks"
+
+ def _revlist(self, *args, **kw):
+ a = []
+ a.extend(args)
+ a.append("--")
+ return self.work_git.rev_list(*a, **kw)
+
    @property
    def _allrefs(self):
        """All refs in the bare repository."""
        return self.bare_ref.all
+
+ def _getLogs(
+ self, rev1, rev2, oneline=False, color=True, pretty_format=None
+ ):
+ """Get logs between two revisions of this project."""
+ comp = ".."
+ if rev1:
+ revs = [rev1]
+ if rev2:
+ revs.extend([comp, rev2])
+ cmd = ["log", "".join(revs)]
+ out = DiffColoring(self.config)
+ if out.is_on and color:
+ cmd.append("--color")
+ if pretty_format is not None:
+ cmd.append("--pretty=format:%s" % pretty_format)
+ if oneline:
+ cmd.append("--oneline")
+
+ try:
+ log = GitCommand(
+ self, cmd, capture_stdout=True, capture_stderr=True
+ )
+ if log.Wait() == 0:
+ return log.stdout
+ except GitError:
+ # worktree may not exist if groups changed for example. In that
+ # case, try in gitdir instead.
+ if not os.path.exists(self.worktree):
+ return self.bare_git.log(*cmd[1:])
+ else:
+ raise
+ return None
+
+ def getAddedAndRemovedLogs(
+ self, toProject, oneline=False, color=True, pretty_format=None
+ ):
+ """Get the list of logs from this revision to given revisionId"""
+ logs = {}
+ selfId = self.GetRevisionId(self._allrefs)
+ toId = toProject.GetRevisionId(toProject._allrefs)
+
+ logs["added"] = self._getLogs(
+ selfId,
+ toId,
+ oneline=oneline,
+ color=color,
+ pretty_format=pretty_format,
+ )
+ logs["removed"] = self._getLogs(
+ toId,
+ selfId,
+ oneline=oneline,
+ color=color,
+ pretty_format=pretty_format,
+ )
+ return logs
+
    class _GitGetByExec(object):
        """Run git commands for a project via dynamic attribute dispatch.

        Unknown attribute lookups are turned into git invocations through
        __getattr__ (e.g. obj.rev_parse(...) runs `git rev-parse ...`).
        """

        def __init__(self, project, bare, gitdir):
            self._project = project
            self._bare = bare
            self._gitdir = gitdir

        # __getstate__ and __setstate__ are required for pickling because
        # __getattr__ exists.
        def __getstate__(self):
            return (self._project, self._bare, self._gitdir)

        def __setstate__(self, state):
            self._project, self._bare, self._gitdir = state

        def LsOthers(self):
            """List untracked, non-ignored paths (ls-files --others)."""
            p = GitCommand(
                self._project,
                ["ls-files", "-z", "--others", "--exclude-standard"],
                bare=False,
                gitdir=self._gitdir,
                capture_stdout=True,
                capture_stderr=True,
            )
            if p.Wait() == 0:
                out = p.stdout
                if out:
                    # Backslash is not anomalous.
                    return out[:-1].split("\0")
            return []

        def DiffZ(self, name, *args):
            """Run `git <name> -z --ignore-submodules` and parse the output.

            Returns:
                Dict mapping each changed path to an info object carrying
                old/new mode & id, status letter, and (for renames/copies)
                the source path.
            """
            cmd = [name]
            cmd.append("-z")
            cmd.append("--ignore-submodules")
            cmd.extend(args)
            p = GitCommand(
                self._project,
                cmd,
                gitdir=self._gitdir,
                bare=False,
                capture_stdout=True,
                capture_stderr=True,
            )
            p.Wait()
            r = {}
            out = p.stdout
            if out:
                # Output is NUL-separated <info>\0<path> pairs.
                out = iter(out[:-1].split("\0"))
                while out:
                    try:
                        info = next(out)
                        path = next(out)
                    except StopIteration:
                        break

                    class _Info(object):
                        def __init__(self, path, omode, nmode, oid, nid, state):
                            self.path = path
                            self.src_path = None
                            self.old_mode = omode
                            self.new_mode = nmode
                            self.old_id = oid
                            self.new_id = nid

                            if len(state) == 1:
                                self.status = state
                                self.level = None
                            else:
                                self.status = state[:1]
                                self.level = state[1:]
                                # Drop leading zeros from the score.
                                while self.level.startswith("0"):
                                    self.level = self.level[1:]

                    info = info[1:].split(" ")
                    info = _Info(path, *info)
                    if info.status in ("R", "C"):
                        # Renames/copies carry an extra path entry.
                        info.src_path = info.path
                        info.path = next(out)
                    r[info.path] = info
            return r

        def GetDotgitPath(self, subpath=None):
            """Return the full path to the .git dir.

            As a convenience, append |subpath| if provided.
            """
            if self._bare:
                dotgit = self._gitdir
            else:
                dotgit = os.path.join(self._project.worktree, ".git")
                if os.path.isfile(dotgit):
                    # Git worktrees use a "gitdir:" syntax to point to the
                    # scratch space.
                    with open(dotgit) as fp:
                        setting = fp.read()
                    assert setting.startswith("gitdir:")
                    gitdir = setting.split(":", 1)[1].strip()
                    dotgit = os.path.normpath(
                        os.path.join(self._project.worktree, gitdir)
                    )

            return dotgit if subpath is None else os.path.join(dotgit, subpath)

        def GetHead(self):
            """Return the ref that HEAD points to."""
            path = self.GetDotgitPath(subpath=HEAD)
            try:
                with open(path) as fd:
                    line = fd.readline()
            except IOError as e:
                raise NoManifestException(path, str(e))
            try:
                line = line.decode()
            except AttributeError:
                pass
            if line.startswith("ref: "):
                return line[5:-1]
            return line[:-1]

        def SetHead(self, ref, message=None):
            """Point the symbolic HEAD at |ref|."""
            cmdv = []
            if message is not None:
                cmdv.extend(["-m", message])
            cmdv.append(HEAD)
            cmdv.append(ref)
            self.symbolic_ref(*cmdv)

        def DetachHead(self, new, message=None):
            """Detach HEAD to |new| (update-ref --no-deref)."""
            cmdv = ["--no-deref"]
            if message is not None:
                cmdv.extend(["-m", message])
            cmdv.append(HEAD)
            cmdv.append(new)
            self.update_ref(*cmdv)

        def UpdateRef(self, name, new, old=None, message=None, detach=False):
            """Update ref |name| to |new|, optionally verifying |old|."""
            cmdv = []
            if message is not None:
                cmdv.extend(["-m", message])
            if detach:
                cmdv.append("--no-deref")
            cmdv.append(name)
            cmdv.append(new)
            if old is not None:
                cmdv.append(old)
            self.update_ref(*cmdv)

        def DeleteRef(self, name, old=None):
            """Delete ref |name| and record the deletion in bare_ref."""
            if not old:
                old = self.rev_parse(name)
            self.update_ref("-d", name, old)
            self._project.bare_ref.deleted(name)

        def rev_list(self, *args, **kw):
            """Run rev-list (or log, when format= is given); return lines."""
            if "format" in kw:
                cmdv = ["log", "--pretty=format:%s" % kw["format"]]
            else:
                cmdv = ["rev-list"]
            cmdv.extend(args)
            p = GitCommand(
                self._project,
                cmdv,
                bare=self._bare,
                gitdir=self._gitdir,
                capture_stdout=True,
                capture_stderr=True,
            )
            if p.Wait() != 0:
                raise GitError(
                    "%s rev-list %s: %s"
                    % (self._project.name, str(args), p.stderr)
                )
            return p.stdout.splitlines()

        def __getattr__(self, name):
            """Allow arbitrary git commands using pythonic syntax.

            This allows you to do things like:
                git_obj.rev_parse('HEAD')

            Since we don't have a 'rev_parse' method defined, the __getattr__
            will run. We'll replace the '_' with a '-' and try to run a git
            command. Any other positional arguments will be passed to the git
            command, and the following keyword arguments are supported:
                config: An optional dict of git config options to be passed
                    with '-c'.

            Args:
                name: The name of the git command to call. Any '_' characters
                    will be replaced with '-'.

            Returns:
                A callable object that will try to call git with the named
                command.
            """
            name = name.replace("_", "-")

            def runner(*args, **kwargs):
                cmdv = []
                config = kwargs.pop("config", None)
                for k in kwargs:
                    raise TypeError(
                        "%s() got an unexpected keyword argument %r" % (name, k)
                    )
                if config is not None:
                    for k, v in config.items():
                        cmdv.append("-c")
                        cmdv.append("%s=%s" % (k, v))
                cmdv.append(name)
                cmdv.extend(args)
                p = GitCommand(
                    self._project,
                    cmdv,
                    bare=self._bare,
                    gitdir=self._gitdir,
                    capture_stdout=True,
                    capture_stderr=True,
                )
                if p.Wait() != 0:
                    raise GitError(
                        "%s %s: %s" % (self._project.name, name, p.stderr)
                    )
                r = p.stdout
                # Strip a single trailing newline so one-line results come
                # back as a plain string.
                if r.endswith("\n") and r.index("\n") == len(r) - 1:
                    return r[:-1]
                return r

            return runner
class _PriorSyncFailedError(Exception):
-
- def __str__(self):
- return 'prior sync failed; rebase still in progress'
+ def __str__(self):
+ return "prior sync failed; rebase still in progress"
class _DirtyError(Exception):
-
- def __str__(self):
- return 'contains uncommitted changes'
+ def __str__(self):
+ return "contains uncommitted changes"
class _InfoMessage(object):
+ def __init__(self, project, text):
+ self.project = project
+ self.text = text
- def __init__(self, project, text):
- self.project = project
- self.text = text
-
- def Print(self, syncbuf):
- syncbuf.out.info('%s/: %s', self.project.RelPath(local=False), self.text)
- syncbuf.out.nl()
+ def Print(self, syncbuf):
+ syncbuf.out.info(
+ "%s/: %s", self.project.RelPath(local=False), self.text
+ )
+ syncbuf.out.nl()
class _Failure(object):
+ def __init__(self, project, why):
+ self.project = project
+ self.why = why
- def __init__(self, project, why):
- self.project = project
- self.why = why
-
- def Print(self, syncbuf):
- syncbuf.out.fail('error: %s/: %s',
- self.project.RelPath(local=False),
- str(self.why))
- syncbuf.out.nl()
+ def Print(self, syncbuf):
+ syncbuf.out.fail(
+ "error: %s/: %s", self.project.RelPath(local=False), str(self.why)
+ )
+ syncbuf.out.nl()
class _Later(object):
+ def __init__(self, project, action):
+ self.project = project
+ self.action = action
- def __init__(self, project, action):
- self.project = project
- self.action = action
-
- def Run(self, syncbuf):
- out = syncbuf.out
- out.project('project %s/', self.project.RelPath(local=False))
- out.nl()
- try:
- self.action()
- out.nl()
- return True
- except GitError:
- out.nl()
- return False
+ def Run(self, syncbuf):
+ out = syncbuf.out
+ out.project("project %s/", self.project.RelPath(local=False))
+ out.nl()
+ try:
+ self.action()
+ out.nl()
+ return True
+ except GitError:
+ out.nl()
+ return False
class _SyncColoring(Coloring):
-
- def __init__(self, config):
- super().__init__(config, 'reposync')
- self.project = self.printer('header', attr='bold')
- self.info = self.printer('info')
- self.fail = self.printer('fail', fg='red')
+ def __init__(self, config):
+ super().__init__(config, "reposync")
+ self.project = self.printer("header", attr="bold")
+ self.info = self.printer("info")
+ self.fail = self.printer("fail", fg="red")
class SyncBuffer(object):
+ def __init__(self, config, detach_head=False):
+ self._messages = []
+ self._failures = []
+ self._later_queue1 = []
+ self._later_queue2 = []
- def __init__(self, config, detach_head=False):
- self._messages = []
- self._failures = []
- self._later_queue1 = []
- self._later_queue2 = []
+ self.out = _SyncColoring(config)
+ self.out.redirect(sys.stderr)
- self.out = _SyncColoring(config)
- self.out.redirect(sys.stderr)
+ self.detach_head = detach_head
+ self.clean = True
+ self.recent_clean = True
- self.detach_head = detach_head
- self.clean = True
- self.recent_clean = True
+ def info(self, project, fmt, *args):
+ self._messages.append(_InfoMessage(project, fmt % args))
- def info(self, project, fmt, *args):
- self._messages.append(_InfoMessage(project, fmt % args))
-
- def fail(self, project, err=None):
- self._failures.append(_Failure(project, err))
- self._MarkUnclean()
-
- def later1(self, project, what):
- self._later_queue1.append(_Later(project, what))
-
- def later2(self, project, what):
- self._later_queue2.append(_Later(project, what))
-
- def Finish(self):
- self._PrintMessages()
- self._RunLater()
- self._PrintMessages()
- return self.clean
-
- def Recently(self):
- recent_clean = self.recent_clean
- self.recent_clean = True
- return recent_clean
-
- def _MarkUnclean(self):
- self.clean = False
- self.recent_clean = False
-
- def _RunLater(self):
- for q in ['_later_queue1', '_later_queue2']:
- if not self._RunQueue(q):
- return
-
- def _RunQueue(self, queue):
- for m in getattr(self, queue):
- if not m.Run(self):
+ def fail(self, project, err=None):
+ self._failures.append(_Failure(project, err))
self._MarkUnclean()
- return False
- setattr(self, queue, [])
- return True
- def _PrintMessages(self):
- if self._messages or self._failures:
- if os.isatty(2):
- self.out.write(progress.CSI_ERASE_LINE)
- self.out.write('\r')
+ def later1(self, project, what):
+ self._later_queue1.append(_Later(project, what))
- for m in self._messages:
- m.Print(self)
- for m in self._failures:
- m.Print(self)
+ def later2(self, project, what):
+ self._later_queue2.append(_Later(project, what))
- self._messages = []
- self._failures = []
+ def Finish(self):
+ self._PrintMessages()
+ self._RunLater()
+ self._PrintMessages()
+ return self.clean
+
+ def Recently(self):
+ recent_clean = self.recent_clean
+ self.recent_clean = True
+ return recent_clean
+
+ def _MarkUnclean(self):
+ self.clean = False
+ self.recent_clean = False
+
+ def _RunLater(self):
+ for q in ["_later_queue1", "_later_queue2"]:
+ if not self._RunQueue(q):
+ return
+
+ def _RunQueue(self, queue):
+ for m in getattr(self, queue):
+ if not m.Run(self):
+ self._MarkUnclean()
+ return False
+ setattr(self, queue, [])
+ return True
+
+ def _PrintMessages(self):
+ if self._messages or self._failures:
+ if os.isatty(2):
+ self.out.write(progress.CSI_ERASE_LINE)
+ self.out.write("\r")
+
+ for m in self._messages:
+ m.Print(self)
+ for m in self._failures:
+ m.Print(self)
+
+ self._messages = []
+ self._failures = []
class MetaProject(Project):
- """A special project housed under .repo."""
+ """A special project housed under .repo."""
- def __init__(self, manifest, name, gitdir, worktree):
- Project.__init__(self,
- manifest=manifest,
- name=name,
- gitdir=gitdir,
- objdir=gitdir,
- worktree=worktree,
- remote=RemoteSpec('origin'),
- relpath='.repo/%s' % name,
- revisionExpr='refs/heads/master',
- revisionId=None,
- groups=None)
+ def __init__(self, manifest, name, gitdir, worktree):
+ Project.__init__(
+ self,
+ manifest=manifest,
+ name=name,
+ gitdir=gitdir,
+ objdir=gitdir,
+ worktree=worktree,
+ remote=RemoteSpec("origin"),
+ relpath=".repo/%s" % name,
+ revisionExpr="refs/heads/master",
+ revisionId=None,
+ groups=None,
+ )
- def PreSync(self):
- if self.Exists:
- cb = self.CurrentBranch
- if cb:
- base = self.GetBranch(cb).merge
- if base:
- self.revisionExpr = base
- self.revisionId = None
+ def PreSync(self):
+ if self.Exists:
+ cb = self.CurrentBranch
+ if cb:
+ base = self.GetBranch(cb).merge
+ if base:
+ self.revisionExpr = base
+ self.revisionId = None
- @property
- def HasChanges(self):
- """Has the remote received new commits not yet checked out?"""
- if not self.remote or not self.revisionExpr:
- return False
+ @property
+ def HasChanges(self):
+ """Has the remote received new commits not yet checked out?"""
+ if not self.remote or not self.revisionExpr:
+ return False
- all_refs = self.bare_ref.all
- revid = self.GetRevisionId(all_refs)
- head = self.work_git.GetHead()
- if head.startswith(R_HEADS):
- try:
- head = all_refs[head]
- except KeyError:
- head = None
+ all_refs = self.bare_ref.all
+ revid = self.GetRevisionId(all_refs)
+ head = self.work_git.GetHead()
+ if head.startswith(R_HEADS):
+ try:
+ head = all_refs[head]
+ except KeyError:
+ head = None
- if revid == head:
- return False
- elif self._revlist(not_rev(HEAD), revid):
- return True
- return False
+ if revid == head:
+ return False
+ elif self._revlist(not_rev(HEAD), revid):
+ return True
+ return False
class RepoProject(MetaProject):
- """The MetaProject for repo itself."""
+ """The MetaProject for repo itself."""
- @property
- def LastFetch(self):
- try:
- fh = os.path.join(self.gitdir, 'FETCH_HEAD')
- return os.path.getmtime(fh)
- except OSError:
- return 0
+ @property
+ def LastFetch(self):
+ try:
+ fh = os.path.join(self.gitdir, "FETCH_HEAD")
+ return os.path.getmtime(fh)
+ except OSError:
+ return 0
class ManifestProject(MetaProject):
- """The MetaProject for manifests."""
+ """The MetaProject for manifests."""
- def MetaBranchSwitch(self, submodules=False):
- """Prepare for manifest branch switch."""
+ def MetaBranchSwitch(self, submodules=False):
+ """Prepare for manifest branch switch."""
- # detach and delete manifest branch, allowing a new
- # branch to take over
- syncbuf = SyncBuffer(self.config, detach_head=True)
- self.Sync_LocalHalf(syncbuf, submodules=submodules)
- syncbuf.Finish()
+ # detach and delete manifest branch, allowing a new
+ # branch to take over
+ syncbuf = SyncBuffer(self.config, detach_head=True)
+ self.Sync_LocalHalf(syncbuf, submodules=submodules)
+ syncbuf.Finish()
- return GitCommand(self,
- ['update-ref', '-d', 'refs/heads/default'],
- capture_stdout=True,
- capture_stderr=True).Wait() == 0
+ return (
+ GitCommand(
+ self,
+ ["update-ref", "-d", "refs/heads/default"],
+ capture_stdout=True,
+ capture_stderr=True,
+ ).Wait()
+ == 0
+ )
- @property
- def standalone_manifest_url(self):
- """The URL of the standalone manifest, or None."""
- return self.config.GetString('manifest.standalone')
+ @property
+ def standalone_manifest_url(self):
+ """The URL of the standalone manifest, or None."""
+ return self.config.GetString("manifest.standalone")
- @property
- def manifest_groups(self):
- """The manifest groups string."""
- return self.config.GetString('manifest.groups')
+ @property
+ def manifest_groups(self):
+ """The manifest groups string."""
+ return self.config.GetString("manifest.groups")
- @property
- def reference(self):
- """The --reference for this manifest."""
- return self.config.GetString('repo.reference')
+ @property
+ def reference(self):
+ """The --reference for this manifest."""
+ return self.config.GetString("repo.reference")
- @property
- def dissociate(self):
- """Whether to dissociate."""
- return self.config.GetBoolean('repo.dissociate')
+ @property
+ def dissociate(self):
+ """Whether to dissociate."""
+ return self.config.GetBoolean("repo.dissociate")
- @property
- def archive(self):
- """Whether we use archive."""
- return self.config.GetBoolean('repo.archive')
+ @property
+ def archive(self):
+ """Whether we use archive."""
+ return self.config.GetBoolean("repo.archive")
- @property
- def mirror(self):
- """Whether we use mirror."""
- return self.config.GetBoolean('repo.mirror')
+ @property
+ def mirror(self):
+ """Whether we use mirror."""
+ return self.config.GetBoolean("repo.mirror")
- @property
- def use_worktree(self):
- """Whether we use worktree."""
- return self.config.GetBoolean('repo.worktree')
+ @property
+ def use_worktree(self):
+ """Whether we use worktree."""
+ return self.config.GetBoolean("repo.worktree")
- @property
- def clone_bundle(self):
- """Whether we use clone_bundle."""
- return self.config.GetBoolean('repo.clonebundle')
+ @property
+ def clone_bundle(self):
+ """Whether we use clone_bundle."""
+ return self.config.GetBoolean("repo.clonebundle")
- @property
- def submodules(self):
- """Whether we use submodules."""
- return self.config.GetBoolean('repo.submodules')
+ @property
+ def submodules(self):
+ """Whether we use submodules."""
+ return self.config.GetBoolean("repo.submodules")
- @property
- def git_lfs(self):
- """Whether we use git_lfs."""
- return self.config.GetBoolean('repo.git-lfs')
+ @property
+ def git_lfs(self):
+ """Whether we use git_lfs."""
+ return self.config.GetBoolean("repo.git-lfs")
- @property
- def use_superproject(self):
- """Whether we use superproject."""
- return self.config.GetBoolean('repo.superproject')
+ @property
+ def use_superproject(self):
+ """Whether we use superproject."""
+ return self.config.GetBoolean("repo.superproject")
- @property
- def partial_clone(self):
- """Whether this is a partial clone."""
- return self.config.GetBoolean('repo.partialclone')
+ @property
+ def partial_clone(self):
+ """Whether this is a partial clone."""
+ return self.config.GetBoolean("repo.partialclone")
- @property
- def depth(self):
- """Partial clone depth."""
- return self.config.GetString('repo.depth')
+ @property
+ def depth(self):
+ """Partial clone depth."""
+ return self.config.GetString("repo.depth")
- @property
- def clone_filter(self):
- """The clone filter."""
- return self.config.GetString('repo.clonefilter')
+ @property
+ def clone_filter(self):
+ """The clone filter."""
+ return self.config.GetString("repo.clonefilter")
- @property
- def partial_clone_exclude(self):
- """Partial clone exclude string"""
- return self.config.GetString('repo.partialcloneexclude')
+ @property
+ def partial_clone_exclude(self):
+ """Partial clone exclude string"""
+ return self.config.GetString("repo.partialcloneexclude")
- @property
- def manifest_platform(self):
- """The --platform argument from `repo init`."""
- return self.config.GetString('manifest.platform')
+ @property
+ def manifest_platform(self):
+ """The --platform argument from `repo init`."""
+ return self.config.GetString("manifest.platform")
- @property
- def _platform_name(self):
- """Return the name of the platform."""
- return platform.system().lower()
+ @property
+ def _platform_name(self):
+ """Return the name of the platform."""
+ return platform.system().lower()
- def SyncWithPossibleInit(self, submanifest, verbose=False,
- current_branch_only=False, tags='', git_event_log=None):
- """Sync a manifestProject, possibly for the first time.
+ def SyncWithPossibleInit(
+ self,
+ submanifest,
+ verbose=False,
+ current_branch_only=False,
+ tags="",
+ git_event_log=None,
+ ):
+ """Sync a manifestProject, possibly for the first time.
- Call Sync() with arguments from the most recent `repo init`. If this is a
- new sub manifest, then inherit options from the parent's manifestProject.
+ Call Sync() with arguments from the most recent `repo init`. If this is
+ a new sub manifest, then inherit options from the parent's
+ manifestProject.
- This is used by subcmds.Sync() to do an initial download of new sub
- manifests.
+ This is used by subcmds.Sync() to do an initial download of new sub
+ manifests.
- Args:
- submanifest: an XmlSubmanifest, the submanifest to re-sync.
- verbose: a boolean, whether to show all output, rather than only errors.
- current_branch_only: a boolean, whether to only fetch the current manifest
- branch from the server.
- tags: a boolean, whether to fetch tags.
- git_event_log: an EventLog, for git tracing.
- """
- # TODO(lamontjones): when refactoring sync (and init?) consider how to
- # better get the init options that we should use for new submanifests that
- # are added when syncing an existing workspace.
- git_event_log = git_event_log or EventLog()
- spec = submanifest.ToSubmanifestSpec()
- # Use the init options from the existing manifestProject, or the parent if
- # it doesn't exist.
- #
- # Today, we only support changing manifest_groups on the sub-manifest, with
- # no supported-for-the-user way to change the other arguments from those
- # specified by the outermost manifest.
- #
- # TODO(lamontjones): determine which of these should come from the outermost
- # manifest and which should come from the parent manifest.
- mp = self if self.Exists else submanifest.parent.manifestProject
- return self.Sync(
- manifest_url=spec.manifestUrl,
- manifest_branch=spec.revision,
- standalone_manifest=mp.standalone_manifest_url,
- groups=mp.manifest_groups,
- platform=mp.manifest_platform,
- mirror=mp.mirror,
- dissociate=mp.dissociate,
- reference=mp.reference,
- worktree=mp.use_worktree,
- submodules=mp.submodules,
- archive=mp.archive,
- partial_clone=mp.partial_clone,
- clone_filter=mp.clone_filter,
- partial_clone_exclude=mp.partial_clone_exclude,
- clone_bundle=mp.clone_bundle,
- git_lfs=mp.git_lfs,
- use_superproject=mp.use_superproject,
- verbose=verbose,
- current_branch_only=current_branch_only,
- tags=tags,
- depth=mp.depth,
- git_event_log=git_event_log,
- manifest_name=spec.manifestName,
- this_manifest_only=True,
- outer_manifest=False,
- )
-
- def Sync(self, _kwargs_only=(), manifest_url='', manifest_branch=None,
- standalone_manifest=False, groups='', mirror=False, reference='',
- dissociate=False, worktree=False, submodules=False, archive=False,
- partial_clone=None, depth=None, clone_filter='blob:none',
- partial_clone_exclude=None, clone_bundle=None, git_lfs=None,
- use_superproject=None, verbose=False, current_branch_only=False,
- git_event_log=None, platform='', manifest_name='default.xml',
- tags='', this_manifest_only=False, outer_manifest=True):
- """Sync the manifest and all submanifests.
-
- Args:
- manifest_url: a string, the URL of the manifest project.
- manifest_branch: a string, the manifest branch to use.
- standalone_manifest: a boolean, whether to store the manifest as a static
- file.
- groups: a string, restricts the checkout to projects with the specified
- groups.
- mirror: a boolean, whether to create a mirror of the remote repository.
- reference: a string, location of a repo instance to use as a reference.
- dissociate: a boolean, whether to dissociate from reference mirrors after
- clone.
- worktree: a boolean, whether to use git-worktree to manage projects.
- submodules: a boolean, whether sync submodules associated with the
- manifest project.
- archive: a boolean, whether to checkout each project as an archive. See
- git-archive.
- partial_clone: a boolean, whether to perform a partial clone.
- depth: an int, how deep of a shallow clone to create.
- clone_filter: a string, filter to use with partial_clone.
- partial_clone_exclude : a string, comma-delimeted list of project namess
- to exclude from partial clone.
- clone_bundle: a boolean, whether to enable /clone.bundle on HTTP/HTTPS.
- git_lfs: a boolean, whether to enable git LFS support.
- use_superproject: a boolean, whether to use the manifest superproject to
- sync projects.
- verbose: a boolean, whether to show all output, rather than only errors.
- current_branch_only: a boolean, whether to only fetch the current manifest
- branch from the server.
- platform: a string, restrict the checkout to projects with the specified
- platform group.
- git_event_log: an EventLog, for git tracing.
- tags: a boolean, whether to fetch tags.
- manifest_name: a string, the name of the manifest file to use.
- this_manifest_only: a boolean, whether to only operate on the current sub
- manifest.
- outer_manifest: a boolean, whether to start at the outermost manifest.
-
- Returns:
- a boolean, whether the sync was successful.
- """
- assert _kwargs_only == (), 'Sync only accepts keyword arguments.'
-
- groups = groups or self.manifest.GetDefaultGroupsStr(with_platform=False)
- platform = platform or 'auto'
- git_event_log = git_event_log or EventLog()
- if outer_manifest and self.manifest.is_submanifest:
- # In a multi-manifest checkout, use the outer manifest unless we are told
- # not to.
- return self.client.outer_manifest.manifestProject.Sync(
- manifest_url=manifest_url,
- manifest_branch=manifest_branch,
- standalone_manifest=standalone_manifest,
- groups=groups,
- platform=platform,
- mirror=mirror,
- dissociate=dissociate,
- reference=reference,
- worktree=worktree,
- submodules=submodules,
- archive=archive,
- partial_clone=partial_clone,
- clone_filter=clone_filter,
- partial_clone_exclude=partial_clone_exclude,
- clone_bundle=clone_bundle,
- git_lfs=git_lfs,
- use_superproject=use_superproject,
- verbose=verbose,
- current_branch_only=current_branch_only,
- tags=tags,
- depth=depth,
- git_event_log=git_event_log,
- manifest_name=manifest_name,
- this_manifest_only=this_manifest_only,
- outer_manifest=False)
-
- # If repo has already been initialized, we take -u with the absence of
- # --standalone-manifest to mean "transition to a standard repo set up",
- # which necessitates starting fresh.
- # If --standalone-manifest is set, we always tear everything down and start
- # anew.
- if self.Exists:
- was_standalone_manifest = self.config.GetString('manifest.standalone')
- if was_standalone_manifest and not manifest_url:
- print('fatal: repo was initialized with a standlone manifest, '
- 'cannot be re-initialized without --manifest-url/-u')
- return False
-
- if standalone_manifest or (was_standalone_manifest and manifest_url):
- self.config.ClearCache()
- if self.gitdir and os.path.exists(self.gitdir):
- platform_utils.rmtree(self.gitdir)
- if self.worktree and os.path.exists(self.worktree):
- platform_utils.rmtree(self.worktree)
-
- is_new = not self.Exists
- if is_new:
- if not manifest_url:
- print('fatal: manifest url is required.', file=sys.stderr)
- return False
-
- if verbose:
- print('Downloading manifest from %s' %
- (GitConfig.ForUser().UrlInsteadOf(manifest_url),),
- file=sys.stderr)
-
- # The manifest project object doesn't keep track of the path on the
- # server where this git is located, so let's save that here.
- mirrored_manifest_git = None
- if reference:
- manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:]
- mirrored_manifest_git = os.path.join(reference, manifest_git_path)
- if not mirrored_manifest_git.endswith(".git"):
- mirrored_manifest_git += ".git"
- if not os.path.exists(mirrored_manifest_git):
- mirrored_manifest_git = os.path.join(reference,
- '.repo/manifests.git')
-
- self._InitGitDir(mirror_git=mirrored_manifest_git)
-
- # If standalone_manifest is set, mark the project as "standalone" -- we'll
- # still do much of the manifests.git set up, but will avoid actual syncs to
- # a remote.
- if standalone_manifest:
- self.config.SetString('manifest.standalone', manifest_url)
- elif not manifest_url and not manifest_branch:
- # If -u is set and --standalone-manifest is not, then we're not in
- # standalone mode. Otherwise, use config to infer what we were in the last
- # init.
- standalone_manifest = bool(self.config.GetString('manifest.standalone'))
- if not standalone_manifest:
- self.config.SetString('manifest.standalone', None)
-
- self._ConfigureDepth(depth)
-
- # Set the remote URL before the remote branch as we might need it below.
- if manifest_url:
- r = self.GetRemote()
- r.url = manifest_url
- r.ResetFetch()
- r.Save()
-
- if not standalone_manifest:
- if manifest_branch:
- if manifest_branch == 'HEAD':
- manifest_branch = self.ResolveRemoteHead()
- if manifest_branch is None:
- print('fatal: unable to resolve HEAD', file=sys.stderr)
- return False
- self.revisionExpr = manifest_branch
- else:
- if is_new:
- default_branch = self.ResolveRemoteHead()
- if default_branch is None:
- # If the remote doesn't have HEAD configured, default to master.
- default_branch = 'refs/heads/master'
- self.revisionExpr = default_branch
- else:
- self.PreSync()
-
- groups = re.split(r'[,\s]+', groups or '')
- all_platforms = ['linux', 'darwin', 'windows']
- platformize = lambda x: 'platform-' + x
- if platform == 'auto':
- if not mirror and not self.mirror:
- groups.append(platformize(self._platform_name))
- elif platform == 'all':
- groups.extend(map(platformize, all_platforms))
- elif platform in all_platforms:
- groups.append(platformize(platform))
- elif platform != 'none':
- print('fatal: invalid platform flag', file=sys.stderr)
- return False
- self.config.SetString('manifest.platform', platform)
-
- groups = [x for x in groups if x]
- groupstr = ','.join(groups)
- if platform == 'auto' and groupstr == self.manifest.GetDefaultGroupsStr():
- groupstr = None
- self.config.SetString('manifest.groups', groupstr)
-
- if reference:
- self.config.SetString('repo.reference', reference)
-
- if dissociate:
- self.config.SetBoolean('repo.dissociate', dissociate)
-
- if worktree:
- if mirror:
- print('fatal: --mirror and --worktree are incompatible',
- file=sys.stderr)
- return False
- if submodules:
- print('fatal: --submodules and --worktree are incompatible',
- file=sys.stderr)
- return False
- self.config.SetBoolean('repo.worktree', worktree)
- if is_new:
- self.use_git_worktrees = True
- print('warning: --worktree is experimental!', file=sys.stderr)
-
- if archive:
- if is_new:
- self.config.SetBoolean('repo.archive', archive)
- else:
- print('fatal: --archive is only supported when initializing a new '
- 'workspace.', file=sys.stderr)
- print('Either delete the .repo folder in this workspace, or initialize '
- 'in another location.', file=sys.stderr)
- return False
-
- if mirror:
- if is_new:
- self.config.SetBoolean('repo.mirror', mirror)
- else:
- print('fatal: --mirror is only supported when initializing a new '
- 'workspace.', file=sys.stderr)
- print('Either delete the .repo folder in this workspace, or initialize '
- 'in another location.', file=sys.stderr)
- return False
-
- if partial_clone is not None:
- if mirror:
- print('fatal: --mirror and --partial-clone are mutually exclusive',
- file=sys.stderr)
- return False
- self.config.SetBoolean('repo.partialclone', partial_clone)
- if clone_filter:
- self.config.SetString('repo.clonefilter', clone_filter)
- elif self.partial_clone:
- clone_filter = self.clone_filter
- else:
- clone_filter = None
-
- if partial_clone_exclude is not None:
- self.config.SetString('repo.partialcloneexclude', partial_clone_exclude)
-
- if clone_bundle is None:
- clone_bundle = False if partial_clone else True
- else:
- self.config.SetBoolean('repo.clonebundle', clone_bundle)
-
- if submodules:
- self.config.SetBoolean('repo.submodules', submodules)
-
- if git_lfs is not None:
- if git_lfs:
- git_require((2, 17, 0), fail=True, msg='Git LFS support')
-
- self.config.SetBoolean('repo.git-lfs', git_lfs)
- if not is_new:
- print('warning: Changing --git-lfs settings will only affect new project checkouts.\n'
- ' Existing projects will require manual updates.\n', file=sys.stderr)
-
- if use_superproject is not None:
- self.config.SetBoolean('repo.superproject', use_superproject)
-
- if not standalone_manifest:
- success = self.Sync_NetworkHalf(
- is_new=is_new, quiet=not verbose, verbose=verbose,
- clone_bundle=clone_bundle, current_branch_only=current_branch_only,
- tags=tags, submodules=submodules, clone_filter=clone_filter,
- partial_clone_exclude=self.manifest.PartialCloneExclude).success
- if not success:
- r = self.GetRemote()
- print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
-
- # Better delete the manifest git dir if we created it; otherwise next
- # time (when user fixes problems) we won't go through the "is_new" logic.
- if is_new:
- platform_utils.rmtree(self.gitdir)
- return False
-
- if manifest_branch:
- self.MetaBranchSwitch(submodules=submodules)
-
- syncbuf = SyncBuffer(self.config)
- self.Sync_LocalHalf(syncbuf, submodules=submodules)
- syncbuf.Finish()
-
- if is_new or self.CurrentBranch is None:
- if not self.StartBranch('default'):
- print('fatal: cannot create default in manifest', file=sys.stderr)
- return False
-
- if not manifest_name:
- print('fatal: manifest name (-m) is required.', file=sys.stderr)
- return False
-
- elif is_new:
- # This is a new standalone manifest.
- manifest_name = 'default.xml'
- manifest_data = fetch.fetch_file(manifest_url, verbose=verbose)
- dest = os.path.join(self.worktree, manifest_name)
- os.makedirs(os.path.dirname(dest), exist_ok=True)
- with open(dest, 'wb') as f:
- f.write(manifest_data)
-
- try:
- self.manifest.Link(manifest_name)
- except ManifestParseError as e:
- print("fatal: manifest '%s' not available" % manifest_name,
- file=sys.stderr)
- print('fatal: %s' % str(e), file=sys.stderr)
- return False
-
- if not this_manifest_only:
- for submanifest in self.manifest.submanifests.values():
+ Args:
+ submanifest: an XmlSubmanifest, the submanifest to re-sync.
+ verbose: a boolean, whether to show all output, rather than only
+ errors.
+ current_branch_only: a boolean, whether to only fetch the current
+ manifest branch from the server.
+ tags: a boolean, whether to fetch tags.
+ git_event_log: an EventLog, for git tracing.
+ """
+ # TODO(lamontjones): when refactoring sync (and init?) consider how to
+ # better get the init options that we should use for new submanifests
+ # that are added when syncing an existing workspace.
+ git_event_log = git_event_log or EventLog()
spec = submanifest.ToSubmanifestSpec()
- submanifest.repo_client.manifestProject.Sync(
+ # Use the init options from the existing manifestProject, or the parent
+ # if it doesn't exist.
+ #
+ # Today, we only support changing manifest_groups on the sub-manifest,
+ # with no supported-for-the-user way to change the other arguments from
+ # those specified by the outermost manifest.
+ #
+ # TODO(lamontjones): determine which of these should come from the
+ # outermost manifest and which should come from the parent manifest.
+ mp = self if self.Exists else submanifest.parent.manifestProject
+ return self.Sync(
manifest_url=spec.manifestUrl,
manifest_branch=spec.revision,
- standalone_manifest=standalone_manifest,
- groups=self.manifest_groups,
- platform=platform,
- mirror=mirror,
- dissociate=dissociate,
- reference=reference,
- worktree=worktree,
- submodules=submodules,
- archive=archive,
- partial_clone=partial_clone,
- clone_filter=clone_filter,
- partial_clone_exclude=partial_clone_exclude,
- clone_bundle=clone_bundle,
- git_lfs=git_lfs,
- use_superproject=use_superproject,
+ standalone_manifest=mp.standalone_manifest_url,
+ groups=mp.manifest_groups,
+ platform=mp.manifest_platform,
+ mirror=mp.mirror,
+ dissociate=mp.dissociate,
+ reference=mp.reference,
+ worktree=mp.use_worktree,
+ submodules=mp.submodules,
+ archive=mp.archive,
+ partial_clone=mp.partial_clone,
+ clone_filter=mp.clone_filter,
+ partial_clone_exclude=mp.partial_clone_exclude,
+ clone_bundle=mp.clone_bundle,
+ git_lfs=mp.git_lfs,
+ use_superproject=mp.use_superproject,
verbose=verbose,
current_branch_only=current_branch_only,
tags=tags,
- depth=depth,
+ depth=mp.depth,
git_event_log=git_event_log,
manifest_name=spec.manifestName,
- this_manifest_only=False,
+ this_manifest_only=True,
outer_manifest=False,
)
- # Lastly, if the manifest has a <superproject> then have the superproject
- # sync it (if it will be used).
- if git_superproject.UseSuperproject(use_superproject, self.manifest):
- sync_result = self.manifest.superproject.Sync(git_event_log)
- if not sync_result.success:
- submanifest = ''
- if self.manifest.path_prefix:
- submanifest = f'for {self.manifest.path_prefix} '
- print(f'warning: git update of superproject {submanifest}failed, repo '
- 'sync will not use superproject to fetch source; while this '
- 'error is not fatal, and you can continue to run repo sync, '
- 'please run repo init with the --no-use-superproject option to '
- 'stop seeing this warning', file=sys.stderr)
- if sync_result.fatal and use_superproject is not None:
- return False
+ def Sync(
+ self,
+ _kwargs_only=(),
+ manifest_url="",
+ manifest_branch=None,
+ standalone_manifest=False,
+ groups="",
+ mirror=False,
+ reference="",
+ dissociate=False,
+ worktree=False,
+ submodules=False,
+ archive=False,
+ partial_clone=None,
+ depth=None,
+ clone_filter="blob:none",
+ partial_clone_exclude=None,
+ clone_bundle=None,
+ git_lfs=None,
+ use_superproject=None,
+ verbose=False,
+ current_branch_only=False,
+ git_event_log=None,
+ platform="",
+ manifest_name="default.xml",
+ tags="",
+ this_manifest_only=False,
+ outer_manifest=True,
+ ):
+ """Sync the manifest and all submanifests.
- return True
+ Args:
+ manifest_url: a string, the URL of the manifest project.
+ manifest_branch: a string, the manifest branch to use.
+ standalone_manifest: a boolean, whether to store the manifest as a
+ static file.
+ groups: a string, restricts the checkout to projects with the
+ specified groups.
+ mirror: a boolean, whether to create a mirror of the remote
+ repository.
+ reference: a string, location of a repo instance to use as a
+ reference.
+ dissociate: a boolean, whether to dissociate from reference mirrors
+ after clone.
+ worktree: a boolean, whether to use git-worktree to manage projects.
+            submodules: a boolean, whether to sync submodules associated with the
+ manifest project.
+ archive: a boolean, whether to checkout each project as an archive.
+ See git-archive.
+ partial_clone: a boolean, whether to perform a partial clone.
+ depth: an int, how deep of a shallow clone to create.
+ clone_filter: a string, filter to use with partial_clone.
+ partial_clone_exclude: a string, comma-delimited list of project
+ names to exclude from partial clone.
+ clone_bundle: a boolean, whether to enable /clone.bundle on
+ HTTP/HTTPS.
+ git_lfs: a boolean, whether to enable git LFS support.
+ use_superproject: a boolean, whether to use the manifest
+ superproject to sync projects.
+ verbose: a boolean, whether to show all output, rather than only
+ errors.
+ current_branch_only: a boolean, whether to only fetch the current
+ manifest branch from the server.
+ platform: a string, restrict the checkout to projects with the
+ specified platform group.
+ git_event_log: an EventLog, for git tracing.
+ tags: a boolean, whether to fetch tags.
+ manifest_name: a string, the name of the manifest file to use.
+ this_manifest_only: a boolean, whether to only operate on the
+ current sub manifest.
+ outer_manifest: a boolean, whether to start at the outermost
+ manifest.
- def _ConfigureDepth(self, depth):
- """Configure the depth we'll sync down.
+ Returns:
+ a boolean, whether the sync was successful.
+ """
+ assert _kwargs_only == (), "Sync only accepts keyword arguments."
- Args:
- depth: an int, how deep of a partial clone to create.
- """
- # Opt.depth will be non-None if user actually passed --depth to repo init.
- if depth is not None:
- if depth > 0:
- # Positive values will set the depth.
- depth = str(depth)
- else:
- # Negative numbers will clear the depth; passing None to SetString
- # will do that.
- depth = None
+ groups = groups or self.manifest.GetDefaultGroupsStr(
+ with_platform=False
+ )
+ platform = platform or "auto"
+ git_event_log = git_event_log or EventLog()
+ if outer_manifest and self.manifest.is_submanifest:
+ # In a multi-manifest checkout, use the outer manifest unless we are
+ # told not to.
+ return self.client.outer_manifest.manifestProject.Sync(
+ manifest_url=manifest_url,
+ manifest_branch=manifest_branch,
+ standalone_manifest=standalone_manifest,
+ groups=groups,
+ platform=platform,
+ mirror=mirror,
+ dissociate=dissociate,
+ reference=reference,
+ worktree=worktree,
+ submodules=submodules,
+ archive=archive,
+ partial_clone=partial_clone,
+ clone_filter=clone_filter,
+ partial_clone_exclude=partial_clone_exclude,
+ clone_bundle=clone_bundle,
+ git_lfs=git_lfs,
+ use_superproject=use_superproject,
+ verbose=verbose,
+ current_branch_only=current_branch_only,
+ tags=tags,
+ depth=depth,
+ git_event_log=git_event_log,
+ manifest_name=manifest_name,
+ this_manifest_only=this_manifest_only,
+ outer_manifest=False,
+ )
- # We store the depth in the main manifest project.
- self.config.SetString('repo.depth', depth)
+ # If repo has already been initialized, we take -u with the absence of
+ # --standalone-manifest to mean "transition to a standard repo set up",
+ # which necessitates starting fresh.
+ # If --standalone-manifest is set, we always tear everything down and
+ # start anew.
+ if self.Exists:
+ was_standalone_manifest = self.config.GetString(
+ "manifest.standalone"
+ )
+ if was_standalone_manifest and not manifest_url:
+ print(
+ "fatal: repo was initialized with a standalone manifest, "
+ "cannot be re-initialized without --manifest-url/-u"
+ )
+ return False
+
+ if standalone_manifest or (
+ was_standalone_manifest and manifest_url
+ ):
+ self.config.ClearCache()
+ if self.gitdir and os.path.exists(self.gitdir):
+ platform_utils.rmtree(self.gitdir)
+ if self.worktree and os.path.exists(self.worktree):
+ platform_utils.rmtree(self.worktree)
+
+ is_new = not self.Exists
+ if is_new:
+ if not manifest_url:
+ print("fatal: manifest url is required.", file=sys.stderr)
+ return False
+
+ if verbose:
+ print(
+ "Downloading manifest from %s"
+ % (GitConfig.ForUser().UrlInsteadOf(manifest_url),),
+ file=sys.stderr,
+ )
+
+ # The manifest project object doesn't keep track of the path on the
+ # server where this git is located, so let's save that here.
+ mirrored_manifest_git = None
+ if reference:
+ manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:]
+ mirrored_manifest_git = os.path.join(
+ reference, manifest_git_path
+ )
+ if not mirrored_manifest_git.endswith(".git"):
+ mirrored_manifest_git += ".git"
+ if not os.path.exists(mirrored_manifest_git):
+ mirrored_manifest_git = os.path.join(
+ reference, ".repo/manifests.git"
+ )
+
+ self._InitGitDir(mirror_git=mirrored_manifest_git)
+
+ # If standalone_manifest is set, mark the project as "standalone" --
+ # we'll still do much of the manifests.git set up, but will avoid actual
+ # syncs to a remote.
+ if standalone_manifest:
+ self.config.SetString("manifest.standalone", manifest_url)
+ elif not manifest_url and not manifest_branch:
+ # If -u is set and --standalone-manifest is not, then we're not in
+ # standalone mode. Otherwise, use config to infer what we were in
+ # the last init.
+ standalone_manifest = bool(
+ self.config.GetString("manifest.standalone")
+ )
+ if not standalone_manifest:
+ self.config.SetString("manifest.standalone", None)
+
+ self._ConfigureDepth(depth)
+
+ # Set the remote URL before the remote branch as we might need it below.
+ if manifest_url:
+ r = self.GetRemote()
+ r.url = manifest_url
+ r.ResetFetch()
+ r.Save()
+
+ if not standalone_manifest:
+ if manifest_branch:
+ if manifest_branch == "HEAD":
+ manifest_branch = self.ResolveRemoteHead()
+ if manifest_branch is None:
+ print("fatal: unable to resolve HEAD", file=sys.stderr)
+ return False
+ self.revisionExpr = manifest_branch
+ else:
+ if is_new:
+ default_branch = self.ResolveRemoteHead()
+ if default_branch is None:
+ # If the remote doesn't have HEAD configured, default to
+ # master.
+ default_branch = "refs/heads/master"
+ self.revisionExpr = default_branch
+ else:
+ self.PreSync()
+
+ groups = re.split(r"[,\s]+", groups or "")
+ all_platforms = ["linux", "darwin", "windows"]
+ platformize = lambda x: "platform-" + x
+ if platform == "auto":
+ if not mirror and not self.mirror:
+ groups.append(platformize(self._platform_name))
+ elif platform == "all":
+ groups.extend(map(platformize, all_platforms))
+ elif platform in all_platforms:
+ groups.append(platformize(platform))
+ elif platform != "none":
+ print("fatal: invalid platform flag", file=sys.stderr)
+ return False
+ self.config.SetString("manifest.platform", platform)
+
+ groups = [x for x in groups if x]
+ groupstr = ",".join(groups)
+ if (
+ platform == "auto"
+ and groupstr == self.manifest.GetDefaultGroupsStr()
+ ):
+ groupstr = None
+ self.config.SetString("manifest.groups", groupstr)
+
+ if reference:
+ self.config.SetString("repo.reference", reference)
+
+ if dissociate:
+ self.config.SetBoolean("repo.dissociate", dissociate)
+
+ if worktree:
+ if mirror:
+ print(
+ "fatal: --mirror and --worktree are incompatible",
+ file=sys.stderr,
+ )
+ return False
+ if submodules:
+ print(
+ "fatal: --submodules and --worktree are incompatible",
+ file=sys.stderr,
+ )
+ return False
+ self.config.SetBoolean("repo.worktree", worktree)
+ if is_new:
+ self.use_git_worktrees = True
+ print("warning: --worktree is experimental!", file=sys.stderr)
+
+ if archive:
+ if is_new:
+ self.config.SetBoolean("repo.archive", archive)
+ else:
+ print(
+ "fatal: --archive is only supported when initializing a "
+ "new workspace.",
+ file=sys.stderr,
+ )
+ print(
+ "Either delete the .repo folder in this workspace, or "
+ "initialize in another location.",
+ file=sys.stderr,
+ )
+ return False
+
+ if mirror:
+ if is_new:
+ self.config.SetBoolean("repo.mirror", mirror)
+ else:
+ print(
+ "fatal: --mirror is only supported when initializing a new "
+ "workspace.",
+ file=sys.stderr,
+ )
+ print(
+ "Either delete the .repo folder in this workspace, or "
+ "initialize in another location.",
+ file=sys.stderr,
+ )
+ return False
+
+ if partial_clone is not None:
+ if mirror:
+ print(
+ "fatal: --mirror and --partial-clone are mutually "
+ "exclusive",
+ file=sys.stderr,
+ )
+ return False
+ self.config.SetBoolean("repo.partialclone", partial_clone)
+ if clone_filter:
+ self.config.SetString("repo.clonefilter", clone_filter)
+ elif self.partial_clone:
+ clone_filter = self.clone_filter
+ else:
+ clone_filter = None
+
+ if partial_clone_exclude is not None:
+ self.config.SetString(
+ "repo.partialcloneexclude", partial_clone_exclude
+ )
+
+ if clone_bundle is None:
+ clone_bundle = False if partial_clone else True
+ else:
+ self.config.SetBoolean("repo.clonebundle", clone_bundle)
+
+ if submodules:
+ self.config.SetBoolean("repo.submodules", submodules)
+
+ if git_lfs is not None:
+ if git_lfs:
+ git_require((2, 17, 0), fail=True, msg="Git LFS support")
+
+ self.config.SetBoolean("repo.git-lfs", git_lfs)
+ if not is_new:
+ print(
+ "warning: Changing --git-lfs settings will only affect new "
+ "project checkouts.\n"
+ " Existing projects will require manual updates.\n",
+ file=sys.stderr,
+ )
+
+ if use_superproject is not None:
+ self.config.SetBoolean("repo.superproject", use_superproject)
+
+ if not standalone_manifest:
+ success = self.Sync_NetworkHalf(
+ is_new=is_new,
+ quiet=not verbose,
+ verbose=verbose,
+ clone_bundle=clone_bundle,
+ current_branch_only=current_branch_only,
+ tags=tags,
+ submodules=submodules,
+ clone_filter=clone_filter,
+ partial_clone_exclude=self.manifest.PartialCloneExclude,
+ ).success
+ if not success:
+ r = self.GetRemote()
+ print(
+ "fatal: cannot obtain manifest %s" % r.url, file=sys.stderr
+ )
+
+ # Better delete the manifest git dir if we created it; otherwise
+ # next time (when user fixes problems) we won't go through the
+ # "is_new" logic.
+ if is_new:
+ platform_utils.rmtree(self.gitdir)
+ return False
+
+ if manifest_branch:
+ self.MetaBranchSwitch(submodules=submodules)
+
+ syncbuf = SyncBuffer(self.config)
+ self.Sync_LocalHalf(syncbuf, submodules=submodules)
+ syncbuf.Finish()
+
+ if is_new or self.CurrentBranch is None:
+ if not self.StartBranch("default"):
+ print(
+ "fatal: cannot create default in manifest",
+ file=sys.stderr,
+ )
+ return False
+
+ if not manifest_name:
+ print("fatal: manifest name (-m) is required.", file=sys.stderr)
+ return False
+
+ elif is_new:
+ # This is a new standalone manifest.
+ manifest_name = "default.xml"
+ manifest_data = fetch.fetch_file(manifest_url, verbose=verbose)
+ dest = os.path.join(self.worktree, manifest_name)
+ os.makedirs(os.path.dirname(dest), exist_ok=True)
+ with open(dest, "wb") as f:
+ f.write(manifest_data)
+
+ try:
+ self.manifest.Link(manifest_name)
+ except ManifestParseError as e:
+ print(
+ "fatal: manifest '%s' not available" % manifest_name,
+ file=sys.stderr,
+ )
+ print("fatal: %s" % str(e), file=sys.stderr)
+ return False
+
+ if not this_manifest_only:
+ for submanifest in self.manifest.submanifests.values():
+ spec = submanifest.ToSubmanifestSpec()
+ submanifest.repo_client.manifestProject.Sync(
+ manifest_url=spec.manifestUrl,
+ manifest_branch=spec.revision,
+ standalone_manifest=standalone_manifest,
+ groups=self.manifest_groups,
+ platform=platform,
+ mirror=mirror,
+ dissociate=dissociate,
+ reference=reference,
+ worktree=worktree,
+ submodules=submodules,
+ archive=archive,
+ partial_clone=partial_clone,
+ clone_filter=clone_filter,
+ partial_clone_exclude=partial_clone_exclude,
+ clone_bundle=clone_bundle,
+ git_lfs=git_lfs,
+ use_superproject=use_superproject,
+ verbose=verbose,
+ current_branch_only=current_branch_only,
+ tags=tags,
+ depth=depth,
+ git_event_log=git_event_log,
+ manifest_name=spec.manifestName,
+ this_manifest_only=False,
+ outer_manifest=False,
+ )
+
+ # Lastly, if the manifest has a <superproject> then have the
+ # superproject sync it (if it will be used).
+ if git_superproject.UseSuperproject(use_superproject, self.manifest):
+ sync_result = self.manifest.superproject.Sync(git_event_log)
+ if not sync_result.success:
+ submanifest = ""
+ if self.manifest.path_prefix:
+ submanifest = f"for {self.manifest.path_prefix} "
+ print(
+ f"warning: git update of superproject {submanifest}failed, "
+ "repo sync will not use superproject to fetch source; "
+ "while this error is not fatal, and you can continue to "
+ "run repo sync, please run repo init with the "
+ "--no-use-superproject option to stop seeing this warning",
+ file=sys.stderr,
+ )
+ if sync_result.fatal and use_superproject is not None:
+ return False
+
+ return True
+
+ def _ConfigureDepth(self, depth):
+ """Configure the depth we'll sync down.
+
+ Args:
+ depth: an int, how deep of a partial clone to create.
+ """
+ # Opt.depth will be non-None if user actually passed --depth to repo
+ # init.
+ if depth is not None:
+ if depth > 0:
+ # Positive values will set the depth.
+ depth = str(depth)
+ else:
+ # Negative numbers will clear the depth; passing None to
+ # SetString will do that.
+ depth = None
+
+ # We store the depth in the main manifest project.
+ self.config.SetString("repo.depth", depth)
diff --git a/release/sign-launcher.py b/release/sign-launcher.py
index ffe23cc..8656612 100755
--- a/release/sign-launcher.py
+++ b/release/sign-launcher.py
@@ -28,43 +28,56 @@
def sign(opts):
- """Sign the launcher!"""
- output = ''
- for key in opts.keys:
- # We use ! at the end of the key so that gpg uses this specific key.
- # Otherwise it uses the key as a lookup into the overall key and uses the
- # default signing key. i.e. It will see that KEYID_RSA is a subkey of
- # another key, and use the primary key to sign instead of the subkey.
- cmd = ['gpg', '--homedir', opts.gpgdir, '-u', f'{key}!', '--batch', '--yes',
- '--armor', '--detach-sign', '--output', '-', opts.launcher]
- ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
- output += ret.stdout
+ """Sign the launcher!"""
+ output = ""
+ for key in opts.keys:
+ # We use ! at the end of the key so that gpg uses this specific key.
+ # Otherwise it uses the key as a lookup into the overall key and uses
+ # the default signing key. i.e. It will see that KEYID_RSA is a subkey
+ # of another key, and use the primary key to sign instead of the subkey.
+ cmd = [
+ "gpg",
+ "--homedir",
+ opts.gpgdir,
+ "-u",
+ f"{key}!",
+ "--batch",
+ "--yes",
+ "--armor",
+ "--detach-sign",
+ "--output",
+ "-",
+ opts.launcher,
+ ]
+ ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
+ output += ret.stdout
- # Save the combined signatures into one file.
- with open(f'{opts.launcher}.asc', 'w', encoding='utf-8') as fp:
- fp.write(output)
+ # Save the combined signatures into one file.
+ with open(f"{opts.launcher}.asc", "w", encoding="utf-8") as fp:
+ fp.write(output)
def check(opts):
- """Check the signature."""
- util.run(opts, ['gpg', '--verify', f'{opts.launcher}.asc'])
+ """Check the signature."""
+ util.run(opts, ["gpg", "--verify", f"{opts.launcher}.asc"])
def get_version(opts):
- """Get the version from |launcher|."""
- # Make sure we don't search $PATH when signing the "repo" file in the cwd.
- launcher = os.path.join('.', opts.launcher)
- cmd = [launcher, '--version']
- ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
- m = re.search(r'repo launcher version ([0-9.]+)', ret.stdout)
- if not m:
- sys.exit(f'{opts.launcher}: unable to detect repo version')
- return m.group(1)
+ """Get the version from |launcher|."""
+ # Make sure we don't search $PATH when signing the "repo" file in the cwd.
+ launcher = os.path.join(".", opts.launcher)
+ cmd = [launcher, "--version"]
+ ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
+ m = re.search(r"repo launcher version ([0-9.]+)", ret.stdout)
+ if not m:
+ sys.exit(f"{opts.launcher}: unable to detect repo version")
+ return m.group(1)
def postmsg(opts, version):
- """Helpful info to show at the end for release manager."""
- print(f"""
+ """Helpful info to show at the end for release manager."""
+ print(
+ f"""
Repo launcher bucket:
gs://git-repo-downloads/
@@ -81,55 +94,72 @@
gsutil ls -la gs://git-repo-downloads/repo gs://git-repo-downloads/repo.asc
gsutil cp -a public-read gs://git-repo-downloads/repo#<unique id> gs://git-repo-downloads/repo
gsutil cp -a public-read gs://git-repo-downloads/repo.asc#<unique id> gs://git-repo-downloads/repo.asc
-""")
+""" # noqa: E501
+ )
def get_parser():
- """Get a CLI parser."""
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('-n', '--dry-run',
- dest='dryrun', action='store_true',
- help='show everything that would be done')
- parser.add_argument('--gpgdir',
- default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
- help='path to dedicated gpg dir with release keys '
- '(default: ~/.gnupg/repo/)')
- parser.add_argument('--keyid', dest='keys', default=[], action='append',
- help='alternative signing keys to use')
- parser.add_argument('launcher',
- default=os.path.join(util.TOPDIR, 'repo'), nargs='?',
- help='the launcher script to sign')
- return parser
+ """Get a CLI parser."""
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ "-n",
+ "--dry-run",
+ dest="dryrun",
+ action="store_true",
+ help="show everything that would be done",
+ )
+ parser.add_argument(
+ "--gpgdir",
+ default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
+ help="path to dedicated gpg dir with release keys "
+ "(default: ~/.gnupg/repo/)",
+ )
+ parser.add_argument(
+ "--keyid",
+ dest="keys",
+ default=[],
+ action="append",
+ help="alternative signing keys to use",
+ )
+ parser.add_argument(
+ "launcher",
+ default=os.path.join(util.TOPDIR, "repo"),
+ nargs="?",
+ help="the launcher script to sign",
+ )
+ return parser
def main(argv):
- """The main func!"""
- parser = get_parser()
- opts = parser.parse_args(argv)
+ """The main func!"""
+ parser = get_parser()
+ opts = parser.parse_args(argv)
- if not os.path.exists(opts.gpgdir):
- parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
- if not os.path.exists(opts.launcher):
- parser.error(f'launcher does not exist: {opts.launcher}')
+ if not os.path.exists(opts.gpgdir):
+ parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
+ if not os.path.exists(opts.launcher):
+ parser.error(f"launcher does not exist: {opts.launcher}")
- opts.launcher = os.path.relpath(opts.launcher)
- print(f'Signing "{opts.launcher}" launcher script and saving to '
- f'"{opts.launcher}.asc"')
+ opts.launcher = os.path.relpath(opts.launcher)
+ print(
+ f'Signing "{opts.launcher}" launcher script and saving to '
+ f'"{opts.launcher}.asc"'
+ )
- if opts.keys:
- print(f'Using custom keys to sign: {" ".join(opts.keys)}')
- else:
- print('Using official Repo release keys to sign')
- opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
- util.import_release_key(opts)
+ if opts.keys:
+ print(f'Using custom keys to sign: {" ".join(opts.keys)}')
+ else:
+ print("Using official Repo release keys to sign")
+ opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
+ util.import_release_key(opts)
- version = get_version(opts)
- sign(opts)
- check(opts)
- postmsg(opts, version)
+ version = get_version(opts)
+ sign(opts)
+ check(opts)
+ postmsg(opts, version)
- return 0
+ return 0
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/release/sign-tag.py b/release/sign-tag.py
index 605437c..fbfe7b2 100755
--- a/release/sign-tag.py
+++ b/release/sign-tag.py
@@ -35,46 +35,61 @@
KEYID = util.KEYID_DSA
# Regular expression to validate tag names.
-RE_VALID_TAG = r'^v([0-9]+[.])+[0-9]+$'
+RE_VALID_TAG = r"^v([0-9]+[.])+[0-9]+$"
def sign(opts):
- """Tag the commit & sign it!"""
- # We use ! at the end of the key so that gpg uses this specific key.
- # Otherwise it uses the key as a lookup into the overall key and uses the
- # default signing key. i.e. It will see that KEYID_RSA is a subkey of
- # another key, and use the primary key to sign instead of the subkey.
- cmd = ['git', 'tag', '-s', opts.tag, '-u', f'{opts.key}!',
- '-m', f'repo {opts.tag}', opts.commit]
+ """Tag the commit & sign it!"""
+ # We use ! at the end of the key so that gpg uses this specific key.
+ # Otherwise it uses the key as a lookup into the overall key and uses the
+ # default signing key. i.e. It will see that KEYID_RSA is a subkey of
+ # another key, and use the primary key to sign instead of the subkey.
+ cmd = [
+ "git",
+ "tag",
+ "-s",
+ opts.tag,
+ "-u",
+ f"{opts.key}!",
+ "-m",
+ f"repo {opts.tag}",
+ opts.commit,
+ ]
- key = 'GNUPGHOME'
- print('+', f'export {key}="{opts.gpgdir}"')
- oldvalue = os.getenv(key)
- os.putenv(key, opts.gpgdir)
- util.run(opts, cmd)
- if oldvalue is None:
- os.unsetenv(key)
- else:
- os.putenv(key, oldvalue)
+ key = "GNUPGHOME"
+ print("+", f'export {key}="{opts.gpgdir}"')
+ oldvalue = os.getenv(key)
+ os.putenv(key, opts.gpgdir)
+ util.run(opts, cmd)
+ if oldvalue is None:
+ os.unsetenv(key)
+ else:
+ os.putenv(key, oldvalue)
def check(opts):
- """Check the signature."""
- util.run(opts, ['git', 'tag', '--verify', opts.tag])
+ """Check the signature."""
+ util.run(opts, ["git", "tag", "--verify", opts.tag])
def postmsg(opts):
- """Helpful info to show at the end for release manager."""
- cmd = ['git', 'rev-parse', 'remotes/origin/stable']
- ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
- current_release = ret.stdout.strip()
+ """Helpful info to show at the end for release manager."""
+ cmd = ["git", "rev-parse", "remotes/origin/stable"]
+ ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
+ current_release = ret.stdout.strip()
- cmd = ['git', 'log', '--format=%h (%aN) %s', '--no-merges',
- f'remotes/origin/stable..{opts.tag}']
- ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
- shortlog = ret.stdout.strip()
+ cmd = [
+ "git",
+ "log",
+ "--format=%h (%aN) %s",
+ "--no-merges",
+ f"remotes/origin/stable..{opts.tag}",
+ ]
+ ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
+ shortlog = ret.stdout.strip()
- print(f"""
+ print(
+ f"""
Here's the short log since the last release.
{shortlog}
@@ -84,57 +99,69 @@
To roll back a release:
git push origin --force {current_release}:stable -n
-""")
+"""
+ )
def get_parser():
- """Get a CLI parser."""
- parser = argparse.ArgumentParser(
- description=__doc__,
- formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument('-n', '--dry-run',
- dest='dryrun', action='store_true',
- help='show everything that would be done')
- parser.add_argument('--gpgdir',
- default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
- help='path to dedicated gpg dir with release keys '
- '(default: ~/.gnupg/repo/)')
- parser.add_argument('-f', '--force', action='store_true',
- help='force signing of any tag')
- parser.add_argument('--keyid', dest='key',
- help='alternative signing key to use')
- parser.add_argument('tag',
- help='the tag to create (e.g. "v2.0")')
- parser.add_argument('commit', default='HEAD', nargs='?',
- help='the commit to tag')
- return parser
+ """Get a CLI parser."""
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+ parser.add_argument(
+ "-n",
+ "--dry-run",
+ dest="dryrun",
+ action="store_true",
+ help="show everything that would be done",
+ )
+ parser.add_argument(
+ "--gpgdir",
+ default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
+ help="path to dedicated gpg dir with release keys "
+ "(default: ~/.gnupg/repo/)",
+ )
+ parser.add_argument(
+ "-f", "--force", action="store_true", help="force signing of any tag"
+ )
+ parser.add_argument(
+ "--keyid", dest="key", help="alternative signing key to use"
+ )
+ parser.add_argument("tag", help='the tag to create (e.g. "v2.0")')
+ parser.add_argument(
+ "commit", default="HEAD", nargs="?", help="the commit to tag"
+ )
+ return parser
def main(argv):
- """The main func!"""
- parser = get_parser()
- opts = parser.parse_args(argv)
+ """The main func!"""
+ parser = get_parser()
+ opts = parser.parse_args(argv)
- if not os.path.exists(opts.gpgdir):
- parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
+ if not os.path.exists(opts.gpgdir):
+ parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
- if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
- parser.error(f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
- 'use --force to sign anyways')
+ if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
+ parser.error(
+ f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
+ "use --force to sign anyways"
+ )
- if opts.key:
- print(f'Using custom key to sign: {opts.key}')
- else:
- print('Using official Repo release key to sign')
- opts.key = KEYID
- util.import_release_key(opts)
+ if opts.key:
+ print(f"Using custom key to sign: {opts.key}")
+ else:
+ print("Using official Repo release key to sign")
+ opts.key = KEYID
+ util.import_release_key(opts)
- sign(opts)
- check(opts)
- postmsg(opts)
+ sign(opts)
+ check(opts)
+ postmsg(opts)
- return 0
+ return 0
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/release/update_manpages.py b/release/update_manpages.py
index d1bf892..cd2acc0 100644
--- a/release/update_manpages.py
+++ b/release/update_manpages.py
@@ -29,91 +29,125 @@
import tempfile
TOPDIR = Path(__file__).resolve().parent.parent
-MANDIR = TOPDIR.joinpath('man')
+MANDIR = TOPDIR.joinpath("man")
# Load repo local modules.
sys.path.insert(0, str(TOPDIR))
from git_command import RepoSourceVersion
import subcmds
+
def worker(cmd, **kwargs):
- subprocess.run(cmd, **kwargs)
+ subprocess.run(cmd, **kwargs)
+
def main(argv):
- parser = argparse.ArgumentParser(description=__doc__)
- opts = parser.parse_args(argv)
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.parse_args(argv)
- if not shutil.which('help2man'):
- sys.exit('Please install help2man to continue.')
+ if not shutil.which("help2man"):
+ sys.exit("Please install help2man to continue.")
- # Let repo know we're generating man pages so it can avoid some dynamic
- # behavior (like probing active number of CPUs). We use a weird name &
- # value to make it less likely for users to set this var themselves.
- os.environ['_REPO_GENERATE_MANPAGES_'] = ' indeed! '
+ # Let repo know we're generating man pages so it can avoid some dynamic
+ # behavior (like probing active number of CPUs). We use a weird name &
+ # value to make it less likely for users to set this var themselves.
+ os.environ["_REPO_GENERATE_MANPAGES_"] = " indeed! "
- # "repo branch" is an alias for "repo branches".
- del subcmds.all_commands['branch']
- (MANDIR / 'repo-branch.1').write_text('.so man1/repo-branches.1')
+ # "repo branch" is an alias for "repo branches".
+ del subcmds.all_commands["branch"]
+ (MANDIR / "repo-branch.1").write_text(".so man1/repo-branches.1")
- version = RepoSourceVersion()
- cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}',
- '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}',
- '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo',
- '-h', f'help {cmd}'] for cmd in subcmds.all_commands]
- cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git',
- '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}',
- '-o', MANDIR.joinpath('repo.1.tmp'), './repo',
- '-h', '--help-all'])
+ version = RepoSourceVersion()
+ cmdlist = [
+ [
+ "help2man",
+ "-N",
+ "-n",
+ f"repo {cmd} - manual page for repo {cmd}",
+ "-S",
+ f"repo {cmd}",
+ "-m",
+ "Repo Manual",
+ f"--version-string={version}",
+ "-o",
+ MANDIR.joinpath(f"repo-{cmd}.1.tmp"),
+ "./repo",
+ "-h",
+ f"help {cmd}",
+ ]
+ for cmd in subcmds.all_commands
+ ]
+ cmdlist.append(
+ [
+ "help2man",
+ "-N",
+ "-n",
+ "repository management tool built on top of git",
+ "-S",
+ "repo",
+ "-m",
+ "Repo Manual",
+ f"--version-string={version}",
+ "-o",
+ MANDIR.joinpath("repo.1.tmp"),
+ "./repo",
+ "-h",
+ "--help-all",
+ ]
+ )
- with tempfile.TemporaryDirectory() as tempdir:
- tempdir = Path(tempdir)
- repo_dir = tempdir / '.repo'
- repo_dir.mkdir()
- (repo_dir / 'repo').symlink_to(TOPDIR)
+ with tempfile.TemporaryDirectory() as tempdir:
+ tempdir = Path(tempdir)
+ repo_dir = tempdir / ".repo"
+ repo_dir.mkdir()
+ (repo_dir / "repo").symlink_to(TOPDIR)
- # Create a repo wrapper using the active Python executable. We can't pass
- # this directly to help2man as it's too simple, so insert it via shebang.
- data = (TOPDIR / 'repo').read_text(encoding='utf-8')
- tempbin = tempdir / 'repo'
- tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8')
- tempbin.chmod(0o755)
+ # Create a repo wrapper using the active Python executable. We can't
+ # pass this directly to help2man as it's too simple, so insert it via
+ # shebang.
+ data = (TOPDIR / "repo").read_text(encoding="utf-8")
+ tempbin = tempdir / "repo"
+ tempbin.write_text(f"#!{sys.executable}\n" + data, encoding="utf-8")
+ tempbin.chmod(0o755)
- # Run all cmd in parallel, and wait for them to finish.
- with multiprocessing.Pool() as pool:
- pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
+ # Run all cmd in parallel, and wait for them to finish.
+ with multiprocessing.Pool() as pool:
+ pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
- for tmp_path in MANDIR.glob('*.1.tmp'):
- path = tmp_path.parent / tmp_path.stem
- old_data = path.read_text() if path.exists() else ''
+ for tmp_path in MANDIR.glob("*.1.tmp"):
+ path = tmp_path.parent / tmp_path.stem
+ old_data = path.read_text() if path.exists() else ""
- data = tmp_path.read_text()
- tmp_path.unlink()
+ data = tmp_path.read_text()
+ tmp_path.unlink()
- data = replace_regex(data)
+ data = replace_regex(data)
- # If the only thing that changed was the date, don't refresh. This avoids
- # a lot of noise when only one file actually updates.
- old_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', old_data, flags=re.M)
- new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', data, flags=re.M)
- if old_data != new_data:
- path.write_text(data)
+ # If the only thing that changed was the date, don't refresh. This
+ # avoids a lot of noise when only one file actually updates.
+ old_data = re.sub(
+ r'^(\.TH REPO "1" ")([^"]+)', r"\1", old_data, flags=re.M
+ )
+ new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r"\1", data, flags=re.M)
+ if old_data != new_data:
+ path.write_text(data)
def replace_regex(data):
- """Replace semantically null regexes in the data.
+ """Replace semantically null regexes in the data.
- Args:
- data: manpage text.
+ Args:
+ data: manpage text.
- Returns:
- Updated manpage text.
- """
- regex = (
- (r'(It was generated by help2man) [0-9.]+', r'\g<1>.'),
- (r'^\033\[[0-9;]*m([^\033]*)\033\[m', r'\g<1>'),
- (r'^\.IP\n(.*:)\n', r'.SS \g<1>\n'),
- (r'^\.PP\nDescription', r'.SH DETAILS'),
- )
- for pattern, replacement in regex:
- data = re.sub(pattern, replacement, data, flags=re.M)
- return data
+ Returns:
+ Updated manpage text.
+ """
+ regex = (
+ (r"(It was generated by help2man) [0-9.]+", r"\g<1>."),
+ (r"^\033\[[0-9;]*m([^\033]*)\033\[m", r"\g<1>"),
+ (r"^\.IP\n(.*:)\n", r".SS \g<1>\n"),
+ (r"^\.PP\nDescription", r".SH DETAILS"),
+ )
+ for pattern, replacement in regex:
+ data = re.sub(pattern, replacement, data, flags=re.M)
+ return data
diff --git a/release/util.py b/release/util.py
index 9d0eb1d..df7a563 100644
--- a/release/util.py
+++ b/release/util.py
@@ -20,54 +20,60 @@
import sys
-assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
+assert sys.version_info >= (3, 6), "This module requires Python 3.6+"
TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-HOMEDIR = os.path.expanduser('~')
+HOMEDIR = os.path.expanduser("~")
# These are the release keys we sign with.
-KEYID_DSA = '8BB9AD793E8E6153AF0F9A4416530D5E920F5C65'
-KEYID_RSA = 'A34A13BE8E76BFF46A0C022DA2E75A824AAB9624'
-KEYID_ECC = 'E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39'
+KEYID_DSA = "8BB9AD793E8E6153AF0F9A4416530D5E920F5C65"
+KEYID_RSA = "A34A13BE8E76BFF46A0C022DA2E75A824AAB9624"
+KEYID_ECC = "E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39"
def cmdstr(cmd):
- """Get a nicely quoted shell command."""
- ret = []
- for arg in cmd:
- if not re.match(r'^[a-zA-Z0-9/_.=-]+$', arg):
- arg = f'"{arg}"'
- ret.append(arg)
- return ' '.join(ret)
+ """Get a nicely quoted shell command."""
+ ret = []
+ for arg in cmd:
+ if not re.match(r"^[a-zA-Z0-9/_.=-]+$", arg):
+ arg = f'"{arg}"'
+ ret.append(arg)
+ return " ".join(ret)
def run(opts, cmd, check=True, **kwargs):
- """Helper around subprocess.run to include logging."""
- print('+', cmdstr(cmd))
- if opts.dryrun:
- cmd = ['true', '--'] + cmd
- try:
- return subprocess.run(cmd, check=check, **kwargs)
- except subprocess.CalledProcessError as e:
- print(f'aborting: {e}', file=sys.stderr)
- sys.exit(1)
+ """Helper around subprocess.run to include logging."""
+ print("+", cmdstr(cmd))
+ if opts.dryrun:
+ cmd = ["true", "--"] + cmd
+ try:
+ return subprocess.run(cmd, check=check, **kwargs)
+ except subprocess.CalledProcessError as e:
+ print(f"aborting: {e}", file=sys.stderr)
+ sys.exit(1)
def import_release_key(opts):
- """Import the public key of the official release repo signing key."""
- # Extract the key from our repo launcher.
- launcher = getattr(opts, 'launcher', os.path.join(TOPDIR, 'repo'))
- print(f'Importing keys from "{launcher}" launcher script')
- with open(launcher, encoding='utf-8') as fp:
- data = fp.read()
+ """Import the public key of the official release repo signing key."""
+ # Extract the key from our repo launcher.
+ launcher = getattr(opts, "launcher", os.path.join(TOPDIR, "repo"))
+ print(f'Importing keys from "{launcher}" launcher script')
+ with open(launcher, encoding="utf-8") as fp:
+ data = fp.read()
- keys = re.findall(
- r'\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*'
- r'\n-----END PGP PUBLIC KEY BLOCK-----\n', data, flags=re.M)
- run(opts, ['gpg', '--import'], input='\n'.join(keys).encode('utf-8'))
+ keys = re.findall(
+ r"\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*"
+ r"\n-----END PGP PUBLIC KEY BLOCK-----\n",
+ data,
+ flags=re.M,
+ )
+ run(opts, ["gpg", "--import"], input="\n".join(keys).encode("utf-8"))
- print('Marking keys as fully trusted')
- run(opts, ['gpg', '--import-ownertrust'],
- input=f'{KEYID_DSA}:6:\n'.encode('utf-8'))
+ print("Marking keys as fully trusted")
+ run(
+ opts,
+ ["gpg", "--import-ownertrust"],
+ input=f"{KEYID_DSA}:6:\n".encode("utf-8"),
+ )
diff --git a/repo b/repo
index ce3df05..c32e52d 100755
--- a/repo
+++ b/repo
@@ -506,10 +506,10 @@
"""Parse a path in the GITC FS and return its client name.
Args:
- gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
+ gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
Returns:
- The GITC client name.
+ The GITC client name.
"""
if gitc_fs_path == GITC_FS_ROOT_DIR:
return None
@@ -942,14 +942,14 @@
* xxx: Branch or tag or commit.
Args:
- cwd: The git checkout to run in.
- committish: The REPO_REV argument to resolve.
+ cwd: The git checkout to run in.
+ committish: The REPO_REV argument to resolve.
Returns:
- A tuple of (remote ref, commit) as makes sense for the committish.
- For branches, this will look like ('refs/heads/stable', <revision>).
- For tags, this will look like ('refs/tags/v1.0', <revision>).
- For commits, this will be (<revision>, <revision>).
+ A tuple of (remote ref, commit) as makes sense for the committish.
+ For branches, this will look like ('refs/heads/stable', <revision>).
+ For tags, this will look like ('refs/tags/v1.0', <revision>).
+ For commits, this will be (<revision>, <revision>).
"""
def resolve(committish):
ret = run_git('rev-parse', '--verify', '%s^{commit}' % (committish,),
@@ -1104,7 +1104,7 @@
"""Initialize.
Args:
- requirements: A dictionary of settings.
+ requirements: A dictionary of settings.
"""
self.requirements = requirements
diff --git a/repo_trace.py b/repo_trace.py
index 1ba86c7..4946217 100644
--- a/repo_trace.py
+++ b/repo_trace.py
@@ -29,138 +29,142 @@
import platform_utils
# Env var to implicitly turn on tracing.
-REPO_TRACE = 'REPO_TRACE'
+REPO_TRACE = "REPO_TRACE"
# Temporarily set tracing to always on unless user expicitly sets to 0.
-_TRACE = os.environ.get(REPO_TRACE) != '0'
+_TRACE = os.environ.get(REPO_TRACE) != "0"
_TRACE_TO_STDERR = False
_TRACE_FILE = None
-_TRACE_FILE_NAME = 'TRACE_FILE'
+_TRACE_FILE_NAME = "TRACE_FILE"
_MAX_SIZE = 70 # in MiB
-_NEW_COMMAND_SEP = '+++++++++++++++NEW COMMAND+++++++++++++++++++'
+_NEW_COMMAND_SEP = "+++++++++++++++NEW COMMAND+++++++++++++++++++"
def IsTraceToStderr():
- """Whether traces are written to stderr."""
- return _TRACE_TO_STDERR
+ """Whether traces are written to stderr."""
+ return _TRACE_TO_STDERR
def IsTrace():
- """Whether tracing is enabled."""
- return _TRACE
+ """Whether tracing is enabled."""
+ return _TRACE
def SetTraceToStderr():
- """Enables tracing logging to stderr."""
- global _TRACE_TO_STDERR
- _TRACE_TO_STDERR = True
+ """Enables tracing logging to stderr."""
+ global _TRACE_TO_STDERR
+ _TRACE_TO_STDERR = True
def SetTrace():
- """Enables tracing."""
- global _TRACE
- _TRACE = True
+ """Enables tracing."""
+ global _TRACE
+ _TRACE = True
def _SetTraceFile(quiet):
- """Sets the trace file location."""
- global _TRACE_FILE
- _TRACE_FILE = _GetTraceFile(quiet)
+ """Sets the trace file location."""
+ global _TRACE_FILE
+ _TRACE_FILE = _GetTraceFile(quiet)
class Trace(ContextDecorator):
- """Used to capture and save git traces."""
+ """Used to capture and save git traces."""
- def _time(self):
- """Generate nanoseconds of time in a py3.6 safe way"""
- return int(time.time() * 1e+9)
+ def _time(self):
+ """Generate nanoseconds of time in a py3.6 safe way"""
+ return int(time.time() * 1e9)
- def __init__(self, fmt, *args, first_trace=False, quiet=True):
- """Initialize the object.
+ def __init__(self, fmt, *args, first_trace=False, quiet=True):
+ """Initialize the object.
- Args:
- fmt: The format string for the trace.
- *args: Arguments to pass to formatting.
- first_trace: Whether this is the first trace of a `repo` invocation.
- quiet: Whether to suppress notification of trace file location.
- """
- if not IsTrace():
- return
- self._trace_msg = fmt % args
+ Args:
+ fmt: The format string for the trace.
+ *args: Arguments to pass to formatting.
+ first_trace: Whether this is the first trace of a `repo` invocation.
+ quiet: Whether to suppress notification of trace file location.
+ """
+ if not IsTrace():
+ return
+ self._trace_msg = fmt % args
- if not _TRACE_FILE:
- _SetTraceFile(quiet)
+ if not _TRACE_FILE:
+ _SetTraceFile(quiet)
- if first_trace:
- _ClearOldTraces()
- self._trace_msg = f'{_NEW_COMMAND_SEP} {self._trace_msg}'
+ if first_trace:
+ _ClearOldTraces()
+ self._trace_msg = f"{_NEW_COMMAND_SEP} {self._trace_msg}"
- def __enter__(self):
- if not IsTrace():
- return self
+ def __enter__(self):
+ if not IsTrace():
+ return self
- print_msg = f'PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n'
+ print_msg = (
+ f"PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n"
+ )
- with open(_TRACE_FILE, 'a') as f:
- print(print_msg, file=f)
+ with open(_TRACE_FILE, "a") as f:
+ print(print_msg, file=f)
- if _TRACE_TO_STDERR:
- print(print_msg, file=sys.stderr)
+ if _TRACE_TO_STDERR:
+ print(print_msg, file=sys.stderr)
- return self
+ return self
- def __exit__(self, *exc):
- if not IsTrace():
- return False
+ def __exit__(self, *exc):
+ if not IsTrace():
+ return False
- print_msg = f'PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n'
+ print_msg = (
+ f"PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n"
+ )
- with open(_TRACE_FILE, 'a') as f:
- print(print_msg, file=f)
+ with open(_TRACE_FILE, "a") as f:
+ print(print_msg, file=f)
- if _TRACE_TO_STDERR:
- print(print_msg, file=sys.stderr)
+ if _TRACE_TO_STDERR:
+ print(print_msg, file=sys.stderr)
- return False
+ return False
def _GetTraceFile(quiet):
- """Get the trace file or create one."""
- # TODO: refactor to pass repodir to Trace.
- repo_dir = os.path.dirname(os.path.dirname(__file__))
- trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
- if not quiet:
- print(f'Trace outputs in {trace_file}', file=sys.stderr)
- return trace_file
+ """Get the trace file or create one."""
+ # TODO: refactor to pass repodir to Trace.
+ repo_dir = os.path.dirname(os.path.dirname(__file__))
+ trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
+ if not quiet:
+ print(f"Trace outputs in {trace_file}", file=sys.stderr)
+ return trace_file
def _ClearOldTraces():
- """Clear the oldest commands if trace file is too big."""
- try:
- with open(_TRACE_FILE, 'r', errors='ignore') as f:
- if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE:
+ """Clear the oldest commands if trace file is too big."""
+ try:
+ with open(_TRACE_FILE, "r", errors="ignore") as f:
+ if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE:
+ return
+ trace_lines = f.readlines()
+ except FileNotFoundError:
return
- trace_lines = f.readlines()
- except FileNotFoundError:
- return
- while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE:
- for i, line in enumerate(trace_lines):
- if 'END:' in line and _NEW_COMMAND_SEP in line:
- trace_lines = trace_lines[i + 1:]
- break
- else:
- # The last chunk is bigger than _MAX_SIZE, so just throw everything away.
- trace_lines = []
+ while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE:
+ for i, line in enumerate(trace_lines):
+ if "END:" in line and _NEW_COMMAND_SEP in line:
+ trace_lines = trace_lines[i + 1 :]
+ break
+ else:
+ # The last chunk is bigger than _MAX_SIZE, so just throw everything
+ # away.
+ trace_lines = []
- while trace_lines and trace_lines[-1] == '\n':
- trace_lines = trace_lines[:-1]
- # Write to a temporary file with a unique name in the same filesystem
- # before replacing the original trace file.
- temp_dir, temp_prefix = os.path.split(_TRACE_FILE)
- with tempfile.NamedTemporaryFile('w',
- dir=temp_dir,
- prefix=temp_prefix,
- delete=False) as f:
- f.writelines(trace_lines)
- platform_utils.rename(f.name, _TRACE_FILE)
+ while trace_lines and trace_lines[-1] == "\n":
+ trace_lines = trace_lines[:-1]
+ # Write to a temporary file with a unique name in the same filesystem
+ # before replacing the original trace file.
+ temp_dir, temp_prefix = os.path.split(_TRACE_FILE)
+ with tempfile.NamedTemporaryFile(
+ "w", dir=temp_dir, prefix=temp_prefix, delete=False
+ ) as f:
+ f.writelines(trace_lines)
+ platform_utils.rename(f.name, _TRACE_FILE)
diff --git a/run_tests b/run_tests
index 0ea098a..e76f9d8 100755
--- a/run_tests
+++ b/run_tests
@@ -13,10 +13,28 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Wrapper to run pytest with the right settings."""
+"""Wrapper to run black and pytest with the right settings."""
+import os
+import subprocess
import sys
import pytest
-if __name__ == '__main__':
- sys.exit(pytest.main(sys.argv[1:]))
+
+def run_black():
+ """Returns the exit code of running `black --check`."""
+ dirpath = os.path.dirname(os.path.realpath(__file__))
+ return subprocess.run(
+ [sys.executable, "-m", "black", "--check", dirpath], check=False
+ ).returncode
+
+
+def main(argv):
+ """The main entry."""
+ black_ret = 0 if argv else run_black()
+ pytest_ret = pytest.main(argv)
+ return 0 if not black_ret and not pytest_ret else 1
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/run_tests.vpython3 b/run_tests.vpython3
index d0e821d..0c790bc 100644
--- a/run_tests.vpython3
+++ b/run_tests.vpython3
@@ -26,8 +26,8 @@
# Required by pytest==6.2.2
wheel: <
- name: "infra/python/wheels/packaging-py2_py3"
- version: "version:16.8"
+ name: "infra/python/wheels/packaging-py3"
+ version: "version:23.0"
>
# Required by pytest==6.2.2
@@ -59,3 +59,44 @@
name: "infra/python/wheels/six-py2_py3"
version: "version:1.16.0"
>
+
+wheel: <
+ name: "infra/python/wheels/black-py3"
+ version: "version:23.1.0"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/mypy-extensions-py3"
+ version: "version:0.4.3"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/tomli-py3"
+ version: "version:2.0.1"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/platformdirs-py3"
+ version: "version:2.5.2"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/pathspec-py3"
+ version: "version:0.9.0"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/typing-extensions-py3"
+ version: "version:4.3.0"
+>
+
+# Required by black==23.1.0
+wheel: <
+ name: "infra/python/wheels/click-py3"
+ version: "version:8.0.3"
+>
diff --git a/setup.py b/setup.py
index 848b3f6..f50eb47 100755
--- a/setup.py
+++ b/setup.py
@@ -23,39 +23,39 @@
# Rip out the first intro paragraph.
-with open(os.path.join(TOPDIR, 'README.md')) as fp:
+with open(os.path.join(TOPDIR, "README.md")) as fp:
lines = fp.read().splitlines()[2:]
- end = lines.index('')
- long_description = ' '.join(lines[0:end])
+ end = lines.index("")
+ long_description = " ".join(lines[0:end])
# https://packaging.python.org/tutorials/packaging-projects/
setuptools.setup(
- name='repo',
- version='2',
- maintainer='Various',
- maintainer_email='repo-discuss@googlegroups.com',
- description='Repo helps manage many Git repositories',
+ name="repo",
+ version="2",
+ maintainer="Various",
+ maintainer_email="repo-discuss@googlegroups.com",
+ description="Repo helps manage many Git repositories",
long_description=long_description,
- long_description_content_type='text/plain',
- url='https://gerrit.googlesource.com/git-repo/',
+ long_description_content_type="text/plain",
+ url="https://gerrit.googlesource.com/git-repo/",
project_urls={
- 'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo',
+ "Bug Tracker": "https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo", # noqa: E501
},
# https://pypi.org/classifiers/
classifiers=[
- 'Development Status :: 6 - Mature',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: Apache Software License',
- 'Natural Language :: English',
- 'Operating System :: MacOS :: MacOS X',
- 'Operating System :: Microsoft :: Windows :: Windows 10',
- 'Operating System :: POSIX :: Linux',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3 :: Only',
- 'Topic :: Software Development :: Version Control :: Git',
+ "Development Status :: 6 - Mature",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Natural Language :: English",
+ "Operating System :: MacOS :: MacOS X",
+ "Operating System :: Microsoft :: Windows :: Windows 10",
+ "Operating System :: POSIX :: Linux",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
+ "Topic :: Software Development :: Version Control :: Git",
],
- python_requires='>=3.6',
- packages=['subcmds'],
+ python_requires=">=3.6",
+ packages=["subcmds"],
)
diff --git a/ssh.py b/ssh.py
index 004fdba..1d7ebe3 100644
--- a/ssh.py
+++ b/ssh.py
@@ -28,254 +28,264 @@
from repo_trace import Trace
-PROXY_PATH = os.path.join(os.path.dirname(__file__), 'git_ssh')
+PROXY_PATH = os.path.join(os.path.dirname(__file__), "git_ssh")
def _run_ssh_version():
- """run ssh -V to display the version number"""
- return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()
+ """run ssh -V to display the version number"""
+ return subprocess.check_output(
+ ["ssh", "-V"], stderr=subprocess.STDOUT
+ ).decode()
def _parse_ssh_version(ver_str=None):
- """parse a ssh version string into a tuple"""
- if ver_str is None:
- ver_str = _run_ssh_version()
- m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
- if m:
- return tuple(int(x) for x in m.group(1).split('.'))
- else:
- return ()
+ """parse a ssh version string into a tuple"""
+ if ver_str is None:
+ ver_str = _run_ssh_version()
+ m = re.match(r"^OpenSSH_([0-9.]+)(p[0-9]+)?\s", ver_str)
+ if m:
+ return tuple(int(x) for x in m.group(1).split("."))
+ else:
+ return ()
@functools.lru_cache(maxsize=None)
def version():
- """return ssh version as a tuple"""
- try:
- return _parse_ssh_version()
- except FileNotFoundError:
- print('fatal: ssh not installed', file=sys.stderr)
- sys.exit(1)
- except subprocess.CalledProcessError:
- print('fatal: unable to detect ssh version', file=sys.stderr)
- sys.exit(1)
+ """return ssh version as a tuple"""
+ try:
+ return _parse_ssh_version()
+ except FileNotFoundError:
+ print("fatal: ssh not installed", file=sys.stderr)
+ sys.exit(1)
+ except subprocess.CalledProcessError:
+ print("fatal: unable to detect ssh version", file=sys.stderr)
+ sys.exit(1)
-URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
-URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
+URI_SCP = re.compile(r"^([^@:]*@?[^:/]{1,}):")
+URI_ALL = re.compile(r"^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/")
class ProxyManager:
- """Manage various ssh clients & masters that we spawn.
+ """Manage various ssh clients & masters that we spawn.
- This will take care of sharing state between multiprocessing children, and
- make sure that if we crash, we don't leak any of the ssh sessions.
+ This will take care of sharing state between multiprocessing children, and
+ make sure that if we crash, we don't leak any of the ssh sessions.
- The code should work with a single-process scenario too, and not add too much
- overhead due to the manager.
- """
-
- # Path to the ssh program to run which will pass our master settings along.
- # Set here more as a convenience API.
- proxy = PROXY_PATH
-
- def __init__(self, manager):
- # Protect access to the list of active masters.
- self._lock = multiprocessing.Lock()
- # List of active masters (pid). These will be spawned on demand, and we are
- # responsible for shutting them all down at the end.
- self._masters = manager.list()
- # Set of active masters indexed by "host:port" information.
- # The value isn't used, but multiprocessing doesn't provide a set class.
- self._master_keys = manager.dict()
- # Whether ssh masters are known to be broken, so we give up entirely.
- self._master_broken = manager.Value('b', False)
- # List of active ssh sesssions. Clients will be added & removed as
- # connections finish, so this list is just for safety & cleanup if we crash.
- self._clients = manager.list()
- # Path to directory for holding master sockets.
- self._sock_path = None
-
- def __enter__(self):
- """Enter a new context."""
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- """Exit a context & clean up all resources."""
- self.close()
-
- def add_client(self, proc):
- """Track a new ssh session."""
- self._clients.append(proc.pid)
-
- def remove_client(self, proc):
- """Remove a completed ssh session."""
- try:
- self._clients.remove(proc.pid)
- except ValueError:
- pass
-
- def add_master(self, proc):
- """Track a new master connection."""
- self._masters.append(proc.pid)
-
- def _terminate(self, procs):
- """Kill all |procs|."""
- for pid in procs:
- try:
- os.kill(pid, signal.SIGTERM)
- os.waitpid(pid, 0)
- except OSError:
- pass
-
- # The multiprocessing.list() API doesn't provide many standard list()
- # methods, so we have to manually clear the list.
- while True:
- try:
- procs.pop(0)
- except:
- break
-
- def close(self):
- """Close this active ssh session.
-
- Kill all ssh clients & masters we created, and nuke the socket dir.
+ The code should work with a single-process scenario too, and not add too
+ much overhead due to the manager.
"""
- self._terminate(self._clients)
- self._terminate(self._masters)
- d = self.sock(create=False)
- if d:
- try:
- platform_utils.rmdir(os.path.dirname(d))
- except OSError:
- pass
+ # Path to the ssh program to run which will pass our master settings along.
+ # Set here more as a convenience API.
+ proxy = PROXY_PATH
- def _open_unlocked(self, host, port=None):
- """Make sure a ssh master session exists for |host| & |port|.
+ def __init__(self, manager):
+ # Protect access to the list of active masters.
+ self._lock = multiprocessing.Lock()
+ # List of active masters (pid). These will be spawned on demand, and we
+ # are responsible for shutting them all down at the end.
+ self._masters = manager.list()
+ # Set of active masters indexed by "host:port" information.
+ # The value isn't used, but multiprocessing doesn't provide a set class.
+ self._master_keys = manager.dict()
+ # Whether ssh masters are known to be broken, so we give up entirely.
+ self._master_broken = manager.Value("b", False)
+ # List of active ssh sessions. Clients will be added & removed as
+ # connections finish, so this list is just for safety & cleanup if we
+ # crash.
+ self._clients = manager.list()
+ # Path to directory for holding master sockets.
+ self._sock_path = None
- If one doesn't exist already, we'll create it.
+ def __enter__(self):
+ """Enter a new context."""
+ return self
- We won't grab any locks, so the caller has to do that. This helps keep the
- business logic of actually creating the master separate from grabbing locks.
- """
- # Check to see whether we already think that the master is running; if we
- # think it's already running, return right away.
- if port is not None:
- key = '%s:%s' % (host, port)
- else:
- key = host
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Exit a context & clean up all resources."""
+ self.close()
- if key in self._master_keys:
- return True
+ def add_client(self, proc):
+ """Track a new ssh session."""
+ self._clients.append(proc.pid)
- if self._master_broken.value or 'GIT_SSH' in os.environ:
- # Failed earlier, so don't retry.
- return False
+ def remove_client(self, proc):
+ """Remove a completed ssh session."""
+ try:
+ self._clients.remove(proc.pid)
+ except ValueError:
+ pass
- # We will make two calls to ssh; this is the common part of both calls.
- command_base = ['ssh', '-o', 'ControlPath %s' % self.sock(), host]
- if port is not None:
- command_base[1:1] = ['-p', str(port)]
+ def add_master(self, proc):
+ """Track a new master connection."""
+ self._masters.append(proc.pid)
- # Since the key wasn't in _master_keys, we think that master isn't running.
- # ...but before actually starting a master, we'll double-check. This can
- # be important because we can't tell that that 'git@myhost.com' is the same
- # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
- check_command = command_base + ['-O', 'check']
- with Trace('Call to ssh (check call): %s', ' '.join(check_command)):
- try:
- check_process = subprocess.Popen(check_command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- check_process.communicate() # read output, but ignore it...
- isnt_running = check_process.wait()
+ def _terminate(self, procs):
+ """Kill all |procs|."""
+ for pid in procs:
+ try:
+ os.kill(pid, signal.SIGTERM)
+ os.waitpid(pid, 0)
+ except OSError:
+ pass
- if not isnt_running:
- # Our double-check found that the master _was_ infact running. Add to
- # the list of keys.
- self._master_keys[key] = True
- return True
- except Exception:
- # Ignore excpetions. We we will fall back to the normal command and
- # print to the log there.
- pass
+ # The multiprocessing.list() API doesn't provide many standard list()
+ # methods, so we have to manually clear the list.
+ while True:
+ try:
+ procs.pop(0)
+ except: # noqa: E722
+ break
- command = command_base[:1] + ['-M', '-N'] + command_base[1:]
- p = None
- try:
- with Trace('Call to ssh: %s', ' '.join(command)):
- p = subprocess.Popen(command)
- except Exception as e:
- self._master_broken.value = True
- print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
- % (host, port, str(e)), file=sys.stderr)
- return False
+ def close(self):
+ """Close this active ssh session.
- time.sleep(1)
- ssh_died = (p.poll() is not None)
- if ssh_died:
- return False
+ Kill all ssh clients & masters we created, and nuke the socket dir.
+ """
+ self._terminate(self._clients)
+ self._terminate(self._masters)
- self.add_master(p)
- self._master_keys[key] = True
- return True
+ d = self.sock(create=False)
+ if d:
+ try:
+ platform_utils.rmdir(os.path.dirname(d))
+ except OSError:
+ pass
- def _open(self, host, port=None):
- """Make sure a ssh master session exists for |host| & |port|.
+ def _open_unlocked(self, host, port=None):
+ """Make sure a ssh master session exists for |host| & |port|.
- If one doesn't exist already, we'll create it.
+ If one doesn't exist already, we'll create it.
- This will obtain any necessary locks to avoid inter-process races.
- """
- # Bail before grabbing the lock if we already know that we aren't going to
- # try creating new masters below.
- if sys.platform in ('win32', 'cygwin'):
- return False
+ We won't grab any locks, so the caller has to do that. This helps keep
+ the business logic of actually creating the master separate from
+ grabbing locks.
+ """
+ # Check to see whether we already think that the master is running; if
+ # we think it's already running, return right away.
+ if port is not None:
+ key = "%s:%s" % (host, port)
+ else:
+ key = host
- # Acquire the lock. This is needed to prevent opening multiple masters for
- # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
- # manifest <remote fetch="ssh://xyz"> specifies a different host from the
- # one that was passed to repo init.
- with self._lock:
- return self._open_unlocked(host, port)
+ if key in self._master_keys:
+ return True
- def preconnect(self, url):
- """If |uri| will create a ssh connection, setup the ssh master for it."""
- m = URI_ALL.match(url)
- if m:
- scheme = m.group(1)
- host = m.group(2)
- if ':' in host:
- host, port = host.split(':')
- else:
- port = None
- if scheme in ('ssh', 'git+ssh', 'ssh+git'):
- return self._open(host, port)
- return False
+ if self._master_broken.value or "GIT_SSH" in os.environ:
+ # Failed earlier, so don't retry.
+ return False
- m = URI_SCP.match(url)
- if m:
- host = m.group(1)
- return self._open(host)
+ # We will make two calls to ssh; this is the common part of both calls.
+ command_base = ["ssh", "-o", "ControlPath %s" % self.sock(), host]
+ if port is not None:
+ command_base[1:1] = ["-p", str(port)]
- return False
+ # Since the key wasn't in _master_keys, we think that master isn't
+ # running... but before actually starting a master, we'll double-check.
+ # This can be important because we can't tell that that 'git@myhost.com'
+ # is the same as 'myhost.com' where "User git" is setup in the user's
+ # ~/.ssh/config file.
+ check_command = command_base + ["-O", "check"]
+ with Trace("Call to ssh (check call): %s", " ".join(check_command)):
+ try:
+ check_process = subprocess.Popen(
+ check_command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ check_process.communicate() # read output, but ignore it...
+ isnt_running = check_process.wait()
- def sock(self, create=True):
- """Return the path to the ssh socket dir.
+ if not isnt_running:
+ # Our double-check found that the master _was_ in fact
+ # running. Add to the list of keys.
+ self._master_keys[key] = True
+ return True
+ except Exception:
+ # Ignore exceptions. We will fall back to the normal command
+ # and print to the log there.
+ pass
- This has all the master sockets so clients can talk to them.
- """
- if self._sock_path is None:
- if not create:
- return None
- tmp_dir = '/tmp'
- if not os.path.exists(tmp_dir):
- tmp_dir = tempfile.gettempdir()
- if version() < (6, 7):
- tokens = '%r@%h:%p'
- else:
- tokens = '%C' # hash of %l%h%p%r
- self._sock_path = os.path.join(
- tempfile.mkdtemp('', 'ssh-', tmp_dir),
- 'master-' + tokens)
- return self._sock_path
+ command = command_base[:1] + ["-M", "-N"] + command_base[1:]
+ p = None
+ try:
+ with Trace("Call to ssh: %s", " ".join(command)):
+ p = subprocess.Popen(command)
+ except Exception as e:
+ self._master_broken.value = True
+ print(
+ "\nwarn: cannot enable ssh control master for %s:%s\n%s"
+ % (host, port, str(e)),
+ file=sys.stderr,
+ )
+ return False
+
+ time.sleep(1)
+ ssh_died = p.poll() is not None
+ if ssh_died:
+ return False
+
+ self.add_master(p)
+ self._master_keys[key] = True
+ return True
+
+ def _open(self, host, port=None):
+ """Make sure a ssh master session exists for |host| & |port|.
+
+ If one doesn't exist already, we'll create it.
+
+ This will obtain any necessary locks to avoid inter-process races.
+ """
+ # Bail before grabbing the lock if we already know that we aren't going
+ # to try creating new masters below.
+ if sys.platform in ("win32", "cygwin"):
+ return False
+
+ # Acquire the lock. This is needed to prevent opening multiple masters
+ # for the same host when we're running "repo sync -jN" (for N > 1) _and_
+ # the manifest <remote fetch="ssh://xyz"> specifies a different host
+ # from the one that was passed to repo init.
+ with self._lock:
+ return self._open_unlocked(host, port)
+
+ def preconnect(self, url):
+ """If |uri| will create a ssh connection, setup the ssh master for it.""" # noqa: E501
+ m = URI_ALL.match(url)
+ if m:
+ scheme = m.group(1)
+ host = m.group(2)
+ if ":" in host:
+ host, port = host.split(":")
+ else:
+ port = None
+ if scheme in ("ssh", "git+ssh", "ssh+git"):
+ return self._open(host, port)
+ return False
+
+ m = URI_SCP.match(url)
+ if m:
+ host = m.group(1)
+ return self._open(host)
+
+ return False
+
+ def sock(self, create=True):
+ """Return the path to the ssh socket dir.
+
+ This has all the master sockets so clients can talk to them.
+ """
+ if self._sock_path is None:
+ if not create:
+ return None
+ tmp_dir = "/tmp"
+ if not os.path.exists(tmp_dir):
+ tmp_dir = tempfile.gettempdir()
+ if version() < (6, 7):
+ tokens = "%r@%h:%p"
+ else:
+ tokens = "%C" # hash of %l%h%p%r
+ self._sock_path = os.path.join(
+ tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
+ )
+ return self._sock_path
diff --git a/subcmds/__init__.py b/subcmds/__init__.py
index 051dda0..4e41afc 100644
--- a/subcmds/__init__.py
+++ b/subcmds/__init__.py
@@ -19,31 +19,29 @@
my_dir = os.path.dirname(__file__)
for py in os.listdir(my_dir):
- if py == '__init__.py':
- continue
+ if py == "__init__.py":
+ continue
- if py.endswith('.py'):
- name = py[:-3]
+ if py.endswith(".py"):
+ name = py[:-3]
- clsn = name.capitalize()
- while clsn.find('_') > 0:
- h = clsn.index('_')
- clsn = clsn[0:h] + clsn[h + 1:].capitalize()
+ clsn = name.capitalize()
+ while clsn.find("_") > 0:
+ h = clsn.index("_")
+ clsn = clsn[0:h] + clsn[h + 1 :].capitalize()
- mod = __import__(__name__,
- globals(),
- locals(),
- ['%s' % name])
- mod = getattr(mod, name)
- try:
- cmd = getattr(mod, clsn)
- except AttributeError:
- raise SyntaxError('%s/%s does not define class %s' % (
- __name__, py, clsn))
+ mod = __import__(__name__, globals(), locals(), ["%s" % name])
+ mod = getattr(mod, name)
+ try:
+ cmd = getattr(mod, clsn)
+ except AttributeError:
+ raise SyntaxError(
+ "%s/%s does not define class %s" % (__name__, py, clsn)
+ )
- name = name.replace('_', '-')
- cmd.NAME = name
- all_commands[name] = cmd
+ name = name.replace("_", "-")
+ cmd.NAME = name
+ all_commands[name] = cmd
# Add 'branch' as an alias for 'branches'.
-all_commands['branch'] = all_commands['branches']
+all_commands["branch"] = all_commands["branches"]
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index c3d2d5b..1f687f5 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -23,9 +23,9 @@
class Abandon(Command):
- COMMON = True
- helpSummary = "Permanently abandon a development branch"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Permanently abandon a development branch"
+ helpUsage = """
%prog [--all | <branchname>] [<project>...]
This subcommand permanently abandons a development branch by
@@ -33,83 +33,104 @@
It is equivalent to "git branch -D <branchname>".
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _Options(self, p):
- p.add_option('--all',
- dest='all', action='store_true',
- help='delete all branches in all projects')
+ def _Options(self, p):
+ p.add_option(
+ "--all",
+ dest="all",
+ action="store_true",
+ help="delete all branches in all projects",
+ )
- def ValidateOptions(self, opt, args):
- if not opt.all and not args:
- self.Usage()
+ def ValidateOptions(self, opt, args):
+ if not opt.all and not args:
+ self.Usage()
- if not opt.all:
- nb = args[0]
- if not git.check_ref_format('heads/%s' % nb):
- self.OptionParser.error("'%s' is not a valid branch name" % nb)
- else:
- args.insert(0, "'All local branches'")
-
- def _ExecuteOne(self, all_branches, nb, project):
- """Abandon one project."""
- if all_branches:
- branches = project.GetBranches()
- else:
- branches = [nb]
-
- ret = {}
- for name in branches:
- status = project.AbandonBranch(name)
- if status is not None:
- ret[name] = status
- return (ret, project)
-
- def Execute(self, opt, args):
- nb = args[0]
- err = defaultdict(list)
- success = defaultdict(list)
- all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only)
- _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
-
- def _ProcessResults(_pool, pm, states):
- for (results, project) in states:
- for branch, status in results.items():
- if status:
- success[branch].append(project)
- else:
- err[branch].append(project)
- pm.update()
-
- self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._ExecuteOne, opt.all, nb),
- all_projects,
- callback=_ProcessResults,
- output=Progress('Abandon %s' % (nb,), len(all_projects), quiet=opt.quiet))
-
- width = max(itertools.chain(
- [25], (len(x) for x in itertools.chain(success, err))))
- if err:
- for br in err.keys():
- err_msg = "error: cannot abandon %s" % br
- print(err_msg, file=sys.stderr)
- for proj in err[br]:
- print(' ' * len(err_msg) + " | %s" % _RelPath(proj), file=sys.stderr)
- sys.exit(1)
- elif not success:
- print('error: no project has local branch(es) : %s' % nb,
- file=sys.stderr)
- sys.exit(1)
- else:
- # Everything below here is displaying status.
- if opt.quiet:
- return
- print('Abandoned branches:')
- for br in success.keys():
- if len(all_projects) > 1 and len(all_projects) == len(success[br]):
- result = "all project"
+ if not opt.all:
+ nb = args[0]
+ if not git.check_ref_format("heads/%s" % nb):
+ self.OptionParser.error("'%s' is not a valid branch name" % nb)
else:
- result = "%s" % (
- ('\n' + ' ' * width + '| ').join(_RelPath(p) for p in success[br]))
- print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result))
+ args.insert(0, "'All local branches'")
+
+ def _ExecuteOne(self, all_branches, nb, project):
+ """Abandon one project."""
+ if all_branches:
+ branches = project.GetBranches()
+ else:
+ branches = [nb]
+
+ ret = {}
+ for name in branches:
+ status = project.AbandonBranch(name)
+ if status is not None:
+ ret[name] = status
+ return (ret, project)
+
+ def Execute(self, opt, args):
+ nb = args[0]
+ err = defaultdict(list)
+ success = defaultdict(list)
+ all_projects = self.GetProjects(
+ args[1:], all_manifests=not opt.this_manifest_only
+ )
+ _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
+
+ def _ProcessResults(_pool, pm, states):
+ for results, project in states:
+ for branch, status in results.items():
+ if status:
+ success[branch].append(project)
+ else:
+ err[branch].append(project)
+ pm.update()
+
+ self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(self._ExecuteOne, opt.all, nb),
+ all_projects,
+ callback=_ProcessResults,
+ output=Progress(
+ "Abandon %s" % (nb,), len(all_projects), quiet=opt.quiet
+ ),
+ )
+
+ width = max(
+ itertools.chain(
+ [25], (len(x) for x in itertools.chain(success, err))
+ )
+ )
+ if err:
+ for br in err.keys():
+ err_msg = "error: cannot abandon %s" % br
+ print(err_msg, file=sys.stderr)
+ for proj in err[br]:
+ print(
+ " " * len(err_msg) + " | %s" % _RelPath(proj),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ elif not success:
+ print(
+ "error: no project has local branch(es) : %s" % nb,
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ else:
+ # Everything below here is displaying status.
+ if opt.quiet:
+ return
+ print("Abandoned branches:")
+ for br in success.keys():
+ if len(all_projects) > 1 and len(all_projects) == len(
+ success[br]
+ ):
+ result = "all project"
+ else:
+ result = "%s" % (
+ ("\n" + " " * width + "| ").join(
+ _RelPath(p) for p in success[br]
+ )
+ )
+ print("%s%s| %s\n" % (br, " " * (width - len(br)), result))
diff --git a/subcmds/branches.py b/subcmds/branches.py
index fcf67ef..4d5bb19 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -20,51 +20,51 @@
class BranchColoring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'branch')
- self.current = self.printer('current', fg='green')
- self.local = self.printer('local')
- self.notinproject = self.printer('notinproject', fg='red')
+ def __init__(self, config):
+ Coloring.__init__(self, config, "branch")
+ self.current = self.printer("current", fg="green")
+ self.local = self.printer("local")
+ self.notinproject = self.printer("notinproject", fg="red")
class BranchInfo(object):
- def __init__(self, name):
- self.name = name
- self.current = 0
- self.published = 0
- self.published_equal = 0
- self.projects = []
+ def __init__(self, name):
+ self.name = name
+ self.current = 0
+ self.published = 0
+ self.published_equal = 0
+ self.projects = []
- def add(self, b):
- if b.current:
- self.current += 1
- if b.published:
- self.published += 1
- if b.revision == b.published:
- self.published_equal += 1
- self.projects.append(b)
+ def add(self, b):
+ if b.current:
+ self.current += 1
+ if b.published:
+ self.published += 1
+ if b.revision == b.published:
+ self.published_equal += 1
+ self.projects.append(b)
- @property
- def IsCurrent(self):
- return self.current > 0
+ @property
+ def IsCurrent(self):
+ return self.current > 0
- @property
- def IsSplitCurrent(self):
- return self.current != 0 and self.current != len(self.projects)
+ @property
+ def IsSplitCurrent(self):
+ return self.current != 0 and self.current != len(self.projects)
- @property
- def IsPublished(self):
- return self.published > 0
+ @property
+ def IsPublished(self):
+ return self.published > 0
- @property
- def IsPublishedEqual(self):
- return self.published_equal == len(self.projects)
+ @property
+ def IsPublishedEqual(self):
+ return self.published_equal == len(self.projects)
class Branches(Command):
- COMMON = True
- helpSummary = "View current topic branches"
- helpUsage = """
+ COMMON = True
+ helpSummary = "View current topic branches"
+ helpUsage = """
%prog [<project>...]
Summarizes the currently available topic branches.
@@ -95,111 +95,114 @@
is shown, then the branch appears in all projects.
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def Execute(self, opt, args):
- projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
- out = BranchColoring(self.manifest.manifestProject.config)
- all_branches = {}
- project_cnt = len(projects)
+ def Execute(self, opt, args):
+ projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
+ out = BranchColoring(self.manifest.manifestProject.config)
+ all_branches = {}
+ project_cnt = len(projects)
- def _ProcessResults(_pool, _output, results):
- for name, b in itertools.chain.from_iterable(results):
- if name not in all_branches:
- all_branches[name] = BranchInfo(name)
- all_branches[name].add(b)
+ def _ProcessResults(_pool, _output, results):
+ for name, b in itertools.chain.from_iterable(results):
+ if name not in all_branches:
+ all_branches[name] = BranchInfo(name)
+ all_branches[name].add(b)
- self.ExecuteInParallel(
- opt.jobs,
- expand_project_to_branches,
- projects,
- callback=_ProcessResults)
+ self.ExecuteInParallel(
+ opt.jobs,
+ expand_project_to_branches,
+ projects,
+ callback=_ProcessResults,
+ )
- names = sorted(all_branches)
+ names = sorted(all_branches)
- if not names:
- print(' (no branches)', file=sys.stderr)
- return
+ if not names:
+ print(" (no branches)", file=sys.stderr)
+ return
- width = 25
- for name in names:
- if width < len(name):
- width = len(name)
+ width = 25
+ for name in names:
+ if width < len(name):
+ width = len(name)
- for name in names:
- i = all_branches[name]
- in_cnt = len(i.projects)
+ for name in names:
+ i = all_branches[name]
+ in_cnt = len(i.projects)
- if i.IsCurrent:
- current = '*'
- hdr = out.current
- else:
- current = ' '
- hdr = out.local
-
- if i.IsPublishedEqual:
- published = 'P'
- elif i.IsPublished:
- published = 'p'
- else:
- published = ' '
-
- hdr('%c%c %-*s' % (current, published, width, name))
- out.write(' |')
-
- _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
- if in_cnt < project_cnt:
- fmt = out.write
- paths = []
- non_cur_paths = []
- if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
- in_type = 'in'
- for b in i.projects:
- relpath = _RelPath(b.project)
- if not i.IsSplitCurrent or b.current:
- paths.append(relpath)
+ if i.IsCurrent:
+ current = "*"
+ hdr = out.current
else:
- non_cur_paths.append(relpath)
- else:
- fmt = out.notinproject
- in_type = 'not in'
- have = set()
- for b in i.projects:
- have.add(_RelPath(b.project))
- for p in projects:
- if _RelPath(p) not in have:
- paths.append(_RelPath(p))
+ current = " "
+ hdr = out.local
- s = ' %s %s' % (in_type, ', '.join(paths))
- if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
- fmt = out.current if i.IsCurrent else fmt
- fmt(s)
- else:
- fmt(' %s:' % in_type)
- fmt = out.current if i.IsCurrent else out.write
- for p in paths:
+ if i.IsPublishedEqual:
+ published = "P"
+ elif i.IsPublished:
+ published = "p"
+ else:
+ published = " "
+
+ hdr("%c%c %-*s" % (current, published, width, name))
+ out.write(" |")
+
+ _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
+ if in_cnt < project_cnt:
+ fmt = out.write
+ paths = []
+ non_cur_paths = []
+ if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
+ in_type = "in"
+ for b in i.projects:
+ relpath = _RelPath(b.project)
+ if not i.IsSplitCurrent or b.current:
+ paths.append(relpath)
+ else:
+ non_cur_paths.append(relpath)
+ else:
+ fmt = out.notinproject
+ in_type = "not in"
+ have = set()
+ for b in i.projects:
+ have.add(_RelPath(b.project))
+ for p in projects:
+ if _RelPath(p) not in have:
+ paths.append(_RelPath(p))
+
+ s = " %s %s" % (in_type, ", ".join(paths))
+ if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
+ fmt = out.current if i.IsCurrent else fmt
+ fmt(s)
+ else:
+ fmt(" %s:" % in_type)
+ fmt = out.current if i.IsCurrent else out.write
+ for p in paths:
+ out.nl()
+ fmt(width * " " + " %s" % p)
+ fmt = out.write
+ for p in non_cur_paths:
+ out.nl()
+ fmt(width * " " + " %s" % p)
+ else:
+ out.write(" in all projects")
out.nl()
- fmt(width * ' ' + ' %s' % p)
- fmt = out.write
- for p in non_cur_paths:
- out.nl()
- fmt(width * ' ' + ' %s' % p)
- else:
- out.write(' in all projects')
- out.nl()
def expand_project_to_branches(project):
- """Expands a project into a list of branch names & associated information.
+ """Expands a project into a list of branch names & associated information.
- Args:
- project: project.Project
+ Args:
+ project: project.Project
- Returns:
- List[Tuple[str, git_config.Branch]]
- """
- branches = []
- for name, b in project.GetBranches().items():
- b.project = project
- branches.append((name, b))
- return branches
+ Returns:
+ List[Tuple[str, git_config.Branch]]
+ """
+ branches = []
+ for name, b in project.GetBranches().items():
+ b.project = project
+ branches.append((name, b))
+ return branches
diff --git a/subcmds/checkout.py b/subcmds/checkout.py
index 768b602..08012a8 100644
--- a/subcmds/checkout.py
+++ b/subcmds/checkout.py
@@ -20,12 +20,12 @@
class Checkout(Command):
- COMMON = True
- helpSummary = "Checkout a branch for development"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Checkout a branch for development"
+ helpUsage = """
%prog <branchname> [<project>...]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command checks out an existing branch that was previously
created by 'repo start'.
@@ -33,43 +33,50 @@
repo forall [<project>...] -c git checkout <branchname>
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def ValidateOptions(self, opt, args):
- if not args:
- self.Usage()
+ def ValidateOptions(self, opt, args):
+ if not args:
+ self.Usage()
- def _ExecuteOne(self, nb, project):
- """Checkout one project."""
- return (project.CheckoutBranch(nb), project)
+ def _ExecuteOne(self, nb, project):
+ """Checkout one project."""
+ return (project.CheckoutBranch(nb), project)
- def Execute(self, opt, args):
- nb = args[0]
- err = []
- success = []
- all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only)
+ def Execute(self, opt, args):
+ nb = args[0]
+ err = []
+ success = []
+ all_projects = self.GetProjects(
+ args[1:], all_manifests=not opt.this_manifest_only
+ )
- def _ProcessResults(_pool, pm, results):
- for status, project in results:
- if status is not None:
- if status:
- success.append(project)
- else:
- err.append(project)
- pm.update()
+ def _ProcessResults(_pool, pm, results):
+ for status, project in results:
+ if status is not None:
+ if status:
+ success.append(project)
+ else:
+ err.append(project)
+ pm.update()
- self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._ExecuteOne, nb),
- all_projects,
- callback=_ProcessResults,
- output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet))
+ self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(self._ExecuteOne, nb),
+ all_projects,
+ callback=_ProcessResults,
+ output=Progress(
+ "Checkout %s" % (nb,), len(all_projects), quiet=opt.quiet
+ ),
+ )
- if err:
- for p in err:
- print("error: %s/: cannot checkout %s" % (p.relpath, nb),
- file=sys.stderr)
- sys.exit(1)
- elif not success:
- print('error: no project has branch %s' % nb, file=sys.stderr)
- sys.exit(1)
+ if err:
+ for p in err:
+ print(
+ "error: %s/: cannot checkout %s" % (p.relpath, nb),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ elif not success:
+ print("error: no project has branch %s" % nb, file=sys.stderr)
+ sys.exit(1)
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py
index eecf4e1..4cfb8c8 100644
--- a/subcmds/cherry_pick.py
+++ b/subcmds/cherry_pick.py
@@ -17,96 +17,107 @@
from command import Command
from git_command import GitCommand
-CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
+CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$")
class CherryPick(Command):
- COMMON = True
- helpSummary = "Cherry-pick a change."
- helpUsage = """
+ COMMON = True
+ helpSummary = "Cherry-pick a change."
+ helpUsage = """
%prog <sha1>
"""
- helpDescription = """
+ helpDescription = """
'%prog' cherry-picks a change from one branch to another.
The change id will be updated, and a reference to the old
change id will be added.
"""
- def ValidateOptions(self, opt, args):
- if len(args) != 1:
- self.Usage()
+ def ValidateOptions(self, opt, args):
+ if len(args) != 1:
+ self.Usage()
- def Execute(self, opt, args):
- reference = args[0]
+ def Execute(self, opt, args):
+ reference = args[0]
- p = GitCommand(None,
- ['rev-parse', '--verify', reference],
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() != 0:
- print(p.stderr, file=sys.stderr)
- sys.exit(1)
- sha1 = p.stdout.strip()
+ p = GitCommand(
+ None,
+ ["rev-parse", "--verify", reference],
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ if p.Wait() != 0:
+ print(p.stderr, file=sys.stderr)
+ sys.exit(1)
+ sha1 = p.stdout.strip()
- p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
- if p.Wait() != 0:
- print("error: Failed to retrieve old commit message", file=sys.stderr)
- sys.exit(1)
- old_msg = self._StripHeader(p.stdout)
+ p = GitCommand(None, ["cat-file", "commit", sha1], capture_stdout=True)
+ if p.Wait() != 0:
+ print(
+ "error: Failed to retrieve old commit message", file=sys.stderr
+ )
+ sys.exit(1)
+ old_msg = self._StripHeader(p.stdout)
- p = GitCommand(None,
- ['cherry-pick', sha1],
- capture_stdout=True,
- capture_stderr=True)
- status = p.Wait()
+ p = GitCommand(
+ None,
+ ["cherry-pick", sha1],
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ status = p.Wait()
- if p.stdout:
- print(p.stdout.strip(), file=sys.stdout)
- if p.stderr:
- print(p.stderr.strip(), file=sys.stderr)
+ if p.stdout:
+ print(p.stdout.strip(), file=sys.stdout)
+ if p.stderr:
+ print(p.stderr.strip(), file=sys.stderr)
- if status == 0:
- # The cherry-pick was applied correctly. We just need to edit the
- # commit message.
- new_msg = self._Reformat(old_msg, sha1)
+ if status == 0:
+ # The cherry-pick was applied correctly. We just need to edit the
+ # commit message.
+ new_msg = self._Reformat(old_msg, sha1)
- p = GitCommand(None, ['commit', '--amend', '-F', '-'],
- input=new_msg,
- capture_stdout=True,
- capture_stderr=True)
- if p.Wait() != 0:
- print("error: Failed to update commit message", file=sys.stderr)
- sys.exit(1)
+ p = GitCommand(
+ None,
+ ["commit", "--amend", "-F", "-"],
+ input=new_msg,
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ if p.Wait() != 0:
+ print("error: Failed to update commit message", file=sys.stderr)
+ sys.exit(1)
- else:
- print('NOTE: When committing (please see above) and editing the commit '
- 'message, please remove the old Change-Id-line and add:')
- print(self._GetReference(sha1), file=sys.stderr)
- print(file=sys.stderr)
+ else:
+ print(
+ "NOTE: When committing (please see above) and editing the "
+ "commit message, please remove the old Change-Id-line and add:"
+ )
+ print(self._GetReference(sha1), file=sys.stderr)
+ print(file=sys.stderr)
- def _IsChangeId(self, line):
- return CHANGE_ID_RE.match(line)
+ def _IsChangeId(self, line):
+ return CHANGE_ID_RE.match(line)
- def _GetReference(self, sha1):
- return "(cherry picked from commit %s)" % sha1
+ def _GetReference(self, sha1):
+ return "(cherry picked from commit %s)" % sha1
- def _StripHeader(self, commit_msg):
- lines = commit_msg.splitlines()
- return "\n".join(lines[lines.index("") + 1:])
+ def _StripHeader(self, commit_msg):
+ lines = commit_msg.splitlines()
+ return "\n".join(lines[lines.index("") + 1 :])
- def _Reformat(self, old_msg, sha1):
- new_msg = []
+ def _Reformat(self, old_msg, sha1):
+ new_msg = []
- for line in old_msg.splitlines():
- if not self._IsChangeId(line):
- new_msg.append(line)
+ for line in old_msg.splitlines():
+ if not self._IsChangeId(line):
+ new_msg.append(line)
- # Add a blank line between the message and the change id/reference
- try:
- if new_msg[-1].strip() != "":
- new_msg.append("")
- except IndexError:
- pass
+ # Add a blank line between the message and the change id/reference.
+ try:
+ if new_msg[-1].strip() != "":
+ new_msg.append("")
+ except IndexError:
+ pass
- new_msg.append(self._GetReference(sha1))
- return "\n".join(new_msg)
+ new_msg.append(self._GetReference(sha1))
+ return "\n".join(new_msg)
diff --git a/subcmds/diff.py b/subcmds/diff.py
index a606ee9..5c627c0 100644
--- a/subcmds/diff.py
+++ b/subcmds/diff.py
@@ -19,54 +19,63 @@
class Diff(PagedCommand):
- COMMON = True
- helpSummary = "Show changes between commit and working tree"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Show changes between commit and working tree"
+ helpUsage = """
%prog [<project>...]
The -u option causes '%prog' to generate diff output with file paths
relative to the repository root, so the output can be applied
to the Unix 'patch' command.
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _Options(self, p):
- p.add_option('-u', '--absolute',
- dest='absolute', action='store_true',
- help='paths are relative to the repository root')
+ def _Options(self, p):
+ p.add_option(
+ "-u",
+ "--absolute",
+ dest="absolute",
+ action="store_true",
+ help="paths are relative to the repository root",
+ )
- def _ExecuteOne(self, absolute, local, project):
- """Obtains the diff for a specific project.
+ def _ExecuteOne(self, absolute, local, project):
+ """Obtains the diff for a specific project.
- Args:
- absolute: Paths are relative to the root.
- local: a boolean, if True, the path is relative to the local
- (sub)manifest. If false, the path is relative to the
- outermost manifest.
- project: Project to get status of.
+ Args:
+ absolute: Paths are relative to the root.
+ local: a boolean, if True, the path is relative to the local
+ (sub)manifest. If false, the path is relative to the outermost
+ manifest.
+ project: Project to get status of.
- Returns:
- The status of the project.
- """
- buf = io.StringIO()
- ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
- return (ret, buf.getvalue())
+ Returns:
+ The status of the project.
+ """
+ buf = io.StringIO()
+ ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
+ return (ret, buf.getvalue())
- def Execute(self, opt, args):
- all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
+ def Execute(self, opt, args):
+ all_projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- def _ProcessResults(_pool, _output, results):
- ret = 0
- for (state, output) in results:
- if output:
- print(output, end='')
- if not state:
- ret = 1
- return ret
+ def _ProcessResults(_pool, _output, results):
+ ret = 0
+ for state, output in results:
+ if output:
+ print(output, end="")
+ if not state:
+ ret = 1
+ return ret
- return self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._ExecuteOne, opt.absolute, opt.this_manifest_only),
- all_projects,
- callback=_ProcessResults,
- ordered=True)
+ return self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(
+ self._ExecuteOne, opt.absolute, opt.this_manifest_only
+ ),
+ all_projects,
+ callback=_ProcessResults,
+ ordered=True,
+ )
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 4f9f5b0..b446dbd 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -18,24 +18,24 @@
class _Coloring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, "status")
+ def __init__(self, config):
+ Coloring.__init__(self, config, "status")
class Diffmanifests(PagedCommand):
- """ A command to see logs in projects represented by manifests
+ """A command to see logs in projects represented by manifests
- This is used to see deeper differences between manifests. Where a simple
- diff would only show a diff of sha1s for example, this command will display
- the logs of the project between both sha1s, allowing user to see diff at a
- deeper level.
- """
+ This is used to see deeper differences between manifests. Where a simple
+ diff would only show a diff of sha1s for example, this command will display
+ the logs of the project between both sha1s, allowing user to see diff at a
+ deeper level.
+ """
- COMMON = True
- helpSummary = "Manifest diff utility"
- helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
+ COMMON = True
+ helpSummary = "Manifest diff utility"
+ helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
- helpDescription = """
+ helpDescription = """
The %prog command shows differences between project revisions of manifest1 and
manifest2. if manifest2 is not specified, current manifest.xml will be used
instead. Both absolute and relative paths may be used for manifests. Relative
@@ -65,159 +65,209 @@
"""
- def _Options(self, p):
- p.add_option('--raw',
- dest='raw', action='store_true',
- help='display raw diff')
- p.add_option('--no-color',
- dest='color', action='store_false', default=True,
- help='does not display the diff in color')
- p.add_option('--pretty-format',
- dest='pretty_format', action='store',
- metavar='<FORMAT>',
- help='print the log using a custom git pretty format string')
+ def _Options(self, p):
+ p.add_option(
+ "--raw", dest="raw", action="store_true", help="display raw diff"
+ )
+ p.add_option(
+ "--no-color",
+ dest="color",
+ action="store_false",
+ default=True,
+ help="does not display the diff in color",
+ )
+ p.add_option(
+ "--pretty-format",
+ dest="pretty_format",
+ action="store",
+ metavar="<FORMAT>",
+ help="print the log using a custom git pretty format string",
+ )
- def _printRawDiff(self, diff, pretty_format=None, local=False):
- _RelPath = lambda p: p.RelPath(local=local)
- for project in diff['added']:
- self.printText("A %s %s" % (_RelPath(project), project.revisionExpr))
- self.out.nl()
-
- for project in diff['removed']:
- self.printText("R %s %s" % (_RelPath(project), project.revisionExpr))
- self.out.nl()
-
- for project, otherProject in diff['changed']:
- self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr,
- otherProject.revisionExpr))
- self.out.nl()
- self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
-
- for project, otherProject in diff['unreachable']:
- self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr,
- otherProject.revisionExpr))
- self.out.nl()
-
- def _printDiff(self, diff, color=True, pretty_format=None, local=False):
- _RelPath = lambda p: p.RelPath(local=local)
- if diff['added']:
- self.out.nl()
- self.printText('added projects : \n')
- self.out.nl()
- for project in diff['added']:
- self.printProject('\t%s' % (_RelPath(project)))
- self.printText(' at revision ')
- self.printRevision(project.revisionExpr)
- self.out.nl()
-
- if diff['removed']:
- self.out.nl()
- self.printText('removed projects : \n')
- self.out.nl()
- for project in diff['removed']:
- self.printProject('\t%s' % (_RelPath(project)))
- self.printText(' at revision ')
- self.printRevision(project.revisionExpr)
- self.out.nl()
-
- if diff['missing']:
- self.out.nl()
- self.printText('missing projects : \n')
- self.out.nl()
- for project in diff['missing']:
- self.printProject('\t%s' % (_RelPath(project)))
- self.printText(' at revision ')
- self.printRevision(project.revisionExpr)
- self.out.nl()
-
- if diff['changed']:
- self.out.nl()
- self.printText('changed projects : \n')
- self.out.nl()
- for project, otherProject in diff['changed']:
- self.printProject('\t%s' % (_RelPath(project)))
- self.printText(' changed from ')
- self.printRevision(project.revisionExpr)
- self.printText(' to ')
- self.printRevision(otherProject.revisionExpr)
- self.out.nl()
- self._printLogs(project, otherProject, raw=False, color=color,
- pretty_format=pretty_format)
- self.out.nl()
-
- if diff['unreachable']:
- self.out.nl()
- self.printText('projects with unreachable revisions : \n')
- self.out.nl()
- for project, otherProject in diff['unreachable']:
- self.printProject('\t%s ' % (_RelPath(project)))
- self.printRevision(project.revisionExpr)
- self.printText(' or ')
- self.printRevision(otherProject.revisionExpr)
- self.printText(' not found')
- self.out.nl()
-
- def _printLogs(self, project, otherProject, raw=False, color=True,
- pretty_format=None):
-
- logs = project.getAddedAndRemovedLogs(otherProject,
- oneline=(pretty_format is None),
- color=color,
- pretty_format=pretty_format)
- if logs['removed']:
- removedLogs = logs['removed'].split('\n')
- for log in removedLogs:
- if log.strip():
- if raw:
- self.printText(' R ' + log)
- self.out.nl()
- else:
- self.printRemoved('\t\t[-] ')
- self.printText(log)
+ def _printRawDiff(self, diff, pretty_format=None, local=False):
+ _RelPath = lambda p: p.RelPath(local=local)
+ for project in diff["added"]:
+ self.printText(
+ "A %s %s" % (_RelPath(project), project.revisionExpr)
+ )
self.out.nl()
- if logs['added']:
- addedLogs = logs['added'].split('\n')
- for log in addedLogs:
- if log.strip():
- if raw:
- self.printText(' A ' + log)
- self.out.nl()
- else:
- self.printAdded('\t\t[+] ')
- self.printText(log)
+ for project in diff["removed"]:
+ self.printText(
+ "R %s %s" % (_RelPath(project), project.revisionExpr)
+ )
self.out.nl()
- def ValidateOptions(self, opt, args):
- if not args or len(args) > 2:
- self.OptionParser.error('missing manifests to diff')
- if opt.this_manifest_only is False:
- raise self.OptionParser.error(
- '`diffmanifest` only supports the current tree')
+ for project, otherProject in diff["changed"]:
+ self.printText(
+ "C %s %s %s"
+ % (
+ _RelPath(project),
+ project.revisionExpr,
+ otherProject.revisionExpr,
+ )
+ )
+ self.out.nl()
+ self._printLogs(
+ project,
+ otherProject,
+ raw=True,
+ color=False,
+ pretty_format=pretty_format,
+ )
- def Execute(self, opt, args):
- self.out = _Coloring(self.client.globalConfig)
- self.printText = self.out.nofmt_printer('text')
- if opt.color:
- self.printProject = self.out.nofmt_printer('project', attr='bold')
- self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold')
- self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold')
- self.printRevision = self.out.nofmt_printer('revision', fg='yellow')
- else:
- self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
+ for project, otherProject in diff["unreachable"]:
+ self.printText(
+ "U %s %s %s"
+ % (
+ _RelPath(project),
+ project.revisionExpr,
+ otherProject.revisionExpr,
+ )
+ )
+ self.out.nl()
- manifest1 = RepoClient(self.repodir)
- manifest1.Override(args[0], load_local_manifests=False)
- if len(args) == 1:
- manifest2 = self.manifest
- else:
- manifest2 = RepoClient(self.repodir)
- manifest2.Override(args[1], load_local_manifests=False)
+ def _printDiff(self, diff, color=True, pretty_format=None, local=False):
+ _RelPath = lambda p: p.RelPath(local=local)
+ if diff["added"]:
+ self.out.nl()
+ self.printText("added projects : \n")
+ self.out.nl()
+ for project in diff["added"]:
+ self.printProject("\t%s" % (_RelPath(project)))
+ self.printText(" at revision ")
+ self.printRevision(project.revisionExpr)
+ self.out.nl()
- diff = manifest1.projectsDiff(manifest2)
- if opt.raw:
- self._printRawDiff(diff, pretty_format=opt.pretty_format,
- local=opt.this_manifest_only)
- else:
- self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format,
- local=opt.this_manifest_only)
+ if diff["removed"]:
+ self.out.nl()
+ self.printText("removed projects : \n")
+ self.out.nl()
+ for project in diff["removed"]:
+ self.printProject("\t%s" % (_RelPath(project)))
+ self.printText(" at revision ")
+ self.printRevision(project.revisionExpr)
+ self.out.nl()
+
+ if diff["missing"]:
+ self.out.nl()
+ self.printText("missing projects : \n")
+ self.out.nl()
+ for project in diff["missing"]:
+ self.printProject("\t%s" % (_RelPath(project)))
+ self.printText(" at revision ")
+ self.printRevision(project.revisionExpr)
+ self.out.nl()
+
+ if diff["changed"]:
+ self.out.nl()
+ self.printText("changed projects : \n")
+ self.out.nl()
+ for project, otherProject in diff["changed"]:
+ self.printProject("\t%s" % (_RelPath(project)))
+ self.printText(" changed from ")
+ self.printRevision(project.revisionExpr)
+ self.printText(" to ")
+ self.printRevision(otherProject.revisionExpr)
+ self.out.nl()
+ self._printLogs(
+ project,
+ otherProject,
+ raw=False,
+ color=color,
+ pretty_format=pretty_format,
+ )
+ self.out.nl()
+
+ if diff["unreachable"]:
+ self.out.nl()
+ self.printText("projects with unreachable revisions : \n")
+ self.out.nl()
+ for project, otherProject in diff["unreachable"]:
+ self.printProject("\t%s " % (_RelPath(project)))
+ self.printRevision(project.revisionExpr)
+ self.printText(" or ")
+ self.printRevision(otherProject.revisionExpr)
+ self.printText(" not found")
+ self.out.nl()
+
+ def _printLogs(
+ self, project, otherProject, raw=False, color=True, pretty_format=None
+ ):
+ logs = project.getAddedAndRemovedLogs(
+ otherProject,
+ oneline=(pretty_format is None),
+ color=color,
+ pretty_format=pretty_format,
+ )
+ if logs["removed"]:
+ removedLogs = logs["removed"].split("\n")
+ for log in removedLogs:
+ if log.strip():
+ if raw:
+ self.printText(" R " + log)
+ self.out.nl()
+ else:
+ self.printRemoved("\t\t[-] ")
+ self.printText(log)
+ self.out.nl()
+
+ if logs["added"]:
+ addedLogs = logs["added"].split("\n")
+ for log in addedLogs:
+ if log.strip():
+ if raw:
+ self.printText(" A " + log)
+ self.out.nl()
+ else:
+ self.printAdded("\t\t[+] ")
+ self.printText(log)
+ self.out.nl()
+
+ def ValidateOptions(self, opt, args):
+ if not args or len(args) > 2:
+ self.OptionParser.error("missing manifests to diff")
+ if opt.this_manifest_only is False:
+ raise self.OptionParser.error(
+ "`diffmanifest` only supports the current tree"
+ )
+
+ def Execute(self, opt, args):
+ self.out = _Coloring(self.client.globalConfig)
+ self.printText = self.out.nofmt_printer("text")
+ if opt.color:
+ self.printProject = self.out.nofmt_printer("project", attr="bold")
+ self.printAdded = self.out.nofmt_printer(
+ "green", fg="green", attr="bold"
+ )
+ self.printRemoved = self.out.nofmt_printer(
+ "red", fg="red", attr="bold"
+ )
+ self.printRevision = self.out.nofmt_printer("revision", fg="yellow")
+ else:
+ self.printProject = (
+ self.printAdded
+ ) = self.printRemoved = self.printRevision = self.printText
+
+ manifest1 = RepoClient(self.repodir)
+ manifest1.Override(args[0], load_local_manifests=False)
+ if len(args) == 1:
+ manifest2 = self.manifest
+ else:
+ manifest2 = RepoClient(self.repodir)
+ manifest2.Override(args[1], load_local_manifests=False)
+
+ diff = manifest1.projectsDiff(manifest2)
+ if opt.raw:
+ self._printRawDiff(
+ diff,
+ pretty_format=opt.pretty_format,
+ local=opt.this_manifest_only,
+ )
+ else:
+ self._printDiff(
+ diff,
+ color=opt.color,
+ pretty_format=opt.pretty_format,
+ local=opt.this_manifest_only,
+ )
diff --git a/subcmds/download.py b/subcmds/download.py
index 1582484..d81d1f8 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -18,143 +18,187 @@
from command import Command
from error import GitError, NoSuchProjectError
-CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
+CHANGE_RE = re.compile(r"^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$")
class Download(Command):
- COMMON = True
- helpSummary = "Download and checkout a change"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Download and checkout a change"
+ helpUsage = """
%prog {[project] change[/patchset]}...
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command downloads a change from the review system and
makes it available in your project's local working directory.
If no project is specified try to use current directory as a project.
"""
- def _Options(self, p):
- p.add_option('-b', '--branch',
- help='create a new branch first')
- p.add_option('-c', '--cherry-pick',
- dest='cherrypick', action='store_true',
- help="cherry-pick instead of checkout")
- p.add_option('-x', '--record-origin', action='store_true',
- help='pass -x when cherry-picking')
- p.add_option('-r', '--revert',
- dest='revert', action='store_true',
- help="revert instead of checkout")
- p.add_option('-f', '--ff-only',
- dest='ffonly', action='store_true',
- help="force fast-forward merge")
+ def _Options(self, p):
+ p.add_option("-b", "--branch", help="create a new branch first")
+ p.add_option(
+ "-c",
+ "--cherry-pick",
+ dest="cherrypick",
+ action="store_true",
+ help="cherry-pick instead of checkout",
+ )
+ p.add_option(
+ "-x",
+ "--record-origin",
+ action="store_true",
+ help="pass -x when cherry-picking",
+ )
+ p.add_option(
+ "-r",
+ "--revert",
+ dest="revert",
+ action="store_true",
+ help="revert instead of checkout",
+ )
+ p.add_option(
+ "-f",
+ "--ff-only",
+ dest="ffonly",
+ action="store_true",
+ help="force fast-forward merge",
+ )
- def _ParseChangeIds(self, opt, args):
- if not args:
- self.Usage()
+ def _ParseChangeIds(self, opt, args):
+ if not args:
+ self.Usage()
- to_get = []
- project = None
+ to_get = []
+ project = None
- for a in args:
- m = CHANGE_RE.match(a)
- if m:
- if not project:
- project = self.GetProjects(".")[0]
- print('Defaulting to cwd project', project.name)
- chg_id = int(m.group(1))
- if m.group(2):
- ps_id = int(m.group(2))
- else:
- ps_id = 1
- refs = 'refs/changes/%2.2d/%d/' % (chg_id % 100, chg_id)
- output = project._LsRemote(refs + '*')
- if output:
- regex = refs + r'(\d+)'
- rcomp = re.compile(regex, re.I)
- for line in output.splitlines():
- match = rcomp.search(line)
- if match:
- ps_id = max(int(match.group(1)), ps_id)
- to_get.append((project, chg_id, ps_id))
- else:
- projects = self.GetProjects([a], all_manifests=not opt.this_manifest_only)
- if len(projects) > 1:
- # If the cwd is one of the projects, assume they want that.
- try:
- project = self.GetProjects('.')[0]
- except NoSuchProjectError:
- project = None
- if project not in projects:
- print('error: %s matches too many projects; please re-run inside '
- 'the project checkout.' % (a,), file=sys.stderr)
- for project in projects:
- print(' %s/ @ %s' % (project.RelPath(local=opt.this_manifest_only),
- project.revisionExpr), file=sys.stderr)
- sys.exit(1)
- else:
- project = projects[0]
- print('Defaulting to cwd project', project.name)
- return to_get
+ for a in args:
+ m = CHANGE_RE.match(a)
+ if m:
+ if not project:
+ project = self.GetProjects(".")[0]
+ print("Defaulting to cwd project", project.name)
+ chg_id = int(m.group(1))
+ if m.group(2):
+ ps_id = int(m.group(2))
+ else:
+ ps_id = 1
+ refs = "refs/changes/%2.2d/%d/" % (chg_id % 100, chg_id)
+ output = project._LsRemote(refs + "*")
+ if output:
+ regex = refs + r"(\d+)"
+ rcomp = re.compile(regex, re.I)
+ for line in output.splitlines():
+ match = rcomp.search(line)
+ if match:
+ ps_id = max(int(match.group(1)), ps_id)
+ to_get.append((project, chg_id, ps_id))
+ else:
+ projects = self.GetProjects(
+ [a], all_manifests=not opt.this_manifest_only
+ )
+ if len(projects) > 1:
+ # If the cwd is one of the projects, assume they want that.
+ try:
+ project = self.GetProjects(".")[0]
+ except NoSuchProjectError:
+ project = None
+ if project not in projects:
+ print(
+ "error: %s matches too many projects; please "
+ "re-run inside the project checkout." % (a,),
+ file=sys.stderr,
+ )
+ for project in projects:
+ print(
+ " %s/ @ %s"
+ % (
+ project.RelPath(
+ local=opt.this_manifest_only
+ ),
+ project.revisionExpr,
+ ),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ else:
+ project = projects[0]
+ print("Defaulting to cwd project", project.name)
+ return to_get
- def ValidateOptions(self, opt, args):
- if opt.record_origin:
- if not opt.cherrypick:
- self.OptionParser.error('-x only makes sense with --cherry-pick')
+ def ValidateOptions(self, opt, args):
+ if opt.record_origin:
+ if not opt.cherrypick:
+ self.OptionParser.error(
+ "-x only makes sense with --cherry-pick"
+ )
- if opt.ffonly:
- self.OptionParser.error('-x and --ff are mutually exclusive options')
+ if opt.ffonly:
+ self.OptionParser.error(
+ "-x and --ff are mutually exclusive options"
+ )
- def Execute(self, opt, args):
- for project, change_id, ps_id in self._ParseChangeIds(opt, args):
- dl = project.DownloadPatchSet(change_id, ps_id)
- if not dl:
- print('[%s] change %d/%d not found'
- % (project.name, change_id, ps_id),
- file=sys.stderr)
- sys.exit(1)
+ def Execute(self, opt, args):
+ for project, change_id, ps_id in self._ParseChangeIds(opt, args):
+ dl = project.DownloadPatchSet(change_id, ps_id)
+ if not dl:
+ print(
+ "[%s] change %d/%d not found"
+ % (project.name, change_id, ps_id),
+ file=sys.stderr,
+ )
+ sys.exit(1)
- if not opt.revert and not dl.commits:
- print('[%s] change %d/%d has already been merged'
- % (project.name, change_id, ps_id),
- file=sys.stderr)
- continue
+ if not opt.revert and not dl.commits:
+ print(
+ "[%s] change %d/%d has already been merged"
+ % (project.name, change_id, ps_id),
+ file=sys.stderr,
+ )
+ continue
- if len(dl.commits) > 1:
- print('[%s] %d/%d depends on %d unmerged changes:'
- % (project.name, change_id, ps_id, len(dl.commits)),
- file=sys.stderr)
- for c in dl.commits:
- print(' %s' % (c), file=sys.stderr)
+ if len(dl.commits) > 1:
+ print(
+ "[%s] %d/%d depends on %d unmerged changes:"
+ % (project.name, change_id, ps_id, len(dl.commits)),
+ file=sys.stderr,
+ )
+ for c in dl.commits:
+ print(" %s" % (c), file=sys.stderr)
- if opt.cherrypick:
- mode = 'cherry-pick'
- elif opt.revert:
- mode = 'revert'
- elif opt.ffonly:
- mode = 'fast-forward merge'
- else:
- mode = 'checkout'
+ if opt.cherrypick:
+ mode = "cherry-pick"
+ elif opt.revert:
+ mode = "revert"
+ elif opt.ffonly:
+ mode = "fast-forward merge"
+ else:
+ mode = "checkout"
- # We'll combine the branch+checkout operation, but all the rest need a
- # dedicated branch start.
- if opt.branch and mode != 'checkout':
- project.StartBranch(opt.branch)
+ # We'll combine the branch+checkout operation, but all the rest need
+ # a dedicated branch start.
+ if opt.branch and mode != "checkout":
+ project.StartBranch(opt.branch)
- try:
- if opt.cherrypick:
- project._CherryPick(dl.commit, ffonly=opt.ffonly,
- record_origin=opt.record_origin)
- elif opt.revert:
- project._Revert(dl.commit)
- elif opt.ffonly:
- project._FastForward(dl.commit, ffonly=True)
- else:
- if opt.branch:
- project.StartBranch(opt.branch, revision=dl.commit)
- else:
- project._Checkout(dl.commit)
+ try:
+ if opt.cherrypick:
+ project._CherryPick(
+ dl.commit,
+ ffonly=opt.ffonly,
+ record_origin=opt.record_origin,
+ )
+ elif opt.revert:
+ project._Revert(dl.commit)
+ elif opt.ffonly:
+ project._FastForward(dl.commit, ffonly=True)
+ else:
+ if opt.branch:
+ project.StartBranch(opt.branch, revision=dl.commit)
+ else:
+ project._Checkout(dl.commit)
- except GitError:
- print('[%s] Could not complete the %s of %s'
- % (project.name, mode, dl.commit), file=sys.stderr)
- sys.exit(1)
+ except GitError:
+ print(
+ "[%s] Could not complete the %s of %s"
+ % (project.name, mode, dl.commit),
+ file=sys.stderr,
+ )
+ sys.exit(1)
diff --git a/subcmds/forall.py b/subcmds/forall.py
index f9f34e3..0a89735 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -23,31 +23,36 @@
import subprocess
from color import Coloring
-from command import DEFAULT_LOCAL_JOBS, Command, MirrorSafeCommand, WORKER_BATCH_SIZE
+from command import (
+ DEFAULT_LOCAL_JOBS,
+ Command,
+ MirrorSafeCommand,
+ WORKER_BATCH_SIZE,
+)
from error import ManifestInvalidRevisionError
_CAN_COLOR = [
- 'branch',
- 'diff',
- 'grep',
- 'log',
+ "branch",
+ "diff",
+ "grep",
+ "log",
]
class ForallColoring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'forall')
- self.project = self.printer('project', attr='bold')
+ def __init__(self, config):
+ Coloring.__init__(self, config, "forall")
+ self.project = self.printer("project", attr="bold")
class Forall(Command, MirrorSafeCommand):
- COMMON = False
- helpSummary = "Run a shell command in each project"
- helpUsage = """
+ COMMON = False
+ helpSummary = "Run a shell command in each project"
+ helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
%prog -r str1 [str2] ... -c <command> [<arg>...]
"""
- helpDescription = """
+ helpDescription = """
Executes the same shell command in each project.
The -r option allows running the command only on projects matching
@@ -125,236 +130,285 @@
If -e is used, when a command exits unsuccessfully, '%prog' will abort
without iterating through the remaining projects.
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- @staticmethod
- def _cmd_option(option, _opt_str, _value, parser):
- setattr(parser.values, option.dest, list(parser.rargs))
- while parser.rargs:
- del parser.rargs[0]
+ @staticmethod
+ def _cmd_option(option, _opt_str, _value, parser):
+ setattr(parser.values, option.dest, list(parser.rargs))
+ while parser.rargs:
+ del parser.rargs[0]
- def _Options(self, p):
- p.add_option('-r', '--regex',
- dest='regex', action='store_true',
- help='execute the command only on projects matching regex or wildcard expression')
- p.add_option('-i', '--inverse-regex',
- dest='inverse_regex', action='store_true',
- help='execute the command only on projects not matching regex or '
- 'wildcard expression')
- p.add_option('-g', '--groups',
- dest='groups',
- help='execute the command only on projects matching the specified groups')
- p.add_option('-c', '--command',
- help='command (and arguments) to execute',
- dest='command',
- action='callback',
- callback=self._cmd_option)
- p.add_option('-e', '--abort-on-errors',
- dest='abort_on_errors', action='store_true',
- help='abort if a command exits unsuccessfully')
- p.add_option('--ignore-missing', action='store_true',
- help='silently skip & do not exit non-zero due missing '
- 'checkouts')
+ def _Options(self, p):
+ p.add_option(
+ "-r",
+ "--regex",
+ dest="regex",
+ action="store_true",
+ help="execute the command only on projects matching regex or "
+ "wildcard expression",
+ )
+ p.add_option(
+ "-i",
+ "--inverse-regex",
+ dest="inverse_regex",
+ action="store_true",
+ help="execute the command only on projects not matching regex or "
+ "wildcard expression",
+ )
+ p.add_option(
+ "-g",
+ "--groups",
+ dest="groups",
+ help="execute the command only on projects matching the specified "
+ "groups",
+ )
+ p.add_option(
+ "-c",
+ "--command",
+ help="command (and arguments) to execute",
+ dest="command",
+ action="callback",
+ callback=self._cmd_option,
+ )
+ p.add_option(
+ "-e",
+ "--abort-on-errors",
+ dest="abort_on_errors",
+ action="store_true",
+ help="abort if a command exits unsuccessfully",
+ )
+ p.add_option(
+ "--ignore-missing",
+ action="store_true",
+ help="silently skip & do not exit non-zero due missing "
+ "checkouts",
+ )
- g = p.get_option_group('--quiet')
- g.add_option('-p',
- dest='project_header', action='store_true',
- help='show project headers before output')
- p.add_option('--interactive',
- action='store_true',
- help='force interactive usage')
+ g = p.get_option_group("--quiet")
+ g.add_option(
+ "-p",
+ dest="project_header",
+ action="store_true",
+ help="show project headers before output",
+ )
+ p.add_option(
+ "--interactive", action="store_true", help="force interactive usage"
+ )
- def WantPager(self, opt):
- return opt.project_header and opt.jobs == 1
+ def WantPager(self, opt):
+ return opt.project_header and opt.jobs == 1
- def ValidateOptions(self, opt, args):
- if not opt.command:
- self.Usage()
+ def ValidateOptions(self, opt, args):
+ if not opt.command:
+ self.Usage()
- def Execute(self, opt, args):
- cmd = [opt.command[0]]
- all_trees = not opt.this_manifest_only
+ def Execute(self, opt, args):
+ cmd = [opt.command[0]]
+ all_trees = not opt.this_manifest_only
- shell = True
- if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
- shell = False
+ shell = True
+ if re.compile(r"^[a-z0-9A-Z_/\.-]+$").match(cmd[0]):
+ shell = False
- if shell:
- cmd.append(cmd[0])
- cmd.extend(opt.command[1:])
+ if shell:
+ cmd.append(cmd[0])
+ cmd.extend(opt.command[1:])
- # Historically, forall operated interactively, and in serial. If the user
- # has selected 1 job, then default to interacive mode.
- if opt.jobs == 1:
- opt.interactive = True
+ # Historically, forall operated interactively, and in serial. If the
+        # user has selected 1 job, then default to interactive mode.
+ if opt.jobs == 1:
+ opt.interactive = True
- if opt.project_header \
- and not shell \
- and cmd[0] == 'git':
- # If this is a direct git command that can enable colorized
- # output and the user prefers coloring, add --color into the
- # command line because we are going to wrap the command into
- # a pipe and git won't know coloring should activate.
- #
- for cn in cmd[1:]:
- if not cn.startswith('-'):
- break
- else:
- cn = None
- if cn and cn in _CAN_COLOR:
- class ColorCmd(Coloring):
- def __init__(self, config, cmd):
- Coloring.__init__(self, config, cmd)
- if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
- cmd.insert(cmd.index(cn) + 1, '--color')
+ if opt.project_header and not shell and cmd[0] == "git":
+ # If this is a direct git command that can enable colorized
+ # output and the user prefers coloring, add --color into the
+ # command line because we are going to wrap the command into
+ # a pipe and git won't know coloring should activate.
+ #
+ for cn in cmd[1:]:
+ if not cn.startswith("-"):
+ break
+ else:
+ cn = None
+ if cn and cn in _CAN_COLOR:
- mirror = self.manifest.IsMirror
- rc = 0
+ class ColorCmd(Coloring):
+ def __init__(self, config, cmd):
+ Coloring.__init__(self, config, cmd)
- smart_sync_manifest_name = "smart_sync_override.xml"
- smart_sync_manifest_path = os.path.join(
- self.manifest.manifestProject.worktree, smart_sync_manifest_name)
+ if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
+ cmd.insert(cmd.index(cn) + 1, "--color")
- if os.path.isfile(smart_sync_manifest_path):
- self.manifest.Override(smart_sync_manifest_path)
+ mirror = self.manifest.IsMirror
+ rc = 0
- if opt.regex:
- projects = self.FindProjects(args, all_manifests=all_trees)
- elif opt.inverse_regex:
- projects = self.FindProjects(args, inverse=True, all_manifests=all_trees)
- else:
- projects = self.GetProjects(args, groups=opt.groups, all_manifests=all_trees)
+ smart_sync_manifest_name = "smart_sync_override.xml"
+ smart_sync_manifest_path = os.path.join(
+ self.manifest.manifestProject.worktree, smart_sync_manifest_name
+ )
- os.environ['REPO_COUNT'] = str(len(projects))
+ if os.path.isfile(smart_sync_manifest_path):
+ self.manifest.Override(smart_sync_manifest_path)
- try:
- config = self.manifest.manifestProject.config
- with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
- results_it = pool.imap(
- functools.partial(DoWorkWrapper, mirror, opt, cmd, shell, config),
- enumerate(projects),
- chunksize=WORKER_BATCH_SIZE)
- first = True
- for (r, output) in results_it:
- if output:
- if first:
- first = False
- elif opt.project_header:
- print()
- # To simplify the DoWorkWrapper, take care of automatic newlines.
- end = '\n'
- if output[-1] == '\n':
- end = ''
- print(output, end=end)
- rc = rc or r
- if r != 0 and opt.abort_on_errors:
- raise Exception('Aborting due to previous error')
- except (KeyboardInterrupt, WorkerKeyboardInterrupt):
- # Catch KeyboardInterrupt raised inside and outside of workers
- rc = rc or errno.EINTR
- except Exception as e:
- # Catch any other exceptions raised
- print('forall: unhandled error, terminating the pool: %s: %s' %
- (type(e).__name__, e),
- file=sys.stderr)
- rc = rc or getattr(e, 'errno', 1)
- if rc != 0:
- sys.exit(rc)
+ if opt.regex:
+ projects = self.FindProjects(args, all_manifests=all_trees)
+ elif opt.inverse_regex:
+ projects = self.FindProjects(
+ args, inverse=True, all_manifests=all_trees
+ )
+ else:
+ projects = self.GetProjects(
+ args, groups=opt.groups, all_manifests=all_trees
+ )
+
+ os.environ["REPO_COUNT"] = str(len(projects))
+
+ try:
+ config = self.manifest.manifestProject.config
+ with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
+ results_it = pool.imap(
+ functools.partial(
+ DoWorkWrapper, mirror, opt, cmd, shell, config
+ ),
+ enumerate(projects),
+ chunksize=WORKER_BATCH_SIZE,
+ )
+ first = True
+ for r, output in results_it:
+ if output:
+ if first:
+ first = False
+ elif opt.project_header:
+ print()
+ # To simplify the DoWorkWrapper, take care of automatic
+ # newlines.
+ end = "\n"
+ if output[-1] == "\n":
+ end = ""
+ print(output, end=end)
+ rc = rc or r
+ if r != 0 and opt.abort_on_errors:
+ raise Exception("Aborting due to previous error")
+ except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+ # Catch KeyboardInterrupt raised inside and outside of workers
+ rc = rc or errno.EINTR
+ except Exception as e:
+ # Catch any other exceptions raised
+ print(
+ "forall: unhandled error, terminating the pool: %s: %s"
+ % (type(e).__name__, e),
+ file=sys.stderr,
+ )
+ rc = rc or getattr(e, "errno", 1)
+ if rc != 0:
+ sys.exit(rc)
class WorkerKeyboardInterrupt(Exception):
- """ Keyboard interrupt exception for worker processes. """
+ """Keyboard interrupt exception for worker processes."""
def InitWorker():
- signal.signal(signal.SIGINT, signal.SIG_IGN)
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
- """ A wrapper around the DoWork() method.
+ """A wrapper around the DoWork() method.
- Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
- ``Exception``-based exception to stop it flooding the console with stacktraces
- and making the parent hang indefinitely.
+ Catch the KeyboardInterrupt exceptions here and re-raise them as a
+ different, ``Exception``-based exception to stop it flooding the console
+ with stacktraces and making the parent hang indefinitely.
- """
- cnt, project = args
- try:
- return DoWork(project, mirror, opt, cmd, shell, cnt, config)
- except KeyboardInterrupt:
- print('%s: Worker interrupted' % project.name)
- raise WorkerKeyboardInterrupt()
+ """
+ cnt, project = args
+ try:
+ return DoWork(project, mirror, opt, cmd, shell, cnt, config)
+ except KeyboardInterrupt:
+ print("%s: Worker interrupted" % project.name)
+ raise WorkerKeyboardInterrupt()
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
- env = os.environ.copy()
+ env = os.environ.copy()
- def setenv(name, val):
- if val is None:
- val = ''
- env[name] = val
+ def setenv(name, val):
+ if val is None:
+ val = ""
+ env[name] = val
- setenv('REPO_PROJECT', project.name)
- setenv('REPO_OUTERPATH', project.manifest.path_prefix)
- setenv('REPO_INNERPATH', project.relpath)
- setenv('REPO_PATH', project.RelPath(local=opt.this_manifest_only))
- setenv('REPO_REMOTE', project.remote.name)
- try:
- # If we aren't in a fully synced state and we don't have the ref the manifest
- # wants, then this will fail. Ignore it for the purposes of this code.
- lrev = '' if mirror else project.GetRevisionId()
- except ManifestInvalidRevisionError:
- lrev = ''
- setenv('REPO_LREV', lrev)
- setenv('REPO_RREV', project.revisionExpr)
- setenv('REPO_UPSTREAM', project.upstream)
- setenv('REPO_DEST_BRANCH', project.dest_branch)
- setenv('REPO_I', str(cnt + 1))
- for annotation in project.annotations:
- setenv("REPO__%s" % (annotation.name), annotation.value)
+ setenv("REPO_PROJECT", project.name)
+ setenv("REPO_OUTERPATH", project.manifest.path_prefix)
+ setenv("REPO_INNERPATH", project.relpath)
+ setenv("REPO_PATH", project.RelPath(local=opt.this_manifest_only))
+ setenv("REPO_REMOTE", project.remote.name)
+ try:
+ # If we aren't in a fully synced state and we don't have the ref the
+ # manifest wants, then this will fail. Ignore it for the purposes of
+ # this code.
+ lrev = "" if mirror else project.GetRevisionId()
+ except ManifestInvalidRevisionError:
+ lrev = ""
+ setenv("REPO_LREV", lrev)
+ setenv("REPO_RREV", project.revisionExpr)
+ setenv("REPO_UPSTREAM", project.upstream)
+ setenv("REPO_DEST_BRANCH", project.dest_branch)
+ setenv("REPO_I", str(cnt + 1))
+ for annotation in project.annotations:
+ setenv("REPO__%s" % (annotation.name), annotation.value)
- if mirror:
- setenv('GIT_DIR', project.gitdir)
- cwd = project.gitdir
- else:
- cwd = project.worktree
+ if mirror:
+ setenv("GIT_DIR", project.gitdir)
+ cwd = project.gitdir
+ else:
+ cwd = project.worktree
- if not os.path.exists(cwd):
- # Allow the user to silently ignore missing checkouts so they can run on
- # partial checkouts (good for infra recovery tools).
- if opt.ignore_missing:
- return (0, '')
+ if not os.path.exists(cwd):
+ # Allow the user to silently ignore missing checkouts so they can run on
+ # partial checkouts (good for infra recovery tools).
+ if opt.ignore_missing:
+ return (0, "")
- output = ''
- if ((opt.project_header and opt.verbose)
- or not opt.project_header):
- output = 'skipping %s/' % project.RelPath(local=opt.this_manifest_only)
- return (1, output)
+ output = ""
+ if (opt.project_header and opt.verbose) or not opt.project_header:
+ output = "skipping %s/" % project.RelPath(
+ local=opt.this_manifest_only
+ )
+ return (1, output)
- if opt.verbose:
- stderr = subprocess.STDOUT
- else:
- stderr = subprocess.DEVNULL
+ if opt.verbose:
+ stderr = subprocess.STDOUT
+ else:
+ stderr = subprocess.DEVNULL
- stdin = None if opt.interactive else subprocess.DEVNULL
+ stdin = None if opt.interactive else subprocess.DEVNULL
- result = subprocess.run(
- cmd, cwd=cwd, shell=shell, env=env, check=False,
- encoding='utf-8', errors='replace',
- stdin=stdin, stdout=subprocess.PIPE, stderr=stderr)
+ result = subprocess.run(
+ cmd,
+ cwd=cwd,
+ shell=shell,
+ env=env,
+ check=False,
+ encoding="utf-8",
+ errors="replace",
+ stdin=stdin,
+ stdout=subprocess.PIPE,
+ stderr=stderr,
+ )
- output = result.stdout
- if opt.project_header:
- if output:
- buf = io.StringIO()
- out = ForallColoring(config)
- out.redirect(buf)
- if mirror:
- project_header_path = project.name
- else:
- project_header_path = project.RelPath(local=opt.this_manifest_only)
- out.project('project %s/' % project_header_path)
- out.nl()
- buf.write(output)
- output = buf.getvalue()
- return (result.returncode, output)
+ output = result.stdout
+ if opt.project_header:
+ if output:
+ buf = io.StringIO()
+ out = ForallColoring(config)
+ out.redirect(buf)
+ if mirror:
+ project_header_path = project.name
+ else:
+ project_header_path = project.RelPath(
+ local=opt.this_manifest_only
+ )
+ out.project("project %s/" % project_header_path)
+ out.nl()
+ buf.write(output)
+ output = buf.getvalue()
+ return (result.returncode, output)
diff --git a/subcmds/gitc_delete.py b/subcmds/gitc_delete.py
index df74946..ae9d4d1 100644
--- a/subcmds/gitc_delete.py
+++ b/subcmds/gitc_delete.py
@@ -19,28 +19,34 @@
class GitcDelete(Command, GitcClientCommand):
- COMMON = True
- visible_everywhere = False
- helpSummary = "Delete a GITC Client."
- helpUsage = """
+ COMMON = True
+ visible_everywhere = False
+ helpSummary = "Delete a GITC Client."
+ helpUsage = """
%prog
"""
- helpDescription = """
+ helpDescription = """
This subcommand deletes the current GITC client, deleting the GITC manifest
and all locally downloaded sources.
"""
- def _Options(self, p):
- p.add_option('-f', '--force',
- dest='force', action='store_true',
- help='force the deletion (no prompt)')
+ def _Options(self, p):
+ p.add_option(
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ help="force the deletion (no prompt)",
+ )
- def Execute(self, opt, args):
- if not opt.force:
- prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' %
- self.gitc_manifest.gitc_client_name)
- response = input(prompt).lower()
- if not response == 'yes':
- print('Response was not "yes"\n Exiting...')
- sys.exit(1)
- platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
+ def Execute(self, opt, args):
+ if not opt.force:
+ prompt = (
+ "This will delete GITC client: %s\nAre you sure? (yes/no) "
+ % self.gitc_manifest.gitc_client_name
+ )
+ response = input(prompt).lower()
+ if not response == "yes":
+ print('Response was not "yes"\n Exiting...')
+ sys.exit(1)
+ platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
diff --git a/subcmds/gitc_init.py b/subcmds/gitc_init.py
index e3a5813..54791d5 100644
--- a/subcmds/gitc_init.py
+++ b/subcmds/gitc_init.py
@@ -23,13 +23,13 @@
class GitcInit(init.Init, GitcAvailableCommand):
- COMMON = True
- MULTI_MANIFEST_SUPPORT = False
- helpSummary = "Initialize a GITC Client."
- helpUsage = """
+ COMMON = True
+ MULTI_MANIFEST_SUPPORT = False
+ helpSummary = "Initialize a GITC Client."
+ helpUsage = """
%prog [options] [client name]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command is ran to initialize a new GITC client for use
with the GITC file system.
@@ -47,30 +47,41 @@
use for this GITC client.
"""
- def _Options(self, p):
- super()._Options(p, gitc_init=True)
+ def _Options(self, p):
+ super()._Options(p, gitc_init=True)
- def Execute(self, opt, args):
- gitc_client = gitc_utils.parse_clientdir(os.getcwd())
- if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client):
- print('fatal: Please update your repo command. See go/gitc for instructions.',
- file=sys.stderr)
- sys.exit(1)
- self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
- gitc_client)
- super().Execute(opt, args)
+ def Execute(self, opt, args):
+ gitc_client = gitc_utils.parse_clientdir(os.getcwd())
+ if not gitc_client or (
+ opt.gitc_client and gitc_client != opt.gitc_client
+ ):
+ print(
+ "fatal: Please update your repo command. See go/gitc for "
+ "instructions.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ self.client_dir = os.path.join(
+ gitc_utils.get_gitc_manifest_dir(), gitc_client
+ )
+ super().Execute(opt, args)
- manifest_file = self.manifest.manifestFile
- if opt.manifest_file:
- if not os.path.exists(opt.manifest_file):
- print('fatal: Specified manifest file %s does not exist.' %
- opt.manifest_file)
- sys.exit(1)
- manifest_file = opt.manifest_file
+ manifest_file = self.manifest.manifestFile
+ if opt.manifest_file:
+ if not os.path.exists(opt.manifest_file):
+ print(
+ "fatal: Specified manifest file %s does not exist."
+ % opt.manifest_file
+ )
+ sys.exit(1)
+ manifest_file = opt.manifest_file
- manifest = GitcManifest(self.repodir, os.path.join(self.client_dir,
- '.manifest'))
- manifest.Override(manifest_file)
- gitc_utils.generate_gitc_manifest(None, manifest)
- print('Please run `cd %s` to view your GITC client.' %
- os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client))
+ manifest = GitcManifest(
+ self.repodir, os.path.join(self.client_dir, ".manifest")
+ )
+ manifest.Override(manifest_file)
+ gitc_utils.generate_gitc_manifest(None, manifest)
+ print(
+ "Please run `cd %s` to view your GITC client."
+ % os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client)
+ )
diff --git a/subcmds/grep.py b/subcmds/grep.py
index 93c9ae5..5cd3376 100644
--- a/subcmds/grep.py
+++ b/subcmds/grep.py
@@ -22,19 +22,19 @@
class GrepColoring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'grep')
- self.project = self.printer('project', attr='bold')
- self.fail = self.printer('fail', fg='red')
+ def __init__(self, config):
+ Coloring.__init__(self, config, "grep")
+ self.project = self.printer("project", attr="bold")
+ self.fail = self.printer("fail", fg="red")
class Grep(PagedCommand):
- COMMON = True
- helpSummary = "Print lines matching a pattern"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Print lines matching a pattern"
+ helpUsage = """
%prog {pattern | -e pattern} [<project>...]
"""
- helpDescription = """
+ helpDescription = """
Search for the specified patterns in all project files.
# Boolean Options
@@ -62,215 +62,304 @@
repo grep --all-match -e NODE -e Unexpected
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- @staticmethod
- def _carry_option(_option, opt_str, value, parser):
- pt = getattr(parser.values, 'cmd_argv', None)
- if pt is None:
- pt = []
- setattr(parser.values, 'cmd_argv', pt)
+ @staticmethod
+ def _carry_option(_option, opt_str, value, parser):
+ pt = getattr(parser.values, "cmd_argv", None)
+ if pt is None:
+ pt = []
+ setattr(parser.values, "cmd_argv", pt)
- if opt_str == '-(':
- pt.append('(')
- elif opt_str == '-)':
- pt.append(')')
- else:
- pt.append(opt_str)
+ if opt_str == "-(":
+ pt.append("(")
+ elif opt_str == "-)":
+ pt.append(")")
+ else:
+ pt.append(opt_str)
- if value is not None:
- pt.append(value)
+ if value is not None:
+ pt.append(value)
- def _CommonOptions(self, p):
- """Override common options slightly."""
- super()._CommonOptions(p, opt_v=False)
+ def _CommonOptions(self, p):
+ """Override common options slightly."""
+ super()._CommonOptions(p, opt_v=False)
- def _Options(self, p):
- g = p.add_option_group('Sources')
- g.add_option('--cached',
- action='callback', callback=self._carry_option,
- help='Search the index, instead of the work tree')
- g.add_option('-r', '--revision',
- dest='revision', action='append', metavar='TREEish',
- help='Search TREEish, instead of the work tree')
+ def _Options(self, p):
+ g = p.add_option_group("Sources")
+ g.add_option(
+ "--cached",
+ action="callback",
+ callback=self._carry_option,
+ help="Search the index, instead of the work tree",
+ )
+ g.add_option(
+ "-r",
+ "--revision",
+ dest="revision",
+ action="append",
+ metavar="TREEish",
+ help="Search TREEish, instead of the work tree",
+ )
- g = p.add_option_group('Pattern')
- g.add_option('-e',
- action='callback', callback=self._carry_option,
- metavar='PATTERN', type='str',
- help='Pattern to search for')
- g.add_option('-i', '--ignore-case',
- action='callback', callback=self._carry_option,
- help='Ignore case differences')
- g.add_option('-a', '--text',
- action='callback', callback=self._carry_option,
- help="Process binary files as if they were text")
- g.add_option('-I',
- action='callback', callback=self._carry_option,
- help="Don't match the pattern in binary files")
- g.add_option('-w', '--word-regexp',
- action='callback', callback=self._carry_option,
- help='Match the pattern only at word boundaries')
- g.add_option('-v', '--invert-match',
- action='callback', callback=self._carry_option,
- help='Select non-matching lines')
- g.add_option('-G', '--basic-regexp',
- action='callback', callback=self._carry_option,
- help='Use POSIX basic regexp for patterns (default)')
- g.add_option('-E', '--extended-regexp',
- action='callback', callback=self._carry_option,
- help='Use POSIX extended regexp for patterns')
- g.add_option('-F', '--fixed-strings',
- action='callback', callback=self._carry_option,
- help='Use fixed strings (not regexp) for pattern')
+ g = p.add_option_group("Pattern")
+ g.add_option(
+ "-e",
+ action="callback",
+ callback=self._carry_option,
+ metavar="PATTERN",
+ type="str",
+ help="Pattern to search for",
+ )
+ g.add_option(
+ "-i",
+ "--ignore-case",
+ action="callback",
+ callback=self._carry_option,
+ help="Ignore case differences",
+ )
+ g.add_option(
+ "-a",
+ "--text",
+ action="callback",
+ callback=self._carry_option,
+ help="Process binary files as if they were text",
+ )
+ g.add_option(
+ "-I",
+ action="callback",
+ callback=self._carry_option,
+ help="Don't match the pattern in binary files",
+ )
+ g.add_option(
+ "-w",
+ "--word-regexp",
+ action="callback",
+ callback=self._carry_option,
+ help="Match the pattern only at word boundaries",
+ )
+ g.add_option(
+ "-v",
+ "--invert-match",
+ action="callback",
+ callback=self._carry_option,
+ help="Select non-matching lines",
+ )
+ g.add_option(
+ "-G",
+ "--basic-regexp",
+ action="callback",
+ callback=self._carry_option,
+ help="Use POSIX basic regexp for patterns (default)",
+ )
+ g.add_option(
+ "-E",
+ "--extended-regexp",
+ action="callback",
+ callback=self._carry_option,
+ help="Use POSIX extended regexp for patterns",
+ )
+ g.add_option(
+ "-F",
+ "--fixed-strings",
+ action="callback",
+ callback=self._carry_option,
+ help="Use fixed strings (not regexp) for pattern",
+ )
- g = p.add_option_group('Pattern Grouping')
- g.add_option('--all-match',
- action='callback', callback=self._carry_option,
- help='Limit match to lines that have all patterns')
- g.add_option('--and', '--or', '--not',
- action='callback', callback=self._carry_option,
- help='Boolean operators to combine patterns')
- g.add_option('-(', '-)',
- action='callback', callback=self._carry_option,
- help='Boolean operator grouping')
+ g = p.add_option_group("Pattern Grouping")
+ g.add_option(
+ "--all-match",
+ action="callback",
+ callback=self._carry_option,
+ help="Limit match to lines that have all patterns",
+ )
+ g.add_option(
+ "--and",
+ "--or",
+ "--not",
+ action="callback",
+ callback=self._carry_option,
+ help="Boolean operators to combine patterns",
+ )
+ g.add_option(
+ "-(",
+ "-)",
+ action="callback",
+ callback=self._carry_option,
+ help="Boolean operator grouping",
+ )
- g = p.add_option_group('Output')
- g.add_option('-n',
- action='callback', callback=self._carry_option,
- help='Prefix the line number to matching lines')
- g.add_option('-C',
- action='callback', callback=self._carry_option,
- metavar='CONTEXT', type='str',
- help='Show CONTEXT lines around match')
- g.add_option('-B',
- action='callback', callback=self._carry_option,
- metavar='CONTEXT', type='str',
- help='Show CONTEXT lines before match')
- g.add_option('-A',
- action='callback', callback=self._carry_option,
- metavar='CONTEXT', type='str',
- help='Show CONTEXT lines after match')
- g.add_option('-l', '--name-only', '--files-with-matches',
- action='callback', callback=self._carry_option,
- help='Show only file names containing matching lines')
- g.add_option('-L', '--files-without-match',
- action='callback', callback=self._carry_option,
- help='Show only file names not containing matching lines')
+ g = p.add_option_group("Output")
+ g.add_option(
+ "-n",
+ action="callback",
+ callback=self._carry_option,
+ help="Prefix the line number to matching lines",
+ )
+ g.add_option(
+ "-C",
+ action="callback",
+ callback=self._carry_option,
+ metavar="CONTEXT",
+ type="str",
+ help="Show CONTEXT lines around match",
+ )
+ g.add_option(
+ "-B",
+ action="callback",
+ callback=self._carry_option,
+ metavar="CONTEXT",
+ type="str",
+ help="Show CONTEXT lines before match",
+ )
+ g.add_option(
+ "-A",
+ action="callback",
+ callback=self._carry_option,
+ metavar="CONTEXT",
+ type="str",
+ help="Show CONTEXT lines after match",
+ )
+ g.add_option(
+ "-l",
+ "--name-only",
+ "--files-with-matches",
+ action="callback",
+ callback=self._carry_option,
+ help="Show only file names containing matching lines",
+ )
+ g.add_option(
+ "-L",
+ "--files-without-match",
+ action="callback",
+ callback=self._carry_option,
+ help="Show only file names not containing matching lines",
+ )
- def _ExecuteOne(self, cmd_argv, project):
- """Process one project."""
- try:
- p = GitCommand(project,
- cmd_argv,
- bare=False,
- capture_stdout=True,
- capture_stderr=True)
- except GitError as e:
- return (project, -1, None, str(e))
+ def _ExecuteOne(self, cmd_argv, project):
+ """Process one project."""
+ try:
+ p = GitCommand(
+ project,
+ cmd_argv,
+ bare=False,
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ except GitError as e:
+ return (project, -1, None, str(e))
- return (project, p.Wait(), p.stdout, p.stderr)
+ return (project, p.Wait(), p.stdout, p.stderr)
- @staticmethod
- def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
- git_failed = False
- bad_rev = False
- have_match = False
- _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
+ @staticmethod
+ def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
+ git_failed = False
+ bad_rev = False
+ have_match = False
+ _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
- for project, rc, stdout, stderr in results:
- if rc < 0:
- git_failed = True
- out.project('--- project %s ---' % _RelPath(project))
- out.nl()
- out.fail('%s', stderr)
- out.nl()
- continue
+ for project, rc, stdout, stderr in results:
+ if rc < 0:
+ git_failed = True
+ out.project("--- project %s ---" % _RelPath(project))
+ out.nl()
+ out.fail("%s", stderr)
+ out.nl()
+ continue
- if rc:
- # no results
- if stderr:
- if have_rev and 'fatal: ambiguous argument' in stderr:
- bad_rev = True
- else:
- out.project('--- project %s ---' % _RelPath(project))
- out.nl()
- out.fail('%s', stderr.strip())
- out.nl()
- continue
- have_match = True
+ if rc:
+ # no results
+ if stderr:
+ if have_rev and "fatal: ambiguous argument" in stderr:
+ bad_rev = True
+ else:
+ out.project("--- project %s ---" % _RelPath(project))
+ out.nl()
+ out.fail("%s", stderr.strip())
+ out.nl()
+ continue
+ have_match = True
- # We cut the last element, to avoid a blank line.
- r = stdout.split('\n')
- r = r[0:-1]
+ # We cut the last element, to avoid a blank line.
+ r = stdout.split("\n")
+ r = r[0:-1]
- if have_rev and full_name:
- for line in r:
- rev, line = line.split(':', 1)
- out.write("%s", rev)
- out.write(':')
- out.project(_RelPath(project))
- out.write('/')
- out.write("%s", line)
- out.nl()
- elif full_name:
- for line in r:
- out.project(_RelPath(project))
- out.write('/')
- out.write("%s", line)
- out.nl()
- else:
- for line in r:
- print(line)
+ if have_rev and full_name:
+ for line in r:
+ rev, line = line.split(":", 1)
+ out.write("%s", rev)
+ out.write(":")
+ out.project(_RelPath(project))
+ out.write("/")
+ out.write("%s", line)
+ out.nl()
+ elif full_name:
+ for line in r:
+ out.project(_RelPath(project))
+ out.write("/")
+ out.write("%s", line)
+ out.nl()
+ else:
+ for line in r:
+ print(line)
- return (git_failed, bad_rev, have_match)
+ return (git_failed, bad_rev, have_match)
- def Execute(self, opt, args):
- out = GrepColoring(self.manifest.manifestProject.config)
+ def Execute(self, opt, args):
+ out = GrepColoring(self.manifest.manifestProject.config)
- cmd_argv = ['grep']
- if out.is_on:
- cmd_argv.append('--color')
- cmd_argv.extend(getattr(opt, 'cmd_argv', []))
+ cmd_argv = ["grep"]
+ if out.is_on:
+ cmd_argv.append("--color")
+ cmd_argv.extend(getattr(opt, "cmd_argv", []))
- if '-e' not in cmd_argv:
- if not args:
- self.Usage()
- cmd_argv.append('-e')
- cmd_argv.append(args[0])
- args = args[1:]
+ if "-e" not in cmd_argv:
+ if not args:
+ self.Usage()
+ cmd_argv.append("-e")
+ cmd_argv.append(args[0])
+ args = args[1:]
- projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
+ projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- full_name = False
- if len(projects) > 1:
- cmd_argv.append('--full-name')
- full_name = True
+ full_name = False
+ if len(projects) > 1:
+ cmd_argv.append("--full-name")
+ full_name = True
- have_rev = False
- if opt.revision:
- if '--cached' in cmd_argv:
- print('fatal: cannot combine --cached and --revision', file=sys.stderr)
- sys.exit(1)
- have_rev = True
- cmd_argv.extend(opt.revision)
- cmd_argv.append('--')
+ have_rev = False
+ if opt.revision:
+ if "--cached" in cmd_argv:
+ print(
+ "fatal: cannot combine --cached and --revision",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ have_rev = True
+ cmd_argv.extend(opt.revision)
+ cmd_argv.append("--")
- git_failed, bad_rev, have_match = self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._ExecuteOne, cmd_argv),
- projects,
- callback=functools.partial(self._ProcessResults, full_name, have_rev, opt),
- output=out,
- ordered=True)
+ git_failed, bad_rev, have_match = self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(self._ExecuteOne, cmd_argv),
+ projects,
+ callback=functools.partial(
+ self._ProcessResults, full_name, have_rev, opt
+ ),
+ output=out,
+ ordered=True,
+ )
- if git_failed:
- sys.exit(1)
- elif have_match:
- sys.exit(0)
- elif have_rev and bad_rev:
- for r in opt.revision:
- print("error: can't search revision %s" % r, file=sys.stderr)
- sys.exit(1)
- else:
- sys.exit(1)
+ if git_failed:
+ sys.exit(1)
+ elif have_match:
+ sys.exit(0)
+ elif have_rev and bad_rev:
+ for r in opt.revision:
+ print("error: can't search revision %s" % r, file=sys.stderr)
+ sys.exit(1)
+ else:
+ sys.exit(1)
diff --git a/subcmds/help.py b/subcmds/help.py
index 1ad391d..50a4804 100644
--- a/subcmds/help.py
+++ b/subcmds/help.py
@@ -18,163 +18,193 @@
from subcmds import all_commands
from color import Coloring
-from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
+from command import (
+ PagedCommand,
+ MirrorSafeCommand,
+ GitcAvailableCommand,
+ GitcClientCommand,
+)
import gitc_utils
from wrapper import Wrapper
class Help(PagedCommand, MirrorSafeCommand):
- COMMON = False
- helpSummary = "Display detailed help on a command"
- helpUsage = """
+ COMMON = False
+ helpSummary = "Display detailed help on a command"
+ helpUsage = """
%prog [--all|command]
"""
- helpDescription = """
+ helpDescription = """
Displays detailed usage information about a command.
"""
- def _PrintCommands(self, commandNames):
- """Helper to display |commandNames| summaries."""
- maxlen = 0
- for name in commandNames:
- maxlen = max(maxlen, len(name))
- fmt = ' %%-%ds %%s' % maxlen
+ def _PrintCommands(self, commandNames):
+ """Helper to display |commandNames| summaries."""
+ maxlen = 0
+ for name in commandNames:
+ maxlen = max(maxlen, len(name))
+ fmt = " %%-%ds %%s" % maxlen
- for name in commandNames:
- command = all_commands[name]()
- try:
- summary = command.helpSummary.strip()
- except AttributeError:
- summary = ''
- print(fmt % (name, summary))
+ for name in commandNames:
+ command = all_commands[name]()
+ try:
+ summary = command.helpSummary.strip()
+ except AttributeError:
+ summary = ""
+ print(fmt % (name, summary))
- def _PrintAllCommands(self):
- print('usage: repo COMMAND [ARGS]')
- self.PrintAllCommandsBody()
+ def _PrintAllCommands(self):
+ print("usage: repo COMMAND [ARGS]")
+ self.PrintAllCommandsBody()
- def PrintAllCommandsBody(self):
- print('The complete list of recognized repo commands is:')
- commandNames = list(sorted(all_commands))
- self._PrintCommands(commandNames)
- print("See 'repo help <command>' for more information on a "
- 'specific command.')
- print('Bug reports:', Wrapper().BUG_URL)
+ def PrintAllCommandsBody(self):
+ print("The complete list of recognized repo commands is:")
+ commandNames = list(sorted(all_commands))
+ self._PrintCommands(commandNames)
+ print(
+ "See 'repo help <command>' for more information on a "
+ "specific command."
+ )
+ print("Bug reports:", Wrapper().BUG_URL)
- def _PrintCommonCommands(self):
- print('usage: repo COMMAND [ARGS]')
- self.PrintCommonCommandsBody()
+ def _PrintCommonCommands(self):
+ print("usage: repo COMMAND [ARGS]")
+ self.PrintCommonCommandsBody()
- def PrintCommonCommandsBody(self):
- print('The most commonly used repo commands are:')
+ def PrintCommonCommandsBody(self):
+ print("The most commonly used repo commands are:")
- def gitc_supported(cmd):
- if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
- return True
- if self.client.isGitcClient:
- return True
- if isinstance(cmd, GitcClientCommand):
- return False
- if gitc_utils.get_gitc_manifest_dir():
- return True
- return False
+ def gitc_supported(cmd):
+ if not isinstance(cmd, GitcAvailableCommand) and not isinstance(
+ cmd, GitcClientCommand
+ ):
+ return True
+ if self.client.isGitcClient:
+ return True
+ if isinstance(cmd, GitcClientCommand):
+ return False
+ if gitc_utils.get_gitc_manifest_dir():
+ return True
+ return False
- commandNames = list(sorted([name
- for name, command in all_commands.items()
- if command.COMMON and gitc_supported(command)]))
- self._PrintCommands(commandNames)
+ commandNames = list(
+ sorted(
+ [
+ name
+ for name, command in all_commands.items()
+ if command.COMMON and gitc_supported(command)
+ ]
+ )
+ )
+ self._PrintCommands(commandNames)
- print(
- "See 'repo help <command>' for more information on a specific command.\n"
- "See 'repo help --all' for a complete list of recognized commands.")
- print('Bug reports:', Wrapper().BUG_URL)
+ print(
+ "See 'repo help <command>' for more information on a specific "
+ "command.\nSee 'repo help --all' for a complete list of recognized "
+ "commands."
+ )
+ print("Bug reports:", Wrapper().BUG_URL)
- def _PrintCommandHelp(self, cmd, header_prefix=''):
- class _Out(Coloring):
- def __init__(self, gc):
- Coloring.__init__(self, gc, 'help')
- self.heading = self.printer('heading', attr='bold')
- self._first = True
+ def _PrintCommandHelp(self, cmd, header_prefix=""):
+ class _Out(Coloring):
+ def __init__(self, gc):
+ Coloring.__init__(self, gc, "help")
+ self.heading = self.printer("heading", attr="bold")
+ self._first = True
- def _PrintSection(self, heading, bodyAttr):
- try:
- body = getattr(cmd, bodyAttr)
- except AttributeError:
- return
- if body == '' or body is None:
- return
+ def _PrintSection(self, heading, bodyAttr):
+ try:
+ body = getattr(cmd, bodyAttr)
+ except AttributeError:
+ return
+ if body == "" or body is None:
+ return
- if not self._first:
- self.nl()
- self._first = False
+ if not self._first:
+ self.nl()
+ self._first = False
- self.heading('%s%s', header_prefix, heading)
- self.nl()
- self.nl()
+ self.heading("%s%s", header_prefix, heading)
+ self.nl()
+ self.nl()
- me = 'repo %s' % cmd.NAME
- body = body.strip()
- body = body.replace('%prog', me)
+ me = "repo %s" % cmd.NAME
+ body = body.strip()
+ body = body.replace("%prog", me)
- # Extract the title, but skip any trailing {#anchors}.
- asciidoc_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$')
- for para in body.split("\n\n"):
- if para.startswith(' '):
- self.write('%s', para)
- self.nl()
- self.nl()
- continue
+ # Extract the title, but skip any trailing {#anchors}.
+ asciidoc_hdr = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$")
+ for para in body.split("\n\n"):
+ if para.startswith(" "):
+ self.write("%s", para)
+ self.nl()
+ self.nl()
+ continue
- m = asciidoc_hdr.match(para)
- if m:
- self.heading('%s%s', header_prefix, m.group(1))
- self.nl()
- self.nl()
- continue
+ m = asciidoc_hdr.match(para)
+ if m:
+ self.heading("%s%s", header_prefix, m.group(1))
+ self.nl()
+ self.nl()
+ continue
- lines = textwrap.wrap(para.replace(' ', ' '), width=80,
- break_long_words=False, break_on_hyphens=False)
- for line in lines:
- self.write('%s', line)
- self.nl()
- self.nl()
+ lines = textwrap.wrap(
+ para.replace(" ", " "),
+ width=80,
+ break_long_words=False,
+ break_on_hyphens=False,
+ )
+ for line in lines:
+ self.write("%s", line)
+ self.nl()
+ self.nl()
- out = _Out(self.client.globalConfig)
- out._PrintSection('Summary', 'helpSummary')
- cmd.OptionParser.print_help()
- out._PrintSection('Description', 'helpDescription')
+ out = _Out(self.client.globalConfig)
+ out._PrintSection("Summary", "helpSummary")
+ cmd.OptionParser.print_help()
+ out._PrintSection("Description", "helpDescription")
- def _PrintAllCommandHelp(self):
- for name in sorted(all_commands):
- cmd = all_commands[name](manifest=self.manifest)
- self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
+ def _PrintAllCommandHelp(self):
+ for name in sorted(all_commands):
+ cmd = all_commands[name](manifest=self.manifest)
+ self._PrintCommandHelp(cmd, header_prefix="[%s] " % (name,))
- def _Options(self, p):
- p.add_option('-a', '--all',
- dest='show_all', action='store_true',
- help='show the complete list of commands')
- p.add_option('--help-all',
- dest='show_all_help', action='store_true',
- help='show the --help of all commands')
+ def _Options(self, p):
+ p.add_option(
+ "-a",
+ "--all",
+ dest="show_all",
+ action="store_true",
+ help="show the complete list of commands",
+ )
+ p.add_option(
+ "--help-all",
+ dest="show_all_help",
+ action="store_true",
+ help="show the --help of all commands",
+ )
- def Execute(self, opt, args):
- if len(args) == 0:
- if opt.show_all_help:
- self._PrintAllCommandHelp()
- elif opt.show_all:
- self._PrintAllCommands()
- else:
- self._PrintCommonCommands()
+ def Execute(self, opt, args):
+ if len(args) == 0:
+ if opt.show_all_help:
+ self._PrintAllCommandHelp()
+ elif opt.show_all:
+ self._PrintAllCommands()
+ else:
+ self._PrintCommonCommands()
- elif len(args) == 1:
- name = args[0]
+ elif len(args) == 1:
+ name = args[0]
- try:
- cmd = all_commands[name](manifest=self.manifest)
- except KeyError:
- print("repo: '%s' is not a repo command." % name, file=sys.stderr)
- sys.exit(1)
+ try:
+ cmd = all_commands[name](manifest=self.manifest)
+ except KeyError:
+ print(
+ "repo: '%s' is not a repo command." % name, file=sys.stderr
+ )
+ sys.exit(1)
- self._PrintCommandHelp(cmd)
+ self._PrintCommandHelp(cmd)
- else:
- self._PrintCommandHelp(self)
+ else:
+ self._PrintCommandHelp(self)
diff --git a/subcmds/info.py b/subcmds/info.py
index baa4c5b..6e7f3ed 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -20,203 +20,234 @@
class _Coloring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, "status")
+ def __init__(self, config):
+ Coloring.__init__(self, config, "status")
class Info(PagedCommand):
- COMMON = True
- helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
- helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
+ COMMON = True
+ helpSummary = (
+ "Get info on the manifest branch, current branch or unmerged branches"
+ )
+ helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
- def _Options(self, p):
- p.add_option('-d', '--diff',
- dest='all', action='store_true',
- help="show full info and commit diff including remote branches")
- p.add_option('-o', '--overview',
- dest='overview', action='store_true',
- help='show overview of all local commits')
- p.add_option('-c', '--current-branch',
- dest="current_branch", action="store_true",
- help="consider only checked out branches")
- p.add_option('--no-current-branch',
- dest='current_branch', action='store_false',
- help='consider all local branches')
- # Turn this into a warning & remove this someday.
- p.add_option('-b',
- dest='current_branch', action='store_true',
- help=optparse.SUPPRESS_HELP)
- p.add_option('-l', '--local-only',
- dest="local", action="store_true",
- help="disable all remote operations")
+ def _Options(self, p):
+ p.add_option(
+ "-d",
+ "--diff",
+ dest="all",
+ action="store_true",
+ help="show full info and commit diff including remote branches",
+ )
+ p.add_option(
+ "-o",
+ "--overview",
+ dest="overview",
+ action="store_true",
+ help="show overview of all local commits",
+ )
+ p.add_option(
+ "-c",
+ "--current-branch",
+ dest="current_branch",
+ action="store_true",
+ help="consider only checked out branches",
+ )
+ p.add_option(
+ "--no-current-branch",
+ dest="current_branch",
+ action="store_false",
+ help="consider all local branches",
+ )
+ # Turn this into a warning & remove this someday.
+ p.add_option(
+ "-b",
+ dest="current_branch",
+ action="store_true",
+ help=optparse.SUPPRESS_HELP,
+ )
+ p.add_option(
+ "-l",
+ "--local-only",
+ dest="local",
+ action="store_true",
+ help="disable all remote operations",
+ )
- def Execute(self, opt, args):
- self.out = _Coloring(self.client.globalConfig)
- self.heading = self.out.printer('heading', attr='bold')
- self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
- self.redtext = self.out.printer('redtext', fg='red')
- self.sha = self.out.printer("sha", fg='yellow')
- self.text = self.out.nofmt_printer('text')
- self.dimtext = self.out.printer('dimtext', attr='dim')
+ def Execute(self, opt, args):
+ self.out = _Coloring(self.client.globalConfig)
+ self.heading = self.out.printer("heading", attr="bold")
+ self.headtext = self.out.nofmt_printer("headtext", fg="yellow")
+ self.redtext = self.out.printer("redtext", fg="red")
+ self.sha = self.out.printer("sha", fg="yellow")
+ self.text = self.out.nofmt_printer("text")
+ self.dimtext = self.out.printer("dimtext", attr="dim")
- self.opt = opt
+ self.opt = opt
- if not opt.this_manifest_only:
- self.manifest = self.manifest.outer_client
- manifestConfig = self.manifest.manifestProject.config
- mergeBranch = manifestConfig.GetBranch("default").merge
- manifestGroups = self.manifest.GetGroupsStr()
+ if not opt.this_manifest_only:
+ self.manifest = self.manifest.outer_client
+ manifestConfig = self.manifest.manifestProject.config
+ mergeBranch = manifestConfig.GetBranch("default").merge
+ manifestGroups = self.manifest.GetGroupsStr()
- self.heading("Manifest branch: ")
- if self.manifest.default.revisionExpr:
- self.headtext(self.manifest.default.revisionExpr)
- self.out.nl()
- self.heading("Manifest merge branch: ")
- self.headtext(mergeBranch)
- self.out.nl()
- self.heading("Manifest groups: ")
- self.headtext(manifestGroups)
- self.out.nl()
-
- self.printSeparator()
-
- if not opt.overview:
- self._printDiffInfo(opt, args)
- else:
- self._printCommitOverview(opt, args)
-
- def printSeparator(self):
- self.text("----------------------------")
- self.out.nl()
-
- def _printDiffInfo(self, opt, args):
- # We let exceptions bubble up to main as they'll be well structured.
- projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
-
- for p in projs:
- self.heading("Project: ")
- self.headtext(p.name)
- self.out.nl()
-
- self.heading("Mount path: ")
- self.headtext(p.worktree)
- self.out.nl()
-
- self.heading("Current revision: ")
- self.headtext(p.GetRevisionId())
- self.out.nl()
-
- currentBranch = p.CurrentBranch
- if currentBranch:
- self.heading('Current branch: ')
- self.headtext(currentBranch)
+ self.heading("Manifest branch: ")
+ if self.manifest.default.revisionExpr:
+ self.headtext(self.manifest.default.revisionExpr)
+ self.out.nl()
+ self.heading("Manifest merge branch: ")
+ self.headtext(mergeBranch)
+ self.out.nl()
+ self.heading("Manifest groups: ")
+ self.headtext(manifestGroups)
self.out.nl()
- self.heading("Manifest revision: ")
- self.headtext(p.revisionExpr)
- self.out.nl()
+ self.printSeparator()
- localBranches = list(p.GetBranches().keys())
- self.heading("Local Branches: ")
- self.redtext(str(len(localBranches)))
- if localBranches:
- self.text(" [")
- self.text(", ".join(localBranches))
- self.text("]")
- self.out.nl()
+ if not opt.overview:
+ self._printDiffInfo(opt, args)
+ else:
+ self._printCommitOverview(opt, args)
- if self.opt.all:
- self.findRemoteLocalDiff(p)
-
- self.printSeparator()
-
- def findRemoteLocalDiff(self, project):
- # Fetch all the latest commits.
- if not self.opt.local:
- project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
-
- branch = self.manifest.manifestProject.config.GetBranch('default').merge
- if branch.startswith(R_HEADS):
- branch = branch[len(R_HEADS):]
- logTarget = R_M + branch
-
- bareTmp = project.bare_git._bare
- project.bare_git._bare = False
- localCommits = project.bare_git.rev_list(
- '--abbrev=8',
- '--abbrev-commit',
- '--pretty=oneline',
- logTarget + "..",
- '--')
-
- originCommits = project.bare_git.rev_list(
- '--abbrev=8',
- '--abbrev-commit',
- '--pretty=oneline',
- ".." + logTarget,
- '--')
- project.bare_git._bare = bareTmp
-
- self.heading("Local Commits: ")
- self.redtext(str(len(localCommits)))
- self.dimtext(" (on current branch)")
- self.out.nl()
-
- for c in localCommits:
- split = c.split()
- self.sha(split[0] + " ")
- self.text(" ".join(split[1:]))
- self.out.nl()
-
- self.printSeparator()
-
- self.heading("Remote Commits: ")
- self.redtext(str(len(originCommits)))
- self.out.nl()
-
- for c in originCommits:
- split = c.split()
- self.sha(split[0] + " ")
- self.text(" ".join(split[1:]))
- self.out.nl()
-
- def _printCommitOverview(self, opt, args):
- all_branches = []
- for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only):
- br = [project.GetUploadableBranch(x)
- for x in project.GetBranches()]
- br = [x for x in br if x]
- if self.opt.current_branch:
- br = [x for x in br if x.name == project.CurrentBranch]
- all_branches.extend(br)
-
- if not all_branches:
- return
-
- self.out.nl()
- self.heading('Projects Overview')
- project = None
-
- for branch in all_branches:
- if project != branch.project:
- project = branch.project
- self.out.nl()
- self.headtext(project.RelPath(local=opt.this_manifest_only))
+ def printSeparator(self):
+ self.text("----------------------------")
self.out.nl()
- commits = branch.commits
- date = branch.date
- self.text('%s %-33s (%2d commit%s, %s)' % (
- branch.name == project.CurrentBranch and '*' or ' ',
- branch.name,
- len(commits),
- len(commits) != 1 and 's' or '',
- date))
- self.out.nl()
+ def _printDiffInfo(self, opt, args):
+ # We let exceptions bubble up to main as they'll be well structured.
+ projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
- for commit in commits:
- split = commit.split()
- self.text('{0:38}{1} '.format('', '-'))
- self.sha(split[0] + " ")
- self.text(" ".join(split[1:]))
+ for p in projs:
+ self.heading("Project: ")
+ self.headtext(p.name)
+ self.out.nl()
+
+ self.heading("Mount path: ")
+ self.headtext(p.worktree)
+ self.out.nl()
+
+ self.heading("Current revision: ")
+ self.headtext(p.GetRevisionId())
+ self.out.nl()
+
+ currentBranch = p.CurrentBranch
+ if currentBranch:
+ self.heading("Current branch: ")
+ self.headtext(currentBranch)
+ self.out.nl()
+
+ self.heading("Manifest revision: ")
+ self.headtext(p.revisionExpr)
+ self.out.nl()
+
+ localBranches = list(p.GetBranches().keys())
+ self.heading("Local Branches: ")
+ self.redtext(str(len(localBranches)))
+ if localBranches:
+ self.text(" [")
+ self.text(", ".join(localBranches))
+ self.text("]")
+ self.out.nl()
+
+ if self.opt.all:
+ self.findRemoteLocalDiff(p)
+
+ self.printSeparator()
+
+ def findRemoteLocalDiff(self, project):
+ # Fetch all the latest commits.
+ if not self.opt.local:
+ project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
+
+ branch = self.manifest.manifestProject.config.GetBranch("default").merge
+ if branch.startswith(R_HEADS):
+ branch = branch[len(R_HEADS) :]
+ logTarget = R_M + branch
+
+ bareTmp = project.bare_git._bare
+ project.bare_git._bare = False
+ localCommits = project.bare_git.rev_list(
+ "--abbrev=8",
+ "--abbrev-commit",
+ "--pretty=oneline",
+ logTarget + "..",
+ "--",
+ )
+
+ originCommits = project.bare_git.rev_list(
+ "--abbrev=8",
+ "--abbrev-commit",
+ "--pretty=oneline",
+ ".." + logTarget,
+ "--",
+ )
+ project.bare_git._bare = bareTmp
+
+ self.heading("Local Commits: ")
+ self.redtext(str(len(localCommits)))
+ self.dimtext(" (on current branch)")
self.out.nl()
+
+ for c in localCommits:
+ split = c.split()
+ self.sha(split[0] + " ")
+ self.text(" ".join(split[1:]))
+ self.out.nl()
+
+ self.printSeparator()
+
+ self.heading("Remote Commits: ")
+ self.redtext(str(len(originCommits)))
+ self.out.nl()
+
+ for c in originCommits:
+ split = c.split()
+ self.sha(split[0] + " ")
+ self.text(" ".join(split[1:]))
+ self.out.nl()
+
+ def _printCommitOverview(self, opt, args):
+ all_branches = []
+ for project in self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ ):
+ br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
+ br = [x for x in br if x]
+ if self.opt.current_branch:
+ br = [x for x in br if x.name == project.CurrentBranch]
+ all_branches.extend(br)
+
+ if not all_branches:
+ return
+
+ self.out.nl()
+ self.heading("Projects Overview")
+ project = None
+
+ for branch in all_branches:
+ if project != branch.project:
+ project = branch.project
+ self.out.nl()
+ self.headtext(project.RelPath(local=opt.this_manifest_only))
+ self.out.nl()
+
+ commits = branch.commits
+ date = branch.date
+ self.text(
+ "%s %-33s (%2d commit%s, %s)"
+ % (
+ branch.name == project.CurrentBranch and "*" or " ",
+ branch.name,
+ len(commits),
+ len(commits) != 1 and "s" or "",
+ date,
+ )
+ )
+ self.out.nl()
+
+ for commit in commits:
+ split = commit.split()
+ self.text("{0:38}{1} ".format("", "-"))
+ self.sha(split[0] + " ")
+ self.text(" ".join(split[1:]))
+ self.out.nl()
diff --git a/subcmds/init.py b/subcmds/init.py
index 813fa59..b5c2e3b 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -22,13 +22,13 @@
class Init(InteractiveCommand, MirrorSafeCommand):
- COMMON = True
- MULTI_MANIFEST_SUPPORT = True
- helpSummary = "Initialize a repo client checkout in the current directory"
- helpUsage = """
+ COMMON = True
+ MULTI_MANIFEST_SUPPORT = True
+ helpSummary = "Initialize a repo client checkout in the current directory"
+ helpUsage = """
%prog [options] [manifest url]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command is run once to install and initialize repo.
The latest repo source code and manifest collection is downloaded
from the server and is installed in the .repo/ directory in the
@@ -77,243 +77,303 @@
to update the working directory files.
"""
- def _CommonOptions(self, p):
- """Disable due to re-use of Wrapper()."""
+ def _CommonOptions(self, p):
+ """Disable due to re-use of Wrapper()."""
- def _Options(self, p, gitc_init=False):
- Wrapper().InitParser(p, gitc_init=gitc_init)
- m = p.add_option_group('Multi-manifest')
- m.add_option('--outer-manifest', action='store_true', default=True,
- help='operate starting at the outermost manifest')
- m.add_option('--no-outer-manifest', dest='outer_manifest',
- action='store_false', help='do not operate on outer manifests')
- m.add_option('--this-manifest-only', action='store_true', default=None,
- help='only operate on this (sub)manifest')
- m.add_option('--no-this-manifest-only', '--all-manifests',
- dest='this_manifest_only', action='store_false',
- help='operate on this manifest and its submanifests')
+ def _Options(self, p, gitc_init=False):
+ Wrapper().InitParser(p, gitc_init=gitc_init)
+ m = p.add_option_group("Multi-manifest")
+ m.add_option(
+ "--outer-manifest",
+ action="store_true",
+ default=True,
+ help="operate starting at the outermost manifest",
+ )
+ m.add_option(
+ "--no-outer-manifest",
+ dest="outer_manifest",
+ action="store_false",
+ help="do not operate on outer manifests",
+ )
+ m.add_option(
+ "--this-manifest-only",
+ action="store_true",
+ default=None,
+ help="only operate on this (sub)manifest",
+ )
+ m.add_option(
+ "--no-this-manifest-only",
+ "--all-manifests",
+ dest="this_manifest_only",
+ action="store_false",
+ help="operate on this manifest and its submanifests",
+ )
- def _RegisteredEnvironmentOptions(self):
- return {'REPO_MANIFEST_URL': 'manifest_url',
- 'REPO_MIRROR_LOCATION': 'reference'}
+ def _RegisteredEnvironmentOptions(self):
+ return {
+ "REPO_MANIFEST_URL": "manifest_url",
+ "REPO_MIRROR_LOCATION": "reference",
+ }
- def _SyncManifest(self, opt):
- """Call manifestProject.Sync with arguments from opt.
+ def _SyncManifest(self, opt):
+ """Call manifestProject.Sync with arguments from opt.
- Args:
- opt: options from optparse.
- """
- # Normally this value is set when instantiating the project, but the
- # manifest project is special and is created when instantiating the
- # manifest which happens before we parse options.
- self.manifest.manifestProject.clone_depth = opt.manifest_depth
- if not self.manifest.manifestProject.Sync(
- manifest_url=opt.manifest_url,
- manifest_branch=opt.manifest_branch,
- standalone_manifest=opt.standalone_manifest,
- groups=opt.groups,
- platform=opt.platform,
- mirror=opt.mirror,
- dissociate=opt.dissociate,
- reference=opt.reference,
- worktree=opt.worktree,
- submodules=opt.submodules,
- archive=opt.archive,
- partial_clone=opt.partial_clone,
- clone_filter=opt.clone_filter,
- partial_clone_exclude=opt.partial_clone_exclude,
- clone_bundle=opt.clone_bundle,
- git_lfs=opt.git_lfs,
- use_superproject=opt.use_superproject,
- verbose=opt.verbose,
- current_branch_only=opt.current_branch_only,
- tags=opt.tags,
- depth=opt.depth,
- git_event_log=self.git_event_log,
- manifest_name=opt.manifest_name):
- sys.exit(1)
+ Args:
+ opt: options from optparse.
+ """
+ # Normally this value is set when instantiating the project, but the
+ # manifest project is special and is created when instantiating the
+ # manifest which happens before we parse options.
+ self.manifest.manifestProject.clone_depth = opt.manifest_depth
+ if not self.manifest.manifestProject.Sync(
+ manifest_url=opt.manifest_url,
+ manifest_branch=opt.manifest_branch,
+ standalone_manifest=opt.standalone_manifest,
+ groups=opt.groups,
+ platform=opt.platform,
+ mirror=opt.mirror,
+ dissociate=opt.dissociate,
+ reference=opt.reference,
+ worktree=opt.worktree,
+ submodules=opt.submodules,
+ archive=opt.archive,
+ partial_clone=opt.partial_clone,
+ clone_filter=opt.clone_filter,
+ partial_clone_exclude=opt.partial_clone_exclude,
+ clone_bundle=opt.clone_bundle,
+ git_lfs=opt.git_lfs,
+ use_superproject=opt.use_superproject,
+ verbose=opt.verbose,
+ current_branch_only=opt.current_branch_only,
+ tags=opt.tags,
+ depth=opt.depth,
+ git_event_log=self.git_event_log,
+ manifest_name=opt.manifest_name,
+ ):
+ sys.exit(1)
- def _Prompt(self, prompt, value):
- print('%-10s [%s]: ' % (prompt, value), end='', flush=True)
- a = sys.stdin.readline().strip()
- if a == '':
- return value
- return a
+ def _Prompt(self, prompt, value):
+ print("%-10s [%s]: " % (prompt, value), end="", flush=True)
+ a = sys.stdin.readline().strip()
+ if a == "":
+ return value
+ return a
- def _ShouldConfigureUser(self, opt, existing_checkout):
- gc = self.client.globalConfig
- mp = self.manifest.manifestProject
+ def _ShouldConfigureUser(self, opt, existing_checkout):
+ gc = self.client.globalConfig
+ mp = self.manifest.manifestProject
- # If we don't have local settings, get from global.
- if not mp.config.Has('user.name') or not mp.config.Has('user.email'):
- if not gc.Has('user.name') or not gc.Has('user.email'):
- return True
+ # If we don't have local settings, get from global.
+ if not mp.config.Has("user.name") or not mp.config.Has("user.email"):
+ if not gc.Has("user.name") or not gc.Has("user.email"):
+ return True
- mp.config.SetString('user.name', gc.GetString('user.name'))
- mp.config.SetString('user.email', gc.GetString('user.email'))
+ mp.config.SetString("user.name", gc.GetString("user.name"))
+ mp.config.SetString("user.email", gc.GetString("user.email"))
- if not opt.quiet and not existing_checkout or opt.verbose:
- print()
- print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
- mp.config.GetString('user.email')))
- print("If you want to change this, please re-run 'repo init' with --config-name")
- return False
+ if not opt.quiet and not existing_checkout or opt.verbose:
+ print()
+ print(
+ "Your identity is: %s <%s>"
+ % (
+ mp.config.GetString("user.name"),
+ mp.config.GetString("user.email"),
+ )
+ )
+ print(
+ "If you want to change this, please re-run 'repo init' with "
+ "--config-name"
+ )
+ return False
- def _ConfigureUser(self, opt):
- mp = self.manifest.manifestProject
+ def _ConfigureUser(self, opt):
+ mp = self.manifest.manifestProject
- while True:
- if not opt.quiet:
+ while True:
+ if not opt.quiet:
+ print()
+ name = self._Prompt("Your Name", mp.UserName)
+ email = self._Prompt("Your Email", mp.UserEmail)
+
+ if not opt.quiet:
+ print()
+ print("Your identity is: %s <%s>" % (name, email))
+ print("is this correct [y/N]? ", end="", flush=True)
+ a = sys.stdin.readline().strip().lower()
+ if a in ("yes", "y", "t", "true"):
+ break
+
+ if name != mp.UserName:
+ mp.config.SetString("user.name", name)
+ if email != mp.UserEmail:
+ mp.config.SetString("user.email", email)
+
+ def _HasColorSet(self, gc):
+ for n in ["ui", "diff", "status"]:
+ if gc.Has("color.%s" % n):
+ return True
+ return False
+
+ def _ConfigureColor(self):
+ gc = self.client.globalConfig
+ if self._HasColorSet(gc):
+ return
+
+ class _Test(Coloring):
+ def __init__(self):
+ Coloring.__init__(self, gc, "test color display")
+ self._on = True
+
+ out = _Test()
+
print()
- name = self._Prompt('Your Name', mp.UserName)
- email = self._Prompt('Your Email', mp.UserEmail)
+ print("Testing colorized output (for 'repo diff', 'repo status'):")
- if not opt.quiet:
+ for c in ["black", "red", "green", "yellow", "blue", "magenta", "cyan"]:
+ out.write(" ")
+ out.printer(fg=c)(" %-6s ", c)
+ out.write(" ")
+ out.printer(fg="white", bg="black")(" %s " % "white")
+ out.nl()
+
+ for c in ["bold", "dim", "ul", "reverse"]:
+ out.write(" ")
+ out.printer(fg="black", attr=c)(" %-6s ", c)
+ out.nl()
+
+ print(
+ "Enable color display in this user account (y/N)? ",
+ end="",
+ flush=True,
+ )
+ a = sys.stdin.readline().strip().lower()
+ if a in ("y", "yes", "t", "true", "on"):
+ gc.SetString("color.ui", "auto")
+
+ def _DisplayResult(self):
+ if self.manifest.IsMirror:
+ init_type = "mirror "
+ else:
+ init_type = ""
+
print()
- print('Your identity is: %s <%s>' % (name, email))
- print('is this correct [y/N]? ', end='', flush=True)
- a = sys.stdin.readline().strip().lower()
- if a in ('yes', 'y', 't', 'true'):
- break
+ print(
+ "repo %shas been initialized in %s"
+ % (init_type, self.manifest.topdir)
+ )
- if name != mp.UserName:
- mp.config.SetString('user.name', name)
- if email != mp.UserEmail:
- mp.config.SetString('user.email', email)
+ current_dir = os.getcwd()
+ if current_dir != self.manifest.topdir:
+ print(
+ "If this is not the directory in which you want to initialize "
+ "repo, please run:"
+ )
+ print(" rm -r %s" % os.path.join(self.manifest.topdir, ".repo"))
+ print("and try again.")
- def _HasColorSet(self, gc):
- for n in ['ui', 'diff', 'status']:
- if gc.Has('color.%s' % n):
- return True
- return False
+ def ValidateOptions(self, opt, args):
+ if opt.reference:
+ opt.reference = os.path.expanduser(opt.reference)
- def _ConfigureColor(self):
- gc = self.client.globalConfig
- if self._HasColorSet(gc):
- return
+ # Check this here, else manifest will be tagged "not new" and init won't
+ # be possible anymore without removing the .repo/manifests directory.
+ if opt.mirror:
+ if opt.archive:
+ self.OptionParser.error(
+ "--mirror and --archive cannot be used " "together."
+ )
+ if opt.use_superproject is not None:
+ self.OptionParser.error(
+ "--mirror and --use-superproject cannot be "
+ "used together."
+ )
+ if opt.archive and opt.use_superproject is not None:
+ self.OptionParser.error(
+ "--archive and --use-superproject cannot be used " "together."
+ )
- class _Test(Coloring):
- def __init__(self):
- Coloring.__init__(self, gc, 'test color display')
- self._on = True
- out = _Test()
+ if opt.standalone_manifest and (
+ opt.manifest_branch or opt.manifest_name != "default.xml"
+ ):
+ self.OptionParser.error(
+ "--manifest-branch and --manifest-name cannot"
+ " be used with --standalone-manifest."
+ )
- print()
- print("Testing colorized output (for 'repo diff', 'repo status'):")
+ if args:
+ if opt.manifest_url:
+ self.OptionParser.error(
+ "--manifest-url option and URL argument both specified: "
+ "only use one to select the manifest URL."
+ )
- for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']:
- out.write(' ')
- out.printer(fg=c)(' %-6s ', c)
- out.write(' ')
- out.printer(fg='white', bg='black')(' %s ' % 'white')
- out.nl()
+ opt.manifest_url = args.pop(0)
- for c in ['bold', 'dim', 'ul', 'reverse']:
- out.write(' ')
- out.printer(fg='black', attr=c)(' %-6s ', c)
- out.nl()
+ if args:
+ self.OptionParser.error("too many arguments to init")
- print('Enable color display in this user account (y/N)? ', end='', flush=True)
- a = sys.stdin.readline().strip().lower()
- if a in ('y', 'yes', 't', 'true', 'on'):
- gc.SetString('color.ui', 'auto')
+ def Execute(self, opt, args):
+ git_require(MIN_GIT_VERSION_HARD, fail=True)
+ if not git_require(MIN_GIT_VERSION_SOFT):
+ print(
+ "repo: warning: git-%s+ will soon be required; please upgrade "
+ "your version of git to maintain support."
+ % (".".join(str(x) for x in MIN_GIT_VERSION_SOFT),),
+ file=sys.stderr,
+ )
- def _DisplayResult(self):
- if self.manifest.IsMirror:
- init_type = 'mirror '
- else:
- init_type = ''
+ rp = self.manifest.repoProject
- print()
- print('repo %shas been initialized in %s' % (init_type, self.manifest.topdir))
+ # Handle new --repo-url requests.
+ if opt.repo_url:
+ remote = rp.GetRemote("origin")
+ remote.url = opt.repo_url
+ remote.Save()
- current_dir = os.getcwd()
- if current_dir != self.manifest.topdir:
- print('If this is not the directory in which you want to initialize '
- 'repo, please run:')
- print(' rm -r %s' % os.path.join(self.manifest.topdir, '.repo'))
- print('and try again.')
+ # Handle new --repo-rev requests.
+ if opt.repo_rev:
+ wrapper = Wrapper()
+ try:
+ remote_ref, rev = wrapper.check_repo_rev(
+ rp.gitdir,
+ opt.repo_rev,
+ repo_verify=opt.repo_verify,
+ quiet=opt.quiet,
+ )
+ except wrapper.CloneFailure:
+ print(
+ "fatal: double check your --repo-rev setting.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ branch = rp.GetBranch("default")
+ branch.merge = remote_ref
+ rp.work_git.reset("--hard", rev)
+ branch.Save()
- def ValidateOptions(self, opt, args):
- if opt.reference:
- opt.reference = os.path.expanduser(opt.reference)
+ if opt.worktree:
+ # Older versions of git supported worktree, but had dangerous gc
+ # bugs.
+ git_require((2, 15, 0), fail=True, msg="git gc worktree corruption")
- # Check this here, else manifest will be tagged "not new" and init won't be
- # possible anymore without removing the .repo/manifests directory.
- if opt.mirror:
- if opt.archive:
- self.OptionParser.error('--mirror and --archive cannot be used '
- 'together.')
- if opt.use_superproject is not None:
- self.OptionParser.error('--mirror and --use-superproject cannot be '
- 'used together.')
- if opt.archive and opt.use_superproject is not None:
- self.OptionParser.error('--archive and --use-superproject cannot be used '
- 'together.')
+ # Provide a short notice that we're reinitializing an existing checkout.
+ # Sometimes developers might not realize that they're in one, or that
+ # repo doesn't do nested checkouts.
+ existing_checkout = self.manifest.manifestProject.Exists
+ if not opt.quiet and existing_checkout:
+ print(
+ "repo: reusing existing repo client checkout in",
+ self.manifest.topdir,
+ )
- if opt.standalone_manifest and (opt.manifest_branch or
- opt.manifest_name != 'default.xml'):
- self.OptionParser.error('--manifest-branch and --manifest-name cannot'
- ' be used with --standalone-manifest.')
+ self._SyncManifest(opt)
- if args:
- if opt.manifest_url:
- self.OptionParser.error(
- '--manifest-url option and URL argument both specified: only use '
- 'one to select the manifest URL.')
+ if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
+ if opt.config_name or self._ShouldConfigureUser(
+ opt, existing_checkout
+ ):
+ self._ConfigureUser(opt)
+ self._ConfigureColor()
- opt.manifest_url = args.pop(0)
-
- if args:
- self.OptionParser.error('too many arguments to init')
-
- def Execute(self, opt, args):
- git_require(MIN_GIT_VERSION_HARD, fail=True)
- if not git_require(MIN_GIT_VERSION_SOFT):
- print('repo: warning: git-%s+ will soon be required; please upgrade your '
- 'version of git to maintain support.'
- % ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),),
- file=sys.stderr)
-
- rp = self.manifest.repoProject
-
- # Handle new --repo-url requests.
- if opt.repo_url:
- remote = rp.GetRemote('origin')
- remote.url = opt.repo_url
- remote.Save()
-
- # Handle new --repo-rev requests.
- if opt.repo_rev:
- wrapper = Wrapper()
- try:
- remote_ref, rev = wrapper.check_repo_rev(
- rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet)
- except wrapper.CloneFailure:
- print('fatal: double check your --repo-rev setting.', file=sys.stderr)
- sys.exit(1)
- branch = rp.GetBranch('default')
- branch.merge = remote_ref
- rp.work_git.reset('--hard', rev)
- branch.Save()
-
- if opt.worktree:
- # Older versions of git supported worktree, but had dangerous gc bugs.
- git_require((2, 15, 0), fail=True, msg='git gc worktree corruption')
-
- # Provide a short notice that we're reinitializing an existing checkout.
- # Sometimes developers might not realize that they're in one, or that
- # repo doesn't do nested checkouts.
- existing_checkout = self.manifest.manifestProject.Exists
- if not opt.quiet and existing_checkout:
- print('repo: reusing existing repo client checkout in', self.manifest.topdir)
-
- self._SyncManifest(opt)
-
- if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
- if opt.config_name or self._ShouldConfigureUser(opt, existing_checkout):
- self._ConfigureUser(opt)
- self._ConfigureColor()
-
- if not opt.quiet:
- self._DisplayResult()
+ if not opt.quiet:
+ self._DisplayResult()
diff --git a/subcmds/list.py b/subcmds/list.py
index ad8036e..24e3e1f 100644
--- a/subcmds/list.py
+++ b/subcmds/list.py
@@ -18,13 +18,13 @@
class List(Command, MirrorSafeCommand):
- COMMON = True
- helpSummary = "List projects and their associated directories"
- helpUsage = """
+ COMMON = True
+ helpSummary = "List projects and their associated directories"
+ helpUsage = """
%prog [-f] [<project>...]
%prog [-f] -r str1 [str2]...
"""
- helpDescription = """
+ helpDescription = """
List all projects; pass '.' to list the project for the cwd.
By default, only projects that currently exist in the checkout are shown. If
@@ -35,69 +35,103 @@
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
"""
- def _Options(self, p):
- p.add_option('-r', '--regex',
- dest='regex', action='store_true',
- help='filter the project list based on regex or wildcard matching of strings')
- p.add_option('-g', '--groups',
- dest='groups',
- help='filter the project list based on the groups the project is in')
- p.add_option('-a', '--all',
- action='store_true',
- help='show projects regardless of checkout state')
- p.add_option('-n', '--name-only',
- dest='name_only', action='store_true',
- help='display only the name of the repository')
- p.add_option('-p', '--path-only',
- dest='path_only', action='store_true',
- help='display only the path of the repository')
- p.add_option('-f', '--fullpath',
- dest='fullpath', action='store_true',
- help='display the full work tree path instead of the relative path')
- p.add_option('--relative-to', metavar='PATH',
- help='display paths relative to this one (default: top of repo client checkout)')
+ def _Options(self, p):
+ p.add_option(
+ "-r",
+ "--regex",
+ dest="regex",
+ action="store_true",
+ help="filter the project list based on regex or wildcard matching "
+ "of strings",
+ )
+ p.add_option(
+ "-g",
+ "--groups",
+ dest="groups",
+ help="filter the project list based on the groups the project is "
+ "in",
+ )
+ p.add_option(
+ "-a",
+ "--all",
+ action="store_true",
+ help="show projects regardless of checkout state",
+ )
+ p.add_option(
+ "-n",
+ "--name-only",
+ dest="name_only",
+ action="store_true",
+ help="display only the name of the repository",
+ )
+ p.add_option(
+ "-p",
+ "--path-only",
+ dest="path_only",
+ action="store_true",
+ help="display only the path of the repository",
+ )
+ p.add_option(
+ "-f",
+ "--fullpath",
+ dest="fullpath",
+ action="store_true",
+ help="display the full work tree path instead of the relative path",
+ )
+ p.add_option(
+ "--relative-to",
+ metavar="PATH",
+ help="display paths relative to this one (default: top of repo "
+ "client checkout)",
+ )
- def ValidateOptions(self, opt, args):
- if opt.fullpath and opt.name_only:
- self.OptionParser.error('cannot combine -f and -n')
+ def ValidateOptions(self, opt, args):
+ if opt.fullpath and opt.name_only:
+ self.OptionParser.error("cannot combine -f and -n")
- # Resolve any symlinks so the output is stable.
- if opt.relative_to:
- opt.relative_to = os.path.realpath(opt.relative_to)
+ # Resolve any symlinks so the output is stable.
+ if opt.relative_to:
+ opt.relative_to = os.path.realpath(opt.relative_to)
- def Execute(self, opt, args):
- """List all projects and the associated directories.
+ def Execute(self, opt, args):
+ """List all projects and the associated directories.
- This may be possible to do with 'repo forall', but repo newbies have
- trouble figuring that out. The idea here is that it should be more
- discoverable.
+ This may be possible to do with 'repo forall', but repo newbies have
+ trouble figuring that out. The idea here is that it should be more
+ discoverable.
- Args:
- opt: The options.
- args: Positional args. Can be a list of projects to list, or empty.
- """
- if not opt.regex:
- projects = self.GetProjects(args, groups=opt.groups, missing_ok=opt.all,
- all_manifests=not opt.this_manifest_only)
- else:
- projects = self.FindProjects(args, all_manifests=not opt.this_manifest_only)
+ Args:
+ opt: The options.
+ args: Positional args. Can be a list of projects to list, or empty.
+ """
+ if not opt.regex:
+ projects = self.GetProjects(
+ args,
+ groups=opt.groups,
+ missing_ok=opt.all,
+ all_manifests=not opt.this_manifest_only,
+ )
+ else:
+ projects = self.FindProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- def _getpath(x):
- if opt.fullpath:
- return x.worktree
- if opt.relative_to:
- return os.path.relpath(x.worktree, opt.relative_to)
- return x.RelPath(local=opt.this_manifest_only)
+ def _getpath(x):
+ if opt.fullpath:
+ return x.worktree
+ if opt.relative_to:
+ return os.path.relpath(x.worktree, opt.relative_to)
+ return x.RelPath(local=opt.this_manifest_only)
- lines = []
- for project in projects:
- if opt.name_only and not opt.path_only:
- lines.append("%s" % (project.name))
- elif opt.path_only and not opt.name_only:
- lines.append("%s" % (_getpath(project)))
- else:
- lines.append("%s : %s" % (_getpath(project), project.name))
+ lines = []
+ for project in projects:
+ if opt.name_only and not opt.path_only:
+ lines.append("%s" % (project.name))
+ elif opt.path_only and not opt.name_only:
+ lines.append("%s" % (_getpath(project)))
+ else:
+ lines.append("%s : %s" % (_getpath(project), project.name))
- if lines:
- lines.sort()
- print('\n'.join(lines))
+ if lines:
+ lines.sort()
+ print("\n".join(lines))
diff --git a/subcmds/manifest.py b/subcmds/manifest.py
index f4602a5..f72df34 100644
--- a/subcmds/manifest.py
+++ b/subcmds/manifest.py
@@ -20,12 +20,12 @@
class Manifest(PagedCommand):
- COMMON = False
- helpSummary = "Manifest inspection utility"
- helpUsage = """
+ COMMON = False
+ helpSummary = "Manifest inspection utility"
+ helpUsage = """
%prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r]
"""
- _helpDescription = """
+ _helpDescription = """
With the -o option, exports the current manifest for inspection.
The manifest and (if present) local_manifests/ are combined
@@ -40,92 +40,136 @@
to indicate the remote ref to push changes to via 'repo upload'.
"""
- @property
- def helpDescription(self):
- helptext = self._helpDescription + '\n'
- r = os.path.dirname(__file__)
- r = os.path.dirname(r)
- with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd:
- for line in fd:
- helptext += line
- return helptext
+ @property
+ def helpDescription(self):
+ helptext = self._helpDescription + "\n"
+ r = os.path.dirname(__file__)
+ r = os.path.dirname(r)
+ with open(os.path.join(r, "docs", "manifest-format.md")) as fd:
+ for line in fd:
+ helptext += line
+ return helptext
- def _Options(self, p):
- p.add_option('-r', '--revision-as-HEAD',
- dest='peg_rev', action='store_true',
- help='save revisions as current HEAD')
- p.add_option('-m', '--manifest-name',
- help='temporary manifest to use for this sync', metavar='NAME.xml')
- p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
- default=True, action='store_false',
- help='if in -r mode, do not write the upstream field '
- '(only of use if the branch names for a sha1 manifest are '
- 'sensitive)')
- p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
- default=True, action='store_false',
- help='if in -r mode, do not write the dest-branch field '
- '(only of use if the branch names for a sha1 manifest are '
- 'sensitive)')
- p.add_option('--json', default=False, action='store_true',
- help='output manifest in JSON format (experimental)')
- p.add_option('--pretty', default=False, action='store_true',
- help='format output for humans to read')
- p.add_option('--no-local-manifests', default=False, action='store_true',
- dest='ignore_local_manifests', help='ignore local manifests')
- p.add_option('-o', '--output-file',
- dest='output_file',
- default='-',
- help='file to save the manifest to. (Filename prefix for multi-tree.)',
- metavar='-|NAME.xml')
+ def _Options(self, p):
+ p.add_option(
+ "-r",
+ "--revision-as-HEAD",
+ dest="peg_rev",
+ action="store_true",
+ help="save revisions as current HEAD",
+ )
+ p.add_option(
+ "-m",
+ "--manifest-name",
+ help="temporary manifest to use for this sync",
+ metavar="NAME.xml",
+ )
+ p.add_option(
+ "--suppress-upstream-revision",
+ dest="peg_rev_upstream",
+ default=True,
+ action="store_false",
+ help="if in -r mode, do not write the upstream field "
+ "(only of use if the branch names for a sha1 manifest are "
+ "sensitive)",
+ )
+ p.add_option(
+ "--suppress-dest-branch",
+ dest="peg_rev_dest_branch",
+ default=True,
+ action="store_false",
+ help="if in -r mode, do not write the dest-branch field "
+ "(only of use if the branch names for a sha1 manifest are "
+ "sensitive)",
+ )
+ p.add_option(
+ "--json",
+ default=False,
+ action="store_true",
+ help="output manifest in JSON format (experimental)",
+ )
+ p.add_option(
+ "--pretty",
+ default=False,
+ action="store_true",
+ help="format output for humans to read",
+ )
+ p.add_option(
+ "--no-local-manifests",
+ default=False,
+ action="store_true",
+ dest="ignore_local_manifests",
+ help="ignore local manifests",
+ )
+ p.add_option(
+ "-o",
+ "--output-file",
+ dest="output_file",
+ default="-",
+ help="file to save the manifest to. (Filename prefix for "
+ "multi-tree.)",
+ metavar="-|NAME.xml",
+ )
- def _Output(self, opt):
- # If alternate manifest is specified, override the manifest file that we're using.
- if opt.manifest_name:
- self.manifest.Override(opt.manifest_name, False)
+ def _Output(self, opt):
+ # If alternate manifest is specified, override the manifest file that
+ # we're using.
+ if opt.manifest_name:
+ self.manifest.Override(opt.manifest_name, False)
- for manifest in self.ManifestList(opt):
- output_file = opt.output_file
- if output_file == '-':
- fd = sys.stdout
- else:
- if manifest.path_prefix:
- output_file = f'{opt.output_file}:{manifest.path_prefix.replace("/", "%2f")}'
- fd = open(output_file, 'w')
+ for manifest in self.ManifestList(opt):
+ output_file = opt.output_file
+ if output_file == "-":
+ fd = sys.stdout
+ else:
+ if manifest.path_prefix:
+ output_file = (
+ f"{opt.output_file}:"
+ f'{manifest.path_prefix.replace("/", "%2f")}'
+ )
+ fd = open(output_file, "w")
- manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
+ manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
- if opt.json:
- print('warning: --json is experimental!', file=sys.stderr)
- doc = manifest.ToDict(peg_rev=opt.peg_rev,
- peg_rev_upstream=opt.peg_rev_upstream,
- peg_rev_dest_branch=opt.peg_rev_dest_branch)
+ if opt.json:
+ print("warning: --json is experimental!", file=sys.stderr)
+ doc = manifest.ToDict(
+ peg_rev=opt.peg_rev,
+ peg_rev_upstream=opt.peg_rev_upstream,
+ peg_rev_dest_branch=opt.peg_rev_dest_branch,
+ )
- json_settings = {
- # JSON style guide says Uunicode characters are fully allowed.
- 'ensure_ascii': False,
- # We use 2 space indent to match JSON style guide.
- 'indent': 2 if opt.pretty else None,
- 'separators': (',', ': ') if opt.pretty else (',', ':'),
- 'sort_keys': True,
- }
- fd.write(json.dumps(doc, **json_settings))
- else:
- manifest.Save(fd,
- peg_rev=opt.peg_rev,
- peg_rev_upstream=opt.peg_rev_upstream,
- peg_rev_dest_branch=opt.peg_rev_dest_branch)
- if output_file != '-':
- fd.close()
- if manifest.path_prefix:
- print(f'Saved {manifest.path_prefix} submanifest to {output_file}',
- file=sys.stderr)
- else:
- print(f'Saved manifest to {output_file}', file=sys.stderr)
+ json_settings = {
+ # JSON style guide says Unicode characters are fully
+ # allowed.
+ "ensure_ascii": False,
+ # We use 2 space indent to match JSON style guide.
+ "indent": 2 if opt.pretty else None,
+ "separators": (",", ": ") if opt.pretty else (",", ":"),
+ "sort_keys": True,
+ }
+ fd.write(json.dumps(doc, **json_settings))
+ else:
+ manifest.Save(
+ fd,
+ peg_rev=opt.peg_rev,
+ peg_rev_upstream=opt.peg_rev_upstream,
+ peg_rev_dest_branch=opt.peg_rev_dest_branch,
+ )
+ if output_file != "-":
+ fd.close()
+ if manifest.path_prefix:
+ print(
+ f"Saved {manifest.path_prefix} submanifest to "
+ f"{output_file}",
+ file=sys.stderr,
+ )
+ else:
+ print(f"Saved manifest to {output_file}", file=sys.stderr)
+ def ValidateOptions(self, opt, args):
+ if args:
+ self.Usage()
- def ValidateOptions(self, opt, args):
- if args:
- self.Usage()
-
- def Execute(self, opt, args):
- self._Output(opt)
+ def Execute(self, opt, args):
+ self._Output(opt)
diff --git a/subcmds/overview.py b/subcmds/overview.py
index 11dba95..8ccad61 100644
--- a/subcmds/overview.py
+++ b/subcmds/overview.py
@@ -19,12 +19,12 @@
class Overview(PagedCommand):
- COMMON = True
- helpSummary = "Display overview of unmerged project branches"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Display overview of unmerged project branches"
+ helpUsage = """
%prog [--current-branch] [<project>...]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command is used to display an overview of the projects branches,
and list any local commits that have not yet been merged into the project.
@@ -33,59 +33,77 @@
are displayed.
"""
- def _Options(self, p):
- p.add_option('-c', '--current-branch',
- dest="current_branch", action="store_true",
- help="consider only checked out branches")
- p.add_option('--no-current-branch',
- dest='current_branch', action='store_false',
- help='consider all local branches')
- # Turn this into a warning & remove this someday.
- p.add_option('-b',
- dest='current_branch', action='store_true',
- help=optparse.SUPPRESS_HELP)
+ def _Options(self, p):
+ p.add_option(
+ "-c",
+ "--current-branch",
+ dest="current_branch",
+ action="store_true",
+ help="consider only checked out branches",
+ )
+ p.add_option(
+ "--no-current-branch",
+ dest="current_branch",
+ action="store_false",
+ help="consider all local branches",
+ )
+ # Turn this into a warning & remove this someday.
+ p.add_option(
+ "-b",
+ dest="current_branch",
+ action="store_true",
+ help=optparse.SUPPRESS_HELP,
+ )
- def Execute(self, opt, args):
- all_branches = []
- for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only):
- br = [project.GetUploadableBranch(x)
- for x in project.GetBranches()]
- br = [x for x in br if x]
- if opt.current_branch:
- br = [x for x in br if x.name == project.CurrentBranch]
- all_branches.extend(br)
+ def Execute(self, opt, args):
+ all_branches = []
+ for project in self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ ):
+ br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
+ br = [x for x in br if x]
+ if opt.current_branch:
+ br = [x for x in br if x.name == project.CurrentBranch]
+ all_branches.extend(br)
- if not all_branches:
- return
+ if not all_branches:
+ return
- class Report(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'status')
- self.project = self.printer('header', attr='bold')
- self.text = self.printer('text')
+ class Report(Coloring):
+ def __init__(self, config):
+ Coloring.__init__(self, config, "status")
+ self.project = self.printer("header", attr="bold")
+ self.text = self.printer("text")
- out = Report(all_branches[0].project.config)
- out.text("Deprecated. See repo info -o.")
- out.nl()
- out.project('Projects Overview')
- out.nl()
-
- project = None
-
- for branch in all_branches:
- if project != branch.project:
- project = branch.project
+ out = Report(all_branches[0].project.config)
+ out.text("Deprecated. See repo info -o.")
out.nl()
- out.project('project %s/' % project.RelPath(local=opt.this_manifest_only))
+ out.project("Projects Overview")
out.nl()
- commits = branch.commits
- date = branch.date
- print('%s %-33s (%2d commit%s, %s)' % (
- branch.name == project.CurrentBranch and '*' or ' ',
- branch.name,
- len(commits),
- len(commits) != 1 and 's' or ' ',
- date))
- for commit in commits:
- print('%-35s - %s' % ('', commit))
+ project = None
+
+ for branch in all_branches:
+ if project != branch.project:
+ project = branch.project
+ out.nl()
+ out.project(
+ "project %s/"
+ % project.RelPath(local=opt.this_manifest_only)
+ )
+ out.nl()
+
+ commits = branch.commits
+ date = branch.date
+ print(
+ "%s %-33s (%2d commit%s, %s)"
+ % (
+ branch.name == project.CurrentBranch and "*" or " ",
+ branch.name,
+ len(commits),
+ len(commits) != 1 and "s" or " ",
+ date,
+ )
+ )
+ for commit in commits:
+ print("%-35s - %s" % ("", commit))
diff --git a/subcmds/prune.py b/subcmds/prune.py
index 251acca..5a68c14 100644
--- a/subcmds/prune.py
+++ b/subcmds/prune.py
@@ -19,63 +19,76 @@
class Prune(PagedCommand):
- COMMON = True
- helpSummary = "Prune (delete) already merged topics"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Prune (delete) already merged topics"
+ helpUsage = """
%prog [<project>...]
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _ExecuteOne(self, project):
- """Process one project."""
- return project.PruneHeads()
+ def _ExecuteOne(self, project):
+ """Process one project."""
+ return project.PruneHeads()
- def Execute(self, opt, args):
- projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
+ def Execute(self, opt, args):
+ projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- # NB: Should be able to refactor this module to display summary as results
- # come back from children.
- def _ProcessResults(_pool, _output, results):
- return list(itertools.chain.from_iterable(results))
+ # NB: Should be able to refactor this module to display summary as
+ # results come back from children.
+ def _ProcessResults(_pool, _output, results):
+ return list(itertools.chain.from_iterable(results))
- all_branches = self.ExecuteInParallel(
- opt.jobs,
- self._ExecuteOne,
- projects,
- callback=_ProcessResults,
- ordered=True)
+ all_branches = self.ExecuteInParallel(
+ opt.jobs,
+ self._ExecuteOne,
+ projects,
+ callback=_ProcessResults,
+ ordered=True,
+ )
- if not all_branches:
- return
+ if not all_branches:
+ return
- class Report(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'status')
- self.project = self.printer('header', attr='bold')
+ class Report(Coloring):
+ def __init__(self, config):
+ Coloring.__init__(self, config, "status")
+ self.project = self.printer("header", attr="bold")
- out = Report(all_branches[0].project.config)
- out.project('Pending Branches')
- out.nl()
-
- project = None
-
- for branch in all_branches:
- if project != branch.project:
- project = branch.project
- out.nl()
- out.project('project %s/' % project.RelPath(local=opt.this_manifest_only))
+ out = Report(all_branches[0].project.config)
+ out.project("Pending Branches")
out.nl()
- print('%s %-33s ' % (
- branch.name == project.CurrentBranch and '*' or ' ',
- branch.name), end='')
+ project = None
- if not branch.base_exists:
- print('(ignoring: tracking branch is gone: %s)' % (branch.base,))
- else:
- commits = branch.commits
- date = branch.date
- print('(%2d commit%s, %s)' % (
- len(commits),
- len(commits) != 1 and 's' or ' ',
- date))
+ for branch in all_branches:
+ if project != branch.project:
+ project = branch.project
+ out.nl()
+ out.project(
+ "project %s/"
+ % project.RelPath(local=opt.this_manifest_only)
+ )
+ out.nl()
+
+ print(
+ "%s %-33s "
+ % (
+ branch.name == project.CurrentBranch and "*" or " ",
+ branch.name,
+ ),
+ end="",
+ )
+
+ if not branch.base_exists:
+ print(
+ "(ignoring: tracking branch is gone: %s)" % (branch.base,)
+ )
+ else:
+ commits = branch.commits
+ date = branch.date
+ print(
+ "(%2d commit%s, %s)"
+ % (len(commits), len(commits) != 1 and "s" or " ", date)
+ )
diff --git a/subcmds/rebase.py b/subcmds/rebase.py
index 3d1a63e..dc4f580 100644
--- a/subcmds/rebase.py
+++ b/subcmds/rebase.py
@@ -20,146 +20,193 @@
class RebaseColoring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'rebase')
- self.project = self.printer('project', attr='bold')
- self.fail = self.printer('fail', fg='red')
+ def __init__(self, config):
+ Coloring.__init__(self, config, "rebase")
+ self.project = self.printer("project", attr="bold")
+ self.fail = self.printer("fail", fg="red")
class Rebase(Command):
- COMMON = True
- helpSummary = "Rebase local branches on upstream branch"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Rebase local branches on upstream branch"
+ helpUsage = """
%prog {[<project>...] | -i <project>...}
"""
- helpDescription = """
+ helpDescription = """
'%prog' uses git rebase to move local changes in the current topic branch to
the HEAD of the upstream history, useful when you have made commits in a topic
branch but need to incorporate new upstream changes "underneath" them.
"""
- def _Options(self, p):
- g = p.get_option_group('--quiet')
- g.add_option('-i', '--interactive',
- dest="interactive", action="store_true",
- help="interactive rebase (single project only)")
+ def _Options(self, p):
+ g = p.get_option_group("--quiet")
+ g.add_option(
+ "-i",
+ "--interactive",
+ dest="interactive",
+ action="store_true",
+ help="interactive rebase (single project only)",
+ )
- p.add_option('--fail-fast',
- dest='fail_fast', action='store_true',
- help='stop rebasing after first error is hit')
- p.add_option('-f', '--force-rebase',
- dest='force_rebase', action='store_true',
- help='pass --force-rebase to git rebase')
- p.add_option('--no-ff',
- dest='ff', default=True, action='store_false',
- help='pass --no-ff to git rebase')
- p.add_option('--autosquash',
- dest='autosquash', action='store_true',
- help='pass --autosquash to git rebase')
- p.add_option('--whitespace',
- dest='whitespace', action='store', metavar='WS',
- help='pass --whitespace to git rebase')
- p.add_option('--auto-stash',
- dest='auto_stash', action='store_true',
- help='stash local modifications before starting')
- p.add_option('-m', '--onto-manifest',
- dest='onto_manifest', action='store_true',
- help='rebase onto the manifest version instead of upstream '
- 'HEAD (this helps to make sure the local tree stays '
- 'consistent if you previously synced to a manifest)')
+ p.add_option(
+ "--fail-fast",
+ dest="fail_fast",
+ action="store_true",
+ help="stop rebasing after first error is hit",
+ )
+ p.add_option(
+ "-f",
+ "--force-rebase",
+ dest="force_rebase",
+ action="store_true",
+ help="pass --force-rebase to git rebase",
+ )
+ p.add_option(
+ "--no-ff",
+ dest="ff",
+ default=True,
+ action="store_false",
+ help="pass --no-ff to git rebase",
+ )
+ p.add_option(
+ "--autosquash",
+ dest="autosquash",
+ action="store_true",
+ help="pass --autosquash to git rebase",
+ )
+ p.add_option(
+ "--whitespace",
+ dest="whitespace",
+ action="store",
+ metavar="WS",
+ help="pass --whitespace to git rebase",
+ )
+ p.add_option(
+ "--auto-stash",
+ dest="auto_stash",
+ action="store_true",
+ help="stash local modifications before starting",
+ )
+ p.add_option(
+ "-m",
+ "--onto-manifest",
+ dest="onto_manifest",
+ action="store_true",
+ help="rebase onto the manifest version instead of upstream "
+ "HEAD (this helps to make sure the local tree stays "
+ "consistent if you previously synced to a manifest)",
+ )
- def Execute(self, opt, args):
- all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
- one_project = len(all_projects) == 1
+ def Execute(self, opt, args):
+ all_projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
+ one_project = len(all_projects) == 1
- if opt.interactive and not one_project:
- print('error: interactive rebase not supported with multiple projects',
- file=sys.stderr)
- if len(args) == 1:
- print('note: project %s is mapped to more than one path' % (args[0],),
- file=sys.stderr)
- return 1
+ if opt.interactive and not one_project:
+ print(
+ "error: interactive rebase not supported with multiple "
+ "projects",
+ file=sys.stderr,
+ )
+ if len(args) == 1:
+ print(
+ "note: project %s is mapped to more than one path"
+ % (args[0],),
+ file=sys.stderr,
+ )
+ return 1
- # Setup the common git rebase args that we use for all projects.
- common_args = ['rebase']
- if opt.whitespace:
- common_args.append('--whitespace=%s' % opt.whitespace)
- if opt.quiet:
- common_args.append('--quiet')
- if opt.force_rebase:
- common_args.append('--force-rebase')
- if not opt.ff:
- common_args.append('--no-ff')
- if opt.autosquash:
- common_args.append('--autosquash')
- if opt.interactive:
- common_args.append('-i')
+ # Setup the common git rebase args that we use for all projects.
+ common_args = ["rebase"]
+ if opt.whitespace:
+ common_args.append("--whitespace=%s" % opt.whitespace)
+ if opt.quiet:
+ common_args.append("--quiet")
+ if opt.force_rebase:
+ common_args.append("--force-rebase")
+ if not opt.ff:
+ common_args.append("--no-ff")
+ if opt.autosquash:
+ common_args.append("--autosquash")
+ if opt.interactive:
+ common_args.append("-i")
- config = self.manifest.manifestProject.config
- out = RebaseColoring(config)
- out.redirect(sys.stdout)
- _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
+ config = self.manifest.manifestProject.config
+ out = RebaseColoring(config)
+ out.redirect(sys.stdout)
+ _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
- ret = 0
- for project in all_projects:
- if ret and opt.fail_fast:
- break
+ ret = 0
+ for project in all_projects:
+ if ret and opt.fail_fast:
+ break
- cb = project.CurrentBranch
- if not cb:
- if one_project:
- print("error: project %s has a detached HEAD" % _RelPath(project),
- file=sys.stderr)
- return 1
- # ignore branches with detatched HEADs
- continue
+ cb = project.CurrentBranch
+ if not cb:
+ if one_project:
+ print(
+ "error: project %s has a detached HEAD"
+ % _RelPath(project),
+ file=sys.stderr,
+ )
+ return 1
+ # Ignore branches with detached HEADs.
+ continue
- upbranch = project.GetBranch(cb)
- if not upbranch.LocalMerge:
- if one_project:
- print("error: project %s does not track any remote branches"
- % _RelPath(project), file=sys.stderr)
- return 1
- # ignore branches without remotes
- continue
+ upbranch = project.GetBranch(cb)
+ if not upbranch.LocalMerge:
+ if one_project:
+ print(
+ "error: project %s does not track any remote branches"
+ % _RelPath(project),
+ file=sys.stderr,
+ )
+ return 1
+ # Ignore branches without remotes.
+ continue
- args = common_args[:]
- if opt.onto_manifest:
- args.append('--onto')
- args.append(project.revisionExpr)
+ args = common_args[:]
+ if opt.onto_manifest:
+ args.append("--onto")
+ args.append(project.revisionExpr)
- args.append(upbranch.LocalMerge)
+ args.append(upbranch.LocalMerge)
- out.project('project %s: rebasing %s -> %s',
- _RelPath(project), cb, upbranch.LocalMerge)
- out.nl()
- out.flush()
+ out.project(
+ "project %s: rebasing %s -> %s",
+ _RelPath(project),
+ cb,
+ upbranch.LocalMerge,
+ )
+ out.nl()
+ out.flush()
- needs_stash = False
- if opt.auto_stash:
- stash_args = ["update-index", "--refresh", "-q"]
+ needs_stash = False
+ if opt.auto_stash:
+ stash_args = ["update-index", "--refresh", "-q"]
- if GitCommand(project, stash_args).Wait() != 0:
- needs_stash = True
- # Dirty index, requires stash...
- stash_args = ["stash"]
+ if GitCommand(project, stash_args).Wait() != 0:
+ needs_stash = True
+ # Dirty index, requires stash...
+ stash_args = ["stash"]
- if GitCommand(project, stash_args).Wait() != 0:
- ret += 1
- continue
+ if GitCommand(project, stash_args).Wait() != 0:
+ ret += 1
+ continue
- if GitCommand(project, args).Wait() != 0:
- ret += 1
- continue
+ if GitCommand(project, args).Wait() != 0:
+ ret += 1
+ continue
- if needs_stash:
- stash_args.append('pop')
- stash_args.append('--quiet')
- if GitCommand(project, stash_args).Wait() != 0:
- ret += 1
+ if needs_stash:
+ stash_args.append("pop")
+ stash_args.append("--quiet")
+ if GitCommand(project, stash_args).Wait() != 0:
+ ret += 1
- if ret:
- out.fail('%i projects had errors', ret)
- out.nl()
+ if ret:
+ out.fail("%i projects had errors", ret)
+ out.nl()
- return ret
+ return ret
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 898bc3f..d5d0a83 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -21,12 +21,12 @@
class Selfupdate(Command, MirrorSafeCommand):
- COMMON = False
- helpSummary = "Update repo to the latest version"
- helpUsage = """
+ COMMON = False
+ helpSummary = "Update repo to the latest version"
+ helpUsage = """
%prog
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command upgrades repo to the latest version, if a
newer version is available.
@@ -34,28 +34,33 @@
need to be performed by an end-user.
"""
- def _Options(self, p):
- g = p.add_option_group('repo Version options')
- g.add_option('--no-repo-verify',
- dest='repo_verify', default=True, action='store_false',
- help='do not verify repo source code')
- g.add_option('--repo-upgraded',
- dest='repo_upgraded', action='store_true',
- help=SUPPRESS_HELP)
+ def _Options(self, p):
+ g = p.add_option_group("repo Version options")
+ g.add_option(
+ "--no-repo-verify",
+ dest="repo_verify",
+ default=True,
+ action="store_false",
+ help="do not verify repo source code",
+ )
+ g.add_option(
+ "--repo-upgraded",
+ dest="repo_upgraded",
+ action="store_true",
+ help=SUPPRESS_HELP,
+ )
- def Execute(self, opt, args):
- rp = self.manifest.repoProject
- rp.PreSync()
+ def Execute(self, opt, args):
+ rp = self.manifest.repoProject
+ rp.PreSync()
- if opt.repo_upgraded:
- _PostRepoUpgrade(self.manifest)
+ if opt.repo_upgraded:
+ _PostRepoUpgrade(self.manifest)
- else:
- if not rp.Sync_NetworkHalf().success:
- print("error: can't update repo", file=sys.stderr)
- sys.exit(1)
+ else:
+ if not rp.Sync_NetworkHalf().success:
+ print("error: can't update repo", file=sys.stderr)
+ sys.exit(1)
- rp.bare_git.gc('--auto')
- _PostRepoFetch(rp,
- repo_verify=opt.repo_verify,
- verbose=True)
+ rp.bare_git.gc("--auto")
+ _PostRepoFetch(rp, repo_verify=opt.repo_verify, verbose=True)
diff --git a/subcmds/smartsync.py b/subcmds/smartsync.py
index d91d59c..49d0997 100644
--- a/subcmds/smartsync.py
+++ b/subcmds/smartsync.py
@@ -16,18 +16,18 @@
class Smartsync(Sync):
- COMMON = True
- helpSummary = "Update working tree to the latest known good revision"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Update working tree to the latest known good revision"
+ helpUsage = """
%prog [<project>...]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command is a shortcut for sync -s.
"""
- def _Options(self, p):
- Sync._Options(self, p, show_smart=False)
+ def _Options(self, p):
+ Sync._Options(self, p, show_smart=False)
- def Execute(self, opt, args):
- opt.smart_sync = True
- Sync.Execute(self, opt, args)
+ def Execute(self, opt, args):
+ opt.smart_sync = True
+ Sync.Execute(self, opt, args)
diff --git a/subcmds/stage.py b/subcmds/stage.py
index bdb7201..4d54eb1 100644
--- a/subcmds/stage.py
+++ b/subcmds/stage.py
@@ -20,98 +20,111 @@
class _ProjectList(Coloring):
- def __init__(self, gc):
- Coloring.__init__(self, gc, 'interactive')
- self.prompt = self.printer('prompt', fg='blue', attr='bold')
- self.header = self.printer('header', attr='bold')
- self.help = self.printer('help', fg='red', attr='bold')
+ def __init__(self, gc):
+ Coloring.__init__(self, gc, "interactive")
+ self.prompt = self.printer("prompt", fg="blue", attr="bold")
+ self.header = self.printer("header", attr="bold")
+ self.help = self.printer("help", fg="red", attr="bold")
class Stage(InteractiveCommand):
- COMMON = True
- helpSummary = "Stage file(s) for commit"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Stage file(s) for commit"
+ helpUsage = """
%prog -i [<project>...]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command stages files to prepare the next commit.
"""
- def _Options(self, p):
- g = p.get_option_group('--quiet')
- g.add_option('-i', '--interactive',
- dest='interactive', action='store_true',
- help='use interactive staging')
+ def _Options(self, p):
+ g = p.get_option_group("--quiet")
+ g.add_option(
+ "-i",
+ "--interactive",
+ dest="interactive",
+ action="store_true",
+ help="use interactive staging",
+ )
- def Execute(self, opt, args):
- if opt.interactive:
- self._Interactive(opt, args)
- else:
- self.Usage()
+ def Execute(self, opt, args):
+ if opt.interactive:
+ self._Interactive(opt, args)
+ else:
+ self.Usage()
- def _Interactive(self, opt, args):
- all_projects = [
- p for p in self.GetProjects(args, all_manifests=not opt.this_manifest_only)
- if p.IsDirty()]
- if not all_projects:
- print('no projects have uncommitted modifications', file=sys.stderr)
- return
+ def _Interactive(self, opt, args):
+ all_projects = [
+ p
+ for p in self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
+ if p.IsDirty()
+ ]
+ if not all_projects:
+ print("no projects have uncommitted modifications", file=sys.stderr)
+ return
- out = _ProjectList(self.manifest.manifestProject.config)
- while True:
- out.header(' %s', 'project')
- out.nl()
+ out = _ProjectList(self.manifest.manifestProject.config)
+ while True:
+ out.header(" %s", "project")
+ out.nl()
- for i in range(len(all_projects)):
- project = all_projects[i]
- out.write('%3d: %s', i + 1,
- project.RelPath(local=opt.this_manifest_only) + '/')
- out.nl()
- out.nl()
+ for i in range(len(all_projects)):
+ project = all_projects[i]
+ out.write(
+ "%3d: %s",
+ i + 1,
+ project.RelPath(local=opt.this_manifest_only) + "/",
+ )
+ out.nl()
+ out.nl()
- out.write('%3d: (', 0)
- out.prompt('q')
- out.write('uit)')
- out.nl()
+ out.write("%3d: (", 0)
+ out.prompt("q")
+ out.write("uit)")
+ out.nl()
- out.prompt('project> ')
- out.flush()
- try:
- a = sys.stdin.readline()
- except KeyboardInterrupt:
- out.nl()
- break
- if a == '':
- out.nl()
- break
+ out.prompt("project> ")
+ out.flush()
+ try:
+ a = sys.stdin.readline()
+ except KeyboardInterrupt:
+ out.nl()
+ break
+ if a == "":
+ out.nl()
+ break
- a = a.strip()
- if a.lower() in ('q', 'quit', 'exit'):
- break
- if not a:
- continue
+ a = a.strip()
+ if a.lower() in ("q", "quit", "exit"):
+ break
+ if not a:
+ continue
- try:
- a_index = int(a)
- except ValueError:
- a_index = None
+ try:
+ a_index = int(a)
+ except ValueError:
+ a_index = None
- if a_index is not None:
- if a_index == 0:
- break
- if 0 < a_index and a_index <= len(all_projects):
- _AddI(all_projects[a_index - 1])
- continue
+ if a_index is not None:
+ if a_index == 0:
+ break
+ if 0 < a_index and a_index <= len(all_projects):
+ _AddI(all_projects[a_index - 1])
+ continue
- projects = [
- p for p in all_projects
- if a in [p.name, p.RelPath(local=opt.this_manifest_only)]]
- if len(projects) == 1:
- _AddI(projects[0])
- continue
- print('Bye.')
+ projects = [
+ p
+ for p in all_projects
+ if a in [p.name, p.RelPath(local=opt.this_manifest_only)]
+ ]
+ if len(projects) == 1:
+ _AddI(projects[0])
+ continue
+ print("Bye.")
def _AddI(project):
- p = GitCommand(project, ['add', '--interactive'], bare=False)
- p.Wait()
+ p = GitCommand(project, ["add", "--interactive"], bare=False)
+ p.Wait()
diff --git a/subcmds/start.py b/subcmds/start.py
index 809df96..d7772b3 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -25,119 +25,147 @@
class Start(Command):
- COMMON = True
- helpSummary = "Start a new branch for development"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Start a new branch for development"
+ helpUsage = """
%prog <newbranchname> [--all | <project>...]
"""
- helpDescription = """
+ helpDescription = """
'%prog' begins a new branch of development, starting from the
revision specified in the manifest.
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _Options(self, p):
- p.add_option('--all',
- dest='all', action='store_true',
- help='begin branch in all projects')
- p.add_option('-r', '--rev', '--revision', dest='revision',
- help='point branch at this revision instead of upstream')
- p.add_option('--head', '--HEAD',
- dest='revision', action='store_const', const='HEAD',
- help='abbreviation for --rev HEAD')
+ def _Options(self, p):
+ p.add_option(
+ "--all",
+ dest="all",
+ action="store_true",
+ help="begin branch in all projects",
+ )
+ p.add_option(
+ "-r",
+ "--rev",
+ "--revision",
+ dest="revision",
+ help="point branch at this revision instead of upstream",
+ )
+ p.add_option(
+ "--head",
+ "--HEAD",
+ dest="revision",
+ action="store_const",
+ const="HEAD",
+ help="abbreviation for --rev HEAD",
+ )
- def ValidateOptions(self, opt, args):
- if not args:
- self.Usage()
+ def ValidateOptions(self, opt, args):
+ if not args:
+ self.Usage()
- nb = args[0]
- if not git.check_ref_format('heads/%s' % nb):
- self.OptionParser.error("'%s' is not a valid name" % nb)
+ nb = args[0]
+ if not git.check_ref_format("heads/%s" % nb):
+ self.OptionParser.error("'%s' is not a valid name" % nb)
- def _ExecuteOne(self, revision, nb, project):
- """Start one project."""
- # If the current revision is immutable, such as a SHA1, a tag or
- # a change, then we can't push back to it. Substitute with
- # dest_branch, if defined; or with manifest default revision instead.
- branch_merge = ''
- if IsImmutable(project.revisionExpr):
- if project.dest_branch:
- branch_merge = project.dest_branch
- else:
- branch_merge = self.manifest.default.revisionExpr
+ def _ExecuteOne(self, revision, nb, project):
+ """Start one project."""
+ # If the current revision is immutable, such as a SHA1, a tag or
+ # a change, then we can't push back to it. Substitute with
+ # dest_branch, if defined; or with manifest default revision instead.
+ branch_merge = ""
+ if IsImmutable(project.revisionExpr):
+ if project.dest_branch:
+ branch_merge = project.dest_branch
+ else:
+ branch_merge = self.manifest.default.revisionExpr
- try:
- ret = project.StartBranch(
- nb, branch_merge=branch_merge, revision=revision)
- except Exception as e:
- print('error: unable to checkout %s: %s' % (project.name, e), file=sys.stderr)
- ret = False
- return (ret, project)
+ try:
+ ret = project.StartBranch(
+ nb, branch_merge=branch_merge, revision=revision
+ )
+ except Exception as e:
+ print(
+ "error: unable to checkout %s: %s" % (project.name, e),
+ file=sys.stderr,
+ )
+ ret = False
+ return (ret, project)
- def Execute(self, opt, args):
- nb = args[0]
- err = []
- projects = []
- if not opt.all:
- projects = args[1:]
- if len(projects) < 1:
- projects = ['.'] # start it in the local project by default
+ def Execute(self, opt, args):
+ nb = args[0]
+ err = []
+ projects = []
+ if not opt.all:
+ projects = args[1:]
+ if len(projects) < 1:
+ projects = ["."] # start it in the local project by default
- all_projects = self.GetProjects(projects,
- missing_ok=bool(self.gitc_manifest),
- all_manifests=not opt.this_manifest_only)
+ all_projects = self.GetProjects(
+ projects,
+ missing_ok=bool(self.gitc_manifest),
+ all_manifests=not opt.this_manifest_only,
+ )
- # This must happen after we find all_projects, since GetProjects may need
- # the local directory, which will disappear once we save the GITC manifest.
- if self.gitc_manifest:
- gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest,
- missing_ok=True)
- for project in gitc_projects:
- if project.old_revision:
- project.already_synced = True
- else:
- project.already_synced = False
- project.old_revision = project.revisionExpr
- project.revisionExpr = None
- # Save the GITC manifest.
- gitc_utils.save_manifest(self.gitc_manifest)
+ # This must happen after we find all_projects, since GetProjects may
+ # need the local directory, which will disappear once we save the GITC
+ # manifest.
+ if self.gitc_manifest:
+ gitc_projects = self.GetProjects(
+ projects, manifest=self.gitc_manifest, missing_ok=True
+ )
+ for project in gitc_projects:
+ if project.old_revision:
+ project.already_synced = True
+ else:
+ project.already_synced = False
+ project.old_revision = project.revisionExpr
+ project.revisionExpr = None
+ # Save the GITC manifest.
+ gitc_utils.save_manifest(self.gitc_manifest)
- # Make sure we have a valid CWD
- if not os.path.exists(os.getcwd()):
- os.chdir(self.manifest.topdir)
+ # Make sure we have a valid CWD.
+ if not os.path.exists(os.getcwd()):
+ os.chdir(self.manifest.topdir)
- pm = Progress('Syncing %s' % nb, len(all_projects), quiet=opt.quiet)
- for project in all_projects:
- gitc_project = self.gitc_manifest.paths[project.relpath]
- # Sync projects that have not been opened.
- if not gitc_project.already_synced:
- proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir,
- project.relpath)
- project.worktree = proj_localdir
- if not os.path.exists(proj_localdir):
- os.makedirs(proj_localdir)
- project.Sync_NetworkHalf()
- sync_buf = SyncBuffer(self.manifest.manifestProject.config)
- project.Sync_LocalHalf(sync_buf)
- project.revisionId = gitc_project.old_revision
- pm.update()
- pm.end()
+ pm = Progress("Syncing %s" % nb, len(all_projects), quiet=opt.quiet)
+ for project in all_projects:
+ gitc_project = self.gitc_manifest.paths[project.relpath]
+ # Sync projects that have not been opened.
+ if not gitc_project.already_synced:
+ proj_localdir = os.path.join(
+ self.gitc_manifest.gitc_client_dir, project.relpath
+ )
+ project.worktree = proj_localdir
+ if not os.path.exists(proj_localdir):
+ os.makedirs(proj_localdir)
+ project.Sync_NetworkHalf()
+ sync_buf = SyncBuffer(self.manifest.manifestProject.config)
+ project.Sync_LocalHalf(sync_buf)
+ project.revisionId = gitc_project.old_revision
+ pm.update()
+ pm.end()
- def _ProcessResults(_pool, pm, results):
- for (result, project) in results:
- if not result:
- err.append(project)
- pm.update()
+ def _ProcessResults(_pool, pm, results):
+ for result, project in results:
+ if not result:
+ err.append(project)
+ pm.update()
- self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._ExecuteOne, opt.revision, nb),
- all_projects,
- callback=_ProcessResults,
- output=Progress('Starting %s' % (nb,), len(all_projects), quiet=opt.quiet))
+ self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(self._ExecuteOne, opt.revision, nb),
+ all_projects,
+ callback=_ProcessResults,
+ output=Progress(
+ "Starting %s" % (nb,), len(all_projects), quiet=opt.quiet
+ ),
+ )
- if err:
- for p in err:
- print("error: %s/: cannot start %s" % (p.RelPath(local=opt.this_manifest_only), nb),
- file=sys.stderr)
- sys.exit(1)
+ if err:
+ for p in err:
+ print(
+ "error: %s/: cannot start %s"
+ % (p.RelPath(local=opt.this_manifest_only), nb),
+ file=sys.stderr,
+ )
+ sys.exit(1)
diff --git a/subcmds/status.py b/subcmds/status.py
index 572c72f..6e0026f 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -24,12 +24,12 @@
class Status(PagedCommand):
- COMMON = True
- helpSummary = "Show the working tree status"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Show the working tree status"
+ helpUsage = """
%prog [<project>...]
"""
- helpDescription = """
+ helpDescription = """
'%prog' compares the working tree to the staging area (aka index),
and the most recent commit on this branch (HEAD), in each project
specified. A summary is displayed, one line per file where there
@@ -76,109 +76,128 @@
d: deleted ( in index, not in work tree )
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _Options(self, p):
- p.add_option('-o', '--orphans',
- dest='orphans', action='store_true',
- help="include objects in working directory outside of repo projects")
+ def _Options(self, p):
+ p.add_option(
+ "-o",
+ "--orphans",
+ dest="orphans",
+ action="store_true",
+ help="include objects in working directory outside of repo "
+ "projects",
+ )
- def _StatusHelper(self, quiet, local, project):
- """Obtains the status for a specific project.
+ def _StatusHelper(self, quiet, local, project):
+ """Obtains the status for a specific project.
- Obtains the status for a project, redirecting the output to
- the specified object.
+ Obtains the status for a project, redirecting the output to
+ the specified object.
- Args:
- quiet: Where to output the status.
- local: a boolean, if True, the path is relative to the local
- (sub)manifest. If false, the path is relative to the
- outermost manifest.
- project: Project to get status of.
+ Args:
+ quiet: Where to output the status.
+ local: a boolean, if True, the path is relative to the local
+ (sub)manifest. If false, the path is relative to the outermost
+ manifest.
+ project: Project to get status of.
- Returns:
- The status of the project.
- """
- buf = io.StringIO()
- ret = project.PrintWorkTreeStatus(quiet=quiet, output_redir=buf,
- local=local)
- return (ret, buf.getvalue())
+ Returns:
+ The status of the project.
+ """
+ buf = io.StringIO()
+ ret = project.PrintWorkTreeStatus(
+ quiet=quiet, output_redir=buf, local=local
+ )
+ return (ret, buf.getvalue())
- def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
- """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
- status_header = ' --\t'
- for item in dirs:
- if not platform_utils.isdir(item):
- outstring.append(''.join([status_header, item]))
- continue
- if item in proj_dirs:
- continue
- if item in proj_dirs_parents:
- self._FindOrphans(glob.glob('%s/.*' % item) +
- glob.glob('%s/*' % item),
- proj_dirs, proj_dirs_parents, outstring)
- continue
- outstring.append(''.join([status_header, item, '/']))
+ def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
+ """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'""" # noqa: E501
+ status_header = " --\t"
+ for item in dirs:
+ if not platform_utils.isdir(item):
+ outstring.append("".join([status_header, item]))
+ continue
+ if item in proj_dirs:
+ continue
+ if item in proj_dirs_parents:
+ self._FindOrphans(
+ glob.glob("%s/.*" % item) + glob.glob("%s/*" % item),
+ proj_dirs,
+ proj_dirs_parents,
+ outstring,
+ )
+ continue
+ outstring.append("".join([status_header, item, "/"]))
- def Execute(self, opt, args):
- all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
+ def Execute(self, opt, args):
+ all_projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- def _ProcessResults(_pool, _output, results):
- ret = 0
- for (state, output) in results:
- if output:
- print(output, end='')
- if state == 'CLEAN':
- ret += 1
- return ret
+ def _ProcessResults(_pool, _output, results):
+ ret = 0
+ for state, output in results:
+ if output:
+ print(output, end="")
+ if state == "CLEAN":
+ ret += 1
+ return ret
- counter = self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._StatusHelper, opt.quiet, opt.this_manifest_only),
- all_projects,
- callback=_ProcessResults,
- ordered=True)
+ counter = self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(
+ self._StatusHelper, opt.quiet, opt.this_manifest_only
+ ),
+ all_projects,
+ callback=_ProcessResults,
+ ordered=True,
+ )
- if not opt.quiet and len(all_projects) == counter:
- print('nothing to commit (working directory clean)')
+ if not opt.quiet and len(all_projects) == counter:
+ print("nothing to commit (working directory clean)")
- if opt.orphans:
- proj_dirs = set()
- proj_dirs_parents = set()
- for project in self.GetProjects(None, missing_ok=True, all_manifests=not opt.this_manifest_only):
- relpath = project.RelPath(local=opt.this_manifest_only)
- proj_dirs.add(relpath)
- (head, _tail) = os.path.split(relpath)
- while head != "":
- proj_dirs_parents.add(head)
- (head, _tail) = os.path.split(head)
- proj_dirs.add('.repo')
+ if opt.orphans:
+ proj_dirs = set()
+ proj_dirs_parents = set()
+ for project in self.GetProjects(
+ None, missing_ok=True, all_manifests=not opt.this_manifest_only
+ ):
+ relpath = project.RelPath(local=opt.this_manifest_only)
+ proj_dirs.add(relpath)
+ (head, _tail) = os.path.split(relpath)
+ while head != "":
+ proj_dirs_parents.add(head)
+ (head, _tail) = os.path.split(head)
+ proj_dirs.add(".repo")
- class StatusColoring(Coloring):
- def __init__(self, config):
- Coloring.__init__(self, config, 'status')
- self.project = self.printer('header', attr='bold')
- self.untracked = self.printer('untracked', fg='red')
+ class StatusColoring(Coloring):
+ def __init__(self, config):
+ Coloring.__init__(self, config, "status")
+ self.project = self.printer("header", attr="bold")
+ self.untracked = self.printer("untracked", fg="red")
- orig_path = os.getcwd()
- try:
- os.chdir(self.manifest.topdir)
+ orig_path = os.getcwd()
+ try:
+ os.chdir(self.manifest.topdir)
- outstring = []
- self._FindOrphans(glob.glob('.*') +
- glob.glob('*'),
- proj_dirs, proj_dirs_parents, outstring)
+ outstring = []
+ self._FindOrphans(
+ glob.glob(".*") + glob.glob("*"),
+ proj_dirs,
+ proj_dirs_parents,
+ outstring,
+ )
- if outstring:
- output = StatusColoring(self.client.globalConfig)
- output.project('Objects not within a project (orphans)')
- output.nl()
- for entry in outstring:
- output.untracked(entry)
- output.nl()
- else:
- print('No orphan files or directories')
+ if outstring:
+ output = StatusColoring(self.client.globalConfig)
+ output.project("Objects not within a project (orphans)")
+ output.nl()
+ for entry in outstring:
+ output.untracked(entry)
+ output.nl()
+ else:
+ print("No orphan files or directories")
- finally:
- # Restore CWD.
- os.chdir(orig_path)
+ finally:
+ # Restore CWD.
+ os.chdir(orig_path)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9a8ca8f..eabaa68 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -33,18 +33,21 @@
import xmlrpc.client
try:
- import threading as _threading
+ import threading as _threading
except ImportError:
- import dummy_threading as _threading
+ import dummy_threading as _threading
try:
- import resource
+ import resource
- def _rlimit_nofile():
- return resource.getrlimit(resource.RLIMIT_NOFILE)
+ def _rlimit_nofile():
+ return resource.getrlimit(resource.RLIMIT_NOFILE)
+
except ImportError:
- def _rlimit_nofile():
- return (256, 256)
+
+ def _rlimit_nofile():
+ return (256, 256)
+
import event_log
from git_command import git_require
@@ -54,7 +57,12 @@
import gitc_utils
from project import Project
from project import RemoteSpec
-from command import Command, DEFAULT_LOCAL_JOBS, MirrorSafeCommand, WORKER_BATCH_SIZE
+from command import (
+ Command,
+ DEFAULT_LOCAL_JOBS,
+ MirrorSafeCommand,
+ WORKER_BATCH_SIZE,
+)
from error import RepoChangedException, GitError
import platform_utils
from project import SyncBuffer
@@ -68,70 +76,74 @@
# Env var to implicitly turn auto-gc back on. This was added to allow a user to
# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
-_REPO_AUTO_GC = 'REPO_AUTO_GC'
-_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'
+_REPO_AUTO_GC = "REPO_AUTO_GC"
+_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
class _FetchOneResult(NamedTuple):
- """_FetchOne return value.
+ """_FetchOne return value.
- Attributes:
- success (bool): True if successful.
- project (Project): The fetched project.
- start (float): The starting time.time().
- finish (float): The ending time.time().
- remote_fetched (bool): True if the remote was actually queried.
- """
- success: bool
- project: Project
- start: float
- finish: float
- remote_fetched: bool
+ Attributes:
+ success (bool): True if successful.
+ project (Project): The fetched project.
+ start (float): The starting time.time().
+ finish (float): The ending time.time().
+ remote_fetched (bool): True if the remote was actually queried.
+ """
+
+ success: bool
+ project: Project
+ start: float
+ finish: float
+ remote_fetched: bool
class _FetchResult(NamedTuple):
- """_Fetch return value.
+ """_Fetch return value.
- Attributes:
- success (bool): True if successful.
- projects (Set[str]): The names of the git directories of fetched projects.
- """
- success: bool
- projects: Set[str]
+ Attributes:
+ success (bool): True if successful.
+ projects (Set[str]): The names of the git directories of fetched projects.
+ """
+
+ success: bool
+ projects: Set[str]
class _FetchMainResult(NamedTuple):
- """_FetchMain return value.
+ """_FetchMain return value.
- Attributes:
- all_projects (List[Project]): The fetched projects.
- """
- all_projects: List[Project]
+ Attributes:
+ all_projects (List[Project]): The fetched projects.
+ """
+
+ all_projects: List[Project]
class _CheckoutOneResult(NamedTuple):
- """_CheckoutOne return value.
+ """_CheckoutOne return value.
- Attributes:
- success (bool): True if successful.
- project (Project): The project.
- start (float): The starting time.time().
- finish (float): The ending time.time().
- """
- success: bool
- project: Project
- start: float
- finish: float
+ Attributes:
+ success (bool): True if successful.
+ project (Project): The project.
+ start (float): The starting time.time().
+ finish (float): The ending time.time().
+ """
+
+ success: bool
+ project: Project
+ start: float
+ finish: float
class Sync(Command, MirrorSafeCommand):
- COMMON = True
- MULTI_MANIFEST_SUPPORT = True
- helpSummary = "Update working tree to the latest revision"
- helpUsage = """
+ COMMON = True
+ MULTI_MANIFEST_SUPPORT = True
+ helpSummary = "Update working tree to the latest revision"
+ helpUsage = """
%prog [<project>...]
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest. If a local
project does not yet exist, it will clone a new local directory from
@@ -230,1293 +242,1604 @@
later is required to fix a server side protocol bug.
"""
- # A value of 0 means we want parallel jobs, but we'll determine the default
- # value later on.
- PARALLEL_JOBS = 0
+ # A value of 0 means we want parallel jobs, but we'll determine the default
+ # value later on.
+ PARALLEL_JOBS = 0
- def _Options(self, p, show_smart=True):
- p.add_option('--jobs-network', default=None, type=int, metavar='JOBS',
- help='number of network jobs to run in parallel (defaults to --jobs or 1)')
- p.add_option('--jobs-checkout', default=None, type=int, metavar='JOBS',
- help='number of local checkout jobs to run in parallel (defaults to --jobs or '
- f'{DEFAULT_LOCAL_JOBS})')
+ def _Options(self, p, show_smart=True):
+ p.add_option(
+ "--jobs-network",
+ default=None,
+ type=int,
+ metavar="JOBS",
+ help="number of network jobs to run in parallel (defaults to "
+ "--jobs or 1)",
+ )
+ p.add_option(
+ "--jobs-checkout",
+ default=None,
+ type=int,
+ metavar="JOBS",
+ help="number of local checkout jobs to run in parallel (defaults "
+ f"to --jobs or {DEFAULT_LOCAL_JOBS})",
+ )
- p.add_option('-f', '--force-broken',
- dest='force_broken', action='store_true',
- help='obsolete option (to be deleted in the future)')
- p.add_option('--fail-fast',
- dest='fail_fast', action='store_true',
- help='stop syncing after first error is hit')
- p.add_option('--force-sync',
- dest='force_sync', action='store_true',
- help="overwrite an existing git directory if it needs to "
- "point to a different object directory. WARNING: this "
- "may cause loss of data")
- p.add_option('--force-remove-dirty',
- dest='force_remove_dirty', action='store_true',
- help="force remove projects with uncommitted modifications if "
- "projects no longer exist in the manifest. "
- "WARNING: this may cause loss of data")
- p.add_option('-l', '--local-only',
- dest='local_only', action='store_true',
- help="only update working tree, don't fetch")
- p.add_option('--no-manifest-update', '--nmu',
- dest='mp_update', action='store_false', default='true',
- help='use the existing manifest checkout as-is. '
- '(do not update to the latest revision)')
- p.add_option('-n', '--network-only',
- dest='network_only', action='store_true',
- help="fetch only, don't update working tree")
- p.add_option('-d', '--detach',
- dest='detach_head', action='store_true',
- help='detach projects back to manifest revision')
- p.add_option('-c', '--current-branch',
- dest='current_branch_only', action='store_true',
- help='fetch only current branch from server')
- p.add_option('--no-current-branch',
- dest='current_branch_only', action='store_false',
- help='fetch all branches from server')
- p.add_option('-m', '--manifest-name',
- dest='manifest_name',
- help='temporary manifest to use for this sync', metavar='NAME.xml')
- p.add_option('--clone-bundle', action='store_true',
- help='enable use of /clone.bundle on HTTP/HTTPS')
- p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
- help='disable use of /clone.bundle on HTTP/HTTPS')
- p.add_option('-u', '--manifest-server-username', action='store',
- dest='manifest_server_username',
- help='username to authenticate with the manifest server')
- p.add_option('-p', '--manifest-server-password', action='store',
- dest='manifest_server_password',
- help='password to authenticate with the manifest server')
- p.add_option('--fetch-submodules',
- dest='fetch_submodules', action='store_true',
- help='fetch submodules from server')
- p.add_option('--use-superproject', action='store_true',
- help='use the manifest superproject to sync projects; implies -c')
- p.add_option('--no-use-superproject', action='store_false',
- dest='use_superproject',
- help='disable use of manifest superprojects')
- p.add_option('--tags', action='store_true',
- help='fetch tags')
- p.add_option('--no-tags',
- dest='tags', action='store_false',
- help="don't fetch tags (default)")
- p.add_option('--optimized-fetch',
- dest='optimized_fetch', action='store_true',
- help='only fetch projects fixed to sha1 if revision does not exist locally')
- p.add_option('--retry-fetches',
- default=0, action='store', type='int',
- help='number of times to retry fetches on transient errors')
- p.add_option('--prune', action='store_true',
- help='delete refs that no longer exist on the remote (default)')
- p.add_option('--no-prune', dest='prune', action='store_false',
- help='do not delete refs that no longer exist on the remote')
- p.add_option('--auto-gc', action='store_true', default=None,
- help='run garbage collection on all synced projects')
- p.add_option('--no-auto-gc', dest='auto_gc', action='store_false',
- help='do not run garbage collection on any projects (default)')
- if show_smart:
- p.add_option('-s', '--smart-sync',
- dest='smart_sync', action='store_true',
- help='smart sync using manifest from the latest known good build')
- p.add_option('-t', '--smart-tag',
- dest='smart_tag', action='store',
- help='smart sync using manifest from a known tag')
+ p.add_option(
+ "-f",
+ "--force-broken",
+ dest="force_broken",
+ action="store_true",
+ help="obsolete option (to be deleted in the future)",
+ )
+ p.add_option(
+ "--fail-fast",
+ dest="fail_fast",
+ action="store_true",
+ help="stop syncing after first error is hit",
+ )
+ p.add_option(
+ "--force-sync",
+ dest="force_sync",
+ action="store_true",
+ help="overwrite an existing git directory if it needs to "
+ "point to a different object directory. WARNING: this "
+ "may cause loss of data",
+ )
+ p.add_option(
+ "--force-remove-dirty",
+ dest="force_remove_dirty",
+ action="store_true",
+ help="force remove projects with uncommitted modifications if "
+ "projects no longer exist in the manifest. "
+ "WARNING: this may cause loss of data",
+ )
+ p.add_option(
+ "-l",
+ "--local-only",
+ dest="local_only",
+ action="store_true",
+ help="only update working tree, don't fetch",
+ )
+ p.add_option(
+ "--no-manifest-update",
+ "--nmu",
+ dest="mp_update",
+ action="store_false",
+ default="true",
+ help="use the existing manifest checkout as-is. "
+ "(do not update to the latest revision)",
+ )
+ p.add_option(
+ "-n",
+ "--network-only",
+ dest="network_only",
+ action="store_true",
+ help="fetch only, don't update working tree",
+ )
+ p.add_option(
+ "-d",
+ "--detach",
+ dest="detach_head",
+ action="store_true",
+ help="detach projects back to manifest revision",
+ )
+ p.add_option(
+ "-c",
+ "--current-branch",
+ dest="current_branch_only",
+ action="store_true",
+ help="fetch only current branch from server",
+ )
+ p.add_option(
+ "--no-current-branch",
+ dest="current_branch_only",
+ action="store_false",
+ help="fetch all branches from server",
+ )
+ p.add_option(
+ "-m",
+ "--manifest-name",
+ dest="manifest_name",
+ help="temporary manifest to use for this sync",
+ metavar="NAME.xml",
+ )
+ p.add_option(
+ "--clone-bundle",
+ action="store_true",
+ help="enable use of /clone.bundle on HTTP/HTTPS",
+ )
+ p.add_option(
+ "--no-clone-bundle",
+ dest="clone_bundle",
+ action="store_false",
+ help="disable use of /clone.bundle on HTTP/HTTPS",
+ )
+ p.add_option(
+ "-u",
+ "--manifest-server-username",
+ action="store",
+ dest="manifest_server_username",
+ help="username to authenticate with the manifest server",
+ )
+ p.add_option(
+ "-p",
+ "--manifest-server-password",
+ action="store",
+ dest="manifest_server_password",
+ help="password to authenticate with the manifest server",
+ )
+ p.add_option(
+ "--fetch-submodules",
+ dest="fetch_submodules",
+ action="store_true",
+ help="fetch submodules from server",
+ )
+ p.add_option(
+ "--use-superproject",
+ action="store_true",
+ help="use the manifest superproject to sync projects; implies -c",
+ )
+ p.add_option(
+ "--no-use-superproject",
+ action="store_false",
+ dest="use_superproject",
+ help="disable use of manifest superprojects",
+ )
+ p.add_option("--tags", action="store_true", help="fetch tags")
+ p.add_option(
+ "--no-tags",
+ dest="tags",
+ action="store_false",
+ help="don't fetch tags (default)",
+ )
+ p.add_option(
+ "--optimized-fetch",
+ dest="optimized_fetch",
+ action="store_true",
+ help="only fetch projects fixed to sha1 if revision does not exist "
+ "locally",
+ )
+ p.add_option(
+ "--retry-fetches",
+ default=0,
+ action="store",
+ type="int",
+ help="number of times to retry fetches on transient errors",
+ )
+ p.add_option(
+ "--prune",
+ action="store_true",
+ help="delete refs that no longer exist on the remote (default)",
+ )
+ p.add_option(
+ "--no-prune",
+ dest="prune",
+ action="store_false",
+ help="do not delete refs that no longer exist on the remote",
+ )
+ p.add_option(
+ "--auto-gc",
+ action="store_true",
+ default=None,
+ help="run garbage collection on all synced projects",
+ )
+ p.add_option(
+ "--no-auto-gc",
+ dest="auto_gc",
+ action="store_false",
+ help="do not run garbage collection on any projects (default)",
+ )
+ if show_smart:
+ p.add_option(
+ "-s",
+ "--smart-sync",
+ dest="smart_sync",
+ action="store_true",
+ help="smart sync using manifest from the latest known good "
+ "build",
+ )
+ p.add_option(
+ "-t",
+ "--smart-tag",
+ dest="smart_tag",
+ action="store",
+ help="smart sync using manifest from a known tag",
+ )
- g = p.add_option_group('repo Version options')
- g.add_option('--no-repo-verify',
- dest='repo_verify', default=True, action='store_false',
- help='do not verify repo source code')
- g.add_option('--repo-upgraded',
- dest='repo_upgraded', action='store_true',
- help=SUPPRESS_HELP)
+ g = p.add_option_group("repo Version options")
+ g.add_option(
+ "--no-repo-verify",
+ dest="repo_verify",
+ default=True,
+ action="store_false",
+ help="do not verify repo source code",
+ )
+ g.add_option(
+ "--repo-upgraded",
+ dest="repo_upgraded",
+ action="store_true",
+ help=SUPPRESS_HELP,
+ )
- def _GetBranch(self, manifest_project):
- """Returns the branch name for getting the approved smartsync manifest.
+ def _GetBranch(self, manifest_project):
+ """Returns the branch name for getting the approved smartsync manifest.
- Args:
- manifest_project: the manifestProject to query.
- """
- b = manifest_project.GetBranch(manifest_project.CurrentBranch)
- branch = b.merge
- if branch.startswith(R_HEADS):
- branch = branch[len(R_HEADS):]
- return branch
+ Args:
+ manifest_project: The manifestProject to query.
+ """
+ b = manifest_project.GetBranch(manifest_project.CurrentBranch)
+ branch = b.merge
+ if branch.startswith(R_HEADS):
+ branch = branch[len(R_HEADS) :]
+ return branch
- def _GetCurrentBranchOnly(self, opt, manifest):
- """Returns whether current-branch or use-superproject options are enabled.
+ def _GetCurrentBranchOnly(self, opt, manifest):
+ """Returns whether current-branch or use-superproject options are
+ enabled.
- Args:
- opt: Program options returned from optparse. See _Options().
- manifest: The manifest to use.
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ manifest: The manifest to use.
- Returns:
- True if a superproject is requested, otherwise the value of the
- current_branch option (True, False or None).
- """
- return git_superproject.UseSuperproject(opt.use_superproject, manifest) or opt.current_branch_only
+ Returns:
+ True if a superproject is requested, otherwise the value of the
+ current_branch option (True, False or None).
+ """
+ return (
+ git_superproject.UseSuperproject(opt.use_superproject, manifest)
+ or opt.current_branch_only
+ )
- def _UpdateProjectsRevisionId(self, opt, args, superproject_logging_data,
- manifest):
- """Update revisionId of projects with the commit hash from the superproject.
+ def _UpdateProjectsRevisionId(
+ self, opt, args, superproject_logging_data, manifest
+ ):
+ """Update revisionId of projects with the commit from the superproject.
- This function updates each project's revisionId with the commit hash from
- the superproject. It writes the updated manifest into a file and reloads
- the manifest from it. When appropriate, sub manifests are also processed.
+ This function updates each project's revisionId with the commit hash
+ from the superproject. It writes the updated manifest into a file and
+ reloads the manifest from it. When appropriate, sub manifests are also
+ processed.
- Args:
- opt: Program options returned from optparse. See _Options().
- args: Arguments to pass to GetProjects. See the GetProjects
- docstring for details.
- superproject_logging_data: A dictionary of superproject data to log.
- manifest: The manifest to use.
- """
- have_superproject = manifest.superproject or any(
- m.superproject for m in manifest.all_children)
- if not have_superproject:
- return
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ args: Arguments to pass to GetProjects. See the GetProjects
+ docstring for details.
+ superproject_logging_data: A dictionary of superproject data to log.
+ manifest: The manifest to use.
+ """
+ have_superproject = manifest.superproject or any(
+ m.superproject for m in manifest.all_children
+ )
+ if not have_superproject:
+ return
- if opt.local_only and manifest.superproject:
- manifest_path = manifest.superproject.manifest_path
- if manifest_path:
- self._ReloadManifest(manifest_path, manifest)
- return
+ if opt.local_only and manifest.superproject:
+ manifest_path = manifest.superproject.manifest_path
+ if manifest_path:
+ self._ReloadManifest(manifest_path, manifest)
+ return
- all_projects = self.GetProjects(args,
- missing_ok=True,
- submodules_ok=opt.fetch_submodules,
- manifest=manifest,
- all_manifests=not opt.this_manifest_only)
+ all_projects = self.GetProjects(
+ args,
+ missing_ok=True,
+ submodules_ok=opt.fetch_submodules,
+ manifest=manifest,
+ all_manifests=not opt.this_manifest_only,
+ )
- per_manifest = collections.defaultdict(list)
- manifest_paths = {}
- if opt.this_manifest_only:
- per_manifest[manifest.path_prefix] = all_projects
- else:
- for p in all_projects:
- per_manifest[p.manifest.path_prefix].append(p)
+ per_manifest = collections.defaultdict(list)
+ if opt.this_manifest_only:
+ per_manifest[manifest.path_prefix] = all_projects
+ else:
+ for p in all_projects:
+ per_manifest[p.manifest.path_prefix].append(p)
- superproject_logging_data = {}
- need_unload = False
- for m in self.ManifestList(opt):
- if not m.path_prefix in per_manifest:
- continue
- use_super = git_superproject.UseSuperproject(opt.use_superproject, m)
- if superproject_logging_data:
- superproject_logging_data['multimanifest'] = True
- superproject_logging_data.update(
- superproject=use_super,
- haslocalmanifests=bool(m.HasLocalManifests),
- hassuperprojecttag=bool(m.superproject),
- )
- if use_super and (m.IsMirror or m.IsArchive):
- # Don't use superproject, because we have no working tree.
- use_super = False
- superproject_logging_data['superproject'] = False
- superproject_logging_data['noworktree'] = True
- if opt.use_superproject is not False:
- print(f'{m.path_prefix}: not using superproject because there is no '
- 'working tree.')
+ superproject_logging_data = {}
+ need_unload = False
+ for m in self.ManifestList(opt):
+ if m.path_prefix not in per_manifest:
+ continue
+ use_super = git_superproject.UseSuperproject(
+ opt.use_superproject, m
+ )
+ if superproject_logging_data:
+ superproject_logging_data["multimanifest"] = True
+ superproject_logging_data.update(
+ superproject=use_super,
+ haslocalmanifests=bool(m.HasLocalManifests),
+ hassuperprojecttag=bool(m.superproject),
+ )
+ if use_super and (m.IsMirror or m.IsArchive):
+ # Don't use superproject, because we have no working tree.
+ use_super = False
+ superproject_logging_data["superproject"] = False
+ superproject_logging_data["noworktree"] = True
+ if opt.use_superproject is not False:
+ print(
+ f"{m.path_prefix}: not using superproject because "
+ "there is no working tree."
+ )
- if not use_super:
- continue
- m.superproject.SetQuiet(opt.quiet)
- print_messages = git_superproject.PrintMessages(opt.use_superproject, m)
- m.superproject.SetPrintMessages(print_messages)
- update_result = m.superproject.UpdateProjectsRevisionId(
- per_manifest[m.path_prefix], git_event_log=self.git_event_log)
- manifest_path = update_result.manifest_path
- superproject_logging_data['updatedrevisionid'] = bool(manifest_path)
- if manifest_path:
- m.SetManifestOverride(manifest_path)
- need_unload = True
- else:
- if print_messages:
- print(f'{m.path_prefix}: warning: Update of revisionId from '
- 'superproject has failed, repo sync will not use superproject '
- 'to fetch the source. ',
- 'Please resync with the --no-use-superproject option to avoid '
- 'this repo warning.',
- file=sys.stderr)
- if update_result.fatal and opt.use_superproject is not None:
- sys.exit(1)
- if need_unload:
- m.outer_client.manifest.Unload()
+ if not use_super:
+ continue
+ m.superproject.SetQuiet(opt.quiet)
+ print_messages = git_superproject.PrintMessages(
+ opt.use_superproject, m
+ )
+ m.superproject.SetPrintMessages(print_messages)
+ update_result = m.superproject.UpdateProjectsRevisionId(
+ per_manifest[m.path_prefix], git_event_log=self.git_event_log
+ )
+ manifest_path = update_result.manifest_path
+ superproject_logging_data["updatedrevisionid"] = bool(manifest_path)
+ if manifest_path:
+ m.SetManifestOverride(manifest_path)
+ need_unload = True
+ else:
+ if print_messages:
+ print(
+ f"{m.path_prefix}: warning: Update of revisionId from "
+ "superproject has failed, repo sync will not use "
+ "superproject to fetch the source. ",
+ "Please resync with the --no-use-superproject option "
+ "to avoid this repo warning.",
+ file=sys.stderr,
+ )
+ if update_result.fatal and opt.use_superproject is not None:
+ sys.exit(1)
+ if need_unload:
+ m.outer_client.manifest.Unload()
- def _FetchProjectList(self, opt, projects):
- """Main function of the fetch worker.
+ def _FetchProjectList(self, opt, projects):
+ """Main function of the fetch worker.
- The projects we're given share the same underlying git object store, so we
- have to fetch them in serial.
+ The projects we're given share the same underlying git object store, so
+ we have to fetch them in serial.
- Delegates most of the work to _FetchHelper.
+ Delegates most of the work to _FetchHelper.
- Args:
- opt: Program options returned from optparse. See _Options().
- projects: Projects to fetch.
- """
- return [self._FetchOne(opt, x) for x in projects]
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ projects: Projects to fetch.
+ """
+ return [self._FetchOne(opt, x) for x in projects]
- def _FetchOne(self, opt, project):
- """Fetch git objects for a single project.
+ def _FetchOne(self, opt, project):
+ """Fetch git objects for a single project.
- Args:
- opt: Program options returned from optparse. See _Options().
- project: Project object for the project to fetch.
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ project: Project object for the project to fetch.
- Returns:
- Whether the fetch was successful.
- """
- start = time.time()
- success = False
- remote_fetched = False
- buf = io.StringIO()
- try:
- sync_result = project.Sync_NetworkHalf(
- quiet=opt.quiet,
- verbose=opt.verbose,
- output_redir=buf,
- current_branch_only=self._GetCurrentBranchOnly(opt, project.manifest),
- force_sync=opt.force_sync,
- clone_bundle=opt.clone_bundle,
- tags=opt.tags, archive=project.manifest.IsArchive,
- optimized_fetch=opt.optimized_fetch,
- retry_fetches=opt.retry_fetches,
- prune=opt.prune,
- ssh_proxy=self.ssh_proxy,
- clone_filter=project.manifest.CloneFilter,
- partial_clone_exclude=project.manifest.PartialCloneExclude)
- success = sync_result.success
- remote_fetched = sync_result.remote_fetched
+ Returns:
+ Whether the fetch was successful.
+ """
+ start = time.time()
+ success = False
+ remote_fetched = False
+ buf = io.StringIO()
+ try:
+ sync_result = project.Sync_NetworkHalf(
+ quiet=opt.quiet,
+ verbose=opt.verbose,
+ output_redir=buf,
+ current_branch_only=self._GetCurrentBranchOnly(
+ opt, project.manifest
+ ),
+ force_sync=opt.force_sync,
+ clone_bundle=opt.clone_bundle,
+ tags=opt.tags,
+ archive=project.manifest.IsArchive,
+ optimized_fetch=opt.optimized_fetch,
+ retry_fetches=opt.retry_fetches,
+ prune=opt.prune,
+ ssh_proxy=self.ssh_proxy,
+ clone_filter=project.manifest.CloneFilter,
+ partial_clone_exclude=project.manifest.PartialCloneExclude,
+ )
+ success = sync_result.success
+ remote_fetched = sync_result.remote_fetched
- output = buf.getvalue()
- if (opt.verbose or not success) and output:
- print('\n' + output.rstrip())
+ output = buf.getvalue()
+ if (opt.verbose or not success) and output:
+ print("\n" + output.rstrip())
- if not success:
- print('error: Cannot fetch %s from %s'
- % (project.name, project.remote.url),
- file=sys.stderr)
- except KeyboardInterrupt:
- print(f'Keyboard interrupt while processing {project.name}')
- except GitError as e:
- print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr)
- except Exception as e:
- print('error: Cannot fetch %s (%s: %s)'
- % (project.name, type(e).__name__, str(e)), file=sys.stderr)
- raise
+ if not success:
+ print(
+ "error: Cannot fetch %s from %s"
+ % (project.name, project.remote.url),
+ file=sys.stderr,
+ )
+ except KeyboardInterrupt:
+ print(f"Keyboard interrupt while processing {project.name}")
+ except GitError as e:
+ print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
+ except Exception as e:
+ print(
+ "error: Cannot fetch %s (%s: %s)"
+ % (project.name, type(e).__name__, str(e)),
+ file=sys.stderr,
+ )
+ raise
- finish = time.time()
- return _FetchOneResult(success, project, start, finish, remote_fetched)
+ finish = time.time()
+ return _FetchOneResult(success, project, start, finish, remote_fetched)
- @classmethod
- def _FetchInitChild(cls, ssh_proxy):
- cls.ssh_proxy = ssh_proxy
+ @classmethod
+ def _FetchInitChild(cls, ssh_proxy):
+ cls.ssh_proxy = ssh_proxy
- def _Fetch(self, projects, opt, err_event, ssh_proxy):
- ret = True
+ def _Fetch(self, projects, opt, err_event, ssh_proxy):
+ ret = True
- jobs = opt.jobs_network
- fetched = set()
- remote_fetched = set()
- pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
+ jobs = opt.jobs_network
+ fetched = set()
+ remote_fetched = set()
+ pm = Progress("Fetching", len(projects), delay=False, quiet=opt.quiet)
- objdir_project_map = dict()
- for project in projects:
- objdir_project_map.setdefault(project.objdir, []).append(project)
- projects_list = list(objdir_project_map.values())
+ objdir_project_map = dict()
+ for project in projects:
+ objdir_project_map.setdefault(project.objdir, []).append(project)
+ projects_list = list(objdir_project_map.values())
- def _ProcessResults(results_sets):
- ret = True
- for results in results_sets:
- for result in results:
- success = result.success
- project = result.project
- start = result.start
- finish = result.finish
- self._fetch_times.Set(project, finish - start)
- self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
- start, finish, success)
- if result.remote_fetched:
- remote_fetched.add(project)
- # Check for any errors before running any more tasks.
- # ...we'll let existing jobs finish, though.
- if not success:
- ret = False
- else:
- fetched.add(project.gitdir)
- pm.update(msg=f'Last synced: {project.name}')
- if not ret and opt.fail_fast:
- break
- return ret
-
- # We pass the ssh proxy settings via the class. This allows multiprocessing
- # to pickle it up when spawning children. We can't pass it as an argument
- # to _FetchProjectList below as multiprocessing is unable to pickle those.
- Sync.ssh_proxy = None
-
- # NB: Multiprocessing is heavy, so don't spin it up for one job.
- if len(projects_list) == 1 or jobs == 1:
- self._FetchInitChild(ssh_proxy)
- if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list):
- ret = False
- else:
- # Favor throughput over responsiveness when quiet. It seems that imap()
- # will yield results in batches relative to chunksize, so even as the
- # children finish a sync, we won't see the result until one child finishes
- # ~chunksize jobs. When using a large --jobs with large chunksize, this
- # can be jarring as there will be a large initial delay where repo looks
- # like it isn't doing anything and sits at 0%, but then suddenly completes
- # a lot of jobs all at once. Since this code is more network bound, we
- # can accept a bit more CPU overhead with a smaller chunksize so that the
- # user sees more immediate & continuous feedback.
- if opt.quiet:
- chunksize = WORKER_BATCH_SIZE
- else:
- pm.update(inc=0, msg='warming up')
- chunksize = 4
- with multiprocessing.Pool(jobs, initializer=self._FetchInitChild,
- initargs=(ssh_proxy,)) as pool:
- results = pool.imap_unordered(
- functools.partial(self._FetchProjectList, opt),
- projects_list,
- chunksize=chunksize)
- if not _ProcessResults(results):
- ret = False
- pool.close()
-
- # Cleanup the reference now that we're done with it, and we're going to
- # release any resources it points to. If we don't, later multiprocessing
- # usage (e.g. checkouts) will try to pickle and then crash.
- del Sync.ssh_proxy
-
- pm.end()
- self._fetch_times.Save()
-
- if not self.outer_client.manifest.IsArchive:
- self._GCProjects(projects, opt, err_event)
-
- return _FetchResult(ret, fetched)
-
- def _FetchMain(self, opt, args, all_projects, err_event,
- ssh_proxy, manifest):
- """The main network fetch loop.
-
- Args:
- opt: Program options returned from optparse. See _Options().
- args: Command line args used to filter out projects.
- all_projects: List of all projects that should be fetched.
- err_event: Whether an error was hit while processing.
- ssh_proxy: SSH manager for clients & masters.
- manifest: The manifest to use.
-
- Returns:
- List of all projects that should be checked out.
- """
- rp = manifest.repoProject
-
- to_fetch = []
- now = time.time()
- if _ONE_DAY_S <= (now - rp.LastFetch):
- to_fetch.append(rp)
- to_fetch.extend(all_projects)
- to_fetch.sort(key=self._fetch_times.Get, reverse=True)
-
- result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
- success = result.success
- fetched = result.projects
- if not success:
- err_event.set()
-
- _PostRepoFetch(rp, opt.repo_verify)
- if opt.network_only:
- # bail out now; the rest touches the working tree
- if err_event.is_set():
- print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
- sys.exit(1)
- return _FetchMainResult([])
-
- # Iteratively fetch missing and/or nested unregistered submodules
- previously_missing_set = set()
- while True:
- self._ReloadManifest(None, manifest)
- all_projects = self.GetProjects(args,
- missing_ok=True,
- submodules_ok=opt.fetch_submodules,
- manifest=manifest,
- all_manifests=not opt.this_manifest_only)
- missing = []
- for project in all_projects:
- if project.gitdir not in fetched:
- missing.append(project)
- if not missing:
- break
- # Stop us from non-stopped fetching actually-missing repos: If set of
- # missing repos has not been changed from last fetch, we break.
- missing_set = set(p.name for p in missing)
- if previously_missing_set == missing_set:
- break
- previously_missing_set = missing_set
- result = self._Fetch(missing, opt, err_event, ssh_proxy)
- success = result.success
- new_fetched = result.projects
- if not success:
- err_event.set()
- fetched.update(new_fetched)
-
- return _FetchMainResult(all_projects)
-
- def _CheckoutOne(self, detach_head, force_sync, project):
- """Checkout work tree for one project
-
- Args:
- detach_head: Whether to leave a detached HEAD.
- force_sync: Force checking out of the repo.
- project: Project object for the project to checkout.
-
- Returns:
- Whether the fetch was successful.
- """
- start = time.time()
- syncbuf = SyncBuffer(project.manifest.manifestProject.config,
- detach_head=detach_head)
- success = False
- try:
- project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
- success = syncbuf.Finish()
- except GitError as e:
- print('error.GitError: Cannot checkout %s: %s' %
- (project.name, str(e)), file=sys.stderr)
- except Exception as e:
- print('error: Cannot checkout %s: %s: %s' %
- (project.name, type(e).__name__, str(e)),
- file=sys.stderr)
- raise
-
- if not success:
- print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
- finish = time.time()
- return _CheckoutOneResult(success, project, start, finish)
-
- def _Checkout(self, all_projects, opt, err_results):
- """Checkout projects listed in all_projects
-
- Args:
- all_projects: List of all projects that should be checked out.
- opt: Program options returned from optparse. See _Options().
- err_results: A list of strings, paths to git repos where checkout failed.
- """
- # Only checkout projects with worktrees.
- all_projects = [x for x in all_projects if x.worktree]
-
- def _ProcessResults(pool, pm, results):
- ret = True
- for result in results:
- success = result.success
- project = result.project
- start = result.start
- finish = result.finish
- self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
- start, finish, success)
- # Check for any errors before running any more tasks.
- # ...we'll let existing jobs finish, though.
- if not success:
- ret = False
- err_results.append(project.RelPath(local=opt.this_manifest_only))
- if opt.fail_fast:
- if pool:
- pool.close()
+ def _ProcessResults(results_sets):
+ ret = True
+ for results in results_sets:
+ for result in results:
+ success = result.success
+ project = result.project
+ start = result.start
+ finish = result.finish
+ self._fetch_times.Set(project, finish - start)
+ self.event_log.AddSync(
+ project,
+ event_log.TASK_SYNC_NETWORK,
+ start,
+ finish,
+ success,
+ )
+ if result.remote_fetched:
+ remote_fetched.add(project)
+ # Check for any errors before running any more tasks.
+ # ...we'll let existing jobs finish, though.
+ if not success:
+ ret = False
+ else:
+ fetched.add(project.gitdir)
+ pm.update(msg=f"Last synced: {project.name}")
+ if not ret and opt.fail_fast:
+ break
return ret
- pm.update(msg=project.name)
- return ret
- return self.ExecuteInParallel(
- opt.jobs_checkout,
- functools.partial(self._CheckoutOne, opt.detach_head, opt.force_sync),
- all_projects,
- callback=_ProcessResults,
- output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
+ # We pass the ssh proxy settings via the class. This allows
+ # multiprocessing to pickle it up when spawning children. We can't pass
+ # it as an argument to _FetchProjectList below as multiprocessing is
+ # unable to pickle those.
+ Sync.ssh_proxy = None
- @staticmethod
- def _GetPreciousObjectsState(project: Project, opt):
- """Get the preciousObjects state for the project.
-
- Args:
- project (Project): the project to examine, and possibly correct.
- opt (optparse.Values): options given to sync.
-
- Returns:
- Expected state of extensions.preciousObjects:
- False: Should be disabled. (not present)
- True: Should be enabled.
- """
- if project.use_git_worktrees:
- return False
- projects = project.manifest.GetProjectsWithName(project.name,
- all_manifests=True)
- if len(projects) == 1:
- return False
- relpath = project.RelPath(local=opt.this_manifest_only)
- if len(projects) > 1:
- # Objects are potentially shared with another project.
- # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
- # - When False, shared projects share (via symlink)
- # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
- # directory. All objects are precious, since there is no project with a
- # complete set of refs.
- # - When True, shared projects share (via info/alternates)
- # .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
- # which is written only on the first clone of the project, and is not
- # written subsequently. (When Sync_NetworkHalf sees that it exists, it
- # makes sure that the alternates file points there, and uses a
- # project-local .git/objects directory for all syncs going forward.
- # We do not support switching between the options. The environment
- # variable is present for testing and migration only.
- return not project.UseAlternates
-
- return False
-
- def _SetPreciousObjectsState(self, project: Project, opt):
- """Correct the preciousObjects state for the project.
-
- Args:
- project: the project to examine, and possibly correct.
- opt: options given to sync.
- """
- expected = self._GetPreciousObjectsState(project, opt)
- actual = project.config.GetBoolean('extensions.preciousObjects') or False
- relpath = project.RelPath(local=opt.this_manifest_only)
-
- if expected != actual:
- # If this is unexpected, log it and repair.
- Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
- if expected:
- if not opt.quiet:
- print('\r%s: Shared project %s found, disabling pruning.' %
- (relpath, project.name))
- if git_require((2, 7, 0)):
- project.EnableRepositoryExtension('preciousObjects')
+ # NB: Multiprocessing is heavy, so don't spin it up for one job.
+ if len(projects_list) == 1 or jobs == 1:
+ self._FetchInitChild(ssh_proxy)
+ if not _ProcessResults(
+ self._FetchProjectList(opt, x) for x in projects_list
+ ):
+ ret = False
else:
- # This isn't perfect, but it's the best we can do with old git.
- print('\r%s: WARNING: shared projects are unreliable when using '
- 'old versions of git; please upgrade to git-2.7.0+.'
- % (relpath,),
- file=sys.stderr)
- project.config.SetString('gc.pruneExpire', 'never')
- else:
- if not opt.quiet:
- print(f'\r{relpath}: not shared, disabling pruning.')
- project.config.SetString('extensions.preciousObjects', None)
- project.config.SetString('gc.pruneExpire', None)
+ # Favor throughput over responsiveness when quiet. It seems that
+ # imap() will yield results in batches relative to chunksize, so
+ # even as the children finish a sync, we won't see the result until
+ # one child finishes ~chunksize jobs. When using a large --jobs
+ # with large chunksize, this can be jarring as there will be a large
+ # initial delay where repo looks like it isn't doing anything and
+ # sits at 0%, but then suddenly completes a lot of jobs all at once.
+ # Since this code is more network bound, we can accept a bit more
+ # CPU overhead with a smaller chunksize so that the user sees more
+ # immediate & continuous feedback.
+ if opt.quiet:
+ chunksize = WORKER_BATCH_SIZE
+ else:
+ pm.update(inc=0, msg="warming up")
+ chunksize = 4
+ with multiprocessing.Pool(
+ jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
+ ) as pool:
+ results = pool.imap_unordered(
+ functools.partial(self._FetchProjectList, opt),
+ projects_list,
+ chunksize=chunksize,
+ )
+ if not _ProcessResults(results):
+ ret = False
+ pool.close()
- def _GCProjects(self, projects, opt, err_event):
- """Perform garbage collection.
+ # Cleanup the reference now that we're done with it, and we're going to
+ # release any resources it points to. If we don't, later
+ # multiprocessing usage (e.g. checkouts) will try to pickle and then
+ # crash.
+ del Sync.ssh_proxy
- If We are skipping garbage collection (opt.auto_gc not set), we still want
- to potentially mark objects precious, so that `git gc` does not discard
- shared objects.
- """
- if not opt.auto_gc:
- # Just repair preciousObjects state, and return.
- for project in projects:
- self._SetPreciousObjectsState(project, opt)
- return
+ pm.end()
+ self._fetch_times.Save()
- pm = Progress('Garbage collecting', len(projects), delay=False,
- quiet=opt.quiet)
- pm.update(inc=0, msg='prescan')
+ if not self.outer_client.manifest.IsArchive:
+ self._GCProjects(projects, opt, err_event)
- tidy_dirs = {}
- for project in projects:
- self._SetPreciousObjectsState(project, opt)
+ return _FetchResult(ret, fetched)
- project.config.SetString('gc.autoDetach', 'false')
- # Only call git gc once per objdir, but call pack-refs for the remainder.
- if project.objdir not in tidy_dirs:
- tidy_dirs[project.objdir] = (
- True, # Run a full gc.
- project.bare_git,
- )
- elif project.gitdir not in tidy_dirs:
- tidy_dirs[project.gitdir] = (
- False, # Do not run a full gc; just run pack-refs.
- project.bare_git,
- )
+ def _FetchMain(
+ self, opt, args, all_projects, err_event, ssh_proxy, manifest
+ ):
+ """The main network fetch loop.
- jobs = opt.jobs
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ args: Command line args used to filter out projects.
+ all_projects: List of all projects that should be fetched.
+ err_event: Whether an error was hit while processing.
+ ssh_proxy: SSH manager for clients & masters.
+ manifest: The manifest to use.
- if jobs < 2:
- for (run_gc, bare_git) in tidy_dirs.values():
- pm.update(msg=bare_git._project.name)
+ Returns:
+ List of all projects that should be checked out.
+ """
+ rp = manifest.repoProject
- if run_gc:
- bare_git.gc('--auto')
- else:
- bare_git.pack_refs()
- pm.end()
- return
+ to_fetch = []
+ now = time.time()
+ if _ONE_DAY_S <= (now - rp.LastFetch):
+ to_fetch.append(rp)
+ to_fetch.extend(all_projects)
+ to_fetch.sort(key=self._fetch_times.Get, reverse=True)
- cpu_count = os.cpu_count()
- config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1}
+ result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+ success = result.success
+ fetched = result.projects
+ if not success:
+ err_event.set()
- threads = set()
- sem = _threading.Semaphore(jobs)
+ _PostRepoFetch(rp, opt.repo_verify)
+ if opt.network_only:
+ # Bail out now; the rest touches the working tree.
+ if err_event.is_set():
+ print(
+ "\nerror: Exited sync due to fetch errors.\n",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ return _FetchMainResult([])
- def tidy_up(run_gc, bare_git):
- pm.start(bare_git._project.name)
- try:
- try:
- if run_gc:
- bare_git.gc('--auto', config=config)
- else:
- bare_git.pack_refs(config=config)
- except GitError:
- err_event.set()
- except Exception:
- err_event.set()
- raise
- finally:
- pm.finish(bare_git._project.name)
- sem.release()
-
- for (run_gc, bare_git) in tidy_dirs.values():
- if err_event.is_set() and opt.fail_fast:
- break
- sem.acquire()
- t = _threading.Thread(target=tidy_up, args=(run_gc, bare_git,))
- t.daemon = True
- threads.add(t)
- t.start()
-
- for t in threads:
- t.join()
- pm.end()
-
- def _ReloadManifest(self, manifest_name, manifest):
- """Reload the manfiest from the file specified by the |manifest_name|.
-
- It unloads the manifest if |manifest_name| is None.
-
- Args:
- manifest_name: Manifest file to be reloaded.
- manifest: The manifest to use.
- """
- if manifest_name:
- # Override calls Unload already
- manifest.Override(manifest_name)
- else:
- manifest.Unload()
-
- def UpdateProjectList(self, opt, manifest):
- """Update the cached projects list for |manifest|
-
- In a multi-manifest checkout, each manifest has its own project.list.
-
- Args:
- opt: Program options returned from optparse. See _Options().
- manifest: The manifest to use.
-
- Returns:
- 0: success
- 1: failure
- """
- new_project_paths = []
- for project in self.GetProjects(None, missing_ok=True, manifest=manifest,
- all_manifests=False):
- if project.relpath:
- new_project_paths.append(project.relpath)
- file_name = 'project.list'
- file_path = os.path.join(manifest.subdir, file_name)
- old_project_paths = []
-
- if os.path.exists(file_path):
- with open(file_path, 'r') as fd:
- old_project_paths = fd.read().split('\n')
- # In reversed order, so subfolders are deleted before parent folder.
- for path in sorted(old_project_paths, reverse=True):
- if not path:
- continue
- if path not in new_project_paths:
- # If the path has already been deleted, we don't need to do it
- gitdir = os.path.join(manifest.topdir, path, '.git')
- if os.path.exists(gitdir):
- project = Project(
+ # Iteratively fetch missing and/or nested unregistered submodules.
+ previously_missing_set = set()
+ while True:
+ self._ReloadManifest(None, manifest)
+ all_projects = self.GetProjects(
+ args,
+ missing_ok=True,
+ submodules_ok=opt.fetch_submodules,
manifest=manifest,
- name=path,
- remote=RemoteSpec('origin'),
- gitdir=gitdir,
- objdir=gitdir,
- use_git_worktrees=os.path.isfile(gitdir),
- worktree=os.path.join(manifest.topdir, path),
- relpath=path,
- revisionExpr='HEAD',
- revisionId=None,
- groups=None)
- if not project.DeleteWorktree(
- quiet=opt.quiet,
- force=opt.force_remove_dirty):
- return 1
+ all_manifests=not opt.this_manifest_only,
+ )
+ missing = []
+ for project in all_projects:
+ if project.gitdir not in fetched:
+ missing.append(project)
+ if not missing:
+ break
+            # Stop us from endlessly fetching actually-missing repos: if the
+            # set of missing repos has not changed since last fetch, we break.
+ missing_set = set(p.name for p in missing)
+ if previously_missing_set == missing_set:
+ break
+ previously_missing_set = missing_set
+ result = self._Fetch(missing, opt, err_event, ssh_proxy)
+ success = result.success
+ new_fetched = result.projects
+ if not success:
+ err_event.set()
+ fetched.update(new_fetched)
- new_project_paths.sort()
- with open(file_path, 'w') as fd:
- fd.write('\n'.join(new_project_paths))
- fd.write('\n')
- return 0
+ return _FetchMainResult(all_projects)
- def UpdateCopyLinkfileList(self, manifest):
- """Save all dests of copyfile and linkfile, and update them if needed.
+ def _CheckoutOne(self, detach_head, force_sync, project):
+ """Checkout work tree for one project
- Returns:
- Whether update was successful.
- """
- new_paths = {}
- new_linkfile_paths = []
- new_copyfile_paths = []
- for project in self.GetProjects(None, missing_ok=True,
- manifest=manifest, all_manifests=False):
- new_linkfile_paths.extend(x.dest for x in project.linkfiles)
- new_copyfile_paths.extend(x.dest for x in project.copyfiles)
+ Args:
+ detach_head: Whether to leave a detached HEAD.
+ force_sync: Force checking out of the repo.
+ project: Project object for the project to checkout.
- new_paths = {
- 'linkfile': new_linkfile_paths,
- 'copyfile': new_copyfile_paths,
- }
-
- copylinkfile_name = 'copy-link-files.json'
- copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
- old_copylinkfile_paths = {}
-
- if os.path.exists(copylinkfile_path):
- with open(copylinkfile_path, 'rb') as fp:
- try:
- old_copylinkfile_paths = json.load(fp)
- except Exception:
- print('error: %s is not a json formatted file.' %
- copylinkfile_path, file=sys.stderr)
- platform_utils.remove(copylinkfile_path)
- return False
-
- need_remove_files = []
- need_remove_files.extend(
- set(old_copylinkfile_paths.get('linkfile', [])) -
- set(new_linkfile_paths))
- need_remove_files.extend(
- set(old_copylinkfile_paths.get('copyfile', [])) -
- set(new_copyfile_paths))
-
- for need_remove_file in need_remove_files:
- # Try to remove the updated copyfile or linkfile.
- # So, if the file is not exist, nothing need to do.
- platform_utils.remove(need_remove_file, missing_ok=True)
-
- # Create copy-link-files.json, save dest path of "copyfile" and "linkfile".
- with open(copylinkfile_path, 'w', encoding='utf-8') as fp:
- json.dump(new_paths, fp)
- return True
-
- def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
- if not manifest.manifest_server:
- print('error: cannot smart sync: no manifest server defined in '
- 'manifest', file=sys.stderr)
- sys.exit(1)
-
- manifest_server = manifest.manifest_server
- if not opt.quiet:
- print('Using manifest server %s' % manifest_server)
-
- if '@' not in manifest_server:
- username = None
- password = None
- if opt.manifest_server_username and opt.manifest_server_password:
- username = opt.manifest_server_username
- password = opt.manifest_server_password
- else:
- try:
- info = netrc.netrc()
- except IOError:
- # .netrc file does not exist or could not be opened
- pass
- else:
- try:
- parse_result = urllib.parse.urlparse(manifest_server)
- if parse_result.hostname:
- auth = info.authenticators(parse_result.hostname)
- if auth:
- username, _account, password = auth
- else:
- print('No credentials found for %s in .netrc'
- % parse_result.hostname, file=sys.stderr)
- except netrc.NetrcParseError as e:
- print('Error parsing .netrc file: %s' % e, file=sys.stderr)
-
- if (username and password):
- manifest_server = manifest_server.replace('://', '://%s:%s@' %
- (username, password),
- 1)
-
- transport = PersistentTransport(manifest_server)
- if manifest_server.startswith('persistent-'):
- manifest_server = manifest_server[len('persistent-'):]
-
- try:
- server = xmlrpc.client.Server(manifest_server, transport=transport)
- if opt.smart_sync:
- branch = self._GetBranch(manifest.manifestProject)
-
- if 'SYNC_TARGET' in os.environ:
- target = os.environ['SYNC_TARGET']
- [success, manifest_str] = server.GetApprovedManifest(branch, target)
- elif ('TARGET_PRODUCT' in os.environ and
- 'TARGET_BUILD_VARIANT' in os.environ):
- target = '%s-%s' % (os.environ['TARGET_PRODUCT'],
- os.environ['TARGET_BUILD_VARIANT'])
- [success, manifest_str] = server.GetApprovedManifest(branch, target)
- else:
- [success, manifest_str] = server.GetApprovedManifest(branch)
- else:
- assert(opt.smart_tag)
- [success, manifest_str] = server.GetManifest(opt.smart_tag)
-
- if success:
- manifest_name = os.path.basename(smart_sync_manifest_path)
- try:
- with open(smart_sync_manifest_path, 'w') as f:
- f.write(manifest_str)
- except IOError as e:
- print('error: cannot write manifest to %s:\n%s'
- % (smart_sync_manifest_path, e),
- file=sys.stderr)
- sys.exit(1)
- self._ReloadManifest(manifest_name, manifest)
- else:
- print('error: manifest server RPC call failed: %s' %
- manifest_str, file=sys.stderr)
- sys.exit(1)
- except (socket.error, IOError, xmlrpc.client.Fault) as e:
- print('error: cannot connect to manifest server %s:\n%s'
- % (manifest.manifest_server, e), file=sys.stderr)
- sys.exit(1)
- except xmlrpc.client.ProtocolError as e:
- print('error: cannot connect to manifest server %s:\n%d %s'
- % (manifest.manifest_server, e.errcode, e.errmsg),
- file=sys.stderr)
- sys.exit(1)
-
- return manifest_name
-
- def _UpdateAllManifestProjects(self, opt, mp, manifest_name):
- """Fetch & update the local manifest project.
-
- After syncing the manifest project, if the manifest has any sub manifests,
- those are recursively processed.
-
- Args:
- opt: Program options returned from optparse. See _Options().
- mp: the manifestProject to query.
- manifest_name: Manifest file to be reloaded.
- """
- if not mp.standalone_manifest_url:
- self._UpdateManifestProject(opt, mp, manifest_name)
-
- if mp.manifest.submanifests:
- for submanifest in mp.manifest.submanifests.values():
- child = submanifest.repo_client.manifest
- child.manifestProject.SyncWithPossibleInit(
- submanifest,
- current_branch_only=self._GetCurrentBranchOnly(opt, child),
- verbose=opt.verbose,
- tags=opt.tags,
- git_event_log=self.git_event_log,
+ Returns:
+ Whether the fetch was successful.
+ """
+ start = time.time()
+ syncbuf = SyncBuffer(
+ project.manifest.manifestProject.config, detach_head=detach_head
)
- self._UpdateAllManifestProjects(opt, child.manifestProject, None)
-
- def _UpdateManifestProject(self, opt, mp, manifest_name):
- """Fetch & update the local manifest project.
-
- Args:
- opt: Program options returned from optparse. See _Options().
- mp: the manifestProject to query.
- manifest_name: Manifest file to be reloaded.
- """
- if not opt.local_only:
- start = time.time()
- success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose,
- current_branch_only=self._GetCurrentBranchOnly(opt, mp.manifest),
- force_sync=opt.force_sync,
- tags=opt.tags,
- optimized_fetch=opt.optimized_fetch,
- retry_fetches=opt.retry_fetches,
- submodules=mp.manifest.HasSubmodules,
- clone_filter=mp.manifest.CloneFilter,
- partial_clone_exclude=mp.manifest.PartialCloneExclude)
- finish = time.time()
- self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
- start, finish, success)
-
- if mp.HasChanges:
- syncbuf = SyncBuffer(mp.config)
- start = time.time()
- mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
- clean = syncbuf.Finish()
- self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL,
- start, time.time(), clean)
- if not clean:
- sys.exit(1)
- self._ReloadManifest(manifest_name, mp.manifest)
-
- def ValidateOptions(self, opt, args):
- if opt.force_broken:
- print('warning: -f/--force-broken is now the default behavior, and the '
- 'options are deprecated', file=sys.stderr)
- if opt.network_only and opt.detach_head:
- self.OptionParser.error('cannot combine -n and -d')
- if opt.network_only and opt.local_only:
- self.OptionParser.error('cannot combine -n and -l')
- if opt.manifest_name and opt.smart_sync:
- self.OptionParser.error('cannot combine -m and -s')
- if opt.manifest_name and opt.smart_tag:
- self.OptionParser.error('cannot combine -m and -t')
- if opt.manifest_server_username or opt.manifest_server_password:
- if not (opt.smart_sync or opt.smart_tag):
- self.OptionParser.error('-u and -p may only be combined with -s or -t')
- if None in [opt.manifest_server_username, opt.manifest_server_password]:
- self.OptionParser.error('both -u and -p must be given')
-
- if opt.prune is None:
- opt.prune = True
-
- if opt.auto_gc is None and _AUTO_GC:
- print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
- f'{_REPO_AUTO_GC} is deprecated and will be removed in a future',
- 'release. Use `--auto-gc` instead.', file=sys.stderr)
- opt.auto_gc = True
-
- def _ValidateOptionsWithManifest(self, opt, mp):
- """Like ValidateOptions, but after we've updated the manifest.
-
- Needed to handle sync-xxx option defaults in the manifest.
-
- Args:
- opt: The options to process.
- mp: The manifest project to pull defaults from.
- """
- if not opt.jobs:
- # If the user hasn't made a choice, use the manifest value.
- opt.jobs = mp.manifest.default.sync_j
- if opt.jobs:
- # If --jobs has a non-default value, propagate it as the default for
- # --jobs-xxx flags too.
- if not opt.jobs_network:
- opt.jobs_network = opt.jobs
- if not opt.jobs_checkout:
- opt.jobs_checkout = opt.jobs
- else:
- # Neither user nor manifest have made a choice, so setup defaults.
- if not opt.jobs_network:
- opt.jobs_network = 1
- if not opt.jobs_checkout:
- opt.jobs_checkout = DEFAULT_LOCAL_JOBS
- opt.jobs = os.cpu_count()
-
- # Try to stay under user rlimit settings.
- #
- # Since each worker requires at 3 file descriptors to run `git fetch`, use
- # that to scale down the number of jobs. Unfortunately there isn't an easy
- # way to determine this reliably as systems change, but it was last measured
- # by hand in 2011.
- soft_limit, _ = _rlimit_nofile()
- jobs_soft_limit = max(1, (soft_limit - 5) // 3)
- opt.jobs = min(opt.jobs, jobs_soft_limit)
- opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
- opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
-
- def Execute(self, opt, args):
- manifest = self.outer_manifest
- if not opt.outer_manifest:
- manifest = self.manifest
-
- if opt.manifest_name:
- manifest.Override(opt.manifest_name)
-
- manifest_name = opt.manifest_name
- smart_sync_manifest_path = os.path.join(
- manifest.manifestProject.worktree, 'smart_sync_override.xml')
-
- if opt.clone_bundle is None:
- opt.clone_bundle = manifest.CloneBundle
-
- if opt.smart_sync or opt.smart_tag:
- manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path, manifest)
- else:
- if os.path.isfile(smart_sync_manifest_path):
+ success = False
try:
- platform_utils.remove(smart_sync_manifest_path)
- except OSError as e:
- print('error: failed to remove existing smart sync override manifest: %s' %
- e, file=sys.stderr)
+ project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
+ success = syncbuf.Finish()
+ except GitError as e:
+ print(
+ "error.GitError: Cannot checkout %s: %s"
+ % (project.name, str(e)),
+ file=sys.stderr,
+ )
+ except Exception as e:
+ print(
+ "error: Cannot checkout %s: %s: %s"
+ % (project.name, type(e).__name__, str(e)),
+ file=sys.stderr,
+ )
+ raise
- err_event = multiprocessing.Event()
+ if not success:
+ print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
+ finish = time.time()
+ return _CheckoutOneResult(success, project, start, finish)
- rp = manifest.repoProject
- rp.PreSync()
- cb = rp.CurrentBranch
- if cb:
- base = rp.GetBranch(cb).merge
- if not base or not base.startswith('refs/heads/'):
- print('warning: repo is not tracking a remote branch, so it will not '
- 'receive updates; run `repo init --repo-rev=stable` to fix.',
- file=sys.stderr)
+ def _Checkout(self, all_projects, opt, err_results):
+ """Checkout projects listed in all_projects
- for m in self.ManifestList(opt):
- if not m.manifestProject.standalone_manifest_url:
- m.manifestProject.PreSync()
+ Args:
+ all_projects: List of all projects that should be checked out.
+ opt: Program options returned from optparse. See _Options().
+ err_results: A list of strings, paths to git repos where checkout
+ failed.
+ """
+ # Only checkout projects with worktrees.
+ all_projects = [x for x in all_projects if x.worktree]
- if opt.repo_upgraded:
- _PostRepoUpgrade(manifest, quiet=opt.quiet)
+ def _ProcessResults(pool, pm, results):
+ ret = True
+ for result in results:
+ success = result.success
+ project = result.project
+ start = result.start
+ finish = result.finish
+ self.event_log.AddSync(
+ project, event_log.TASK_SYNC_LOCAL, start, finish, success
+ )
+ # Check for any errors before running any more tasks.
+ # ...we'll let existing jobs finish, though.
+ if not success:
+ ret = False
+ err_results.append(
+ project.RelPath(local=opt.this_manifest_only)
+ )
+ if opt.fail_fast:
+ if pool:
+ pool.close()
+ return ret
+ pm.update(msg=project.name)
+ return ret
- mp = manifest.manifestProject
- if opt.mp_update:
- self._UpdateAllManifestProjects(opt, mp, manifest_name)
- else:
- print('Skipping update of local manifest project.')
+ return (
+ self.ExecuteInParallel(
+ opt.jobs_checkout,
+ functools.partial(
+ self._CheckoutOne, opt.detach_head, opt.force_sync
+ ),
+ all_projects,
+ callback=_ProcessResults,
+ output=Progress(
+ "Checking out", len(all_projects), quiet=opt.quiet
+ ),
+ )
+ and not err_results
+ )
- # Now that the manifests are up-to-date, setup options whose defaults might
- # be in the manifest.
- self._ValidateOptionsWithManifest(opt, mp)
+ @staticmethod
+ def _GetPreciousObjectsState(project: Project, opt):
+ """Get the preciousObjects state for the project.
- superproject_logging_data = {}
- self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
- manifest)
+ Args:
+ project (Project): the project to examine, and possibly correct.
+ opt (optparse.Values): options given to sync.
- if self.gitc_manifest:
- gitc_manifest_projects = self.GetProjects(args, missing_ok=True)
- gitc_projects = []
- opened_projects = []
- for project in gitc_manifest_projects:
- if project.relpath in self.gitc_manifest.paths and \
- self.gitc_manifest.paths[project.relpath].old_revision:
- opened_projects.append(project.relpath)
- else:
- gitc_projects.append(project.relpath)
+ Returns:
+ Expected state of extensions.preciousObjects:
+ False: Should be disabled. (not present)
+ True: Should be enabled.
+ """
+ if project.use_git_worktrees:
+ return False
+ projects = project.manifest.GetProjectsWithName(
+ project.name, all_manifests=True
+ )
+ if len(projects) == 1:
+ return False
+ if len(projects) > 1:
+ # Objects are potentially shared with another project.
+ # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
+ # - When False, shared projects share (via symlink)
+ # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only
+ # objects directory. All objects are precious, since there is no
+ # project with a complete set of refs.
+ # - When True, shared projects share (via info/alternates)
+ # .repo/project-objects/{PROJECT_NAME}.git as an alternate object
+ # store, which is written only on the first clone of the project,
+ # and is not written subsequently. (When Sync_NetworkHalf sees
+ # that it exists, it makes sure that the alternates file points
+ # there, and uses a project-local .git/objects directory for all
+ # syncs going forward.
+ # We do not support switching between the options. The environment
+ # variable is present for testing and migration only.
+ return not project.UseAlternates
- if not args:
- gitc_projects = None
+ return False
- if gitc_projects != [] and not opt.local_only:
- print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name)
- manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name)
+ def _SetPreciousObjectsState(self, project: Project, opt):
+ """Correct the preciousObjects state for the project.
+
+ Args:
+ project: the project to examine, and possibly correct.
+ opt: options given to sync.
+ """
+ expected = self._GetPreciousObjectsState(project, opt)
+ actual = (
+ project.config.GetBoolean("extensions.preciousObjects") or False
+ )
+ relpath = project.RelPath(local=opt.this_manifest_only)
+
+ if expected != actual:
+ # If this is unexpected, log it and repair.
+ Trace(
+ f"{relpath} expected preciousObjects={expected}, got {actual}"
+ )
+ if expected:
+ if not opt.quiet:
+ print(
+ "\r%s: Shared project %s found, disabling pruning."
+ % (relpath, project.name)
+ )
+ if git_require((2, 7, 0)):
+ project.EnableRepositoryExtension("preciousObjects")
+ else:
+ # This isn't perfect, but it's the best we can do with old
+ # git.
+ print(
+ "\r%s: WARNING: shared projects are unreliable when "
+ "using old versions of git; please upgrade to "
+ "git-2.7.0+." % (relpath,),
+ file=sys.stderr,
+ )
+ project.config.SetString("gc.pruneExpire", "never")
+ else:
+ if not opt.quiet:
+ print(f"\r{relpath}: not shared, disabling pruning.")
+ project.config.SetString("extensions.preciousObjects", None)
+ project.config.SetString("gc.pruneExpire", None)
+
+ def _GCProjects(self, projects, opt, err_event):
+ """Perform garbage collection.
+
+        If we are skipping garbage collection (opt.auto_gc not set), we still
+ want to potentially mark objects precious, so that `git gc` does not
+ discard shared objects.
+ """
+ if not opt.auto_gc:
+ # Just repair preciousObjects state, and return.
+ for project in projects:
+ self._SetPreciousObjectsState(project, opt)
+ return
+
+ pm = Progress(
+ "Garbage collecting", len(projects), delay=False, quiet=opt.quiet
+ )
+ pm.update(inc=0, msg="prescan")
+
+ tidy_dirs = {}
+ for project in projects:
+ self._SetPreciousObjectsState(project, opt)
+
+ project.config.SetString("gc.autoDetach", "false")
+ # Only call git gc once per objdir, but call pack-refs for the
+ # remainder.
+ if project.objdir not in tidy_dirs:
+ tidy_dirs[project.objdir] = (
+ True, # Run a full gc.
+ project.bare_git,
+ )
+ elif project.gitdir not in tidy_dirs:
+ tidy_dirs[project.gitdir] = (
+ False, # Do not run a full gc; just run pack-refs.
+ project.bare_git,
+ )
+
+ jobs = opt.jobs
+
+ if jobs < 2:
+ for run_gc, bare_git in tidy_dirs.values():
+ pm.update(msg=bare_git._project.name)
+
+ if run_gc:
+ bare_git.gc("--auto")
+ else:
+ bare_git.pack_refs()
+ pm.end()
+ return
+
+ cpu_count = os.cpu_count()
+ config = {"pack.threads": cpu_count // jobs if cpu_count > jobs else 1}
+
+ threads = set()
+ sem = _threading.Semaphore(jobs)
+
+ def tidy_up(run_gc, bare_git):
+ pm.start(bare_git._project.name)
+ try:
+ try:
+ if run_gc:
+ bare_git.gc("--auto", config=config)
+ else:
+ bare_git.pack_refs(config=config)
+ except GitError:
+ err_event.set()
+ except Exception:
+ err_event.set()
+ raise
+ finally:
+ pm.finish(bare_git._project.name)
+ sem.release()
+
+ for run_gc, bare_git in tidy_dirs.values():
+ if err_event.is_set() and opt.fail_fast:
+ break
+ sem.acquire()
+ t = _threading.Thread(
+ target=tidy_up,
+ args=(
+ run_gc,
+ bare_git,
+ ),
+ )
+ t.daemon = True
+ threads.add(t)
+ t.start()
+
+ for t in threads:
+ t.join()
+ pm.end()
+
+ def _ReloadManifest(self, manifest_name, manifest):
+        """Reload the manifest from the file specified by the |manifest_name|.
+
+ It unloads the manifest if |manifest_name| is None.
+
+ Args:
+ manifest_name: Manifest file to be reloaded.
+ manifest: The manifest to use.
+ """
if manifest_name:
- manifest.Override(manifest_name)
+ # Override calls Unload already.
+ manifest.Override(manifest_name)
else:
- manifest.Override(manifest.manifestFile)
- gitc_utils.generate_gitc_manifest(self.gitc_manifest,
- manifest,
- gitc_projects)
- print('GITC client successfully synced.')
+ manifest.Unload()
- # The opened projects need to be synced as normal, therefore we
- # generate a new args list to represent the opened projects.
- # TODO: make this more reliable -- if there's a project name/path overlap,
- # this may choose the wrong project.
- args = [os.path.relpath(manifest.paths[path].worktree, os.getcwd())
- for path in opened_projects]
- if not args:
- return
+ def UpdateProjectList(self, opt, manifest):
+ """Update the cached projects list for |manifest|
- all_projects = self.GetProjects(args,
- missing_ok=True,
- submodules_ok=opt.fetch_submodules,
- manifest=manifest,
- all_manifests=not opt.this_manifest_only)
+ In a multi-manifest checkout, each manifest has its own project.list.
- err_network_sync = False
- err_update_projects = False
- err_update_linkfiles = False
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ manifest: The manifest to use.
- self._fetch_times = _FetchTimes(manifest)
- if not opt.local_only:
- with multiprocessing.Manager() as manager:
- with ssh.ProxyManager(manager) as ssh_proxy:
- # Initialize the socket dir once in the parent.
- ssh_proxy.sock()
- result = self._FetchMain(opt, args, all_projects, err_event,
- ssh_proxy, manifest)
- all_projects = result.all_projects
+ Returns:
+ 0: success
+ 1: failure
+ """
+ new_project_paths = []
+ for project in self.GetProjects(
+ None, missing_ok=True, manifest=manifest, all_manifests=False
+ ):
+ if project.relpath:
+ new_project_paths.append(project.relpath)
+ file_name = "project.list"
+ file_path = os.path.join(manifest.subdir, file_name)
+ old_project_paths = []
- if opt.network_only:
- return
+ if os.path.exists(file_path):
+ with open(file_path, "r") as fd:
+ old_project_paths = fd.read().split("\n")
+            # In reversed order, so subfolders are deleted before parent folders.
+ for path in sorted(old_project_paths, reverse=True):
+ if not path:
+ continue
+ if path not in new_project_paths:
+ # If the path has already been deleted, we don't need to do
+ # it.
+ gitdir = os.path.join(manifest.topdir, path, ".git")
+ if os.path.exists(gitdir):
+ project = Project(
+ manifest=manifest,
+ name=path,
+ remote=RemoteSpec("origin"),
+ gitdir=gitdir,
+ objdir=gitdir,
+ use_git_worktrees=os.path.isfile(gitdir),
+ worktree=os.path.join(manifest.topdir, path),
+ relpath=path,
+ revisionExpr="HEAD",
+ revisionId=None,
+ groups=None,
+ )
+ if not project.DeleteWorktree(
+ quiet=opt.quiet, force=opt.force_remove_dirty
+ ):
+ return 1
- # If we saw an error, exit with code 1 so that other scripts can check.
- if err_event.is_set():
- err_network_sync = True
- if opt.fail_fast:
- print('\nerror: Exited sync due to fetch errors.\n'
- 'Local checkouts *not* updated. Resolve network issues & '
- 'retry.\n'
- '`repo sync -l` will update some local checkouts.',
- file=sys.stderr)
- sys.exit(1)
+ new_project_paths.sort()
+ with open(file_path, "w") as fd:
+ fd.write("\n".join(new_project_paths))
+ fd.write("\n")
+ return 0
- for m in self.ManifestList(opt):
- if m.IsMirror or m.IsArchive:
- # bail out now, we have no working tree
- continue
+ def UpdateCopyLinkfileList(self, manifest):
+ """Save all dests of copyfile and linkfile, and update them if needed.
- if self.UpdateProjectList(opt, m):
- err_event.set()
- err_update_projects = True
- if opt.fail_fast:
- print('\nerror: Local checkouts *not* updated.', file=sys.stderr)
- sys.exit(1)
+ Returns:
+ Whether update was successful.
+ """
+ new_paths = {}
+ new_linkfile_paths = []
+ new_copyfile_paths = []
+ for project in self.GetProjects(
+ None, missing_ok=True, manifest=manifest, all_manifests=False
+ ):
+ new_linkfile_paths.extend(x.dest for x in project.linkfiles)
+ new_copyfile_paths.extend(x.dest for x in project.copyfiles)
- err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
- if err_update_linkfiles:
- err_event.set()
- if opt.fail_fast:
- print('\nerror: Local update copyfile or linkfile failed.', file=sys.stderr)
- sys.exit(1)
+ new_paths = {
+ "linkfile": new_linkfile_paths,
+ "copyfile": new_copyfile_paths,
+ }
- err_results = []
- # NB: We don't exit here because this is the last step.
- err_checkout = not self._Checkout(all_projects, opt, err_results)
- if err_checkout:
- err_event.set()
+ copylinkfile_name = "copy-link-files.json"
+ copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
+ old_copylinkfile_paths = {}
- printed_notices = set()
- # If there's a notice that's supposed to print at the end of the sync,
- # print it now... But avoid printing duplicate messages, and preserve
- # order.
- for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
- if m.notice and m.notice not in printed_notices:
- print(m.notice)
- printed_notices.add(m.notice)
+ if os.path.exists(copylinkfile_path):
+ with open(copylinkfile_path, "rb") as fp:
+ try:
+ old_copylinkfile_paths = json.load(fp)
+ except Exception:
+ print(
+ "error: %s is not a json formatted file."
+ % copylinkfile_path,
+ file=sys.stderr,
+ )
+ platform_utils.remove(copylinkfile_path)
+ return False
- # If we saw an error, exit with code 1 so that other scripts can check.
- if err_event.is_set():
- print('\nerror: Unable to fully sync the tree.', file=sys.stderr)
- if err_network_sync:
- print('error: Downloading network changes failed.', file=sys.stderr)
- if err_update_projects:
- print('error: Updating local project lists failed.', file=sys.stderr)
- if err_update_linkfiles:
- print('error: Updating copyfiles or linkfiles failed.', file=sys.stderr)
- if err_checkout:
- print('error: Checking out local projects failed.', file=sys.stderr)
- if err_results:
- print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr)
- print('Try re-running with "-j1 --fail-fast" to exit at the first error.',
- file=sys.stderr)
- sys.exit(1)
+ need_remove_files = []
+ need_remove_files.extend(
+ set(old_copylinkfile_paths.get("linkfile", []))
+ - set(new_linkfile_paths)
+ )
+ need_remove_files.extend(
+ set(old_copylinkfile_paths.get("copyfile", []))
+ - set(new_copyfile_paths)
+ )
- # Log the previous sync analysis state from the config.
- self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
- 'previous_sync_state')
+ for need_remove_file in need_remove_files:
+ # Try to remove the updated copyfile or linkfile.
+            # If the file does not exist, there is nothing to do.
+ platform_utils.remove(need_remove_file, missing_ok=True)
- # Update and log with the new sync analysis state.
- mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
- self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
- 'current_sync_state')
+ # Create copy-link-files.json, save dest path of "copyfile" and
+ # "linkfile".
+ with open(copylinkfile_path, "w", encoding="utf-8") as fp:
+ json.dump(new_paths, fp)
+ return True
- if not opt.quiet:
- print('repo sync has finished successfully.')
+ def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
+ if not manifest.manifest_server:
+ print(
+ "error: cannot smart sync: no manifest server defined in "
+ "manifest",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ manifest_server = manifest.manifest_server
+ if not opt.quiet:
+ print("Using manifest server %s" % manifest_server)
+
+ if "@" not in manifest_server:
+ username = None
+ password = None
+ if opt.manifest_server_username and opt.manifest_server_password:
+ username = opt.manifest_server_username
+ password = opt.manifest_server_password
+ else:
+ try:
+ info = netrc.netrc()
+ except IOError:
+ # .netrc file does not exist or could not be opened.
+ pass
+ else:
+ try:
+ parse_result = urllib.parse.urlparse(manifest_server)
+ if parse_result.hostname:
+ auth = info.authenticators(parse_result.hostname)
+ if auth:
+ username, _account, password = auth
+ else:
+ print(
+ "No credentials found for %s in .netrc"
+ % parse_result.hostname,
+ file=sys.stderr,
+ )
+ except netrc.NetrcParseError as e:
+ print(
+ "Error parsing .netrc file: %s" % e, file=sys.stderr
+ )
+
+ if username and password:
+ manifest_server = manifest_server.replace(
+ "://", "://%s:%s@" % (username, password), 1
+ )
+
+ transport = PersistentTransport(manifest_server)
+ if manifest_server.startswith("persistent-"):
+ manifest_server = manifest_server[len("persistent-") :]
+
+ try:
+ server = xmlrpc.client.Server(manifest_server, transport=transport)
+ if opt.smart_sync:
+ branch = self._GetBranch(manifest.manifestProject)
+
+ if "SYNC_TARGET" in os.environ:
+ target = os.environ["SYNC_TARGET"]
+ [success, manifest_str] = server.GetApprovedManifest(
+ branch, target
+ )
+ elif (
+ "TARGET_PRODUCT" in os.environ
+ and "TARGET_BUILD_VARIANT" in os.environ
+ ):
+ target = "%s-%s" % (
+ os.environ["TARGET_PRODUCT"],
+ os.environ["TARGET_BUILD_VARIANT"],
+ )
+ [success, manifest_str] = server.GetApprovedManifest(
+ branch, target
+ )
+ else:
+ [success, manifest_str] = server.GetApprovedManifest(branch)
+ else:
+ assert opt.smart_tag
+ [success, manifest_str] = server.GetManifest(opt.smart_tag)
+
+ if success:
+ manifest_name = os.path.basename(smart_sync_manifest_path)
+ try:
+ with open(smart_sync_manifest_path, "w") as f:
+ f.write(manifest_str)
+ except IOError as e:
+ print(
+ "error: cannot write manifest to %s:\n%s"
+ % (smart_sync_manifest_path, e),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ self._ReloadManifest(manifest_name, manifest)
+ else:
+ print(
+ "error: manifest server RPC call failed: %s" % manifest_str,
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ except (socket.error, IOError, xmlrpc.client.Fault) as e:
+ print(
+ "error: cannot connect to manifest server %s:\n%s"
+ % (manifest.manifest_server, e),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ except xmlrpc.client.ProtocolError as e:
+ print(
+ "error: cannot connect to manifest server %s:\n%d %s"
+ % (manifest.manifest_server, e.errcode, e.errmsg),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ return manifest_name
+
+ def _UpdateAllManifestProjects(self, opt, mp, manifest_name):
+ """Fetch & update the local manifest project.
+
+ After syncing the manifest project, if the manifest has any sub
+ manifests, those are recursively processed.
+
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ mp: the manifestProject to query.
+ manifest_name: Manifest file to be reloaded.
+ """
+ if not mp.standalone_manifest_url:
+ self._UpdateManifestProject(opt, mp, manifest_name)
+
+ if mp.manifest.submanifests:
+ for submanifest in mp.manifest.submanifests.values():
+ child = submanifest.repo_client.manifest
+ child.manifestProject.SyncWithPossibleInit(
+ submanifest,
+ current_branch_only=self._GetCurrentBranchOnly(opt, child),
+ verbose=opt.verbose,
+ tags=opt.tags,
+ git_event_log=self.git_event_log,
+ )
+ self._UpdateAllManifestProjects(
+ opt, child.manifestProject, None
+ )
+
+ def _UpdateManifestProject(self, opt, mp, manifest_name):
+ """Fetch & update the local manifest project.
+
+ Args:
+ opt: Program options returned from optparse. See _Options().
+ mp: the manifestProject to query.
+ manifest_name: Manifest file to be reloaded.
+ """
+ if not opt.local_only:
+ start = time.time()
+ success = mp.Sync_NetworkHalf(
+ quiet=opt.quiet,
+ verbose=opt.verbose,
+ current_branch_only=self._GetCurrentBranchOnly(
+ opt, mp.manifest
+ ),
+ force_sync=opt.force_sync,
+ tags=opt.tags,
+ optimized_fetch=opt.optimized_fetch,
+ retry_fetches=opt.retry_fetches,
+ submodules=mp.manifest.HasSubmodules,
+ clone_filter=mp.manifest.CloneFilter,
+ partial_clone_exclude=mp.manifest.PartialCloneExclude,
+ )
+ finish = time.time()
+ self.event_log.AddSync(
+ mp, event_log.TASK_SYNC_NETWORK, start, finish, success
+ )
+
+ if mp.HasChanges:
+ syncbuf = SyncBuffer(mp.config)
+ start = time.time()
+ mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
+ clean = syncbuf.Finish()
+ self.event_log.AddSync(
+ mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
+ )
+ if not clean:
+ sys.exit(1)
+ self._ReloadManifest(manifest_name, mp.manifest)
+
+ def ValidateOptions(self, opt, args):
+ if opt.force_broken:
+ print(
+ "warning: -f/--force-broken is now the default behavior, and "
+ "the options are deprecated",
+ file=sys.stderr,
+ )
+ if opt.network_only and opt.detach_head:
+ self.OptionParser.error("cannot combine -n and -d")
+ if opt.network_only and opt.local_only:
+ self.OptionParser.error("cannot combine -n and -l")
+ if opt.manifest_name and opt.smart_sync:
+ self.OptionParser.error("cannot combine -m and -s")
+ if opt.manifest_name and opt.smart_tag:
+ self.OptionParser.error("cannot combine -m and -t")
+ if opt.manifest_server_username or opt.manifest_server_password:
+ if not (opt.smart_sync or opt.smart_tag):
+ self.OptionParser.error(
+ "-u and -p may only be combined with -s or -t"
+ )
+ if None in [
+ opt.manifest_server_username,
+ opt.manifest_server_password,
+ ]:
+ self.OptionParser.error("both -u and -p must be given")
+
+ if opt.prune is None:
+ opt.prune = True
+
+ if opt.auto_gc is None and _AUTO_GC:
+ print(
+ f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
+ f"{_REPO_AUTO_GC} is deprecated and will be removed in a ",
+ "future release. Use `--auto-gc` instead.",
+ file=sys.stderr,
+ )
+ opt.auto_gc = True
+
+ def _ValidateOptionsWithManifest(self, opt, mp):
+ """Like ValidateOptions, but after we've updated the manifest.
+
+ Needed to handle sync-xxx option defaults in the manifest.
+
+ Args:
+ opt: The options to process.
+ mp: The manifest project to pull defaults from.
+ """
+ if not opt.jobs:
+ # If the user hasn't made a choice, use the manifest value.
+ opt.jobs = mp.manifest.default.sync_j
+ if opt.jobs:
+ # If --jobs has a non-default value, propagate it as the default for
+ # --jobs-xxx flags too.
+ if not opt.jobs_network:
+ opt.jobs_network = opt.jobs
+ if not opt.jobs_checkout:
+ opt.jobs_checkout = opt.jobs
+ else:
+ # Neither user nor manifest have made a choice, so setup defaults.
+ if not opt.jobs_network:
+ opt.jobs_network = 1
+ if not opt.jobs_checkout:
+ opt.jobs_checkout = DEFAULT_LOCAL_JOBS
+ opt.jobs = os.cpu_count()
+
+ # Try to stay under user rlimit settings.
+ #
+ # Since each worker requires at 3 file descriptors to run `git fetch`,
+ # use that to scale down the number of jobs. Unfortunately there isn't
+ # an easy way to determine this reliably as systems change, but it was
+ # last measured by hand in 2011.
+ soft_limit, _ = _rlimit_nofile()
+ jobs_soft_limit = max(1, (soft_limit - 5) // 3)
+ opt.jobs = min(opt.jobs, jobs_soft_limit)
+ opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
+ opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+
+ def Execute(self, opt, args):
+ manifest = self.outer_manifest
+ if not opt.outer_manifest:
+ manifest = self.manifest
+
+ if opt.manifest_name:
+ manifest.Override(opt.manifest_name)
+
+ manifest_name = opt.manifest_name
+ smart_sync_manifest_path = os.path.join(
+ manifest.manifestProject.worktree, "smart_sync_override.xml"
+ )
+
+ if opt.clone_bundle is None:
+ opt.clone_bundle = manifest.CloneBundle
+
+ if opt.smart_sync or opt.smart_tag:
+ manifest_name = self._SmartSyncSetup(
+ opt, smart_sync_manifest_path, manifest
+ )
+ else:
+ if os.path.isfile(smart_sync_manifest_path):
+ try:
+ platform_utils.remove(smart_sync_manifest_path)
+ except OSError as e:
+ print(
+ "error: failed to remove existing smart sync override "
+ "manifest: %s" % e,
+ file=sys.stderr,
+ )
+
+ err_event = multiprocessing.Event()
+
+ rp = manifest.repoProject
+ rp.PreSync()
+ cb = rp.CurrentBranch
+ if cb:
+ base = rp.GetBranch(cb).merge
+ if not base or not base.startswith("refs/heads/"):
+ print(
+ "warning: repo is not tracking a remote branch, so it will "
+ "not receive updates; run `repo init --repo-rev=stable` to "
+ "fix.",
+ file=sys.stderr,
+ )
+
+ for m in self.ManifestList(opt):
+ if not m.manifestProject.standalone_manifest_url:
+ m.manifestProject.PreSync()
+
+ if opt.repo_upgraded:
+ _PostRepoUpgrade(manifest, quiet=opt.quiet)
+
+ mp = manifest.manifestProject
+ if opt.mp_update:
+ self._UpdateAllManifestProjects(opt, mp, manifest_name)
+ else:
+ print("Skipping update of local manifest project.")
+
+ # Now that the manifests are up-to-date, setup options whose defaults
+ # might be in the manifest.
+ self._ValidateOptionsWithManifest(opt, mp)
+
+ superproject_logging_data = {}
+ self._UpdateProjectsRevisionId(
+ opt, args, superproject_logging_data, manifest
+ )
+
+ if self.gitc_manifest:
+ gitc_manifest_projects = self.GetProjects(args, missing_ok=True)
+ gitc_projects = []
+ opened_projects = []
+ for project in gitc_manifest_projects:
+ if (
+ project.relpath in self.gitc_manifest.paths
+ and self.gitc_manifest.paths[project.relpath].old_revision
+ ):
+ opened_projects.append(project.relpath)
+ else:
+ gitc_projects.append(project.relpath)
+
+ if not args:
+ gitc_projects = None
+
+ if gitc_projects != [] and not opt.local_only:
+ print(
+ "Updating GITC client: %s"
+ % self.gitc_manifest.gitc_client_name
+ )
+ manifest = GitcManifest(
+ self.repodir, self.gitc_manifest.gitc_client_name
+ )
+ if manifest_name:
+ manifest.Override(manifest_name)
+ else:
+ manifest.Override(manifest.manifestFile)
+ gitc_utils.generate_gitc_manifest(
+ self.gitc_manifest, manifest, gitc_projects
+ )
+ print("GITC client successfully synced.")
+
+ # The opened projects need to be synced as normal, therefore we
+ # generate a new args list to represent the opened projects.
+ # TODO: make this more reliable -- if there's a project name/path
+ # overlap, this may choose the wrong project.
+ args = [
+ os.path.relpath(manifest.paths[path].worktree, os.getcwd())
+ for path in opened_projects
+ ]
+ if not args:
+ return
+
+ all_projects = self.GetProjects(
+ args,
+ missing_ok=True,
+ submodules_ok=opt.fetch_submodules,
+ manifest=manifest,
+ all_manifests=not opt.this_manifest_only,
+ )
+
+ err_network_sync = False
+ err_update_projects = False
+ err_update_linkfiles = False
+
+ self._fetch_times = _FetchTimes(manifest)
+ if not opt.local_only:
+ with multiprocessing.Manager() as manager:
+ with ssh.ProxyManager(manager) as ssh_proxy:
+ # Initialize the socket dir once in the parent.
+ ssh_proxy.sock()
+ result = self._FetchMain(
+ opt, args, all_projects, err_event, ssh_proxy, manifest
+ )
+ all_projects = result.all_projects
+
+ if opt.network_only:
+ return
+
+ # If we saw an error, exit with code 1 so that other scripts can
+ # check.
+ if err_event.is_set():
+ err_network_sync = True
+ if opt.fail_fast:
+ print(
+ "\nerror: Exited sync due to fetch errors.\n"
+ "Local checkouts *not* updated. Resolve network issues "
+ "& retry.\n"
+ "`repo sync -l` will update some local checkouts.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ for m in self.ManifestList(opt):
+ if m.IsMirror or m.IsArchive:
+ # Bail out now, we have no working tree.
+ continue
+
+ if self.UpdateProjectList(opt, m):
+ err_event.set()
+ err_update_projects = True
+ if opt.fail_fast:
+ print(
+ "\nerror: Local checkouts *not* updated.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
+ if err_update_linkfiles:
+ err_event.set()
+ if opt.fail_fast:
+ print(
+ "\nerror: Local update copyfile or linkfile failed.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ err_results = []
+ # NB: We don't exit here because this is the last step.
+ err_checkout = not self._Checkout(all_projects, opt, err_results)
+ if err_checkout:
+ err_event.set()
+
+ printed_notices = set()
+ # If there's a notice that's supposed to print at the end of the sync,
+ # print it now... But avoid printing duplicate messages, and preserve
+ # order.
+ for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
+ if m.notice and m.notice not in printed_notices:
+ print(m.notice)
+ printed_notices.add(m.notice)
+
+ # If we saw an error, exit with code 1 so that other scripts can check.
+ if err_event.is_set():
+ print("\nerror: Unable to fully sync the tree.", file=sys.stderr)
+ if err_network_sync:
+ print(
+ "error: Downloading network changes failed.",
+ file=sys.stderr,
+ )
+ if err_update_projects:
+ print(
+ "error: Updating local project lists failed.",
+ file=sys.stderr,
+ )
+ if err_update_linkfiles:
+ print(
+ "error: Updating copyfiles or linkfiles failed.",
+ file=sys.stderr,
+ )
+ if err_checkout:
+ print(
+ "error: Checking out local projects failed.",
+ file=sys.stderr,
+ )
+ if err_results:
+ print(
+ "Failing repos:\n%s" % "\n".join(err_results),
+ file=sys.stderr,
+ )
+ print(
+ 'Try re-running with "-j1 --fail-fast" to exit at the first '
+ "error.",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ # Log the previous sync analysis state from the config.
+ self.git_event_log.LogDataConfigEvents(
+ mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
+ )
+
+ # Update and log with the new sync analysis state.
+ mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
+ self.git_event_log.LogDataConfigEvents(
+ mp.config.GetSyncAnalysisStateData(), "current_sync_state"
+ )
+
+ if not opt.quiet:
+ print("repo sync has finished successfully.")
def _PostRepoUpgrade(manifest, quiet=False):
- # Link the docs for the internal .repo/ layout for people
- link = os.path.join(manifest.repodir, 'internal-fs-layout.md')
- if not platform_utils.islink(link):
- target = os.path.join('repo', 'docs', 'internal-fs-layout.md')
- try:
- platform_utils.symlink(target, link)
- except Exception:
- pass
+ # Link the docs for the internal .repo/ layout for people.
+ link = os.path.join(manifest.repodir, "internal-fs-layout.md")
+ if not platform_utils.islink(link):
+ target = os.path.join("repo", "docs", "internal-fs-layout.md")
+ try:
+ platform_utils.symlink(target, link)
+ except Exception:
+ pass
- wrapper = Wrapper()
- if wrapper.NeedSetupGnuPG():
- wrapper.SetupGnuPG(quiet)
- for project in manifest.projects:
- if project.Exists:
- project.PostRepoUpgrade()
+ wrapper = Wrapper()
+ if wrapper.NeedSetupGnuPG():
+ wrapper.SetupGnuPG(quiet)
+ for project in manifest.projects:
+ if project.Exists:
+ project.PostRepoUpgrade()
def _PostRepoFetch(rp, repo_verify=True, verbose=False):
- if rp.HasChanges:
- print('info: A new version of repo is available', file=sys.stderr)
- wrapper = Wrapper()
- try:
- rev = rp.bare_git.describe(rp.GetRevisionId())
- except GitError:
- rev = None
- _, new_rev = wrapper.check_repo_rev(rp.gitdir, rev, repo_verify=repo_verify)
- # See if we're held back due to missing signed tag.
- current_revid = rp.bare_git.rev_parse('HEAD')
- new_revid = rp.bare_git.rev_parse('--verify', new_rev)
- if current_revid != new_revid:
- # We want to switch to the new rev, but also not trash any uncommitted
- # changes. This helps with local testing/hacking.
- # If a local change has been made, we will throw that away.
- # We also have to make sure this will switch to an older commit if that's
- # the latest tag in order to support release rollback.
- try:
- rp.work_git.reset('--keep', new_rev)
- except GitError as e:
- sys.exit(str(e))
- print('info: Restarting repo with latest version', file=sys.stderr)
- raise RepoChangedException(['--repo-upgraded'])
+ if rp.HasChanges:
+ print("info: A new version of repo is available", file=sys.stderr)
+ wrapper = Wrapper()
+ try:
+ rev = rp.bare_git.describe(rp.GetRevisionId())
+ except GitError:
+ rev = None
+ _, new_rev = wrapper.check_repo_rev(
+ rp.gitdir, rev, repo_verify=repo_verify
+ )
+ # See if we're held back due to missing signed tag.
+ current_revid = rp.bare_git.rev_parse("HEAD")
+ new_revid = rp.bare_git.rev_parse("--verify", new_rev)
+ if current_revid != new_revid:
+ # We want to switch to the new rev, but also not trash any
+ # uncommitted changes. This helps with local testing/hacking.
+ # If a local change has been made, we will throw that away.
+ # We also have to make sure this will switch to an older commit if
+ # that's the latest tag in order to support release rollback.
+ try:
+ rp.work_git.reset("--keep", new_rev)
+ except GitError as e:
+ sys.exit(str(e))
+ print("info: Restarting repo with latest version", file=sys.stderr)
+ raise RepoChangedException(["--repo-upgraded"])
+ else:
+ print(
+ "warning: Skipped upgrade to unverified version",
+ file=sys.stderr,
+ )
else:
- print('warning: Skipped upgrade to unverified version', file=sys.stderr)
- else:
- if verbose:
- print('repo version %s is current' % rp.work_git.describe(HEAD),
- file=sys.stderr)
+ if verbose:
+ print(
+ "repo version %s is current" % rp.work_git.describe(HEAD),
+ file=sys.stderr,
+ )
class _FetchTimes(object):
- _ALPHA = 0.5
+ _ALPHA = 0.5
- def __init__(self, manifest):
- self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
- self._times = None
- self._seen = set()
+ def __init__(self, manifest):
+ self._path = os.path.join(manifest.repodir, ".repo_fetchtimes.json")
+ self._times = None
+ self._seen = set()
- def Get(self, project):
- self._Load()
- return self._times.get(project.name, _ONE_DAY_S)
+ def Get(self, project):
+ self._Load()
+ return self._times.get(project.name, _ONE_DAY_S)
- def Set(self, project, t):
- self._Load()
- name = project.name
- old = self._times.get(name, t)
- self._seen.add(name)
- a = self._ALPHA
- self._times[name] = (a * t) + ((1 - a) * old)
+ def Set(self, project, t):
+ self._Load()
+ name = project.name
+ old = self._times.get(name, t)
+ self._seen.add(name)
+ a = self._ALPHA
+ self._times[name] = (a * t) + ((1 - a) * old)
- def _Load(self):
- if self._times is None:
- try:
- with open(self._path) as f:
- self._times = json.load(f)
- except (IOError, ValueError):
- platform_utils.remove(self._path, missing_ok=True)
- self._times = {}
+ def _Load(self):
+ if self._times is None:
+ try:
+ with open(self._path) as f:
+ self._times = json.load(f)
+ except (IOError, ValueError):
+ platform_utils.remove(self._path, missing_ok=True)
+ self._times = {}
- def Save(self):
- if self._times is None:
- return
+ def Save(self):
+ if self._times is None:
+ return
- to_delete = []
- for name in self._times:
- if name not in self._seen:
- to_delete.append(name)
- for name in to_delete:
- del self._times[name]
+ to_delete = []
+ for name in self._times:
+ if name not in self._seen:
+ to_delete.append(name)
+ for name in to_delete:
+ del self._times[name]
- try:
- with open(self._path, 'w') as f:
- json.dump(self._times, f, indent=2)
- except (IOError, TypeError):
- platform_utils.remove(self._path, missing_ok=True)
+ try:
+ with open(self._path, "w") as f:
+ json.dump(self._times, f, indent=2)
+ except (IOError, TypeError):
+ platform_utils.remove(self._path, missing_ok=True)
+
# This is a replacement for xmlrpc.client.Transport using urllib2
# and supporting persistent-http[s]. It cannot change hosts from
@@ -1525,98 +1848,105 @@
class PersistentTransport(xmlrpc.client.Transport):
- def __init__(self, orig_host):
- self.orig_host = orig_host
+ def __init__(self, orig_host):
+ self.orig_host = orig_host
- def request(self, host, handler, request_body, verbose=False):
- with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
- # Python doesn't understand cookies with the #HttpOnly_ prefix
- # Since we're only using them for HTTP, copy the file temporarily,
- # stripping those prefixes away.
- if cookiefile:
- tmpcookiefile = tempfile.NamedTemporaryFile(mode='w')
- tmpcookiefile.write("# HTTP Cookie File")
- try:
- with open(cookiefile) as f:
- for line in f:
- if line.startswith("#HttpOnly_"):
- line = line[len("#HttpOnly_"):]
- tmpcookiefile.write(line)
- tmpcookiefile.flush()
+ def request(self, host, handler, request_body, verbose=False):
+ with GetUrlCookieFile(self.orig_host, not verbose) as (
+ cookiefile,
+ proxy,
+ ):
+            # Python doesn't understand cookies with the #HttpOnly_ prefix.
+ # Since we're only using them for HTTP, copy the file temporarily,
+ # stripping those prefixes away.
+ if cookiefile:
+ tmpcookiefile = tempfile.NamedTemporaryFile(mode="w")
+ tmpcookiefile.write("# HTTP Cookie File")
+ try:
+ with open(cookiefile) as f:
+ for line in f:
+ if line.startswith("#HttpOnly_"):
+ line = line[len("#HttpOnly_") :]
+ tmpcookiefile.write(line)
+ tmpcookiefile.flush()
- cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
- try:
- cookiejar.load()
- except cookielib.LoadError:
- cookiejar = cookielib.CookieJar()
- finally:
- tmpcookiefile.close()
- else:
- cookiejar = cookielib.CookieJar()
+ cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
+ try:
+ cookiejar.load()
+ except cookielib.LoadError:
+ cookiejar = cookielib.CookieJar()
+ finally:
+ tmpcookiefile.close()
+ else:
+ cookiejar = cookielib.CookieJar()
- proxyhandler = urllib.request.ProxyHandler
- if proxy:
- proxyhandler = urllib.request.ProxyHandler({
- "http": proxy,
- "https": proxy})
+ proxyhandler = urllib.request.ProxyHandler
+ if proxy:
+ proxyhandler = urllib.request.ProxyHandler(
+ {"http": proxy, "https": proxy}
+ )
- opener = urllib.request.build_opener(
- urllib.request.HTTPCookieProcessor(cookiejar),
- proxyhandler)
+ opener = urllib.request.build_opener(
+ urllib.request.HTTPCookieProcessor(cookiejar), proxyhandler
+ )
- url = urllib.parse.urljoin(self.orig_host, handler)
- parse_results = urllib.parse.urlparse(url)
+ url = urllib.parse.urljoin(self.orig_host, handler)
+ parse_results = urllib.parse.urlparse(url)
- scheme = parse_results.scheme
- if scheme == 'persistent-http':
- scheme = 'http'
- if scheme == 'persistent-https':
- # If we're proxying through persistent-https, use http. The
- # proxy itself will do the https.
- if proxy:
- scheme = 'http'
- else:
- scheme = 'https'
+ scheme = parse_results.scheme
+ if scheme == "persistent-http":
+ scheme = "http"
+ if scheme == "persistent-https":
+ # If we're proxying through persistent-https, use http. The
+ # proxy itself will do the https.
+ if proxy:
+ scheme = "http"
+ else:
+ scheme = "https"
- # Parse out any authentication information using the base class
- host, extra_headers, _ = self.get_host_info(parse_results.netloc)
+ # Parse out any authentication information using the base class.
+ host, extra_headers, _ = self.get_host_info(parse_results.netloc)
- url = urllib.parse.urlunparse((
- scheme,
- host,
- parse_results.path,
- parse_results.params,
- parse_results.query,
- parse_results.fragment))
+ url = urllib.parse.urlunparse(
+ (
+ scheme,
+ host,
+ parse_results.path,
+ parse_results.params,
+ parse_results.query,
+ parse_results.fragment,
+ )
+ )
- request = urllib.request.Request(url, request_body)
- if extra_headers is not None:
- for (name, header) in extra_headers:
- request.add_header(name, header)
- request.add_header('Content-Type', 'text/xml')
- try:
- response = opener.open(request)
- except urllib.error.HTTPError as e:
- if e.code == 501:
- # We may have been redirected through a login process
- # but our POST turned into a GET. Retry.
- response = opener.open(request)
- else:
- raise
+ request = urllib.request.Request(url, request_body)
+ if extra_headers is not None:
+ for name, header in extra_headers:
+ request.add_header(name, header)
+ request.add_header("Content-Type", "text/xml")
+ try:
+ response = opener.open(request)
+ except urllib.error.HTTPError as e:
+ if e.code == 501:
+ # We may have been redirected through a login process
+ # but our POST turned into a GET. Retry.
+ response = opener.open(request)
+ else:
+ raise
- p, u = xmlrpc.client.getparser()
- # Response should be fairly small, so read it all at once.
- # This way we can show it to the user in case of error (e.g. HTML).
- data = response.read()
- try:
- p.feed(data)
- except xml.parsers.expat.ExpatError as e:
- raise IOError(
- f'Parsing the manifest failed: {e}\n'
- f'Please report this to your manifest server admin.\n'
- f'Here is the full response:\n{data.decode("utf-8")}')
- p.close()
- return u.close()
+ p, u = xmlrpc.client.getparser()
+ # Response should be fairly small, so read it all at once.
+ # This way we can show it to the user in case of error (e.g. HTML).
+ data = response.read()
+ try:
+ p.feed(data)
+ except xml.parsers.expat.ExpatError as e:
+ raise IOError(
+ f"Parsing the manifest failed: {e}\n"
+ f"Please report this to your manifest server admin.\n"
+ f'Here is the full response:\n{data.decode("utf-8")}'
+ )
+ p.close()
+ return u.close()
- def close(self):
- pass
+ def close(self):
+ pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 9c27923..63216af 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -32,69 +32,77 @@
def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool:
- """Perform basic safety checks on the given set of branches.
+ """Perform basic safety checks on the given set of branches.
- Ensures that each branch does not have a "large" number of commits
- and, if so, prompts the user to confirm they want to proceed with
- the upload.
+ Ensures that each branch does not have a "large" number of commits
+ and, if so, prompts the user to confirm they want to proceed with
+ the upload.
- Returns true if all branches pass the safety check or the user
- confirmed. Returns false if the upload should be aborted.
- """
+ Returns true if all branches pass the safety check or the user
+ confirmed. Returns false if the upload should be aborted.
+ """
- # Determine if any branch has a suspicious number of commits.
- many_commits = False
- for branch in branches:
- # Get the user's unusual threshold for the branch.
- #
- # Each branch may be configured to have a different threshold.
- remote = branch.project.GetBranch(branch.name).remote
- key = f'review.{remote.review}.uploadwarningthreshold'
- threshold = branch.project.config.GetInt(key)
- if threshold is None:
- threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
+ # Determine if any branch has a suspicious number of commits.
+ many_commits = False
+ for branch in branches:
+ # Get the user's unusual threshold for the branch.
+ #
+ # Each branch may be configured to have a different threshold.
+ remote = branch.project.GetBranch(branch.name).remote
+ key = f"review.{remote.review}.uploadwarningthreshold"
+ threshold = branch.project.config.GetInt(key)
+ if threshold is None:
+ threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
- # If the branch has more commits than the threshold, show a warning.
- if len(branch.commits) > threshold:
- many_commits = True
- break
+ # If the branch has more commits than the threshold, show a warning.
+ if len(branch.commits) > threshold:
+ many_commits = True
+ break
- # If any branch has many commits, prompt the user.
- if many_commits:
- if len(branches) > 1:
- print('ATTENTION: One or more branches has an unusually high number '
- 'of commits.')
- else:
- print('ATTENTION: You are uploading an unusually high number of commits.')
- print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
- 'branches?)')
- answer = input(
- "If you are sure you intend to do this, type 'yes': ").strip()
- return answer == 'yes'
+ # If any branch has many commits, prompt the user.
+ if many_commits:
+ if len(branches) > 1:
+ print(
+ "ATTENTION: One or more branches has an unusually high number "
+ "of commits."
+ )
+ else:
+ print(
+ "ATTENTION: You are uploading an unusually high number of "
+ "commits."
+ )
+ print(
+ "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across "
+ "branches?)"
+ )
+ answer = input(
+ "If you are sure you intend to do this, type 'yes': "
+ ).strip()
+ return answer == "yes"
- return True
+ return True
def _die(fmt, *args):
- msg = fmt % args
- print('error: %s' % msg, file=sys.stderr)
- sys.exit(1)
+ msg = fmt % args
+ print("error: %s" % msg, file=sys.stderr)
+ sys.exit(1)
def _SplitEmails(values):
- result = []
- for value in values:
- result.extend([s.strip() for s in value.split(',')])
- return result
+ result = []
+ for value in values:
+ result.extend([s.strip() for s in value.split(",")])
+ return result
class Upload(InteractiveCommand):
- COMMON = True
- helpSummary = "Upload changes for code review"
- helpUsage = """
+ COMMON = True
+ helpSummary = "Upload changes for code review"
+ helpUsage = """
%prog [--re --cc] [<project>]...
"""
- helpDescription = """
+ helpDescription = """
The '%prog' command is used to send changes to the Gerrit Code
Review system. It searches for topic branches in local projects
that have not yet been published for review. If multiple topic
@@ -195,443 +203,611 @@
Gerrit Code Review: https://www.gerritcodereview.com/
"""
- PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+ PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
- def _Options(self, p):
- p.add_option('-t',
- dest='auto_topic', action='store_true',
- help='send local branch name to Gerrit Code Review')
- p.add_option('--hashtag', '--ht',
- dest='hashtags', action='append', default=[],
- help='add hashtags (comma delimited) to the review')
- p.add_option('--hashtag-branch', '--htb',
- action='store_true',
- help='add local branch name as a hashtag')
- p.add_option('-l', '--label',
- dest='labels', action='append', default=[],
- help='add a label when uploading')
- p.add_option('--re', '--reviewers',
- type='string', action='append', dest='reviewers',
- help='request reviews from these people')
- p.add_option('--cc',
- type='string', action='append', dest='cc',
- help='also send email to these email addresses')
- p.add_option('--br', '--branch',
- type='string', action='store', dest='branch',
- help='(local) branch to upload')
- p.add_option('-c', '--current-branch',
- dest='current_branch', action='store_true',
- help='upload current git branch')
- p.add_option('--no-current-branch',
- dest='current_branch', action='store_false',
- help='upload all git branches')
- # Turn this into a warning & remove this someday.
- p.add_option('--cbr',
- dest='current_branch', action='store_true',
- help=optparse.SUPPRESS_HELP)
- p.add_option('--ne', '--no-emails',
- action='store_false', dest='notify', default=True,
- help='do not send e-mails on upload')
- p.add_option('-p', '--private',
- action='store_true', dest='private', default=False,
- help='upload as a private change (deprecated; use --wip)')
- p.add_option('-w', '--wip',
- action='store_true', dest='wip', default=False,
- help='upload as a work-in-progress change')
- p.add_option('-r', '--ready',
- action='store_true', default=False,
- help='mark change as ready (clears work-in-progress setting)')
- p.add_option('-o', '--push-option',
- type='string', action='append', dest='push_options',
- default=[],
- help='additional push options to transmit')
- p.add_option('-D', '--destination', '--dest',
- type='string', action='store', dest='dest_branch',
- metavar='BRANCH',
- help='submit for review on this target branch')
- p.add_option('-n', '--dry-run',
- dest='dryrun', default=False, action='store_true',
- help='do everything except actually upload the CL')
- p.add_option('-y', '--yes',
- default=False, action='store_true',
- help='answer yes to all safe prompts')
- p.add_option('--ignore-untracked-files',
- action='store_true', default=False,
- help='ignore untracked files in the working copy')
- p.add_option('--no-ignore-untracked-files',
- dest='ignore_untracked_files', action='store_false',
- help='always ask about untracked files in the working copy')
- p.add_option('--no-cert-checks',
- dest='validate_certs', action='store_false', default=True,
- help='disable verifying ssl certs (unsafe)')
- RepoHook.AddOptionGroup(p, 'pre-upload')
+ def _Options(self, p):
+ p.add_option(
+ "-t",
+ dest="auto_topic",
+ action="store_true",
+ help="send local branch name to Gerrit Code Review",
+ )
+ p.add_option(
+ "--hashtag",
+ "--ht",
+ dest="hashtags",
+ action="append",
+ default=[],
+ help="add hashtags (comma delimited) to the review",
+ )
+ p.add_option(
+ "--hashtag-branch",
+ "--htb",
+ action="store_true",
+ help="add local branch name as a hashtag",
+ )
+ p.add_option(
+ "-l",
+ "--label",
+ dest="labels",
+ action="append",
+ default=[],
+ help="add a label when uploading",
+ )
+ p.add_option(
+ "--re",
+ "--reviewers",
+ type="string",
+ action="append",
+ dest="reviewers",
+ help="request reviews from these people",
+ )
+ p.add_option(
+ "--cc",
+ type="string",
+ action="append",
+ dest="cc",
+ help="also send email to these email addresses",
+ )
+ p.add_option(
+ "--br",
+ "--branch",
+ type="string",
+ action="store",
+ dest="branch",
+ help="(local) branch to upload",
+ )
+ p.add_option(
+ "-c",
+ "--current-branch",
+ dest="current_branch",
+ action="store_true",
+ help="upload current git branch",
+ )
+ p.add_option(
+ "--no-current-branch",
+ dest="current_branch",
+ action="store_false",
+ help="upload all git branches",
+ )
+ # Turn this into a warning & remove this someday.
+ p.add_option(
+ "--cbr",
+ dest="current_branch",
+ action="store_true",
+ help=optparse.SUPPRESS_HELP,
+ )
+ p.add_option(
+ "--ne",
+ "--no-emails",
+ action="store_false",
+ dest="notify",
+ default=True,
+ help="do not send e-mails on upload",
+ )
+ p.add_option(
+ "-p",
+ "--private",
+ action="store_true",
+ dest="private",
+ default=False,
+ help="upload as a private change (deprecated; use --wip)",
+ )
+ p.add_option(
+ "-w",
+ "--wip",
+ action="store_true",
+ dest="wip",
+ default=False,
+ help="upload as a work-in-progress change",
+ )
+ p.add_option(
+ "-r",
+ "--ready",
+ action="store_true",
+ default=False,
+ help="mark change as ready (clears work-in-progress setting)",
+ )
+ p.add_option(
+ "-o",
+ "--push-option",
+ type="string",
+ action="append",
+ dest="push_options",
+ default=[],
+ help="additional push options to transmit",
+ )
+ p.add_option(
+ "-D",
+ "--destination",
+ "--dest",
+ type="string",
+ action="store",
+ dest="dest_branch",
+ metavar="BRANCH",
+ help="submit for review on this target branch",
+ )
+ p.add_option(
+ "-n",
+ "--dry-run",
+ dest="dryrun",
+ default=False,
+ action="store_true",
+ help="do everything except actually upload the CL",
+ )
+ p.add_option(
+ "-y",
+ "--yes",
+ default=False,
+ action="store_true",
+ help="answer yes to all safe prompts",
+ )
+ p.add_option(
+ "--ignore-untracked-files",
+ action="store_true",
+ default=False,
+ help="ignore untracked files in the working copy",
+ )
+ p.add_option(
+ "--no-ignore-untracked-files",
+ dest="ignore_untracked_files",
+ action="store_false",
+ help="always ask about untracked files in the working copy",
+ )
+ p.add_option(
+ "--no-cert-checks",
+ dest="validate_certs",
+ action="store_false",
+ default=True,
+ help="disable verifying ssl certs (unsafe)",
+ )
+ RepoHook.AddOptionGroup(p, "pre-upload")
- def _SingleBranch(self, opt, branch, people):
- project = branch.project
- name = branch.name
- remote = project.GetBranch(name).remote
-
- key = 'review.%s.autoupload' % remote.review
- answer = project.config.GetBoolean(key)
-
- if answer is False:
- _die("upload blocked by %s = false" % key)
-
- if answer is None:
- date = branch.date
- commit_list = branch.commits
-
- destination = opt.dest_branch or project.dest_branch or project.revisionExpr
- print('Upload project %s/ to remote branch %s%s:' %
- (project.RelPath(local=opt.this_manifest_only), destination,
- ' (private)' if opt.private else ''))
- print(' branch %s (%2d commit%s, %s):' % (
- name,
- len(commit_list),
- len(commit_list) != 1 and 's' or '',
- date))
- for commit in commit_list:
- print(' %s' % commit)
-
- print('to %s (y/N)? ' % remote.review, end='', flush=True)
- if opt.yes:
- print('<--yes>')
- answer = True
- else:
- answer = sys.stdin.readline().strip().lower()
- answer = answer in ('y', 'yes', '1', 'true', 't')
- if not answer:
- _die("upload aborted by user")
-
- # Perform some basic safety checks prior to uploading.
- if not opt.yes and not _VerifyPendingCommits([branch]):
- _die("upload aborted by user")
-
- self._UploadAndReport(opt, [branch], people)
-
- def _MultipleBranches(self, opt, pending, people):
- projects = {}
- branches = {}
-
- script = []
- script.append('# Uncomment the branches to upload:')
- for project, avail in pending:
- project_path = project.RelPath(local=opt.this_manifest_only)
- script.append('#')
- script.append(f'# project {project_path}/:')
-
- b = {}
- for branch in avail:
- if branch is None:
- continue
+ def _SingleBranch(self, opt, branch, people):
+ project = branch.project
name = branch.name
- date = branch.date
- commit_list = branch.commits
+ remote = project.GetBranch(name).remote
- if b:
- script.append('#')
- destination = opt.dest_branch or project.dest_branch or project.revisionExpr
- script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % (
- name,
- len(commit_list),
- len(commit_list) != 1 and 's' or '',
- date,
- destination))
- for commit in commit_list:
- script.append('# %s' % commit)
- b[name] = branch
+ key = "review.%s.autoupload" % remote.review
+ answer = project.config.GetBoolean(key)
- projects[project_path] = project
- branches[project_path] = b
- script.append('')
+ if answer is False:
+ _die("upload blocked by %s = false" % key)
- script = Editor.EditString("\n".join(script)).split("\n")
+ if answer is None:
+ date = branch.date
+ commit_list = branch.commits
- project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$')
- branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*')
+ destination = (
+ opt.dest_branch or project.dest_branch or project.revisionExpr
+ )
+ print(
+ "Upload project %s/ to remote branch %s%s:"
+ % (
+ project.RelPath(local=opt.this_manifest_only),
+ destination,
+ " (private)" if opt.private else "",
+ )
+ )
+ print(
+ " branch %s (%2d commit%s, %s):"
+ % (
+ name,
+ len(commit_list),
+ len(commit_list) != 1 and "s" or "",
+ date,
+ )
+ )
+ for commit in commit_list:
+ print(" %s" % commit)
- project = None
- todo = []
-
- for line in script:
- m = project_re.match(line)
- if m:
- name = m.group(1)
- project = projects.get(name)
- if not project:
- _die('project %s not available for upload', name)
- continue
-
- m = branch_re.match(line)
- if m:
- name = m.group(1)
- if not project:
- _die('project for branch %s not in script', name)
- project_path = project.RelPath(local=opt.this_manifest_only)
- branch = branches[project_path].get(name)
- if not branch:
- _die('branch %s not in %s', name, project_path)
- todo.append(branch)
- if not todo:
- _die("nothing uncommented for upload")
-
- # Perform some basic safety checks prior to uploading.
- if not opt.yes and not _VerifyPendingCommits(todo):
- _die("upload aborted by user")
-
- self._UploadAndReport(opt, todo, people)
-
- def _AppendAutoList(self, branch, people):
- """
- Appends the list of reviewers in the git project's config.
- Appends the list of users in the CC list in the git project's config if a
- non-empty reviewer list was found.
- """
- name = branch.name
- project = branch.project
-
- key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
- raw_list = project.config.GetString(key)
- if raw_list is not None:
- people[0].extend([entry.strip() for entry in raw_list.split(',')])
-
- key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
- raw_list = project.config.GetString(key)
- if raw_list is not None and len(people[0]) > 0:
- people[1].extend([entry.strip() for entry in raw_list.split(',')])
-
- def _FindGerritChange(self, branch):
- last_pub = branch.project.WasPublished(branch.name)
- if last_pub is None:
- return ""
-
- refs = branch.GetPublishedRefs()
- try:
- # refs/changes/XYZ/N --> XYZ
- return refs.get(last_pub).split('/')[-2]
- except (AttributeError, IndexError):
- return ""
-
- def _UploadAndReport(self, opt, todo, original_people):
- have_errors = False
- for branch in todo:
- try:
- people = copy.deepcopy(original_people)
- self._AppendAutoList(branch, people)
-
- # Check if there are local changes that may have been forgotten
- changes = branch.project.UncommitedFiles()
- if opt.ignore_untracked_files:
- untracked = set(branch.project.UntrackedFiles())
- changes = [x for x in changes if x not in untracked]
-
- if changes:
- key = 'review.%s.autoupload' % branch.project.remote.review
- answer = branch.project.config.GetBoolean(key)
-
- # if they want to auto upload, let's not ask because it could be automated
- if answer is None:
- print()
- print('Uncommitted changes in %s (did you forget to amend?):'
- % branch.project.name)
- print('\n'.join(changes))
- print('Continue uploading? (y/N) ', end='', flush=True)
+ print("to %s (y/N)? " % remote.review, end="", flush=True)
if opt.yes:
- print('<--yes>')
- a = 'yes'
+ print("<--yes>")
+ answer = True
else:
- a = sys.stdin.readline().strip().lower()
- if a not in ('y', 'yes', 't', 'true', 'on'):
- print("skipping upload", file=sys.stderr)
- branch.uploaded = False
- branch.error = 'User aborted'
- continue
+ answer = sys.stdin.readline().strip().lower()
+ answer = answer in ("y", "yes", "1", "true", "t")
+ if not answer:
+ _die("upload aborted by user")
- # Check if topic branches should be sent to the server during upload
- if opt.auto_topic is not True:
- key = 'review.%s.uploadtopic' % branch.project.remote.review
- opt.auto_topic = branch.project.config.GetBoolean(key)
+ # Perform some basic safety checks prior to uploading.
+ if not opt.yes and not _VerifyPendingCommits([branch]):
+ _die("upload aborted by user")
- def _ExpandCommaList(value):
- """Split |value| up into comma delimited entries."""
- if not value:
- return
- for ret in value.split(','):
- ret = ret.strip()
- if ret:
- yield ret
+ self._UploadAndReport(opt, [branch], people)
- # Check if hashtags should be included.
- key = 'review.%s.uploadhashtags' % branch.project.remote.review
- hashtags = set(_ExpandCommaList(branch.project.config.GetString(key)))
- for tag in opt.hashtags:
- hashtags.update(_ExpandCommaList(tag))
- if opt.hashtag_branch:
- hashtags.add(branch.name)
+ def _MultipleBranches(self, opt, pending, people):
+ projects = {}
+ branches = {}
- # Check if labels should be included.
- key = 'review.%s.uploadlabels' % branch.project.remote.review
- labels = set(_ExpandCommaList(branch.project.config.GetString(key)))
- for label in opt.labels:
- labels.update(_ExpandCommaList(label))
+ script = []
+ script.append("# Uncomment the branches to upload:")
+ for project, avail in pending:
+ project_path = project.RelPath(local=opt.this_manifest_only)
+ script.append("#")
+ script.append(f"# project {project_path}/:")
- # Handle e-mail notifications.
- if opt.notify is False:
- notify = 'NONE'
+ b = {}
+ for branch in avail:
+ if branch is None:
+ continue
+ name = branch.name
+ date = branch.date
+ commit_list = branch.commits
+
+ if b:
+ script.append("#")
+ destination = (
+ opt.dest_branch
+ or project.dest_branch
+ or project.revisionExpr
+ )
+ script.append(
+ "# branch %s (%2d commit%s, %s) to remote branch %s:"
+ % (
+ name,
+ len(commit_list),
+ len(commit_list) != 1 and "s" or "",
+ date,
+ destination,
+ )
+ )
+ for commit in commit_list:
+ script.append("# %s" % commit)
+ b[name] = branch
+
+ projects[project_path] = project
+ branches[project_path] = b
+ script.append("")
+
+ script = Editor.EditString("\n".join(script)).split("\n")
+
+ project_re = re.compile(r"^#?\s*project\s*([^\s]+)/:$")
+ branch_re = re.compile(r"^\s*branch\s*([^\s(]+)\s*\(.*")
+
+ project = None
+ todo = []
+
+ for line in script:
+ m = project_re.match(line)
+ if m:
+ name = m.group(1)
+ project = projects.get(name)
+ if not project:
+ _die("project %s not available for upload", name)
+ continue
+
+ m = branch_re.match(line)
+ if m:
+ name = m.group(1)
+ if not project:
+ _die("project for branch %s not in script", name)
+ project_path = project.RelPath(local=opt.this_manifest_only)
+ branch = branches[project_path].get(name)
+ if not branch:
+ _die("branch %s not in %s", name, project_path)
+ todo.append(branch)
+ if not todo:
+ _die("nothing uncommented for upload")
+
+ # Perform some basic safety checks prior to uploading.
+ if not opt.yes and not _VerifyPendingCommits(todo):
+ _die("upload aborted by user")
+
+ self._UploadAndReport(opt, todo, people)
+
+ def _AppendAutoList(self, branch, people):
+ """
+ Appends the list of reviewers in the git project's config.
+ Appends the list of users in the CC list in the git project's config if
+ a non-empty reviewer list was found.
+ """
+ name = branch.name
+ project = branch.project
+
+ key = "review.%s.autoreviewer" % project.GetBranch(name).remote.review
+ raw_list = project.config.GetString(key)
+ if raw_list is not None:
+ people[0].extend([entry.strip() for entry in raw_list.split(",")])
+
+ key = "review.%s.autocopy" % project.GetBranch(name).remote.review
+ raw_list = project.config.GetString(key)
+ if raw_list is not None and len(people[0]) > 0:
+ people[1].extend([entry.strip() for entry in raw_list.split(",")])
+
+ def _FindGerritChange(self, branch):
+ last_pub = branch.project.WasPublished(branch.name)
+ if last_pub is None:
+ return ""
+
+ refs = branch.GetPublishedRefs()
+ try:
+ # refs/changes/XYZ/N --> XYZ
+ return refs.get(last_pub).split("/")[-2]
+ except (AttributeError, IndexError):
+ return ""
+
+ def _UploadAndReport(self, opt, todo, original_people):
+ have_errors = False
+ for branch in todo:
+ try:
+ people = copy.deepcopy(original_people)
+ self._AppendAutoList(branch, people)
+
+ # Check if there are local changes that may have been forgotten.
+ changes = branch.project.UncommitedFiles()
+ if opt.ignore_untracked_files:
+ untracked = set(branch.project.UntrackedFiles())
+ changes = [x for x in changes if x not in untracked]
+
+ if changes:
+ key = "review.%s.autoupload" % branch.project.remote.review
+ answer = branch.project.config.GetBoolean(key)
+
+ # If they want to auto upload, let's not ask because it
+ # could be automated.
+ if answer is None:
+ print()
+ print(
+ "Uncommitted changes in %s (did you forget to "
+ "amend?):" % branch.project.name
+ )
+ print("\n".join(changes))
+ print("Continue uploading? (y/N) ", end="", flush=True)
+ if opt.yes:
+ print("<--yes>")
+ a = "yes"
+ else:
+ a = sys.stdin.readline().strip().lower()
+ if a not in ("y", "yes", "t", "true", "on"):
+ print("skipping upload", file=sys.stderr)
+ branch.uploaded = False
+ branch.error = "User aborted"
+ continue
+
+ # Check if topic branches should be sent to the server during
+ # upload.
+ if opt.auto_topic is not True:
+ key = "review.%s.uploadtopic" % branch.project.remote.review
+ opt.auto_topic = branch.project.config.GetBoolean(key)
+
+ def _ExpandCommaList(value):
+ """Split |value| up into comma delimited entries."""
+ if not value:
+ return
+ for ret in value.split(","):
+ ret = ret.strip()
+ if ret:
+ yield ret
+
+ # Check if hashtags should be included.
+ key = "review.%s.uploadhashtags" % branch.project.remote.review
+ hashtags = set(
+ _ExpandCommaList(branch.project.config.GetString(key))
+ )
+ for tag in opt.hashtags:
+ hashtags.update(_ExpandCommaList(tag))
+ if opt.hashtag_branch:
+ hashtags.add(branch.name)
+
+ # Check if labels should be included.
+ key = "review.%s.uploadlabels" % branch.project.remote.review
+ labels = set(
+ _ExpandCommaList(branch.project.config.GetString(key))
+ )
+ for label in opt.labels:
+ labels.update(_ExpandCommaList(label))
+
+ # Handle e-mail notifications.
+ if opt.notify is False:
+ notify = "NONE"
+ else:
+ key = (
+ "review.%s.uploadnotify" % branch.project.remote.review
+ )
+ notify = branch.project.config.GetString(key)
+
+ destination = opt.dest_branch or branch.project.dest_branch
+
+ if branch.project.dest_branch and not opt.dest_branch:
+ merge_branch = self._GetMergeBranch(
+ branch.project, local_branch=branch.name
+ )
+
+ full_dest = destination
+ if not full_dest.startswith(R_HEADS):
+ full_dest = R_HEADS + full_dest
+
+ # If the merge branch of the local branch is different from
+ # the project's revision AND destination, this might not be
+ # intentional.
+ if (
+ merge_branch
+ and merge_branch != branch.project.revisionExpr
+ and merge_branch != full_dest
+ ):
+ print(
+ f"For local branch {branch.name}: merge branch "
+ f"{merge_branch} does not match destination branch "
+ f"{destination}"
+ )
+ print("skipping upload.")
+ print(
+ f"Please use `--destination {destination}` if this "
+ "is intentional"
+ )
+ branch.uploaded = False
+ continue
+
+ branch.UploadForReview(
+ people,
+ dryrun=opt.dryrun,
+ auto_topic=opt.auto_topic,
+ hashtags=hashtags,
+ labels=labels,
+ private=opt.private,
+ notify=notify,
+ wip=opt.wip,
+ ready=opt.ready,
+ dest_branch=destination,
+ validate_certs=opt.validate_certs,
+ push_options=opt.push_options,
+ )
+
+ branch.uploaded = True
+ except UploadError as e:
+ branch.error = e
+ branch.uploaded = False
+ have_errors = True
+
+ print(file=sys.stderr)
+ print("-" * 70, file=sys.stderr)
+
+ if have_errors:
+ for branch in todo:
+ if not branch.uploaded:
+ if len(str(branch.error)) <= 30:
+ fmt = " (%s)"
+ else:
+ fmt = "\n (%s)"
+ print(
+ ("[FAILED] %-15s %-15s" + fmt)
+ % (
+ branch.project.RelPath(local=opt.this_manifest_only)
+ + "/",
+ branch.name,
+ str(branch.error),
+ ),
+ file=sys.stderr,
+ )
+ print()
+
+ for branch in todo:
+ if branch.uploaded:
+ print(
+ "[OK ] %-15s %s"
+ % (
+ branch.project.RelPath(local=opt.this_manifest_only)
+ + "/",
+ branch.name,
+ ),
+ file=sys.stderr,
+ )
+
+ if have_errors:
+ sys.exit(1)
+
+ def _GetMergeBranch(self, project, local_branch=None):
+ if local_branch is None:
+ p = GitCommand(
+ project,
+ ["rev-parse", "--abbrev-ref", "HEAD"],
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ p.Wait()
+ local_branch = p.stdout.strip()
+ p = GitCommand(
+ project,
+ ["config", "--get", "branch.%s.merge" % local_branch],
+ capture_stdout=True,
+ capture_stderr=True,
+ )
+ p.Wait()
+ merge_branch = p.stdout.strip()
+ return merge_branch
+
+ @staticmethod
+ def _GatherOne(opt, project):
+ """Figure out the upload status for |project|."""
+ if opt.current_branch:
+ cbr = project.CurrentBranch
+ up_branch = project.GetUploadableBranch(cbr)
+ avail = [up_branch] if up_branch else None
else:
- key = 'review.%s.uploadnotify' % branch.project.remote.review
- notify = branch.project.config.GetString(key)
+ avail = project.GetUploadableBranches(opt.branch)
+ return (project, avail)
- destination = opt.dest_branch or branch.project.dest_branch
+ def Execute(self, opt, args):
+ projects = self.GetProjects(
+ args, all_manifests=not opt.this_manifest_only
+ )
- if branch.project.dest_branch and not opt.dest_branch:
+ def _ProcessResults(_pool, _out, results):
+ pending = []
+ for result in results:
+ project, avail = result
+ if avail is None:
+ print(
+ 'repo: error: %s: Unable to upload branch "%s". '
+ "You might be able to fix the branch by running:\n"
+ " git branch --set-upstream-to m/%s"
+ % (
+ project.RelPath(local=opt.this_manifest_only),
+ project.CurrentBranch,
+ project.manifest.branch,
+ ),
+ file=sys.stderr,
+ )
+ elif avail:
+ pending.append(result)
+ return pending
- merge_branch = self._GetMergeBranch(
- branch.project, local_branch=branch.name)
+ pending = self.ExecuteInParallel(
+ opt.jobs,
+ functools.partial(self._GatherOne, opt),
+ projects,
+ callback=_ProcessResults,
+ )
- full_dest = destination
- if not full_dest.startswith(R_HEADS):
- full_dest = R_HEADS + full_dest
+ if not pending:
+ if opt.branch is None:
+ print(
+ "repo: error: no branches ready for upload", file=sys.stderr
+ )
+ else:
+ print(
+ 'repo: error: no branches named "%s" ready for upload'
+ % (opt.branch,),
+ file=sys.stderr,
+ )
+ return 1
- # If the merge branch of the local branch is different from the
- # project's revision AND destination, this might not be intentional.
- if (merge_branch and merge_branch != branch.project.revisionExpr
- and merge_branch != full_dest):
- print(f'For local branch {branch.name}: merge branch '
- f'{merge_branch} does not match destination branch '
- f'{destination}')
- print('skipping upload.')
- print(f'Please use `--destination {destination}` if this is intentional')
- branch.uploaded = False
- continue
+ manifests = {
+ project.manifest.topdir: project.manifest
+ for (project, available) in pending
+ }
+ ret = 0
+ for manifest in manifests.values():
+ pending_proj_names = [
+ project.name
+ for (project, available) in pending
+ if project.manifest.topdir == manifest.topdir
+ ]
+ pending_worktrees = [
+ project.worktree
+ for (project, available) in pending
+ if project.manifest.topdir == manifest.topdir
+ ]
+ hook = RepoHook.FromSubcmd(
+ hook_type="pre-upload",
+ manifest=manifest,
+ opt=opt,
+ abort_if_user_denies=True,
+ )
+ if not hook.Run(
+ project_list=pending_proj_names, worktree_list=pending_worktrees
+ ):
+ ret = 1
+ if ret:
+ return ret
- branch.UploadForReview(people,
- dryrun=opt.dryrun,
- auto_topic=opt.auto_topic,
- hashtags=hashtags,
- labels=labels,
- private=opt.private,
- notify=notify,
- wip=opt.wip,
- ready=opt.ready,
- dest_branch=destination,
- validate_certs=opt.validate_certs,
- push_options=opt.push_options)
+ reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else []
+ cc = _SplitEmails(opt.cc) if opt.cc else []
+ people = (reviewers, cc)
- branch.uploaded = True
- except UploadError as e:
- branch.error = e
- branch.uploaded = False
- have_errors = True
-
- print(file=sys.stderr)
- print('----------------------------------------------------------------------', file=sys.stderr)
-
- if have_errors:
- for branch in todo:
- if not branch.uploaded:
- if len(str(branch.error)) <= 30:
- fmt = ' (%s)'
- else:
- fmt = '\n (%s)'
- print(('[FAILED] %-15s %-15s' + fmt) % (
- branch.project.RelPath(local=opt.this_manifest_only) + '/',
- branch.name,
- str(branch.error)),
- file=sys.stderr)
- print()
-
- for branch in todo:
- if branch.uploaded:
- print('[OK ] %-15s %s' % (
- branch.project.RelPath(local=opt.this_manifest_only) + '/',
- branch.name),
- file=sys.stderr)
-
- if have_errors:
- sys.exit(1)
-
- def _GetMergeBranch(self, project, local_branch=None):
- if local_branch is None:
- p = GitCommand(project,
- ['rev-parse', '--abbrev-ref', 'HEAD'],
- capture_stdout=True,
- capture_stderr=True)
- p.Wait()
- local_branch = p.stdout.strip()
- p = GitCommand(project,
- ['config', '--get', 'branch.%s.merge' % local_branch],
- capture_stdout=True,
- capture_stderr=True)
- p.Wait()
- merge_branch = p.stdout.strip()
- return merge_branch
-
- @staticmethod
- def _GatherOne(opt, project):
- """Figure out the upload status for |project|."""
- if opt.current_branch:
- cbr = project.CurrentBranch
- up_branch = project.GetUploadableBranch(cbr)
- avail = [up_branch] if up_branch else None
- else:
- avail = project.GetUploadableBranches(opt.branch)
- return (project, avail)
-
- def Execute(self, opt, args):
- projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
-
- def _ProcessResults(_pool, _out, results):
- pending = []
- for result in results:
- project, avail = result
- if avail is None:
- print('repo: error: %s: Unable to upload branch "%s". '
- 'You might be able to fix the branch by running:\n'
- ' git branch --set-upstream-to m/%s' %
- (project.RelPath(local=opt.this_manifest_only), project.CurrentBranch,
- project.manifest.branch),
- file=sys.stderr)
- elif avail:
- pending.append(result)
- return pending
-
- pending = self.ExecuteInParallel(
- opt.jobs,
- functools.partial(self._GatherOne, opt),
- projects,
- callback=_ProcessResults)
-
- if not pending:
- if opt.branch is None:
- print('repo: error: no branches ready for upload', file=sys.stderr)
- else:
- print('repo: error: no branches named "%s" ready for upload' %
- (opt.branch,), file=sys.stderr)
- return 1
-
- manifests = {project.manifest.topdir: project.manifest
- for (project, available) in pending}
- ret = 0
- for manifest in manifests.values():
- pending_proj_names = [project.name for (project, available) in pending
- if project.manifest.topdir == manifest.topdir]
- pending_worktrees = [project.worktree for (project, available) in pending
- if project.manifest.topdir == manifest.topdir]
- hook = RepoHook.FromSubcmd(
- hook_type='pre-upload', manifest=manifest,
- opt=opt, abort_if_user_denies=True)
- if not hook.Run(project_list=pending_proj_names,
- worktree_list=pending_worktrees):
- ret = 1
- if ret:
- return ret
-
- reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else []
- cc = _SplitEmails(opt.cc) if opt.cc else []
- people = (reviewers, cc)
-
- if len(pending) == 1 and len(pending[0][1]) == 1:
- self._SingleBranch(opt, pending[0][1][0], people)
- else:
- self._MultipleBranches(opt, pending, people)
+ if len(pending) == 1 and len(pending[0][1]) == 1:
+ self._SingleBranch(opt, pending[0][1][0], people)
+ else:
+ self._MultipleBranches(opt, pending, people)
diff --git a/subcmds/version.py b/subcmds/version.py
index c68cb0a..c539db6 100644
--- a/subcmds/version.py
+++ b/subcmds/version.py
@@ -22,45 +22,52 @@
class Version(Command, MirrorSafeCommand):
- wrapper_version = None
- wrapper_path = None
+ wrapper_version = None
+ wrapper_path = None
- COMMON = False
- helpSummary = "Display the version of repo"
- helpUsage = """
+ COMMON = False
+ helpSummary = "Display the version of repo"
+ helpUsage = """
%prog
"""
- def Execute(self, opt, args):
- rp = self.manifest.repoProject
- rem = rp.GetRemote()
- branch = rp.GetBranch('default')
+ def Execute(self, opt, args):
+ rp = self.manifest.repoProject
+ rem = rp.GetRemote()
+ branch = rp.GetBranch("default")
- # These might not be the same. Report them both.
- src_ver = RepoSourceVersion()
- rp_ver = rp.bare_git.describe(HEAD)
- print('repo version %s' % rp_ver)
- print(' (from %s)' % rem.url)
- print(' (tracking %s)' % branch.merge)
- print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD))
+ # These might not be the same. Report them both.
+ src_ver = RepoSourceVersion()
+ rp_ver = rp.bare_git.describe(HEAD)
+ print("repo version %s" % rp_ver)
+ print(" (from %s)" % rem.url)
+ print(" (tracking %s)" % branch.merge)
+ print(" (%s)" % rp.bare_git.log("-1", "--format=%cD", HEAD))
- if self.wrapper_path is not None:
- print('repo launcher version %s' % self.wrapper_version)
- print(' (from %s)' % self.wrapper_path)
+ if self.wrapper_path is not None:
+ print("repo launcher version %s" % self.wrapper_version)
+ print(" (from %s)" % self.wrapper_path)
- if src_ver != rp_ver:
- print(' (currently at %s)' % src_ver)
+ if src_ver != rp_ver:
+ print(" (currently at %s)" % src_ver)
- print('repo User-Agent %s' % user_agent.repo)
- print('git %s' % git.version_tuple().full)
- print('git User-Agent %s' % user_agent.git)
- print('Python %s' % sys.version)
- uname = platform.uname()
- if sys.version_info.major < 3:
- # Python 3 returns a named tuple, but Python 2 is simpler.
- print(uname)
- else:
- print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
- print('CPU %s (%s)' %
- (uname.machine, uname.processor if uname.processor else 'unknown'))
- print('Bug reports:', Wrapper().BUG_URL)
+ print("repo User-Agent %s" % user_agent.repo)
+ print("git %s" % git.version_tuple().full)
+ print("git User-Agent %s" % user_agent.git)
+ print("Python %s" % sys.version)
+ uname = platform.uname()
+ if sys.version_info.major < 3:
+ # Python 3 returns a named tuple, but Python 2 is simpler.
+ print(uname)
+ else:
+ print(
+ "OS %s %s (%s)" % (uname.system, uname.release, uname.version)
+ )
+ print(
+ "CPU %s (%s)"
+ % (
+ uname.machine,
+ uname.processor if uname.processor else "unknown",
+ )
+ )
+ print("Bug reports:", Wrapper().BUG_URL)
diff --git a/tests/conftest.py b/tests/conftest.py
index 3e43f6d..e1a2292 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -21,5 +21,5 @@
@pytest.fixture(autouse=True)
def disable_repo_trace(tmp_path):
- """Set an environment marker to relax certain strict checks for test code."""
- repo_trace._TRACE_FILE = str(tmp_path / 'TRACE_FILE_from_test')
+ """Set an environment marker to relax certain strict checks for test code.""" # noqa: E501
+ repo_trace._TRACE_FILE = str(tmp_path / "TRACE_FILE_from_test")
diff --git a/tests/test_editor.py b/tests/test_editor.py
index cfd4f5e..8f5d160 100644
--- a/tests/test_editor.py
+++ b/tests/test_editor.py
@@ -20,37 +20,37 @@
class EditorTestCase(unittest.TestCase):
- """Take care of resetting Editor state across tests."""
+ """Take care of resetting Editor state across tests."""
- def setUp(self):
- self.setEditor(None)
+ def setUp(self):
+ self.setEditor(None)
- def tearDown(self):
- self.setEditor(None)
+ def tearDown(self):
+ self.setEditor(None)
- @staticmethod
- def setEditor(editor):
- Editor._editor = editor
+ @staticmethod
+ def setEditor(editor):
+ Editor._editor = editor
class GetEditor(EditorTestCase):
- """Check GetEditor behavior."""
+ """Check GetEditor behavior."""
- def test_basic(self):
- """Basic checking of _GetEditor."""
- self.setEditor(':')
- self.assertEqual(':', Editor._GetEditor())
+ def test_basic(self):
+ """Basic checking of _GetEditor."""
+ self.setEditor(":")
+ self.assertEqual(":", Editor._GetEditor())
class EditString(EditorTestCase):
- """Check EditString behavior."""
+ """Check EditString behavior."""
- def test_no_editor(self):
- """Check behavior when no editor is available."""
- self.setEditor(':')
- self.assertEqual('foo', Editor.EditString('foo'))
+ def test_no_editor(self):
+ """Check behavior when no editor is available."""
+ self.setEditor(":")
+ self.assertEqual("foo", Editor.EditString("foo"))
- def test_cat_editor(self):
- """Check behavior when editor is `cat`."""
- self.setEditor('cat')
- self.assertEqual('foo', Editor.EditString('foo'))
+ def test_cat_editor(self):
+ """Check behavior when editor is `cat`."""
+ self.setEditor("cat")
+ self.assertEqual("foo", Editor.EditString("foo"))
diff --git a/tests/test_error.py b/tests/test_error.py
index 82b00c2..784e2d5 100644
--- a/tests/test_error.py
+++ b/tests/test_error.py
@@ -22,32 +22,34 @@
class PickleTests(unittest.TestCase):
- """Make sure all our custom exceptions can be pickled."""
+ """Make sure all our custom exceptions can be pickled."""
- def getExceptions(self):
- """Return all our custom exceptions."""
- for name in dir(error):
- cls = getattr(error, name)
- if isinstance(cls, type) and issubclass(cls, Exception):
- yield cls
+ def getExceptions(self):
+ """Return all our custom exceptions."""
+ for name in dir(error):
+ cls = getattr(error, name)
+ if isinstance(cls, type) and issubclass(cls, Exception):
+ yield cls
- def testExceptionLookup(self):
- """Make sure our introspection logic works."""
- classes = list(self.getExceptions())
- self.assertIn(error.HookError, classes)
- # Don't assert the exact number to avoid being a change-detector test.
- self.assertGreater(len(classes), 10)
+ def testExceptionLookup(self):
+ """Make sure our introspection logic works."""
+ classes = list(self.getExceptions())
+ self.assertIn(error.HookError, classes)
+ # Don't assert the exact number to avoid being a change-detector test.
+ self.assertGreater(len(classes), 10)
- def testPickle(self):
- """Try to pickle all the exceptions."""
- for cls in self.getExceptions():
- args = inspect.getfullargspec(cls.__init__).args[1:]
- obj = cls(*args)
- p = pickle.dumps(obj)
- try:
- newobj = pickle.loads(p)
- except Exception as e: # pylint: disable=broad-except
- self.fail('Class %s is unable to be pickled: %s\n'
- 'Incomplete super().__init__(...) call?' % (cls, e))
- self.assertIsInstance(newobj, cls)
- self.assertEqual(str(obj), str(newobj))
+ def testPickle(self):
+ """Try to pickle all the exceptions."""
+ for cls in self.getExceptions():
+ args = inspect.getfullargspec(cls.__init__).args[1:]
+ obj = cls(*args)
+ p = pickle.dumps(obj)
+ try:
+ newobj = pickle.loads(p)
+ except Exception as e: # pylint: disable=broad-except
+ self.fail(
+ "Class %s is unable to be pickled: %s\n"
+ "Incomplete super().__init__(...) call?" % (cls, e)
+ )
+ self.assertIsInstance(newobj, cls)
+ self.assertEqual(str(obj), str(newobj))
diff --git a/tests/test_git_command.py b/tests/test_git_command.py
index 96408a2..c4c3a4c 100644
--- a/tests/test_git_command.py
+++ b/tests/test_git_command.py
@@ -19,138 +19,146 @@
import unittest
try:
- from unittest import mock
+ from unittest import mock
except ImportError:
- import mock
+ import mock
import git_command
import wrapper
class GitCommandTest(unittest.TestCase):
- """Tests the GitCommand class (via git_command.git)."""
+ """Tests the GitCommand class (via git_command.git)."""
- def setUp(self):
+ def setUp(self):
+ def realpath_mock(val):
+ return val
- def realpath_mock(val):
- return val
+ mock.patch.object(
+ os.path, "realpath", side_effect=realpath_mock
+ ).start()
- mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start()
+ def tearDown(self):
+ mock.patch.stopall()
- def tearDown(self):
- mock.patch.stopall()
+ def test_alternative_setting_when_matching(self):
+ r = git_command._build_env(
+ objdir=os.path.join("zap", "objects"), gitdir="zap"
+ )
- def test_alternative_setting_when_matching(self):
- r = git_command._build_env(
- objdir = os.path.join('zap', 'objects'),
- gitdir = 'zap'
- )
+ self.assertIsNone(r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"))
+ self.assertEqual(
+ r.get("GIT_OBJECT_DIRECTORY"), os.path.join("zap", "objects")
+ )
- self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
- self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
+ def test_alternative_setting_when_different(self):
+ r = git_command._build_env(
+ objdir=os.path.join("wow", "objects"), gitdir="zap"
+ )
- def test_alternative_setting_when_different(self):
- r = git_command._build_env(
- objdir = os.path.join('wow', 'objects'),
- gitdir = 'zap'
- )
-
- self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
- self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
+ self.assertEqual(
+ r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"),
+ os.path.join("zap", "objects"),
+ )
+ self.assertEqual(
+ r.get("GIT_OBJECT_DIRECTORY"), os.path.join("wow", "objects")
+ )
class GitCallUnitTest(unittest.TestCase):
- """Tests the _GitCall class (via git_command.git)."""
+ """Tests the _GitCall class (via git_command.git)."""
- def test_version_tuple(self):
- """Check git.version_tuple() handling."""
- ver = git_command.git.version_tuple()
- self.assertIsNotNone(ver)
+ def test_version_tuple(self):
+ """Check git.version_tuple() handling."""
+ ver = git_command.git.version_tuple()
+ self.assertIsNotNone(ver)
- # We don't dive too deep into the values here to avoid having to update
- # whenever git versions change. We do check relative to this min version
- # as this is what `repo` itself requires via MIN_GIT_VERSION.
- MIN_GIT_VERSION = (2, 10, 2)
- self.assertTrue(isinstance(ver.major, int))
- self.assertTrue(isinstance(ver.minor, int))
- self.assertTrue(isinstance(ver.micro, int))
+ # We don't dive too deep into the values here to avoid having to update
+ # whenever git versions change. We do check relative to this min
+ # version as this is what `repo` itself requires via MIN_GIT_VERSION.
+ MIN_GIT_VERSION = (2, 10, 2)
+ self.assertTrue(isinstance(ver.major, int))
+ self.assertTrue(isinstance(ver.minor, int))
+ self.assertTrue(isinstance(ver.micro, int))
- self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
- self.assertGreaterEqual(ver.micro, 0)
- self.assertGreaterEqual(ver.major, 0)
+ self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
+ self.assertGreaterEqual(ver.micro, 0)
+ self.assertGreaterEqual(ver.major, 0)
- self.assertGreaterEqual(ver, MIN_GIT_VERSION)
- self.assertLess(ver, (9999, 9999, 9999))
+ self.assertGreaterEqual(ver, MIN_GIT_VERSION)
+ self.assertLess(ver, (9999, 9999, 9999))
- self.assertNotEqual('', ver.full)
+ self.assertNotEqual("", ver.full)
class UserAgentUnitTest(unittest.TestCase):
- """Tests the UserAgent function."""
+ """Tests the UserAgent function."""
- def test_smoke_os(self):
- """Make sure UA OS setting returns something useful."""
- os_name = git_command.user_agent.os
- # We can't dive too deep because of OS/tool differences, but we can check
- # the general form.
- m = re.match(r'^[^ ]+$', os_name)
- self.assertIsNotNone(m)
+ def test_smoke_os(self):
+ """Make sure UA OS setting returns something useful."""
+ os_name = git_command.user_agent.os
+ # We can't dive too deep because of OS/tool differences, but we can
+ # check the general form.
+ m = re.match(r"^[^ ]+$", os_name)
+ self.assertIsNotNone(m)
- def test_smoke_repo(self):
- """Make sure repo UA returns something useful."""
- ua = git_command.user_agent.repo
- # We can't dive too deep because of OS/tool differences, but we can check
- # the general form.
- m = re.match(r'^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+', ua)
- self.assertIsNotNone(m)
+ def test_smoke_repo(self):
+ """Make sure repo UA returns something useful."""
+ ua = git_command.user_agent.repo
+ # We can't dive too deep because of OS/tool differences, but we can
+ # check the general form.
+ m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
+ self.assertIsNotNone(m)
- def test_smoke_git(self):
- """Make sure git UA returns something useful."""
- ua = git_command.user_agent.git
- # We can't dive too deep because of OS/tool differences, but we can check
- # the general form.
- m = re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua)
- self.assertIsNotNone(m)
+ def test_smoke_git(self):
+ """Make sure git UA returns something useful."""
+ ua = git_command.user_agent.git
+ # We can't dive too deep because of OS/tool differences, but we can
+ # check the general form.
+ m = re.match(r"^git/[^ ]+ ([^ ]+) git-repo/[^ ]+", ua)
+ self.assertIsNotNone(m)
class GitRequireTests(unittest.TestCase):
- """Test the git_require helper."""
+ """Test the git_require helper."""
- def setUp(self):
- self.wrapper = wrapper.Wrapper()
- ver = self.wrapper.GitVersion(1, 2, 3, 4)
- mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
+ def setUp(self):
+ self.wrapper = wrapper.Wrapper()
+ ver = self.wrapper.GitVersion(1, 2, 3, 4)
+ mock.patch.object(
+ git_command.git, "version_tuple", return_value=ver
+ ).start()
- def tearDown(self):
- mock.patch.stopall()
+ def tearDown(self):
+ mock.patch.stopall()
- def test_older_nonfatal(self):
- """Test non-fatal require calls with old versions."""
- self.assertFalse(git_command.git_require((2,)))
- self.assertFalse(git_command.git_require((1, 3)))
- self.assertFalse(git_command.git_require((1, 2, 4)))
- self.assertFalse(git_command.git_require((1, 2, 3, 5)))
+ def test_older_nonfatal(self):
+ """Test non-fatal require calls with old versions."""
+ self.assertFalse(git_command.git_require((2,)))
+ self.assertFalse(git_command.git_require((1, 3)))
+ self.assertFalse(git_command.git_require((1, 2, 4)))
+ self.assertFalse(git_command.git_require((1, 2, 3, 5)))
- def test_newer_nonfatal(self):
- """Test non-fatal require calls with newer versions."""
- self.assertTrue(git_command.git_require((0,)))
- self.assertTrue(git_command.git_require((1, 0)))
- self.assertTrue(git_command.git_require((1, 2, 0)))
- self.assertTrue(git_command.git_require((1, 2, 3, 0)))
+ def test_newer_nonfatal(self):
+ """Test non-fatal require calls with newer versions."""
+ self.assertTrue(git_command.git_require((0,)))
+ self.assertTrue(git_command.git_require((1, 0)))
+ self.assertTrue(git_command.git_require((1, 2, 0)))
+ self.assertTrue(git_command.git_require((1, 2, 3, 0)))
- def test_equal_nonfatal(self):
- """Test require calls with equal values."""
- self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
- self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))
+ def test_equal_nonfatal(self):
+ """Test require calls with equal values."""
+ self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
+ self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))
- def test_older_fatal(self):
- """Test fatal require calls with old versions."""
- with self.assertRaises(SystemExit) as e:
- git_command.git_require((2,), fail=True)
- self.assertNotEqual(0, e.code)
+ def test_older_fatal(self):
+ """Test fatal require calls with old versions."""
+ with self.assertRaises(SystemExit) as e:
+ git_command.git_require((2,), fail=True)
+ self.assertNotEqual(0, e.code)
- def test_older_fatal_msg(self):
- """Test fatal require calls with old versions and message."""
- with self.assertRaises(SystemExit) as e:
- git_command.git_require((2,), fail=True, msg='so sad')
- self.assertNotEqual(0, e.code)
+ def test_older_fatal_msg(self):
+ """Test fatal require calls with old versions and message."""
+ with self.assertRaises(SystemExit) as e:
+ git_command.git_require((2,), fail=True, msg="so sad")
+ self.assertNotEqual(0, e.code)
diff --git a/tests/test_git_config.py b/tests/test_git_config.py
index 3b0aa8b..a44dca0 100644
--- a/tests/test_git_config.py
+++ b/tests/test_git_config.py
@@ -22,167 +22,169 @@
def fixture(*paths):
- """Return a path relative to test/fixtures.
- """
- return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
+ """Return a path relative to test/fixtures."""
+ return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
class GitConfigReadOnlyTests(unittest.TestCase):
- """Read-only tests of the GitConfig class."""
+ """Read-only tests of the GitConfig class."""
- def setUp(self):
- """Create a GitConfig object using the test.gitconfig fixture.
- """
- config_fixture = fixture('test.gitconfig')
- self.config = git_config.GitConfig(config_fixture)
+ def setUp(self):
+ """Create a GitConfig object using the test.gitconfig fixture."""
+ config_fixture = fixture("test.gitconfig")
+ self.config = git_config.GitConfig(config_fixture)
- def test_GetString_with_empty_config_values(self):
- """
- Test config entries with no value.
+ def test_GetString_with_empty_config_values(self):
+ """
+ Test config entries with no value.
- [section]
- empty
+ [section]
+ empty
- """
- val = self.config.GetString('section.empty')
- self.assertEqual(val, None)
+ """
+ val = self.config.GetString("section.empty")
+ self.assertEqual(val, None)
- def test_GetString_with_true_value(self):
- """
- Test config entries with a string value.
+ def test_GetString_with_true_value(self):
+ """
+ Test config entries with a string value.
- [section]
- nonempty = true
+ [section]
+ nonempty = true
- """
- val = self.config.GetString('section.nonempty')
- self.assertEqual(val, 'true')
+ """
+ val = self.config.GetString("section.nonempty")
+ self.assertEqual(val, "true")
- def test_GetString_from_missing_file(self):
- """
- Test missing config file
- """
- config_fixture = fixture('not.present.gitconfig')
- config = git_config.GitConfig(config_fixture)
- val = config.GetString('empty')
- self.assertEqual(val, None)
+ def test_GetString_from_missing_file(self):
+ """
+ Test missing config file
+ """
+ config_fixture = fixture("not.present.gitconfig")
+ config = git_config.GitConfig(config_fixture)
+ val = config.GetString("empty")
+ self.assertEqual(val, None)
- def test_GetBoolean_undefined(self):
- """Test GetBoolean on key that doesn't exist."""
- self.assertIsNone(self.config.GetBoolean('section.missing'))
+ def test_GetBoolean_undefined(self):
+ """Test GetBoolean on key that doesn't exist."""
+ self.assertIsNone(self.config.GetBoolean("section.missing"))
- def test_GetBoolean_invalid(self):
- """Test GetBoolean on invalid boolean value."""
- self.assertIsNone(self.config.GetBoolean('section.boolinvalid'))
+ def test_GetBoolean_invalid(self):
+ """Test GetBoolean on invalid boolean value."""
+ self.assertIsNone(self.config.GetBoolean("section.boolinvalid"))
- def test_GetBoolean_true(self):
- """Test GetBoolean on valid true boolean."""
- self.assertTrue(self.config.GetBoolean('section.booltrue'))
+ def test_GetBoolean_true(self):
+ """Test GetBoolean on valid true boolean."""
+ self.assertTrue(self.config.GetBoolean("section.booltrue"))
- def test_GetBoolean_false(self):
- """Test GetBoolean on valid false boolean."""
- self.assertFalse(self.config.GetBoolean('section.boolfalse'))
+ def test_GetBoolean_false(self):
+ """Test GetBoolean on valid false boolean."""
+ self.assertFalse(self.config.GetBoolean("section.boolfalse"))
- def test_GetInt_undefined(self):
- """Test GetInt on key that doesn't exist."""
- self.assertIsNone(self.config.GetInt('section.missing'))
+ def test_GetInt_undefined(self):
+ """Test GetInt on key that doesn't exist."""
+ self.assertIsNone(self.config.GetInt("section.missing"))
- def test_GetInt_invalid(self):
- """Test GetInt on invalid integer value."""
- self.assertIsNone(self.config.GetBoolean('section.intinvalid'))
+ def test_GetInt_invalid(self):
+ """Test GetInt on invalid integer value."""
+ self.assertIsNone(self.config.GetBoolean("section.intinvalid"))
- def test_GetInt_valid(self):
- """Test GetInt on valid integers."""
- TESTS = (
- ('inthex', 16),
- ('inthexk', 16384),
- ('int', 10),
- ('intk', 10240),
- ('intm', 10485760),
- ('intg', 10737418240),
- )
- for key, value in TESTS:
- self.assertEqual(value, self.config.GetInt('section.%s' % (key,)))
+ def test_GetInt_valid(self):
+ """Test GetInt on valid integers."""
+ TESTS = (
+ ("inthex", 16),
+ ("inthexk", 16384),
+ ("int", 10),
+ ("intk", 10240),
+ ("intm", 10485760),
+ ("intg", 10737418240),
+ )
+ for key, value in TESTS:
+ self.assertEqual(value, self.config.GetInt("section.%s" % (key,)))
class GitConfigReadWriteTests(unittest.TestCase):
- """Read/write tests of the GitConfig class."""
+ """Read/write tests of the GitConfig class."""
- def setUp(self):
- self.tmpfile = tempfile.NamedTemporaryFile()
- self.config = self.get_config()
+ def setUp(self):
+ self.tmpfile = tempfile.NamedTemporaryFile()
+ self.config = self.get_config()
- def get_config(self):
- """Get a new GitConfig instance."""
- return git_config.GitConfig(self.tmpfile.name)
+ def get_config(self):
+ """Get a new GitConfig instance."""
+ return git_config.GitConfig(self.tmpfile.name)
- def test_SetString(self):
- """Test SetString behavior."""
- # Set a value.
- self.assertIsNone(self.config.GetString('foo.bar'))
- self.config.SetString('foo.bar', 'val')
- self.assertEqual('val', self.config.GetString('foo.bar'))
+ def test_SetString(self):
+ """Test SetString behavior."""
+ # Set a value.
+ self.assertIsNone(self.config.GetString("foo.bar"))
+ self.config.SetString("foo.bar", "val")
+ self.assertEqual("val", self.config.GetString("foo.bar"))
- # Make sure the value was actually written out.
- config = self.get_config()
- self.assertEqual('val', config.GetString('foo.bar'))
+ # Make sure the value was actually written out.
+ config = self.get_config()
+ self.assertEqual("val", config.GetString("foo.bar"))
- # Update the value.
- self.config.SetString('foo.bar', 'valll')
- self.assertEqual('valll', self.config.GetString('foo.bar'))
- config = self.get_config()
- self.assertEqual('valll', config.GetString('foo.bar'))
+ # Update the value.
+ self.config.SetString("foo.bar", "valll")
+ self.assertEqual("valll", self.config.GetString("foo.bar"))
+ config = self.get_config()
+ self.assertEqual("valll", config.GetString("foo.bar"))
- # Delete the value.
- self.config.SetString('foo.bar', None)
- self.assertIsNone(self.config.GetString('foo.bar'))
- config = self.get_config()
- self.assertIsNone(config.GetString('foo.bar'))
+ # Delete the value.
+ self.config.SetString("foo.bar", None)
+ self.assertIsNone(self.config.GetString("foo.bar"))
+ config = self.get_config()
+ self.assertIsNone(config.GetString("foo.bar"))
- def test_SetBoolean(self):
- """Test SetBoolean behavior."""
- # Set a true value.
- self.assertIsNone(self.config.GetBoolean('foo.bar'))
- for val in (True, 1):
- self.config.SetBoolean('foo.bar', val)
- self.assertTrue(self.config.GetBoolean('foo.bar'))
+ def test_SetBoolean(self):
+ """Test SetBoolean behavior."""
+ # Set a true value.
+ self.assertIsNone(self.config.GetBoolean("foo.bar"))
+ for val in (True, 1):
+ self.config.SetBoolean("foo.bar", val)
+ self.assertTrue(self.config.GetBoolean("foo.bar"))
- # Make sure the value was actually written out.
- config = self.get_config()
- self.assertTrue(config.GetBoolean('foo.bar'))
- self.assertEqual('true', config.GetString('foo.bar'))
+ # Make sure the value was actually written out.
+ config = self.get_config()
+ self.assertTrue(config.GetBoolean("foo.bar"))
+ self.assertEqual("true", config.GetString("foo.bar"))
- # Set a false value.
- for val in (False, 0):
- self.config.SetBoolean('foo.bar', val)
- self.assertFalse(self.config.GetBoolean('foo.bar'))
+ # Set a false value.
+ for val in (False, 0):
+ self.config.SetBoolean("foo.bar", val)
+ self.assertFalse(self.config.GetBoolean("foo.bar"))
- # Make sure the value was actually written out.
- config = self.get_config()
- self.assertFalse(config.GetBoolean('foo.bar'))
- self.assertEqual('false', config.GetString('foo.bar'))
+ # Make sure the value was actually written out.
+ config = self.get_config()
+ self.assertFalse(config.GetBoolean("foo.bar"))
+ self.assertEqual("false", config.GetString("foo.bar"))
- # Delete the value.
- self.config.SetBoolean('foo.bar', None)
- self.assertIsNone(self.config.GetBoolean('foo.bar'))
- config = self.get_config()
- self.assertIsNone(config.GetBoolean('foo.bar'))
+ # Delete the value.
+ self.config.SetBoolean("foo.bar", None)
+ self.assertIsNone(self.config.GetBoolean("foo.bar"))
+ config = self.get_config()
+ self.assertIsNone(config.GetBoolean("foo.bar"))
- def test_GetSyncAnalysisStateData(self):
- """Test config entries with a sync state analysis data."""
- superproject_logging_data = {}
- superproject_logging_data['test'] = False
- options = type('options', (object,), {})()
- options.verbose = 'true'
- options.mp_update = 'false'
- TESTS = (
- ('superproject.test', 'false'),
- ('options.verbose', 'true'),
- ('options.mpupdate', 'false'),
- ('main.version', '1'),
- )
- self.config.UpdateSyncAnalysisState(options, superproject_logging_data)
- sync_data = self.config.GetSyncAnalysisStateData()
- for key, value in TESTS:
- self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
- self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])
+ def test_GetSyncAnalysisStateData(self):
+ """Test config entries with a sync state analysis data."""
+ superproject_logging_data = {}
+ superproject_logging_data["test"] = False
+ options = type("options", (object,), {})()
+ options.verbose = "true"
+ options.mp_update = "false"
+ TESTS = (
+ ("superproject.test", "false"),
+ ("options.verbose", "true"),
+ ("options.mpupdate", "false"),
+ ("main.version", "1"),
+ )
+ self.config.UpdateSyncAnalysisState(options, superproject_logging_data)
+ sync_data = self.config.GetSyncAnalysisStateData()
+ for key, value in TESTS:
+ self.assertEqual(
+ sync_data[f"{git_config.SYNC_STATE_PREFIX}{key}"], value
+ )
+ self.assertTrue(
+ sync_data[f"{git_config.SYNC_STATE_PREFIX}main.synctime"]
+ )
diff --git a/tests/test_git_superproject.py b/tests/test_git_superproject.py
index b9b597a..eb542c6 100644
--- a/tests/test_git_superproject.py
+++ b/tests/test_git_superproject.py
@@ -28,297 +28,369 @@
class SuperprojectTestCase(unittest.TestCase):
- """TestCase for the Superproject module."""
+ """TestCase for the Superproject module."""
- PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
- PARENT_SID_VALUE = 'parent_sid'
- SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
- FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
+ PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
+ PARENT_SID_VALUE = "parent_sid"
+ SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
+ FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
- def setUp(self):
- """Set up superproject every time."""
- self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
- self.tempdir = self.tempdirobj.name
- self.repodir = os.path.join(self.tempdir, '.repo')
- self.manifest_file = os.path.join(
- self.repodir, manifest_xml.MANIFEST_FILE_NAME)
- os.mkdir(self.repodir)
- self.platform = platform.system().lower()
+ def setUp(self):
+ """Set up superproject every time."""
+ self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
+ self.tempdir = self.tempdirobj.name
+ self.repodir = os.path.join(self.tempdir, ".repo")
+ self.manifest_file = os.path.join(
+ self.repodir, manifest_xml.MANIFEST_FILE_NAME
+ )
+ os.mkdir(self.repodir)
+ self.platform = platform.system().lower()
- # By default we initialize with the expected case where
- # repo launches us (so GIT_TRACE2_PARENT_SID is set).
- env = {
- self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
- }
- self.git_event_log = git_trace2_event_log.EventLog(env=env)
+ # By default we initialize with the expected case where
+ # repo launches us (so GIT_TRACE2_PARENT_SID is set).
+ env = {
+ self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
+ }
+ self.git_event_log = git_trace2_event_log.EventLog(env=env)
- # The manifest parsing really wants a git repo currently.
- gitdir = os.path.join(self.repodir, 'manifests.git')
- os.mkdir(gitdir)
- with open(os.path.join(gitdir, 'config'), 'w') as fp:
- fp.write("""[remote "origin"]
+ # The manifest parsing really wants a git repo currently.
+ gitdir = os.path.join(self.repodir, "manifests.git")
+ os.mkdir(gitdir)
+ with open(os.path.join(gitdir, "config"), "w") as fp:
+ fp.write(
+ """[remote "origin"]
url = https://localhost:0/manifest
-""")
+"""
+ )
- manifest = self.getXmlManifest("""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
- <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
+ <project path="art" name="platform/art" groups="notdefault,platform-"""
+ + self.platform
+ + """
" /></manifest>
-""")
- self._superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
+"""
+ )
+ self._superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("default-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
- def tearDown(self):
- """Tear down superproject every time."""
- self.tempdirobj.cleanup()
+ def tearDown(self):
+ """Tear down superproject every time."""
+ self.tempdirobj.cleanup()
- def getXmlManifest(self, data):
- """Helper to initialize a manifest for testing."""
- with open(self.manifest_file, 'w') as fp:
- fp.write(data)
- return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
+ def getXmlManifest(self, data):
+ """Helper to initialize a manifest for testing."""
+ with open(self.manifest_file, "w") as fp:
+ fp.write(data)
+ return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
- def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
- """Helper function to verify common event log keys."""
- self.assertIn('event', log_entry)
- self.assertIn('sid', log_entry)
- self.assertIn('thread', log_entry)
- self.assertIn('time', log_entry)
+ def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
+ """Helper function to verify common event log keys."""
+ self.assertIn("event", log_entry)
+ self.assertIn("sid", log_entry)
+ self.assertIn("thread", log_entry)
+ self.assertIn("time", log_entry)
- # Do basic data format validation.
- self.assertEqual(expected_event_name, log_entry['event'])
- if full_sid:
- self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
- else:
- self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
- self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
+ # Do basic data format validation.
+ self.assertEqual(expected_event_name, log_entry["event"])
+ if full_sid:
+ self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
+ else:
+ self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
+ self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
- def readLog(self, log_path):
- """Helper function to read log data into a list."""
- log_data = []
- with open(log_path, mode='rb') as f:
- for line in f:
- log_data.append(json.loads(line))
- return log_data
+ def readLog(self, log_path):
+ """Helper function to read log data into a list."""
+ log_data = []
+ with open(log_path, mode="rb") as f:
+ for line in f:
+ log_data.append(json.loads(line))
+ return log_data
- def verifyErrorEvent(self):
- """Helper to verify that error event is written."""
+ def verifyErrorEvent(self):
+ """Helper to verify that error event is written."""
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self.git_event_log.Write(path=tempdir)
- self.log_data = self.readLog(log_path)
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self.git_event_log.Write(path=tempdir)
+ self.log_data = self.readLog(log_path)
- self.assertEqual(len(self.log_data), 2)
- error_event = self.log_data[1]
- self.verifyCommonKeys(self.log_data[0], expected_event_name='version')
- self.verifyCommonKeys(error_event, expected_event_name='error')
- # Check for 'error' event specific fields.
- self.assertIn('msg', error_event)
- self.assertIn('fmt', error_event)
+ self.assertEqual(len(self.log_data), 2)
+ error_event = self.log_data[1]
+ self.verifyCommonKeys(self.log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(error_event, expected_event_name="error")
+ # Check for 'error' event specific fields.
+ self.assertIn("msg", error_event)
+ self.assertIn("fmt", error_event)
- def test_superproject_get_superproject_no_superproject(self):
- """Test with no url."""
- manifest = self.getXmlManifest("""
+ def test_superproject_get_superproject_no_superproject(self):
+ """Test with no url."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
</manifest>
-""")
- self.assertIsNone(manifest.superproject)
+"""
+ )
+ self.assertIsNone(manifest.superproject)
- def test_superproject_get_superproject_invalid_url(self):
- """Test with an invalid url."""
- manifest = self.getXmlManifest("""
+ def test_superproject_get_superproject_invalid_url(self):
+ """Test with an invalid url."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
</manifest>
-""")
- superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
- sync_result = superproject.Sync(self.git_event_log)
- self.assertFalse(sync_result.success)
- self.assertTrue(sync_result.fatal)
-
- def test_superproject_get_superproject_invalid_branch(self):
- """Test with an invalid branch."""
- manifest = self.getXmlManifest("""
-<manifest>
- <remote name="test-remote" fetch="localhost" />
- <default remote="test-remote" revision="refs/heads/main" />
- <superproject name="superproject"/>
-</manifest>
-""")
- self._superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
- with mock.patch.object(self._superproject, '_branch', 'junk'):
- sync_result = self._superproject.Sync(self.git_event_log)
- self.assertFalse(sync_result.success)
- self.assertTrue(sync_result.fatal)
- self.verifyErrorEvent()
-
- def test_superproject_get_superproject_mock_init(self):
- """Test with _Init failing."""
- with mock.patch.object(self._superproject, '_Init', return_value=False):
- sync_result = self._superproject.Sync(self.git_event_log)
- self.assertFalse(sync_result.success)
- self.assertTrue(sync_result.fatal)
-
- def test_superproject_get_superproject_mock_fetch(self):
- """Test with _Fetch failing."""
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- os.mkdir(self._superproject._superproject_path)
- with mock.patch.object(self._superproject, '_Fetch', return_value=False):
- sync_result = self._superproject.Sync(self.git_event_log)
+"""
+ )
+ superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("test-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
+ sync_result = superproject.Sync(self.git_event_log)
self.assertFalse(sync_result.success)
self.assertTrue(sync_result.fatal)
- def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
- """Test with LsTree being a mock."""
- data = ('120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00'
- '160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
- '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00'
- '120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00'
- '160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00')
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- with mock.patch.object(self._superproject, '_Fetch', return_value=True):
- with mock.patch.object(self._superproject, '_LsTree', return_value=data):
- commit_ids_result = self._superproject._GetAllProjectsCommitIds()
- self.assertEqual(commit_ids_result.commit_ids, {
- 'art': '2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea',
- 'bootable/recovery': 'e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06',
- 'build/bazel': 'ade9b7a0d874e25fff4bf2552488825c6f111928'
- })
- self.assertFalse(commit_ids_result.fatal)
+ def test_superproject_get_superproject_invalid_branch(self):
+ """Test with an invalid branch."""
+ manifest = self.getXmlManifest(
+ """
+<manifest>
+ <remote name="test-remote" fetch="localhost" />
+ <default remote="test-remote" revision="refs/heads/main" />
+ <superproject name="superproject"/>
+</manifest>
+"""
+ )
+ self._superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("test-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
+ with mock.patch.object(self._superproject, "_branch", "junk"):
+ sync_result = self._superproject.Sync(self.git_event_log)
+ self.assertFalse(sync_result.success)
+ self.assertTrue(sync_result.fatal)
+ self.verifyErrorEvent()
- def test_superproject_write_manifest_file(self):
- """Test with writing manifest to a file after setting revisionId."""
- self.assertEqual(len(self._superproject._manifest.projects), 1)
- project = self._superproject._manifest.projects[0]
- project.SetRevisionId('ABCDEF')
- # Create temporary directory so that it can write the file.
- os.mkdir(self._superproject._superproject_path)
- manifest_path = self._superproject._WriteManifestFile()
- self.assertIsNotNone(manifest_path)
- with open(manifest_path, 'r') as fp:
- manifest_xml_data = fp.read()
- self.assertEqual(
- sort_attributes(manifest_xml_data),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project groups="notdefault,platform-' + self.platform + '" '
- 'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+ def test_superproject_get_superproject_mock_init(self):
+ """Test with _Init failing."""
+ with mock.patch.object(self._superproject, "_Init", return_value=False):
+ sync_result = self._superproject.Sync(self.git_event_log)
+ self.assertFalse(sync_result.success)
+ self.assertTrue(sync_result.fatal)
- def test_superproject_update_project_revision_id(self):
- """Test with LsTree being a mock."""
- self.assertEqual(len(self._superproject._manifest.projects), 1)
- projects = self._superproject._manifest.projects
- data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
- '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00')
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- with mock.patch.object(self._superproject, '_Fetch', return_value=True):
- with mock.patch.object(self._superproject,
- '_LsTree',
- return_value=data):
- # Create temporary directory so that it can write the file.
- os.mkdir(self._superproject._superproject_path)
- update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
- self.assertIsNotNone(update_result.manifest_path)
- self.assertFalse(update_result.fatal)
- with open(update_result.manifest_path, 'r') as fp:
+ def test_superproject_get_superproject_mock_fetch(self):
+ """Test with _Fetch failing."""
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ os.mkdir(self._superproject._superproject_path)
+ with mock.patch.object(
+ self._superproject, "_Fetch", return_value=False
+ ):
+ sync_result = self._superproject.Sync(self.git_event_log)
+ self.assertFalse(sync_result.success)
+ self.assertTrue(sync_result.fatal)
+
+ def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
+ """Test with LsTree being a mock."""
+ data = (
+ "120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00"
+ "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
+ "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
+ "120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00"
+ "160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00"
+ )
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ with mock.patch.object(
+ self._superproject, "_Fetch", return_value=True
+ ):
+ with mock.patch.object(
+ self._superproject, "_LsTree", return_value=data
+ ):
+ commit_ids_result = (
+ self._superproject._GetAllProjectsCommitIds()
+ )
+ self.assertEqual(
+ commit_ids_result.commit_ids,
+ {
+ "art": "2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea",
+ "bootable/recovery": "e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06",
+ "build/bazel": "ade9b7a0d874e25fff4bf2552488825c6f111928",
+ },
+ )
+ self.assertFalse(commit_ids_result.fatal)
+
+ def test_superproject_write_manifest_file(self):
+ """Test with writing manifest to a file after setting revisionId."""
+ self.assertEqual(len(self._superproject._manifest.projects), 1)
+ project = self._superproject._manifest.projects[0]
+ project.SetRevisionId("ABCDEF")
+ # Create temporary directory so that it can write the file.
+ os.mkdir(self._superproject._superproject_path)
+ manifest_path = self._superproject._WriteManifestFile()
+ self.assertIsNotNone(manifest_path)
+ with open(manifest_path, "r") as fp:
manifest_xml_data = fp.read()
- self.assertEqual(
- sort_attributes(manifest_xml_data),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project groups="notdefault,platform-' + self.platform + '" '
- 'name="platform/art" path="art" '
- 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+ self.assertEqual(
+ sort_attributes(manifest_xml_data),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project groups="notdefault,platform-' + self.platform + '" '
+ 'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_superproject_update_project_revision_id_no_superproject_tag(self):
- """Test update of commit ids of a manifest without superproject tag."""
- manifest = self.getXmlManifest("""
+ def test_superproject_update_project_revision_id(self):
+ """Test with LsTree being a mock."""
+ self.assertEqual(len(self._superproject._manifest.projects), 1)
+ projects = self._superproject._manifest.projects
+ data = (
+ "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
+ "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
+ )
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ with mock.patch.object(
+ self._superproject, "_Fetch", return_value=True
+ ):
+ with mock.patch.object(
+ self._superproject, "_LsTree", return_value=data
+ ):
+ # Create temporary directory so that it can write the file.
+ os.mkdir(self._superproject._superproject_path)
+ update_result = self._superproject.UpdateProjectsRevisionId(
+ projects, self.git_event_log
+ )
+ self.assertIsNotNone(update_result.manifest_path)
+ self.assertFalse(update_result.fatal)
+ with open(update_result.manifest_path, "r") as fp:
+ manifest_xml_data = fp.read()
+ self.assertEqual(
+ sort_attributes(manifest_xml_data),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project groups="notdefault,platform-'
+ + self.platform
+ + '" '
+ 'name="platform/art" path="art" '
+ 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
+
+ def test_superproject_update_project_revision_id_no_superproject_tag(self):
+ """Test update of commit ids of a manifest without superproject tag."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="test-name"/>
</manifest>
-""")
- self.maxDiff = None
- self.assertIsNone(manifest.superproject)
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project name="test-name"/>'
- '</manifest>')
+"""
+ )
+ self.maxDiff = None
+ self.assertIsNone(manifest.superproject)
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project name="test-name"/>'
+ "</manifest>",
+ )
- def test_superproject_update_project_revision_id_from_local_manifest_group(self):
- """Test update of commit ids of a manifest that have local manifest no superproject group."""
- local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ':local'
- manifest = self.getXmlManifest("""
+ def test_superproject_update_project_revision_id_from_local_manifest_group(
+ self,
+ ):
+ """Test update of commit ids of a manifest that have local manifest no superproject group."""
+ local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ":local"
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<remote name="goog" fetch="http://localhost2" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
<project path="vendor/x" name="platform/vendor/x" remote="goog"
- groups=\"""" + local_group + """
+ groups=\""""
+ + local_group
+ + """
" revision="master-with-vendor" clone-depth="1" />
- <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
+ <project path="art" name="platform/art" groups="notdefault,platform-"""
+ + self.platform
+ + """
" /></manifest>
-""")
- self.maxDiff = None
- self._superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
- self.assertEqual(len(self._superproject._manifest.projects), 2)
- projects = self._superproject._manifest.projects
- data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00')
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- with mock.patch.object(self._superproject, '_Fetch', return_value=True):
- with mock.patch.object(self._superproject,
- '_LsTree',
- return_value=data):
- # Create temporary directory so that it can write the file.
- os.mkdir(self._superproject._superproject_path)
- update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
- self.assertIsNotNone(update_result.manifest_path)
- self.assertFalse(update_result.fatal)
- with open(update_result.manifest_path, 'r') as fp:
- manifest_xml_data = fp.read()
- # Verify platform/vendor/x's project revision hasn't changed.
- self.assertEqual(
- sort_attributes(manifest_xml_data),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<remote fetch="http://localhost2" name="goog"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project groups="notdefault,platform-' + self.platform + '" '
- 'name="platform/art" path="art" '
- 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.maxDiff = None
+ self._superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("default-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
+ self.assertEqual(len(self._superproject._manifest.projects), 2)
+ projects = self._superproject._manifest.projects
+ data = "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ with mock.patch.object(
+ self._superproject, "_Fetch", return_value=True
+ ):
+ with mock.patch.object(
+ self._superproject, "_LsTree", return_value=data
+ ):
+ # Create temporary directory so that it can write the file.
+ os.mkdir(self._superproject._superproject_path)
+ update_result = self._superproject.UpdateProjectsRevisionId(
+ projects, self.git_event_log
+ )
+ self.assertIsNotNone(update_result.manifest_path)
+ self.assertFalse(update_result.fatal)
+ with open(update_result.manifest_path, "r") as fp:
+ manifest_xml_data = fp.read()
+ # Verify platform/vendor/x's project revision hasn't
+ # changed.
+ self.assertEqual(
+ sort_attributes(manifest_xml_data),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<remote fetch="http://localhost2" name="goog"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project groups="notdefault,platform-'
+ + self.platform
+ + '" '
+ 'name="platform/art" path="art" '
+ 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_superproject_update_project_revision_id_with_pinned_manifest(self):
- """Test update of commit ids of a pinned manifest."""
- manifest = self.getXmlManifest("""
+ def test_superproject_update_project_revision_id_with_pinned_manifest(self):
+ """Test update of commit ids of a pinned manifest."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
@@ -326,80 +398,132 @@
<project path="vendor/x" name="platform/vendor/x" revision="" />
<project path="vendor/y" name="platform/vendor/y"
revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f" />
- <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
+ <project path="art" name="platform/art" groups="notdefault,platform-"""
+ + self.platform
+ + """
" /></manifest>
-""")
- self.maxDiff = None
- self._superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
- self.assertEqual(len(self._superproject._manifest.projects), 3)
- projects = self._superproject._manifest.projects
- data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
- '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00')
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- with mock.patch.object(self._superproject, '_Fetch', return_value=True):
- with mock.patch.object(self._superproject,
- '_LsTree',
- return_value=data):
- # Create temporary directory so that it can write the file.
- os.mkdir(self._superproject._superproject_path)
- update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
- self.assertIsNotNone(update_result.manifest_path)
- self.assertFalse(update_result.fatal)
- with open(update_result.manifest_path, 'r') as fp:
- manifest_xml_data = fp.read()
- # Verify platform/vendor/x's project revision hasn't changed.
- self.assertEqual(
- sort_attributes(manifest_xml_data),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project groups="notdefault,platform-' + self.platform + '" '
- 'name="platform/art" path="art" '
- 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
- '<project name="platform/vendor/x" path="vendor/x" '
- 'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>'
- '<project name="platform/vendor/y" path="vendor/y" '
- 'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.maxDiff = None
+ self._superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("default-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
+ self.assertEqual(len(self._superproject._manifest.projects), 3)
+ projects = self._superproject._manifest.projects
+ data = (
+ "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
+ "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00"
+ )
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ with mock.patch.object(
+ self._superproject, "_Fetch", return_value=True
+ ):
+ with mock.patch.object(
+ self._superproject, "_LsTree", return_value=data
+ ):
+ # Create temporary directory so that it can write the file.
+ os.mkdir(self._superproject._superproject_path)
+ update_result = self._superproject.UpdateProjectsRevisionId(
+ projects, self.git_event_log
+ )
+ self.assertIsNotNone(update_result.manifest_path)
+ self.assertFalse(update_result.fatal)
+ with open(update_result.manifest_path, "r") as fp:
+ manifest_xml_data = fp.read()
+ # Verify platform/vendor/x's project revision hasn't
+ # changed.
+ self.assertEqual(
+ sort_attributes(manifest_xml_data),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project groups="notdefault,platform-'
+ + self.platform
+ + '" '
+ 'name="platform/art" path="art" '
+ 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
+ '<project name="platform/vendor/x" path="vendor/x" '
+ 'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>'
+ '<project name="platform/vendor/y" path="vendor/y" '
+ 'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_Fetch(self):
- manifest = self.getXmlManifest("""
+ def test_Fetch(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
" /></manifest>
-""")
- self.maxDiff = None
- self._superproject = git_superproject.Superproject(
- manifest, name='superproject',
- remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
- revision='refs/heads/main')
- os.mkdir(self._superproject._superproject_path)
- os.mkdir(self._superproject._work_git)
- with mock.patch.object(self._superproject, '_Init', return_value=True):
- with mock.patch('git_superproject.GitCommand', autospec=True) as mock_git_command:
- with mock.patch('git_superproject.GitRefs.get', autospec=True) as mock_git_refs:
- instance = mock_git_command.return_value
- instance.Wait.return_value = 0
- mock_git_refs.side_effect = ['', '1234']
+"""
+ )
+ self.maxDiff = None
+ self._superproject = git_superproject.Superproject(
+ manifest,
+ name="superproject",
+ remote=manifest.remotes.get("default-remote").ToRemoteSpec(
+ "superproject"
+ ),
+ revision="refs/heads/main",
+ )
+ os.mkdir(self._superproject._superproject_path)
+ os.mkdir(self._superproject._work_git)
+ with mock.patch.object(self._superproject, "_Init", return_value=True):
+ with mock.patch(
+ "git_superproject.GitCommand", autospec=True
+ ) as mock_git_command:
+ with mock.patch(
+ "git_superproject.GitRefs.get", autospec=True
+ ) as mock_git_refs:
+ instance = mock_git_command.return_value
+ instance.Wait.return_value = 0
+ mock_git_refs.side_effect = ["", "1234"]
- self.assertTrue(self._superproject._Fetch())
- self.assertEqual(mock_git_command.call_args.args,(None, [
- 'fetch', 'http://localhost/superproject', '--depth', '1',
- '--force', '--no-tags', '--filter', 'blob:none',
- 'refs/heads/main:refs/heads/main'
- ]))
+ self.assertTrue(self._superproject._Fetch())
+ self.assertEqual(
+ mock_git_command.call_args.args,
+ (
+ None,
+ [
+ "fetch",
+ "http://localhost/superproject",
+ "--depth",
+ "1",
+ "--force",
+ "--no-tags",
+ "--filter",
+ "blob:none",
+ "refs/heads/main:refs/heads/main",
+ ],
+ ),
+ )
- # If branch for revision exists, set as --negotiation-tip.
- self.assertTrue(self._superproject._Fetch())
- self.assertEqual(mock_git_command.call_args.args,(None, [
- 'fetch', 'http://localhost/superproject', '--depth', '1',
- '--force', '--no-tags', '--filter', 'blob:none',
- '--negotiation-tip', '1234',
- 'refs/heads/main:refs/heads/main'
- ]))
+ # If branch for revision exists, set as --negotiation-tip.
+ self.assertTrue(self._superproject._Fetch())
+ self.assertEqual(
+ mock_git_command.call_args.args,
+ (
+ None,
+ [
+ "fetch",
+ "http://localhost/superproject",
+ "--depth",
+ "1",
+ "--force",
+ "--no-tags",
+ "--filter",
+ "blob:none",
+ "--negotiation-tip",
+ "1234",
+ "refs/heads/main:refs/heads/main",
+ ],
+ ),
+ )
diff --git a/tests/test_git_trace2_event_log.py b/tests/test_git_trace2_event_log.py
index 7e7dfb7..a6078d3 100644
--- a/tests/test_git_trace2_event_log.py
+++ b/tests/test_git_trace2_event_log.py
@@ -27,361 +27,382 @@
def serverLoggingThread(socket_path, server_ready, received_traces):
- """Helper function to receive logs over a Unix domain socket.
+ """Helper function to receive logs over a Unix domain socket.
- Appends received messages on the provided socket and appends to received_traces.
+    Receives messages on the provided socket and appends them to
+    received_traces.
- Args:
- socket_path: path to a Unix domain socket on which to listen for traces
- server_ready: a threading.Condition used to signal to the caller that this thread is ready to
- accept connections
- received_traces: a list to which received traces will be appended (after decoding to a utf-8
- string).
- """
- platform_utils.remove(socket_path, missing_ok=True)
- data = b''
- with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
- sock.bind(socket_path)
- sock.listen(0)
- with server_ready:
- server_ready.notify()
- with sock.accept()[0] as conn:
- while True:
- recved = conn.recv(4096)
- if not recved:
- break
- data += recved
- received_traces.extend(data.decode('utf-8').splitlines())
+ Args:
+ socket_path: path to a Unix domain socket on which to listen for traces
+ server_ready: a threading.Condition used to signal to the caller that
+ this thread is ready to accept connections
+ received_traces: a list to which received traces will be appended (after
+ decoding to a utf-8 string).
+ """
+ platform_utils.remove(socket_path, missing_ok=True)
+ data = b""
+ with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
+ sock.bind(socket_path)
+ sock.listen(0)
+ with server_ready:
+ server_ready.notify()
+ with sock.accept()[0] as conn:
+ while True:
+ recved = conn.recv(4096)
+ if not recved:
+ break
+ data += recved
+ received_traces.extend(data.decode("utf-8").splitlines())
class EventLogTestCase(unittest.TestCase):
- """TestCase for the EventLog module."""
+ """TestCase for the EventLog module."""
- PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
- PARENT_SID_VALUE = 'parent_sid'
- SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
- FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
+ PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
+ PARENT_SID_VALUE = "parent_sid"
+ SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
+ FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
- def setUp(self):
- """Load the event_log module every time."""
- self._event_log_module = None
- # By default we initialize with the expected case where
- # repo launches us (so GIT_TRACE2_PARENT_SID is set).
- env = {
- self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
- }
- self._event_log_module = git_trace2_event_log.EventLog(env=env)
- self._log_data = None
+ def setUp(self):
+ """Load the event_log module every time."""
+ self._event_log_module = None
+ # By default we initialize with the expected case where
+ # repo launches us (so GIT_TRACE2_PARENT_SID is set).
+ env = {
+ self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
+ }
+ self._event_log_module = git_trace2_event_log.EventLog(env=env)
+ self._log_data = None
- def verifyCommonKeys(self, log_entry, expected_event_name=None, full_sid=True):
- """Helper function to verify common event log keys."""
- self.assertIn('event', log_entry)
- self.assertIn('sid', log_entry)
- self.assertIn('thread', log_entry)
- self.assertIn('time', log_entry)
+ def verifyCommonKeys(
+ self, log_entry, expected_event_name=None, full_sid=True
+ ):
+ """Helper function to verify common event log keys."""
+ self.assertIn("event", log_entry)
+ self.assertIn("sid", log_entry)
+ self.assertIn("thread", log_entry)
+ self.assertIn("time", log_entry)
- # Do basic data format validation.
- if expected_event_name:
- self.assertEqual(expected_event_name, log_entry['event'])
- if full_sid:
- self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
- else:
- self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
- self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
+ # Do basic data format validation.
+ if expected_event_name:
+ self.assertEqual(expected_event_name, log_entry["event"])
+ if full_sid:
+ self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
+ else:
+ self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
+ self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
- def readLog(self, log_path):
- """Helper function to read log data into a list."""
- log_data = []
- with open(log_path, mode='rb') as f:
- for line in f:
- log_data.append(json.loads(line))
- return log_data
+ def readLog(self, log_path):
+ """Helper function to read log data into a list."""
+ log_data = []
+ with open(log_path, mode="rb") as f:
+ for line in f:
+ log_data.append(json.loads(line))
+ return log_data
- def remove_prefix(self, s, prefix):
- """Return a copy string after removing |prefix| from |s|, if present or the original string."""
- if s.startswith(prefix):
- return s[len(prefix):]
- else:
- return s
+ def remove_prefix(self, s, prefix):
+        """Return a copy of |s| with |prefix| removed if present, or
+        the original string otherwise."""
+ if s.startswith(prefix):
+ return s[len(prefix) :]
+ else:
+ return s
- def test_initial_state_with_parent_sid(self):
- """Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
- self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
+ def test_initial_state_with_parent_sid(self):
+ """Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
+ self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
- def test_initial_state_no_parent_sid(self):
- """Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
- # Setup an empty environment dict (no parent sid).
- self._event_log_module = git_trace2_event_log.EventLog(env={})
- self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
+ def test_initial_state_no_parent_sid(self):
+ """Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
+ # Setup an empty environment dict (no parent sid).
+ self._event_log_module = git_trace2_event_log.EventLog(env={})
+ self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
- def test_version_event(self):
- """Test 'version' event data is valid.
+ def test_version_event(self):
+ """Test 'version' event data is valid.
- Verify that the 'version' event is written even when no other
- events are addded.
+ Verify that the 'version' event is written even when no other
+        events are added.
- Expected event log:
- <version event>
- """
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
+ Expected event log:
+ <version event>
+ """
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
- # A log with no added events should only have the version entry.
- self.assertEqual(len(self._log_data), 1)
- version_event = self._log_data[0]
- self.verifyCommonKeys(version_event, expected_event_name='version')
- # Check for 'version' event specific fields.
- self.assertIn('evt', version_event)
- self.assertIn('exe', version_event)
- # Verify "evt" version field is a string.
- self.assertIsInstance(version_event['evt'], str)
+ # A log with no added events should only have the version entry.
+ self.assertEqual(len(self._log_data), 1)
+ version_event = self._log_data[0]
+ self.verifyCommonKeys(version_event, expected_event_name="version")
+ # Check for 'version' event specific fields.
+ self.assertIn("evt", version_event)
+ self.assertIn("exe", version_event)
+ # Verify "evt" version field is a string.
+ self.assertIsInstance(version_event["evt"], str)
- def test_start_event(self):
- """Test and validate 'start' event data is valid.
+ def test_start_event(self):
+ """Test and validate 'start' event data is valid.
- Expected event log:
- <version event>
- <start event>
- """
- self._event_log_module.StartEvent()
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 2)
- start_event = self._log_data[1]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
- self.verifyCommonKeys(start_event, expected_event_name='start')
- # Check for 'start' event specific fields.
- self.assertIn('argv', start_event)
- self.assertTrue(isinstance(start_event['argv'], list))
-
- def test_exit_event_result_none(self):
- """Test 'exit' event data is valid when result is None.
-
- We expect None result to be converted to 0 in the exit event data.
-
- Expected event log:
- <version event>
- <exit event>
- """
- self._event_log_module.ExitEvent(None)
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 2)
- exit_event = self._log_data[1]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
- self.verifyCommonKeys(exit_event, expected_event_name='exit')
- # Check for 'exit' event specific fields.
- self.assertIn('code', exit_event)
- # 'None' result should convert to 0 (successful) return code.
- self.assertEqual(exit_event['code'], 0)
-
- def test_exit_event_result_integer(self):
- """Test 'exit' event data is valid when result is an integer.
-
- Expected event log:
- <version event>
- <exit event>
- """
- self._event_log_module.ExitEvent(2)
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 2)
- exit_event = self._log_data[1]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
- self.verifyCommonKeys(exit_event, expected_event_name='exit')
- # Check for 'exit' event specific fields.
- self.assertIn('code', exit_event)
- self.assertEqual(exit_event['code'], 2)
-
- def test_command_event(self):
- """Test and validate 'command' event data is valid.
-
- Expected event log:
- <version event>
- <command event>
- """
- name = 'repo'
- subcommands = ['init' 'this']
- self._event_log_module.CommandEvent(name='repo', subcommands=subcommands)
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 2)
- command_event = self._log_data[1]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
- self.verifyCommonKeys(command_event, expected_event_name='command')
- # Check for 'command' event specific fields.
- self.assertIn('name', command_event)
- self.assertIn('subcommands', command_event)
- self.assertEqual(command_event['name'], name)
- self.assertEqual(command_event['subcommands'], subcommands)
-
- def test_def_params_event_repo_config(self):
- """Test 'def_params' event data outputs only repo config keys.
-
- Expected event log:
- <version event>
- <def_param event>
- <def_param event>
- """
- config = {
- 'git.foo': 'bar',
- 'repo.partialclone': 'true',
- 'repo.partialclonefilter': 'blob:none',
- }
- self._event_log_module.DefParamRepoEvents(config)
-
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 3)
- def_param_events = self._log_data[1:]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
-
- for event in def_param_events:
- self.verifyCommonKeys(event, expected_event_name='def_param')
- # Check for 'def_param' event specific fields.
- self.assertIn('param', event)
- self.assertIn('value', event)
- self.assertTrue(event['param'].startswith('repo.'))
-
- def test_def_params_event_no_repo_config(self):
- """Test 'def_params' event data won't output non-repo config keys.
-
- Expected event log:
- <version event>
- """
- config = {
- 'git.foo': 'bar',
- 'git.core.foo2': 'baz',
- }
- self._event_log_module.DefParamRepoEvents(config)
-
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 1)
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
-
- def test_data_event_config(self):
- """Test 'data' event data outputs all config keys.
-
- Expected event log:
- <version event>
- <data event>
- <data event>
- """
- config = {
- 'git.foo': 'bar',
- 'repo.partialclone': 'false',
- 'repo.syncstate.superproject.hassuperprojecttag': 'true',
- 'repo.syncstate.superproject.sys.argv': ['--', 'sync', 'protobuf'],
- }
- prefix_value = 'prefix'
- self._event_log_module.LogDataConfigEvents(config, prefix_value)
-
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 5)
- data_events = self._log_data[1:]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
-
- for event in data_events:
- self.verifyCommonKeys(event)
- # Check for 'data' event specific fields.
- self.assertIn('key', event)
- self.assertIn('value', event)
- key = event['key']
- key = self.remove_prefix(key, f'{prefix_value}/')
- value = event['value']
- self.assertEqual(self._event_log_module.GetDataEventName(value), event['event'])
- self.assertTrue(key in config and value == config[key])
-
- def test_error_event(self):
- """Test and validate 'error' event data is valid.
-
- Expected event log:
- <version event>
- <error event>
- """
- msg = 'invalid option: --cahced'
- fmt = 'invalid option: %s'
- self._event_log_module.ErrorEvent(msg, fmt)
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- log_path = self._event_log_module.Write(path=tempdir)
- self._log_data = self.readLog(log_path)
-
- self.assertEqual(len(self._log_data), 2)
- error_event = self._log_data[1]
- self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
- self.verifyCommonKeys(error_event, expected_event_name='error')
- # Check for 'error' event specific fields.
- self.assertIn('msg', error_event)
- self.assertIn('fmt', error_event)
- self.assertEqual(error_event['msg'], msg)
- self.assertEqual(error_event['fmt'], fmt)
-
- def test_write_with_filename(self):
- """Test Write() with a path to a file exits with None."""
- self.assertIsNone(self._event_log_module.Write(path='path/to/file'))
-
- def test_write_with_git_config(self):
- """Test Write() uses the git config path when 'git config' call succeeds."""
- with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
- with mock.patch.object(self._event_log_module,
- '_GetEventTargetPath', return_value=tempdir):
- self.assertEqual(os.path.dirname(self._event_log_module.Write()), tempdir)
-
- def test_write_no_git_config(self):
- """Test Write() with no git config variable present exits with None."""
- with mock.patch.object(self._event_log_module,
- '_GetEventTargetPath', return_value=None):
- self.assertIsNone(self._event_log_module.Write())
-
- def test_write_non_string(self):
- """Test Write() with non-string type for |path| throws TypeError."""
- with self.assertRaises(TypeError):
- self._event_log_module.Write(path=1234)
-
- def test_write_socket(self):
- """Test Write() with Unix domain socket for |path| and validate received traces."""
- received_traces = []
- with tempfile.TemporaryDirectory(prefix='test_server_sockets') as tempdir:
- socket_path = os.path.join(tempdir, "server.sock")
- server_ready = threading.Condition()
- # Start "server" listening on Unix domain socket at socket_path.
- try:
- server_thread = threading.Thread(
- target=serverLoggingThread,
- args=(socket_path, server_ready, received_traces))
- server_thread.start()
-
- with server_ready:
- server_ready.wait(timeout=120)
-
+ Expected event log:
+ <version event>
+ <start event>
+ """
self._event_log_module.StartEvent()
- path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
- finally:
- server_thread.join(timeout=5)
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
- self.assertEqual(path, f'af_unix:stream:{socket_path}')
- self.assertEqual(len(received_traces), 2)
- version_event = json.loads(received_traces[0])
- start_event = json.loads(received_traces[1])
- self.verifyCommonKeys(version_event, expected_event_name='version')
- self.verifyCommonKeys(start_event, expected_event_name='start')
- # Check for 'start' event specific fields.
- self.assertIn('argv', start_event)
- self.assertIsInstance(start_event['argv'], list)
+ self.assertEqual(len(self._log_data), 2)
+ start_event = self._log_data[1]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(start_event, expected_event_name="start")
+ # Check for 'start' event specific fields.
+ self.assertIn("argv", start_event)
+ self.assertTrue(isinstance(start_event["argv"], list))
+
+ def test_exit_event_result_none(self):
+ """Test 'exit' event data is valid when result is None.
+
+ We expect None result to be converted to 0 in the exit event data.
+
+ Expected event log:
+ <version event>
+ <exit event>
+ """
+ self._event_log_module.ExitEvent(None)
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 2)
+ exit_event = self._log_data[1]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(exit_event, expected_event_name="exit")
+ # Check for 'exit' event specific fields.
+ self.assertIn("code", exit_event)
+ # 'None' result should convert to 0 (successful) return code.
+ self.assertEqual(exit_event["code"], 0)
+
+ def test_exit_event_result_integer(self):
+ """Test 'exit' event data is valid when result is an integer.
+
+ Expected event log:
+ <version event>
+ <exit event>
+ """
+ self._event_log_module.ExitEvent(2)
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 2)
+ exit_event = self._log_data[1]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(exit_event, expected_event_name="exit")
+ # Check for 'exit' event specific fields.
+ self.assertIn("code", exit_event)
+ self.assertEqual(exit_event["code"], 2)
+
+ def test_command_event(self):
+ """Test and validate 'command' event data is valid.
+
+ Expected event log:
+ <version event>
+ <command event>
+ """
+ name = "repo"
+ subcommands = ["init" "this"]
+ self._event_log_module.CommandEvent(
+ name="repo", subcommands=subcommands
+ )
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 2)
+ command_event = self._log_data[1]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(command_event, expected_event_name="command")
+ # Check for 'command' event specific fields.
+ self.assertIn("name", command_event)
+ self.assertIn("subcommands", command_event)
+ self.assertEqual(command_event["name"], name)
+ self.assertEqual(command_event["subcommands"], subcommands)
+
+ def test_def_params_event_repo_config(self):
+ """Test 'def_params' event data outputs only repo config keys.
+
+ Expected event log:
+ <version event>
+ <def_param event>
+ <def_param event>
+ """
+ config = {
+ "git.foo": "bar",
+ "repo.partialclone": "true",
+ "repo.partialclonefilter": "blob:none",
+ }
+ self._event_log_module.DefParamRepoEvents(config)
+
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 3)
+ def_param_events = self._log_data[1:]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+
+ for event in def_param_events:
+ self.verifyCommonKeys(event, expected_event_name="def_param")
+ # Check for 'def_param' event specific fields.
+ self.assertIn("param", event)
+ self.assertIn("value", event)
+ self.assertTrue(event["param"].startswith("repo."))
+
+ def test_def_params_event_no_repo_config(self):
+ """Test 'def_params' event data won't output non-repo config keys.
+
+ Expected event log:
+ <version event>
+ """
+ config = {
+ "git.foo": "bar",
+ "git.core.foo2": "baz",
+ }
+ self._event_log_module.DefParamRepoEvents(config)
+
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 1)
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+
+ def test_data_event_config(self):
+ """Test 'data' event data outputs all config keys.
+
+ Expected event log:
+ <version event>
+ <data event>
+ <data event>
+ """
+ config = {
+ "git.foo": "bar",
+ "repo.partialclone": "false",
+ "repo.syncstate.superproject.hassuperprojecttag": "true",
+ "repo.syncstate.superproject.sys.argv": ["--", "sync", "protobuf"],
+ }
+ prefix_value = "prefix"
+ self._event_log_module.LogDataConfigEvents(config, prefix_value)
+
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 5)
+ data_events = self._log_data[1:]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+
+ for event in data_events:
+ self.verifyCommonKeys(event)
+ # Check for 'data' event specific fields.
+ self.assertIn("key", event)
+ self.assertIn("value", event)
+ key = event["key"]
+ key = self.remove_prefix(key, f"{prefix_value}/")
+ value = event["value"]
+ self.assertEqual(
+ self._event_log_module.GetDataEventName(value), event["event"]
+ )
+ self.assertTrue(key in config and value == config[key])
+
+ def test_error_event(self):
+ """Test and validate 'error' event data is valid.
+
+ Expected event log:
+ <version event>
+ <error event>
+ """
+ msg = "invalid option: --cahced"
+ fmt = "invalid option: %s"
+ self._event_log_module.ErrorEvent(msg, fmt)
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ log_path = self._event_log_module.Write(path=tempdir)
+ self._log_data = self.readLog(log_path)
+
+ self.assertEqual(len(self._log_data), 2)
+ error_event = self._log_data[1]
+ self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
+ self.verifyCommonKeys(error_event, expected_event_name="error")
+ # Check for 'error' event specific fields.
+ self.assertIn("msg", error_event)
+ self.assertIn("fmt", error_event)
+ self.assertEqual(error_event["msg"], msg)
+ self.assertEqual(error_event["fmt"], fmt)
+
+ def test_write_with_filename(self):
+ """Test Write() with a path to a file exits with None."""
+ self.assertIsNone(self._event_log_module.Write(path="path/to/file"))
+
+ def test_write_with_git_config(self):
+ """Test Write() uses the git config path when 'git config' call
+ succeeds."""
+ with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
+ with mock.patch.object(
+ self._event_log_module,
+ "_GetEventTargetPath",
+ return_value=tempdir,
+ ):
+ self.assertEqual(
+ os.path.dirname(self._event_log_module.Write()), tempdir
+ )
+
+ def test_write_no_git_config(self):
+ """Test Write() with no git config variable present exits with None."""
+ with mock.patch.object(
+ self._event_log_module, "_GetEventTargetPath", return_value=None
+ ):
+ self.assertIsNone(self._event_log_module.Write())
+
+ def test_write_non_string(self):
+ """Test Write() with non-string type for |path| throws TypeError."""
+ with self.assertRaises(TypeError):
+ self._event_log_module.Write(path=1234)
+
+ def test_write_socket(self):
+ """Test Write() with Unix domain socket for |path| and validate received
+ traces."""
+ received_traces = []
+ with tempfile.TemporaryDirectory(
+ prefix="test_server_sockets"
+ ) as tempdir:
+ socket_path = os.path.join(tempdir, "server.sock")
+ server_ready = threading.Condition()
+ # Start "server" listening on Unix domain socket at socket_path.
+ try:
+ server_thread = threading.Thread(
+ target=serverLoggingThread,
+ args=(socket_path, server_ready, received_traces),
+ )
+ server_thread.start()
+
+ with server_ready:
+ server_ready.wait(timeout=120)
+
+ self._event_log_module.StartEvent()
+ path = self._event_log_module.Write(
+ path=f"af_unix:{socket_path}"
+ )
+ finally:
+ server_thread.join(timeout=5)
+
+ self.assertEqual(path, f"af_unix:stream:{socket_path}")
+ self.assertEqual(len(received_traces), 2)
+ version_event = json.loads(received_traces[0])
+ start_event = json.loads(received_traces[1])
+ self.verifyCommonKeys(version_event, expected_event_name="version")
+ self.verifyCommonKeys(start_event, expected_event_name="start")
+ # Check for 'start' event specific fields.
+ self.assertIn("argv", start_event)
+ self.assertIsInstance(start_event["argv"], list)
diff --git a/tests/test_hooks.py b/tests/test_hooks.py
index 6632b3e..7827712 100644
--- a/tests/test_hooks.py
+++ b/tests/test_hooks.py
@@ -17,39 +17,38 @@
import hooks
import unittest
+
class RepoHookShebang(unittest.TestCase):
- """Check shebang parsing in RepoHook."""
+ """Check shebang parsing in RepoHook."""
- def test_no_shebang(self):
- """Lines w/out shebangs should be rejected."""
- DATA = (
- '',
- '#\n# foo\n',
- '# Bad shebang in script\n#!/foo\n'
- )
- for data in DATA:
- self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
+ def test_no_shebang(self):
+ """Lines w/out shebangs should be rejected."""
+ DATA = ("", "#\n# foo\n", "# Bad shebang in script\n#!/foo\n")
+ for data in DATA:
+ self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
- def test_direct_interp(self):
- """Lines whose shebang points directly to the interpreter."""
- DATA = (
- ('#!/foo', '/foo'),
- ('#! /foo', '/foo'),
- ('#!/bin/foo ', '/bin/foo'),
- ('#! /usr/foo ', '/usr/foo'),
- ('#! /usr/foo -args', '/usr/foo'),
- )
- for shebang, interp in DATA:
- self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
- interp)
+ def test_direct_interp(self):
+ """Lines whose shebang points directly to the interpreter."""
+ DATA = (
+ ("#!/foo", "/foo"),
+ ("#! /foo", "/foo"),
+ ("#!/bin/foo ", "/bin/foo"),
+ ("#! /usr/foo ", "/usr/foo"),
+ ("#! /usr/foo -args", "/usr/foo"),
+ )
+ for shebang, interp in DATA:
+ self.assertEqual(
+ hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
+ )
- def test_env_interp(self):
- """Lines whose shebang launches through `env`."""
- DATA = (
- ('#!/usr/bin/env foo', 'foo'),
- ('#!/bin/env foo', 'foo'),
- ('#! /bin/env /bin/foo ', '/bin/foo'),
- )
- for shebang, interp in DATA:
- self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
- interp)
+ def test_env_interp(self):
+ """Lines whose shebang launches through `env`."""
+ DATA = (
+ ("#!/usr/bin/env foo", "foo"),
+ ("#!/bin/env foo", "foo"),
+ ("#! /bin/env /bin/foo ", "/bin/foo"),
+ )
+ for shebang, interp in DATA:
+ self.assertEqual(
+ hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
+ )
diff --git a/tests/test_manifest_xml.py b/tests/test_manifest_xml.py
index 3634701..648acde 100644
--- a/tests/test_manifest_xml.py
+++ b/tests/test_manifest_xml.py
@@ -27,291 +27,318 @@
# Invalid paths that we don't want in the filesystem.
INVALID_FS_PATHS = (
- '',
- '.',
- '..',
- '../',
- './',
- './/',
- 'foo/',
- './foo',
- '../foo',
- 'foo/./bar',
- 'foo/../../bar',
- '/foo',
- './../foo',
- '.git/foo',
+ "",
+ ".",
+ "..",
+ "../",
+ "./",
+ ".//",
+ "foo/",
+ "./foo",
+ "../foo",
+ "foo/./bar",
+ "foo/../../bar",
+ "/foo",
+ "./../foo",
+ ".git/foo",
# Check case folding.
- '.GIT/foo',
- 'blah/.git/foo',
- '.repo/foo',
- '.repoconfig',
+ ".GIT/foo",
+ "blah/.git/foo",
+ ".repo/foo",
+ ".repoconfig",
# Block ~ due to 8.3 filenames on Windows filesystems.
- '~',
- 'foo~',
- 'blah/foo~',
+ "~",
+ "foo~",
+ "blah/foo~",
# Block Unicode characters that get normalized out by filesystems.
- u'foo\u200Cbar',
+ "foo\u200Cbar",
# Block newlines.
- 'f\n/bar',
- 'f\r/bar',
+ "f\n/bar",
+ "f\r/bar",
)
# Make sure platforms that use path separators (e.g. Windows) are also
# rejected properly.
-if os.path.sep != '/':
- INVALID_FS_PATHS += tuple(x.replace('/', os.path.sep) for x in INVALID_FS_PATHS)
+if os.path.sep != "/":
+ INVALID_FS_PATHS += tuple(
+ x.replace("/", os.path.sep) for x in INVALID_FS_PATHS
+ )
def sort_attributes(manifest):
- """Sort the attributes of all elements alphabetically.
+ """Sort the attributes of all elements alphabetically.
- This is needed because different versions of the toxml() function from
- xml.dom.minidom outputs the attributes of elements in different orders.
- Before Python 3.8 they were output alphabetically, later versions preserve
- the order specified by the user.
+ This is needed because different versions of the toxml() function from
+ xml.dom.minidom outputs the attributes of elements in different orders.
+ Before Python 3.8 they were output alphabetically, later versions preserve
+ the order specified by the user.
- Args:
- manifest: String containing an XML manifest.
+ Args:
+ manifest: String containing an XML manifest.
- Returns:
- The XML manifest with the attributes of all elements sorted alphabetically.
- """
- new_manifest = ''
- # This will find every element in the XML manifest, whether they have
- # attributes or not. This simplifies recreating the manifest below.
- matches = re.findall(r'(<[/?]?[a-z-]+\s*)((?:\S+?="[^"]+"\s*?)*)(\s*[/?]?>)', manifest)
- for head, attrs, tail in matches:
- m = re.findall(r'\S+?="[^"]+"', attrs)
- new_manifest += head + ' '.join(sorted(m)) + tail
- return new_manifest
+ Returns:
+ The XML manifest with the attributes of all elements sorted
+ alphabetically.
+ """
+ new_manifest = ""
+ # This will find every element in the XML manifest, whether they have
+ # attributes or not. This simplifies recreating the manifest below.
+ matches = re.findall(
+ r'(<[/?]?[a-z-]+\s*)((?:\S+?="[^"]+"\s*?)*)(\s*[/?]?>)', manifest
+ )
+ for head, attrs, tail in matches:
+ m = re.findall(r'\S+?="[^"]+"', attrs)
+ new_manifest += head + " ".join(sorted(m)) + tail
+ return new_manifest
class ManifestParseTestCase(unittest.TestCase):
- """TestCase for parsing manifests."""
+ """TestCase for parsing manifests."""
- def setUp(self):
- self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
- self.tempdir = self.tempdirobj.name
- self.repodir = os.path.join(self.tempdir, '.repo')
- self.manifest_dir = os.path.join(self.repodir, 'manifests')
- self.manifest_file = os.path.join(
- self.repodir, manifest_xml.MANIFEST_FILE_NAME)
- self.local_manifest_dir = os.path.join(
- self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
- os.mkdir(self.repodir)
- os.mkdir(self.manifest_dir)
+ def setUp(self):
+ self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
+ self.tempdir = self.tempdirobj.name
+ self.repodir = os.path.join(self.tempdir, ".repo")
+ self.manifest_dir = os.path.join(self.repodir, "manifests")
+ self.manifest_file = os.path.join(
+ self.repodir, manifest_xml.MANIFEST_FILE_NAME
+ )
+ self.local_manifest_dir = os.path.join(
+ self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME
+ )
+ os.mkdir(self.repodir)
+ os.mkdir(self.manifest_dir)
- # The manifest parsing really wants a git repo currently.
- gitdir = os.path.join(self.repodir, 'manifests.git')
- os.mkdir(gitdir)
- with open(os.path.join(gitdir, 'config'), 'w') as fp:
- fp.write("""[remote "origin"]
+ # The manifest parsing really wants a git repo currently.
+ gitdir = os.path.join(self.repodir, "manifests.git")
+ os.mkdir(gitdir)
+ with open(os.path.join(gitdir, "config"), "w") as fp:
+ fp.write(
+ """[remote "origin"]
url = https://localhost:0/manifest
-""")
+"""
+ )
- def tearDown(self):
- self.tempdirobj.cleanup()
+ def tearDown(self):
+ self.tempdirobj.cleanup()
- def getXmlManifest(self, data):
- """Helper to initialize a manifest for testing."""
- with open(self.manifest_file, 'w', encoding="utf-8") as fp:
- fp.write(data)
- return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
+ def getXmlManifest(self, data):
+ """Helper to initialize a manifest for testing."""
+ with open(self.manifest_file, "w", encoding="utf-8") as fp:
+ fp.write(data)
+ return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
- @staticmethod
- def encodeXmlAttr(attr):
- """Encode |attr| using XML escape rules."""
-    return attr.replace('\r', '&#x0d;').replace('\n', '&#x0a;')
+ @staticmethod
+ def encodeXmlAttr(attr):
+ """Encode |attr| using XML escape rules."""
+        return attr.replace("\r", "&#x0d;").replace("\n", "&#x0a;")
class ManifestValidateFilePaths(unittest.TestCase):
- """Check _ValidateFilePaths helper.
+ """Check _ValidateFilePaths helper.
- This doesn't access a real filesystem.
- """
+ This doesn't access a real filesystem.
+ """
- def check_both(self, *args):
- manifest_xml.XmlManifest._ValidateFilePaths('copyfile', *args)
- manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)
+ def check_both(self, *args):
+ manifest_xml.XmlManifest._ValidateFilePaths("copyfile", *args)
+ manifest_xml.XmlManifest._ValidateFilePaths("linkfile", *args)
- def test_normal_path(self):
- """Make sure good paths are accepted."""
- self.check_both('foo', 'bar')
- self.check_both('foo/bar', 'bar')
- self.check_both('foo', 'bar/bar')
- self.check_both('foo/bar', 'bar/bar')
+ def test_normal_path(self):
+ """Make sure good paths are accepted."""
+ self.check_both("foo", "bar")
+ self.check_both("foo/bar", "bar")
+ self.check_both("foo", "bar/bar")
+ self.check_both("foo/bar", "bar/bar")
- def test_symlink_targets(self):
- """Some extra checks for symlinks."""
- def check(*args):
- manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)
+ def test_symlink_targets(self):
+ """Some extra checks for symlinks."""
- # We allow symlinks to end in a slash since we allow them to point to dirs
- # in general. Technically the slash isn't necessary.
- check('foo/', 'bar')
- # We allow a single '.' to get a reference to the project itself.
- check('.', 'bar')
+ def check(*args):
+ manifest_xml.XmlManifest._ValidateFilePaths("linkfile", *args)
- def test_bad_paths(self):
- """Make sure bad paths (src & dest) are rejected."""
- for path in INVALID_FS_PATHS:
- self.assertRaises(
- error.ManifestInvalidPathError, self.check_both, path, 'a')
- self.assertRaises(
- error.ManifestInvalidPathError, self.check_both, 'a', path)
+ # We allow symlinks to end in a slash since we allow them to point to
+ # dirs in general. Technically the slash isn't necessary.
+ check("foo/", "bar")
+ # We allow a single '.' to get a reference to the project itself.
+ check(".", "bar")
+
+ def test_bad_paths(self):
+ """Make sure bad paths (src & dest) are rejected."""
+ for path in INVALID_FS_PATHS:
+ self.assertRaises(
+ error.ManifestInvalidPathError, self.check_both, path, "a"
+ )
+ self.assertRaises(
+ error.ManifestInvalidPathError, self.check_both, "a", path
+ )
class ValueTests(unittest.TestCase):
- """Check utility parsing code."""
+ """Check utility parsing code."""
- def _get_node(self, text):
- return xml.dom.minidom.parseString(text).firstChild
+ def _get_node(self, text):
+ return xml.dom.minidom.parseString(text).firstChild
- def test_bool_default(self):
- """Check XmlBool default handling."""
- node = self._get_node('<node/>')
- self.assertIsNone(manifest_xml.XmlBool(node, 'a'))
- self.assertIsNone(manifest_xml.XmlBool(node, 'a', None))
- self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))
+ def test_bool_default(self):
+ """Check XmlBool default handling."""
+ node = self._get_node("<node/>")
+ self.assertIsNone(manifest_xml.XmlBool(node, "a"))
+ self.assertIsNone(manifest_xml.XmlBool(node, "a", None))
+ self.assertEqual(123, manifest_xml.XmlBool(node, "a", 123))
- node = self._get_node('<node a=""/>')
- self.assertIsNone(manifest_xml.XmlBool(node, 'a'))
+ node = self._get_node('<node a=""/>')
+ self.assertIsNone(manifest_xml.XmlBool(node, "a"))
- def test_bool_invalid(self):
- """Check XmlBool invalid handling."""
- node = self._get_node('<node a="moo"/>')
- self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))
+ def test_bool_invalid(self):
+ """Check XmlBool invalid handling."""
+ node = self._get_node('<node a="moo"/>')
+ self.assertEqual(123, manifest_xml.XmlBool(node, "a", 123))
- def test_bool_true(self):
- """Check XmlBool true values."""
- for value in ('yes', 'true', '1'):
- node = self._get_node('<node a="%s"/>' % (value,))
- self.assertTrue(manifest_xml.XmlBool(node, 'a'))
+ def test_bool_true(self):
+ """Check XmlBool true values."""
+ for value in ("yes", "true", "1"):
+ node = self._get_node('<node a="%s"/>' % (value,))
+ self.assertTrue(manifest_xml.XmlBool(node, "a"))
- def test_bool_false(self):
- """Check XmlBool false values."""
- for value in ('no', 'false', '0'):
- node = self._get_node('<node a="%s"/>' % (value,))
- self.assertFalse(manifest_xml.XmlBool(node, 'a'))
+ def test_bool_false(self):
+ """Check XmlBool false values."""
+ for value in ("no", "false", "0"):
+ node = self._get_node('<node a="%s"/>' % (value,))
+ self.assertFalse(manifest_xml.XmlBool(node, "a"))
- def test_int_default(self):
- """Check XmlInt default handling."""
- node = self._get_node('<node/>')
- self.assertIsNone(manifest_xml.XmlInt(node, 'a'))
- self.assertIsNone(manifest_xml.XmlInt(node, 'a', None))
- self.assertEqual(123, manifest_xml.XmlInt(node, 'a', 123))
+ def test_int_default(self):
+ """Check XmlInt default handling."""
+ node = self._get_node("<node/>")
+ self.assertIsNone(manifest_xml.XmlInt(node, "a"))
+ self.assertIsNone(manifest_xml.XmlInt(node, "a", None))
+ self.assertEqual(123, manifest_xml.XmlInt(node, "a", 123))
- node = self._get_node('<node a=""/>')
- self.assertIsNone(manifest_xml.XmlInt(node, 'a'))
+ node = self._get_node('<node a=""/>')
+ self.assertIsNone(manifest_xml.XmlInt(node, "a"))
- def test_int_good(self):
- """Check XmlInt numeric handling."""
- for value in (-1, 0, 1, 50000):
- node = self._get_node('<node a="%s"/>' % (value,))
- self.assertEqual(value, manifest_xml.XmlInt(node, 'a'))
+ def test_int_good(self):
+ """Check XmlInt numeric handling."""
+ for value in (-1, 0, 1, 50000):
+ node = self._get_node('<node a="%s"/>' % (value,))
+ self.assertEqual(value, manifest_xml.XmlInt(node, "a"))
- def test_int_invalid(self):
- """Check XmlInt invalid handling."""
- with self.assertRaises(error.ManifestParseError):
- node = self._get_node('<node a="xx"/>')
- manifest_xml.XmlInt(node, 'a')
+ def test_int_invalid(self):
+ """Check XmlInt invalid handling."""
+ with self.assertRaises(error.ManifestParseError):
+ node = self._get_node('<node a="xx"/>')
+ manifest_xml.XmlInt(node, "a")
class XmlManifestTests(ManifestParseTestCase):
- """Check manifest processing."""
+ """Check manifest processing."""
- def test_empty(self):
- """Parse an 'empty' manifest file."""
- manifest = self.getXmlManifest(
- '<?xml version="1.0" encoding="UTF-8"?>'
- '<manifest></manifest>')
- self.assertEqual(manifest.remotes, {})
- self.assertEqual(manifest.projects, [])
+ def test_empty(self):
+ """Parse an 'empty' manifest file."""
+ manifest = self.getXmlManifest(
+ '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
+ )
+ self.assertEqual(manifest.remotes, {})
+ self.assertEqual(manifest.projects, [])
- def test_link(self):
- """Verify Link handling with new names."""
- manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
- with open(os.path.join(self.manifest_dir, 'foo.xml'), 'w') as fp:
- fp.write('<manifest></manifest>')
- manifest.Link('foo.xml')
- with open(self.manifest_file) as fp:
- self.assertIn('<include name="foo.xml" />', fp.read())
+ def test_link(self):
+ """Verify Link handling with new names."""
+ manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
+ with open(os.path.join(self.manifest_dir, "foo.xml"), "w") as fp:
+ fp.write("<manifest></manifest>")
+ manifest.Link("foo.xml")
+ with open(self.manifest_file) as fp:
+ self.assertIn('<include name="foo.xml" />', fp.read())
- def test_toxml_empty(self):
- """Verify the ToXml() helper."""
- manifest = self.getXmlManifest(
- '<?xml version="1.0" encoding="UTF-8"?>'
- '<manifest></manifest>')
- self.assertEqual(manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>')
+ def test_toxml_empty(self):
+ """Verify the ToXml() helper."""
+ manifest = self.getXmlManifest(
+ '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
+ )
+ self.assertEqual(
+ manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>'
+ )
- def test_todict_empty(self):
- """Verify the ToDict() helper."""
- manifest = self.getXmlManifest(
- '<?xml version="1.0" encoding="UTF-8"?>'
- '<manifest></manifest>')
- self.assertEqual(manifest.ToDict(), {})
+ def test_todict_empty(self):
+ """Verify the ToDict() helper."""
+ manifest = self.getXmlManifest(
+ '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
+ )
+ self.assertEqual(manifest.ToDict(), {})
- def test_toxml_omit_local(self):
- """Does not include local_manifests projects when omit_local=True."""
- manifest = self.getXmlManifest(
- '<?xml version="1.0" encoding="UTF-8"?><manifest>'
- '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
- '<project name="p" groups="local::me"/>'
- '<project name="q"/>'
- '<project name="r" groups="keep"/>'
- '</manifest>')
- self.assertEqual(
- sort_attributes(manifest.ToXml(omit_local=True).toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
- '<project name="q"/><project groups="keep" name="r"/></manifest>')
+ def test_toxml_omit_local(self):
+ """Does not include local_manifests projects when omit_local=True."""
+ manifest = self.getXmlManifest(
+ '<?xml version="1.0" encoding="UTF-8"?><manifest>'
+ '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
+ '<project name="p" groups="local::me"/>'
+ '<project name="q"/>'
+ '<project name="r" groups="keep"/>'
+ "</manifest>"
+ )
+ self.assertEqual(
+ sort_attributes(manifest.ToXml(omit_local=True).toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+ '<project name="q"/><project groups="keep" name="r"/></manifest>',
+ )
- def test_toxml_with_local(self):
- """Does include local_manifests projects when omit_local=False."""
- manifest = self.getXmlManifest(
- '<?xml version="1.0" encoding="UTF-8"?><manifest>'
- '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
- '<project name="p" groups="local::me"/>'
- '<project name="q"/>'
- '<project name="r" groups="keep"/>'
- '</manifest>')
- self.assertEqual(
- sort_attributes(manifest.ToXml(omit_local=False).toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
- '<project groups="local::me" name="p"/>'
- '<project name="q"/><project groups="keep" name="r"/></manifest>')
+ def test_toxml_with_local(self):
+ """Does include local_manifests projects when omit_local=False."""
+ manifest = self.getXmlManifest(
+ '<?xml version="1.0" encoding="UTF-8"?><manifest>'
+ '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
+ '<project name="p" groups="local::me"/>'
+ '<project name="q"/>'
+ '<project name="r" groups="keep"/>'
+ "</manifest>"
+ )
+ self.assertEqual(
+ sort_attributes(manifest.ToXml(omit_local=False).toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+ '<project groups="local::me" name="p"/>'
+ '<project name="q"/><project groups="keep" name="r"/></manifest>',
+ )
- def test_repo_hooks(self):
- """Check repo-hooks settings."""
- manifest = self.getXmlManifest("""
+ def test_repo_hooks(self):
+ """Check repo-hooks settings."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="repohooks" path="src/repohooks"/>
<repo-hooks in-project="repohooks" enabled-list="a, b"/>
</manifest>
-""")
- self.assertEqual(manifest.repo_hooks_project.name, 'repohooks')
- self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b'])
+"""
+ )
+ self.assertEqual(manifest.repo_hooks_project.name, "repohooks")
+ self.assertEqual(
+ manifest.repo_hooks_project.enabled_repo_hooks, ["a", "b"]
+ )
- def test_repo_hooks_unordered(self):
- """Check repo-hooks settings work even if the project def comes second."""
- manifest = self.getXmlManifest("""
+ def test_repo_hooks_unordered(self):
+ """Check repo-hooks settings work even if the project def comes second.""" # noqa: E501
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<repo-hooks in-project="repohooks" enabled-list="a, b"/>
<project name="repohooks" path="src/repohooks"/>
</manifest>
-""")
- self.assertEqual(manifest.repo_hooks_project.name, 'repohooks')
- self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b'])
+"""
+ )
+ self.assertEqual(manifest.repo_hooks_project.name, "repohooks")
+ self.assertEqual(
+ manifest.repo_hooks_project.enabled_repo_hooks, ["a", "b"]
+ )
- def test_unknown_tags(self):
- """Check superproject settings."""
- manifest = self.getXmlManifest("""
+ def test_unknown_tags(self):
+ """Check superproject settings."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
@@ -319,44 +346,54 @@
<iankaz value="unknown (possible) future tags are ignored"/>
<x-custom-tag>X tags are always ignored</x-custom-tag>
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'test-remote')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote"/>'
- '<default remote="test-remote" revision="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "test-remote")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote"/>'
+ '<default remote="test-remote" revision="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_remote_annotations(self):
- """Check remote settings."""
- manifest = self.getXmlManifest("""
+ def test_remote_annotations(self):
+ """Check remote settings."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost">
<annotation name="foo" value="bar"/>
</remote>
</manifest>
-""")
- self.assertEqual(manifest.remotes['test-remote'].annotations[0].name, 'foo')
- self.assertEqual(manifest.remotes['test-remote'].annotations[0].value, 'bar')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote">'
- '<annotation name="foo" value="bar"/>'
- '</remote>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(
+ manifest.remotes["test-remote"].annotations[0].name, "foo"
+ )
+ self.assertEqual(
+ manifest.remotes["test-remote"].annotations[0].value, "bar"
+ )
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote">'
+ '<annotation name="foo" value="bar"/>'
+ "</remote>"
+ "</manifest>",
+ )
class IncludeElementTests(ManifestParseTestCase):
- """Tests for <include>."""
+ """Tests for <include>."""
- def test_group_levels(self):
- root_m = os.path.join(self.manifest_dir, 'root.xml')
- with open(root_m, 'w') as fp:
- fp.write("""
+ def test_group_levels(self):
+ root_m = os.path.join(self.manifest_dir, "root.xml")
+ with open(root_m, "w") as fp:
+ fp.write(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
@@ -364,438 +401,524 @@
<project name="root-name1" path="root-path1" />
<project name="root-name2" path="root-path2" groups="r2g1,r2g2" />
</manifest>
-""")
- with open(os.path.join(self.manifest_dir, 'level1.xml'), 'w') as fp:
- fp.write("""
+"""
+ )
+ with open(os.path.join(self.manifest_dir, "level1.xml"), "w") as fp:
+ fp.write(
+ """
<manifest>
<include name="level2.xml" groups="level2-group" />
<project name="level1-name1" path="level1-path1" />
</manifest>
-""")
- with open(os.path.join(self.manifest_dir, 'level2.xml'), 'w') as fp:
- fp.write("""
+"""
+ )
+ with open(os.path.join(self.manifest_dir, "level2.xml"), "w") as fp:
+ fp.write(
+ """
<manifest>
<project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" />
</manifest>
-""")
- include_m = manifest_xml.XmlManifest(self.repodir, root_m)
- for proj in include_m.projects:
- if proj.name == 'root-name1':
- # Check include group not set on root level proj.
- self.assertNotIn('level1-group', proj.groups)
- if proj.name == 'root-name2':
- # Check root proj group not removed.
- self.assertIn('r2g1', proj.groups)
- if proj.name == 'level1-name1':
- # Check level1 proj has inherited group level 1.
- self.assertIn('level1-group', proj.groups)
- if proj.name == 'level2-name1':
- # Check level2 proj has inherited group levels 1 and 2.
- self.assertIn('level1-group', proj.groups)
- self.assertIn('level2-group', proj.groups)
- # Check level2 proj group not removed.
- self.assertIn('l2g1', proj.groups)
+"""
+ )
+ include_m = manifest_xml.XmlManifest(self.repodir, root_m)
+ for proj in include_m.projects:
+ if proj.name == "root-name1":
+ # Check include group not set on root level proj.
+ self.assertNotIn("level1-group", proj.groups)
+ if proj.name == "root-name2":
+ # Check root proj group not removed.
+ self.assertIn("r2g1", proj.groups)
+ if proj.name == "level1-name1":
+ # Check level1 proj has inherited group level 1.
+ self.assertIn("level1-group", proj.groups)
+ if proj.name == "level2-name1":
+ # Check level2 proj has inherited group levels 1 and 2.
+ self.assertIn("level1-group", proj.groups)
+ self.assertIn("level2-group", proj.groups)
+ # Check level2 proj group not removed.
+ self.assertIn("l2g1", proj.groups)
- def test_allow_bad_name_from_user(self):
- """Check handling of bad name attribute from the user's input."""
- def parse(name):
- name = self.encodeXmlAttr(name)
- manifest = self.getXmlManifest(f"""
+ def test_allow_bad_name_from_user(self):
+ """Check handling of bad name attribute from the user's input."""
+
+ def parse(name):
+ name = self.encodeXmlAttr(name)
+ manifest = self.getXmlManifest(
+ f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<include name="{name}" />
</manifest>
-""")
- # Force the manifest to be parsed.
- manifest.ToXml()
+"""
+ )
+ # Force the manifest to be parsed.
+ manifest.ToXml()
- # Setup target of the include.
- target = os.path.join(self.tempdir, 'target.xml')
- with open(target, 'w') as fp:
- fp.write('<manifest></manifest>')
+ # Setup target of the include.
+ target = os.path.join(self.tempdir, "target.xml")
+ with open(target, "w") as fp:
+ fp.write("<manifest></manifest>")
- # Include with absolute path.
- parse(os.path.abspath(target))
+ # Include with absolute path.
+ parse(os.path.abspath(target))
- # Include with relative path.
- parse(os.path.relpath(target, self.manifest_dir))
+ # Include with relative path.
+ parse(os.path.relpath(target, self.manifest_dir))
- def test_bad_name_checks(self):
- """Check handling of bad name attribute."""
- def parse(name):
- name = self.encodeXmlAttr(name)
- # Setup target of the include.
- with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp:
- fp.write(f'<manifest><include name="{name}"/></manifest>')
+ def test_bad_name_checks(self):
+ """Check handling of bad name attribute."""
- manifest = self.getXmlManifest("""
+ def parse(name):
+ name = self.encodeXmlAttr(name)
+ # Setup target of the include.
+ with open(
+ os.path.join(self.manifest_dir, "target.xml"),
+ "w",
+ encoding="utf-8",
+ ) as fp:
+ fp.write(f'<manifest><include name="{name}"/></manifest>')
+
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<include name="target.xml" />
</manifest>
-""")
- # Force the manifest to be parsed.
- manifest.ToXml()
+"""
+ )
+ # Force the manifest to be parsed.
+ manifest.ToXml()
- # Handle empty name explicitly because a different codepath rejects it.
- with self.assertRaises(error.ManifestParseError):
- parse('')
+ # Handle empty name explicitly because a different codepath rejects it.
+ with self.assertRaises(error.ManifestParseError):
+ parse("")
- for path in INVALID_FS_PATHS:
- if not path:
- continue
+ for path in INVALID_FS_PATHS:
+ if not path:
+ continue
- with self.assertRaises(error.ManifestInvalidPathError):
- parse(path)
+ with self.assertRaises(error.ManifestInvalidPathError):
+ parse(path)
class ProjectElementTests(ManifestParseTestCase):
- """Tests for <project>."""
+ """Tests for <project>."""
- def test_group(self):
- """Check project group settings."""
- manifest = self.getXmlManifest("""
+ def test_group(self):
+ """Check project group settings."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="test-name" path="test-path"/>
<project name="extras" path="path" groups="g1,g2,g1"/>
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 2)
- # Ordering isn't guaranteed.
- result = {
- manifest.projects[0].name: manifest.projects[0].groups,
- manifest.projects[1].name: manifest.projects[1].groups,
- }
- project = manifest.projects[0]
- self.assertCountEqual(
- result['test-name'],
- ['name:test-name', 'all', 'path:test-path'])
- self.assertCountEqual(
- result['extras'],
- ['g1', 'g2', 'g1', 'name:extras', 'all', 'path:path'])
- groupstr = 'default,platform-' + platform.system().lower()
- self.assertEqual(groupstr, manifest.GetGroupsStr())
- groupstr = 'g1,g2,g1'
- manifest.manifestProject.config.SetString('manifest.groups', groupstr)
- self.assertEqual(groupstr, manifest.GetGroupsStr())
+"""
+ )
+ self.assertEqual(len(manifest.projects), 2)
+ # Ordering isn't guaranteed.
+ result = {
+ manifest.projects[0].name: manifest.projects[0].groups,
+ manifest.projects[1].name: manifest.projects[1].groups,
+ }
+ self.assertCountEqual(
+ result["test-name"], ["name:test-name", "all", "path:test-path"]
+ )
+ self.assertCountEqual(
+ result["extras"],
+ ["g1", "g2", "g1", "name:extras", "all", "path:path"],
+ )
+ groupstr = "default,platform-" + platform.system().lower()
+ self.assertEqual(groupstr, manifest.GetGroupsStr())
+ groupstr = "g1,g2,g1"
+ manifest.manifestProject.config.SetString("manifest.groups", groupstr)
+ self.assertEqual(groupstr, manifest.GetGroupsStr())
- def test_set_revision_id(self):
- """Check setting of project's revisionId."""
- manifest = self.getXmlManifest("""
+ def test_set_revision_id(self):
+ """Check setting of project's revisionId."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="test-name"/>
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 1)
- project = manifest.projects[0]
- project.SetRevisionId('ABCDEF')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<project name="test-name" revision="ABCDEF" upstream="refs/heads/main"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(len(manifest.projects), 1)
+ project = manifest.projects[0]
+ project.SetRevisionId("ABCDEF")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<project name="test-name" revision="ABCDEF" upstream="refs/heads/main"/>' # noqa: E501
+ "</manifest>",
+ )
- def test_trailing_slash(self):
- """Check handling of trailing slashes in attributes."""
- def parse(name, path):
- name = self.encodeXmlAttr(name)
- path = self.encodeXmlAttr(path)
- return self.getXmlManifest(f"""
+ def test_trailing_slash(self):
+ """Check handling of trailing slashes in attributes."""
+
+ def parse(name, path):
+ name = self.encodeXmlAttr(name)
+ path = self.encodeXmlAttr(path)
+ return self.getXmlManifest(
+ f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
-""")
+"""
+ )
- manifest = parse('a/path/', 'foo')
- self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
- os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
- self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
- os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
+ manifest = parse("a/path/", "foo")
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].gitdir),
+ os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
+ )
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].objdir),
+ os.path.join(
+ self.tempdir, ".repo", "project-objects", "a", "path.git"
+ ),
+ )
- manifest = parse('a/path', 'foo/')
- self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
- os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
- self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
- os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
+ manifest = parse("a/path", "foo/")
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].gitdir),
+ os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
+ )
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].objdir),
+ os.path.join(
+ self.tempdir, ".repo", "project-objects", "a", "path.git"
+ ),
+ )
- manifest = parse('a/path', 'foo//////')
- self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
- os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
- self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
- os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
+ manifest = parse("a/path", "foo//////")
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].gitdir),
+ os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
+ )
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].objdir),
+ os.path.join(
+ self.tempdir, ".repo", "project-objects", "a", "path.git"
+ ),
+ )
- def test_toplevel_path(self):
- """Check handling of path=. specially."""
- def parse(name, path):
- name = self.encodeXmlAttr(name)
- path = self.encodeXmlAttr(path)
- return self.getXmlManifest(f"""
+ def test_toplevel_path(self):
+ """Check handling of path=. specially."""
+
+ def parse(name, path):
+ name = self.encodeXmlAttr(name)
+ path = self.encodeXmlAttr(path)
+ return self.getXmlManifest(
+ f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
-""")
+"""
+ )
- for path in ('.', './', './/', './//'):
- manifest = parse('server/path', path)
- self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
- os.path.join(self.tempdir, '.repo', 'projects', '..git'))
+ for path in (".", "./", ".//", ".///"):
+ manifest = parse("server/path", path)
+ self.assertEqual(
+ os.path.normpath(manifest.projects[0].gitdir),
+ os.path.join(self.tempdir, ".repo", "projects", "..git"),
+ )
- def test_bad_path_name_checks(self):
- """Check handling of bad path & name attributes."""
- def parse(name, path):
- name = self.encodeXmlAttr(name)
- path = self.encodeXmlAttr(path)
- manifest = self.getXmlManifest(f"""
+ def test_bad_path_name_checks(self):
+ """Check handling of bad path & name attributes."""
+
+ def parse(name, path):
+ name = self.encodeXmlAttr(name)
+ path = self.encodeXmlAttr(path)
+ manifest = self.getXmlManifest(
+ f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
-""")
- # Force the manifest to be parsed.
- manifest.ToXml()
+"""
+ )
+ # Force the manifest to be parsed.
+ manifest.ToXml()
- # Verify the parser is valid by default to avoid buggy tests below.
- parse('ok', 'ok')
+ # Verify the parser is valid by default to avoid buggy tests below.
+ parse("ok", "ok")
- # Handle empty name explicitly because a different codepath rejects it.
- # Empty path is OK because it defaults to the name field.
- with self.assertRaises(error.ManifestParseError):
- parse('', 'ok')
+ # Handle empty name explicitly because a different codepath rejects it.
+ # Empty path is OK because it defaults to the name field.
+ with self.assertRaises(error.ManifestParseError):
+ parse("", "ok")
- for path in INVALID_FS_PATHS:
- if not path or path.endswith('/') or path.endswith(os.path.sep):
- continue
+ for path in INVALID_FS_PATHS:
+ if not path or path.endswith("/") or path.endswith(os.path.sep):
+ continue
- with self.assertRaises(error.ManifestInvalidPathError):
- parse(path, 'ok')
+ with self.assertRaises(error.ManifestInvalidPathError):
+ parse(path, "ok")
- # We have a dedicated test for path=".".
- if path not in {'.'}:
- with self.assertRaises(error.ManifestInvalidPathError):
- parse('ok', path)
+ # We have a dedicated test for path=".".
+ if path not in {"."}:
+ with self.assertRaises(error.ManifestInvalidPathError):
+ parse("ok", path)
class SuperProjectElementTests(ManifestParseTestCase):
- """Tests for <superproject>."""
+ """Tests for <superproject>."""
- def test_superproject(self):
- """Check superproject settings."""
- manifest = self.getXmlManifest("""
+ def test_superproject(self):
+ """Check superproject settings."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'test-remote')
- self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/main')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote"/>'
- '<default remote="test-remote" revision="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "test-remote")
+ self.assertEqual(
+ manifest.superproject.remote.url, "http://localhost/superproject"
+ )
+ self.assertEqual(manifest.superproject.revision, "refs/heads/main")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote"/>'
+ '<default remote="test-remote" revision="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_superproject_revision(self):
- """Check superproject settings with a different revision attribute"""
- self.maxDiff = None
- manifest = self.getXmlManifest("""
+ def test_superproject_revision(self):
+ """Check superproject settings with a different revision attribute"""
+ self.maxDiff = None
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject" revision="refs/heads/stable" />
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'test-remote')
- self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/stable')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote"/>'
- '<default remote="test-remote" revision="refs/heads/main"/>'
- '<superproject name="superproject" revision="refs/heads/stable"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "test-remote")
+ self.assertEqual(
+ manifest.superproject.remote.url, "http://localhost/superproject"
+ )
+ self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote"/>'
+ '<default remote="test-remote" revision="refs/heads/main"/>'
+ '<superproject name="superproject" revision="refs/heads/stable"/>'
+ "</manifest>",
+ )
- def test_superproject_revision_default_negative(self):
- """Check superproject settings with a same revision attribute"""
- self.maxDiff = None
- manifest = self.getXmlManifest("""
+ def test_superproject_revision_default_negative(self):
+ """Check superproject settings with a same revision attribute"""
+ self.maxDiff = None
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/stable" />
<superproject name="superproject" revision="refs/heads/stable" />
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'test-remote')
- self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/stable')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote"/>'
- '<default remote="test-remote" revision="refs/heads/stable"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "test-remote")
+ self.assertEqual(
+ manifest.superproject.remote.url, "http://localhost/superproject"
+ )
+ self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote"/>'
+ '<default remote="test-remote" revision="refs/heads/stable"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
- def test_superproject_revision_remote(self):
- """Check superproject settings with a same revision attribute"""
- self.maxDiff = None
- manifest = self.getXmlManifest("""
+ def test_superproject_revision_remote(self):
+ """Check superproject settings with a same revision attribute"""
+ self.maxDiff = None
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="test-remote" fetch="http://localhost" revision="refs/heads/main" />
<default remote="test-remote" />
<superproject name="superproject" revision="refs/heads/stable" />
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'test-remote')
- self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/stable')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="test-remote" revision="refs/heads/main"/>'
- '<default remote="test-remote"/>'
- '<superproject name="superproject" revision="refs/heads/stable"/>'
- '</manifest>')
+""" # noqa: E501
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "test-remote")
+ self.assertEqual(
+ manifest.superproject.remote.url, "http://localhost/superproject"
+ )
+ self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="test-remote" revision="refs/heads/main"/>' # noqa: E501
+ '<default remote="test-remote"/>'
+ '<superproject name="superproject" revision="refs/heads/stable"/>'
+ "</manifest>",
+ )
- def test_remote(self):
- """Check superproject settings with a remote."""
- manifest = self.getXmlManifest("""
+ def test_remote(self):
+ """Check superproject settings with a remote."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<remote name="superproject-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="platform/superproject" remote="superproject-remote"/>
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'platform/superproject')
- self.assertEqual(manifest.superproject.remote.name, 'superproject-remote')
- self.assertEqual(manifest.superproject.remote.url, 'http://localhost/platform/superproject')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/main')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<remote fetch="http://localhost" name="superproject-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<superproject name="platform/superproject" remote="superproject-remote"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "platform/superproject")
+ self.assertEqual(
+ manifest.superproject.remote.name, "superproject-remote"
+ )
+ self.assertEqual(
+ manifest.superproject.remote.url,
+ "http://localhost/platform/superproject",
+ )
+ self.assertEqual(manifest.superproject.revision, "refs/heads/main")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<remote fetch="http://localhost" name="superproject-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<superproject name="platform/superproject" remote="superproject-remote"/>' # noqa: E501
+ "</manifest>",
+ )
- def test_defalut_remote(self):
- """Check superproject settings with a default remote."""
- manifest = self.getXmlManifest("""
+ def test_defalut_remote(self):
+ """Check superproject settings with a default remote."""
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject" remote="default-remote"/>
</manifest>
-""")
- self.assertEqual(manifest.superproject.name, 'superproject')
- self.assertEqual(manifest.superproject.remote.name, 'default-remote')
- self.assertEqual(manifest.superproject.revision, 'refs/heads/main')
- self.assertEqual(
- sort_attributes(manifest.ToXml().toxml()),
- '<?xml version="1.0" ?><manifest>'
- '<remote fetch="http://localhost" name="default-remote"/>'
- '<default remote="default-remote" revision="refs/heads/main"/>'
- '<superproject name="superproject"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.superproject.name, "superproject")
+ self.assertEqual(manifest.superproject.remote.name, "default-remote")
+ self.assertEqual(manifest.superproject.revision, "refs/heads/main")
+ self.assertEqual(
+ sort_attributes(manifest.ToXml().toxml()),
+ '<?xml version="1.0" ?><manifest>'
+ '<remote fetch="http://localhost" name="default-remote"/>'
+ '<default remote="default-remote" revision="refs/heads/main"/>'
+ '<superproject name="superproject"/>'
+ "</manifest>",
+ )
class ContactinfoElementTests(ManifestParseTestCase):
- """Tests for <contactinfo>."""
+ """Tests for <contactinfo>."""
- def test_contactinfo(self):
- """Check contactinfo settings."""
- bugurl = 'http://localhost/contactinfo'
- manifest = self.getXmlManifest(f"""
+ def test_contactinfo(self):
+ """Check contactinfo settings."""
+ bugurl = "http://localhost/contactinfo"
+ manifest = self.getXmlManifest(
+ f"""
<manifest>
<contactinfo bugurl="{bugurl}"/>
</manifest>
-""")
- self.assertEqual(manifest.contactinfo.bugurl, bugurl)
- self.assertEqual(
- manifest.ToXml().toxml(),
- '<?xml version="1.0" ?><manifest>'
- f'<contactinfo bugurl="{bugurl}"/>'
- '</manifest>')
+"""
+ )
+ self.assertEqual(manifest.contactinfo.bugurl, bugurl)
+ self.assertEqual(
+ manifest.ToXml().toxml(),
+ '<?xml version="1.0" ?><manifest>'
+ f'<contactinfo bugurl="{bugurl}"/>'
+ "</manifest>",
+ )
class DefaultElementTests(ManifestParseTestCase):
- """Tests for <default>."""
+ """Tests for <default>."""
- def test_default(self):
- """Check default settings."""
- a = manifest_xml._Default()
- a.revisionExpr = 'foo'
- a.remote = manifest_xml._XmlRemote(name='remote')
- b = manifest_xml._Default()
- b.revisionExpr = 'bar'
- self.assertEqual(a, a)
- self.assertNotEqual(a, b)
- self.assertNotEqual(b, a.remote)
- self.assertNotEqual(a, 123)
- self.assertNotEqual(a, None)
+ def test_default(self):
+ """Check default settings."""
+ a = manifest_xml._Default()
+ a.revisionExpr = "foo"
+ a.remote = manifest_xml._XmlRemote(name="remote")
+ b = manifest_xml._Default()
+ b.revisionExpr = "bar"
+ self.assertEqual(a, a)
+ self.assertNotEqual(a, b)
+ self.assertNotEqual(b, a.remote)
+ self.assertNotEqual(a, 123)
+ self.assertNotEqual(a, None)
class RemoteElementTests(ManifestParseTestCase):
- """Tests for <remote>."""
+ """Tests for <remote>."""
- def test_remote(self):
- """Check remote settings."""
- a = manifest_xml._XmlRemote(name='foo')
- a.AddAnnotation('key1', 'value1', 'true')
- b = manifest_xml._XmlRemote(name='foo')
- b.AddAnnotation('key2', 'value1', 'true')
- c = manifest_xml._XmlRemote(name='foo')
- c.AddAnnotation('key1', 'value2', 'true')
- d = manifest_xml._XmlRemote(name='foo')
- d.AddAnnotation('key1', 'value1', 'false')
- self.assertEqual(a, a)
- self.assertNotEqual(a, b)
- self.assertNotEqual(a, c)
- self.assertNotEqual(a, d)
- self.assertNotEqual(a, manifest_xml._Default())
- self.assertNotEqual(a, 123)
- self.assertNotEqual(a, None)
+ def test_remote(self):
+ """Check remote settings."""
+ a = manifest_xml._XmlRemote(name="foo")
+ a.AddAnnotation("key1", "value1", "true")
+ b = manifest_xml._XmlRemote(name="foo")
+ b.AddAnnotation("key2", "value1", "true")
+ c = manifest_xml._XmlRemote(name="foo")
+ c.AddAnnotation("key1", "value2", "true")
+ d = manifest_xml._XmlRemote(name="foo")
+ d.AddAnnotation("key1", "value1", "false")
+ self.assertEqual(a, a)
+ self.assertNotEqual(a, b)
+ self.assertNotEqual(a, c)
+ self.assertNotEqual(a, d)
+ self.assertNotEqual(a, manifest_xml._Default())
+ self.assertNotEqual(a, 123)
+ self.assertNotEqual(a, None)
class RemoveProjectElementTests(ManifestParseTestCase):
- """Tests for <remove-project>."""
+ """Tests for <remove-project>."""
- def test_remove_one_project(self):
- manifest = self.getXmlManifest("""
+ def test_remove_one_project(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<remove-project name="myproject" />
</manifest>
-""")
- self.assertEqual(manifest.projects, [])
+"""
+ )
+ self.assertEqual(manifest.projects, [])
- def test_remove_one_project_one_remains(self):
- manifest = self.getXmlManifest("""
+ def test_remove_one_project_one_remains(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
@@ -803,51 +926,59 @@
<project name="yourproject" />
<remove-project name="myproject" />
</manifest>
-""")
+"""
+ )
- self.assertEqual(len(manifest.projects), 1)
- self.assertEqual(manifest.projects[0].name, 'yourproject')
+ self.assertEqual(len(manifest.projects), 1)
+ self.assertEqual(manifest.projects[0].name, "yourproject")
- def test_remove_one_project_doesnt_exist(self):
- with self.assertRaises(manifest_xml.ManifestParseError):
- manifest = self.getXmlManifest("""
+ def test_remove_one_project_doesnt_exist(self):
+ with self.assertRaises(manifest_xml.ManifestParseError):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<remove-project name="myproject" />
</manifest>
-""")
- manifest.projects
+"""
+ )
+ manifest.projects
- def test_remove_one_optional_project_doesnt_exist(self):
- manifest = self.getXmlManifest("""
+ def test_remove_one_optional_project_doesnt_exist(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<remove-project name="myproject" optional="true" />
</manifest>
-""")
- self.assertEqual(manifest.projects, [])
+"""
+ )
+ self.assertEqual(manifest.projects, [])
class ExtendProjectElementTests(ManifestParseTestCase):
- """Tests for <extend-project>."""
+ """Tests for <extend-project>."""
- def test_extend_project_dest_path_single_match(self):
- manifest = self.getXmlManifest("""
+ def test_extend_project_dest_path_single_match(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<extend-project name="myproject" dest-path="bar" />
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 1)
- self.assertEqual(manifest.projects[0].relpath, 'bar')
+"""
+ )
+ self.assertEqual(len(manifest.projects), 1)
+ self.assertEqual(manifest.projects[0].relpath, "bar")
- def test_extend_project_dest_path_multi_match(self):
- with self.assertRaises(manifest_xml.ManifestParseError):
- manifest = self.getXmlManifest("""
+ def test_extend_project_dest_path_multi_match(self):
+ with self.assertRaises(manifest_xml.ManifestParseError):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
@@ -855,11 +986,13 @@
<project name="myproject" path="y" />
<extend-project name="myproject" dest-path="bar" />
</manifest>
-""")
- manifest.projects
+"""
+ )
+ manifest.projects
- def test_extend_project_dest_path_multi_match_path_specified(self):
- manifest = self.getXmlManifest("""
+ def test_extend_project_dest_path_multi_match_path_specified(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
@@ -867,34 +1000,39 @@
<project name="myproject" path="y" />
<extend-project name="myproject" path="x" dest-path="bar" />
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 2)
- if manifest.projects[0].relpath == 'y':
- self.assertEqual(manifest.projects[1].relpath, 'bar')
- else:
- self.assertEqual(manifest.projects[0].relpath, 'bar')
- self.assertEqual(manifest.projects[1].relpath, 'y')
+"""
+ )
+ self.assertEqual(len(manifest.projects), 2)
+ if manifest.projects[0].relpath == "y":
+ self.assertEqual(manifest.projects[1].relpath, "bar")
+ else:
+ self.assertEqual(manifest.projects[0].relpath, "bar")
+ self.assertEqual(manifest.projects[1].relpath, "y")
- def test_extend_project_dest_branch(self):
- manifest = self.getXmlManifest("""
+ def test_extend_project_dest_branch(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" dest-branch="foo" />
<project name="myproject" />
<extend-project name="myproject" dest-branch="bar" />
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 1)
- self.assertEqual(manifest.projects[0].dest_branch, 'bar')
+""" # noqa: E501
+ )
+ self.assertEqual(len(manifest.projects), 1)
+ self.assertEqual(manifest.projects[0].dest_branch, "bar")
- def test_extend_project_upstream(self):
- manifest = self.getXmlManifest("""
+ def test_extend_project_upstream(self):
+ manifest = self.getXmlManifest(
+ """
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<extend-project name="myproject" upstream="bar" />
</manifest>
-""")
- self.assertEqual(len(manifest.projects), 1)
- self.assertEqual(manifest.projects[0].upstream, 'bar')
+"""
+ )
+ self.assertEqual(len(manifest.projects), 1)
+ self.assertEqual(manifest.projects[0].upstream, "bar")
diff --git a/tests/test_platform_utils.py b/tests/test_platform_utils.py
index 55b7805..7a42de0 100644
--- a/tests/test_platform_utils.py
+++ b/tests/test_platform_utils.py
@@ -22,29 +22,31 @@
class RemoveTests(unittest.TestCase):
- """Check remove() helper."""
+ """Check remove() helper."""
- def testMissingOk(self):
- """Check missing_ok handling."""
- with tempfile.TemporaryDirectory() as tmpdir:
- path = os.path.join(tmpdir, 'test')
+ def testMissingOk(self):
+ """Check missing_ok handling."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ path = os.path.join(tmpdir, "test")
- # Should not fail.
- platform_utils.remove(path, missing_ok=True)
+ # Should not fail.
+ platform_utils.remove(path, missing_ok=True)
- # Should fail.
- self.assertRaises(OSError, platform_utils.remove, path)
- self.assertRaises(OSError, platform_utils.remove, path, missing_ok=False)
+ # Should fail.
+ self.assertRaises(OSError, platform_utils.remove, path)
+ self.assertRaises(
+ OSError, platform_utils.remove, path, missing_ok=False
+ )
- # Should not fail if it exists.
- open(path, 'w').close()
- platform_utils.remove(path, missing_ok=True)
- self.assertFalse(os.path.exists(path))
+ # Should not fail if it exists.
+ open(path, "w").close()
+ platform_utils.remove(path, missing_ok=True)
+ self.assertFalse(os.path.exists(path))
- open(path, 'w').close()
- platform_utils.remove(path)
- self.assertFalse(os.path.exists(path))
+ open(path, "w").close()
+ platform_utils.remove(path)
+ self.assertFalse(os.path.exists(path))
- open(path, 'w').close()
- platform_utils.remove(path, missing_ok=False)
- self.assertFalse(os.path.exists(path))
+ open(path, "w").close()
+ platform_utils.remove(path, missing_ok=False)
+ self.assertFalse(os.path.exists(path))
diff --git a/tests/test_project.py b/tests/test_project.py
index c50d994..bc8330b 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -31,452 +31,493 @@
@contextlib.contextmanager
def TempGitTree():
- """Create a new empty git checkout for testing."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- # Tests need to assume, that main is default branch at init,
- # which is not supported in config until 2.28.
- cmd = ['git', 'init']
- if git_command.git_require((2, 28, 0)):
- cmd += ['--initial-branch=main']
- else:
- # Use template dir for init.
- templatedir = tempfile.mkdtemp(prefix='.test-template')
- with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
- fp.write('ref: refs/heads/main\n')
- cmd += ['--template', templatedir]
- subprocess.check_call(cmd, cwd=tempdir)
- yield tempdir
+ """Create a new empty git checkout for testing."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ # Tests need to assume, that main is default branch at init,
+ # which is not supported in config until 2.28.
+ cmd = ["git", "init"]
+ if git_command.git_require((2, 28, 0)):
+ cmd += ["--initial-branch=main"]
+ else:
+ # Use template dir for init.
+ templatedir = tempfile.mkdtemp(prefix=".test-template")
+ with open(os.path.join(templatedir, "HEAD"), "w") as fp:
+ fp.write("ref: refs/heads/main\n")
+ cmd += ["--template", templatedir]
+ subprocess.check_call(cmd, cwd=tempdir)
+ yield tempdir
class FakeProject(object):
- """A fake for Project for basic functionality."""
+ """A fake for Project for basic functionality."""
- def __init__(self, worktree):
- self.worktree = worktree
- self.gitdir = os.path.join(worktree, '.git')
- self.name = 'fakeproject'
- self.work_git = project.Project._GitGetByExec(
- self, bare=False, gitdir=self.gitdir)
- self.bare_git = project.Project._GitGetByExec(
- self, bare=True, gitdir=self.gitdir)
- self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
+ def __init__(self, worktree):
+ self.worktree = worktree
+ self.gitdir = os.path.join(worktree, ".git")
+ self.name = "fakeproject"
+ self.work_git = project.Project._GitGetByExec(
+ self, bare=False, gitdir=self.gitdir
+ )
+ self.bare_git = project.Project._GitGetByExec(
+ self, bare=True, gitdir=self.gitdir
+ )
+ self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
class ReviewableBranchTests(unittest.TestCase):
- """Check ReviewableBranch behavior."""
+ """Check ReviewableBranch behavior."""
- def test_smoke(self):
- """A quick run through everything."""
- with TempGitTree() as tempdir:
- fakeproj = FakeProject(tempdir)
+ def test_smoke(self):
+ """A quick run through everything."""
+ with TempGitTree() as tempdir:
+ fakeproj = FakeProject(tempdir)
- # Generate some commits.
- with open(os.path.join(tempdir, 'readme'), 'w') as fp:
- fp.write('txt')
- fakeproj.work_git.add('readme')
- fakeproj.work_git.commit('-mAdd file')
- fakeproj.work_git.checkout('-b', 'work')
- fakeproj.work_git.rm('-f', 'readme')
- fakeproj.work_git.commit('-mDel file')
+ # Generate some commits.
+ with open(os.path.join(tempdir, "readme"), "w") as fp:
+ fp.write("txt")
+ fakeproj.work_git.add("readme")
+ fakeproj.work_git.commit("-mAdd file")
+ fakeproj.work_git.checkout("-b", "work")
+ fakeproj.work_git.rm("-f", "readme")
+ fakeproj.work_git.commit("-mDel file")
- # Start off with the normal details.
- rb = project.ReviewableBranch(
- fakeproj, fakeproj.config.GetBranch('work'), 'main')
- self.assertEqual('work', rb.name)
- self.assertEqual(1, len(rb.commits))
- self.assertIn('Del file', rb.commits[0])
- d = rb.unabbrev_commits
- self.assertEqual(1, len(d))
- short, long = next(iter(d.items()))
- self.assertTrue(long.startswith(short))
- self.assertTrue(rb.base_exists)
- # Hard to assert anything useful about this.
- self.assertTrue(rb.date)
+ # Start off with the normal details.
+ rb = project.ReviewableBranch(
+ fakeproj, fakeproj.config.GetBranch("work"), "main"
+ )
+ self.assertEqual("work", rb.name)
+ self.assertEqual(1, len(rb.commits))
+ self.assertIn("Del file", rb.commits[0])
+ d = rb.unabbrev_commits
+ self.assertEqual(1, len(d))
+ short, long = next(iter(d.items()))
+ self.assertTrue(long.startswith(short))
+ self.assertTrue(rb.base_exists)
+ # Hard to assert anything useful about this.
+ self.assertTrue(rb.date)
- # Now delete the tracking branch!
- fakeproj.work_git.branch('-D', 'main')
- rb = project.ReviewableBranch(
- fakeproj, fakeproj.config.GetBranch('work'), 'main')
- self.assertEqual(0, len(rb.commits))
- self.assertFalse(rb.base_exists)
- # Hard to assert anything useful about this.
- self.assertTrue(rb.date)
+ # Now delete the tracking branch!
+ fakeproj.work_git.branch("-D", "main")
+ rb = project.ReviewableBranch(
+ fakeproj, fakeproj.config.GetBranch("work"), "main"
+ )
+ self.assertEqual(0, len(rb.commits))
+ self.assertFalse(rb.base_exists)
+ # Hard to assert anything useful about this.
+ self.assertTrue(rb.date)
class CopyLinkTestCase(unittest.TestCase):
- """TestCase for stub repo client checkouts.
+ """TestCase for stub repo client checkouts.
- It'll have a layout like this:
- tempdir/ # self.tempdir
- checkout/ # self.topdir
- git-project/ # self.worktree
+ It'll have a layout like this:
+ tempdir/ # self.tempdir
+ checkout/ # self.topdir
+ git-project/ # self.worktree
- Attributes:
- tempdir: A dedicated temporary directory.
- worktree: The top of the repo client checkout.
- topdir: The top of a project checkout.
- """
+ Attributes:
+ tempdir: A dedicated temporary directory.
+ worktree: The top of the repo client checkout.
+ topdir: The top of a project checkout.
+ """
- def setUp(self):
- self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
- self.tempdir = self.tempdirobj.name
- self.topdir = os.path.join(self.tempdir, 'checkout')
- self.worktree = os.path.join(self.topdir, 'git-project')
- os.makedirs(self.topdir)
- os.makedirs(self.worktree)
+ def setUp(self):
+ self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
+ self.tempdir = self.tempdirobj.name
+ self.topdir = os.path.join(self.tempdir, "checkout")
+ self.worktree = os.path.join(self.topdir, "git-project")
+ os.makedirs(self.topdir)
+ os.makedirs(self.worktree)
- def tearDown(self):
- self.tempdirobj.cleanup()
+ def tearDown(self):
+ self.tempdirobj.cleanup()
- @staticmethod
- def touch(path):
- with open(path, 'w'):
- pass
+ @staticmethod
+ def touch(path):
+ with open(path, "w"):
+ pass
- def assertExists(self, path, msg=None):
- """Make sure |path| exists."""
- if os.path.exists(path):
- return
+ def assertExists(self, path, msg=None):
+ """Make sure |path| exists."""
+ if os.path.exists(path):
+ return
- if msg is None:
- msg = ['path is missing: %s' % path]
- while path != '/':
- path = os.path.dirname(path)
- if not path:
- # If we're given something like "foo", abort once we get to "".
- break
- result = os.path.exists(path)
- msg.append('\tos.path.exists(%s): %s' % (path, result))
- if result:
- msg.append('\tcontents: %r' % os.listdir(path))
- break
- msg = '\n'.join(msg)
+ if msg is None:
+ msg = ["path is missing: %s" % path]
+ while path != "/":
+ path = os.path.dirname(path)
+ if not path:
+ # If we're given something like "foo", abort once we get to
+ # "".
+ break
+ result = os.path.exists(path)
+ msg.append("\tos.path.exists(%s): %s" % (path, result))
+ if result:
+ msg.append("\tcontents: %r" % os.listdir(path))
+ break
+ msg = "\n".join(msg)
- raise self.failureException(msg)
+ raise self.failureException(msg)
class CopyFile(CopyLinkTestCase):
- """Check _CopyFile handling."""
+ """Check _CopyFile handling."""
- def CopyFile(self, src, dest):
- return project._CopyFile(self.worktree, src, self.topdir, dest)
+ def CopyFile(self, src, dest):
+ return project._CopyFile(self.worktree, src, self.topdir, dest)
- def test_basic(self):
- """Basic test of copying a file from a project to the toplevel."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- cf = self.CopyFile('foo.txt', 'foo')
- cf._Copy()
- self.assertExists(os.path.join(self.topdir, 'foo'))
+ def test_basic(self):
+ """Basic test of copying a file from a project to the toplevel."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ cf = self.CopyFile("foo.txt", "foo")
+ cf._Copy()
+ self.assertExists(os.path.join(self.topdir, "foo"))
- def test_src_subdir(self):
- """Copy a file from a subdir of a project."""
- src = os.path.join(self.worktree, 'bar', 'foo.txt')
- os.makedirs(os.path.dirname(src))
- self.touch(src)
- cf = self.CopyFile('bar/foo.txt', 'new.txt')
- cf._Copy()
- self.assertExists(os.path.join(self.topdir, 'new.txt'))
+ def test_src_subdir(self):
+ """Copy a file from a subdir of a project."""
+ src = os.path.join(self.worktree, "bar", "foo.txt")
+ os.makedirs(os.path.dirname(src))
+ self.touch(src)
+ cf = self.CopyFile("bar/foo.txt", "new.txt")
+ cf._Copy()
+ self.assertExists(os.path.join(self.topdir, "new.txt"))
- def test_dest_subdir(self):
- """Copy a file to a subdir of a checkout."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
- self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
- cf._Copy()
- self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))
+ def test_dest_subdir(self):
+ """Copy a file to a subdir of a checkout."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ cf = self.CopyFile("foo.txt", "sub/dir/new.txt")
+ self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
+ cf._Copy()
+ self.assertExists(os.path.join(self.topdir, "sub", "dir", "new.txt"))
- def test_update(self):
- """Make sure changed files get copied again."""
- src = os.path.join(self.worktree, 'foo.txt')
- dest = os.path.join(self.topdir, 'bar')
- with open(src, 'w') as f:
- f.write('1st')
- cf = self.CopyFile('foo.txt', 'bar')
- cf._Copy()
- self.assertExists(dest)
- with open(dest) as f:
- self.assertEqual(f.read(), '1st')
+ def test_update(self):
+ """Make sure changed files get copied again."""
+ src = os.path.join(self.worktree, "foo.txt")
+ dest = os.path.join(self.topdir, "bar")
+ with open(src, "w") as f:
+ f.write("1st")
+ cf = self.CopyFile("foo.txt", "bar")
+ cf._Copy()
+ self.assertExists(dest)
+ with open(dest) as f:
+ self.assertEqual(f.read(), "1st")
- with open(src, 'w') as f:
- f.write('2nd!')
- cf._Copy()
- with open(dest) as f:
- self.assertEqual(f.read(), '2nd!')
+ with open(src, "w") as f:
+ f.write("2nd!")
+ cf._Copy()
+ with open(dest) as f:
+ self.assertEqual(f.read(), "2nd!")
- def test_src_block_symlink(self):
- """Do not allow reading from a symlinked path."""
- src = os.path.join(self.worktree, 'foo.txt')
- sym = os.path.join(self.worktree, 'sym')
- self.touch(src)
- platform_utils.symlink('foo.txt', sym)
- self.assertExists(sym)
- cf = self.CopyFile('sym', 'foo')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_src_block_symlink(self):
+ """Do not allow reading from a symlinked path."""
+ src = os.path.join(self.worktree, "foo.txt")
+ sym = os.path.join(self.worktree, "sym")
+ self.touch(src)
+ platform_utils.symlink("foo.txt", sym)
+ self.assertExists(sym)
+ cf = self.CopyFile("sym", "foo")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
- def test_src_block_symlink_traversal(self):
- """Do not allow reading through a symlink dir."""
- realfile = os.path.join(self.tempdir, 'file.txt')
- self.touch(realfile)
- src = os.path.join(self.worktree, 'bar', 'file.txt')
- platform_utils.symlink(self.tempdir, os.path.join(self.worktree, 'bar'))
- self.assertExists(src)
- cf = self.CopyFile('bar/file.txt', 'foo')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_src_block_symlink_traversal(self):
+ """Do not allow reading through a symlink dir."""
+ realfile = os.path.join(self.tempdir, "file.txt")
+ self.touch(realfile)
+ src = os.path.join(self.worktree, "bar", "file.txt")
+ platform_utils.symlink(self.tempdir, os.path.join(self.worktree, "bar"))
+ self.assertExists(src)
+ cf = self.CopyFile("bar/file.txt", "foo")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
- def test_src_block_copy_from_dir(self):
- """Do not allow copying from a directory."""
- src = os.path.join(self.worktree, 'dir')
- os.makedirs(src)
- cf = self.CopyFile('dir', 'foo')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_src_block_copy_from_dir(self):
+ """Do not allow copying from a directory."""
+ src = os.path.join(self.worktree, "dir")
+ os.makedirs(src)
+ cf = self.CopyFile("dir", "foo")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
- def test_dest_block_symlink(self):
- """Do not allow writing to a symlink."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- platform_utils.symlink('dest', os.path.join(self.topdir, 'sym'))
- cf = self.CopyFile('foo.txt', 'sym')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_dest_block_symlink(self):
+ """Do not allow writing to a symlink."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ platform_utils.symlink("dest", os.path.join(self.topdir, "sym"))
+ cf = self.CopyFile("foo.txt", "sym")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
- def test_dest_block_symlink_traversal(self):
- """Do not allow writing through a symlink dir."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- platform_utils.symlink(tempfile.gettempdir(),
- os.path.join(self.topdir, 'sym'))
- cf = self.CopyFile('foo.txt', 'sym/foo.txt')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_dest_block_symlink_traversal(self):
+ """Do not allow writing through a symlink dir."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ platform_utils.symlink(
+ tempfile.gettempdir(), os.path.join(self.topdir, "sym")
+ )
+ cf = self.CopyFile("foo.txt", "sym/foo.txt")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
- def test_src_block_copy_to_dir(self):
- """Do not allow copying to a directory."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- os.makedirs(os.path.join(self.topdir, 'dir'))
- cf = self.CopyFile('foo.txt', 'dir')
- self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
+ def test_src_block_copy_to_dir(self):
+ """Do not allow copying to a directory."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ os.makedirs(os.path.join(self.topdir, "dir"))
+ cf = self.CopyFile("foo.txt", "dir")
+ self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
class LinkFile(CopyLinkTestCase):
- """Check _LinkFile handling."""
+ """Check _LinkFile handling."""
- def LinkFile(self, src, dest):
- return project._LinkFile(self.worktree, src, self.topdir, dest)
+ def LinkFile(self, src, dest):
+ return project._LinkFile(self.worktree, src, self.topdir, dest)
- def test_basic(self):
- """Basic test of linking a file from a project into the toplevel."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- lf = self.LinkFile('foo.txt', 'foo')
- lf._Link()
- dest = os.path.join(self.topdir, 'foo')
- self.assertExists(dest)
- self.assertTrue(os.path.islink(dest))
- self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
+ def test_basic(self):
+ """Basic test of linking a file from a project into the toplevel."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ lf = self.LinkFile("foo.txt", "foo")
+ lf._Link()
+ dest = os.path.join(self.topdir, "foo")
+ self.assertExists(dest)
+ self.assertTrue(os.path.islink(dest))
+ self.assertEqual(
+ os.path.join("git-project", "foo.txt"), os.readlink(dest)
+ )
- def test_src_subdir(self):
- """Link to a file in a subdir of a project."""
- src = os.path.join(self.worktree, 'bar', 'foo.txt')
- os.makedirs(os.path.dirname(src))
- self.touch(src)
- lf = self.LinkFile('bar/foo.txt', 'foo')
- lf._Link()
- self.assertExists(os.path.join(self.topdir, 'foo'))
+ def test_src_subdir(self):
+ """Link to a file in a subdir of a project."""
+ src = os.path.join(self.worktree, "bar", "foo.txt")
+ os.makedirs(os.path.dirname(src))
+ self.touch(src)
+ lf = self.LinkFile("bar/foo.txt", "foo")
+ lf._Link()
+ self.assertExists(os.path.join(self.topdir, "foo"))
- def test_src_self(self):
- """Link to the project itself."""
- dest = os.path.join(self.topdir, 'foo', 'bar')
- lf = self.LinkFile('.', 'foo/bar')
- lf._Link()
- self.assertExists(dest)
- self.assertEqual(os.path.join('..', 'git-project'), os.readlink(dest))
+ def test_src_self(self):
+ """Link to the project itself."""
+ dest = os.path.join(self.topdir, "foo", "bar")
+ lf = self.LinkFile(".", "foo/bar")
+ lf._Link()
+ self.assertExists(dest)
+ self.assertEqual(os.path.join("..", "git-project"), os.readlink(dest))
- def test_dest_subdir(self):
- """Link a file to a subdir of a checkout."""
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
- self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
- lf._Link()
- self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))
+ def test_dest_subdir(self):
+ """Link a file to a subdir of a checkout."""
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ lf = self.LinkFile("foo.txt", "sub/dir/foo/bar")
+ self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
+ lf._Link()
+ self.assertExists(os.path.join(self.topdir, "sub", "dir", "foo", "bar"))
- def test_src_block_relative(self):
- """Do not allow relative symlinks."""
- BAD_SOURCES = (
- './',
- '..',
- '../',
- 'foo/.',
- 'foo/./bar',
- 'foo/..',
- 'foo/../foo',
- )
- for src in BAD_SOURCES:
- lf = self.LinkFile(src, 'foo')
- self.assertRaises(error.ManifestInvalidPathError, lf._Link)
+ def test_src_block_relative(self):
+ """Do not allow relative symlinks."""
+ BAD_SOURCES = (
+ "./",
+ "..",
+ "../",
+ "foo/.",
+ "foo/./bar",
+ "foo/..",
+ "foo/../foo",
+ )
+ for src in BAD_SOURCES:
+ lf = self.LinkFile(src, "foo")
+ self.assertRaises(error.ManifestInvalidPathError, lf._Link)
- def test_update(self):
- """Make sure changed targets get updated."""
- dest = os.path.join(self.topdir, 'sym')
+ def test_update(self):
+ """Make sure changed targets get updated."""
+ dest = os.path.join(self.topdir, "sym")
- src = os.path.join(self.worktree, 'foo.txt')
- self.touch(src)
- lf = self.LinkFile('foo.txt', 'sym')
- lf._Link()
- self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
+ src = os.path.join(self.worktree, "foo.txt")
+ self.touch(src)
+ lf = self.LinkFile("foo.txt", "sym")
+ lf._Link()
+ self.assertEqual(
+ os.path.join("git-project", "foo.txt"), os.readlink(dest)
+ )
- # Point the symlink somewhere else.
- os.unlink(dest)
- platform_utils.symlink(self.tempdir, dest)
- lf._Link()
- self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
+ # Point the symlink somewhere else.
+ os.unlink(dest)
+ platform_utils.symlink(self.tempdir, dest)
+ lf._Link()
+ self.assertEqual(
+ os.path.join("git-project", "foo.txt"), os.readlink(dest)
+ )
class MigrateWorkTreeTests(unittest.TestCase):
- """Check _MigrateOldWorkTreeGitDir handling."""
+ """Check _MigrateOldWorkTreeGitDir handling."""
- _SYMLINKS = {
- 'config', 'description', 'hooks', 'info', 'logs', 'objects',
- 'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn',
- }
- _FILES = {
- 'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'index', 'ORIG_HEAD',
- 'unknown-file-should-be-migrated',
- }
- _CLEAN_FILES = {
- 'a-vim-temp-file~', '#an-emacs-temp-file#',
- }
+ _SYMLINKS = {
+ "config",
+ "description",
+ "hooks",
+ "info",
+ "logs",
+ "objects",
+ "packed-refs",
+ "refs",
+ "rr-cache",
+ "shallow",
+ "svn",
+ }
+ _FILES = {
+ "COMMIT_EDITMSG",
+ "FETCH_HEAD",
+ "HEAD",
+ "index",
+ "ORIG_HEAD",
+ "unknown-file-should-be-migrated",
+ }
+ _CLEAN_FILES = {
+ "a-vim-temp-file~",
+ "#an-emacs-temp-file#",
+ }
- @classmethod
- @contextlib.contextmanager
- def _simple_layout(cls):
- """Create a simple repo client checkout to test against."""
- with tempfile.TemporaryDirectory() as tempdir:
- tempdir = Path(tempdir)
+ @classmethod
+ @contextlib.contextmanager
+ def _simple_layout(cls):
+ """Create a simple repo client checkout to test against."""
+ with tempfile.TemporaryDirectory() as tempdir:
+ tempdir = Path(tempdir)
- gitdir = tempdir / '.repo/projects/src/test.git'
- gitdir.mkdir(parents=True)
- cmd = ['git', 'init', '--bare', str(gitdir)]
- subprocess.check_call(cmd)
+ gitdir = tempdir / ".repo/projects/src/test.git"
+ gitdir.mkdir(parents=True)
+ cmd = ["git", "init", "--bare", str(gitdir)]
+ subprocess.check_call(cmd)
- dotgit = tempdir / 'src/test/.git'
- dotgit.mkdir(parents=True)
- for name in cls._SYMLINKS:
- (dotgit / name).symlink_to(f'../../../.repo/projects/src/test.git/{name}')
- for name in cls._FILES | cls._CLEAN_FILES:
- (dotgit / name).write_text(name)
+ dotgit = tempdir / "src/test/.git"
+ dotgit.mkdir(parents=True)
+ for name in cls._SYMLINKS:
+ (dotgit / name).symlink_to(
+ f"../../../.repo/projects/src/test.git/{name}"
+ )
+ for name in cls._FILES | cls._CLEAN_FILES:
+ (dotgit / name).write_text(name)
- yield tempdir
+ yield tempdir
- def test_standard(self):
- """Migrate a standard checkout that we expect."""
- with self._simple_layout() as tempdir:
- dotgit = tempdir / 'src/test/.git'
- project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
+ def test_standard(self):
+ """Migrate a standard checkout that we expect."""
+ with self._simple_layout() as tempdir:
+ dotgit = tempdir / "src/test/.git"
+ project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
- # Make sure the dir was transformed into a symlink.
- self.assertTrue(dotgit.is_symlink())
- self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))
+ # Make sure the dir was transformed into a symlink.
+ self.assertTrue(dotgit.is_symlink())
+ self.assertEqual(
+ os.readlink(dotgit),
+ os.path.normpath("../../.repo/projects/src/test.git"),
+ )
- # Make sure files were moved over.
- gitdir = tempdir / '.repo/projects/src/test.git'
- for name in self._FILES:
- self.assertEqual(name, (gitdir / name).read_text())
- # Make sure files were removed.
- for name in self._CLEAN_FILES:
- self.assertFalse((gitdir / name).exists())
+ # Make sure files were moved over.
+ gitdir = tempdir / ".repo/projects/src/test.git"
+ for name in self._FILES:
+ self.assertEqual(name, (gitdir / name).read_text())
+ # Make sure files were removed.
+ for name in self._CLEAN_FILES:
+ self.assertFalse((gitdir / name).exists())
- def test_unknown(self):
- """A checkout with unknown files should abort."""
- with self._simple_layout() as tempdir:
- dotgit = tempdir / 'src/test/.git'
- (tempdir / '.repo/projects/src/test.git/random-file').write_text('one')
- (dotgit / 'random-file').write_text('two')
- with self.assertRaises(error.GitError):
- project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
+ def test_unknown(self):
+ """A checkout with unknown files should abort."""
+ with self._simple_layout() as tempdir:
+ dotgit = tempdir / "src/test/.git"
+ (tempdir / ".repo/projects/src/test.git/random-file").write_text(
+ "one"
+ )
+ (dotgit / "random-file").write_text("two")
+ with self.assertRaises(error.GitError):
+ project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
- # Make sure no content was actually changed.
- self.assertTrue(dotgit.is_dir())
- for name in self._FILES:
- self.assertTrue((dotgit / name).is_file())
- for name in self._CLEAN_FILES:
- self.assertTrue((dotgit / name).is_file())
- for name in self._SYMLINKS:
- self.assertTrue((dotgit / name).is_symlink())
+ # Make sure no content was actually changed.
+ self.assertTrue(dotgit.is_dir())
+ for name in self._FILES:
+ self.assertTrue((dotgit / name).is_file())
+ for name in self._CLEAN_FILES:
+ self.assertTrue((dotgit / name).is_file())
+ for name in self._SYMLINKS:
+ self.assertTrue((dotgit / name).is_symlink())
class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
- """Ensure properties are fetched properly."""
+ """Ensure properties are fetched properly."""
- def setUpManifest(self, tempdir):
- repodir = os.path.join(tempdir, '.repo')
- manifest_dir = os.path.join(repodir, 'manifests')
- manifest_file = os.path.join(
- repodir, manifest_xml.MANIFEST_FILE_NAME)
- local_manifest_dir = os.path.join(
- repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
- os.mkdir(repodir)
- os.mkdir(manifest_dir)
- manifest = manifest_xml.XmlManifest(repodir, manifest_file)
+ def setUpManifest(self, tempdir):
+ repodir = os.path.join(tempdir, ".repo")
+ manifest_dir = os.path.join(repodir, "manifests")
+ manifest_file = os.path.join(repodir, manifest_xml.MANIFEST_FILE_NAME)
+ os.mkdir(repodir)
+ os.mkdir(manifest_dir)
+ manifest = manifest_xml.XmlManifest(repodir, manifest_file)
- return project.ManifestProject(
- manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)
+ return project.ManifestProject(
+ manifest, "test/manifest", os.path.join(tempdir, ".git"), tempdir
+ )
- def test_manifest_config_properties(self):
- """Test we are fetching the manifest config properties correctly."""
+ def test_manifest_config_properties(self):
+ """Test we are fetching the manifest config properties correctly."""
- with TempGitTree() as tempdir:
- fakeproj = self.setUpManifest(tempdir)
+ with TempGitTree() as tempdir:
+ fakeproj = self.setUpManifest(tempdir)
- # Set property using the expected Set method, then ensure
- # the porperty functions are using the correct Get methods.
- fakeproj.config.SetString(
- 'manifest.standalone', 'https://chicken/manifest.git')
- self.assertEqual(
- fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')
+ # Set property using the expected Set method, then ensure
+            # the property functions are using the correct Get methods.
+ fakeproj.config.SetString(
+ "manifest.standalone", "https://chicken/manifest.git"
+ )
+ self.assertEqual(
+ fakeproj.standalone_manifest_url, "https://chicken/manifest.git"
+ )
- fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
- self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')
+ fakeproj.config.SetString(
+ "manifest.groups", "test-group, admin-group"
+ )
+ self.assertEqual(
+ fakeproj.manifest_groups, "test-group, admin-group"
+ )
- fakeproj.config.SetString('repo.reference', 'mirror/ref')
- self.assertEqual(fakeproj.reference, 'mirror/ref')
+ fakeproj.config.SetString("repo.reference", "mirror/ref")
+ self.assertEqual(fakeproj.reference, "mirror/ref")
- fakeproj.config.SetBoolean('repo.dissociate', False)
- self.assertFalse(fakeproj.dissociate)
+ fakeproj.config.SetBoolean("repo.dissociate", False)
+ self.assertFalse(fakeproj.dissociate)
- fakeproj.config.SetBoolean('repo.archive', False)
- self.assertFalse(fakeproj.archive)
+ fakeproj.config.SetBoolean("repo.archive", False)
+ self.assertFalse(fakeproj.archive)
- fakeproj.config.SetBoolean('repo.mirror', False)
- self.assertFalse(fakeproj.mirror)
+ fakeproj.config.SetBoolean("repo.mirror", False)
+ self.assertFalse(fakeproj.mirror)
- fakeproj.config.SetBoolean('repo.worktree', False)
- self.assertFalse(fakeproj.use_worktree)
+ fakeproj.config.SetBoolean("repo.worktree", False)
+ self.assertFalse(fakeproj.use_worktree)
- fakeproj.config.SetBoolean('repo.clonebundle', False)
- self.assertFalse(fakeproj.clone_bundle)
+ fakeproj.config.SetBoolean("repo.clonebundle", False)
+ self.assertFalse(fakeproj.clone_bundle)
- fakeproj.config.SetBoolean('repo.submodules', False)
- self.assertFalse(fakeproj.submodules)
+ fakeproj.config.SetBoolean("repo.submodules", False)
+ self.assertFalse(fakeproj.submodules)
- fakeproj.config.SetBoolean('repo.git-lfs', False)
- self.assertFalse(fakeproj.git_lfs)
+ fakeproj.config.SetBoolean("repo.git-lfs", False)
+ self.assertFalse(fakeproj.git_lfs)
- fakeproj.config.SetBoolean('repo.superproject', False)
- self.assertFalse(fakeproj.use_superproject)
+ fakeproj.config.SetBoolean("repo.superproject", False)
+ self.assertFalse(fakeproj.use_superproject)
- fakeproj.config.SetBoolean('repo.partialclone', False)
- self.assertFalse(fakeproj.partial_clone)
+ fakeproj.config.SetBoolean("repo.partialclone", False)
+ self.assertFalse(fakeproj.partial_clone)
- fakeproj.config.SetString('repo.depth', '48')
- self.assertEqual(fakeproj.depth, '48')
+ fakeproj.config.SetString("repo.depth", "48")
+ self.assertEqual(fakeproj.depth, "48")
- fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
- self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')
+ fakeproj.config.SetString("repo.clonefilter", "blob:limit=10M")
+ self.assertEqual(fakeproj.clone_filter, "blob:limit=10M")
- fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
- self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')
+ fakeproj.config.SetString(
+ "repo.partialcloneexclude", "third_party/big_repo"
+ )
+ self.assertEqual(
+ fakeproj.partial_clone_exclude, "third_party/big_repo"
+ )
- fakeproj.config.SetString('manifest.platform', 'auto')
- self.assertEqual(fakeproj.manifest_platform, 'auto')
+ fakeproj.config.SetString("manifest.platform", "auto")
+ self.assertEqual(fakeproj.manifest_platform, "auto")
diff --git a/tests/test_repo_trace.py b/tests/test_repo_trace.py
index 5faf293..e4aeb5d 100644
--- a/tests/test_repo_trace.py
+++ b/tests/test_repo_trace.py
@@ -22,35 +22,39 @@
class TraceTests(unittest.TestCase):
- """Check Trace behavior."""
+ """Check Trace behavior."""
- def testTrace_MaxSizeEnforced(self):
- content = 'git chicken'
+ def testTrace_MaxSizeEnforced(self):
+ content = "git chicken"
- with repo_trace.Trace(content, first_trace=True):
- pass
- first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)
+ with repo_trace.Trace(content, first_trace=True):
+ pass
+ first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)
- with repo_trace.Trace(content):
- pass
- self.assertGreater(
- os.path.getsize(repo_trace._TRACE_FILE), first_trace_size)
+ with repo_trace.Trace(content):
+ pass
+ self.assertGreater(
+ os.path.getsize(repo_trace._TRACE_FILE), first_trace_size
+ )
- # Check we clear everything is the last chunk is larger than _MAX_SIZE.
- with mock.patch('repo_trace._MAX_SIZE', 0):
- with repo_trace.Trace(content, first_trace=True):
- pass
- self.assertEqual(first_trace_size,
- os.path.getsize(repo_trace._TRACE_FILE))
+        # Check we clear everything if the last chunk is larger than _MAX_SIZE.
+ with mock.patch("repo_trace._MAX_SIZE", 0):
+ with repo_trace.Trace(content, first_trace=True):
+ pass
+ self.assertEqual(
+ first_trace_size, os.path.getsize(repo_trace._TRACE_FILE)
+ )
- # Check we only clear the chunks we need to.
- repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
- with repo_trace.Trace(content, first_trace=True):
- pass
- self.assertEqual(first_trace_size * 2,
- os.path.getsize(repo_trace._TRACE_FILE))
+ # Check we only clear the chunks we need to.
+ repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
+ with repo_trace.Trace(content, first_trace=True):
+ pass
+ self.assertEqual(
+ first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
+ )
- with repo_trace.Trace(content, first_trace=True):
- pass
- self.assertEqual(first_trace_size * 2,
- os.path.getsize(repo_trace._TRACE_FILE))
+ with repo_trace.Trace(content, first_trace=True):
+ pass
+ self.assertEqual(
+ first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
+ )
diff --git a/tests/test_ssh.py b/tests/test_ssh.py
index ffb5cb9..a9c1be7 100644
--- a/tests/test_ssh.py
+++ b/tests/test_ssh.py
@@ -23,52 +23,56 @@
class SshTests(unittest.TestCase):
- """Tests the ssh functions."""
+ """Tests the ssh functions."""
- def test_parse_ssh_version(self):
- """Check _parse_ssh_version() handling."""
- ver = ssh._parse_ssh_version('Unknown\n')
- self.assertEqual(ver, ())
- ver = ssh._parse_ssh_version('OpenSSH_1.0\n')
- self.assertEqual(ver, (1, 0))
- ver = ssh._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
- self.assertEqual(ver, (6, 6, 1))
- ver = ssh._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
- self.assertEqual(ver, (7, 6))
+ def test_parse_ssh_version(self):
+ """Check _parse_ssh_version() handling."""
+ ver = ssh._parse_ssh_version("Unknown\n")
+ self.assertEqual(ver, ())
+ ver = ssh._parse_ssh_version("OpenSSH_1.0\n")
+ self.assertEqual(ver, (1, 0))
+ ver = ssh._parse_ssh_version(
+ "OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n"
+ )
+ self.assertEqual(ver, (6, 6, 1))
+ ver = ssh._parse_ssh_version(
+ "OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n"
+ )
+ self.assertEqual(ver, (7, 6))
- def test_version(self):
- """Check version() handling."""
- with mock.patch('ssh._run_ssh_version', return_value='OpenSSH_1.2\n'):
- self.assertEqual(ssh.version(), (1, 2))
+ def test_version(self):
+ """Check version() handling."""
+ with mock.patch("ssh._run_ssh_version", return_value="OpenSSH_1.2\n"):
+ self.assertEqual(ssh.version(), (1, 2))
- def test_context_manager_empty(self):
- """Verify context manager with no clients works correctly."""
- with multiprocessing.Manager() as manager:
- with ssh.ProxyManager(manager):
- pass
+ def test_context_manager_empty(self):
+ """Verify context manager with no clients works correctly."""
+ with multiprocessing.Manager() as manager:
+ with ssh.ProxyManager(manager):
+ pass
- def test_context_manager_child_cleanup(self):
- """Verify orphaned clients & masters get cleaned up."""
- with multiprocessing.Manager() as manager:
- with ssh.ProxyManager(manager) as ssh_proxy:
- client = subprocess.Popen(['sleep', '964853320'])
- ssh_proxy.add_client(client)
- master = subprocess.Popen(['sleep', '964853321'])
- ssh_proxy.add_master(master)
- # If the process still exists, these will throw timeout errors.
- client.wait(0)
- master.wait(0)
+ def test_context_manager_child_cleanup(self):
+ """Verify orphaned clients & masters get cleaned up."""
+ with multiprocessing.Manager() as manager:
+ with ssh.ProxyManager(manager) as ssh_proxy:
+ client = subprocess.Popen(["sleep", "964853320"])
+ ssh_proxy.add_client(client)
+ master = subprocess.Popen(["sleep", "964853321"])
+ ssh_proxy.add_master(master)
+ # If the process still exists, these will throw timeout errors.
+ client.wait(0)
+ master.wait(0)
- def test_ssh_sock(self):
- """Check sock() function."""
- manager = multiprocessing.Manager()
- proxy = ssh.ProxyManager(manager)
- with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
- # old ssh version uses port
- with mock.patch('ssh.version', return_value=(6, 6)):
- self.assertTrue(proxy.sock().endswith('%p'))
+ def test_ssh_sock(self):
+ """Check sock() function."""
+ manager = multiprocessing.Manager()
+ proxy = ssh.ProxyManager(manager)
+ with mock.patch("tempfile.mkdtemp", return_value="/tmp/foo"):
+ # Old ssh version uses port.
+ with mock.patch("ssh.version", return_value=(6, 6)):
+ self.assertTrue(proxy.sock().endswith("%p"))
- proxy._sock_path = None
- # new ssh version uses hash
- with mock.patch('ssh.version', return_value=(6, 7)):
- self.assertTrue(proxy.sock().endswith('%C'))
+ proxy._sock_path = None
+ # New ssh version uses hash.
+ with mock.patch("ssh.version", return_value=(6, 7)):
+ self.assertTrue(proxy.sock().endswith("%C"))
diff --git a/tests/test_subcmds.py b/tests/test_subcmds.py
index bc53051..73b66e3 100644
--- a/tests/test_subcmds.py
+++ b/tests/test_subcmds.py
@@ -21,53 +21,57 @@
class AllCommands(unittest.TestCase):
- """Check registered all_commands."""
+ """Check registered all_commands."""
- def test_required_basic(self):
- """Basic checking of registered commands."""
- # NB: We don't test all subcommands as we want to avoid "change detection"
- # tests, so we just look for the most common/important ones here that are
- # unlikely to ever change.
- for cmd in {'cherry-pick', 'help', 'init', 'start', 'sync', 'upload'}:
- self.assertIn(cmd, subcmds.all_commands)
+ def test_required_basic(self):
+ """Basic checking of registered commands."""
+ # NB: We don't test all subcommands as we want to avoid "change
+ # detection" tests, so we just look for the most common/important ones
+ # here that are unlikely to ever change.
+ for cmd in {"cherry-pick", "help", "init", "start", "sync", "upload"}:
+ self.assertIn(cmd, subcmds.all_commands)
- def test_naming(self):
- """Verify we don't add things that we shouldn't."""
- for cmd in subcmds.all_commands:
- # Reject filename suffixes like "help.py".
- self.assertNotIn('.', cmd)
+ def test_naming(self):
+ """Verify we don't add things that we shouldn't."""
+ for cmd in subcmds.all_commands:
+ # Reject filename suffixes like "help.py".
+ self.assertNotIn(".", cmd)
- # Make sure all '_' were converted to '-'.
- self.assertNotIn('_', cmd)
+ # Make sure all '_' were converted to '-'.
+ self.assertNotIn("_", cmd)
- # Reject internal python paths like "__init__".
- self.assertFalse(cmd.startswith('__'))
+ # Reject internal python paths like "__init__".
+ self.assertFalse(cmd.startswith("__"))
- def test_help_desc_style(self):
- """Force some consistency in option descriptions.
+ def test_help_desc_style(self):
+ """Force some consistency in option descriptions.
- Python's optparse & argparse has a few default options like --help. Their
- option description text uses lowercase sentence fragments, so enforce our
- options follow the same style so UI is consistent.
+ Python's optparse & argparse has a few default options like --help.
+ Their option description text uses lowercase sentence fragments, so
+ enforce our options follow the same style so UI is consistent.
- We enforce:
- * Text starts with lowercase.
- * Text doesn't end with period.
- """
- for name, cls in subcmds.all_commands.items():
- cmd = cls()
- parser = cmd.OptionParser
- for option in parser.option_list:
- if option.help == optparse.SUPPRESS_HELP:
- continue
+ We enforce:
+ * Text starts with lowercase.
+ * Text doesn't end with period.
+ """
+ for name, cls in subcmds.all_commands.items():
+ cmd = cls()
+ parser = cmd.OptionParser
+ for option in parser.option_list:
+ if option.help == optparse.SUPPRESS_HELP:
+ continue
- c = option.help[0]
- self.assertEqual(
- c.lower(), c,
- msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
- f'should start with lowercase: "{option.help}"')
+ c = option.help[0]
+ self.assertEqual(
+ c.lower(),
+ c,
+ msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
+ f'help text should start with lowercase: "{option.help}"',
+ )
- self.assertNotEqual(
- option.help[-1], '.',
- msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
- f'should not end in a period: "{option.help}"')
+ self.assertNotEqual(
+ option.help[-1],
+ ".",
+ msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
+ f'help text should not end in a period: "{option.help}"',
+ )
diff --git a/tests/test_subcmds_init.py b/tests/test_subcmds_init.py
index af4346d..25e5be5 100644
--- a/tests/test_subcmds_init.py
+++ b/tests/test_subcmds_init.py
@@ -20,30 +20,27 @@
class InitCommand(unittest.TestCase):
- """Check registered all_commands."""
+ """Check registered all_commands."""
- def setUp(self):
- self.cmd = init.Init()
+ def setUp(self):
+ self.cmd = init.Init()
- def test_cli_parser_good(self):
- """Check valid command line options."""
- ARGV = (
- [],
- )
- for argv in ARGV:
- opts, args = self.cmd.OptionParser.parse_args(argv)
- self.cmd.ValidateOptions(opts, args)
+ def test_cli_parser_good(self):
+ """Check valid command line options."""
+ ARGV = ([],)
+ for argv in ARGV:
+ opts, args = self.cmd.OptionParser.parse_args(argv)
+ self.cmd.ValidateOptions(opts, args)
- def test_cli_parser_bad(self):
- """Check invalid command line options."""
- ARGV = (
- # Too many arguments.
- ['url', 'asdf'],
-
- # Conflicting options.
- ['--mirror', '--archive'],
- )
- for argv in ARGV:
- opts, args = self.cmd.OptionParser.parse_args(argv)
- with self.assertRaises(SystemExit):
- self.cmd.ValidateOptions(opts, args)
+ def test_cli_parser_bad(self):
+ """Check invalid command line options."""
+ ARGV = (
+ # Too many arguments.
+ ["url", "asdf"],
+ # Conflicting options.
+ ["--mirror", "--archive"],
+ )
+ for argv in ARGV:
+ opts, args = self.cmd.OptionParser.parse_args(argv)
+ with self.assertRaises(SystemExit):
+ self.cmd.ValidateOptions(opts, args)
diff --git a/tests/test_subcmds_sync.py b/tests/test_subcmds_sync.py
index 236d54e..5c8e606 100644
--- a/tests/test_subcmds_sync.py
+++ b/tests/test_subcmds_sync.py
@@ -23,111 +23,138 @@
from subcmds import sync
-@pytest.mark.parametrize('use_superproject, cli_args, result', [
- (True, ['--current-branch'], True),
- (True, ['--no-current-branch'], True),
- (True, [], True),
- (False, ['--current-branch'], True),
- (False, ['--no-current-branch'], False),
- (False, [], None),
-])
+@pytest.mark.parametrize(
+ "use_superproject, cli_args, result",
+ [
+ (True, ["--current-branch"], True),
+ (True, ["--no-current-branch"], True),
+ (True, [], True),
+ (False, ["--current-branch"], True),
+ (False, ["--no-current-branch"], False),
+ (False, [], None),
+ ],
+)
def test_get_current_branch_only(use_superproject, cli_args, result):
- """Test Sync._GetCurrentBranchOnly logic.
+ """Test Sync._GetCurrentBranchOnly logic.
- Sync._GetCurrentBranchOnly should return True if a superproject is requested,
- and otherwise the value of the current_branch_only option.
- """
- cmd = sync.Sync()
- opts, _ = cmd.OptionParser.parse_args(cli_args)
+ Sync._GetCurrentBranchOnly should return True if a superproject is
+ requested, and otherwise the value of the current_branch_only option.
+ """
+ cmd = sync.Sync()
+ opts, _ = cmd.OptionParser.parse_args(cli_args)
- with mock.patch('git_superproject.UseSuperproject',
- return_value=use_superproject):
- assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
+ with mock.patch(
+ "git_superproject.UseSuperproject", return_value=use_superproject
+ ):
+ assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
# Used to patch os.cpu_count() for reliable results.
OS_CPU_COUNT = 24
-@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
- # No user or manifest settings.
- ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
- # No user settings, so manifest settings control.
- ([], 3, 3, 3, 3),
- # User settings, but no manifest.
- (['--jobs=4'], None, 4, 4, 4),
- (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
- (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
- (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
- (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
- (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
- (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
- # User settings with manifest settings.
- (['--jobs=4'], 3, 4, 4, 4),
- (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
- (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
- (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
- (['--jobs-network=5'], 3, 3, 5, 3),
- (['--jobs-checkout=6'], 3, 3, 3, 6),
- (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
- # Settings that exceed rlimits get capped.
- (['--jobs=1000000'], None, 83, 83, 83),
- ([], 1000000, 83, 83, 83),
-])
+
+@pytest.mark.parametrize(
+ "argv, jobs_manifest, jobs, jobs_net, jobs_check",
+ [
+ # No user or manifest settings.
+ ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
+ # No user settings, so manifest settings control.
+ ([], 3, 3, 3, 3),
+ # User settings, but no manifest.
+ (["--jobs=4"], None, 4, 4, 4),
+ (["--jobs=4", "--jobs-network=5"], None, 4, 5, 4),
+ (["--jobs=4", "--jobs-checkout=6"], None, 4, 4, 6),
+ (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], None, 4, 5, 6),
+ (
+ ["--jobs-network=5"],
+ None,
+ OS_CPU_COUNT,
+ 5,
+ command.DEFAULT_LOCAL_JOBS,
+ ),
+ (["--jobs-checkout=6"], None, OS_CPU_COUNT, 1, 6),
+ (["--jobs-network=5", "--jobs-checkout=6"], None, OS_CPU_COUNT, 5, 6),
+ # User settings with manifest settings.
+ (["--jobs=4"], 3, 4, 4, 4),
+ (["--jobs=4", "--jobs-network=5"], 3, 4, 5, 4),
+ (["--jobs=4", "--jobs-checkout=6"], 3, 4, 4, 6),
+ (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], 3, 4, 5, 6),
+ (["--jobs-network=5"], 3, 3, 5, 3),
+ (["--jobs-checkout=6"], 3, 3, 3, 6),
+ (["--jobs-network=5", "--jobs-checkout=6"], 3, 3, 5, 6),
+ # Settings that exceed rlimits get capped.
+ (["--jobs=1000000"], None, 83, 83, 83),
+ ([], 1000000, 83, 83, 83),
+ ],
+)
def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
- """Tests --jobs option behavior."""
- mp = mock.MagicMock()
- mp.manifest.default.sync_j = jobs_manifest
+ """Tests --jobs option behavior."""
+ mp = mock.MagicMock()
+ mp.manifest.default.sync_j = jobs_manifest
- cmd = sync.Sync()
- opts, args = cmd.OptionParser.parse_args(argv)
- cmd.ValidateOptions(opts, args)
+ cmd = sync.Sync()
+ opts, args = cmd.OptionParser.parse_args(argv)
+ cmd.ValidateOptions(opts, args)
- with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
- with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
- cmd._ValidateOptionsWithManifest(opts, mp)
- assert opts.jobs == jobs
- assert opts.jobs_network == jobs_net
- assert opts.jobs_checkout == jobs_check
+ with mock.patch.object(sync, "_rlimit_nofile", return_value=(256, 256)):
+ with mock.patch.object(os, "cpu_count", return_value=OS_CPU_COUNT):
+ cmd._ValidateOptionsWithManifest(opts, mp)
+ assert opts.jobs == jobs
+ assert opts.jobs_network == jobs_net
+ assert opts.jobs_checkout == jobs_check
class GetPreciousObjectsState(unittest.TestCase):
- """Tests for _GetPreciousObjectsState."""
+ """Tests for _GetPreciousObjectsState."""
- def setUp(self):
- """Common setup."""
- self.cmd = sync.Sync()
- self.project = p = mock.MagicMock(use_git_worktrees=False,
- UseAlternates=False)
- p.manifest.GetProjectsWithName.return_value = [p]
+ def setUp(self):
+ """Common setup."""
+ self.cmd = sync.Sync()
+ self.project = p = mock.MagicMock(
+ use_git_worktrees=False, UseAlternates=False
+ )
+ p.manifest.GetProjectsWithName.return_value = [p]
- self.opt = mock.Mock(spec_set=['this_manifest_only'])
- self.opt.this_manifest_only = False
+ self.opt = mock.Mock(spec_set=["this_manifest_only"])
+ self.opt.this_manifest_only = False
- def test_worktrees(self):
- """False for worktrees."""
- self.project.use_git_worktrees = True
- self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+ def test_worktrees(self):
+ """False for worktrees."""
+ self.project.use_git_worktrees = True
+ self.assertFalse(
+ self.cmd._GetPreciousObjectsState(self.project, self.opt)
+ )
- def test_not_shared(self):
- """Singleton project."""
- self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+ def test_not_shared(self):
+ """Singleton project."""
+ self.assertFalse(
+ self.cmd._GetPreciousObjectsState(self.project, self.opt)
+ )
- def test_shared(self):
- """Shared project."""
- self.project.manifest.GetProjectsWithName.return_value = [
- self.project, self.project
- ]
- self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+ def test_shared(self):
+ """Shared project."""
+ self.project.manifest.GetProjectsWithName.return_value = [
+ self.project,
+ self.project,
+ ]
+ self.assertTrue(
+ self.cmd._GetPreciousObjectsState(self.project, self.opt)
+ )
- def test_shared_with_alternates(self):
- """Shared project, with alternates."""
- self.project.manifest.GetProjectsWithName.return_value = [
- self.project, self.project
- ]
- self.project.UseAlternates = True
- self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+ def test_shared_with_alternates(self):
+ """Shared project, with alternates."""
+ self.project.manifest.GetProjectsWithName.return_value = [
+ self.project,
+ self.project,
+ ]
+ self.project.UseAlternates = True
+ self.assertFalse(
+ self.cmd._GetPreciousObjectsState(self.project, self.opt)
+ )
- def test_not_found(self):
- """Project not found in manifest."""
- self.project.manifest.GetProjectsWithName.return_value = []
- self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+ def test_not_found(self):
+ """Project not found in manifest."""
+ self.project.manifest.GetProjectsWithName.return_value = []
+ self.assertFalse(
+ self.cmd._GetPreciousObjectsState(self.project, self.opt)
+ )
diff --git a/tests/test_update_manpages.py b/tests/test_update_manpages.py
index 0de85be..12b19ec 100644
--- a/tests/test_update_manpages.py
+++ b/tests/test_update_manpages.py
@@ -20,9 +20,9 @@
class UpdateManpagesTest(unittest.TestCase):
- """Tests the update-manpages code."""
+ """Tests the update-manpages code."""
- def test_replace_regex(self):
- """Check that replace_regex works."""
- data = '\n\033[1mSummary\033[m\n'
- self.assertEqual(update_manpages.replace_regex(data),'\nSummary\n')
+ def test_replace_regex(self):
+ """Check that replace_regex works."""
+ data = "\n\033[1mSummary\033[m\n"
+ self.assertEqual(update_manpages.replace_regex(data), "\nSummary\n")
diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py
index ef879a5..21fa094 100644
--- a/tests/test_wrapper.py
+++ b/tests/test_wrapper.py
@@ -28,528 +28,615 @@
def fixture(*paths):
- """Return a path relative to tests/fixtures.
- """
- return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
+ """Return a path relative to tests/fixtures."""
+ return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
class RepoWrapperTestCase(unittest.TestCase):
- """TestCase for the wrapper module."""
+ """TestCase for the wrapper module."""
- def setUp(self):
- """Load the wrapper module every time."""
- wrapper.Wrapper.cache_clear()
- self.wrapper = wrapper.Wrapper()
+ def setUp(self):
+ """Load the wrapper module every time."""
+ wrapper.Wrapper.cache_clear()
+ self.wrapper = wrapper.Wrapper()
class RepoWrapperUnitTest(RepoWrapperTestCase):
- """Tests helper functions in the repo wrapper
- """
+ """Tests helper functions in the repo wrapper"""
- def test_version(self):
- """Make sure _Version works."""
- with self.assertRaises(SystemExit) as e:
- with mock.patch('sys.stdout', new_callable=StringIO) as stdout:
- with mock.patch('sys.stderr', new_callable=StringIO) as stderr:
- self.wrapper._Version()
- self.assertEqual(0, e.exception.code)
- self.assertEqual('', stderr.getvalue())
- self.assertIn('repo launcher version', stdout.getvalue())
+ def test_version(self):
+ """Make sure _Version works."""
+ with self.assertRaises(SystemExit) as e:
+ with mock.patch("sys.stdout", new_callable=StringIO) as stdout:
+ with mock.patch("sys.stderr", new_callable=StringIO) as stderr:
+ self.wrapper._Version()
+ self.assertEqual(0, e.exception.code)
+ self.assertEqual("", stderr.getvalue())
+ self.assertIn("repo launcher version", stdout.getvalue())
- def test_python_constraints(self):
- """The launcher should never require newer than main.py."""
- self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
- self.wrapper.MIN_PYTHON_VERSION_HARD)
- self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
- self.wrapper.MIN_PYTHON_VERSION_SOFT)
- # Make sure the versions are themselves in sync.
- self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT,
- self.wrapper.MIN_PYTHON_VERSION_HARD)
+ def test_python_constraints(self):
+ """The launcher should never require newer than main.py."""
+ self.assertGreaterEqual(
+ main.MIN_PYTHON_VERSION_HARD, self.wrapper.MIN_PYTHON_VERSION_HARD
+ )
+ self.assertGreaterEqual(
+ main.MIN_PYTHON_VERSION_SOFT, self.wrapper.MIN_PYTHON_VERSION_SOFT
+ )
+ # Make sure the versions are themselves in sync.
+ self.assertGreaterEqual(
+ self.wrapper.MIN_PYTHON_VERSION_SOFT,
+ self.wrapper.MIN_PYTHON_VERSION_HARD,
+ )
- def test_init_parser(self):
- """Make sure 'init' GetParser works."""
- parser = self.wrapper.GetParser(gitc_init=False)
- opts, args = parser.parse_args([])
- self.assertEqual([], args)
- self.assertIsNone(opts.manifest_url)
+ def test_init_parser(self):
+ """Make sure 'init' GetParser works."""
+ parser = self.wrapper.GetParser(gitc_init=False)
+ opts, args = parser.parse_args([])
+ self.assertEqual([], args)
+ self.assertIsNone(opts.manifest_url)
- def test_gitc_init_parser(self):
- """Make sure 'gitc-init' GetParser works."""
- parser = self.wrapper.GetParser(gitc_init=True)
- opts, args = parser.parse_args([])
- self.assertEqual([], args)
- self.assertIsNone(opts.manifest_file)
+ def test_gitc_init_parser(self):
+ """Make sure 'gitc-init' GetParser works."""
+ parser = self.wrapper.GetParser(gitc_init=True)
+ opts, args = parser.parse_args([])
+ self.assertEqual([], args)
+ self.assertIsNone(opts.manifest_file)
- def test_get_gitc_manifest_dir_no_gitc(self):
- """
- Test reading a missing gitc config file
- """
- self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
- val = self.wrapper.get_gitc_manifest_dir()
- self.assertEqual(val, '')
+ def test_get_gitc_manifest_dir_no_gitc(self):
+ """
+ Test reading a missing gitc config file
+ """
+ self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
+ val = self.wrapper.get_gitc_manifest_dir()
+ self.assertEqual(val, "")
- def test_get_gitc_manifest_dir(self):
- """
- Test reading the gitc config file and parsing the directory
- """
- self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
- val = self.wrapper.get_gitc_manifest_dir()
- self.assertEqual(val, '/test/usr/local/google/gitc')
+ def test_get_gitc_manifest_dir(self):
+ """
+ Test reading the gitc config file and parsing the directory
+ """
+ self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
+ val = self.wrapper.get_gitc_manifest_dir()
+ self.assertEqual(val, "/test/usr/local/google/gitc")
- def test_gitc_parse_clientdir_no_gitc(self):
- """
- Test parsing the gitc clientdir without gitc running
- """
- self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None)
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test')
+ def test_gitc_parse_clientdir_no_gitc(self):
+ """
+ Test parsing the gitc clientdir without gitc running
+ """
+ self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
+ self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
+ )
- def test_gitc_parse_clientdir(self):
- """
- Test parsing the gitc clientdir
- """
- self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None)
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/'), 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/extra'), 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test'), 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/'), 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/extra'),
- 'test')
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/'), None)
- self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/'), None)
+ def test_gitc_parse_clientdir(self):
+ """
+ Test parsing the gitc clientdir
+ """
+ self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
+ self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/"), "test"
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/extra"),
+ "test",
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir(
+ "/test/usr/local/google/gitc/test"
+ ),
+ "test",
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir(
+ "/test/usr/local/google/gitc/test/"
+ ),
+ "test",
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir(
+ "/test/usr/local/google/gitc/test/extra"
+ ),
+ "test",
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/"), None
+ )
+ self.assertEqual(
+ self.wrapper.gitc_parse_clientdir("/test/usr/local/google/gitc/"),
+ None,
+ )
class SetGitTrace2ParentSid(RepoWrapperTestCase):
- """Check SetGitTrace2ParentSid behavior."""
+ """Check SetGitTrace2ParentSid behavior."""
- KEY = 'GIT_TRACE2_PARENT_SID'
- VALID_FORMAT = re.compile(r'^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$')
+ KEY = "GIT_TRACE2_PARENT_SID"
+ VALID_FORMAT = re.compile(r"^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$")
- def test_first_set(self):
- """Test env var not yet set."""
- env = {}
- self.wrapper.SetGitTrace2ParentSid(env)
- self.assertIn(self.KEY, env)
- value = env[self.KEY]
- self.assertRegex(value, self.VALID_FORMAT)
+ def test_first_set(self):
+ """Test env var not yet set."""
+ env = {}
+ self.wrapper.SetGitTrace2ParentSid(env)
+ self.assertIn(self.KEY, env)
+ value = env[self.KEY]
+ self.assertRegex(value, self.VALID_FORMAT)
- def test_append(self):
- """Test env var is appended."""
- env = {self.KEY: 'pfx'}
- self.wrapper.SetGitTrace2ParentSid(env)
- self.assertIn(self.KEY, env)
- value = env[self.KEY]
- self.assertTrue(value.startswith('pfx/'))
- self.assertRegex(value[4:], self.VALID_FORMAT)
+ def test_append(self):
+ """Test env var is appended."""
+ env = {self.KEY: "pfx"}
+ self.wrapper.SetGitTrace2ParentSid(env)
+ self.assertIn(self.KEY, env)
+ value = env[self.KEY]
+ self.assertTrue(value.startswith("pfx/"))
+ self.assertRegex(value[4:], self.VALID_FORMAT)
- def test_global_context(self):
- """Check os.environ gets updated by default."""
- os.environ.pop(self.KEY, None)
- self.wrapper.SetGitTrace2ParentSid()
- self.assertIn(self.KEY, os.environ)
- value = os.environ[self.KEY]
- self.assertRegex(value, self.VALID_FORMAT)
+ def test_global_context(self):
+ """Check os.environ gets updated by default."""
+ os.environ.pop(self.KEY, None)
+ self.wrapper.SetGitTrace2ParentSid()
+ self.assertIn(self.KEY, os.environ)
+ value = os.environ[self.KEY]
+ self.assertRegex(value, self.VALID_FORMAT)
class RunCommand(RepoWrapperTestCase):
- """Check run_command behavior."""
+ """Check run_command behavior."""
- def test_capture(self):
- """Check capture_output handling."""
- ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
- # echo command appends OS specific linesep, but on Windows + Git Bash
- # we get UNIX ending, so we allow both.
- self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n'])
+ def test_capture(self):
+ """Check capture_output handling."""
+ ret = self.wrapper.run_command(["echo", "hi"], capture_output=True)
+ # echo command appends OS specific linesep, but on Windows + Git Bash
+ # we get UNIX ending, so we allow both.
+ self.assertIn(ret.stdout, ["hi" + os.linesep, "hi\n"])
- def test_check(self):
- """Check check handling."""
- self.wrapper.run_command(['true'], check=False)
- self.wrapper.run_command(['true'], check=True)
- self.wrapper.run_command(['false'], check=False)
- with self.assertRaises(self.wrapper.RunError):
- self.wrapper.run_command(['false'], check=True)
+ def test_check(self):
+ """Check check handling."""
+ self.wrapper.run_command(["true"], check=False)
+ self.wrapper.run_command(["true"], check=True)
+ self.wrapper.run_command(["false"], check=False)
+ with self.assertRaises(self.wrapper.RunError):
+ self.wrapper.run_command(["false"], check=True)
class RunGit(RepoWrapperTestCase):
- """Check run_git behavior."""
+ """Check run_git behavior."""
- def test_capture(self):
- """Check capture_output handling."""
- ret = self.wrapper.run_git('--version')
- self.assertIn('git', ret.stdout)
+ def test_capture(self):
+ """Check capture_output handling."""
+ ret = self.wrapper.run_git("--version")
+ self.assertIn("git", ret.stdout)
- def test_check(self):
- """Check check handling."""
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper.run_git('--version-asdfasdf')
- self.wrapper.run_git('--version-asdfasdf', check=False)
+ def test_check(self):
+ """Check check handling."""
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper.run_git("--version-asdfasdf")
+ self.wrapper.run_git("--version-asdfasdf", check=False)
class ParseGitVersion(RepoWrapperTestCase):
- """Check ParseGitVersion behavior."""
+ """Check ParseGitVersion behavior."""
- def test_autoload(self):
- """Check we can load the version from the live git."""
- ret = self.wrapper.ParseGitVersion()
- self.assertIsNotNone(ret)
+ def test_autoload(self):
+ """Check we can load the version from the live git."""
+ ret = self.wrapper.ParseGitVersion()
+ self.assertIsNotNone(ret)
- def test_bad_ver(self):
- """Check handling of bad git versions."""
- ret = self.wrapper.ParseGitVersion(ver_str='asdf')
- self.assertIsNone(ret)
+ def test_bad_ver(self):
+ """Check handling of bad git versions."""
+ ret = self.wrapper.ParseGitVersion(ver_str="asdf")
+ self.assertIsNone(ret)
- def test_normal_ver(self):
- """Check handling of normal git versions."""
- ret = self.wrapper.ParseGitVersion(ver_str='git version 2.25.1')
- self.assertEqual(2, ret.major)
- self.assertEqual(25, ret.minor)
- self.assertEqual(1, ret.micro)
- self.assertEqual('2.25.1', ret.full)
+ def test_normal_ver(self):
+ """Check handling of normal git versions."""
+ ret = self.wrapper.ParseGitVersion(ver_str="git version 2.25.1")
+ self.assertEqual(2, ret.major)
+ self.assertEqual(25, ret.minor)
+ self.assertEqual(1, ret.micro)
+ self.assertEqual("2.25.1", ret.full)
- def test_extended_ver(self):
- """Check handling of extended distro git versions."""
- ret = self.wrapper.ParseGitVersion(
- ver_str='git version 1.30.50.696.g5e7596f4ac-goog')
- self.assertEqual(1, ret.major)
- self.assertEqual(30, ret.minor)
- self.assertEqual(50, ret.micro)
- self.assertEqual('1.30.50.696.g5e7596f4ac-goog', ret.full)
+ def test_extended_ver(self):
+ """Check handling of extended distro git versions."""
+ ret = self.wrapper.ParseGitVersion(
+ ver_str="git version 1.30.50.696.g5e7596f4ac-goog"
+ )
+ self.assertEqual(1, ret.major)
+ self.assertEqual(30, ret.minor)
+ self.assertEqual(50, ret.micro)
+ self.assertEqual("1.30.50.696.g5e7596f4ac-goog", ret.full)
class CheckGitVersion(RepoWrapperTestCase):
- """Check _CheckGitVersion behavior."""
+ """Check _CheckGitVersion behavior."""
- def test_unknown(self):
- """Unknown versions should abort."""
- with mock.patch.object(self.wrapper, 'ParseGitVersion', return_value=None):
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper._CheckGitVersion()
+ def test_unknown(self):
+ """Unknown versions should abort."""
+ with mock.patch.object(
+ self.wrapper, "ParseGitVersion", return_value=None
+ ):
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper._CheckGitVersion()
- def test_old(self):
- """Old versions should abort."""
- with mock.patch.object(
- self.wrapper, 'ParseGitVersion',
- return_value=self.wrapper.GitVersion(1, 0, 0, '1.0.0')):
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper._CheckGitVersion()
+ def test_old(self):
+ """Old versions should abort."""
+ with mock.patch.object(
+ self.wrapper,
+ "ParseGitVersion",
+ return_value=self.wrapper.GitVersion(1, 0, 0, "1.0.0"),
+ ):
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper._CheckGitVersion()
- def test_new(self):
- """Newer versions should run fine."""
- with mock.patch.object(
- self.wrapper, 'ParseGitVersion',
- return_value=self.wrapper.GitVersion(100, 0, 0, '100.0.0')):
- self.wrapper._CheckGitVersion()
+ def test_new(self):
+ """Newer versions should run fine."""
+ with mock.patch.object(
+ self.wrapper,
+ "ParseGitVersion",
+ return_value=self.wrapper.GitVersion(100, 0, 0, "100.0.0"),
+ ):
+ self.wrapper._CheckGitVersion()
class Requirements(RepoWrapperTestCase):
- """Check Requirements handling."""
+ """Check Requirements handling."""
- def test_missing_file(self):
- """Don't crash if the file is missing (old version)."""
- testdir = os.path.dirname(os.path.realpath(__file__))
- self.assertIsNone(self.wrapper.Requirements.from_dir(testdir))
- self.assertIsNone(self.wrapper.Requirements.from_file(
- os.path.join(testdir, 'xxxxxxxxxxxxxxxxxxxxxxxx')))
+ def test_missing_file(self):
+ """Don't crash if the file is missing (old version)."""
+ testdir = os.path.dirname(os.path.realpath(__file__))
+ self.assertIsNone(self.wrapper.Requirements.from_dir(testdir))
+ self.assertIsNone(
+ self.wrapper.Requirements.from_file(
+ os.path.join(testdir, "xxxxxxxxxxxxxxxxxxxxxxxx")
+ )
+ )
- def test_corrupt_data(self):
- """If the file can't be parsed, don't blow up."""
- self.assertIsNone(self.wrapper.Requirements.from_file(__file__))
- self.assertIsNone(self.wrapper.Requirements.from_data(b'x'))
+ def test_corrupt_data(self):
+ """If the file can't be parsed, don't blow up."""
+ self.assertIsNone(self.wrapper.Requirements.from_file(__file__))
+ self.assertIsNone(self.wrapper.Requirements.from_data(b"x"))
- def test_valid_data(self):
- """Make sure we can parse the file we ship."""
- self.assertIsNotNone(self.wrapper.Requirements.from_data(b'{}'))
- rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
- self.assertIsNotNone(self.wrapper.Requirements.from_dir(rootdir))
- self.assertIsNotNone(self.wrapper.Requirements.from_file(os.path.join(
- rootdir, 'requirements.json')))
+ def test_valid_data(self):
+ """Make sure we can parse the file we ship."""
+ self.assertIsNotNone(self.wrapper.Requirements.from_data(b"{}"))
+ rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+ self.assertIsNotNone(self.wrapper.Requirements.from_dir(rootdir))
+ self.assertIsNotNone(
+ self.wrapper.Requirements.from_file(
+ os.path.join(rootdir, "requirements.json")
+ )
+ )
- def test_format_ver(self):
- """Check format_ver can format."""
- self.assertEqual('1.2.3', self.wrapper.Requirements._format_ver((1, 2, 3)))
- self.assertEqual('1', self.wrapper.Requirements._format_ver([1]))
+ def test_format_ver(self):
+ """Check format_ver can format."""
+ self.assertEqual(
+ "1.2.3", self.wrapper.Requirements._format_ver((1, 2, 3))
+ )
+ self.assertEqual("1", self.wrapper.Requirements._format_ver([1]))
- def test_assert_all_unknown(self):
- """Check assert_all works with incompatible file."""
- reqs = self.wrapper.Requirements({})
- reqs.assert_all()
+ def test_assert_all_unknown(self):
+ """Check assert_all works with incompatible file."""
+ reqs = self.wrapper.Requirements({})
+ reqs.assert_all()
- def test_assert_all_new_repo(self):
- """Check assert_all accepts new enough repo."""
- reqs = self.wrapper.Requirements({'repo': {'hard': [1, 0]}})
- reqs.assert_all()
+ def test_assert_all_new_repo(self):
+ """Check assert_all accepts new enough repo."""
+ reqs = self.wrapper.Requirements({"repo": {"hard": [1, 0]}})
+ reqs.assert_all()
- def test_assert_all_old_repo(self):
- """Check assert_all rejects old repo."""
- reqs = self.wrapper.Requirements({'repo': {'hard': [99999, 0]}})
- with self.assertRaises(SystemExit):
- reqs.assert_all()
+ def test_assert_all_old_repo(self):
+ """Check assert_all rejects old repo."""
+ reqs = self.wrapper.Requirements({"repo": {"hard": [99999, 0]}})
+ with self.assertRaises(SystemExit):
+ reqs.assert_all()
- def test_assert_all_new_python(self):
- """Check assert_all accepts new enough python."""
- reqs = self.wrapper.Requirements({'python': {'hard': sys.version_info}})
- reqs.assert_all()
+ def test_assert_all_new_python(self):
+ """Check assert_all accepts new enough python."""
+ reqs = self.wrapper.Requirements({"python": {"hard": sys.version_info}})
+ reqs.assert_all()
- def test_assert_all_old_python(self):
- """Check assert_all rejects old python."""
- reqs = self.wrapper.Requirements({'python': {'hard': [99999, 0]}})
- with self.assertRaises(SystemExit):
- reqs.assert_all()
+ def test_assert_all_old_python(self):
+ """Check assert_all rejects old python."""
+ reqs = self.wrapper.Requirements({"python": {"hard": [99999, 0]}})
+ with self.assertRaises(SystemExit):
+ reqs.assert_all()
- def test_assert_ver_unknown(self):
- """Check assert_ver works with incompatible file."""
- reqs = self.wrapper.Requirements({})
- reqs.assert_ver('xxx', (1, 0))
+ def test_assert_ver_unknown(self):
+ """Check assert_ver works with incompatible file."""
+ reqs = self.wrapper.Requirements({})
+ reqs.assert_ver("xxx", (1, 0))
- def test_assert_ver_new(self):
- """Check assert_ver allows new enough versions."""
- reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}})
- reqs.assert_ver('git', (1, 0))
- reqs.assert_ver('git', (1, 5))
- reqs.assert_ver('git', (2, 0))
- reqs.assert_ver('git', (2, 5))
+ def test_assert_ver_new(self):
+ """Check assert_ver allows new enough versions."""
+ reqs = self.wrapper.Requirements(
+ {"git": {"hard": [1, 0], "soft": [2, 0]}}
+ )
+ reqs.assert_ver("git", (1, 0))
+ reqs.assert_ver("git", (1, 5))
+ reqs.assert_ver("git", (2, 0))
+ reqs.assert_ver("git", (2, 5))
- def test_assert_ver_old(self):
- """Check assert_ver rejects old versions."""
- reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}})
- with self.assertRaises(SystemExit):
- reqs.assert_ver('git', (0, 5))
+ def test_assert_ver_old(self):
+ """Check assert_ver rejects old versions."""
+ reqs = self.wrapper.Requirements(
+ {"git": {"hard": [1, 0], "soft": [2, 0]}}
+ )
+ with self.assertRaises(SystemExit):
+ reqs.assert_ver("git", (0, 5))
class NeedSetupGnuPG(RepoWrapperTestCase):
- """Check NeedSetupGnuPG behavior."""
+ """Check NeedSetupGnuPG behavior."""
- def test_missing_dir(self):
- """The ~/.repoconfig tree doesn't exist yet."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = os.path.join(tempdir, 'foo')
- self.assertTrue(self.wrapper.NeedSetupGnuPG())
+ def test_missing_dir(self):
+ """The ~/.repoconfig tree doesn't exist yet."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = os.path.join(tempdir, "foo")
+ self.assertTrue(self.wrapper.NeedSetupGnuPG())
- def test_missing_keyring(self):
- """The keyring-version file doesn't exist yet."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = tempdir
- self.assertTrue(self.wrapper.NeedSetupGnuPG())
+ def test_missing_keyring(self):
+ """The keyring-version file doesn't exist yet."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = tempdir
+ self.assertTrue(self.wrapper.NeedSetupGnuPG())
- def test_empty_keyring(self):
- """The keyring-version file exists, but is empty."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = tempdir
- with open(os.path.join(tempdir, 'keyring-version'), 'w'):
- pass
- self.assertTrue(self.wrapper.NeedSetupGnuPG())
+ def test_empty_keyring(self):
+ """The keyring-version file exists, but is empty."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = tempdir
+ with open(os.path.join(tempdir, "keyring-version"), "w"):
+ pass
+ self.assertTrue(self.wrapper.NeedSetupGnuPG())
- def test_old_keyring(self):
- """The keyring-version file exists, but it's old."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = tempdir
- with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
- fp.write('1.0\n')
- self.assertTrue(self.wrapper.NeedSetupGnuPG())
+ def test_old_keyring(self):
+ """The keyring-version file exists, but it's old."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = tempdir
+ with open(os.path.join(tempdir, "keyring-version"), "w") as fp:
+ fp.write("1.0\n")
+ self.assertTrue(self.wrapper.NeedSetupGnuPG())
- def test_new_keyring(self):
- """The keyring-version file exists, and is up-to-date."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = tempdir
- with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
- fp.write('1000.0\n')
- self.assertFalse(self.wrapper.NeedSetupGnuPG())
+ def test_new_keyring(self):
+ """The keyring-version file exists, and is up-to-date."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = tempdir
+ with open(os.path.join(tempdir, "keyring-version"), "w") as fp:
+ fp.write("1000.0\n")
+ self.assertFalse(self.wrapper.NeedSetupGnuPG())
class SetupGnuPG(RepoWrapperTestCase):
- """Check SetupGnuPG behavior."""
+ """Check SetupGnuPG behavior."""
- def test_full(self):
- """Make sure it works completely."""
- with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
- self.wrapper.home_dot_repo = tempdir
- self.wrapper.gpg_dir = os.path.join(self.wrapper.home_dot_repo, 'gnupg')
- self.assertTrue(self.wrapper.SetupGnuPG(True))
- with open(os.path.join(tempdir, 'keyring-version'), 'r') as fp:
- data = fp.read()
- self.assertEqual('.'.join(str(x) for x in self.wrapper.KEYRING_VERSION),
- data.strip())
+ def test_full(self):
+ """Make sure it works completely."""
+ with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
+ self.wrapper.home_dot_repo = tempdir
+ self.wrapper.gpg_dir = os.path.join(
+ self.wrapper.home_dot_repo, "gnupg"
+ )
+ self.assertTrue(self.wrapper.SetupGnuPG(True))
+ with open(os.path.join(tempdir, "keyring-version"), "r") as fp:
+ data = fp.read()
+ self.assertEqual(
+ ".".join(str(x) for x in self.wrapper.KEYRING_VERSION),
+ data.strip(),
+ )
class VerifyRev(RepoWrapperTestCase):
- """Check verify_rev behavior."""
+ """Check verify_rev behavior."""
- def test_verify_passes(self):
- """Check when we have a valid signed tag."""
- desc_result = self.wrapper.RunResult(0, 'v1.0\n', '')
- gpg_result = self.wrapper.RunResult(0, '', '')
- with mock.patch.object(self.wrapper, 'run_git',
- side_effect=(desc_result, gpg_result)):
- ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
- self.assertEqual('v1.0^0', ret)
+ def test_verify_passes(self):
+ """Check when we have a valid signed tag."""
+ desc_result = self.wrapper.RunResult(0, "v1.0\n", "")
+ gpg_result = self.wrapper.RunResult(0, "", "")
+ with mock.patch.object(
+ self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
+ ):
+ ret = self.wrapper.verify_rev(
+ "/", "refs/heads/stable", "1234", True
+ )
+ self.assertEqual("v1.0^0", ret)
- def test_unsigned_commit(self):
- """Check we fall back to signed tag when we have an unsigned commit."""
- desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
- gpg_result = self.wrapper.RunResult(0, '', '')
- with mock.patch.object(self.wrapper, 'run_git',
- side_effect=(desc_result, gpg_result)):
- ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
- self.assertEqual('v1.0^0', ret)
+ def test_unsigned_commit(self):
+ """Check we fall back to signed tag when we have an unsigned commit."""
+ desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
+ gpg_result = self.wrapper.RunResult(0, "", "")
+ with mock.patch.object(
+ self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
+ ):
+ ret = self.wrapper.verify_rev(
+ "/", "refs/heads/stable", "1234", True
+ )
+ self.assertEqual("v1.0^0", ret)
- def test_verify_fails(self):
- """Check we fall back to signed tag when we have an unsigned commit."""
- desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
- gpg_result = Exception
- with mock.patch.object(self.wrapper, 'run_git',
- side_effect=(desc_result, gpg_result)):
- with self.assertRaises(Exception):
- self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
+ def test_verify_fails(self):
+ """Check we fall back to signed tag when we have an unsigned commit."""
+ desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
+ gpg_result = Exception
+ with mock.patch.object(
+ self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
+ ):
+ with self.assertRaises(Exception):
+ self.wrapper.verify_rev("/", "refs/heads/stable", "1234", True)
class GitCheckoutTestCase(RepoWrapperTestCase):
- """Tests that use a real/small git checkout."""
+ """Tests that use a real/small git checkout."""
- GIT_DIR = None
- REV_LIST = None
+ GIT_DIR = None
+ REV_LIST = None
- @classmethod
- def setUpClass(cls):
- # Create a repo to operate on, but do it once per-class.
- cls.tempdirobj = tempfile.TemporaryDirectory(prefix='repo-rev-tests')
- cls.GIT_DIR = cls.tempdirobj.name
- run_git = wrapper.Wrapper().run_git
+ @classmethod
+ def setUpClass(cls):
+ # Create a repo to operate on, but do it once per-class.
+ cls.tempdirobj = tempfile.TemporaryDirectory(prefix="repo-rev-tests")
+ cls.GIT_DIR = cls.tempdirobj.name
+ run_git = wrapper.Wrapper().run_git
- remote = os.path.join(cls.GIT_DIR, 'remote')
- os.mkdir(remote)
+ remote = os.path.join(cls.GIT_DIR, "remote")
+ os.mkdir(remote)
- # Tests need to assume, that main is default branch at init,
- # which is not supported in config until 2.28.
- if git_command.git_require((2, 28, 0)):
- initstr = '--initial-branch=main'
- else:
- # Use template dir for init.
- templatedir = tempfile.mkdtemp(prefix='.test-template')
- with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
- fp.write('ref: refs/heads/main\n')
- initstr = '--template=' + templatedir
+        # Tests need to assume that main is the default branch at init,
+ # which is not supported in config until 2.28.
+ if git_command.git_require((2, 28, 0)):
+ initstr = "--initial-branch=main"
+ else:
+ # Use template dir for init.
+ templatedir = tempfile.mkdtemp(prefix=".test-template")
+ with open(os.path.join(templatedir, "HEAD"), "w") as fp:
+ fp.write("ref: refs/heads/main\n")
+ initstr = "--template=" + templatedir
- run_git('init', initstr, cwd=remote)
- run_git('commit', '--allow-empty', '-minit', cwd=remote)
- run_git('branch', 'stable', cwd=remote)
- run_git('tag', 'v1.0', cwd=remote)
- run_git('commit', '--allow-empty', '-m2nd commit', cwd=remote)
- cls.REV_LIST = run_git('rev-list', 'HEAD', cwd=remote).stdout.splitlines()
+ run_git("init", initstr, cwd=remote)
+ run_git("commit", "--allow-empty", "-minit", cwd=remote)
+ run_git("branch", "stable", cwd=remote)
+ run_git("tag", "v1.0", cwd=remote)
+ run_git("commit", "--allow-empty", "-m2nd commit", cwd=remote)
+ cls.REV_LIST = run_git(
+ "rev-list", "HEAD", cwd=remote
+ ).stdout.splitlines()
- run_git('init', cwd=cls.GIT_DIR)
- run_git('fetch', remote, '+refs/heads/*:refs/remotes/origin/*', cwd=cls.GIT_DIR)
+ run_git("init", cwd=cls.GIT_DIR)
+ run_git(
+ "fetch",
+ remote,
+ "+refs/heads/*:refs/remotes/origin/*",
+ cwd=cls.GIT_DIR,
+ )
- @classmethod
- def tearDownClass(cls):
- if not cls.tempdirobj:
- return
+ @classmethod
+ def tearDownClass(cls):
+ if not cls.tempdirobj:
+ return
- cls.tempdirobj.cleanup()
+ cls.tempdirobj.cleanup()
class ResolveRepoRev(GitCheckoutTestCase):
- """Check resolve_repo_rev behavior."""
+ """Check resolve_repo_rev behavior."""
- def test_explicit_branch(self):
- """Check refs/heads/branch argument."""
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/stable')
- self.assertEqual('refs/heads/stable', rrev)
- self.assertEqual(self.REV_LIST[1], lrev)
+ def test_explicit_branch(self):
+ """Check refs/heads/branch argument."""
+ rrev, lrev = self.wrapper.resolve_repo_rev(
+ self.GIT_DIR, "refs/heads/stable"
+ )
+ self.assertEqual("refs/heads/stable", rrev)
+ self.assertEqual(self.REV_LIST[1], lrev)
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper.resolve_repo_rev(self.GIT_DIR, "refs/heads/unknown")
- def test_explicit_tag(self):
- """Check refs/tags/tag argument."""
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/v1.0')
- self.assertEqual('refs/tags/v1.0', rrev)
- self.assertEqual(self.REV_LIST[1], lrev)
+ def test_explicit_tag(self):
+ """Check refs/tags/tag argument."""
+ rrev, lrev = self.wrapper.resolve_repo_rev(
+ self.GIT_DIR, "refs/tags/v1.0"
+ )
+ self.assertEqual("refs/tags/v1.0", rrev)
+ self.assertEqual(self.REV_LIST[1], lrev)
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper.resolve_repo_rev(self.GIT_DIR, "refs/tags/unknown")
- def test_branch_name(self):
- """Check branch argument."""
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'stable')
- self.assertEqual('refs/heads/stable', rrev)
- self.assertEqual(self.REV_LIST[1], lrev)
+ def test_branch_name(self):
+ """Check branch argument."""
+ rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "stable")
+ self.assertEqual("refs/heads/stable", rrev)
+ self.assertEqual(self.REV_LIST[1], lrev)
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'main')
- self.assertEqual('refs/heads/main', rrev)
- self.assertEqual(self.REV_LIST[0], lrev)
+ rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "main")
+ self.assertEqual("refs/heads/main", rrev)
+ self.assertEqual(self.REV_LIST[0], lrev)
- def test_tag_name(self):
- """Check tag argument."""
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'v1.0')
- self.assertEqual('refs/tags/v1.0', rrev)
- self.assertEqual(self.REV_LIST[1], lrev)
+ def test_tag_name(self):
+ """Check tag argument."""
+ rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "v1.0")
+ self.assertEqual("refs/tags/v1.0", rrev)
+ self.assertEqual(self.REV_LIST[1], lrev)
- def test_full_commit(self):
- """Check specific commit argument."""
- commit = self.REV_LIST[0]
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
- self.assertEqual(commit, rrev)
- self.assertEqual(commit, lrev)
+ def test_full_commit(self):
+ """Check specific commit argument."""
+ commit = self.REV_LIST[0]
+ rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
+ self.assertEqual(commit, rrev)
+ self.assertEqual(commit, lrev)
- def test_partial_commit(self):
- """Check specific (partial) commit argument."""
- commit = self.REV_LIST[0][0:20]
- rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
- self.assertEqual(self.REV_LIST[0], rrev)
- self.assertEqual(self.REV_LIST[0], lrev)
+ def test_partial_commit(self):
+ """Check specific (partial) commit argument."""
+ commit = self.REV_LIST[0][0:20]
+ rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
+ self.assertEqual(self.REV_LIST[0], rrev)
+ self.assertEqual(self.REV_LIST[0], lrev)
- def test_unknown(self):
- """Check unknown ref/commit argument."""
- with self.assertRaises(self.wrapper.CloneFailure):
- self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')
+ def test_unknown(self):
+ """Check unknown ref/commit argument."""
+ with self.assertRaises(self.wrapper.CloneFailure):
+ self.wrapper.resolve_repo_rev(self.GIT_DIR, "boooooooya")
class CheckRepoVerify(RepoWrapperTestCase):
- """Check check_repo_verify behavior."""
+ """Check check_repo_verify behavior."""
- def test_no_verify(self):
- """Always fail with --no-repo-verify."""
- self.assertFalse(self.wrapper.check_repo_verify(False))
+ def test_no_verify(self):
+ """Always fail with --no-repo-verify."""
+ self.assertFalse(self.wrapper.check_repo_verify(False))
- def test_gpg_initialized(self):
- """Should pass if gpg is setup already."""
- with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=False):
- self.assertTrue(self.wrapper.check_repo_verify(True))
+ def test_gpg_initialized(self):
+ """Should pass if gpg is setup already."""
+ with mock.patch.object(
+ self.wrapper, "NeedSetupGnuPG", return_value=False
+ ):
+ self.assertTrue(self.wrapper.check_repo_verify(True))
- def test_need_gpg_setup(self):
- """Should pass/fail based on gpg setup."""
- with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=True):
- with mock.patch.object(self.wrapper, 'SetupGnuPG') as m:
- m.return_value = True
- self.assertTrue(self.wrapper.check_repo_verify(True))
+ def test_need_gpg_setup(self):
+ """Should pass/fail based on gpg setup."""
+ with mock.patch.object(
+ self.wrapper, "NeedSetupGnuPG", return_value=True
+ ):
+ with mock.patch.object(self.wrapper, "SetupGnuPG") as m:
+ m.return_value = True
+ self.assertTrue(self.wrapper.check_repo_verify(True))
- m.return_value = False
- self.assertFalse(self.wrapper.check_repo_verify(True))
+ m.return_value = False
+ self.assertFalse(self.wrapper.check_repo_verify(True))
class CheckRepoRev(GitCheckoutTestCase):
- """Check check_repo_rev behavior."""
+ """Check check_repo_rev behavior."""
- def test_verify_works(self):
- """Should pass when verification passes."""
- with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
- with mock.patch.object(self.wrapper, 'verify_rev', return_value='12345'):
- rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')
- self.assertEqual('refs/heads/stable', rrev)
- self.assertEqual('12345', lrev)
+ def test_verify_works(self):
+ """Should pass when verification passes."""
+ with mock.patch.object(
+ self.wrapper, "check_repo_verify", return_value=True
+ ):
+ with mock.patch.object(
+ self.wrapper, "verify_rev", return_value="12345"
+ ):
+ rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, "stable")
+ self.assertEqual("refs/heads/stable", rrev)
+ self.assertEqual("12345", lrev)
- def test_verify_fails(self):
- """Should fail when verification fails."""
- with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
- with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
- with self.assertRaises(Exception):
- self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')
+ def test_verify_fails(self):
+ """Should fail when verification fails."""
+ with mock.patch.object(
+ self.wrapper, "check_repo_verify", return_value=True
+ ):
+ with mock.patch.object(
+ self.wrapper, "verify_rev", side_effect=Exception
+ ):
+ with self.assertRaises(Exception):
+ self.wrapper.check_repo_rev(self.GIT_DIR, "stable")
- def test_verify_ignore(self):
- """Should pass when verification is disabled."""
- with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
- rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
- self.assertEqual('refs/heads/stable', rrev)
- self.assertEqual(self.REV_LIST[1], lrev)
+ def test_verify_ignore(self):
+ """Should pass when verification is disabled."""
+ with mock.patch.object(
+ self.wrapper, "verify_rev", side_effect=Exception
+ ):
+ rrev, lrev = self.wrapper.check_repo_rev(
+ self.GIT_DIR, "stable", repo_verify=False
+ )
+ self.assertEqual("refs/heads/stable", rrev)
+ self.assertEqual(self.REV_LIST[1], lrev)
diff --git a/tox.ini b/tox.ini
index 8d3cc43..2575a71 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,6 +27,7 @@
[testenv]
deps =
+ black
pytest
pytest-timeout
commands = {envpython} run_tests {posargs}
diff --git a/wrapper.py b/wrapper.py
index 3099ad5..d882336 100644
--- a/wrapper.py
+++ b/wrapper.py
@@ -19,14 +19,14 @@
def WrapperPath():
- return os.path.join(os.path.dirname(__file__), 'repo')
+ return os.path.join(os.path.dirname(__file__), "repo")
@functools.lru_cache(maxsize=None)
def Wrapper():
- modname = 'wrapper'
- loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
- spec = importlib.util.spec_from_loader(modname, loader)
- module = importlib.util.module_from_spec(spec)
- loader.exec_module(module)
- return module
+ modname = "wrapper"
+ loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
+ spec = importlib.util.spec_from_loader(modname, loader)
+ module = importlib.util.module_from_spec(spec)
+ loader.exec_module(module)
+ return module