mirror of https://gerrit.googlesource.com/git-repo
synced 2025-06-30 20:17:08 +00:00

Compare commits

28 commits (SHA1 only; author and date not captured):

4b325813fc
0578ebf61a
65f51ad29b
80944b538d
89f3ae5ae6
ac29ac397f
cebf227026
7ae210a15b
60fc51bb1d
72325c5f3e
d79a4bc51b
682f0b6426
e7082ccb54
dbfbcb14c1
d0ca0f6814
433977e958
dd37fb2222
af908cb543
74e8ed4bde
2fe84e17b9
1122353683
b6871899be
8e0fe1920e
d086467012
2735bfc5ff
653f8b711b
9bc283e49b
b4a6f6d798
.github/workflows/test-ci.yml (vendored, 5 lines changed)

@@ -14,10 +14,7 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [2.7, 3.6, 3.7, 3.8]
        exclude:
          - os: windows-latest
            python-version: 2.7
        python-version: [3.6, 3.7, 3.8]
    runs-on: ${{ matrix.os }}

    steps:
@@ -134,6 +134,7 @@ User controlled settings are initialized when running `repo init`.
|-------------------|---------------------------|-------------|
| manifest.groups | `--groups` & `--platform` | The manifest groups to sync |
| repo.archive | `--archive` | Use `git archive` for checkouts |
| repo.clonebundle | `--clone-bundle` | Whether the initial sync used clone.bundle explicitly |
| repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
| repo.depth | `--depth` | Create shallow checkouts when cloning |
| repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
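These settings are written as git-config values by `repo init` (the init hunk later in this diff stores `repo.clonebundle` with `m.config.SetString(...)`). A minimal sketch of reading one of them back, assuming the usual client layout where the manifest project's git directory lives at `.repo/manifests.git`; the helper name is illustrative and not part of repo:

```python
import os
import subprocess

def read_repo_setting(client_dir, name):
    """Read a stored setting such as 'repo.clonebundle'; returns None if unset."""
    git_dir = os.path.join(client_dir, '.repo', 'manifests.git')  # assumed layout
    try:
        out = subprocess.check_output(
            ['git', '--git-dir', git_dir, 'config', '--get', name])
    except subprocess.CalledProcessError:
        return None  # `git config --get` exits non-zero when the key is unset
    return out.decode().strip()

# e.g. read_repo_setting('/path/to/checkout', 'repo.clonebundle') -> 'true' or 'false'
```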
@@ -5,6 +5,37 @@ related topics and flows.

[TOC]

## Schedule

There is no specific schedule for when releases are made.
Usually it's more along the lines of "enough minor changes have been merged",
or "there's a known issue the maintainers know should get fixed".
If you find a fix has been merged for an issue important to you, but hasn't been
released after a week or so, feel free to [contact] us to request a new release.

### Release Freezes {#freeze}

We try to observe a regular schedule for when **not** to release.
If something goes wrong, staff need to be active in order to respond quickly &
effectively.
We also don't want to disrupt non-Google organizations if possible.

We generally follow the rules:

* Release during Mon - Thu, 9:00 - 14:00 [US PT]
* Avoid holidays
  * All regular [US holidays]
  * Large international ones if possible
  * All the various [New Years]
    * Jan 1 in Gregorian calendar is the most obvious
    * Check for large Lunar New Years too
* Follow the normal [Google production freeze schedule]

[US holidays]: https://en.wikipedia.org/wiki/Federal_holidays_in_the_United_States
[US PT]: https://en.wikipedia.org/wiki/Pacific_Time_Zone
[New Years]: https://en.wikipedia.org/wiki/New_Year
[Google production freeze schedule]: http://goto.google.com/prod-freeze

## Launcher script

The main repo script serves as a standalone program and is often referred to as

@@ -242,6 +273,7 @@ Things in italics are things we used to care about but probably don't anymore.
| Apr 2020 | **Apr 2030** | | | **20.04 Focal** | 2.25.0 | 2.7.17 3.7.5 |

[contact]: ../README.md#contact
[rel-d]: https://en.wikipedia.org/wiki/Debian_version_history
[rel-g]: https://en.wikipedia.org/wiki/Git#Releases
[rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
@@ -16,6 +16,7 @@

from __future__ import print_function
import os
import re
import sys
import subprocess
import tempfile

@@ -47,6 +48,35 @@ LAST_CWD = None
_ssh_proxy_path = None
_ssh_sock_path = None
_ssh_clients = []
_ssh_version = None


def _run_ssh_version():
  """run ssh -V to display the version number"""
  return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()


def _parse_ssh_version(ver_str=None):
  """parse a ssh version string into a tuple"""
  if ver_str is None:
    ver_str = _run_ssh_version()
  m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
  if m:
    return tuple(int(x) for x in m.group(1).split('.'))
  else:
    return ()


def ssh_version():
  """return ssh version as a tuple"""
  global _ssh_version
  if _ssh_version is None:
    try:
      _ssh_version = _parse_ssh_version()
    except subprocess.CalledProcessError:
      print('fatal: unable to detect ssh version', file=sys.stderr)
      sys.exit(1)
  return _ssh_version


def ssh_sock(create=True):

@@ -57,9 +87,13 @@ def ssh_sock(create=True):
    tmp_dir = '/tmp'
    if not os.path.exists(tmp_dir):
      tmp_dir = tempfile.gettempdir()
    if ssh_version() < (6, 7):
      tokens = '%r@%h:%p'
    else:
      tokens = '%C'  # hash of %l%h%p%r
    _ssh_sock_path = os.path.join(
        tempfile.mkdtemp('', 'ssh-', tmp_dir),
        'master-%r@%h:%p')
        'master-' + tokens)
  return _ssh_sock_path
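The helpers added above parse `ssh -V` output and use the result in `ssh_sock()` to pick the ControlPath token: `%C` (a hash of `%l%h%p%r`, available since OpenSSH 6.7) keeps the socket name short, while older releases need the spelled-out `%r@%h:%p`. A small standalone sketch of that decision, reusing the same regex; the function name is illustrative, not part of repo:

```python
import re

def control_path_token(ssh_v_output):
    """Return the ssh ControlPath token to use for a given `ssh -V` output."""
    m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ssh_v_output)
    version = tuple(int(x) for x in m.group(1).split('.')) if m else ()
    # OpenSSH 6.7 introduced %C; older (or unrecognized) versions get the long form.
    return '%C' if version >= (6, 7) else '%r@%h:%p'

print(control_path_token('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n'))  # %C
print(control_path_token('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n'))  # %r@%h:%p
```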
@@ -362,7 +362,7 @@ class GitConfig(object):
    return c

  def _do(self, *args):
    command = ['config', '--file', self.file]
    command = ['config', '--file', self.file, '--includes']
    command.extend(args)

    p = GitCommand(None,
main.py (2 lines changed)

@@ -614,7 +614,7 @@ def _Main(argv):
      argv = list(sys.argv)
      argv.extend(rce.extra_args)
      try:
        os.execv(__file__, argv)
        os.execv(sys.executable, [__file__] + argv)
      except OSError as e:
        print('fatal: cannot restart repo after upgrade', file=sys.stderr)
        print('fatal: %s' % e, file=sys.stderr)
@@ -284,7 +284,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
  def _ParseGroups(self, groups):
    return [x for x in re.split(r'[,\s]+', groups) if x]

  def Save(self, fd, peg_rev=False, peg_rev_upstream=True, groups=None):
  def Save(self, fd, peg_rev=False, peg_rev_upstream=True, peg_rev_dest_branch=True, groups=None):
    """Write the current manifest out to the given file descriptor.
    """
    mp = self.manifestProject

@@ -389,6 +389,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
          # Only save the origin if the origin is not a sha1, and the default
          # isn't our value
          e.setAttribute('upstream', p.revisionExpr)

        if peg_rev_dest_branch:
          if p.dest_branch:
            e.setAttribute('dest-branch', p.dest_branch)
          elif value != p.revisionExpr:
            e.setAttribute('dest-branch', p.revisionExpr)

      else:
        revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
        if not revision or revision != p.revisionExpr:

@@ -494,6 +501,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
      self._Load()
    return self._manifest_server

  @property
  def CloneBundle(self):
    clone_bundle = self.manifestProject.config.GetBoolean('repo.clonebundle')
    if clone_bundle is None:
      return False if self.manifestProject.config.GetBoolean('repo.partialclone') else True
    else:
      return clone_bundle

  @property
  def CloneFilter(self):
    if self.manifestProject.config.GetBoolean('repo.partialclone'):

@@ -963,6 +978,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
    return project

  def GetProjectPaths(self, name, path):
    # The manifest entries might have trailing slashes. Normalize them to avoid
    # unexpected filesystem behavior since we do string concatenation below.
    path = path.rstrip('/')
    name = name.rstrip('/')
    use_git_worktrees = False
    relpath = path
    if self.IsMirror:

@@ -995,6 +1014,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
    return os.path.relpath(relpath, parent_relpath)

  def GetSubprojectPaths(self, parent, name, path):
    # The manifest entries might have trailing slashes. Normalize them to avoid
    # unexpected filesystem behavior since we do string concatenation below.
    path = path.rstrip('/')
    name = name.rstrip('/')
    relpath = self._JoinRelpath(parent.relpath, path)
    gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
    objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
project.py (62 lines changed)

@@ -55,6 +55,12 @@ else:
  input = raw_input  # noqa: F821


# Maximum sleep time allowed during retries.
MAXIMUM_RETRY_SLEEP_SEC = 3600.0
# +-10% random jitter is added to each Fetches retry sleep duration.
RETRY_JITTER_PERCENT = 0.1


def _lwrite(path, content):
  lock = '%s.lock' % path

@@ -399,8 +405,8 @@ class _LinkFile(object):
    else:
      src = _SafeExpandPath(self.git_worktree, self.src)

    if os.path.exists(src):
      # Entity exists so just a simple one to one link operation.
    if not glob.has_magic(src):
      # Entity does not contain a wild card so just a simple one to one link operation.
      dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
      # dest & src are absolute paths at this point. Make sure the target of
      # the symlink is relative in the context of the repo client checkout.

@@ -408,7 +414,7 @@ class _LinkFile(object):
      self.__linkIt(relpath, dest)
    else:
      dest = _SafeExpandPath(self.topdir, self.dest)
      # Entity doesn't exist assume there is a wild card
      # Entity contains a wild card.
      if os.path.exists(dest) and not platform_utils.isdir(dest):
        _error('Link error: src with wildcard, %s must be a directory', dest)
      else:

@@ -875,6 +881,7 @@ class Project(object):
               is_derived=False,
               dest_branch=None,
               optimized_fetch=False,
               retry_fetches=0,
               old_revision=None):
    """Init a Project object.

@@ -901,6 +908,8 @@ class Project(object):
      dest_branch: The branch to which to push changes for review by default.
      optimized_fetch: If True, when a project is set to a sha1 revision, only
                       fetch from the remote if the sha1 is not present locally.
      retry_fetches: Retry remote fetches n times upon receiving transient error
                     with exponential backoff and jitter.
      old_revision: saved git commit id for open GITC projects.
    """
    self.manifest = manifest

@@ -936,6 +945,7 @@ class Project(object):
    self.use_git_worktrees = use_git_worktrees
    self.is_derived = is_derived
    self.optimized_fetch = optimized_fetch
    self.retry_fetches = max(0, retry_fetches)
    self.subprojects = []

    self.snapshots = {}

@@ -1449,6 +1459,7 @@ class Project(object):
                        tags=True,
                        archive=False,
                        optimized_fetch=False,
                        retry_fetches=0,
                        prune=False,
                        submodules=False,
                        clone_filter=None):

@@ -1532,7 +1543,7 @@ class Project(object):
            current_branch_only=current_branch_only,
            tags=tags, prune=prune, depth=depth,
            submodules=submodules, force_sync=force_sync,
            clone_filter=clone_filter):
            clone_filter=clone_filter, retry_fetches=retry_fetches):
      return False

    mp = self.manifest.manifestProject

@@ -2334,8 +2345,10 @@ class Project(object):
                   depth=None,
                   submodules=False,
                   force_sync=False,
                   clone_filter=None):
                   clone_filter=None,
                   retry_fetches=2,
                   retry_sleep_initial_sec=4.0,
                   retry_exp_factor=2.0):
    is_sha1 = False
    tag_name = None
    # The depth should not be used when fetching to a mirror because
@@ -2497,18 +2510,37 @@ class Project(object):

    cmd.extend(spec)

    ok = False
    for _i in range(2):
    # At least one retry minimum due to git remote prune.
    retry_fetches = max(retry_fetches, 2)
    retry_cur_sleep = retry_sleep_initial_sec
    ok = prune_tried = False
    for try_n in range(retry_fetches):
      gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy,
                          merge_output=True, capture_stdout=quiet)
      ret = gitcmd.Wait()
      if ret == 0:
        ok = True
        break
      # If needed, run the 'git remote prune' the first time through the loop
      elif (not _i and
            "error:" in gitcmd.stderr and
            "git remote prune" in gitcmd.stderr):

      # Retry later due to HTTP 429 Too Many Requests.
      elif ('error:' in gitcmd.stderr and
            'HTTP 429' in gitcmd.stderr):
        if not quiet:
          print('429 received, sleeping: %s sec' % retry_cur_sleep,
                file=sys.stderr)
        time.sleep(retry_cur_sleep)
        retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
                              MAXIMUM_RETRY_SLEEP_SEC)
        retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
                                               RETRY_JITTER_PERCENT))
        continue

      # If this is not last attempt, try 'git remote prune'.
      elif (try_n < retry_fetches - 1 and
            'error:' in gitcmd.stderr and
            'git remote prune' in gitcmd.stderr and
            not prune_tried):
        prune_tried = True
        prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
                              ssh_proxy=ssh_proxy)
        ret = prunecmd.Wait()
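The retry sleep in the hunk above grows exponentially, is capped, and gets +/-10% jitter applied so many clients hitting the same server don't retry in lockstep. A minimal standalone sketch of that update rule, using the constants and defaults shown in the diff (initial sleep 4.0s, factor 2.0):

```python
import random

MAXIMUM_RETRY_SLEEP_SEC = 3600.0  # cap from the diff
RETRY_JITTER_PERCENT = 0.1        # +/-10% jitter from the diff

def next_retry_sleep(cur_sleep, exp_factor=2.0):
    """Compute the sleep to use for the next HTTP 429 retry."""
    cur_sleep = min(exp_factor * cur_sleep, MAXIMUM_RETRY_SLEEP_SEC)
    # Randomize slightly so parallel fetches spread out their retries.
    return cur_sleep * (1 - random.uniform(-RETRY_JITTER_PERCENT,
                                           RETRY_JITTER_PERCENT))

# Starting from the 4.0s initial sleep, successive sleeps are roughly
# 4s, 8s, 16s, ... (each +/-10%), never exceeding an hour.
```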
@@ -2644,7 +2676,9 @@ class Project(object):
      # returned another error with the HTTP error code being 400 or above.
      # This return code only appears if -f, --fail is used.
      if verbose:
        print('Server does not provide clone.bundle; ignoring.')
        print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name)
        if output:
          print('Curl output:\n%s' % output)
      return False
    elif curlret and not verbose and output:
      print('%s' % output, file=sys.stderr)

@@ -2759,7 +2793,7 @@ class Project(object):

    # Enable per-worktree config file support if possible. This is more a
    # nice-to-have feature for users rather than a hard requirement.
    if git_require((2, 19, 0)):
    if git_require((2, 20, 0)):
      self.EnableRepositoryExtension('worktreeConfig')

    # If we have a separate directory to hold refs, initialize it as well.
@@ -18,11 +18,7 @@
This is intended to be run only by the official Repo release managers, but it
could be run by people maintaining their own fork of the project.

NB: Avoid new releases on off-hours. If something goes wrong, staff/oncall need
to be active in order to respond quickly & effectively. Recommend sticking to:
* Mon - Thu, 9:00 - 14:00 PT (i.e. MTV time)
* Avoid US holidays (and large international ones if possible)
* Follow the normal Google production freeze schedule
NB: Check docs/release-process.md for production freeze information.
"""

import argparse
repo (15 lines changed)

@@ -133,7 +133,7 @@ if not REPO_REV:
  REPO_REV = 'stable'

# increment this whenever we make important changes to this script
VERSION = (2, 5)
VERSION = (2, 8)

# increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (2, 3)

@@ -317,9 +317,11 @@ def GetParser(gitc_init=False):
                   help='restrict manifest projects to ones with a specified '
                        'platform group [auto|all|none|linux|darwin|...]',
                   metavar='PLATFORM')
  group.add_option('--clone-bundle', action='store_true',
                   help='enable use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
  group.add_option('--no-clone-bundle',
                   dest='clone_bundle', default=True, action='store_false',
                   help='disable use of /clone.bundle on HTTP/HTTPS')
                   dest='clone_bundle', action='store_false',
                   help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
  group.add_option('--no-tags',
                   dest='tags', default=True, action='store_false',
                   help="don't fetch tags in the manifest")

@@ -502,6 +504,9 @@ def _Init(args, gitc_init=False):
  opt.quiet = opt.output_mode is False
  opt.verbose = opt.output_mode is True

  if opt.clone_bundle is None:
    opt.clone_bundle = False if opt.partial_clone else True

  url = opt.repo_url or REPO_URL
  rev = opt.repo_rev or REPO_REV
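Taken together with the `CloneBundle` property and the `repo init` / `repo sync` hunks elsewhere in this diff, `--clone-bundle` becomes tri-state: an explicit flag wins, then (for later syncs) any stored `repo.clonebundle` value, and otherwise bundles are used unless the checkout is a partial clone. A minimal sketch of that resolution order, using an illustrative helper that is not repo's API:

```python
def resolve_clone_bundle(cli_value, stored_clonebundle, partial_clone):
    """Decide whether /clone.bundle should be used.

    cli_value: True/False if --clone-bundle/--no-clone-bundle was given, else None.
    stored_clonebundle: the repo.clonebundle git-config boolean, or None if unset.
    partial_clone: whether the checkout uses partial git clones.
    """
    if cli_value is not None:           # explicit flag always wins
        return cli_value
    if stored_clonebundle is not None:  # value recorded by `repo init`
        return stored_clonebundle
    return not partial_clone            # default: bundles, unless partial clone
```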
@@ -1169,6 +1174,10 @@ def main(orig_args):
  if my_main:
    repo_main = my_main

  if not repo_main:
    print("fatal: unable to find repo entry point", file=sys.stderr)
    sys.exit(1)

  ver_str = '.'.join(map(str, VERSION))
  me = [sys.executable, repo_main,
        '--repo-dir=%s' % rel_repo_dir,
@@ -179,6 +179,8 @@ without iterating through the remaining projects.
      'annotations': dict((a.name, a.value) for a in project.annotations),
      'gitdir': project.gitdir,
      'worktree': project.worktree,
      'upstream': project.upstream,
      'dest_branch': project.dest_branch,
    }

  def ValidateOptions(self, opt, args):

@@ -317,6 +319,8 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
  setenv('REPO_REMOTE', project['remote_name'])
  setenv('REPO_LREV', project['lrev'])
  setenv('REPO_RREV', project['rrev'])
  setenv('REPO_UPSTREAM', project['upstream'])
  setenv('REPO_DEST_BRANCH', project['dest_branch'])
  setenv('REPO_I', str(cnt + 1))
  for name in project['annotations']:
    setenv("REPO__%s" % (name), project['annotations'][name])

@@ -370,8 +374,8 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
    for s in in_ready:
      buf = s.read().decode()
      if not buf:
        s.close()
        s_in.remove(s)
        s.close()
        continue

      if not opt.verbose:
@@ -16,7 +16,7 @@

from command import PagedCommand
from color import Coloring
from git_refs import R_M
from git_refs import R_M, R_HEADS


class _Coloring(Coloring):

@@ -127,7 +127,10 @@ class Info(PagedCommand):
    if not self.opt.local:
      project.Sync_NetworkHalf(quiet=True, current_branch_only=True)

    logTarget = R_M + self.manifest.manifestProject.config.GetBranch("default").merge
    branch = self.manifest.manifestProject.config.GetBranch('default').merge
    if branch.startswith(R_HEADS):
      branch = branch[len(R_HEADS):]
    logTarget = R_M + branch

    bareTmp = project.bare_git._bare
    project.bare_git._bare = False
@@ -155,9 +155,11 @@ to update the working directory files.
                 help='restrict manifest projects to ones with a specified '
                      'platform group [auto|all|none|linux|darwin|...]',
                 metavar='PLATFORM')
    g.add_option('--clone-bundle', action='store_true',
                 help='force use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
    g.add_option('--no-clone-bundle',
                 dest='clone_bundle', default=True, action='store_false',
                 help='disable use of /clone.bundle on HTTP/HTTPS')
                 dest='clone_bundle', action='store_false',
                 help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
    g.add_option('--no-tags',
                 dest='tags', default=True, action='store_false',
                 help="don't fetch tags in the manifest")

@@ -303,6 +305,11 @@ to update the working directory files.
    else:
      opt.clone_filter = None

    if opt.clone_bundle is None:
      opt.clone_bundle = False if opt.partial_clone else True
    else:
      m.config.SetString('repo.clonebundle', 'true' if opt.clone_bundle else 'false')

    if opt.submodules:
      m.config.SetString('repo.submodules', 'true')

@@ -481,6 +488,9 @@ to update the working directory files.
    if opt.archive and opt.mirror:
      self.OptionParser.error('--mirror and --archive cannot be used together.')

    if args:
      self.OptionParser.error('init takes no arguments')

  def Execute(self, opt, args):
    git_require(MIN_GIT_VERSION_HARD, fail=True)
    if not git_require(MIN_GIT_VERSION_SOFT):
@@ -34,6 +34,12 @@ The manifest and (if present) local_manifest.xml are combined
together to produce a single manifest file. This file can be stored
in a Git repository for use during future 'repo init' invocations.

The -r option can be used to generate a manifest file with project
revisions set to the current commit hash. These are known as
"revision locked manifests", as they don't follow a particular branch.
In this case, the 'upstream' attribute is set to the ref we were on
when the manifest was generated. The 'dest-branch' attribute is set
to indicate the remote ref to push changes to via 'repo upload'.
"""

  @property

@@ -57,6 +63,11 @@ in a Git repository for use during future 'repo init' invocations.
                 help='If in -r mode, do not write the upstream field. '
                      'Only of use if the branch names for a sha1 manifest are '
                      'sensitive.')
    p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
                 default=True, action='store_false',
                 help='If in -r mode, do not write the dest-branch field. '
                      'Only of use if the branch names for a sha1 manifest are '
                      'sensitive.')
    p.add_option('-o', '--output-file',
                 dest='output_file',
                 default='-',

@@ -74,7 +85,8 @@ in a Git repository for use during future 'repo init' invocations.
      fd = open(opt.output_file, 'w')
      self.manifest.Save(fd,
                         peg_rev=opt.peg_rev,
                         peg_rev_upstream=opt.peg_rev_upstream)
                         peg_rev_upstream=opt.peg_rev_upstream,
                         peg_rev_dest_branch=opt.peg_rev_dest_branch)
      fd.close()
      if opt.output_file != '-':
        print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
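For the `-r` mode described above, a pegged project carries its exact commit in `revision`, the ref the checkout was on in `upstream`, and, unless the new `--suppress-dest-branch` flag (`peg_rev_dest_branch=False`) is given, the ref `repo upload` should target in `dest-branch`. A simplified, illustrative sketch of which attributes get written; repo's actual `Save()` logic in the manifest_xml.py hunk earlier handles more cases (sha1 revisions, remote defaults) than shown here:

```python
def pegged_project_attrs(sha1, checked_out_ref, dest_branch=None,
                         peg_rev_upstream=True, peg_rev_dest_branch=True):
    """Roughly which attributes a <project> gets in a revision-locked manifest."""
    attrs = {'revision': sha1}
    if peg_rev_upstream:
        attrs['upstream'] = checked_out_ref        # the ref we were on
    if peg_rev_dest_branch:
        # prefer an explicit dest-branch, else push back to that same ref
        attrs['dest-branch'] = dest_branch or checked_out_ref
    return attrs

# pegged_project_attrs('1122353683...', 'refs/heads/master')
#   -> {'revision': '1122353683...', 'upstream': 'refs/heads/master',
#       'dest-branch': 'refs/heads/master'}
```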
@@ -138,11 +138,11 @@ if the manifest server specified in the manifest file already includes
credentials.

By default, all projects will be synced. The --fail-fast option can be used
to halt syncing as soon as possible when the the first project fails to sync.
to halt syncing as soon as possible when the first project fails to sync.

The --force-sync option can be used to overwrite existing git
directories if they have previously been linked to a different
object direcotry. WARNING: This may cause data to be lost since
object directory. WARNING: This may cause data to be lost since
refs may be removed when overwriting.

The --force-remove-dirty option can be used to remove previously used

@@ -247,8 +247,9 @@ later is required to fix a server side protocol bug.
    p.add_option('-m', '--manifest-name',
                 dest='manifest_name',
                 help='temporary manifest to use for this sync', metavar='NAME.xml')
    p.add_option('--no-clone-bundle',
                 dest='clone_bundle', default=True, action='store_false',
    p.add_option('--clone-bundle', action='store_true',
                 help='enable use of /clone.bundle on HTTP/HTTPS')
    p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
                 help='disable use of /clone.bundle on HTTP/HTTPS')
    p.add_option('-u', '--manifest-server-username', action='store',
                 dest='manifest_server_username',

@@ -265,6 +266,9 @@ later is required to fix a server side protocol bug.
    p.add_option('--optimized-fetch',
                 dest='optimized_fetch', action='store_true',
                 help='only fetch projects fixed to sha1 if revision does not exist locally')
    p.add_option('--retry-fetches',
                 default=0, action='store', type='int',
                 help='number of times to retry fetches on transient errors')
    p.add_option('--prune', dest='prune', action='store_true',
                 help='delete refs that no longer exist on the remote')
    if show_smart:

@@ -342,6 +346,7 @@ later is required to fix a server side protocol bug.
          clone_bundle=opt.clone_bundle,
          tags=opt.tags, archive=self.manifest.IsArchive,
          optimized_fetch=opt.optimized_fetch,
          retry_fetches=opt.retry_fetches,
          prune=opt.prune,
          clone_filter=clone_filter)
      self._fetch_times.Set(project, time.time() - start)

@@ -777,6 +782,7 @@ later is required to fix a server side protocol bug.
          current_branch_only=opt.current_branch_only,
          tags=opt.tags,
          optimized_fetch=opt.optimized_fetch,
          retry_fetches=opt.retry_fetches,
          submodules=self.manifest.HasSubmodules,
          clone_filter=self.manifest.CloneFilter)
      finish = time.time()

@@ -831,6 +837,9 @@ later is required to fix a server side protocol bug.
    smart_sync_manifest_path = os.path.join(
        self.manifest.manifestProject.worktree, 'smart_sync_override.xml')

    if opt.clone_bundle is None:
      opt.clone_bundle = self.manifest.CloneBundle

    if opt.smart_sync or opt.smart_tag:
      manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
    else:
@@ -23,6 +23,7 @@ from command import InteractiveCommand
from editor import Editor
from error import HookError, UploadError
from git_command import GitCommand
from git_refs import R_HEADS
from project import RepoHook

from pyversion import is_python3

@@ -462,7 +463,10 @@ Gerrit Code Review: https://www.gerritcodereview.com/
    # Make sure our local branch is not setup to track a different remote branch
    merge_branch = self._GetMergeBranch(branch.project)
    if destination:
      full_dest = 'refs/heads/%s' % destination
      full_dest = destination
      if not full_dest.startswith(R_HEADS):
        full_dest = R_HEADS + full_dest

      if not opt.dest_branch and merge_branch and merge_branch != full_dest:
        print('merge branch %s does not match destination branch %s'
              % (merge_branch, full_dest))

@@ -592,7 +596,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
      print('\nWARNING: pre-upload hooks failed, but uploading anyways.',
            file=sys.stderr)
    else:
      return
      return 1

    if opt.reviewers:
      reviewers = _SplitEmails(opt.reviewers)
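The destination handling above no longer blindly formats `refs/heads/%s`: the value is kept as given and only prefixed with `R_HEADS` when it isn't already a full ref, which avoids a doubled prefix when a full ref is passed. A standalone sketch of that normalization (helper name illustrative):

```python
R_HEADS = 'refs/heads/'  # same constant exported by git_refs

def normalize_dest_branch(destination):
    """Expand a short branch name to a full ref; leave full refs untouched."""
    if destination.startswith(R_HEADS):
        return destination
    return R_HEADS + destination

assert normalize_dest_branch('master') == 'refs/heads/master'
assert normalize_dest_branch('refs/heads/master') == 'refs/heads/master'
```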
|
@ -43,6 +43,7 @@ class Version(Command, MirrorSafeCommand):
|
||||
rp_ver = rp.bare_git.describe(HEAD)
|
||||
print('repo version %s' % rp_ver)
|
||||
print(' (from %s)' % rem.url)
|
||||
print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD))
|
||||
|
||||
if self.wrapper_path is not None:
|
||||
print('repo launcher version %s' % self.wrapper_version)
|
||||
|
@@ -30,6 +30,33 @@ import git_command
import wrapper


class SSHUnitTest(unittest.TestCase):
  """Tests the ssh functions."""

  def test_ssh_version(self):
    """Check ssh_version() handling."""
    ver = git_command._parse_ssh_version('Unknown\n')
    self.assertEqual(ver, ())
    ver = git_command._parse_ssh_version('OpenSSH_1.0\n')
    self.assertEqual(ver, (1, 0))
    ver = git_command._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
    self.assertEqual(ver, (6, 6, 1))
    ver = git_command._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
    self.assertEqual(ver, (7, 6))

  def test_ssh_sock(self):
    """Check ssh_sock() function."""
    with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
      # old ssh version uses port
      with mock.patch('git_command.ssh_version', return_value=(6, 6)):
        self.assertTrue(git_command.ssh_sock().endswith('%p'))
      git_command._ssh_sock_path = None
      # new ssh version uses hash
      with mock.patch('git_command.ssh_version', return_value=(6, 7)):
        self.assertTrue(git_command.ssh_sock().endswith('%C'))
      git_command._ssh_sock_path = None


class GitCallUnitTest(unittest.TestCase):
  """Tests the _GitCall class (via git_command.git)."""
tests/test_subcmds.py (new file, 43 lines)

@@ -0,0 +1,43 @@
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the subcmds module (mostly __init__.py than subcommands)."""

import unittest

import subcmds


class AllCommands(unittest.TestCase):
  """Check registered all_commands."""

  def test_required_basic(self):
    """Basic checking of registered commands."""
    # NB: We don't test all subcommands as we want to avoid "change detection"
    # tests, so we just look for the most common/important ones here that are
    # unlikely to ever change.
    for cmd in {'cherry-pick', 'help', 'init', 'start', 'sync', 'upload'}:
      self.assertIn(cmd, subcmds.all_commands)

  def test_naming(self):
    """Verify we don't add things that we shouldn't."""
    for cmd in subcmds.all_commands:
      # Reject filename suffixes like "help.py".
      self.assertNotIn('.', cmd)

      # Make sure all '_' were converted to '-'.
      self.assertNotIn('_', cmd)

      # Reject internal python paths like "__init__".
      self.assertFalse(cmd.startswith('__'))
tests/test_subcmds_init.py (new file, 49 lines)

@@ -0,0 +1,49 @@
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the subcmds/init.py module."""

import unittest

from subcmds import init


class InitCommand(unittest.TestCase):
  """Check registered all_commands."""

  def setUp(self):
    self.cmd = init.Init()

  def test_cli_parser_good(self):
    """Check valid command line options."""
    ARGV = (
        [],
    )
    for argv in ARGV:
      opts, args = self.cmd.OptionParser.parse_args(argv)
      self.cmd.ValidateOptions(opts, args)

  def test_cli_parser_bad(self):
    """Check invalid command line options."""
    ARGV = (
        # Too many arguments.
        ['asdf'],

        # Conflicting options.
        ['--mirror', '--archive'],
    )
    for argv in ARGV:
      opts, args = self.cmd.OptionParser.parse_args(argv)
      with self.assertRaises(SystemExit):
        self.cmd.ValidateOptions(opts, args)
@@ -304,6 +304,7 @@ class SetupGnuPG(RepoWrapperTestCase):
    """Make sure it works completely."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      self.wrapper.gpg_dir = os.path.join(self.wrapper.home_dot_repo, 'gnupg')
      self.assertTrue(self.wrapper.SetupGnuPG(True))
      with open(os.path.join(tempdir, 'keyring-version'), 'r') as fp:
        data = fp.read()
tox.ini (8 lines changed)

@@ -15,11 +15,10 @@
# https://tox.readthedocs.io/

[tox]
envlist = py27, py36, py37, py38
envlist = py36, py37, py38

[gh-actions]
python =
    2.7: py27
    3.6: py36
    3.7: py37
    3.8: py38

@@ -31,8 +30,3 @@ setenv =
    GIT_AUTHOR_NAME = Repo test author
    GIT_COMMITTER_NAME = Repo test committer
    EMAIL = repo@gerrit.nodomain

[testenv:py27]
deps =
    mock
    pytest