Mirror of https://gerrit.googlesource.com/git-repo
Synced 2025-06-26 20:17:52 +00:00

Compare commits (10 commits)
Commits:
- acc4c857a0
- a39af3d432
- 4cdfdb7734
- 1eddca8476
- aefa4d3a29
- 4ba29c42ca
- 45ef9011c2
- 891e8f72ce
- af8fb132d5
- 4112c07688
docs/manifest-format.md

@@ -105,6 +105,8 @@ following DTD:
   <!ATTLIST extend-project groups CDATA #IMPLIED>
   <!ATTLIST extend-project revision CDATA #IMPLIED>
   <!ATTLIST extend-project remote CDATA #IMPLIED>
+  <!ATTLIST extend-project dest-branch CDATA #IMPLIED>
+  <!ATTLIST extend-project upstream CDATA #IMPLIED>
 
   <!ELEMENT remove-project EMPTY>
   <!ATTLIST remove-project name CDATA #REQUIRED>
@@ -423,6 +425,12 @@ project. Same syntax as the corresponding element of `project`.
 Attribute `remote`: If specified, overrides the remote of the original
 project. Same syntax as the corresponding element of `project`.
 
+Attribute `dest-branch`: If specified, overrides the dest-branch of the original
+project. Same syntax as the corresponding element of `project`.
+
+Attribute `upstream`: If specified, overrides the upstream of the original
+project. Same syntax as the corresponding element of `project`.
+
 ### Element annotation
 
 Zero or more annotation elements may be specified as children of a
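Note: the two new attributes follow the same pattern as the existing `extend-project` overrides. A rough sketch of how such `#IMPLIED` attributes surface through an XML parser, using only the stdlib `xml.dom.minidom` (the helper function and sample manifest are illustrative, not repo's actual API):

```python
import xml.dom.minidom

MANIFEST = """\
<manifest>
  <extend-project name="myproject" dest-branch="bar" upstream="baz" />
</manifest>
"""


def extend_overrides(xml_text):
  """Yield the override attributes of each extend-project element."""
  doc = xml.dom.minidom.parseString(xml_text)
  for node in doc.getElementsByTagName('extend-project'):
    # getAttribute() returns '' for #IMPLIED attributes that are absent.
    yield {attr: node.getAttribute(attr) or None
           for attr in ('name', 'revision', 'remote', 'dest-branch', 'upstream')}


print(list(extend_overrides(MANIFEST)))
# [{'name': 'myproject', 'revision': None, 'remote': None,
#   'dest-branch': 'bar', 'upstream': 'baz'}]
```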
manifest_xml.py

@@ -1289,6 +1289,8 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
           remote = self._default.remote
         else:
           remote = self._get_remote(node)
+        dest_branch = node.getAttribute('dest-branch')
+        upstream = node.getAttribute('upstream')
 
         named_projects = self._projects[name]
         if dest_path and not path and len(named_projects) > 1:
@@ -1304,6 +1306,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
 
           if remote_name:
             p.remote = remote.ToRemoteSpec(name)
+          if dest_branch:
+            p.dest_branch = dest_branch
+          if upstream:
+            p.upstream = upstream
 
           if dest_path:
             del self._paths[p.relpath]
@@ -1940,11 +1946,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     fromKeys = sorted(fromProjects.keys())
     toKeys = sorted(toProjects.keys())
 
-    diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
+    diff = {'added': [], 'removed': [], 'missing': [], 'changed': [], 'unreachable': []}
 
     for proj in fromKeys:
       if proj not in toKeys:
         diff['removed'].append(fromProjects[proj])
+      elif not fromProjects[proj].Exists:
+        diff['missing'].append(toProjects[proj])
+        toKeys.remove(proj)
       else:
         fromProj = fromProjects[proj]
         toProj = toProjects[proj]
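Note: the new 'missing' bucket separates projects that exist in both manifests but were never synced locally from genuinely removed ones. A toy version of that categorization (stand-in dicts; repo's real code works on Project objects and an `Exists` property):

```python
def categorize(from_projects, to_projects):
  """Toy three-way bucketing mirroring the new 'missing' category."""
  diff = {'added': [], 'removed': [], 'missing': [], 'changed': []}
  to_keys = sorted(to_projects)
  for name in sorted(from_projects):
    if name not in to_keys:
      diff['removed'].append(name)
    elif not from_projects[name]['exists']:
      # In both manifests, but never checked out locally.
      diff['missing'].append(name)
      to_keys.remove(name)
    else:
      to_keys.remove(name)
      if from_projects[name]['rev'] != to_projects[name]['rev']:
        diff['changed'].append(name)
  diff['added'] = to_keys
  return diff


old = {'a': {'exists': True, 'rev': '1'}, 'b': {'exists': False, 'rev': '1'}}
new = {'a': {'exists': True, 'rev': '2'}, 'b': {'exists': True, 'rev': '1'}}
print(categorize(old, new))
# {'added': [], 'removed': [], 'missing': ['b'], 'changed': ['a']}
```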
project.py (71 lines changed)
@@ -26,6 +26,7 @@ import sys
 import tarfile
 import tempfile
 import time
+from typing import NamedTuple
 import urllib.parse
 
 from color import Coloring
@@ -45,6 +46,14 @@ from repo_trace import IsTrace, Trace
 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
 
 
+class SyncNetworkHalfResult(NamedTuple):
+  """Sync_NetworkHalf return value."""
+  # True if successful.
+  success: bool
+  # Did we query the remote? False when optimized_fetch is True and we have the
+  # commit already present.
+  remote_fetched: bool
+
 # Maximum sleep time allowed during retries.
 MAXIMUM_RETRY_SLEEP_SEC = 3600.0
 # +-10% random jitter is added to each Fetches retry sleep duration.
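Note: a NamedTuple is a non-empty tuple and therefore always truthy, so callers that used to test the plain boolean return (`if not rp.Sync_NetworkHalf():`) must now read `.success` explicitly; the `subcmds/selfupdate.py` and `subcmds/sync.py` hunks below make exactly that change. A minimal self-contained illustration:

```python
from typing import NamedTuple


class SyncNetworkHalfResult(NamedTuple):
  success: bool
  remote_fetched: bool


result = SyncNetworkHalfResult(success=False, remote_fetched=True)

if not result:
  # Never reached: a 2-tuple is truthy even when success is False.
  raise AssertionError('unreachable')

if not result.success:
  # The migrated check reads the field and correctly sees the failure.
  print('fetch failed; remote was queried:', result.remote_fetched)
```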
@@ -1133,7 +1142,7 @@ class Project(object):
     if archive and not isinstance(self, MetaProject):
       if self.remote.url.startswith(('http://', 'https://')):
         _error("%s: Cannot fetch archives from http/https remotes.", self.name)
-        return False
+        return SyncNetworkHalfResult(False, False)
 
       name = self.relpath.replace('\\', '/')
       name = name.replace('/', '_')
@@ -1144,19 +1153,19 @@ class Project(object):
         self._FetchArchive(tarpath, cwd=topdir)
       except GitError as e:
         _error('%s', e)
-        return False
+        return SyncNetworkHalfResult(False, False)
 
       # From now on, we only need absolute tarpath
       tarpath = os.path.join(topdir, tarpath)
 
       if not self._ExtractArchive(tarpath, path=topdir):
-        return False
+        return SyncNetworkHalfResult(False, True)
       try:
         platform_utils.remove(tarpath)
       except OSError as e:
         _warn("Cannot remove archive %s: %s", tarpath, str(e))
       self._CopyAndLinkFiles()
-      return True
+      return SyncNetworkHalfResult(True, True)
 
     # If the shared object dir already exists, don't try to rebootstrap with a
     # clone bundle download. We should have the majority of objects already.
@@ -1220,9 +1229,11 @@ class Project(object):
       depth = self.manifest.manifestProject.depth
 
     # See if we can skip the network fetch entirely.
+    remote_fetched = False
     if not (optimized_fetch and
             (ID_RE.match(self.revisionExpr) and
              self._CheckForImmutableRevision())):
+      remote_fetched = True
       if not self._RemoteFetch(
             initial=is_new,
             quiet=quiet, verbose=verbose, output_redir=output_redir,
@@ -1231,7 +1242,7 @@ class Project(object):
             submodules=submodules, force_sync=force_sync,
             ssh_proxy=ssh_proxy,
             clone_filter=clone_filter, retry_fetches=retry_fetches):
-        return False
+        return SyncNetworkHalfResult(False, remote_fetched)
 
     mp = self.manifest.manifestProject
     dissociate = mp.dissociate
@@ -1244,7 +1255,7 @@ class Project(object):
           if p.stdout and output_redir:
             output_redir.write(p.stdout)
           if p.Wait() != 0:
-            return False
+            return SyncNetworkHalfResult(False, remote_fetched)
         platform_utils.remove(alternates_file)
 
     if self.worktree:
@@ -1253,7 +1264,7 @@ class Project(object):
       self._InitMirrorHead()
       platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'),
                             missing_ok=True)
-    return True
+    return SyncNetworkHalfResult(True, remote_fetched)
 
   def PostRepoUpgrade(self):
     self._InitHooks()
@@ -1451,6 +1462,8 @@ class Project(object):
         cnt_mine += 1
 
     if not upstream_gain and cnt_mine == len(local_changes):
+      # The copy/linkfile config may have changed.
+      self._CopyAndLinkFiles()
       return
 
     if self.IsDirty(consider_untracked=False):
@@ -2794,35 +2807,6 @@ class Project(object):
       else:
         raise
 
-  def _InitialCheckoutStart(self):
-    """Called when checking out a project for the first time.
-
-    This will use temporary non-visible paths so we can be safely interrupted
-    without leaving incomplete state behind.
-    """
-    paths = [f'{x}.tmp' for x in (self.relpath, self.worktree, self.gitdir, self.objdir)]
-    for p in paths:
-      platform_utils.rmtree(p, ignore_errors=True)
-    self.UpdatePaths(*paths)
-
-  def _InitialCheckoutFinalizeNetworkHalf(self):
-    """Finalize the object dirs after network syncing works."""
-    # Once the network half finishes, we can move the objects into the right
-    # place by removing the ".tmp" suffix on the dirs.
-    platform_utils.rmtree(self.gitdir[:-4], ignore_errors=True)
-    os.rename(self.gitdir, self.gitdir[:-4])
-    self.UpdatePaths(self.relpath, self.worktree, self.gitdir[:-4], self.objdir[:-4])
-
-  def _InitialCheckoutFinalizeLocalHalf(self):
-    """Finalize the initial checkout and make it available."""
-    assert self.gitdir == self.objdir
-    # Once the local half finishes, we can move the manifest dir into the right
-    # place by removing the ".tmp" suffix on the dirs.
-    platform_utils.rmtree(self.worktree[:-4], ignore_errors=True)
-    os.rename(self.worktree, self.worktree[:-4])
-    self.UpdatePaths(
-        self.relpath[:-4], self.worktree[:-4], self.gitdir, self.objdir)
-
   def _InitGitWorktree(self):
     """Init the project using git worktrees."""
     self.bare_git.worktree('prune')
@@ -3709,8 +3693,6 @@ class ManifestProject(MetaProject):
             (GitConfig.ForUser().UrlInsteadOf(manifest_url),),
             file=sys.stderr)
 
-    self._InitialCheckoutStart()
-
     # The manifest project object doesn't keep track of the path on the
     # server where this git is located, so let's save that here.
     mirrored_manifest_git = None
@@ -3867,17 +3849,19 @@ class ManifestProject(MetaProject):
         is_new=is_new, quiet=not verbose, verbose=verbose,
         clone_bundle=clone_bundle, current_branch_only=current_branch_only,
         tags=tags, submodules=submodules, clone_filter=clone_filter,
-        partial_clone_exclude=self.manifest.PartialCloneExclude):
+        partial_clone_exclude=self.manifest.PartialCloneExclude).success:
       r = self.GetRemote()
       print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
+
+      # Better delete the manifest git dir if we created it; otherwise next
+      # time (when user fixes problems) we won't go through the "is_new" logic.
+      if is_new:
+        platform_utils.rmtree(self.gitdir)
       return False
 
     if manifest_branch:
       self.MetaBranchSwitch(submodules=submodules)
 
-    if is_new:
-      self._InitialCheckoutFinalizeNetworkHalf()
-
     syncbuf = SyncBuffer(self.config)
     self.Sync_LocalHalf(syncbuf, submodules=submodules)
     syncbuf.Finish()
@@ -3900,9 +3884,6 @@ class ManifestProject(MetaProject):
       with open(dest, 'wb') as f:
         f.write(manifest_data)
 
-      if is_new:
-        self._InitialCheckoutFinalizeLocalHalf()
-
       try:
         self.manifest.Link(manifest_name)
       except ManifestParseError as e:
release/update-manpages

@@ -59,18 +59,26 @@ def main(argv):
   version = RepoSourceVersion()
   cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}',
               '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}',
-              '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), TOPDIR.joinpath('repo'),
+              '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo',
               '-h', f'help {cmd}'] for cmd in subcmds.all_commands]
   cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git',
                   '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}',
-                  '-o', MANDIR.joinpath('repo.1.tmp'), TOPDIR.joinpath('repo'),
+                  '-o', MANDIR.joinpath('repo.1.tmp'), './repo',
                   '-h', '--help-all'])
 
   with tempfile.TemporaryDirectory() as tempdir:
-    repo_dir = Path(tempdir) / '.repo'
+    tempdir = Path(tempdir)
+    repo_dir = tempdir / '.repo'
     repo_dir.mkdir()
     (repo_dir / 'repo').symlink_to(TOPDIR)
 
+    # Create a repo wrapper using the active Python executable. We can't pass
+    # this directly to help2man as it's too simple, so insert it via shebang.
+    data = (TOPDIR / 'repo').read_text(encoding='utf-8')
+    tempbin = tempdir / 'repo'
+    tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8')
+    tempbin.chmod(0o755)
+
     # Run all cmd in parallel, and wait for them to finish.
     with multiprocessing.Pool() as pool:
       pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
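Note on the wrapper trick above: help2man executes the program it documents, so the script is copied with a `#!{sys.executable}` shebang to force it to run under the same interpreter running the build. A standalone sketch of the same technique (POSIX only; the script body here is made up):

```python
import os
import subprocess
import sys
import tempfile

script_body = "import sys\nprint('ran under', sys.executable)\n"

with tempfile.TemporaryDirectory() as tmp:
  tempbin = os.path.join(tmp, 'demo')
  with open(tempbin, 'w', encoding='utf-8') as f:
    # Prepend a shebang naming the current interpreter, then the real body.
    f.write(f'#!{sys.executable}\n' + script_body)
  os.chmod(tempbin, 0o755)
  # The kernel honors the shebang, so no explicit interpreter is needed.
  subprocess.run([tempbin], check=True)
```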
subcmds/diffmanifests.py

@@ -118,6 +118,16 @@ synced and their revisions won't be found.
       self.printRevision(project.revisionExpr)
       self.out.nl()
 
+    if diff['missing']:
+      self.out.nl()
+      self.printText('missing projects : \n')
+      self.out.nl()
+      for project in diff['missing']:
+        self.printProject('\t%s' % (project.relpath))
+        self.printText(' at revision ')
+        self.printRevision(project.revisionExpr)
+        self.out.nl()
+
     if diff['changed']:
       self.out.nl()
       self.printText('changed projects : \n')
subcmds/selfupdate.py

@@ -51,7 +51,7 @@ need to be performed by an end-user.
       _PostRepoUpgrade(self.manifest)
 
     else:
-      if not rp.Sync_NetworkHalf():
+      if not rp.Sync_NetworkHalf().success:
        print("error: can't update repo", file=sys.stderr)
        sys.exit(1)
subcmds/sync.py (161 lines changed)
@@ -21,10 +21,12 @@ import multiprocessing
 import netrc
 from optparse import SUPPRESS_HELP
 import os
+import shutil
 import socket
 import sys
 import tempfile
 import time
+from typing import NamedTuple
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -58,11 +60,68 @@ from error import RepoChangedException, GitError, ManifestParseError
 import platform_utils
 from project import SyncBuffer
 from progress import Progress
 from repo_trace import IsTrace, Trace
 import ssh
 from wrapper import Wrapper
 from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
+# Env var to implicitly turn off object backups.
+REPO_BACKUP_OBJECTS = 'REPO_BACKUP_OBJECTS'
+
+_BACKUP_OBJECTS = os.environ.get(REPO_BACKUP_OBJECTS) != '0'
+
+
+class _FetchOneResult(NamedTuple):
+  """_FetchOne return value.
+
+  Attributes:
+    success (bool): True if successful.
+    project (Project): The fetched project.
+    start (float): The starting time.time().
+    finish (float): The ending time.time().
+    remote_fetched (bool): True if the remote was actually queried.
+  """
+  success: bool
+  project: Project
+  start: float
+  finish: float
+  remote_fetched: bool
+
+
+class _FetchResult(NamedTuple):
+  """_Fetch return value.
+
+  Attributes:
+    success (bool): True if successful.
+    projects (set[str]): The names of the git directories of fetched projects.
+  """
+  success: bool
+  projects: set[str]
+
+
+class _FetchMainResult(NamedTuple):
+  """_FetchMain return value.
+
+  Attributes:
+    all_projects (list[Project]): The fetched projects.
+  """
+  all_projects: list[Project]
+
+
+class _CheckoutOneResult(NamedTuple):
+  """_CheckoutOne return value.
+
+  Attributes:
+    success (bool): True if successful.
+    project (Project): The project.
+    start (float): The starting time.time().
+    finish (float): The ending time.time().
+  """
+  success: bool
+  project: Project
+  start: float
+  finish: float
+
 
 class Sync(Command, MirrorSafeCommand):
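Note the opt-out convention for `REPO_BACKUP_OBJECTS`: backups stay enabled unless the variable is set to exactly the string '0'; unset, empty, or any other value keeps them on. A quick sketch of just that comparison (plain dicts standing in for `os.environ`):

```python
def backup_enabled(environ):
  # Mirrors the _BACKUP_OBJECTS expression: only an explicit '0' disables.
  return environ.get('REPO_BACKUP_OBJECTS') != '0'


assert backup_enabled({}) is True                             # unset
assert backup_enabled({'REPO_BACKUP_OBJECTS': ''}) is True    # empty string
assert backup_enabled({'REPO_BACKUP_OBJECTS': '1'}) is True   # any other value
assert backup_enabled({'REPO_BACKUP_OBJECTS': '0'}) is False  # explicit opt-out
```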
@@ -406,7 +465,7 @@ later is required to fix a server side protocol bug.
     success = False
     buf = io.StringIO()
     try:
-      success = project.Sync_NetworkHalf(
+      sync_result = project.Sync_NetworkHalf(
         quiet=opt.quiet,
         verbose=opt.verbose,
         output_redir=buf,
@@ -420,6 +479,7 @@ later is required to fix a server side protocol bug.
         ssh_proxy=self.ssh_proxy,
         clone_filter=project.manifest.CloneFilter,
         partial_clone_exclude=project.manifest.PartialCloneExclude)
+      success = sync_result.success
 
       output = buf.getvalue()
       if (opt.verbose or not success) and output:
@@ -437,7 +497,8 @@ later is required to fix a server side protocol bug.
       raise
 
     finish = time.time()
-    return (success, project, start, finish)
+    return _FetchOneResult(success, project, start, finish,
+                           sync_result.remote_fetched)
 
   @classmethod
   def _FetchInitChild(cls, ssh_proxy):
@@ -448,6 +509,7 @@ later is required to fix a server side protocol bug.
 
     jobs = opt.jobs_network
     fetched = set()
+    remote_fetched = set()
     pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
 
     objdir_project_map = dict()
@@ -458,10 +520,16 @@ later is required to fix a server side protocol bug.
     def _ProcessResults(results_sets):
       ret = True
       for results in results_sets:
-        for (success, project, start, finish) in results:
+        for result in results:
+          success = result.success
+          project = result.project
+          start = result.start
+          finish = result.finish
           self._fetch_times.Set(project, finish - start)
           self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
                                  start, finish, success)
+          if result.remote_fetched:
+            remote_fetched.add(project)
           # Check for any errors before running any more tasks.
           # ...we'll let existing jobs finish, though.
           if not success:
@@ -519,7 +587,7 @@ later is required to fix a server side protocol bug.
     if not self.outer_client.manifest.IsArchive:
       self._GCProjects(projects, opt, err_event)
 
-    return (ret, fetched)
+    return _FetchResult(ret, fetched)
 
   def _FetchMain(self, opt, args, all_projects, err_event,
                  ssh_proxy, manifest):
@@ -545,7 +613,9 @@ later is required to fix a server side protocol bug.
       to_fetch.extend(all_projects)
       to_fetch.sort(key=self._fetch_times.Get, reverse=True)
 
-    success, fetched = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+    result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+    success = result.success
+    fetched = result.projects
     if not success:
       err_event.set()
 
@@ -555,7 +625,7 @@ later is required to fix a server side protocol bug.
     if err_event.is_set():
       print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
       sys.exit(1)
-      return
+      return _FetchMainResult([])
 
     # Iteratively fetch missing and/or nested unregistered submodules
     previously_missing_set = set()
@@ -578,12 +648,14 @@ later is required to fix a server side protocol bug.
       if previously_missing_set == missing_set:
         break
       previously_missing_set = missing_set
-      success, new_fetched = self._Fetch(missing, opt, err_event, ssh_proxy)
+      result = self._Fetch(missing, opt, err_event, ssh_proxy)
+      success = result.success
+      new_fetched = result.projects
       if not success:
         err_event.set()
       fetched.update(new_fetched)
 
-    return all_projects
+    return _FetchMainResult(all_projects)
 
   def _CheckoutOne(self, detach_head, force_sync, project):
     """Checkout work tree for one project
@@ -615,7 +687,7 @@ later is required to fix a server side protocol bug.
     if not success:
       print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
     finish = time.time()
-    return (success, project, start, finish)
+    return _CheckoutOneResult(success, project, start, finish)
 
   def _Checkout(self, all_projects, opt, err_results):
     """Checkout projects listed in all_projects
@@ -630,7 +702,11 @@ later is required to fix a server side protocol bug.
 
     def _ProcessResults(pool, pm, results):
       ret = True
-      for (success, project, start, finish) in results:
+      for result in results:
+        success = result.success
+        project = result.project
+        start = result.start
+        finish = result.finish
         self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
                                start, finish, success)
         # Check for any errors before running any more tasks.
@@ -652,6 +728,36 @@ later is required to fix a server side protocol bug.
         callback=_ProcessResults,
         output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
 
+  def _backup_cruft(self, bare_git):
+    """Save a copy of any cruft from `git gc`."""
+    # Find any cruft packs in the current gitdir, and save them.
+    # b/221065125 (repo sync complains that objects are missing). This does
+    # not prevent that state, but makes it so that the missing objects are
+    # available.
+    objdir = bare_git._project.objdir
+    pack_dir = os.path.join(objdir, 'pack')
+    bak_dir = os.path.join(objdir, '.repo', 'pack.bak')
+    if not _BACKUP_OBJECTS or not platform_utils.isdir(pack_dir):
+      return
+    saved = []
+    files = set(platform_utils.listdir(pack_dir))
+    to_backup = []
+    for f in files:
+      base, ext = os.path.splitext(f)
+      if base + '.mtimes' in files:
+        to_backup.append(f)
+    if to_backup:
+      os.makedirs(bak_dir, exist_ok=True)
+    for fname in to_backup:
+      bak_fname = os.path.join(bak_dir, fname)
+      if not os.path.exists(bak_fname):
+        saved.append(fname)
+        # Use a tmp file so that we are sure of a complete copy.
+        shutil.copy(os.path.join(pack_dir, fname), bak_fname + '.tmp')
+        shutil.move(bak_fname + '.tmp', bak_fname)
+    if saved:
+      Trace('%s saved %s', bare_git._project.name, ' '.join(saved))
+
   def _GCProjects(self, projects, opt, err_event):
     pm = Progress('Garbage collecting', len(projects), delay=False, quiet=opt.quiet)
     pm.update(inc=0, msg='prescan')
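Note: `git gc --cruft` (git 2.37+) writes a `.mtimes` file next to each cruft pack, and that sibling file is how `_backup_cruft` decides which pack files to save (the `.mtimes` file matches itself, so it is saved along with the pack). A standalone version of the detection (file names invented for the example):

```python
import os


def find_backup_candidates(files):
  """Return files whose pack has a .mtimes sibling, mirroring _backup_cruft."""
  names = set(files)
  picked = []
  for f in names:
    base, _ext = os.path.splitext(f)
    if base + '.mtimes' in names:
      picked.append(f)
  return sorted(picked)


files = ['pack-abc.pack', 'pack-abc.idx', 'pack-abc.mtimes',
         'pack-def.pack', 'pack-def.idx']
print(find_backup_candidates(files))
# ['pack-abc.idx', 'pack-abc.mtimes', 'pack-abc.pack']
```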
@@ -694,13 +800,22 @@ later is required to fix a server side protocol bug.
 
     jobs = opt.jobs
 
+    gc_args = ['--auto']
+    backup_cruft = False
+    if git_require((2, 37, 0)):
+      gc_args.append('--cruft')
+      backup_cruft = True
+    pack_refs_args = ()
     if jobs < 2:
       for (run_gc, bare_git) in tidy_dirs.values():
         pm.update(msg=bare_git._project.name)
 
         if run_gc:
-          bare_git.gc('--auto')
+          bare_git.gc(*gc_args)
         else:
-          bare_git.pack_refs()
+          bare_git.pack_refs(*pack_refs_args)
+        if backup_cruft:
+          self._backup_cruft(bare_git)
       pm.end()
       return
 
@@ -715,15 +830,17 @@ later is required to fix a server side protocol bug.
       try:
         try:
           if run_gc:
-            bare_git.gc('--auto', config=config)
+            bare_git.gc(*gc_args, config=config)
           else:
-            bare_git.pack_refs(config=config)
+            bare_git.pack_refs(*pack_refs_args, config=config)
         except GitError:
           err_event.set()
       except Exception:
         err_event.set()
         raise
+      finally:
+        if backup_cruft:
+          self._backup_cruft(bare_git)
       pm.finish(bare_git._project.name)
       sem.release()
@@ -1071,14 +1188,13 @@ later is required to fix a server side protocol bug.
             file=sys.stderr)
 
     for m in self.ManifestList(opt):
-      mp = m.manifestProject
-      is_standalone_manifest = bool(mp.standalone_manifest_url)
-      if not is_standalone_manifest:
-        mp.PreSync()
-      if opt.repo_upgraded:
-        _PostRepoUpgrade(m, quiet=opt.quiet)
+      if not m.manifestProject.standalone_manifest_url:
+        m.manifestProject.PreSync()
+
+    if opt.repo_upgraded:
+      _PostRepoUpgrade(manifest, quiet=opt.quiet)
 
     mp = manifest.manifestProject
     if opt.mp_update:
       self._UpdateAllManifestProjects(opt, mp, manifest_name)
     else:
|
||||
with ssh.ProxyManager(manager) as ssh_proxy:
|
||||
# Initialize the socket dir once in the parent.
|
||||
ssh_proxy.sock()
|
||||
all_projects = self._FetchMain(opt, args, all_projects, err_event,
|
||||
ssh_proxy, manifest)
|
||||
result = self._FetchMain(opt, args, all_projects, err_event,
|
||||
ssh_proxy, manifest)
|
||||
all_projects = result.all_projects
|
||||
|
||||
if opt.network_only:
|
||||
return
|
||||
|
tests/test_manifest_xml.py

@@ -874,3 +874,27 @@ class ExtendProjectElementTests(ManifestParseTestCase):
     else:
       self.assertEqual(manifest.projects[0].relpath, 'bar')
       self.assertEqual(manifest.projects[1].relpath, 'y')
+
+  def test_extend_project_dest_branch(self):
+    manifest = self.getXmlManifest("""
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" dest-branch="foo" />
+  <project name="myproject" />
+  <extend-project name="myproject" dest-branch="bar" />
+</manifest>
+""")
+    self.assertEqual(len(manifest.projects), 1)
+    self.assertEqual(manifest.projects[0].dest_branch, 'bar')
+
+  def test_extend_project_upstream(self):
+    manifest = self.getXmlManifest("""
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="myproject" />
+  <extend-project name="myproject" upstream="bar" />
+</manifest>
+""")
+    self.assertEqual(len(manifest.projects), 1)
+    self.assertEqual(manifest.projects[0].upstream, 'bar')