1
mirror of https://gerrit.googlesource.com/git-repo synced 2024-12-31 16:14:25 +00:00

Refactor errors for sync command

Per the discussion in go/repo-error-update, updated the aggregated and exit
errors for the sync command.

Aggregated errors are errors that result in eventual command failure.
Exit errors are errors that result in immediate command failure.

Also updated main.py to log aggregated and exit errors to the git sessions
log.

Bug: b/293344017
Change-Id: I77a21f14da32fe2e68c16841feb22de72e86a251
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/379614
Reviewed-by: Aravind Vasudevan <aravindvasudev@google.com>
Tested-by: Jason Chang <jasonnc@google.com>
Commit-Queue: Jason Chang <jasonnc@google.com>
This commit is contained in:
Jason Chang 2023-07-14 16:45:35 -07:00 committed by LUCI
parent a6413f5d88
commit 32b59565b7
5 changed files with 442 additions and 143 deletions

56
main.py
View File

@ -30,6 +30,7 @@ import sys
import textwrap
import time
import urllib.request
import json
try:
import kerberos
@ -50,10 +51,12 @@ from editor import Editor
from error import DownloadError
from error import InvalidProjectGroupsError
from error import ManifestInvalidRevisionError
from error import ManifestParseError
from error import NoManifestException
from error import NoSuchProjectError
from error import RepoChangedException
from error import RepoExitError
from error import RepoUnhandledExceptionError
from error import RepoError
import gitc_utils
from manifest_xml import GitcClient, RepoClient
from pager import RunPager, TerminatePager
@ -97,6 +100,7 @@ else:
)
KEYBOARD_INTERRUPT_EXIT = 128 + signal.SIGINT
MAX_PRINT_ERRORS = 5
global_options = optparse.OptionParser(
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]",
@ -422,10 +426,33 @@ class _Repo(object):
"""
try:
execute_command_helper()
except (KeyboardInterrupt, SystemExit, Exception) as e:
except (
KeyboardInterrupt,
SystemExit,
Exception,
RepoExitError,
) as e:
ok = isinstance(e, SystemExit) and not e.code
exception_name = type(e).__name__
if isinstance(e, RepoUnhandledExceptionError):
exception_name = type(e.error).__name__
if isinstance(e, RepoExitError):
aggregated_errors = e.aggregate_errors or []
for error in aggregated_errors:
project = None
if isinstance(error, RepoError):
project = error.project
error_info = json.dumps(
{
"ErrorType": type(error).__name__,
"Project": project,
"Message": str(error),
}
)
git_trace2_event_log.ErrorEvent(
f"AggregateExitError:{error_info}"
)
if not ok:
exception_name = type(e).__name__
git_trace2_event_log.ErrorEvent(
f"RepoExitError:{exception_name}"
)
@ -447,13 +474,13 @@ class _Repo(object):
"error: manifest missing or unreadable -- please run init",
file=sys.stderr,
)
result = 1
result = e.exit_code
except NoSuchProjectError as e:
if e.name:
print("error: project %s not found" % e.name, file=sys.stderr)
else:
print("error: no project in current directory", file=sys.stderr)
result = 1
result = e.exit_code
except InvalidProjectGroupsError as e:
if e.name:
print(
@ -467,7 +494,7 @@ class _Repo(object):
"the current directory",
file=sys.stderr,
)
result = 1
result = e.exit_code
except SystemExit as e:
if e.code:
result = e.code
@ -475,6 +502,9 @@ class _Repo(object):
except KeyboardInterrupt:
result = KEYBOARD_INTERRUPT_EXIT
raise
except RepoExitError as e:
result = e.exit_code
raise
except Exception:
result = 1
raise
@ -841,12 +871,20 @@ def _Main(argv):
SetTraceToStderr()
result = repo._Run(name, gopts, argv) or 0
except RepoExitError as e:
exception_name = type(e).__name__
result = e.exit_code
print("fatal: %s" % e, file=sys.stderr)
if e.aggregate_errors:
print(f"{exception_name} Aggregate Errors")
for err in e.aggregate_errors[:MAX_PRINT_ERRORS]:
print(err)
if len(e.aggregate_errors) > MAX_PRINT_ERRORS:
diff = len(e.aggregate_errors) - MAX_PRINT_ERRORS
print(f"+{diff} additional errors ...")
except KeyboardInterrupt:
print("aborted by user", file=sys.stderr)
result = KEYBOARD_INTERRUPT_EXIT
except ManifestParseError as mpe:
print("fatal: %s" % mpe, file=sys.stderr)
result = 1
except RepoChangedException as rce:
# If repo changed, re-exec ourselves.
#

View File

@ -26,7 +26,7 @@ import sys
import tarfile
import tempfile
import time
from typing import NamedTuple
from typing import List, NamedTuple, Optional
import urllib.parse
from color import Coloring
@ -41,7 +41,12 @@ from git_config import (
)
import git_superproject
from git_trace2_event_log import EventLog
from error import GitError, UploadError, DownloadError
from error import (
GitError,
UploadError,
DownloadError,
RepoError,
)
from error import ManifestInvalidRevisionError, ManifestInvalidPathError
from error import NoManifestException, ManifestParseError
import platform_utils
@ -54,11 +59,33 @@ from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
class SyncNetworkHalfResult(NamedTuple):
    """Sync_NetworkHalf return value.

    Success is derived from |error|: the sync is considered successful
    exactly when no error was recorded.
    """

    # Did we query the remote? False when optimized_fetch is True and we
    # have the commit already present.
    remote_fetched: bool
    # Error from Sync_NetworkHalf, or None on success.
    error: Exception = None

    @property
    def success(self) -> bool:
        # NB: `success` must be a property, not a field: typing.NamedTuple
        # raises AttributeError if a method overwrites a declared field.
        return not self.error
class SyncNetworkHalfError(RepoError):
    """Failure trying to sync the network half of a project."""
class DeleteWorktreeError(RepoError):
    """Failure to delete a project's worktree."""

    def __init__(
        self,
        *args,
        aggregate_errors: Optional[List[Exception]] = None,
        **kwargs,
    ) -> None:
        """Initialize.

        Args:
            aggregate_errors: Underlying errors encountered while deleting
                the worktree; stored as an empty list when not provided.
                (Annotated as Optional per PEP 484 — a bare
                `List[Exception] = None` is an implicit-Optional defect.)
        """
        super().__init__(*args, **kwargs)
        self.aggregate_errors = aggregate_errors or []
class DeleteDirtyWorktreeError(DeleteWorktreeError):
    """Failure to delete a worktree because it has uncommitted changes."""
# Maximum sleep time allowed during retries.
@ -1070,13 +1097,19 @@ class Project(object):
if branch is None:
branch = self.CurrentBranch
if branch is None:
raise GitError("not currently on a branch")
raise GitError("not currently on a branch", project=self.name)
branch = self.GetBranch(branch)
if not branch.LocalMerge:
raise GitError("branch %s does not track a remote" % branch.name)
raise GitError(
"branch %s does not track a remote" % branch.name,
project=self.name,
)
if not branch.remote.review:
raise GitError("remote %s has no review url" % branch.remote.name)
raise GitError(
"remote %s has no review url" % branch.remote.name,
project=self.name,
)
# Basic validity check on label syntax.
for label in labels:
@ -1193,11 +1226,18 @@ class Project(object):
"""
if archive and not isinstance(self, MetaProject):
if self.remote.url.startswith(("http://", "https://")):
_error(
"%s: Cannot fetch archives from http/https remotes.",
self.name,
msg_template = (
"%s: Cannot fetch archives from http/https remotes."
)
msg_args = self.name
msg = msg_template % msg_args
_error(
msg_template,
msg_args,
)
return SyncNetworkHalfResult(
False, SyncNetworkHalfError(msg, project=self.name)
)
return SyncNetworkHalfResult(False, False)
name = self.relpath.replace("\\", "/")
name = name.replace("/", "_")
@ -1208,19 +1248,25 @@ class Project(object):
self._FetchArchive(tarpath, cwd=topdir)
except GitError as e:
_error("%s", e)
return SyncNetworkHalfResult(False, False)
return SyncNetworkHalfResult(False, e)
# From now on, we only need absolute tarpath.
tarpath = os.path.join(topdir, tarpath)
if not self._ExtractArchive(tarpath, path=topdir):
return SyncNetworkHalfResult(False, True)
return SyncNetworkHalfResult(
True,
SyncNetworkHalfError(
f"Unable to Extract Archive {tarpath}",
project=self.name,
),
)
try:
platform_utils.remove(tarpath)
except OSError as e:
_warn("Cannot remove archive %s: %s", tarpath, str(e))
self._CopyAndLinkFiles()
return SyncNetworkHalfResult(True, True)
return SyncNetworkHalfResult(True)
# If the shared object dir already exists, don't try to rebootstrap with
# a clone bundle download. We should have the majority of objects
@ -1310,23 +1356,35 @@ class Project(object):
)
):
remote_fetched = True
if not self._RemoteFetch(
initial=is_new,
quiet=quiet,
verbose=verbose,
output_redir=output_redir,
alt_dir=alt_dir,
current_branch_only=current_branch_only,
tags=tags,
prune=prune,
depth=depth,
submodules=submodules,
force_sync=force_sync,
ssh_proxy=ssh_proxy,
clone_filter=clone_filter,
retry_fetches=retry_fetches,
):
return SyncNetworkHalfResult(False, remote_fetched)
try:
if not self._RemoteFetch(
initial=is_new,
quiet=quiet,
verbose=verbose,
output_redir=output_redir,
alt_dir=alt_dir,
current_branch_only=current_branch_only,
tags=tags,
prune=prune,
depth=depth,
submodules=submodules,
force_sync=force_sync,
ssh_proxy=ssh_proxy,
clone_filter=clone_filter,
retry_fetches=retry_fetches,
):
return SyncNetworkHalfResult(
remote_fetched,
SyncNetworkHalfError(
f"Unable to remote fetch project {self.name}",
project=self.name,
),
)
except RepoError as e:
return SyncNetworkHalfResult(
remote_fetched,
e,
)
mp = self.manifest.manifestProject
dissociate = mp.dissociate
@ -1346,7 +1404,12 @@ class Project(object):
if p.stdout and output_redir:
output_redir.write(p.stdout)
if p.Wait() != 0:
return SyncNetworkHalfResult(False, remote_fetched)
return SyncNetworkHalfResult(
remote_fetched,
GitError(
"Unable to repack alternates", project=self.name
),
)
platform_utils.remove(alternates_file)
if self.worktree:
@ -1356,7 +1419,7 @@ class Project(object):
platform_utils.remove(
os.path.join(self.gitdir, "FETCH_HEAD"), missing_ok=True
)
return SyncNetworkHalfResult(True, remote_fetched)
return SyncNetworkHalfResult(remote_fetched)
def PostRepoUpgrade(self):
self._InitHooks()
@ -1409,16 +1472,27 @@ class Project(object):
self.revisionId = revisionId
def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
def Sync_LocalHalf(
self, syncbuf, force_sync=False, submodules=False, errors=None
):
"""Perform only the local IO portion of the sync process.
Network access is not required.
"""
if errors is None:
errors = []
def fail(error: Exception):
errors.append(error)
syncbuf.fail(self, error)
if not os.path.exists(self.gitdir):
syncbuf.fail(
self,
"Cannot checkout %s due to missing network sync; Run "
"`repo sync -n %s` first." % (self.name, self.name),
fail(
LocalSyncFail(
"Cannot checkout %s due to missing network sync; Run "
"`repo sync -n %s` first." % (self.name, self.name),
project=self.name,
)
)
return
@ -1438,10 +1512,12 @@ class Project(object):
)
bad_paths = paths & PROTECTED_PATHS
if bad_paths:
syncbuf.fail(
self,
"Refusing to checkout project that writes to protected "
"paths: %s" % (", ".join(bad_paths),),
fail(
LocalSyncFail(
"Refusing to checkout project that writes to protected "
"paths: %s" % (", ".join(bad_paths),),
project=self.name,
)
)
return
@ -1466,7 +1542,7 @@ class Project(object):
# Currently on a detached HEAD. The user is assumed to
# not have any local modifications worth worrying about.
if self.IsRebaseInProgress():
syncbuf.fail(self, _PriorSyncFailedError())
fail(_PriorSyncFailedError(project=self.name))
return
if head == revid:
@ -1486,7 +1562,7 @@ class Project(object):
if submodules:
self._SyncSubmodules(quiet=True)
except GitError as e:
syncbuf.fail(self, e)
fail(e)
return
self._CopyAndLinkFiles()
return
@ -1511,7 +1587,7 @@ class Project(object):
if submodules:
self._SyncSubmodules(quiet=True)
except GitError as e:
syncbuf.fail(self, e)
fail(e)
return
self._CopyAndLinkFiles()
return
@ -1534,10 +1610,13 @@ class Project(object):
# The user has published this branch and some of those
# commits are not yet merged upstream. We do not want
# to rewrite the published commits so we punt.
syncbuf.fail(
self,
"branch %s is published (but not merged) and is now "
"%d commits behind" % (branch.name, len(upstream_gain)),
fail(
LocalSyncFail(
"branch %s is published (but not merged) and is "
"now %d commits behind"
% (branch.name, len(upstream_gain)),
project=self.name,
)
)
return
elif pub == head:
@ -1565,7 +1644,7 @@ class Project(object):
return
if self.IsDirty(consider_untracked=False):
syncbuf.fail(self, _DirtyError())
fail(_DirtyError(project=self.name))
return
# If the upstream switched on us, warn the user.
@ -1615,7 +1694,7 @@ class Project(object):
self._SyncSubmodules(quiet=True)
self._CopyAndLinkFiles()
except GitError as e:
syncbuf.fail(self, e)
fail(e)
return
else:
syncbuf.later1(self, _doff)
@ -1687,12 +1766,12 @@ class Project(object):
file=sys.stderr,
)
else:
print(
"error: %s: Cannot remove project: uncommitted changes are "
"present.\n" % (self.RelPath(local=False),),
file=sys.stderr,
msg = (
"error: %s: Cannot remove project: uncommitted"
"changes are present.\n" % self.RelPath(local=False)
)
return False
print(msg, file=sys.stderr)
raise DeleteDirtyWorktreeError(msg, project=self)
if not quiet:
print(
@ -1745,12 +1824,13 @@ class Project(object):
% (self.RelPath(local=False),),
file=sys.stderr,
)
return False
raise DeleteWorktreeError(aggregate_errors=[e])
# Delete everything under the worktree, except for directories that
# contain another git project.
dirs_to_remove = []
failed = False
errors = []
for root, dirs, files in platform_utils.walk(self.worktree):
for f in files:
path = os.path.join(root, f)
@ -1763,6 +1843,7 @@ class Project(object):
file=sys.stderr,
)
failed = True
errors.append(e)
dirs[:] = [
d
for d in dirs
@ -1784,6 +1865,7 @@ class Project(object):
file=sys.stderr,
)
failed = True
errors.append(e)
elif not platform_utils.listdir(d):
try:
platform_utils.rmdir(d)
@ -1794,6 +1876,7 @@ class Project(object):
file=sys.stderr,
)
failed = True
errors.append(e)
if failed:
print(
"error: %s: Failed to delete obsolete checkout."
@ -1804,7 +1887,7 @@ class Project(object):
" Remove manually, then run `repo sync -l`.",
file=sys.stderr,
)
return False
raise DeleteWorktreeError(aggregate_errors=errors)
# Try deleting parent dirs if they are empty.
path = self.worktree
@ -2264,11 +2347,14 @@ class Project(object):
cmd.append(self.revisionExpr)
command = GitCommand(
self, cmd, cwd=cwd, capture_stdout=True, capture_stderr=True
self,
cmd,
cwd=cwd,
capture_stdout=True,
capture_stderr=True,
verify_command=True,
)
if command.Wait() != 0:
raise GitError("git archive %s: %s" % (self.name, command.stderr))
command.Wait()
def _RemoteFetch(
self,
@ -2289,7 +2375,7 @@ class Project(object):
retry_fetches=2,
retry_sleep_initial_sec=4.0,
retry_exp_factor=2.0,
):
) -> bool:
is_sha1 = False
tag_name = None
# The depth should not be used when fetching to a mirror because
@ -2473,6 +2559,7 @@ class Project(object):
retry_cur_sleep = retry_sleep_initial_sec
ok = prune_tried = False
for try_n in range(retry_fetches):
verify_command = try_n == retry_fetches - 1
gitcmd = GitCommand(
self,
cmd,
@ -2481,6 +2568,7 @@ class Project(object):
ssh_proxy=ssh_proxy,
merge_output=True,
capture_stdout=quiet or bool(output_redir),
verify_command=verify_command,
)
if gitcmd.stdout and not quiet and output_redir:
output_redir.write(gitcmd.stdout)
@ -2732,7 +2820,9 @@ class Project(object):
cmd.append("--")
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError("%s checkout %s " % (self.name, rev))
raise GitError(
"%s checkout %s " % (self.name, rev), project=self.name
)
def _CherryPick(self, rev, ffonly=False, record_origin=False):
cmd = ["cherry-pick"]
@ -2744,7 +2834,9 @@ class Project(object):
cmd.append("--")
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError("%s cherry-pick %s " % (self.name, rev))
raise GitError(
"%s cherry-pick %s " % (self.name, rev), project=self.name
)
def _LsRemote(self, refs):
cmd = ["ls-remote", self.remote.name, refs]
@ -2760,7 +2852,9 @@ class Project(object):
cmd.append("--")
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError("%s revert %s " % (self.name, rev))
raise GitError(
"%s revert %s " % (self.name, rev), project=self.name
)
def _ResetHard(self, rev, quiet=True):
cmd = ["reset", "--hard"]
@ -2768,7 +2862,9 @@ class Project(object):
cmd.append("-q")
cmd.append(rev)
if GitCommand(self, cmd).Wait() != 0:
raise GitError("%s reset --hard %s " % (self.name, rev))
raise GitError(
"%s reset --hard %s " % (self.name, rev), project=self.name
)
def _SyncSubmodules(self, quiet=True):
cmd = ["submodule", "update", "--init", "--recursive"]
@ -2776,7 +2872,8 @@ class Project(object):
cmd.append("-q")
if GitCommand(self, cmd).Wait() != 0:
raise GitError(
"%s submodule update --init --recursive " % self.name
"%s submodule update --init --recursive " % self.name,
project=self.name,
)
def _Rebase(self, upstream, onto=None):
@ -2785,14 +2882,18 @@ class Project(object):
cmd.extend(["--onto", onto])
cmd.append(upstream)
if GitCommand(self, cmd).Wait() != 0:
raise GitError("%s rebase %s " % (self.name, upstream))
raise GitError(
"%s rebase %s " % (self.name, upstream), project=self.name
)
def _FastForward(self, head, ffonly=False):
cmd = ["merge", "--no-stat", head]
if ffonly:
cmd.append("--ff-only")
if GitCommand(self, cmd).Wait() != 0:
raise GitError("%s merge %s " % (self.name, head))
raise GitError(
"%s merge %s " % (self.name, head), project=self.name
)
def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
init_git_dir = not os.path.exists(self.gitdir)
@ -2964,7 +3065,9 @@ class Project(object):
try:
os.link(stock_hook, dst)
except OSError:
raise GitError(self._get_symlink_error_message())
raise GitError(
self._get_symlink_error_message(), project=self.name
)
else:
raise
@ -3065,7 +3168,8 @@ class Project(object):
"work tree. If you're comfortable with the "
"possibility of losing the work tree's git metadata,"
" use `repo sync --force-sync {0}` to "
"proceed.".format(self.RelPath(local=False))
"proceed.".format(self.RelPath(local=False)),
project=self.name,
)
def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
@ -3175,7 +3279,7 @@ class Project(object):
# If using an old layout style (a directory), migrate it.
if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
self._MigrateOldWorkTreeGitDir(dotgit)
self._MigrateOldWorkTreeGitDir(dotgit, project=self.name)
init_dotgit = not os.path.exists(dotgit)
if self.use_git_worktrees:
@ -3205,7 +3309,8 @@ class Project(object):
cmd = ["read-tree", "--reset", "-u", "-v", HEAD]
if GitCommand(self, cmd).Wait() != 0:
raise GitError(
"Cannot initialize work tree for " + self.name
"Cannot initialize work tree for " + self.name,
project=self.name,
)
if submodules:
@ -3213,7 +3318,7 @@ class Project(object):
self._CopyAndLinkFiles()
@classmethod
def _MigrateOldWorkTreeGitDir(cls, dotgit):
def _MigrateOldWorkTreeGitDir(cls, dotgit, project=None):
"""Migrate the old worktree .git/ dir style to a symlink.
This logic specifically only uses state from |dotgit| to figure out
@ -3223,7 +3328,9 @@ class Project(object):
"""
# Figure out where in .repo/projects/ it's pointing to.
if not os.path.islink(os.path.join(dotgit, "refs")):
raise GitError(f"{dotgit}: unsupported checkout state")
raise GitError(
f"{dotgit}: unsupported checkout state", project=project
)
gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, "refs")))
# Remove known symlink paths that exist in .repo/projects/.
@ -3271,7 +3378,10 @@ class Project(object):
f"{dotgit_path}: unknown file; please file a bug"
)
if unknown_paths:
raise GitError("Aborting migration: " + "\n".join(unknown_paths))
raise GitError(
"Aborting migration: " + "\n".join(unknown_paths),
project=project,
)
# Now walk the paths and sync the .git/ to .repo/projects/.
for name in platform_utils.listdir(dotgit):
@ -3537,12 +3647,9 @@ class Project(object):
gitdir=self._gitdir,
capture_stdout=True,
capture_stderr=True,
verify_command=True,
)
if p.Wait() != 0:
raise GitError(
"%s rev-list %s: %s"
% (self._project.name, str(args), p.stderr)
)
p.Wait()
return p.stdout.splitlines()
def __getattr__(self, name):
@ -3588,11 +3695,9 @@ class Project(object):
gitdir=self._gitdir,
capture_stdout=True,
capture_stderr=True,
verify_command=True,
)
if p.Wait() != 0:
raise GitError(
"%s %s: %s" % (self._project.name, name, p.stderr)
)
p.Wait()
r = p.stdout
if r.endswith("\n") and r.index("\n") == len(r) - 1:
return r[:-1]
@ -3601,12 +3706,16 @@ class Project(object):
return runner
class _PriorSyncFailedError(Exception):
class LocalSyncFail(RepoError):
    """Default error raised when Sync_LocalHalf fails."""
class _PriorSyncFailedError(LocalSyncFail):
    """A previous sync left this worktree with a rebase still in progress."""

    def __str__(self):
        return "prior sync failed; rebase still in progress"
class _DirtyError(Exception):
class _DirtyError(LocalSyncFail):
    """The worktree contains uncommitted changes, so sync cannot proceed."""

    def __str__(self):
        return "contains uncommitted changes"

View File

@ -118,7 +118,7 @@ If no project is specified try to use current directory as a project.
),
file=sys.stderr,
)
sys.exit(1)
raise NoSuchProjectError()
else:
project = projects[0]
print("Defaulting to cwd project", project.name)

View File

@ -63,9 +63,16 @@ from command import (
MirrorSafeCommand,
WORKER_BATCH_SIZE,
)
from error import RepoChangedException, GitError
from error import (
RepoChangedException,
GitError,
RepoExitError,
SyncError,
UpdateManifestError,
RepoUnhandledExceptionError,
)
import platform_utils
from project import SyncBuffer
from project import SyncBuffer, DeleteWorktreeError
from progress import Progress, elapsed_str, jobs_str
from repo_trace import Trace
import ssh
@ -94,6 +101,7 @@ class _FetchOneResult(NamedTuple):
"""
success: bool
errors: List[Exception]
project: Project
start: float
finish: float
@ -110,6 +118,7 @@ class _FetchResult(NamedTuple):
success: bool
projects: Set[str]
errors: List[Exception]
class _FetchMainResult(NamedTuple):
@ -120,6 +129,7 @@ class _FetchMainResult(NamedTuple):
"""
all_projects: List[Project]
errors: List[Exception]
class _CheckoutOneResult(NamedTuple):
@ -133,11 +143,24 @@ class _CheckoutOneResult(NamedTuple):
"""
success: bool
errors: List[Exception]
project: Project
start: float
finish: float
class SuperprojectError(SyncError):
    """Raised when syncing the superproject fails."""
class SyncFailFastError(SyncError):
    """Sync exit error raised when --fail-fast is set and a step fails."""
class SmartSyncError(SyncError):
    """Smart sync exit error (manifest server sync failed)."""
class Sync(Command, MirrorSafeCommand):
COMMON = True
MULTI_MANIFEST_SUPPORT = True
@ -588,7 +611,7 @@ later is required to fix a server side protocol bug.
file=sys.stderr,
)
if update_result.fatal and opt.use_superproject is not None:
sys.exit(1)
raise SuperprojectError()
if need_unload:
m.outer_client.manifest.Unload()
@ -621,6 +644,7 @@ later is required to fix a server side protocol bug.
self._sync_dict[k] = start
success = False
remote_fetched = False
errors = []
buf = io.StringIO()
try:
sync_result = project.Sync_NetworkHalf(
@ -644,6 +668,8 @@ later is required to fix a server side protocol bug.
)
success = sync_result.success
remote_fetched = sync_result.remote_fetched
if sync_result.error:
errors.append(sync_result.error)
output = buf.getvalue()
if (opt.verbose or not success) and output:
@ -659,6 +685,7 @@ later is required to fix a server side protocol bug.
print(f"Keyboard interrupt while processing {project.name}")
except GitError as e:
print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
errors.append(e)
except Exception as e:
print(
"error: Cannot fetch %s (%s: %s)"
@ -666,11 +693,14 @@ later is required to fix a server side protocol bug.
file=sys.stderr,
)
del self._sync_dict[k]
errors.append(e)
raise
finish = time.time()
del self._sync_dict[k]
return _FetchOneResult(success, project, start, finish, remote_fetched)
return _FetchOneResult(
success, errors, project, start, finish, remote_fetched
)
@classmethod
def _FetchInitChild(cls, ssh_proxy):
@ -701,6 +731,7 @@ later is required to fix a server side protocol bug.
jobs = opt.jobs_network
fetched = set()
remote_fetched = set()
errors = []
pm = Progress(
"Fetching",
len(projects),
@ -745,6 +776,8 @@ later is required to fix a server side protocol bug.
finish,
success,
)
if result.errors:
errors.extend(result.errors)
if result.remote_fetched:
remote_fetched.add(project)
# Check for any errors before running any more tasks.
@ -813,7 +846,7 @@ later is required to fix a server side protocol bug.
if not self.outer_client.manifest.IsArchive:
self._GCProjects(projects, opt, err_event)
return _FetchResult(ret, fetched)
return _FetchResult(ret, fetched, errors)
def _FetchMain(
self, opt, args, all_projects, err_event, ssh_proxy, manifest
@ -832,6 +865,7 @@ later is required to fix a server side protocol bug.
List of all projects that should be checked out.
"""
rp = manifest.repoProject
errors = []
to_fetch = []
now = time.time()
@ -843,6 +877,9 @@ later is required to fix a server side protocol bug.
result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
success = result.success
fetched = result.projects
if result.errors:
errors.extend(result.errors)
if not success:
err_event.set()
@ -854,8 +891,11 @@ later is required to fix a server side protocol bug.
"\nerror: Exited sync due to fetch errors.\n",
file=sys.stderr,
)
sys.exit(1)
return _FetchMainResult([])
raise SyncError(
"error: Exited sync due to fetch errors.",
aggregate_errors=errors,
)
return _FetchMainResult([], errors)
# Iteratively fetch missing and/or nested unregistered submodules.
previously_missing_set = set()
@ -883,11 +923,13 @@ later is required to fix a server side protocol bug.
result = self._Fetch(missing, opt, err_event, ssh_proxy)
success = result.success
new_fetched = result.projects
if result.errors:
errors.extend(result.errors)
if not success:
err_event.set()
fetched.update(new_fetched)
return _FetchMainResult(all_projects)
return _FetchMainResult(all_projects, errors)
def _CheckoutOne(self, detach_head, force_sync, project):
"""Checkout work tree for one project
@ -905,8 +947,11 @@ later is required to fix a server side protocol bug.
project.manifest.manifestProject.config, detach_head=detach_head
)
success = False
errors = []
try:
project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
project.Sync_LocalHalf(
syncbuf, force_sync=force_sync, errors=errors
)
success = syncbuf.Finish()
except GitError as e:
print(
@ -914,6 +959,7 @@ later is required to fix a server side protocol bug.
% (project.name, str(e)),
file=sys.stderr,
)
errors.append(e)
except Exception as e:
print(
"error: Cannot checkout %s: %s: %s"
@ -925,9 +971,9 @@ later is required to fix a server side protocol bug.
if not success:
print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
finish = time.time()
return _CheckoutOneResult(success, project, start, finish)
return _CheckoutOneResult(success, errors, project, start, finish)
def _Checkout(self, all_projects, opt, err_results):
def _Checkout(self, all_projects, opt, err_results, checkout_errors):
"""Checkout projects listed in all_projects
Args:
@ -949,6 +995,10 @@ later is required to fix a server side protocol bug.
self.event_log.AddSync(
project, event_log.TASK_SYNC_LOCAL, start, finish, success
)
if result.errors:
checkout_errors.extend(result.errors)
# Check for any errors before running any more tasks.
# ...we'll let existing jobs finish, though.
if success:
@ -1214,10 +1264,9 @@ later is required to fix a server side protocol bug.
revisionId=None,
groups=None,
)
if not project.DeleteWorktree(
project.DeleteWorktree(
quiet=opt.quiet, force=opt.force_remove_dirty
):
return 1
)
new_project_paths.sort()
with open(file_path, "w") as fd:
@ -1260,7 +1309,7 @@ later is required to fix a server side protocol bug.
file=sys.stderr,
)
platform_utils.remove(copylinkfile_path)
return False
raise
need_remove_files = []
need_remove_files.extend(
@ -1285,12 +1334,10 @@ later is required to fix a server side protocol bug.
def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
if not manifest.manifest_server:
print(
raise SmartSyncError(
"error: cannot smart sync: no manifest server defined in "
"manifest",
file=sys.stderr,
"manifest"
)
sys.exit(1)
manifest_server = manifest.manifest_server
if not opt.quiet:
@ -1368,33 +1415,28 @@ later is required to fix a server side protocol bug.
with open(smart_sync_manifest_path, "w") as f:
f.write(manifest_str)
except IOError as e:
print(
raise SmartSyncError(
"error: cannot write manifest to %s:\n%s"
% (smart_sync_manifest_path, e),
file=sys.stderr,
aggregate_errors=[e],
)
sys.exit(1)
self._ReloadManifest(manifest_name, manifest)
else:
print(
"error: manifest server RPC call failed: %s" % manifest_str,
file=sys.stderr,
raise SmartSyncError(
"error: manifest server RPC call failed: %s" % manifest_str
)
sys.exit(1)
except (socket.error, IOError, xmlrpc.client.Fault) as e:
print(
raise SmartSyncError(
"error: cannot connect to manifest server %s:\n%s"
% (manifest.manifest_server, e),
file=sys.stderr,
aggregate_errors=[e],
)
sys.exit(1)
except xmlrpc.client.ProtocolError as e:
print(
raise SmartSyncError(
"error: cannot connect to manifest server %s:\n%d %s"
% (manifest.manifest_server, e.errcode, e.errmsg),
file=sys.stderr,
aggregate_errors=[e],
)
sys.exit(1)
return manifest_name
@ -1436,7 +1478,7 @@ later is required to fix a server side protocol bug.
"""
if not opt.local_only:
start = time.time()
success = mp.Sync_NetworkHalf(
result = mp.Sync_NetworkHalf(
quiet=opt.quiet,
verbose=opt.verbose,
current_branch_only=self._GetCurrentBranchOnly(
@ -1453,19 +1495,24 @@ later is required to fix a server side protocol bug.
)
finish = time.time()
self.event_log.AddSync(
mp, event_log.TASK_SYNC_NETWORK, start, finish, success
mp, event_log.TASK_SYNC_NETWORK, start, finish, result.success
)
if mp.HasChanges:
errors = []
syncbuf = SyncBuffer(mp.config)
start = time.time()
mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
mp.Sync_LocalHalf(
syncbuf, submodules=mp.manifest.HasSubmodules, errors=errors
)
clean = syncbuf.Finish()
self.event_log.AddSync(
mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
)
if not clean:
sys.exit(1)
raise UpdateManifestError(
aggregate_errors=errors, project=mp.name
)
self._ReloadManifest(manifest_name, mp.manifest)
def ValidateOptions(self, opt, args):
@ -1546,6 +1593,15 @@ later is required to fix a server side protocol bug.
opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
def Execute(self, opt, args):
errors = []
try:
self._ExecuteHelper(opt, args, errors)
except RepoExitError:
raise
except (KeyboardInterrupt, Exception) as e:
raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
def _ExecuteHelper(self, opt, args, errors):
manifest = self.outer_manifest
if not opt.outer_manifest:
manifest = self.manifest
@ -1695,6 +1751,8 @@ later is required to fix a server side protocol bug.
result = self._FetchMain(
opt, args, all_projects, err_event, ssh_proxy, manifest
)
if result.errors:
errors.extend(result.errors)
all_projects = result.all_projects
if opt.network_only:
@ -1712,36 +1770,47 @@ later is required to fix a server side protocol bug.
"`repo sync -l` will update some local checkouts.",
file=sys.stderr,
)
sys.exit(1)
raise SyncFailFastError(aggregate_errors=errors)
for m in self.ManifestList(opt):
if m.IsMirror or m.IsArchive:
# Bail out now, we have no working tree.
continue
if self.UpdateProjectList(opt, m):
try:
self.UpdateProjectList(opt, m)
except Exception as e:
err_event.set()
err_update_projects = True
errors.append(e)
if isinstance(e, DeleteWorktreeError):
errors.extend(e.aggregate_errors)
if opt.fail_fast:
print(
"\nerror: Local checkouts *not* updated.",
file=sys.stderr,
)
sys.exit(1)
raise SyncFailFastError(aggregate_errors=errors)
err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
if err_update_linkfiles:
err_update_linkfiles = False
try:
self.UpdateCopyLinkfileList(m)
except Exception as e:
err_update_linkfiles = True
errors.append(e)
err_event.set()
if opt.fail_fast:
print(
"\nerror: Local update copyfile or linkfile failed.",
file=sys.stderr,
)
sys.exit(1)
raise SyncFailFastError(aggregate_errors=errors)
err_results = []
# NB: We don't exit here because this is the last step.
err_checkout = not self._Checkout(all_projects, opt, err_results)
err_checkout = not self._Checkout(
all_projects, opt, err_results, errors
)
if err_checkout:
err_event.set()
@ -1784,7 +1853,7 @@ later is required to fix a server side protocol bug.
"error.",
file=sys.stderr,
)
sys.exit(1)
raise SyncError(aggregate_errors=errors)
# Log the previous sync analysis state from the config.
self.git_event_log.LogDataConfigEvents(
@ -1842,7 +1911,7 @@ def _PostRepoFetch(rp, repo_verify=True, verbose=False):
try:
rp.work_git.reset("--keep", new_rev)
except GitError as e:
sys.exit(str(e))
raise RepoUnhandledExceptionError(e)
print("info: Restarting repo with latest version", file=sys.stderr)
raise RepoChangedException(["--repo-upgraded"])
else:

View File

@ -17,12 +17,15 @@ import os
import shutil
import tempfile
import unittest
import time
from unittest import mock
import pytest
import command
from subcmds import sync
from project import SyncNetworkHalfResult
from error import GitError, RepoExitError
@pytest.mark.parametrize(
@ -233,3 +236,83 @@ class GetPreciousObjectsState(unittest.TestCase):
self.assertFalse(
self.cmd._GetPreciousObjectsState(self.project, self.opt)
)
class SyncCommand(unittest.TestCase):
    """Tests for cmd.Execute."""

    def setUp(self):
        """Common setup."""
        self.repodir = tempfile.mkdtemp(".repo")
        self.manifest = manifest = mock.MagicMock(
            repodir=self.repodir,
        )

        git_event_log = mock.MagicMock(ErrorEvent=mock.Mock(return_value=None))
        self.outer_client = outer_client = mock.MagicMock()
        outer_client.manifest.IsArchive = True
        manifest.manifestProject.worktree = "worktree_path/"
        manifest.repoProject.LastFetch = time.time()
        self.sync_network_half_error = None
        self.sync_local_half_error = None
        self.cmd = sync.Sync(
            manifest=manifest,
            outer_client=outer_client,
            git_event_log=git_event_log,
        )

        def Sync_NetworkHalf(*args, **kwargs):
            return SyncNetworkHalfResult(True, self.sync_network_half_error)

        def Sync_LocalHalf(*args, **kwargs):
            if self.sync_local_half_error:
                raise self.sync_local_half_error

        self.project = p = mock.MagicMock(
            use_git_worktrees=False,
            UseAlternates=False,
            name="project",
            Sync_NetworkHalf=Sync_NetworkHalf,
            Sync_LocalHalf=Sync_LocalHalf,
            RelPath=mock.Mock(return_value="rel_path"),
        )
        p.manifest.GetProjectsWithName.return_value = [p]

        mock.patch.object(
            sync,
            "_PostRepoFetch",
            return_value=None,
        ).start()

        mock.patch.object(
            self.cmd, "GetProjects", return_value=[self.project]
        ).start()

        opt, _ = self.cmd.OptionParser.parse_args([])
        opt.clone_bundle = False
        opt.jobs = 4
        opt.quiet = True
        opt.use_superproject = False
        opt.current_branch_only = True
        opt.optimized_fetch = True
        opt.retry_fetches = 1
        opt.prune = False
        opt.auto_gc = False
        opt.repo_verify = False
        self.opt = opt

    def tearDown(self):
        """Stop every mock.patch started in setUp."""
        mock.patch.stopall()

    def test_command_exit_error(self):
        """Ensure unsuccessful commands raise expected errors."""
        self.sync_network_half_error = GitError(
            "sync_network_half_error error", project=self.project
        )
        self.sync_local_half_error = GitError(
            "sync_local_half_error", project=self.project
        )
        with self.assertRaises(RepoExitError) as e:
            self.cmd.Execute(self.opt, [])
        # The assertions must run AFTER the `with` block exits (statements
        # after the raising call inside the block would never execute), and
        # the raised error lives on `e.exception`, not on the assertRaises
        # context object itself.
        self.assertIn(
            self.sync_local_half_error, e.exception.aggregate_errors
        )
        self.assertIn(
            self.sync_network_half_error, e.exception.aggregate_errors
        )