Refactor errors for sync command

Per the discussion in go/repo-error-update, updated the aggregated and
exit errors for the sync command.

Aggregated errors are errors that result in eventual command failure.
Exit errors are errors that result in immediate command failure.

Also updated main.py to log aggregated and exit errors to the git
sessions log.
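
For reference, a minimal sketch of the model this change builds on. The
real classes live in error.py, which is not part of this diff, so the
shapes below are inferred from how main.py and subcmds/sync.py use them
(the default exit_code of 1 and the run_sync wrapper are illustrative
assumptions):

    class RepoError(Exception):
        """An error tied to a single project (assumed shape)."""
        def __init__(self, *args, project=None):
            super().__init__(*args)
            self.project = project

    class RepoExitError(Exception):
        """Exit error: ends the command immediately (assumed shape)."""
        def __init__(self, *args, aggregate_errors=None, exit_code=1):
            super().__init__(*args)
            self.aggregate_errors = aggregate_errors or []
            self.exit_code = exit_code

    def run_sync(projects):
        errors = []  # aggregated errors: failure is deferred
        for p in projects:
            try:
                p.sync()
            except RepoError as e:
                errors.append(e)  # keep going, fail at the end
        if errors:
            raise RepoExitError("sync failed", aggregate_errors=errors)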

Bug: b/293344017
Change-Id: I77a21f14da32fe2e68c16841feb22de72e86a251
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/379614
Reviewed-by: Aravind Vasudevan <aravindvasudev@google.com>
Tested-by: Jason Chang <jasonnc@google.com>
Commit-Queue: Jason Chang <jasonnc@google.com>
Jason Chang <jasonnc@google.com> authored 2023-07-14 16:45:35 -07:00; committed by LUCI
parent a6413f5d88, commit 32b59565b7
5 changed files with 442 additions and 143 deletions

main.py

@@ -30,6 +30,7 @@ import sys
 import textwrap
 import time
 import urllib.request
+import json
 
 try:
     import kerberos
@@ -50,10 +51,12 @@ from editor import Editor
 from error import DownloadError
 from error import InvalidProjectGroupsError
 from error import ManifestInvalidRevisionError
-from error import ManifestParseError
 from error import NoManifestException
 from error import NoSuchProjectError
 from error import RepoChangedException
+from error import RepoExitError
+from error import RepoUnhandledExceptionError
+from error import RepoError
 import gitc_utils
 from manifest_xml import GitcClient, RepoClient
 from pager import RunPager, TerminatePager
@@ -97,6 +100,7 @@ else:
     )
 
 KEYBOARD_INTERRUPT_EXIT = 128 + signal.SIGINT
+MAX_PRINT_ERRORS = 5
 
 global_options = optparse.OptionParser(
     usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]",
@@ -422,10 +426,33 @@ class _Repo(object):
             """
             try:
                 execute_command_helper()
-            except (KeyboardInterrupt, SystemExit, Exception) as e:
+            except (
+                KeyboardInterrupt,
+                SystemExit,
+                Exception,
+                RepoExitError,
+            ) as e:
                 ok = isinstance(e, SystemExit) and not e.code
+                exception_name = type(e).__name__
+                if isinstance(e, RepoUnhandledExceptionError):
+                    exception_name = type(e.error).__name__
+                if isinstance(e, RepoExitError):
+                    aggregated_errors = e.aggregate_errors or []
+                    for error in aggregated_errors:
+                        project = None
+                        if isinstance(error, RepoError):
+                            project = error.project
+                        error_info = json.dumps(
+                            {
+                                "ErrorType": type(error).__name__,
+                                "Project": project,
+                                "Message": str(error),
+                            }
+                        )
+                        git_trace2_event_log.ErrorEvent(
+                            f"AggregateExitError:{error_info}"
+                        )
                 if not ok:
-                    exception_name = type(e).__name__
                     git_trace2_event_log.ErrorEvent(
                         f"RepoExitError:{exception_name}"
                     )
@@ -447,13 +474,13 @@ class _Repo(object):
                 "error: manifest missing or unreadable -- please run init",
                 file=sys.stderr,
             )
-            result = 1
+            result = e.exit_code
         except NoSuchProjectError as e:
             if e.name:
                 print("error: project %s not found" % e.name, file=sys.stderr)
             else:
                 print("error: no project in current directory", file=sys.stderr)
-            result = 1
+            result = e.exit_code
         except InvalidProjectGroupsError as e:
             if e.name:
                 print(
@@ -467,7 +494,7 @@ class _Repo(object):
                     "the current directory",
                     file=sys.stderr,
                 )
-            result = 1
+            result = e.exit_code
         except SystemExit as e:
             if e.code:
                 result = e.code
@@ -475,6 +502,9 @@ class _Repo(object):
         except KeyboardInterrupt:
             result = KEYBOARD_INTERRUPT_EXIT
             raise
+        except RepoExitError as e:
+            result = e.exit_code
+            raise
         except Exception:
             result = 1
             raise
@@ -841,12 +871,20 @@ def _Main(argv):
            SetTraceToStderr()

        result = repo._Run(name, gopts, argv) or 0
+    except RepoExitError as e:
+        exception_name = type(e).__name__
+        result = e.exit_code
+        print("fatal: %s" % e, file=sys.stderr)
+        if e.aggregate_errors:
+            print(f"{exception_name} Aggregate Errors")
+            for err in e.aggregate_errors[:MAX_PRINT_ERRORS]:
+                print(err)
+            if len(e.aggregate_errors) > MAX_PRINT_ERRORS:
+                diff = len(e.aggregate_errors) - MAX_PRINT_ERRORS
+                print(f"+{diff} additional errors ...")
    except KeyboardInterrupt:
        print("aborted by user", file=sys.stderr)
        result = KEYBOARD_INTERRUPT_EXIT
-    except ManifestParseError as mpe:
-        print("fatal: %s" % mpe, file=sys.stderr)
-        result = 1
    except RepoChangedException as rce:
        # If repo changed, re-exec ourselves.
        #
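
Taken out of context, the aggregated-error logging that main.py now
performs reduces to the following self-contained sketch. The helper name
and the example project name are made up; the real code inlines this in
_Repo._Run and hands the string to git_trace2_event_log.ErrorEvent:

    import json

    def format_aggregate_error_event(error, project=None):
        error_info = json.dumps(
            {
                "ErrorType": type(error).__name__,
                "Project": project,
                "Message": str(error),
            }
        )
        return f"AggregateExitError:{error_info}"

    print(format_aggregate_error_event(OSError("disk full"), "platform/build"))
    # AggregateExitError:{"ErrorType": "OSError", "Project": "platform/build", "Message": "disk full"}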

project.py

@@ -26,7 +26,7 @@ import sys
 import tarfile
 import tempfile
 import time
-from typing import NamedTuple
+from typing import NamedTuple, List
 import urllib.parse
 
 from color import Coloring
@@ -41,7 +41,12 @@ from git_config import (
 )
 import git_superproject
 from git_trace2_event_log import EventLog
-from error import GitError, UploadError, DownloadError
+from error import (
+    GitError,
+    UploadError,
+    DownloadError,
+    RepoError,
+)
 from error import ManifestInvalidRevisionError, ManifestInvalidPathError
 from error import NoManifestException, ManifestParseError
 import platform_utils
@@ -54,11 +59,33 @@ from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
 class SyncNetworkHalfResult(NamedTuple):
     """Sync_NetworkHalf return value."""
 
-    # True if successful.
-    success: bool
     # Did we query the remote? False when optimized_fetch is True and we have
     # the commit already present.
     remote_fetched: bool
+    # Error from SyncNetworkHalf
+    error: Exception = None
+
+    @property
+    def success(self) -> bool:
+        return not self.error
+
+
+class SyncNetworkHalfError(RepoError):
+    """Failure trying to sync."""
+
+
+class DeleteWorktreeError(RepoError):
+    """Failure to delete worktree."""
+
+    def __init__(
+        self, *args, aggregate_errors: List[Exception] = None, **kwargs
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.aggregate_errors = aggregate_errors or []
+
+
+class DeleteDirtyWorktreeError(DeleteWorktreeError):
+    """Failure to delete worktree due to uncommitted changes."""
 
 
 # Maximum sleep time allowed during retries.
@@ -1070,13 +1097,19 @@ class Project(object):
         if branch is None:
             branch = self.CurrentBranch
         if branch is None:
-            raise GitError("not currently on a branch")
+            raise GitError("not currently on a branch", project=self.name)
 
         branch = self.GetBranch(branch)
         if not branch.LocalMerge:
-            raise GitError("branch %s does not track a remote" % branch.name)
+            raise GitError(
+                "branch %s does not track a remote" % branch.name,
+                project=self.name,
+            )
         if not branch.remote.review:
-            raise GitError("remote %s has no review url" % branch.remote.name)
+            raise GitError(
+                "remote %s has no review url" % branch.remote.name,
+                project=self.name,
+            )
 
         # Basic validity check on label syntax.
         for label in labels:
@@ -1193,11 +1226,18 @@ class Project(object):
         """
         if archive and not isinstance(self, MetaProject):
             if self.remote.url.startswith(("http://", "https://")):
-                _error(
-                    "%s: Cannot fetch archives from http/https remotes.",
-                    self.name,
-                )
-                return SyncNetworkHalfResult(False, False)
+                msg_template = (
+                    "%s: Cannot fetch archives from http/https remotes."
+                )
+                msg_args = self.name
+                msg = msg_template % msg_args
+                _error(
+                    msg_template,
+                    msg_args,
+                )
+                return SyncNetworkHalfResult(
+                    False, SyncNetworkHalfError(msg, project=self.name)
+                )
 
             name = self.relpath.replace("\\", "/")
             name = name.replace("/", "_")
@@ -1208,19 +1248,25 @@ class Project(object):
                 self._FetchArchive(tarpath, cwd=topdir)
             except GitError as e:
                 _error("%s", e)
-                return SyncNetworkHalfResult(False, False)
+                return SyncNetworkHalfResult(False, e)
 
             # From now on, we only need absolute tarpath.
             tarpath = os.path.join(topdir, tarpath)
 
             if not self._ExtractArchive(tarpath, path=topdir):
-                return SyncNetworkHalfResult(False, True)
+                return SyncNetworkHalfResult(
+                    True,
+                    SyncNetworkHalfError(
+                        f"Unable to Extract Archive {tarpath}",
+                        project=self.name,
+                    ),
+                )
             try:
                 platform_utils.remove(tarpath)
             except OSError as e:
                 _warn("Cannot remove archive %s: %s", tarpath, str(e))
             self._CopyAndLinkFiles()
-            return SyncNetworkHalfResult(True, True)
+            return SyncNetworkHalfResult(True)
 
         # If the shared object dir already exists, don't try to rebootstrap with
         # a clone bundle download. We should have the majority of objects
@@ -1310,23 +1356,35 @@ class Project(object):
             )
         ):
             remote_fetched = True
-            if not self._RemoteFetch(
-                initial=is_new,
-                quiet=quiet,
-                verbose=verbose,
-                output_redir=output_redir,
-                alt_dir=alt_dir,
-                current_branch_only=current_branch_only,
-                tags=tags,
-                prune=prune,
-                depth=depth,
-                submodules=submodules,
-                force_sync=force_sync,
-                ssh_proxy=ssh_proxy,
-                clone_filter=clone_filter,
-                retry_fetches=retry_fetches,
-            ):
-                return SyncNetworkHalfResult(False, remote_fetched)
+            try:
+                if not self._RemoteFetch(
+                    initial=is_new,
+                    quiet=quiet,
+                    verbose=verbose,
+                    output_redir=output_redir,
+                    alt_dir=alt_dir,
+                    current_branch_only=current_branch_only,
+                    tags=tags,
+                    prune=prune,
+                    depth=depth,
+                    submodules=submodules,
+                    force_sync=force_sync,
+                    ssh_proxy=ssh_proxy,
+                    clone_filter=clone_filter,
+                    retry_fetches=retry_fetches,
+                ):
+                    return SyncNetworkHalfResult(
+                        remote_fetched,
+                        SyncNetworkHalfError(
+                            f"Unable to remote fetch project {self.name}",
+                            project=self.name,
+                        ),
+                    )
+            except RepoError as e:
+                return SyncNetworkHalfResult(
+                    remote_fetched,
+                    e,
+                )
 
         mp = self.manifest.manifestProject
         dissociate = mp.dissociate
@@ -1346,7 +1404,12 @@ class Project(object):
                 if p.stdout and output_redir:
                     output_redir.write(p.stdout)
                 if p.Wait() != 0:
-                    return SyncNetworkHalfResult(False, remote_fetched)
+                    return SyncNetworkHalfResult(
+                        remote_fetched,
+                        GitError(
+                            "Unable to repack alternates", project=self.name
+                        ),
+                    )
                 platform_utils.remove(alternates_file)
 
         if self.worktree:
@@ -1356,7 +1419,7 @@ class Project(object):
             platform_utils.remove(
                 os.path.join(self.gitdir, "FETCH_HEAD"), missing_ok=True
             )
-        return SyncNetworkHalfResult(True, remote_fetched)
+        return SyncNetworkHalfResult(remote_fetched)
 
     def PostRepoUpgrade(self):
         self._InitHooks()
@@ -1409,16 +1472,27 @@ class Project(object):
         self.revisionId = revisionId
 
-    def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
+    def Sync_LocalHalf(
+        self, syncbuf, force_sync=False, submodules=False, errors=None
+    ):
         """Perform only the local IO portion of the sync process.
 
         Network access is not required.
         """
+        if errors is None:
+            errors = []
+
+        def fail(error: Exception):
+            errors.append(error)
+            syncbuf.fail(self, error)
+
         if not os.path.exists(self.gitdir):
-            syncbuf.fail(
-                self,
-                "Cannot checkout %s due to missing network sync; Run "
-                "`repo sync -n %s` first." % (self.name, self.name),
+            fail(
+                LocalSyncFail(
+                    "Cannot checkout %s due to missing network sync; Run "
+                    "`repo sync -n %s` first." % (self.name, self.name),
+                    project=self.name,
+                )
             )
             return
@@ -1438,10 +1512,12 @@ class Project(object):
             )
             bad_paths = paths & PROTECTED_PATHS
             if bad_paths:
-                syncbuf.fail(
-                    self,
-                    "Refusing to checkout project that writes to protected "
-                    "paths: %s" % (", ".join(bad_paths),),
+                fail(
+                    LocalSyncFail(
+                        "Refusing to checkout project that writes to protected "
+                        "paths: %s" % (", ".join(bad_paths),),
+                        project=self.name,
+                    )
                 )
                 return
@@ -1466,7 +1542,7 @@ class Project(object):
             # Currently on a detached HEAD. The user is assumed to
             # not have any local modifications worth worrying about.
             if self.IsRebaseInProgress():
-                syncbuf.fail(self, _PriorSyncFailedError())
+                fail(_PriorSyncFailedError(project=self.name))
                 return
 
             if head == revid:
@@ -1486,7 +1562,7 @@ class Project(object):
                 if submodules:
                     self._SyncSubmodules(quiet=True)
             except GitError as e:
-                syncbuf.fail(self, e)
+                fail(e)
                 return
             self._CopyAndLinkFiles()
             return
@@ -1511,7 +1587,7 @@ class Project(object):
                 if submodules:
                     self._SyncSubmodules(quiet=True)
             except GitError as e:
-                syncbuf.fail(self, e)
+                fail(e)
                 return
             self._CopyAndLinkFiles()
             return
@@ -1534,10 +1610,13 @@ class Project(object):
                 # The user has published this branch and some of those
                 # commits are not yet merged upstream. We do not want
                 # to rewrite the published commits so we punt.
-                syncbuf.fail(
-                    self,
-                    "branch %s is published (but not merged) and is now "
-                    "%d commits behind" % (branch.name, len(upstream_gain)),
+                fail(
+                    LocalSyncFail(
+                        "branch %s is published (but not merged) and is "
+                        "now %d commits behind"
+                        % (branch.name, len(upstream_gain)),
+                        project=self.name,
+                    )
                 )
                 return
             elif pub == head:
@@ -1565,7 +1644,7 @@ class Project(object):
             return
 
         if self.IsDirty(consider_untracked=False):
-            syncbuf.fail(self, _DirtyError())
+            fail(_DirtyError(project=self.name))
             return
 
         # If the upstream switched on us, warn the user.
@@ -1615,7 +1694,7 @@ class Project(object):
                 self._SyncSubmodules(quiet=True)
                 self._CopyAndLinkFiles()
             except GitError as e:
-                syncbuf.fail(self, e)
+                fail(e)
                 return
         else:
             syncbuf.later1(self, _doff)
@@ -1687,12 +1766,12 @@ class Project(object):
                     file=sys.stderr,
                 )
             else:
-                print(
-                    "error: %s: Cannot remove project: uncommitted changes are "
-                    "present.\n" % (self.RelPath(local=False),),
-                    file=sys.stderr,
+                msg = (
+                    "error: %s: Cannot remove project: uncommitted"
+                    "changes are present.\n" % self.RelPath(local=False)
                 )
-                return False
+                print(msg, file=sys.stderr)
+                raise DeleteDirtyWorktreeError(msg, project=self)
 
         if not quiet:
             print(
@@ -1745,12 +1824,13 @@ class Project(object):
                 % (self.RelPath(local=False),),
                 file=sys.stderr,
             )
-            return False
+            raise DeleteWorktreeError(aggregate_errors=[e])
 
         # Delete everything under the worktree, except for directories that
        # contain another git project.
         dirs_to_remove = []
         failed = False
+        errors = []
         for root, dirs, files in platform_utils.walk(self.worktree):
             for f in files:
                 path = os.path.join(root, f)
@@ -1763,6 +1843,7 @@ class Project(object):
                         file=sys.stderr,
                     )
                     failed = True
+                    errors.append(e)
             dirs[:] = [
                 d
                 for d in dirs
@@ -1784,6 +1865,7 @@ class Project(object):
                         file=sys.stderr,
                     )
                     failed = True
+                    errors.append(e)
                 elif not platform_utils.listdir(d):
                     try:
                         platform_utils.rmdir(d)
@@ -1794,6 +1876,7 @@ class Project(object):
                         file=sys.stderr,
                     )
                     failed = True
+                    errors.append(e)
         if failed:
             print(
                 "error: %s: Failed to delete obsolete checkout."
@@ -1804,7 +1887,7 @@ class Project(object):
                 " Remove manually, then run `repo sync -l`.",
                 file=sys.stderr,
             )
-            return False
+            raise DeleteWorktreeError(aggregate_errors=errors)
 
         # Try deleting parent dirs if they are empty.
         path = self.worktree
@@ -2264,11 +2347,14 @@ class Project(object):
         cmd.append(self.revisionExpr)
 
         command = GitCommand(
-            self, cmd, cwd=cwd, capture_stdout=True, capture_stderr=True
+            self,
+            cmd,
+            cwd=cwd,
+            capture_stdout=True,
+            capture_stderr=True,
+            verify_command=True,
         )
-
-        if command.Wait() != 0:
-            raise GitError("git archive %s: %s" % (self.name, command.stderr))
+        command.Wait()
 
     def _RemoteFetch(
         self,
@@ -2289,7 +2375,7 @@ class Project(object):
         retry_fetches=2,
         retry_sleep_initial_sec=4.0,
         retry_exp_factor=2.0,
-    ):
+    ) -> bool:
         is_sha1 = False
         tag_name = None
         # The depth should not be used when fetching to a mirror because
@@ -2473,6 +2559,7 @@ class Project(object):
         retry_cur_sleep = retry_sleep_initial_sec
         ok = prune_tried = False
         for try_n in range(retry_fetches):
+            verify_command = try_n == retry_fetches - 1
             gitcmd = GitCommand(
                 self,
                 cmd,
@@ -2481,6 +2568,7 @@ class Project(object):
                 ssh_proxy=ssh_proxy,
                 merge_output=True,
                 capture_stdout=quiet or bool(output_redir),
+                verify_command=verify_command,
             )
             if gitcmd.stdout and not quiet and output_redir:
                 output_redir.write(gitcmd.stdout)
@@ -2732,7 +2820,9 @@ class Project(object):
         cmd.append("--")
         if GitCommand(self, cmd).Wait() != 0:
             if self._allrefs:
-                raise GitError("%s checkout %s " % (self.name, rev))
+                raise GitError(
+                    "%s checkout %s " % (self.name, rev), project=self.name
+                )
 
     def _CherryPick(self, rev, ffonly=False, record_origin=False):
         cmd = ["cherry-pick"]
@@ -2744,7 +2834,9 @@ class Project(object):
         cmd.append("--")
         if GitCommand(self, cmd).Wait() != 0:
             if self._allrefs:
-                raise GitError("%s cherry-pick %s " % (self.name, rev))
+                raise GitError(
+                    "%s cherry-pick %s " % (self.name, rev), project=self.name
+                )
 
     def _LsRemote(self, refs):
         cmd = ["ls-remote", self.remote.name, refs]
@@ -2760,7 +2852,9 @@ class Project(object):
         cmd.append("--")
         if GitCommand(self, cmd).Wait() != 0:
             if self._allrefs:
-                raise GitError("%s revert %s " % (self.name, rev))
+                raise GitError(
+                    "%s revert %s " % (self.name, rev), project=self.name
+                )
 
     def _ResetHard(self, rev, quiet=True):
         cmd = ["reset", "--hard"]
@@ -2768,7 +2862,9 @@ class Project(object):
             cmd.append("-q")
         cmd.append(rev)
         if GitCommand(self, cmd).Wait() != 0:
-            raise GitError("%s reset --hard %s " % (self.name, rev))
+            raise GitError(
+                "%s reset --hard %s " % (self.name, rev), project=self.name
+            )
 
     def _SyncSubmodules(self, quiet=True):
         cmd = ["submodule", "update", "--init", "--recursive"]
@@ -2776,7 +2872,8 @@ class Project(object):
             cmd.append("-q")
         if GitCommand(self, cmd).Wait() != 0:
             raise GitError(
-                "%s submodule update --init --recursive " % self.name
+                "%s submodule update --init --recursive " % self.name,
+                project=self.name,
             )
 
     def _Rebase(self, upstream, onto=None):
@@ -2785,14 +2882,18 @@ class Project(object):
             cmd.extend(["--onto", onto])
         cmd.append(upstream)
         if GitCommand(self, cmd).Wait() != 0:
-            raise GitError("%s rebase %s " % (self.name, upstream))
+            raise GitError(
+                "%s rebase %s " % (self.name, upstream), project=self.name
+            )
 
     def _FastForward(self, head, ffonly=False):
         cmd = ["merge", "--no-stat", head]
         if ffonly:
             cmd.append("--ff-only")
         if GitCommand(self, cmd).Wait() != 0:
-            raise GitError("%s merge %s " % (self.name, head))
+            raise GitError(
+                "%s merge %s " % (self.name, head), project=self.name
+            )
 
     def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
         init_git_dir = not os.path.exists(self.gitdir)
@@ -2964,7 +3065,9 @@ class Project(object):
                 try:
                     os.link(stock_hook, dst)
                 except OSError:
-                    raise GitError(self._get_symlink_error_message())
+                    raise GitError(
+                        self._get_symlink_error_message(), project=self.name
+                    )
             else:
                 raise
@@ -3065,7 +3168,8 @@ class Project(object):
                 "work tree. If you're comfortable with the "
                 "possibility of losing the work tree's git metadata,"
                 " use `repo sync --force-sync {0}` to "
-                "proceed.".format(self.RelPath(local=False))
+                "proceed.".format(self.RelPath(local=False)),
+                project=self.name,
             )
 
     def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
@@ -3175,7 +3279,7 @@ class Project(object):
         # If using an old layout style (a directory), migrate it.
         if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
-            self._MigrateOldWorkTreeGitDir(dotgit)
+            self._MigrateOldWorkTreeGitDir(dotgit, project=self.name)
 
         init_dotgit = not os.path.exists(dotgit)
         if self.use_git_worktrees:
@@ -3205,7 +3309,8 @@ class Project(object):
             cmd = ["read-tree", "--reset", "-u", "-v", HEAD]
             if GitCommand(self, cmd).Wait() != 0:
                 raise GitError(
-                    "Cannot initialize work tree for " + self.name
+                    "Cannot initialize work tree for " + self.name,
+                    project=self.name,
                 )
 
             if submodules:
                 self._SyncSubmodules(quiet=True)
             self._CopyAndLinkFiles()
@@ -3213,7 +3318,7 @@ class Project(object):
     @classmethod
-    def _MigrateOldWorkTreeGitDir(cls, dotgit):
+    def _MigrateOldWorkTreeGitDir(cls, dotgit, project=None):
         """Migrate the old worktree .git/ dir style to a symlink.
 
         This logic specifically only uses state from |dotgit| to figure out
@@ -3223,7 +3328,9 @@ class Project(object):
         """
         # Figure out where in .repo/projects/ it's pointing to.
         if not os.path.islink(os.path.join(dotgit, "refs")):
-            raise GitError(f"{dotgit}: unsupported checkout state")
+            raise GitError(
+                f"{dotgit}: unsupported checkout state", project=project
+            )
         gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, "refs")))
 
         # Remove known symlink paths that exist in .repo/projects/.
@@ -3271,7 +3378,10 @@ class Project(object):
                     f"{dotgit_path}: unknown file; please file a bug"
                 )
         if unknown_paths:
-            raise GitError("Aborting migration: " + "\n".join(unknown_paths))
+            raise GitError(
+                "Aborting migration: " + "\n".join(unknown_paths),
+                project=project,
+            )
 
         # Now walk the paths and sync the .git/ to .repo/projects/.
         for name in platform_utils.listdir(dotgit):
@@ -3537,12 +3647,9 @@ class Project(object):
                 gitdir=self._gitdir,
                 capture_stdout=True,
                 capture_stderr=True,
+                verify_command=True,
             )
-            if p.Wait() != 0:
-                raise GitError(
-                    "%s rev-list %s: %s"
-                    % (self._project.name, str(args), p.stderr)
-                )
+            p.Wait()
             return p.stdout.splitlines()
 
         def __getattr__(self, name):
@@ -3588,11 +3695,9 @@ class Project(object):
                     gitdir=self._gitdir,
                     capture_stdout=True,
                     capture_stderr=True,
+                    verify_command=True,
                 )
-                if p.Wait() != 0:
-                    raise GitError(
-                        "%s %s: %s" % (self._project.name, name, p.stderr)
-                    )
+                p.Wait()
                 r = p.stdout
                 if r.endswith("\n") and r.index("\n") == len(r) - 1:
                     return r[:-1]
@@ -3601,12 +3706,16 @@ class Project(object):
             return runner
 
 
-class _PriorSyncFailedError(Exception):
+class LocalSyncFail(RepoError):
+    """Default error when there is an Sync_LocalHalf error."""
+
+
+class _PriorSyncFailedError(LocalSyncFail):
     def __str__(self):
         return "prior sync failed; rebase still in progress"
 
 
-class _DirtyError(Exception):
+class _DirtyError(LocalSyncFail):
     def __str__(self):
         return "contains uncommitted changes"


@@ -118,7 +118,7 @@ If no project is specified try to use current directory as a project.
                 ),
                 file=sys.stderr,
             )
-            sys.exit(1)
+            raise NoSuchProjectError()
         else:
             project = projects[0]
             print("Defaulting to cwd project", project.name)

subcmds/sync.py

@@ -63,9 +63,16 @@ from command import (
     MirrorSafeCommand,
     WORKER_BATCH_SIZE,
 )
-from error import RepoChangedException, GitError
+from error import (
+    RepoChangedException,
+    GitError,
+    RepoExitError,
+    SyncError,
+    UpdateManifestError,
+    RepoUnhandledExceptionError,
+)
 import platform_utils
-from project import SyncBuffer
+from project import SyncBuffer, DeleteWorktreeError
 from progress import Progress, elapsed_str, jobs_str
 from repo_trace import Trace
 import ssh
@@ -94,6 +101,7 @@ class _FetchOneResult(NamedTuple):
     """
 
     success: bool
+    errors: List[Exception]
     project: Project
     start: float
     finish: float
@@ -110,6 +118,7 @@
     success: bool
     projects: Set[str]
+    errors: List[Exception]
 
 
 class _FetchMainResult(NamedTuple):
@@ -120,6 +129,7 @@
     """
 
     all_projects: List[Project]
+    errors: List[Exception]
 
 
 class _CheckoutOneResult(NamedTuple):
@@ -133,11 +143,24 @@
     """
 
     success: bool
+    errors: List[Exception]
     project: Project
     start: float
     finish: float
 
 
+class SuperprojectError(SyncError):
+    """Superproject sync repo."""
+
+
+class SyncFailFastError(SyncError):
+    """Sync exit error when --fail-fast set."""
+
+
+class SmartSyncError(SyncError):
+    """Smart sync exit error."""
+
+
 class Sync(Command, MirrorSafeCommand):
     COMMON = True
     MULTI_MANIFEST_SUPPORT = True
@@ -588,7 +611,7 @@ later is required to fix a server side protocol bug.
                     file=sys.stderr,
                 )
                 if update_result.fatal and opt.use_superproject is not None:
-                    sys.exit(1)
+                    raise SuperprojectError()
             if need_unload:
                 m.outer_client.manifest.Unload()
@@ -621,6 +644,7 @@ later is required to fix a server side protocol bug.
         self._sync_dict[k] = start
         success = False
         remote_fetched = False
+        errors = []
         buf = io.StringIO()
         try:
             sync_result = project.Sync_NetworkHalf(
@@ -644,6 +668,8 @@ later is required to fix a server side protocol bug.
             )
             success = sync_result.success
             remote_fetched = sync_result.remote_fetched
+            if sync_result.error:
+                errors.append(sync_result.error)
 
             output = buf.getvalue()
             if (opt.verbose or not success) and output:
@@ -659,6 +685,7 @@ later is required to fix a server side protocol bug.
             print(f"Keyboard interrupt while processing {project.name}")
         except GitError as e:
             print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
+            errors.append(e)
         except Exception as e:
             print(
                 "error: Cannot fetch %s (%s: %s)"
@@ -666,11 +693,14 @@ later is required to fix a server side protocol bug.
                 file=sys.stderr,
             )
             del self._sync_dict[k]
+            errors.append(e)
             raise
 
         finish = time.time()
         del self._sync_dict[k]
-        return _FetchOneResult(success, project, start, finish, remote_fetched)
+        return _FetchOneResult(
+            success, errors, project, start, finish, remote_fetched
+        )
 
     @classmethod
     def _FetchInitChild(cls, ssh_proxy):
@@ -701,6 +731,7 @@ later is required to fix a server side protocol bug.
         jobs = opt.jobs_network
         fetched = set()
         remote_fetched = set()
+        errors = []
         pm = Progress(
             "Fetching",
             len(projects),
@@ -745,6 +776,8 @@ later is required to fix a server side protocol bug.
                     finish,
                     success,
                 )
+                if result.errors:
+                    errors.extend(result.errors)
                 if result.remote_fetched:
                     remote_fetched.add(project)
                 # Check for any errors before running any more tasks.
@@ -813,7 +846,7 @@ later is required to fix a server side protocol bug.
         if not self.outer_client.manifest.IsArchive:
             self._GCProjects(projects, opt, err_event)
 
-        return _FetchResult(ret, fetched)
+        return _FetchResult(ret, fetched, errors)
 
     def _FetchMain(
         self, opt, args, all_projects, err_event, ssh_proxy, manifest
@@ -832,6 +865,7 @@ later is required to fix a server side protocol bug.
             List of all projects that should be checked out.
         """
         rp = manifest.repoProject
+        errors = []
 
         to_fetch = []
         now = time.time()
@@ -843,6 +877,9 @@ later is required to fix a server side protocol bug.
         result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
         success = result.success
         fetched = result.projects
+        if result.errors:
+            errors.extend(result.errors)
+
         if not success:
             err_event.set()
@@ -854,8 +891,11 @@ later is required to fix a server side protocol bug.
                     "\nerror: Exited sync due to fetch errors.\n",
                     file=sys.stderr,
                 )
-                sys.exit(1)
-            return _FetchMainResult([])
+                raise SyncError(
+                    "error: Exited sync due to fetch errors.",
+                    aggregate_errors=errors,
+                )
+            return _FetchMainResult([], errors)
 
         # Iteratively fetch missing and/or nested unregistered submodules.
         previously_missing_set = set()
@@ -883,11 +923,13 @@ later is required to fix a server side protocol bug.
             result = self._Fetch(missing, opt, err_event, ssh_proxy)
             success = result.success
             new_fetched = result.projects
+            if result.errors:
+                errors.extend(result.errors)
             if not success:
                 err_event.set()
             fetched.update(new_fetched)
 
-        return _FetchMainResult(all_projects)
+        return _FetchMainResult(all_projects, errors)
 
     def _CheckoutOne(self, detach_head, force_sync, project):
         """Checkout work tree for one project
@@ -905,8 +947,11 @@ later is required to fix a server side protocol bug.
             project.manifest.manifestProject.config, detach_head=detach_head
         )
         success = False
+        errors = []
         try:
-            project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
+            project.Sync_LocalHalf(
+                syncbuf, force_sync=force_sync, errors=errors
+            )
             success = syncbuf.Finish()
         except GitError as e:
             print(
@@ -914,6 +959,7 @@ later is required to fix a server side protocol bug.
                 % (project.name, str(e)),
                 file=sys.stderr,
             )
+            errors.append(e)
         except Exception as e:
             print(
                 "error: Cannot checkout %s: %s: %s"
@@ -925,9 +971,9 @@ later is required to fix a server side protocol bug.
         if not success:
             print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
         finish = time.time()
-        return _CheckoutOneResult(success, project, start, finish)
+        return _CheckoutOneResult(success, errors, project, start, finish)
 
-    def _Checkout(self, all_projects, opt, err_results):
+    def _Checkout(self, all_projects, opt, err_results, checkout_errors):
         """Checkout projects listed in all_projects
 
         Args:
@@ -949,6 +995,10 @@ later is required to fix a server side protocol bug.
             self.event_log.AddSync(
                 project, event_log.TASK_SYNC_LOCAL, start, finish, success
             )
+
+            if result.errors:
+                checkout_errors.extend(result.errors)
+
             # Check for any errors before running any more tasks.
             # ...we'll let existing jobs finish, though.
             if success:
@@ -1214,10 +1264,9 @@ later is required to fix a server side protocol bug.
                 revisionId=None,
                 groups=None,
             )
-            if not project.DeleteWorktree(
+            project.DeleteWorktree(
                 quiet=opt.quiet, force=opt.force_remove_dirty
-            ):
-                return 1
+            )
 
         new_project_paths.sort()
         with open(file_path, "w") as fd:
@@ -1260,7 +1309,7 @@ later is required to fix a server side protocol bug.
                         file=sys.stderr,
                     )
                     platform_utils.remove(copylinkfile_path)
-                    return False
+                    raise
 
         need_remove_files = []
         need_remove_files.extend(
@@ -1285,12 +1334,10 @@ later is required to fix a server side protocol bug.
     def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
         if not manifest.manifest_server:
-            print(
+            raise SmartSyncError(
                 "error: cannot smart sync: no manifest server defined in "
-                "manifest",
-                file=sys.stderr,
+                "manifest"
             )
-            sys.exit(1)
 
         manifest_server = manifest.manifest_server
         if not opt.quiet:
@@ -1368,33 +1415,28 @@ later is required to fix a server side protocol bug.
                     with open(smart_sync_manifest_path, "w") as f:
                         f.write(manifest_str)
                 except IOError as e:
-                    print(
+                    raise SmartSyncError(
                         "error: cannot write manifest to %s:\n%s"
                         % (smart_sync_manifest_path, e),
-                        file=sys.stderr,
+                        aggregate_errors=[e],
                     )
-                    sys.exit(1)
                 self._ReloadManifest(manifest_name, manifest)
             else:
-                print(
-                    "error: manifest server RPC call failed: %s" % manifest_str,
-                    file=sys.stderr,
+                raise SmartSyncError(
+                    "error: manifest server RPC call failed: %s" % manifest_str
                 )
-                sys.exit(1)
         except (socket.error, IOError, xmlrpc.client.Fault) as e:
-            print(
+            raise SmartSyncError(
                 "error: cannot connect to manifest server %s:\n%s"
                 % (manifest.manifest_server, e),
-                file=sys.stderr,
+                aggregate_errors=[e],
             )
-            sys.exit(1)
         except xmlrpc.client.ProtocolError as e:
-            print(
+            raise SmartSyncError(
                 "error: cannot connect to manifest server %s:\n%d %s"
                 % (manifest.manifest_server, e.errcode, e.errmsg),
-                file=sys.stderr,
+                aggregate_errors=[e],
             )
-            sys.exit(1)
 
         return manifest_name
@@ -1436,7 +1478,7 @@ later is required to fix a server side protocol bug.
         """
         if not opt.local_only:
             start = time.time()
-            success = mp.Sync_NetworkHalf(
+            result = mp.Sync_NetworkHalf(
                 quiet=opt.quiet,
                 verbose=opt.verbose,
                 current_branch_only=self._GetCurrentBranchOnly(
@@ -1453,19 +1495,24 @@ later is required to fix a server side protocol bug.
             )
             finish = time.time()
             self.event_log.AddSync(
-                mp, event_log.TASK_SYNC_NETWORK, start, finish, success
+                mp, event_log.TASK_SYNC_NETWORK, start, finish, result.success
             )
 
         if mp.HasChanges:
+            errors = []
             syncbuf = SyncBuffer(mp.config)
             start = time.time()
-            mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
+            mp.Sync_LocalHalf(
+                syncbuf, submodules=mp.manifest.HasSubmodules, errors=errors
+            )
             clean = syncbuf.Finish()
             self.event_log.AddSync(
                 mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
             )
             if not clean:
-                sys.exit(1)
+                raise UpdateManifestError(
+                    aggregate_errors=errors, project=mp.name
+                )
             self._ReloadManifest(manifest_name, mp.manifest)
 
     def ValidateOptions(self, opt, args):
@@ -1546,6 +1593,15 @@ later is required to fix a server side protocol bug.
             opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
 
     def Execute(self, opt, args):
+        errors = []
+        try:
+            self._ExecuteHelper(opt, args, errors)
+        except RepoExitError:
+            raise
+        except (KeyboardInterrupt, Exception) as e:
+            raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
+
+    def _ExecuteHelper(self, opt, args, errors):
         manifest = self.outer_manifest
         if not opt.outer_manifest:
             manifest = self.manifest
@@ -1695,6 +1751,8 @@ later is required to fix a server side protocol bug.
             result = self._FetchMain(
                 opt, args, all_projects, err_event, ssh_proxy, manifest
             )
+            if result.errors:
+                errors.extend(result.errors)
             all_projects = result.all_projects
 
         if opt.network_only:
@@ -1712,36 +1770,47 @@ later is required to fix a server side protocol bug.
                     "`repo sync -l` will update some local checkouts.",
                     file=sys.stderr,
                 )
-                sys.exit(1)
+                raise SyncFailFastError(aggregate_errors=errors)
 
         for m in self.ManifestList(opt):
             if m.IsMirror or m.IsArchive:
                 # Bail out now, we have no working tree.
                 continue
 
-            if self.UpdateProjectList(opt, m):
+            try:
+                self.UpdateProjectList(opt, m)
+            except Exception as e:
                 err_event.set()
                 err_update_projects = True
+                errors.append(e)
+                if isinstance(e, DeleteWorktreeError):
+                    errors.extend(e.aggregate_errors)
                 if opt.fail_fast:
                     print(
                         "\nerror: Local checkouts *not* updated.",
                         file=sys.stderr,
                     )
-                    sys.exit(1)
+                    raise SyncFailFastError(aggregate_errors=errors)
 
-            err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
-            if err_update_linkfiles:
+            err_update_linkfiles = False
+            try:
+                self.UpdateCopyLinkfileList(m)
+            except Exception as e:
+                err_update_linkfiles = True
+                errors.append(e)
                 err_event.set()
                 if opt.fail_fast:
                     print(
                         "\nerror: Local update copyfile or linkfile failed.",
                         file=sys.stderr,
                     )
-                    sys.exit(1)
+                    raise SyncFailFastError(aggregate_errors=errors)
 
         err_results = []
         # NB: We don't exit here because this is the last step.
-        err_checkout = not self._Checkout(all_projects, opt, err_results)
+        err_checkout = not self._Checkout(
+            all_projects, opt, err_results, errors
+        )
         if err_checkout:
             err_event.set()
@@ -1784,7 +1853,7 @@ later is required to fix a server side protocol bug.
                 "error.",
                 file=sys.stderr,
             )
-            sys.exit(1)
+            raise SyncError(aggregate_errors=errors)
 
         # Log the previous sync analysis state from the config.
         self.git_event_log.LogDataConfigEvents(
@@ -1842,7 +1911,7 @@ def _PostRepoFetch(rp, repo_verify=True, verbose=False):
         try:
             rp.work_git.reset("--keep", new_rev)
         except GitError as e:
-            sys.exit(str(e))
+            raise RepoUnhandledExceptionError(e)
         print("info: Restarting repo with latest version", file=sys.stderr)
         raise RepoChangedException(["--repo-upgraded"])
     else:
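
The Execute()/_ExecuteHelper() split above follows a simple pattern; a
reduced, self-contained sketch (the two error classes are minimal
stand-ins for the ones sync.py imports from error.py):

    class RepoExitError(Exception):
        def __init__(self, *args, aggregate_errors=None):
            super().__init__(*args)
            self.aggregate_errors = aggregate_errors or []

    class RepoUnhandledExceptionError(RepoExitError):
        pass

    def execute(helper, opt, args):
        errors = []
        try:
            helper(opt, args, errors)  # every step appends into `errors`
        except RepoExitError:
            raise  # already classified; propagate as-is
        except (KeyboardInterrupt, Exception) as e:
            # Unexpected failures still surface whatever was collected so far.
            raise RepoUnhandledExceptionError(e, aggregate_errors=errors)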

tests/test_subcmds_sync.py

@@ -17,12 +17,15 @@ import os
 import shutil
 import tempfile
 import unittest
+import time
 from unittest import mock
 
 import pytest
 
 import command
 from subcmds import sync
+from project import SyncNetworkHalfResult
+from error import GitError, RepoExitError
 
 
 @pytest.mark.parametrize(
@@ -233,3 +236,83 @@ class GetPreciousObjectsState(unittest.TestCase):
         self.assertFalse(
             self.cmd._GetPreciousObjectsState(self.project, self.opt)
         )
+
+
+class SyncCommand(unittest.TestCase):
+    """Tests for cmd.Execute."""
+
+    def setUp(self):
+        """Common setup."""
+        self.repodir = tempfile.mkdtemp(".repo")
+        self.manifest = manifest = mock.MagicMock(
+            repodir=self.repodir,
+        )
+
+        git_event_log = mock.MagicMock(ErrorEvent=mock.Mock(return_value=None))
+        self.outer_client = outer_client = mock.MagicMock()
+        outer_client.manifest.IsArchive = True
+        manifest.manifestProject.worktree = "worktree_path/"
+        manifest.repoProject.LastFetch = time.time()
+        self.sync_network_half_error = None
+        self.sync_local_half_error = None
+        self.cmd = sync.Sync(
+            manifest=manifest,
+            outer_client=outer_client,
+            git_event_log=git_event_log,
+        )
+
+        def Sync_NetworkHalf(*args, **kwargs):
+            return SyncNetworkHalfResult(True, self.sync_network_half_error)
+
+        def Sync_LocalHalf(*args, **kwargs):
+            if self.sync_local_half_error:
+                raise self.sync_local_half_error
+
+        self.project = p = mock.MagicMock(
+            use_git_worktrees=False,
+            UseAlternates=False,
+            name="project",
+            Sync_NetworkHalf=Sync_NetworkHalf,
+            Sync_LocalHalf=Sync_LocalHalf,
+            RelPath=mock.Mock(return_value="rel_path"),
+        )
+        p.manifest.GetProjectsWithName.return_value = [p]
+
+        mock.patch.object(
+            sync,
+            "_PostRepoFetch",
+            return_value=None,
+        ).start()
+
+        mock.patch.object(
+            self.cmd, "GetProjects", return_value=[self.project]
+        ).start()
+
+        opt, _ = self.cmd.OptionParser.parse_args([])
+        opt.clone_bundle = False
+        opt.jobs = 4
+        opt.quiet = True
+        opt.use_superproject = False
+        opt.current_branch_only = True
+        opt.optimized_fetch = True
+        opt.retry_fetches = 1
+        opt.prune = False
+        opt.auto_gc = False
+        opt.repo_verify = False
+        self.opt = opt
+
+    def tearDown(self):
+        mock.patch.stopall()
+
+    def test_command_exit_error(self):
+        """Ensure unsuccessful commands raise expected errors."""
+        self.sync_network_half_error = GitError(
+            "sync_network_half_error error", project=self.project
+        )
+        self.sync_local_half_error = GitError(
+            "sync_local_half_error", project=self.project
+        )
+        with self.assertRaises(RepoExitError) as e:
+            self.cmd.Execute(self.opt, [])
+        self.assertIn(self.sync_local_half_error, e.aggregate_errors)
+        self.assertIn(self.sync_network_half_error, e.aggregate_errors)
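
One note on the assertion style in test_command_exit_error: with
unittest's assertRaises used as a context manager, the raised error is
exposed on the context object's .exception attribute, so an equivalent
standalone test would typically read (FakeExitError is a stand-in
defined only for this sketch):

    import unittest

    class FakeExitError(Exception):
        def __init__(self, aggregate_errors=None):
            super().__init__()
            self.aggregate_errors = aggregate_errors or []

    class Example(unittest.TestCase):
        def test_aggregate_errors_surface(self):
            inner = ValueError("boom")
            with self.assertRaises(FakeExitError) as ctx:
                raise FakeExitError(aggregate_errors=[inner])
            # The raised instance lives on ctx.exception.
            self.assertIn(inner, ctx.exception.aggregate_errors)

    if __name__ == "__main__":
        unittest.main()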