sync: Use repo logger within sync

Bug: b/292704435
Change-Id: Iceb3ad5111e656a1ff9730ae5deb032a9b43b4a5
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/383454
Commit-Queue: Aravind Vasudevan <aravindvasudev@google.com>
Tested-by: Aravind Vasudevan <aravindvasudev@google.com>
Reviewed-by: Mike Frysinger <vapier@google.com>
This commit is contained in:
Aravind Vasudevan 2023-08-31 20:57:31 +00:00 committed by LUCI
parent 1e9f7b9e9e
commit e914ec293a
3 changed files with 102 additions and 142 deletions

View File

@ -15,7 +15,7 @@
"""Logic for printing user-friendly logs in repo."""
import logging
import multiprocessing
from typing import Any, List
from color import Coloring
@ -45,31 +45,25 @@ class ConfigMock:
class RepoLogger(logging.Logger):
    """Logger that colorizes repo output.

    Coloring is driven by a git config object; when none is supplied, a
    ConfigMock stand-in is used so the logger still works without a real
    repo configuration.
    """

    def __init__(self, name: str, config=None, **kwargs):
        """Initialize the logger.

        Args:
            name: Logger name, forwarded to logging.Logger.
            config: Optional git config used for color settings; falls back
                to ConfigMock when omitted.
        """
        super().__init__(name, **kwargs)
        self.config = config if config else ConfigMock()
        self.colorer = LogColoring(self.config)

    def error(self, msg: Any, *args, **kwargs):
        """Print error-level logs with coloring."""
        # str() allows callers to pass Exception instances (or any object)
        # directly as the message.
        colored_error = self.colorer.error(str(msg), *args)
        super().error(colored_error, **kwargs)

    def warning(self, msg: Any, *args, **kwargs):
        """Print warning-level logs with coloring."""
        colored_warning = self.colorer.warning(str(msg), *args)
        super().warning(colored_warning, **kwargs)

    def log_aggregated_errors(self, errors: List[Exception]):
        """Print a summary section for the given aggregated errors."""
        super().error(self.colorer.error(SEPARATOR))
        super().error(
            self.colorer.error("Repo command failed due to following errors:")
        )
        super().error("\n".join(map(str, errors)))

View File

@ -22,7 +22,6 @@ import netrc
import optparse
import os
import socket
import sys
import tempfile
import time
from typing import List, NamedTuple, Set
@ -74,6 +73,7 @@ from project import DeleteWorktreeError
from project import Project
from project import RemoteSpec
from project import SyncBuffer
from repo_logging import RepoLogger
from repo_trace import Trace
import ssh
from wrapper import Wrapper
@ -88,6 +88,8 @@ _AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
_REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")
logger = RepoLogger(__file__)
class _FetchOneResult(NamedTuple):
"""_FetchOne return value.
@ -580,9 +582,10 @@ later is required to fix a server side protocol bug.
superproject_logging_data["superproject"] = False
superproject_logging_data["noworktree"] = True
if opt.use_superproject is not False:
print(
f"{m.path_prefix}: not using superproject because "
"there is no working tree."
logger.warning(
"%s: not using superproject because there is no "
"working tree.",
m.path_prefix,
)
if not use_super:
@ -602,13 +605,13 @@ later is required to fix a server side protocol bug.
need_unload = True
else:
if print_messages:
print(
f"{m.path_prefix}: warning: Update of revisionId from "
"superproject has failed, repo sync will not use "
"superproject to fetch the source. ",
"Please resync with the --no-use-superproject option "
"to avoid this repo warning.",
file=sys.stderr,
logger.warning(
"%s: warning: Update of revisionId from superproject "
"has failed, repo sync will not use superproject to "
"fetch the source. Please resync with the "
"--no-use-superproject option to avoid this repo "
"warning.",
m.path_prefix,
)
if update_result.fatal and opt.use_superproject is not None:
raise SuperprojectError()
@ -676,21 +679,22 @@ later is required to fix a server side protocol bug.
print("\n" + output.rstrip())
if not success:
print(
"error: Cannot fetch %s from %s"
% (project.name, project.remote.url),
file=sys.stderr,
logger.error(
"error: Cannot fetch %s from %s",
project.name,
project.remote.url,
)
except KeyboardInterrupt:
print(f"Keyboard interrupt while processing {project.name}")
logger.error("Keyboard interrupt while processing %s", project.name)
except GitError as e:
print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
logger.error("error.GitError: Cannot fetch %s", e)
errors.append(e)
except Exception as e:
print(
"error: Cannot fetch %s (%s: %s)"
% (project.name, type(e).__name__, str(e)),
file=sys.stderr,
logger.error(
"error: Cannot fetch %s (%s: %s)",
project.name,
type(e).__name__,
e,
)
del self._sync_dict[k]
errors.append(e)
@ -887,14 +891,13 @@ later is required to fix a server side protocol bug.
if opt.network_only:
# Bail out now; the rest touches the working tree.
if err_event.is_set():
print(
"\nerror: Exited sync due to fetch errors.\n",
file=sys.stderr,
)
raise SyncError(
e = SyncError(
"error: Exited sync due to fetch errors.",
aggregate_errors=errors,
)
logger.error(e)
raise e
return _FetchMainResult([], errors)
# Iteratively fetch missing and/or nested unregistered submodules.
@ -954,22 +957,21 @@ later is required to fix a server side protocol bug.
)
success = syncbuf.Finish()
except GitError as e:
print(
"error.GitError: Cannot checkout %s: %s"
% (project.name, str(e)),
file=sys.stderr,
logger.error(
"error.GitError: Cannot checkout %s: %s", project.name, e
)
errors.append(e)
except Exception as e:
print(
"error: Cannot checkout %s: %s: %s"
% (project.name, type(e).__name__, str(e)),
file=sys.stderr,
logger.error(
"error: Cannot checkout %s: %s: %s",
project.name,
type(e).__name__,
e,
)
raise
if not success:
print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
logger.error("error: Cannot checkout %s", project.name)
finish = time.time()
return _CheckoutOneResult(success, errors, project, start, finish)
@ -1088,25 +1090,27 @@ later is required to fix a server side protocol bug.
)
if expected:
if not opt.quiet:
print(
"\r%s: Shared project %s found, disabling pruning."
% (relpath, project.name)
logger.info(
"%s: Shared project %s found, disabling pruning.",
relpath,
project.name,
)
if git_require((2, 7, 0)):
project.EnableRepositoryExtension("preciousObjects")
else:
# This isn't perfect, but it's the best we can do with old
# git.
print(
"\r%s: WARNING: shared projects are unreliable when "
logger.warning(
"%s: WARNING: shared projects are unreliable when "
"using old versions of git; please upgrade to "
"git-2.7.0+." % (relpath,),
file=sys.stderr,
"git-2.7.0+.",
relpath,
)
project.config.SetString("gc.pruneExpire", "never")
else:
if not opt.quiet:
print(f"\r{relpath}: not shared, disabling pruning.")
logger.info("%s: not shared, disabling pruning.", relpath)
project.config.SetString("extensions.preciousObjects", None)
project.config.SetString("gc.pruneExpire", None)
@ -1303,10 +1307,9 @@ later is required to fix a server side protocol bug.
try:
old_copylinkfile_paths = json.load(fp)
except Exception:
print(
"error: %s is not a json formatted file."
% copylinkfile_path,
file=sys.stderr,
logger.error(
"error: %s is not a json formatted file.",
copylinkfile_path,
)
platform_utils.remove(copylinkfile_path)
raise
@ -1341,7 +1344,7 @@ later is required to fix a server side protocol bug.
manifest_server = manifest.manifest_server
if not opt.quiet:
print("Using manifest server %s" % manifest_server)
logger.info("Using manifest server %s", manifest_server)
if "@" not in manifest_server:
username = None
@ -1363,15 +1366,12 @@ later is required to fix a server side protocol bug.
if auth:
username, _account, password = auth
else:
print(
"No credentials found for %s in .netrc"
% parse_result.hostname,
file=sys.stderr,
logger.error(
"No credentials found for %s in .netrc",
parse_result.hostname,
)
except netrc.NetrcParseError as e:
print(
"Error parsing .netrc file: %s" % e, file=sys.stderr
)
logger.error("Error parsing .netrc file: %s", e)
if username and password:
manifest_server = manifest_server.replace(
@ -1517,10 +1517,9 @@ later is required to fix a server side protocol bug.
def ValidateOptions(self, opt, args):
if opt.force_broken:
print(
logger.warning(
"warning: -f/--force-broken is now the default behavior, and "
"the options are deprecated",
file=sys.stderr,
"the options are deprecated"
)
if opt.network_only and opt.detach_head:
self.OptionParser.error("cannot combine -n and -d")
@ -1545,11 +1544,12 @@ later is required to fix a server side protocol bug.
opt.prune = True
if opt.auto_gc is None and _AUTO_GC:
print(
f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
f"{_REPO_AUTO_GC} is deprecated and will be removed in a ",
"future release. Use `--auto-gc` instead.",
file=sys.stderr,
logger.error(
"Will run `git gc --auto` because %s is set. %s is deprecated "
"and will be removed in a future release. Use `--auto-gc` "
"instead.",
_REPO_AUTO_GC,
_REPO_AUTO_GC,
)
opt.auto_gc = True
@ -1626,10 +1626,10 @@ later is required to fix a server side protocol bug.
try:
platform_utils.remove(smart_sync_manifest_path)
except OSError as e:
print(
logger.error(
"error: failed to remove existing smart sync override "
"manifest: %s" % e,
file=sys.stderr,
"manifest: %s",
e,
)
err_event = multiprocessing.Event()
@ -1640,11 +1640,10 @@ later is required to fix a server side protocol bug.
if cb:
base = rp.GetBranch(cb).merge
if not base or not base.startswith("refs/heads/"):
print(
logger.warning(
"warning: repo is not tracking a remote branch, so it will "
"not receive updates; run `repo init --repo-rev=stable` to "
"fix.",
file=sys.stderr,
"fix."
)
for m in self.ManifestList(opt):
@ -1667,7 +1666,7 @@ later is required to fix a server side protocol bug.
if opt.mp_update:
self._UpdateAllManifestProjects(opt, mp, manifest_name)
else:
print("Skipping update of local manifest project.")
logger.info("Skipping update of local manifest project.")
# Now that the manifests are up-to-date, setup options whose defaults
# might be in the manifest.
@ -1719,12 +1718,11 @@ later is required to fix a server side protocol bug.
if err_event.is_set():
err_network_sync = True
if opt.fail_fast:
print(
"\nerror: Exited sync due to fetch errors.\n"
logger.error(
"error: Exited sync due to fetch errors.\n"
"Local checkouts *not* updated. Resolve network issues "
"& retry.\n"
"`repo sync -l` will update some local checkouts.",
file=sys.stderr,
"`repo sync -l` will update some local checkouts."
)
raise SyncFailFastError(aggregate_errors=errors)
@ -1742,10 +1740,7 @@ later is required to fix a server side protocol bug.
if isinstance(e, DeleteWorktreeError):
errors.extend(e.aggregate_errors)
if opt.fail_fast:
print(
"\nerror: Local checkouts *not* updated.",
file=sys.stderr,
)
logger.error("error: Local checkouts *not* updated.")
raise SyncFailFastError(aggregate_errors=errors)
err_update_linkfiles = False
@ -1756,9 +1751,8 @@ later is required to fix a server side protocol bug.
errors.append(e)
err_event.set()
if opt.fail_fast:
print(
"\nerror: Local update copyfile or linkfile failed.",
file=sys.stderr,
logger.error(
"error: Local update copyfile or linkfile failed."
)
raise SyncFailFastError(aggregate_errors=errors)
@ -1781,12 +1775,10 @@ later is required to fix a server side protocol bug.
# If we saw an error, exit with code 1 so that other scripts can check.
if err_event.is_set():
# Add a new line so it's easier to read.
print("\n", file=sys.stderr)
def print_and_log(err_msg):
self.git_event_log.ErrorEvent(err_msg)
print(err_msg, file=sys.stderr)
logger.error("%s", err_msg)
print_and_log("error: Unable to fully sync the tree")
if err_network_sync:
@ -1799,15 +1791,11 @@ later is required to fix a server side protocol bug.
print_and_log("error: Checking out local projects failed.")
if err_results:
# Don't log repositories, as it may contain sensitive info.
print(
"Failing repos:\n%s" % "\n".join(err_results),
file=sys.stderr,
)
logger.error("Failing repos:\n%s", "\n".join(err_results))
# Not useful to log.
print(
logger.error(
'Try re-running with "-j1 --fail-fast" to exit at the first '
"error.",
file=sys.stderr,
"error."
)
raise SyncError(aggregate_errors=errors)
@ -1824,14 +1812,13 @@ later is required to fix a server side protocol bug.
self._local_sync_state.PruneRemovedProjects()
if self._local_sync_state.IsPartiallySynced():
print(
logger.warning(
"warning: Partial syncs are not supported. For the best "
"experience, sync the entire tree.",
file=sys.stderr,
"experience, sync the entire tree."
)
if not opt.quiet:
print("repo sync has finished successfully.")
logger.info("repo sync has finished successfully.")
def _PostRepoUpgrade(manifest, quiet=False):
@ -1854,7 +1841,7 @@ def _PostRepoUpgrade(manifest, quiet=False):
def _PostRepoFetch(rp, repo_verify=True, verbose=False):
if rp.HasChanges:
print("info: A new version of repo is available", file=sys.stderr)
logger.info("info: A new version of repo is available")
wrapper = Wrapper()
try:
rev = rp.bare_git.describe(rp.GetRevisionId())
@ -1876,18 +1863,14 @@ def _PostRepoFetch(rp, repo_verify=True, verbose=False):
rp.work_git.reset("--keep", new_rev)
except GitError as e:
raise RepoUnhandledExceptionError(e)
print("info: Restarting repo with latest version", file=sys.stderr)
logger.info("info: Restarting repo with latest version")
raise RepoChangedException(["--repo-upgraded"])
else:
print(
"warning: Skipped upgrade to unverified version",
file=sys.stderr,
)
logger.warning("warning: Skipped upgrade to unverified version")
else:
if verbose:
print(
"repo version %s is current" % rp.work_git.describe(HEAD),
file=sys.stderr,
logger.info(
"repo version %s is current", rp.work_git.describe(HEAD)
)

View File

@ -23,7 +23,6 @@ class TestRepoLogger(unittest.TestCase):
def test_error_logs_error(self):
"""Test if error fn outputs logs."""
logger = RepoLogger(__name__)
RepoLogger.errors[:] = []
result = None
def mock_handler(log):
@ -42,7 +41,6 @@ class TestRepoLogger(unittest.TestCase):
def test_warning_logs_error(self):
"""Test if warning fn outputs logs."""
logger = RepoLogger(__name__)
RepoLogger.errors[:] = []
result = None
def mock_handler(log):
@ -58,30 +56,9 @@ class TestRepoLogger(unittest.TestCase):
self.assertEqual(result, "You know the rules and so do I (do I)")
def test_error_aggregates_error_msg(self):
"""Test if error fn aggregates error logs."""
logger = RepoLogger(__name__)
RepoLogger.errors[:] = []
logger.error("A full commitment's what I'm thinking of")
logger.error("You wouldn't get this from any other guy")
logger.error("I just wanna tell you how I'm feeling")
logger.error("Gotta make you understand")
self.assertEqual(
RepoLogger.errors[:],
[
"A full commitment's what I'm thinking of",
"You wouldn't get this from any other guy",
"I just wanna tell you how I'm feeling",
"Gotta make you understand",
],
)
def test_log_aggregated_errors_logs_aggregated_errors(self):
"""Test if log_aggregated_errors outputs aggregated errors."""
logger = RepoLogger(__name__)
RepoLogger.errors[:] = []
result = []
def mock_handler(log):
@ -96,7 +73,13 @@ class TestRepoLogger(unittest.TestCase):
logger.error("Never gonna give you up")
logger.error("Never gonna let you down")
logger.error("Never gonna run around and desert you")
logger.log_aggregated_errors()
logger.log_aggregated_errors(
[
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
]
)
self.assertEqual(
result,