mirror of
https://gerrit.googlesource.com/git-repo
synced 2024-12-21 07:16:21 +00:00
sync: Track last completed fetch/checkout

Save the latest time any project is fetched and checked out. This will
be used to detect partial checkouts.

Bug: b/286126621
Change-Id: I53b264dc70ba168d506076dbd693ef79a696b61d
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/380514
Commit-Queue: Gavin Mak <gavinmak@google.com>
Reviewed-by: Joanna Wang <jojwang@google.com>
Tested-by: Gavin Mak <gavinmak@google.com>
This commit is contained in:
parent
c657844efe
commit
1d2e99d028
3
main.py
3
main.py
@ -427,7 +427,8 @@ class _Repo(object):
|
||||
if not ok:
|
||||
exception_name = type(e).__name__
|
||||
git_trace2_event_log.ErrorEvent(
|
||||
f"RepoExitError:{exception_name}")
|
||||
f"RepoExitError:{exception_name}"
|
||||
)
|
||||
raise
|
||||
|
||||
try:
|
||||
|
@ -737,6 +737,7 @@ later is required to fix a server side protocol bug.
|
||||
start = result.start
|
||||
finish = result.finish
|
||||
self._fetch_times.Set(project, finish - start)
|
||||
self._local_sync_state.SetFetchTime(project)
|
||||
self.event_log.AddSync(
|
||||
project,
|
||||
event_log.TASK_SYNC_NETWORK,
|
||||
@ -807,6 +808,7 @@ later is required to fix a server side protocol bug.
|
||||
sync_event.set()
|
||||
pm.end()
|
||||
self._fetch_times.Save()
|
||||
self._local_sync_state.Save()
|
||||
|
||||
if not self.outer_client.manifest.IsArchive:
|
||||
self._GCProjects(projects, opt, err_event)
|
||||
@ -949,7 +951,9 @@ later is required to fix a server side protocol bug.
|
||||
)
|
||||
# Check for any errors before running any more tasks.
|
||||
# ...we'll let existing jobs finish, though.
|
||||
if not success:
|
||||
if success:
|
||||
self._local_sync_state.SetCheckoutTime(project)
|
||||
else:
|
||||
ret = False
|
||||
err_results.append(
|
||||
project.RelPath(local=opt.this_manifest_only)
|
||||
@ -961,21 +965,19 @@ later is required to fix a server side protocol bug.
|
||||
pm.update(msg=project.name)
|
||||
return ret
|
||||
|
||||
return (
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs_checkout,
|
||||
functools.partial(
|
||||
self._CheckoutOne, opt.detach_head, opt.force_sync
|
||||
),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress(
|
||||
"Checking out", len(all_projects), quiet=opt.quiet
|
||||
),
|
||||
)
|
||||
and not err_results
|
||||
proc_res = self.ExecuteInParallel(
|
||||
opt.jobs_checkout,
|
||||
functools.partial(
|
||||
self._CheckoutOne, opt.detach_head, opt.force_sync
|
||||
),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress("Checking out", len(all_projects), quiet=opt.quiet),
|
||||
)
|
||||
|
||||
self._local_sync_state.Save()
|
||||
return proc_res and not err_results
|
||||
|
||||
@staticmethod
|
||||
def _GetPreciousObjectsState(project: Project, opt):
|
||||
"""Get the preciousObjects state for the project.
|
||||
@ -1684,6 +1686,7 @@ later is required to fix a server side protocol bug.
|
||||
)
|
||||
|
||||
self._fetch_times = _FetchTimes(manifest)
|
||||
self._local_sync_state = _LocalSyncState(manifest)
|
||||
if not opt.local_only:
|
||||
with multiprocessing.Manager() as manager:
|
||||
with ssh.ProxyManager(manager) as ssh_proxy:
|
||||
@ -1898,12 +1901,64 @@ class _FetchTimes(object):
|
||||
platform_utils.remove(self._path, missing_ok=True)
|
||||
|
||||
|
||||
class _LocalSyncState(object):
    """Persistent record of each project's last fetch/checkout time.

    State is stored as JSON under the manifest repodir, keyed by project
    relpath.  Every timestamp written by one instance uses the time the
    instance was created, so a single sync run stamps all projects alike.
    """

    _LAST_FETCH = "last_fetch"
    _LAST_CHECKOUT = "last_checkout"

    def __init__(self, manifest):
        self._path = os.path.join(
            manifest.repodir, ".repo_localsyncstate.json"
        )
        # One shared timestamp for everything recorded by this instance.
        self._time = time.time()
        self._state = None
        self._Load()

    def SetFetchTime(self, project):
        """Record that |project| was just fetched."""
        self._Set(project, self._LAST_FETCH)

    def SetCheckoutTime(self, project):
        """Record that |project| was just checked out."""
        self._Set(project, self._LAST_CHECKOUT)

    def GetFetchTime(self, project):
        """Return the last recorded fetch time of |project|, or None."""
        return self._Get(project, self._LAST_FETCH)

    def GetCheckoutTime(self, project):
        """Return the last recorded checkout time of |project|, or None."""
        return self._Get(project, self._LAST_CHECKOUT)

    def _Get(self, project, key):
        """Look up |key| in the state entry for |project|, if any."""
        self._Load()
        entry = self._state.get(project.relpath)
        if entry is None:
            return None
        return entry.get(key)

    def _Set(self, project, key):
        """Stamp |key| for |project| with this instance's creation time."""
        self._state.setdefault(project.relpath, {})[key] = self._time

    def _Load(self):
        """Lazily read state from disk (at most once per instance).

        A missing or unparsable file is deleted and treated as empty state.
        """
        if self._state is not None:
            return
        try:
            with open(self._path) as f:
                self._state = json.load(f)
        except (IOError, ValueError):
            platform_utils.remove(self._path, missing_ok=True)
            self._state = {}

    def Save(self):
        """Write accumulated state to disk; best effort.

        On failure the state file is removed rather than left truncated.
        """
        if not self._state:
            return
        try:
            with open(self._path, "w") as f:
                json.dump(self._state, f, indent=2)
        except (IOError, TypeError):
            platform_utils.remove(self._path, missing_ok=True)
|
||||
|
||||
|
||||
# This is a replacement for xmlrpc.client.Transport using urllib2
|
||||
# and supporting persistent-http[s]. It cannot change hosts from
|
||||
# request to request like the normal transport, the real url
|
||||
# is passed during initialization.
|
||||
|
||||
|
||||
class PersistentTransport(xmlrpc.client.Transport):
|
||||
def __init__(self, orig_host):
|
||||
self.orig_host = orig_host
|
||||
|
@ -14,6 +14,8 @@
|
||||
"""Unittests for the subcmds/sync.py module."""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
@ -104,6 +106,79 @@ def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
|
||||
assert opts.jobs_checkout == jobs_check
|
||||
|
||||
|
||||
class LocalSyncState(unittest.TestCase):
    """Tests for _LocalSyncState."""

    _TIME = 10

    def setUp(self):
        """Common setup."""
        self.repodir = tempfile.mkdtemp(".repo")
        self.manifest = mock.MagicMock(repodir=self.repodir)
        self.state = self._new_state()

    def tearDown(self):
        """Common teardown."""
        shutil.rmtree(self.repodir)

    def _new_state(self):
        # Pin time.time() so every recorded timestamp equals _TIME.
        with mock.patch("time.time", return_value=self._TIME):
            return sync._LocalSyncState(self.manifest)

    def test_set(self):
        """Times are set."""
        proj = mock.MagicMock(relpath="projA")
        self.state.SetFetchTime(proj)
        self.state.SetCheckoutTime(proj)
        self.assertEqual(self.state.GetFetchTime(proj), self._TIME)
        self.assertEqual(self.state.GetCheckoutTime(proj), self._TIME)

    def test_update(self):
        """Times are updated."""
        existing = """
{
    "projB": {
        "last_fetch": 5,
        "last_checkout": 7
    }
}
"""
        with open(self.state._path, "w") as f:
            f.write(existing)

        # Re-create state so it reads from the file written above.
        self.state = self._new_state()
        proj_a = mock.MagicMock(relpath="projA")
        proj_b = mock.MagicMock(relpath="projB")
        self.assertEqual(self.state.GetFetchTime(proj_a), None)
        self.assertEqual(self.state.GetFetchTime(proj_b), 5)
        self.assertEqual(self.state.GetCheckoutTime(proj_b), 7)

        self.state.SetFetchTime(proj_a)
        self.state.SetFetchTime(proj_b)
        self.assertEqual(self.state.GetFetchTime(proj_a), self._TIME)
        self.assertEqual(self.state.GetFetchTime(proj_b), self._TIME)
        self.assertEqual(self.state.GetCheckoutTime(proj_b), 7)

    def test_save_to_file(self):
        """Data is saved under repodir."""
        proj = mock.MagicMock(relpath="projA")
        self.state.SetFetchTime(proj)
        self.state.Save()
        self.assertEqual(
            os.listdir(self.repodir), [".repo_localsyncstate.json"]
        )

    def test_nonexistent_project(self):
        """Unsaved projects don't have data."""
        proj = mock.MagicMock(relpath="projC")
        self.assertEqual(self.state.GetFetchTime(proj), None)
        self.assertEqual(self.state.GetCheckoutTime(proj), None)
|
||||
|
||||
|
||||
class GetPreciousObjectsState(unittest.TestCase):
|
||||
"""Tests for _GetPreciousObjectsState."""
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user