Clean up duplicate logic in subcmds/sync.py.

The fetch logic is now shared between the jobs == 1 and
jobs > 1 cases. This refactoring also fixes a bug where
opt.force_broken was not honored when jobs > 1.

Change-Id: Ic886f3c3c00f3d8fc73a65366328fed3c44dc3be
Author: David James  2014-01-09 18:51:58 -08:00
Committed by: David Pursehouse
parent 565480588d
commit 89ece429fb

diff --git a/subcmds/sync.py b/subcmds/sync.py
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -219,7 +219,7 @@ later is required to fix a server side protocol bug.
                   dest='repo_upgraded', action='store_true',
                   help=SUPPRESS_HELP)
 
-  def _FetchProjectList(self, opt, projects, *args):
+  def _FetchProjectList(self, opt, projects, *args, **kwargs):
     """Main function of the fetch threads when jobs are > 1.
 
     Delegates most of the work to _FetchHelper.
@@ -227,11 +227,11 @@ later is required to fix a server side protocol bug.
     Args:
       opt: Program options returned from optparse. See _Options().
       projects: Projects to fetch.
-      *args: Remaining arguments to pass to _FetchHelper. See the
+      *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
           _FetchHelper docstring for details.
     """
     for project in projects:
-      success = self._FetchHelper(opt, project, *args)
+      success = self._FetchHelper(opt, project, *args, **kwargs)
       if not success and not opt.force_broken:
         break
 
@@ -304,54 +304,39 @@ later is required to fix a server side protocol bug.
 
   def _Fetch(self, projects, opt):
     fetched = set()
+    lock = _threading.Lock()
     pm = Progress('Fetching projects', len(projects))
-    if self.jobs == 1:
-      for project in projects:
-        pm.update()
-        if not opt.quiet:
-          print('Fetching project %s' % project.name)
-        if project.Sync_NetworkHalf(
-            quiet=opt.quiet,
-            current_branch_only=opt.current_branch_only,
-            clone_bundle=not opt.no_clone_bundle,
-            no_tags=opt.no_tags,
-            archive=self.manifest.IsArchive):
-          fetched.add(project.gitdir)
-        else:
-          print('error: Cannot fetch %s' % project.name, file=sys.stderr)
-          if opt.force_broken:
-            print('warn: --force-broken, continuing to sync', file=sys.stderr)
-          else:
-            sys.exit(1)
-    else:
-      objdir_project_map = dict()
-      for project in projects:
-        objdir_project_map.setdefault(project.objdir, []).append(project)
+
+    objdir_project_map = dict()
+    for project in projects:
+      objdir_project_map.setdefault(project.objdir, []).append(project)
 
-      threads = set()
-      lock = _threading.Lock()
-      sem = _threading.Semaphore(self.jobs)
-      err_event = _threading.Event()
-      for project_list in objdir_project_map.values():
-        # Check for any errors before starting any new threads.
-        # ...we'll let existing threads finish, though.
-        if err_event.isSet():
-          break
+    threads = set()
+    sem = _threading.Semaphore(self.jobs)
+    err_event = _threading.Event()
+    for project_list in objdir_project_map.values():
+      # Check for any errors before running any more tasks.
+      # ...we'll let existing threads finish, though.
+      if err_event.isSet() and not opt.force_broken:
+        break
 
-        sem.acquire()
+      sem.acquire()
+      kwargs = dict(opt=opt,
+                    projects=project_list,
+                    lock=lock,
+                    fetched=fetched,
+                    pm=pm,
+                    sem=sem,
+                    err_event=err_event)
+      if self.jobs > 1:
         t = _threading.Thread(target = self._FetchProjectList,
-                              args = (opt,
-                                      project_list,
-                                      lock,
-                                      fetched,
-                                      pm,
-                                      sem,
-                                      err_event))
+                              kwargs = kwargs)
         # Ensure that Ctrl-C will not freeze the repo process.
         t.daemon = True
         threads.add(t)
         t.start()
+      else:
+        self._FetchProjectList(**kwargs)
 
-      for t in threads:
-        t.join()
+    for t in threads:
+      t.join()
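
The change above boils down to building the worker arguments once and then either calling the helper inline (jobs == 1) or handing the same kwargs to a daemon thread (jobs > 1), with an error event that only stops new work when --force-broken is not set. Below is a minimal standalone sketch of that pattern; the names (fetch_one, fetch_list, fetch_all) and the sample project lists are illustrative stand-ins, not the repo tool's actual API.

    # sketch.py - illustrative only; not repo's sync.py
    import threading

    def fetch_one(name):
      """Stand-in for a single network fetch; returns False on failure."""
      print('Fetching project %s' % name)
      return not name.endswith('broken')

    def fetch_list(names, lock, fetched, sem, err_event, force_broken):
      """Shared worker: called inline when jobs == 1, by a thread when jobs > 1."""
      try:
        for name in names:
          if fetch_one(name):
            with lock:
              fetched.add(name)
          else:
            err_event.set()
            if not force_broken:
              break
      finally:
        sem.release()

    def fetch_all(project_lists, jobs=1, force_broken=False):
      fetched = set()
      lock = threading.Lock()
      sem = threading.Semaphore(jobs)
      err_event = threading.Event()
      threads = []
      for names in project_lists:
        # Stop handing out new work once something failed, unless forced on.
        if err_event.is_set() and not force_broken:
          break
        sem.acquire()
        kwargs = dict(names=names, lock=lock, fetched=fetched, sem=sem,
                      err_event=err_event, force_broken=force_broken)
        if jobs > 1:
          t = threading.Thread(target=fetch_list, kwargs=kwargs)
          t.daemon = True  # keep Ctrl-C from hanging on worker threads
          threads.append(t)
          t.start()
        else:
          fetch_list(**kwargs)  # same code path, run inline
      for t in threads:
        t.join()
      return fetched

    if __name__ == '__main__':
      groups = [['platform/build', 'platform/art'], ['demo/broken']]
      print(sorted(fetch_all(groups, jobs=2, force_broken=True)))

Because both branches consume the same kwargs dict, the error-event and force_broken handling lives in exactly one place, which is what lets the jobs > 1 path honor --force-broken the same way the serial path does.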