prune: add --jobs support

Use multiprocessing to run in parallel.  When operating on multiple
projects, this can greatly speed things up.  Across 1000 repos, it
goes from ~10sec to ~4sec with the default -j8.

This only does a simple conversion over to get an easy speedup.  It
is currently written to collect all results before displaying them.
If we refactored this module more, we could have it display results
as they came in.

Change-Id: I5caf4ca51df0b7f078f0db104ae5232268482c1c
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/298643
Reviewed-by: Chris Mcdonald <cjmcdonald@google.com>
Tested-by: Mike Frysinger <vapier@google.com>
This commit is contained in:
Mike Frysinger 2021-02-27 15:31:58 -05:00
parent ddab0604ee
commit bec4fe8aa3

View File

@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import multiprocessing
 from color import Coloring
-from command import PagedCommand
+from command import DEFAULT_LOCAL_JOBS, PagedCommand, WORKER_BATCH_SIZE
class Prune(PagedCommand):
@ -22,11 +25,29 @@ class Prune(PagedCommand):
  helpUsage = """
%prog [<project>...]
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _ExecuteOne(self, project):
"""Process one project."""
return project.PruneHeads()
 def Execute(self, opt, args):
-    all_branches = []
-    for project in self.GetProjects(args):
-        all_branches.extend(project.PruneHeads())
+    projects = self.GetProjects(args)
+
+    # NB: Should be able to refactor this module to display summary as results
+    # come back from children.
+    def _ProcessResults(results):
+        return list(itertools.chain.from_iterable(results))
+
+    # NB: Multiprocessing is heavy, so don't spin it up for one job.
+    if len(projects) == 1 or opt.jobs == 1:
+        all_branches = _ProcessResults(self._ExecuteOne(x) for x in projects)
+    else:
+        with multiprocessing.Pool(opt.jobs) as pool:
+            results = pool.imap(
+                self._ExecuteOne, projects,
+                chunksize=WORKER_BATCH_SIZE)
+            all_branches = _ProcessResults(results)
     if not all_branches:
         return