Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-07-02 20:17:19 +00:00)

Compare commits: 36 commits
Commits compared (SHA1):

f9f4df62e0, ebdf0409d2, 303bd963d5, ae384f8623, 70a4e643e6, 8da4861b38,
39ffd9977e, 584863fb5e, 454fdaf119, f7f9dd4deb, 70ee4dd313, cfe3095e50,
621de7ed12, d7ebdf56be, fabab4e245, b577444a90, 1e19f7dd61, d8b4101eae,
1c53b0fa44, e5ae870a2f, e59e2ae757, c44ad09309, 4592a63de5, 0444ddf78e,
9bf8236c24, 87f52f308c, 562cea7758, eede374e3e, 2c5fb84d35, 12f6dc49e9,
5591d99ee2, 9d865454aa, cbd78a9194, 46819a78a1, 159389f0da, 4406642e20
color.py (1 changed line)

@@ -210,6 +210,7 @@ class Coloring:
                 if have_fg:
                     bg = a
                 else:
+                    have_fg = True
                     fg = a
             elif is_attr(a):
                 attr = a
command.py (54 changed lines)

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import contextlib
 import multiprocessing
 import optparse
 import os
@@ -70,6 +71,14 @@ class Command:
     # migrated subcommands can set it to False.
     MULTI_MANIFEST_SUPPORT = True
 
+    # Shared data across parallel execution workers.
+    _parallel_context = None
+
+    @classmethod
+    def get_parallel_context(cls):
+        assert cls._parallel_context is not None
+        return cls._parallel_context
+
     def __init__(
         self,
         repodir=None,
@@ -242,9 +251,39 @@ class Command:
         """Perform the action, after option parsing is complete."""
         raise NotImplementedError
 
-    @staticmethod
+    @classmethod
+    @contextlib.contextmanager
+    def ParallelContext(cls):
+        """Obtains the context, which is shared to ExecuteInParallel workers.
+
+        Callers can store data in the context dict before invocation of
+        ExecuteInParallel. The dict will then be shared to child workers of
+        ExecuteInParallel.
+        """
+        assert cls._parallel_context is None
+        cls._parallel_context = {}
+        try:
+            yield
+        finally:
+            cls._parallel_context = None
+
+    @classmethod
+    def _InitParallelWorker(cls, context, initializer):
+        cls._parallel_context = context
+        if initializer:
+            initializer()
+
+    @classmethod
     def ExecuteInParallel(
-        jobs, func, inputs, callback, output=None, ordered=False
+        cls,
+        jobs,
+        func,
+        inputs,
+        callback,
+        output=None,
+        ordered=False,
+        chunksize=WORKER_BATCH_SIZE,
+        initializer=None,
     ):
         """Helper for managing parallel execution boiler plate.
 
@@ -269,6 +308,9 @@ class Command:
             output: An output manager.  May be progress.Progess or
                 color.Coloring.
             ordered: Whether the jobs should be processed in order.
+            chunksize: The number of jobs processed in batch by parallel
+                workers.
+            initializer: Worker initializer.
 
         Returns:
             The |callback| function's results are returned.
@@ -278,12 +320,16 @@ class Command:
             if len(inputs) == 1 or jobs == 1:
                 return callback(None, output, (func(x) for x in inputs))
             else:
-                with multiprocessing.Pool(jobs) as pool:
+                with multiprocessing.Pool(
+                    jobs,
+                    initializer=cls._InitParallelWorker,
+                    initargs=(cls._parallel_context, initializer),
+                ) as pool:
                     submit = pool.imap if ordered else pool.imap_unordered
                     return callback(
                         pool,
                         output,
-                        submit(func, inputs, chunksize=WORKER_BATCH_SIZE),
+                        submit(func, inputs, chunksize=chunksize),
                     )
         finally:
             if isinstance(output, progress.Progress):
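To make the intent of the new API concrete, here is a minimal usage sketch. It is not part of the change; the subcommand name, `_ProcessOne`, and the `"projects"` key are illustrative assumptions.

```python
from command import Command


class ExampleCommand(Command):
    """Hypothetical subcommand showing the ParallelContext pattern."""

    def Execute(self, opt, args):
        projects = self.GetProjects(args)
        with self.ParallelContext():
            # Store heavy objects once; workers read them back through
            # get_parallel_context() instead of pickling them per task.
            self.get_parallel_context()["projects"] = projects
            return self.ExecuteInParallel(
                opt.jobs,
                self._ProcessOne,            # hypothetical worker function
                range(len(projects)),        # ship only small indexes to workers
                callback=lambda pool, output, results: list(results),
                chunksize=4,
            )

    @classmethod
    def _ProcessOne(cls, idx):
        # Runs in a pool worker; the shared dict was installed by
        # Command._InitParallelWorker via the Pool initializer.
        return cls.get_parallel_context()["projects"][idx].name
```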
constraints.txt (new file, 1 line)

@@ -0,0 +1 @@
+black<24
@@ -107,11 +107,13 @@ following DTD:
     <!ATTLIST extend-project remote CDATA #IMPLIED>
     <!ATTLIST extend-project dest-branch CDATA #IMPLIED>
     <!ATTLIST extend-project upstream CDATA #IMPLIED>
+    <!ATTLIST extend-project base-rev CDATA #IMPLIED>
 
     <!ELEMENT remove-project EMPTY>
     <!ATTLIST remove-project name CDATA #IMPLIED>
     <!ATTLIST remove-project path CDATA #IMPLIED>
     <!ATTLIST remove-project optional CDATA #IMPLIED>
+    <!ATTLIST remove-project base-rev CDATA #IMPLIED>
 
     <!ELEMENT repo-hooks EMPTY>
     <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
@@ -433,6 +435,14 @@ project. Same syntax as the corresponding element of `project`.
 Attribute `upstream`: If specified, overrides the upstream of the original
 project. Same syntax as the corresponding element of `project`.
 
+Attribute `base-rev`: If specified, adds a check against the revision
+to be extended. Manifest parse will fail and give a list of mismatch extends
+if the revisions being extended have changed since base-rev was set.
+Intended for use with layered manifests using hash revisions to prevent
+patch branches hiding newer upstream revisions. Also compares named refs
+like branches or tags but is misleading if branches are used as base-rev.
+Same syntax as the corresponding element of `project`.
+
 ### Element annotation
 
 Zero or more annotation elements may be specified as children of a
@@ -496,6 +506,14 @@ name. Logic otherwise behaves like both are specified.
 Attribute `optional`: Set to true to ignore remove-project elements with no
 matching `project` element.
 
+Attribute `base-rev`: If specified, adds a check against the revision
+to be removed. Manifest parse will fail and give a list of mismatch removes
+if the revisions being removed have changed since base-rev was set.
+Intended for use with layered manifests using hash revisions to prevent
+patch branches hiding newer upstream revisions. Also compares named refs
+like branches or tags but is misleading if branches are used as base-rev.
+Same syntax as the corresponding element of `project`.
+
 ### Element repo-hooks
 
 NB: See the [practical documentation](./repo-hooks.md) for using repo hooks.
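A conceptual sketch of the check this attribute describes (illustrative only; the real parser-side implementation appears in the manifest_xml.py hunks further below, and the project name and hashes here are placeholders):

```python
def check_base_rev(kind, name, base_rev, current_rev, failures):
    """Record a mismatch instead of silently applying an extend/remove."""
    if base_rev and current_rev != base_rev:
        failures.append(
            f"{kind} {name} mismatch base {base_rev} vs revision {current_rev}"
        )


failures = []
check_base_rev("extend-project", "platform/example", "deadbeef", "cafef00d", failures)
if failures:
    raise ValueError("revision base check failed: " + "; ".join(failures))
```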
@@ -96,6 +96,9 @@ If that tag is valid, then repo will warn and use that commit instead.
 
 If that tag cannot be verified, it gives up and forces the user to resolve.
 
+If env variable `REPO_SKIP_SELF_UPDATE` is defined, this will
+bypass the self update algorithm.
+
 ### Force an update
 
 The `repo selfupdate` command can be used to force an immediate update.
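A small sketch of the kind of guard this paragraph describes; only the environment variable name comes from the text, the function names are made up:

```python
import os


def _maybe_self_update():
    # Honor the documented escape hatch before doing any update work.
    if "REPO_SKIP_SELF_UPDATE" in os.environ:
        return
    _run_self_update()  # hypothetical: the normal self-update path
```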
error.py (4 changed lines)

@@ -107,6 +107,10 @@ class GitError(RepoError):
         return self.message
 
 
+class GitAuthError(RepoExitError):
+    """Cannot talk to remote due to auth issue."""
+
+
 class GitcUnsupportedError(RepoExitError):
     """Gitc no longer supported."""
 
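For illustration only, a sketch of how a caller might surface authentication failures with the new type; the pattern list and function are assumptions, not part of the change:

```python
from error import GitAuthError

# Hypothetical substrings a fetch wrapper might look for in git stderr.
_AUTH_ERROR_PATTERNS = (
    "Authentication failed",
    "Invalid authentication credentials",
)


def raise_if_auth_error(stderr):
    if any(p in stderr for p in _AUTH_ERROR_PATTERNS):
        raise GitAuthError(f"cannot talk to remote: {stderr.strip()}")
```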
event_log.py (12 changed lines)

@@ -168,8 +168,10 @@ class EventLog:
         f.write("\n")
 
 
-# An integer id that is unique across this invocation of the program.
-_EVENT_ID = multiprocessing.Value("i", 1)
+# An integer id that is unique across this invocation of the program, to be set
+# by the first Add event. We can't set it here since it results in leaked
+# resources (see: https://issues.gerritcodereview.com/353656374).
+_EVENT_ID = None
 
 
 def _NextEventId():
@@ -178,6 +180,12 @@ def _NextEventId():
     Returns:
         A unique, to this invocation of the program, integer id.
     """
+    global _EVENT_ID
+    if _EVENT_ID is None:
+        # There is a small chance of race condition - two parallel processes
+        # setting up _EVENT_ID. However, we expect TASK_COMMAND to happen before
+        # mp kicks in.
+        _EVENT_ID = multiprocessing.Value("i", 1)
     with _EVENT_ID.get_lock():
         val = _EVENT_ID.value
         _EVENT_ID.value += 1
@@ -33,17 +33,6 @@ from wrapper import Wrapper
 
 
 GIT = "git"
-# NB: These do not need to be kept in sync with the repo launcher script.
-# These may be much newer as it allows the repo launcher to roll between
-# different repo releases while source versions might require a newer git.
-#
-# The soft version is when we start warning users that the version is old and
-# we'll be dropping support for it. We'll refuse to work with versions older
-# than the hard version.
-#
-# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
-MIN_GIT_VERSION_SOFT = (1, 9, 1)
-MIN_GIT_VERSION_HARD = (1, 7, 2)
 GIT_DIR = "GIT_DIR"
 
 LAST_GITDIR = None
@@ -324,10 +313,13 @@ class GitCommand:
             cwd = None
         command_name = cmdv[0]
         command.append(command_name)
+
+        if command_name in ("fetch", "clone"):
+            env["GIT_TERMINAL_PROMPT"] = "0"
         # Need to use the --progress flag for fetch/clone so output will be
-        # displayed as by default git only does progress output if stderr is a
-        # TTY.
-        if sys.stderr.isatty() and command_name in ("fetch", "clone"):
+        # displayed as by default git only does progress output if stderr is
+        # a TTY.
+        if sys.stderr.isatty():
             if "--progress" not in cmdv and "--quiet" not in cmdv:
                 command.append("--progress")
         command.extend(cmdv[1:])
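As a side note, a small illustration (not from the change) of what `GIT_TERMINAL_PROMPT=0` buys: network commands fail fast instead of hanging on an interactive credential prompt. The error-string check is only a loose example.

```python
import os
import subprocess

env = os.environ.copy()
env["GIT_TERMINAL_PROMPT"] = "0"  # same setting the diff applies to fetch/clone

result = subprocess.run(
    ["git", "fetch", "origin"],
    env=env,
    capture_output=True,
    text=True,
)
if result.returncode != 0:
    # Without the variable, git might have blocked here waiting for a
    # username/password on the terminal.
    print("fetch failed (possibly auth):", result.stderr.strip())
```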
@@ -307,8 +307,6 @@ class Superproject:
             )
             return SyncResult(False, False)
 
-        _PrintBetaNotice()
-
         should_exit = True
         if not self._remote_url:
             self._LogWarning(
@@ -452,16 +450,6 @@ class Superproject:
         return UpdateProjectsResult(manifest_path, False)
 
 
-@functools.lru_cache(maxsize=10)
-def _PrintBetaNotice():
-    """Print the notice of beta status."""
-    print(
-        "NOTICE: --use-superproject is in beta; report any issues to the "
-        "address described in `repo version`",
-        file=sys.stderr,
-    )
-
-
 @functools.lru_cache(maxsize=None)
 def _UseSuperprojectFromConfiguration():
     """Returns the user choice of whether to use superproject."""
@@ -1,5 +1,8 @@
 #!/bin/sh
-# From Gerrit Code Review 3.10.0 d5403dbf335ba7d48977fc95170c3f7027c34659
+# DO NOT EDIT THIS FILE
+# All updates should be sent upstream: https://gerrit.googlesource.com/gerrit/
+# This is synced from commit: 62f5bbea67f6dafa6e22a601a0c298214c510caf
+# DO NOT EDIT THIS FILE
 #
 # Part of Gerrit Code Review (https://www.gerritcodereview.com/)
 #
@@ -31,8 +34,7 @@ if test ! -f "$1" ; then
 fi
 
 # Do not create a change id if requested
-create_setting=$(git config --get gerrit.createChangeId)
-case "$create_setting" in
+case "$(git config --get gerrit.createChangeId)" in
     false)
         exit 0
         ;;
@@ -1,33 +1,25 @@
 #!/bin/sh
+# DO NOT EDIT THIS FILE
+# All updates should be sent upstream: https://github.com/git/git
+# This is synced from commit: 00e10ef10e161a913893b8cb33aa080d4ca5baa6
+# DO NOT EDIT THIS FILE
 #
 # An example hook script to verify if you are on battery, in case you
-# are running Windows, Linux or OS X. Called by git-gc --auto with no
-# arguments. The hook should exit with non-zero status after issuing an
-# appropriate message if it wants to stop the auto repacking.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
+# are running Linux or OS X. Called by git-gc --auto with no arguments.
+# The hook should exit with non-zero status after issuing an appropriate
+# message if it wants to stop the auto repacking.
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# This hook is stored in the contrib/hooks directory. Your distribution
+# may have put this somewhere else. If you want to use this hook, you
+# should make this script executable then link to it in the repository
+# you would like to use it in.
 #
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-if uname -s | grep -q "_NT-"
-then
-	if test -x $SYSTEMROOT/System32/Wbem/wmic
-	then
-		STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n')
-		[ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1
-	fi
-	exit 0
-fi
+# For example, if the hook is stored in
+# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
+#
+# cd /path/to/your/repository.git
+# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
+#	hooks/pre-auto-gc
 
 if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
 then
@@ -48,11 +40,6 @@ elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
 	grep -q "drawing from 'AC Power'"
 then
 	exit 0
-elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
-	"$(find /sys/bus/acpi/drivers/battery/ -type l | wc -l)";
-then
-	# No battery exists.
-	exit 0
 fi
 
 echo "Auto packing deferred; not on AC"
(Deleted file: the "repo gitc-delete" man page)

@@ -1,44 +0,0 @@
-.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "July 2022" "repo gitc-delete" "Repo Manual"
-.SH NAME
-repo \- repo gitc-delete - manual page for repo gitc-delete
-.SH SYNOPSIS
-.B repo
-\fI\,gitc-delete\/\fR
-.SH DESCRIPTION
-Summary
-.PP
-Delete a GITC Client.
-.SH OPTIONS
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-f\fR, \fB\-\-force\fR
-force the deletion (no prompt)
-.SS Logging options:
-.TP
-\fB\-v\fR, \fB\-\-verbose\fR
-show all output
-.TP
-\fB\-q\fR, \fB\-\-quiet\fR
-only show errors
-.SS Multi\-manifest options:
-.TP
-\fB\-\-outer\-manifest\fR
-operate starting at the outermost manifest
-.TP
-\fB\-\-no\-outer\-manifest\fR
-do not operate on outer manifests
-.TP
-\fB\-\-this\-manifest\-only\fR
-only operate on this (sub)manifest
-.TP
-\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
-operate on this manifest and its submanifests
-.PP
-Run `repo help gitc\-delete` to view the detailed manual.
-.SH DETAILS
-.PP
-This subcommand deletes the current GITC client, deleting the GITC manifest and
-all locally downloaded sources.
(Deleted file: the "repo gitc-init" man page)

@@ -1,175 +0,0 @@
-.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "October 2022" "repo gitc-init" "Repo Manual"
-.SH NAME
-repo \- repo gitc-init - manual page for repo gitc-init
-.SH SYNOPSIS
-.B repo
-\fI\,gitc-init \/\fR[\fI\,options\/\fR] [\fI\,client name\/\fR]
-.SH DESCRIPTION
-Summary
-.PP
-Initialize a GITC Client.
-.SH OPTIONS
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.SS Logging options:
-.TP
-\fB\-v\fR, \fB\-\-verbose\fR
-show all output
-.TP
-\fB\-q\fR, \fB\-\-quiet\fR
-only show errors
-.SS Manifest options:
-.TP
-\fB\-u\fR URL, \fB\-\-manifest\-url\fR=\fI\,URL\/\fR
-manifest repository location
-.TP
-\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
-manifest branch or revision (use HEAD for default)
-.TP
-\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
-initial manifest file
-.TP
-\fB\-g\fR GROUP, \fB\-\-groups\fR=\fI\,GROUP\/\fR
-restrict manifest projects to ones with specified
-group(s) [default|all|G1,G2,G3|G4,\-G5,\-G6]
-.TP
-\fB\-p\fR PLATFORM, \fB\-\-platform\fR=\fI\,PLATFORM\/\fR
-restrict manifest projects to ones with a specified
-platform group [auto|all|none|linux|darwin|...]
-.TP
-\fB\-\-submodules\fR
-sync any submodules associated with the manifest repo
-.TP
-\fB\-\-standalone\-manifest\fR
-download the manifest as a static file rather then
-create a git checkout of the manifest repo
-.TP
-\fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
-create a shallow clone of the manifest repo with given
-depth (0 for full clone); see git clone (default: 0)
-.SS Manifest (only) checkout options:
-.TP
-\fB\-\-current\-branch\fR
-fetch only current manifest branch from server
-(default)
-.TP
-\fB\-\-no\-current\-branch\fR
-fetch all manifest branches from server
-.TP
-\fB\-\-tags\fR
-fetch tags in the manifest
-.TP
-\fB\-\-no\-tags\fR
-don't fetch tags in the manifest
-.SS Checkout modes:
-.TP
-\fB\-\-mirror\fR
-create a replica of the remote repositories rather
-than a client working directory
-.TP
-\fB\-\-archive\fR
-checkout an archive instead of a git repository for
-each project. See git archive.
-.TP
-\fB\-\-worktree\fR
-use git\-worktree to manage projects
-.SS Project checkout optimizations:
-.TP
-\fB\-\-reference\fR=\fI\,DIR\/\fR
-location of mirror directory
-.TP
-\fB\-\-dissociate\fR
-dissociate from reference mirrors after clone
-.TP
-\fB\-\-depth\fR=\fI\,DEPTH\/\fR
-create a shallow clone with given depth; see git clone
-.TP
-\fB\-\-partial\-clone\fR
-perform partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
-.TP
-\fB\-\-no\-partial\-clone\fR
-disable use of partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
-.TP
-\fB\-\-partial\-clone\-exclude\fR=\fI\,PARTIAL_CLONE_EXCLUDE\/\fR
-exclude the specified projects (a comma\-delimited
-project names) from partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
-.TP
-\fB\-\-clone\-filter\fR=\fI\,CLONE_FILTER\/\fR
-filter for use with \fB\-\-partial\-clone\fR [default:
-blob:none]
-.TP
-\fB\-\-use\-superproject\fR
-use the manifest superproject to sync projects;
-implies \fB\-c\fR
-.TP
-\fB\-\-no\-use\-superproject\fR
-disable use of manifest superprojects
-.TP
-\fB\-\-clone\-bundle\fR
-enable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
-not \fB\-\-partial\-clone\fR)
-.TP
-\fB\-\-no\-clone\-bundle\fR
-disable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
-\fB\-\-partial\-clone\fR)
-.TP
-\fB\-\-git\-lfs\fR
-enable Git LFS support
-.TP
-\fB\-\-no\-git\-lfs\fR
-disable Git LFS support
-.SS repo Version options:
-.TP
-\fB\-\-repo\-url\fR=\fI\,URL\/\fR
-repo repository location ($REPO_URL)
-.TP
-\fB\-\-repo\-rev\fR=\fI\,REV\/\fR
-repo branch or revision ($REPO_REV)
-.TP
-\fB\-\-no\-repo\-verify\fR
-do not verify repo source code
-.SS Other options:
-.TP
-\fB\-\-config\-name\fR
-Always prompt for name/e\-mail
-.SS GITC options:
-.TP
-\fB\-f\fR MANIFEST_FILE, \fB\-\-manifest\-file\fR=\fI\,MANIFEST_FILE\/\fR
-Optional manifest file to use for this GITC client.
-.TP
-\fB\-c\fR GITC_CLIENT, \fB\-\-gitc\-client\fR=\fI\,GITC_CLIENT\/\fR
-Name of the gitc_client instance to create or modify.
-.SS Multi\-manifest:
-.TP
-\fB\-\-outer\-manifest\fR
-operate starting at the outermost manifest
-.TP
-\fB\-\-no\-outer\-manifest\fR
-do not operate on outer manifests
-.TP
-\fB\-\-this\-manifest\-only\fR
-only operate on this (sub)manifest
-.TP
-\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
-operate on this manifest and its submanifests
-.PP
-Run `repo help gitc\-init` to view the detailed manual.
-.SH DETAILS
-.PP
-The 'repo gitc\-init' command is ran to initialize a new GITC client for use with
-the GITC file system.
-.PP
-This command will setup the client directory, initialize repo, just like repo
-init does, and then downloads the manifest collection and installs it in the
-\&.repo/directory of the GITC client.
-.PP
-Once this is done, a GITC manifest is generated by pulling the HEAD SHA for each
-project and generates the properly formatted XML file and installs it as
-\&.manifest in the GITC client directory.
-.PP
-The \fB\-c\fR argument is required to specify the GITC client name.
-.PP
-The optional \fB\-f\fR argument can be used to specify the manifest file to use for
-this GITC client.
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "October 2022" "repo init" "Repo Manual"
+.TH REPO "1" "September 2024" "repo init" "Repo Manual"
 .SH NAME
 repo \- repo init - manual page for repo init
 .SH SYNOPSIS
@@ -28,6 +28,11 @@ manifest repository location
 \fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
 manifest branch or revision (use HEAD for default)
 .TP
+\fB\-\-manifest\-upstream\-branch\fR=\fI\,BRANCH\/\fR
+when a commit is provided to \fB\-\-manifest\-branch\fR, this
+is the name of the git ref in which the commit can be
+found
+.TP
 \fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
 initial manifest file
 .TP
@@ -163,6 +168,10 @@ The optional \fB\-b\fR argument can be used to select the manifest branch to che
 and use. If no branch is specified, the remote's default branch is used. This is
 equivalent to using \fB\-b\fR HEAD.
 .PP
+The optional \fB\-\-manifest\-upstream\-branch\fR argument can be used when a commit is
+provided to \fB\-\-manifest\-branch\fR (or \fB\-b\fR), to specify the name of the git ref in
+which the commit can be found.
+.PP
 The optional \fB\-m\fR argument can be used to specify an alternate manifest to be
 used. If no manifest is specified, the manifest default.xml will be used.
 .PP
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "October 2022" "repo manifest" "Repo Manual"
+.TH REPO "1" "April 2024" "repo manifest" "Repo Manual"
 .SH NAME
 repo \- repo manifest - manual page for repo manifest
 .SH SYNOPSIS
@@ -194,7 +194,8 @@ CDATA #IMPLIED>
 <!ATTLIST extend\-project upstream CDATA #IMPLIED>
 .IP
 <!ELEMENT remove\-project EMPTY>
-<!ATTLIST remove\-project name CDATA #REQUIRED>
+<!ATTLIST remove\-project name CDATA #IMPLIED>
+<!ATTLIST remove\-project path CDATA #IMPLIED>
 <!ATTLIST remove\-project optional CDATA #IMPLIED>
 .IP
 <!ELEMENT repo\-hooks EMPTY>
@@ -212,6 +213,7 @@ CDATA #IMPLIED>
 <!ELEMENT include EMPTY>
 <!ATTLIST include name CDATA #REQUIRED>
 <!ATTLIST include groups CDATA #IMPLIED>
+<!ATTLIST include revision CDATA #IMPLIED>
 .PP
 ]>
 ```
@@ -533,13 +535,24 @@ the repo client.
 .PP
 Element remove\-project
 .PP
-Deletes the named project from the internal manifest table, possibly allowing a
+Deletes a project from the internal manifest table, possibly allowing a
 subsequent project element in the same manifest file to replace the project with
 a different source.
 .PP
 This element is mostly useful in a local manifest file, where the user can
 remove a project, and possibly replace it with their own definition.
 .PP
+The project `name` or project `path` can be used to specify the remove target
+meaning one of them is required. If only name is specified, all projects with
+that name are removed.
+.PP
+If both name and path are specified, only projects with the same name and path
+are removed, meaning projects with the same name but in other locations are
+kept.
+.PP
+If only path is specified, a matching project is removed regardless of its name.
+Logic otherwise behaves like both are specified.
+.PP
 Attribute `optional`: Set to true to ignore remove\-project elements with no
 matching `project` element.
 .PP
@@ -608,6 +621,9 @@ included manifest belong. This appends and recurses, meaning all projects in
 included manifests carry all parent include groups. Same syntax as the
 corresponding element of `project`.
 .PP
+Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`)
+default to which all projects in the included manifest belong.
+.PP
 Local Manifests
 .PP
 Additional remotes and projects may be added through local manifest files stored
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
+.TH REPO "1" "September 2024" "repo smartsync" "Repo Manual"
 .SH NAME
 repo \- repo smartsync - manual page for repo smartsync
 .SH SYNOPSIS
@@ -37,11 +37,20 @@ overwrite an existing git directory if it needs to
 point to a different object directory. WARNING: this
 may cause loss of data
 .TP
+\fB\-\-force\-checkout\fR
+force checkout even if it results in throwing away
+uncommitted modifications. WARNING: this may cause
+loss of data
+.TP
 \fB\-\-force\-remove\-dirty\fR
 force remove projects with uncommitted modifications
 if projects no longer exist in the manifest. WARNING:
 this may cause loss of data
 .TP
+\fB\-\-rebase\fR
+rebase local commits regardless of whether they are
+published
+.TP
 \fB\-l\fR, \fB\-\-local\-only\fR
 only update working tree, don't fetch
 .TP
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
+.TH REPO "1" "September 2024" "repo sync" "Repo Manual"
 .SH NAME
 repo \- repo sync - manual page for repo sync
 .SH SYNOPSIS
@@ -37,11 +37,20 @@ overwrite an existing git directory if it needs to
 point to a different object directory. WARNING: this
 may cause loss of data
 .TP
+\fB\-\-force\-checkout\fR
+force checkout even if it results in throwing away
+uncommitted modifications. WARNING: this may cause
+loss of data
+.TP
 \fB\-\-force\-remove\-dirty\fR
 force remove projects with uncommitted modifications
 if projects no longer exist in the manifest. WARNING:
 this may cause loss of data
 .TP
+\fB\-\-rebase\fR
+rebase local commits regardless of whether they are
+published
+.TP
 \fB\-l\fR, \fB\-\-local\-only\fR
 only update working tree, don't fetch
 .TP
@@ -185,6 +194,11 @@ The \fB\-\-force\-sync\fR option can be used to overwrite existing git directori
 they have previously been linked to a different object directory. WARNING: This
 may cause data to be lost since refs may be removed when overwriting.
 .PP
+The \fB\-\-force\-checkout\fR option can be used to force git to switch revs even if the
+index or the working tree differs from HEAD, and if there are untracked files.
+WARNING: This may cause data to be lost since uncommitted changes may be
+removed.
+.PP
 The \fB\-\-force\-remove\-dirty\fR option can be used to remove previously used projects
 with uncommitted changes. WARNING: This may cause data to be lost since
 uncommitted changes may be removed with projects that no longer exist in the
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "August 2022" "repo upload" "Repo Manual"
+.TH REPO "1" "June 2024" "repo upload" "Repo Manual"
 .SH NAME
 repo \- repo upload - manual page for repo upload
 .SH SYNOPSIS
@@ -18,8 +18,11 @@ show this help message and exit
 number of jobs to run in parallel (default: based on
 number of CPU cores)
 .TP
-\fB\-t\fR
-send local branch name to Gerrit Code Review
+\fB\-t\fR, \fB\-\-topic\-branch\fR
+set the topic to the local branch name
+.TP
+\fB\-\-topic\fR=\fI\,TOPIC\/\fR
+set topic for the change
 .TP
 \fB\-\-hashtag\fR=\fI\,HASHTAGS\/\fR, \fB\-\-ht\fR=\fI\,HASHTAGS\/\fR
 add hashtags (comma delimited) to the review
@@ -30,6 +33,9 @@ add local branch name as a hashtag
 \fB\-l\fR LABELS, \fB\-\-label\fR=\fI\,LABELS\/\fR
 add a label when uploading
 .TP
+\fB\-\-pd\fR=\fI\,PATCHSET_DESCRIPTION\/\fR, \fB\-\-patchset\-description\fR=\fI\,PATCHSET_DESCRIPTION\/\fR
+description for patchset
+.TP
 \fB\-\-re\fR=\fI\,REVIEWERS\/\fR, \fB\-\-reviewers\fR=\fI\,REVIEWERS\/\fR
 request reviews from these people
 .TP
@@ -198,6 +204,12 @@ review.URL.uploadnotify:
 Control e\-mail notifications when uploading.
 https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#notify
 .PP
+review.URL.uploadwarningthreshold:
+.PP
+Repo will warn you if you are attempting to upload a large number of commits in
+one or more branches. By default, the threshold is five commits. This option
+allows you to override the warning threshold to a different value.
+.PP
 References
 .PP
 Gerrit Code Review: https://www.gerritcodereview.com/
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "June 2023" "repo" "Repo Manual"
+.TH REPO "1" "April 2024" "repo" "Repo Manual"
 .SH NAME
 repo \- repository management tool built on top of git
 .SH SYNOPSIS
@@ -79,12 +79,6 @@ Download and checkout a change
 forall
 Run a shell command in each project
 .TP
-gitc\-delete
-Delete a GITC Client.
-.TP
-gitc\-init
-Initialize a GITC Client.
-.TP
 grep
 Print lines matching a pattern
 .TP
@@ -435,11 +435,6 @@ class XmlManifest:
         self.parent_groups = parent_groups
         self.default_groups = default_groups
 
-        if outer_client and self.isGitcClient:
-            raise ManifestParseError(
-                "Multi-manifest is incompatible with `gitc-init`"
-            )
-
         if submanifest_path and not outer_client:
             # If passing a submanifest_path, there must be an outer_client.
             raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
@@ -1450,6 +1445,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
 
         repo_hooks_project = None
         enabled_repo_hooks = None
+        failed_revision_changes = []
         for node in itertools.chain(*node_list):
             if node.nodeName == "project":
                 project = self._ParseProject(node)
@@ -1476,6 +1472,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                 remote = self._get_remote(node)
                 dest_branch = node.getAttribute("dest-branch")
                 upstream = node.getAttribute("upstream")
+                base_revision = node.getAttribute("base-rev")
 
                 named_projects = self._projects[name]
                 if dest_path and not path and len(named_projects) > 1:
@@ -1489,6 +1486,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                     if groups:
                         p.groups.extend(groups)
                     if revision:
+                        if base_revision:
+                            if p.revisionExpr != base_revision:
+                                failed_revision_changes.append(
+                                    "extend-project name %s mismatch base "
+                                    "%s vs revision %s"
+                                    % (name, base_revision, p.revisionExpr)
+                                )
                         p.SetRevision(revision)
 
                     if remote_name:
@@ -1563,6 +1567,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
             if node.nodeName == "remove-project":
                 name = node.getAttribute("name")
                 path = node.getAttribute("path")
+                base_revision = node.getAttribute("base-rev")
 
                 # Name or path needed.
                 if not name and not path:
@@ -1576,6 +1581,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                 for projname, projects in list(self._projects.items()):
                     for p in projects:
                         if name == projname and not path:
+                            if base_revision:
+                                if p.revisionExpr != base_revision:
+                                    failed_revision_changes.append(
+                                        "remove-project name %s mismatch base "
+                                        "%s vs revision %s"
+                                        % (name, base_revision, p.revisionExpr)
+                                    )
                             del self._paths[p.relpath]
                             if not removed_project:
                                 del self._projects[name]
@@ -1583,6 +1595,17 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                         elif path == p.relpath and (
                             name == projname or not name
                         ):
+                            if base_revision:
+                                if p.revisionExpr != base_revision:
+                                    failed_revision_changes.append(
+                                        "remove-project path %s mismatch base "
+                                        "%s vs revision %s"
+                                        % (
+                                            p.relpath,
+                                            base_revision,
+                                            p.revisionExpr,
+                                        )
+                                    )
                             self._projects[projname].remove(p)
                             del self._paths[p.relpath]
                             removed_project = p.name
@@ -1602,6 +1625,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                         "project: %s" % node.toxml()
                     )
 
+        if failed_revision_changes:
+            raise ManifestParseError(
+                "revision base check failed, rebase patches and update "
+                "base revs for: ",
+                failed_revision_changes,
+            )
+
         # Store repo hooks project information.
         if repo_hooks_project:
             # Store a reference to the Project.
@@ -2290,7 +2320,6 @@ class RepoClient(XmlManifest):
             submanifest_path: The submanifest root relative to the repo root.
             **kwargs: Additional keyword arguments, passed to XmlManifest.
         """
-        self.isGitcClient = False
         submanifest_path = submanifest_path or ""
         if submanifest_path:
             self._CheckLocalPath(submanifest_path)
@@ -251,32 +251,3 @@ def readlink(path):
         return platform_utils_win32.readlink(_makelongpath(path))
     else:
         return os.readlink(path)
-
-
-def realpath(path):
-    """Return the canonical path of the specified filename, eliminating
-    any symbolic links encountered in the path.
-
-    Availability: Windows, Unix.
-    """
-    if isWindows():
-        current_path = os.path.abspath(path)
-        path_tail = []
-        for c in range(0, 100):  # Avoid cycles
-            if islink(current_path):
-                target = readlink(current_path)
-                current_path = os.path.join(
-                    os.path.dirname(current_path), target
-                )
-            else:
-                basename = os.path.basename(current_path)
-                if basename == "":
-                    path_tail.append(current_path)
-                    break
-                path_tail.append(basename)
-                current_path = os.path.dirname(current_path)
-        path_tail.reverse()
-        result = os.path.normpath(os.path.join(*path_tail))
-        return result
-    else:
-        return os.path.realpath(path)
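The call sites in project.py below switch to the standard library instead. A tiny sketch of the replacement (not from the change); presumably this relies on modern Python (3.8+), where os.path.realpath also resolves symlinks on Windows, making the hand-rolled loop above unnecessary.

```python
import os


def canonical(path):
    """Stdlib replacement for the removed platform_utils.realpath()."""
    return os.path.realpath(path)
```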
progress.py (13 changed lines)

@@ -100,6 +100,7 @@ class Progress:
         self._show = not delay
         self._units = units
         self._elide = elide and _TTY
+        self._quiet = quiet
 
         # Only show the active jobs section if we run more than one in parallel.
         self._show_jobs = False
@@ -114,13 +115,7 @@ class Progress:
         )
         self._update_thread.daemon = True
 
-        # When quiet, never show any output.  It's a bit hacky, but reusing the
-        # existing logic that delays initial output keeps the rest of the class
-        # clean.  Basically we set the start time to years in the future.
-        if quiet:
-            self._show = False
-            self._start += 2**32
-        elif show_elapsed:
+        if not quiet and show_elapsed:
             self._update_thread.start()
 
     def _update_loop(self):
@@ -160,7 +155,7 @@ class Progress:
             msg = self._last_msg
         self._last_msg = msg
 
-        if not _TTY or IsTraceToStderr():
+        if not _TTY or IsTraceToStderr() or self._quiet:
             return
 
         elapsed_sec = time.time() - self._start
@@ -202,7 +197,7 @@ class Progress:
 
     def end(self):
         self._update_event.set()
-        if not _TTY or IsTraceToStderr() or not self._show:
+        if not _TTY or IsTraceToStderr() or self._quiet:
             return
 
         duration = duration_str(time.time() - self._start)
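A short usage sketch (not part of the diff) of what the new flag changes in practice: quiet now short-circuits update() and end() directly rather than faking a far-future start time.

```python
from progress import Progress

p = Progress("Syncing work tree", 100, quiet=True)
for _ in range(100):
    p.update()  # produces no terminal output because self._quiet is True
p.end()         # the final summary line is suppressed as well
```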
project.py (168 changed lines)

@@ -32,6 +32,7 @@ import urllib.parse
 
 from color import Coloring
 from error import DownloadError
+from error import GitAuthError
 from error import GitError
 from error import ManifestInvalidPathError
 from error import ManifestInvalidRevisionError
@@ -145,7 +146,7 @@ def _ProjectHooks():
     """
     global _project_hook_list
     if _project_hook_list is None:
-        d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__)))
+        d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
         d = os.path.join(d, "hooks")
         _project_hook_list = [
             os.path.join(d, x) for x in platform_utils.listdir(d)
@@ -257,7 +258,7 @@ class ReviewableBranch:
         self,
         people,
         dryrun=False,
-        auto_topic=False,
+        topic=None,
         hashtags=(),
         labels=(),
         private=False,
@@ -273,7 +274,7 @@ class ReviewableBranch:
             branch=self.name,
             people=people,
             dryrun=dryrun,
-            auto_topic=auto_topic,
+            topic=topic,
             hashtags=hashtags,
             labels=labels,
             private=private,
@@ -727,12 +728,34 @@ class Project:
         return None
 
     def IsRebaseInProgress(self):
+        """Returns true if a rebase or "am" is in progress"""
+        # "rebase-apply" is used for "git rebase".
+        # "rebase-merge" is used for "git am".
         return (
             os.path.exists(self.work_git.GetDotgitPath("rebase-apply"))
             or os.path.exists(self.work_git.GetDotgitPath("rebase-merge"))
            or os.path.exists(os.path.join(self.worktree, ".dotest"))
         )
 
+    def IsCherryPickInProgress(self):
+        """Returns True if a cherry-pick is in progress."""
+        return os.path.exists(self.work_git.GetDotgitPath("CHERRY_PICK_HEAD"))
+
+    def _AbortRebase(self):
+        """Abort ongoing rebase, cherry-pick or patch apply (am).
+
+        If no rebase, cherry-pick or patch apply was in progress, this method
+        ignores the status and continues.
+        """
+
+        def _git(*args):
+            # Ignore return code, in case there was no rebase in progress.
+            GitCommand(self, args, log_as_error=False).Wait()
+
+        _git("cherry-pick", "--abort")
+        _git("rebase", "--abort")
+        _git("am", "--abort")
+
     def IsDirty(self, consider_untracked=True):
         """Is the working directory modified in some way?"""
         self.work_git.update_index(
@@ -1082,7 +1105,7 @@ class Project:
         branch=None,
         people=([], []),
         dryrun=False,
-        auto_topic=False,
+        topic=None,
         hashtags=(),
         labels=(),
         private=False,
@@ -1145,7 +1168,6 @@ class Project:
         # This stops git from pushing all reachable annotated tags when
         # push.followTags is configured. Gerrit does not accept any tags
         # pushed to a CL.
-        if git_require((1, 8, 3)):
-            cmd.append("--no-follow-tags")
+        cmd.append("--no-follow-tags")
 
         for push_option in push_options or []:
@@ -1159,8 +1181,8 @@ class Project:
 
         ref_spec = f"{R_HEADS + branch.name}:refs/for/{dest_branch}"
         opts = []
-        if auto_topic:
-            opts += ["topic=" + branch.name]
+        if topic is not None:
+            opts += [f"topic={topic}"]
         opts += ["t=%s" % p for p in hashtags]
         # NB: No need to encode labels as they've been validated above.
         opts += ["l=%s" % p for p in labels]
@@ -1462,8 +1484,6 @@ class Project:
         self._InitHooks()
 
     def _CopyAndLinkFiles(self):
-        if self.client.isGitcClient:
-            return
         for copyfile in self.copyfiles:
             copyfile._Copy()
         for linkfile in self.linkfiles:
@@ -1516,6 +1536,7 @@ class Project:
         syncbuf,
         force_sync=False,
         force_checkout=False,
+        force_rebase=False,
         submodules=False,
         errors=None,
         verbose=False,
@@ -1586,7 +1607,15 @@ class Project:
         if branch is None or syncbuf.detach_head:
             # Currently on a detached HEAD.  The user is assumed to
             # not have any local modifications worth worrying about.
-            if self.IsRebaseInProgress():
+            rebase_in_progress = (
+                self.IsRebaseInProgress() or self.IsCherryPickInProgress()
+            )
+            if rebase_in_progress and force_checkout:
+                self._AbortRebase()
+                rebase_in_progress = (
+                    self.IsRebaseInProgress() or self.IsCherryPickInProgress()
+                )
+            if rebase_in_progress:
                 fail(_PriorSyncFailedError(project=self.name))
                 return
 
@@ -1653,19 +1682,22 @@ class Project:
         if pub:
             not_merged = self._revlist(not_rev(revid), pub)
             if not_merged:
-                if upstream_gain:
+                if upstream_gain and not force_rebase:
                     # The user has published this branch and some of those
                     # commits are not yet merged upstream.  We do not want
                     # to rewrite the published commits so we punt.
                     fail(
                         LocalSyncFail(
                             "branch %s is published (but not merged) and is "
-                            "now %d commits behind"
+                            "now %d commits behind. Fix this manually or rerun "
+                            "with the --rebase option to force a rebase."
                             % (branch.name, len(upstream_gain)),
                             project=self.name,
                         )
                     )
                     return
+                syncbuf.later1(self, _doff, not verbose)
+                return
             elif pub == head:
                 # All published commits are merged, and thus we are a
                 # strict subset.  We can fast-forward safely.
@@ -1826,7 +1858,7 @@ class Project:
         # remove because it will recursively delete projects -- we handle that
         # ourselves below.  https://crbug.com/git/48
         if self.use_git_worktrees:
-            needle = platform_utils.realpath(self.gitdir)
+            needle = os.path.realpath(self.gitdir)
             # Find the git worktree commondir under .repo/worktrees/.
             output = self.bare_git.worktree("list", "--porcelain").splitlines()[
                 0
@@ -1840,7 +1872,7 @@ class Project:
                 with open(gitdir) as fp:
                     relpath = fp.read().strip()
                 # Resolve the checkout path and see if it matches this project.
-                fullpath = platform_utils.realpath(
+                fullpath = os.path.realpath(
                     os.path.join(configs, name, relpath)
                 )
                 if fullpath == needle:
@@ -2264,7 +2296,9 @@ class Project:
 
         try:
             rev = self.GetRevisionId()
|
||||||
except GitError:
|
except (GitError, ManifestInvalidRevisionError):
|
||||||
|
# The git repo may be outdated (i.e. not fetched yet) and querying
|
||||||
|
# its submodules using the revision may not work; so return here.
|
||||||
return []
|
return []
|
||||||
return get_submodules(self.gitdir, rev)
|
return get_submodules(self.gitdir, rev)
|
||||||
|
|
||||||
@ -2364,26 +2398,25 @@ class Project:
|
|||||||
try:
|
try:
|
||||||
# if revision (sha or tag) is not present then following function
|
# if revision (sha or tag) is not present then following function
|
||||||
# throws an error.
|
# throws an error.
|
||||||
self.bare_git.rev_list(
|
revs = [f"{self.revisionExpr}^0"]
|
||||||
"-1",
|
upstream_rev = None
|
||||||
"--missing=allow-any",
|
|
||||||
"%s^0" % self.revisionExpr,
|
|
||||||
"--",
|
|
||||||
log_as_error=False,
|
|
||||||
)
|
|
||||||
if self.upstream:
|
if self.upstream:
|
||||||
rev = self.GetRemote().ToLocal(self.upstream)
|
upstream_rev = self.GetRemote().ToLocal(self.upstream)
|
||||||
|
revs.append(upstream_rev)
|
||||||
|
|
||||||
self.bare_git.rev_list(
|
self.bare_git.rev_list(
|
||||||
"-1",
|
"-1",
|
||||||
"--missing=allow-any",
|
"--missing=allow-any",
|
||||||
"%s^0" % rev,
|
*revs,
|
||||||
"--",
|
"--",
|
||||||
log_as_error=False,
|
log_as_error=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if self.upstream:
|
||||||
self.bare_git.merge_base(
|
self.bare_git.merge_base(
|
||||||
"--is-ancestor",
|
"--is-ancestor",
|
||||||
self.revisionExpr,
|
self.revisionExpr,
|
||||||
rev,
|
upstream_rev,
|
||||||
log_as_error=False,
|
log_as_error=False,
|
||||||
)
|
)
|
||||||
return True
|
return True
|
||||||
@ -2567,12 +2600,7 @@ class Project:
|
|||||||
branch = None
|
branch = None
|
||||||
else:
|
else:
|
||||||
branch = self.revisionExpr
|
branch = self.revisionExpr
|
||||||
if (
|
if not self.manifest.IsMirror and is_sha1 and depth:
|
||||||
not self.manifest.IsMirror
|
|
||||||
and is_sha1
|
|
||||||
and depth
|
|
||||||
and git_require((1, 8, 3))
|
|
||||||
):
|
|
||||||
# Shallow checkout of a specific commit, fetch from that commit and
|
# Shallow checkout of a specific commit, fetch from that commit and
|
||||||
# not the heads only as the commit might be deeper in the history.
|
# not the heads only as the commit might be deeper in the history.
|
||||||
spec.append(branch)
|
spec.append(branch)
|
||||||
@ -2635,6 +2663,20 @@ class Project:
|
|||||||
# Fallthru to sleep+retry logic at the bottom.
|
# Fallthru to sleep+retry logic at the bottom.
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# TODO(b/360889369#comment24): git may gc commits incorrectly.
|
||||||
|
# Until the root cause is fixed, retry fetch with --refetch which
|
||||||
|
# will bring the repository into a good state.
|
||||||
|
elif gitcmd.stdout and (
|
||||||
|
"could not parse commit" in gitcmd.stdout
|
||||||
|
or "unable to parse commit" in gitcmd.stdout
|
||||||
|
):
|
||||||
|
cmd.insert(1, "--refetch")
|
||||||
|
print(
|
||||||
|
"could not parse commit error, retrying with refetch",
|
||||||
|
file=output_redir,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
# Try to prune remote branches once in case there are conflicts.
|
# Try to prune remote branches once in case there are conflicts.
|
||||||
# For example, if the remote had refs/heads/upstream, but deleted
|
# For example, if the remote had refs/heads/upstream, but deleted
|
||||||
# that and now has refs/heads/upstream/foo.
|
# that and now has refs/heads/upstream/foo.
|
||||||
@ -2660,6 +2702,33 @@ class Project:
|
|||||||
)
|
)
|
||||||
# Continue right away so we don't sleep as we shouldn't need to.
|
# Continue right away so we don't sleep as we shouldn't need to.
|
||||||
continue
|
continue
|
||||||
|
elif (
|
||||||
|
ret == 128
|
||||||
|
and gitcmd.stdout
|
||||||
|
and "fatal: could not read Username" in gitcmd.stdout
|
||||||
|
):
|
||||||
|
# User needs to be authenticated, and Git wants to prompt for
|
||||||
|
# username and password.
|
||||||
|
print(
|
||||||
|
"git requires authentication, but repo cannot perform "
|
||||||
|
"interactive authentication. Check git credentials.",
|
||||||
|
file=output_redir,
|
||||||
|
)
|
||||||
|
break
|
||||||
|
elif (
|
||||||
|
ret == 128
|
||||||
|
and gitcmd.stdout
|
||||||
|
and "remote helper 'sso' aborted session" in gitcmd.stdout
|
||||||
|
):
|
||||||
|
# User needs to be authenticated, and Git wants to prompt for
|
||||||
|
# username and password.
|
||||||
|
print(
|
||||||
|
"git requires authentication, but repo cannot perform "
|
||||||
|
"interactive authentication.",
|
||||||
|
file=output_redir,
|
||||||
|
)
|
||||||
|
raise GitAuthError(gitcmd.stdout)
|
||||||
|
break
|
||||||
elif current_branch_only and is_sha1 and ret == 128:
|
elif current_branch_only and is_sha1 and ret == 128:
|
||||||
# Exit code 128 means "couldn't find the ref you asked for"; if
|
# Exit code 128 means "couldn't find the ref you asked for"; if
|
||||||
# we're in sha1 mode, we just tried sync'ing from the upstream
|
# we're in sha1 mode, we just tried sync'ing from the upstream
|
||||||
@ -2784,6 +2853,8 @@ class Project:
|
|||||||
def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
|
def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
|
||||||
platform_utils.remove(dstPath, missing_ok=True)
|
platform_utils.remove(dstPath, missing_ok=True)
|
||||||
|
|
||||||
|
# We do not use curl's --retry option since it generally doesn't
|
||||||
|
# actually retry anything; code 18 for example, it will not retry on.
|
||||||
cmd = ["curl", "--fail", "--output", tmpPath, "--netrc", "--location"]
|
cmd = ["curl", "--fail", "--output", tmpPath, "--netrc", "--location"]
|
||||||
if quiet:
|
if quiet:
|
||||||
cmd += ["--silent", "--show-error"]
|
cmd += ["--silent", "--show-error"]
|
||||||
@ -2820,11 +2891,18 @@ class Project:
|
|||||||
(output, _) = proc.communicate()
|
(output, _) = proc.communicate()
|
||||||
curlret = proc.returncode
|
curlret = proc.returncode
|
||||||
|
|
||||||
if curlret == 22:
|
if curlret in (22, 35, 56, 92):
|
||||||
|
# We use --fail so curl exits with unique status.
|
||||||
# From curl man page:
|
# From curl man page:
|
||||||
# 22: HTTP page not retrieved. The requested url was not found
|
# 22: HTTP page not retrieved. The requested url was not found
|
||||||
# or returned another error with the HTTP error code being 400
|
# or returned another error with the HTTP error code being
|
||||||
# or above. This return code only appears if -f, --fail is used.
|
# 400 or above.
|
||||||
|
# 35: SSL connect error. The SSL handshaking failed. This can
|
||||||
|
# be thrown by Google storage sometimes.
|
||||||
|
# 56: Failure in receiving network data. This shows up with
|
||||||
|
# HTTP/404 on Google storage.
|
||||||
|
# 92: Stream error in HTTP/2 framing layer. Basically the same
|
||||||
|
# as 22 -- Google storage sometimes throws 500's.
|
||||||
if verbose:
|
if verbose:
|
||||||
print(
|
print(
|
||||||
"%s: Unable to retrieve clone.bundle; ignoring."
|
"%s: Unable to retrieve clone.bundle; ignoring."
|
||||||
@ -2975,14 +3053,12 @@ class Project:
|
|||||||
"Retrying clone after deleting %s", self.gitdir
|
"Retrying clone after deleting %s", self.gitdir
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
platform_utils.rmtree(
|
platform_utils.rmtree(os.path.realpath(self.gitdir))
|
||||||
platform_utils.realpath(self.gitdir)
|
|
||||||
)
|
|
||||||
if self.worktree and os.path.exists(
|
if self.worktree and os.path.exists(
|
||||||
platform_utils.realpath(self.worktree)
|
os.path.realpath(self.worktree)
|
||||||
):
|
):
|
||||||
platform_utils.rmtree(
|
platform_utils.rmtree(
|
||||||
platform_utils.realpath(self.worktree)
|
os.path.realpath(self.worktree)
|
||||||
)
|
)
|
||||||
return self._InitGitDir(
|
return self._InitGitDir(
|
||||||
mirror_git=mirror_git,
|
mirror_git=mirror_git,
|
||||||
@ -3068,7 +3144,7 @@ class Project:
|
|||||||
self._InitHooks(quiet=quiet)
|
self._InitHooks(quiet=quiet)
|
||||||
|
|
||||||
def _InitHooks(self, quiet=False):
|
def _InitHooks(self, quiet=False):
|
||||||
hooks = platform_utils.realpath(os.path.join(self.objdir, "hooks"))
|
hooks = os.path.realpath(os.path.join(self.objdir, "hooks"))
|
||||||
if not os.path.exists(hooks):
|
if not os.path.exists(hooks):
|
||||||
os.makedirs(hooks)
|
os.makedirs(hooks)
|
||||||
|
|
||||||
@ -3211,9 +3287,9 @@ class Project:
|
|||||||
dst_path = os.path.join(destdir, name)
|
dst_path = os.path.join(destdir, name)
|
||||||
src_path = os.path.join(srcdir, name)
|
src_path = os.path.join(srcdir, name)
|
||||||
|
|
||||||
dst = platform_utils.realpath(dst_path)
|
dst = os.path.realpath(dst_path)
|
||||||
if os.path.lexists(dst):
|
if os.path.lexists(dst):
|
||||||
src = platform_utils.realpath(src_path)
|
src = os.path.realpath(src_path)
|
||||||
# Fail if the links are pointing to the wrong place.
|
# Fail if the links are pointing to the wrong place.
|
||||||
if src != dst:
|
if src != dst:
|
||||||
logger.error(
|
logger.error(
|
||||||
@ -3249,10 +3325,10 @@ class Project:
|
|||||||
if copy_all:
|
if copy_all:
|
||||||
to_copy = platform_utils.listdir(gitdir)
|
to_copy = platform_utils.listdir(gitdir)
|
||||||
|
|
||||||
dotgit = platform_utils.realpath(dotgit)
|
dotgit = os.path.realpath(dotgit)
|
||||||
for name in set(to_copy).union(to_symlink):
|
for name in set(to_copy).union(to_symlink):
|
||||||
try:
|
try:
|
||||||
src = platform_utils.realpath(os.path.join(gitdir, name))
|
src = os.path.realpath(os.path.join(gitdir, name))
|
||||||
dst = os.path.join(dotgit, name)
|
dst = os.path.join(dotgit, name)
|
||||||
|
|
||||||
if os.path.lexists(dst):
|
if os.path.lexists(dst):
|
||||||
@ -3349,9 +3425,7 @@ class Project:
|
|||||||
else:
|
else:
|
||||||
if not init_dotgit:
|
if not init_dotgit:
|
||||||
# See if the project has changed.
|
# See if the project has changed.
|
||||||
if platform_utils.realpath(
|
if os.path.realpath(self.gitdir) != os.path.realpath(dotgit):
|
||||||
self.gitdir
|
|
||||||
) != platform_utils.realpath(dotgit):
|
|
||||||
platform_utils.remove(dotgit)
|
platform_utils.remove(dotgit)
|
||||||
|
|
||||||
if init_dotgit or not os.path.exists(dotgit):
|
if init_dotgit or not os.path.exists(dotgit):
|
||||||
|
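A minimal sketch of the "--refetch" retry added in the fetch hunk above; the refspec and the failure text are illustrative placeholders, not values taken from this change:

# Sketch only: mirrors the new "could not parse commit" handling.
cmd = ["fetch", "origin", "+refs/heads/main:refs/remotes/origin/main"]  # illustrative
stdout = "fatal: could not parse commit 0123abc"  # illustrative failure output
if "could not parse commit" in stdout or "unable to parse commit" in stdout:
    cmd.insert(1, "--refetch")
# cmd is now ["fetch", "--refetch", "origin", "+refs/heads/main:refs/remotes/origin/main"],
# asking git to re-download objects it believes it already has.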

release/update-hooks  (new executable file, 143 lines)
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper tool for updating hooks from their various upstreams."""
+
+import argparse
+import base64
+import json
+from pathlib import Path
+import sys
+from typing import List, Optional
+import urllib.request
+
+
+assert sys.version_info >= (3, 8), "Python 3.8+ required"
+
+
+TOPDIR = Path(__file__).resolve().parent.parent
+HOOKS_DIR = TOPDIR / "hooks"
+
+
+def update_hook_commit_msg() -> None:
+    """Update commit-msg hook from Gerrit."""
+    hook = HOOKS_DIR / "commit-msg"
+    print(
+        f"{hook.name}: Updating from https://gerrit.googlesource.com/gerrit/"
+        "+/HEAD/resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
+    )
+
+    # Get the current commit.
+    url = "https://gerrit.googlesource.com/gerrit/+/HEAD?format=JSON"
+    with urllib.request.urlopen(url) as fp:
+        data = fp.read()
+    # Discard the xss protection.
+    data = data.split(b"\n", 1)[1]
+    data = json.loads(data)
+    commit = data["commit"]
+
+    # Fetch the data for that commit.
+    url = (
+        f"https://gerrit.googlesource.com/gerrit/+/{commit}/"
+        "resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
+    )
+    with urllib.request.urlopen(f"{url}?format=TEXT") as fp:
+        data = fp.read()
+
+    # gitiles base64 encodes text data.
+    data = base64.b64decode(data)
+
+    # Inject header into the hook.
+    lines = data.split(b"\n")
+    lines = (
+        lines[:1]
+        + [
+            b"# DO NOT EDIT THIS FILE",
+            (
+                b"# All updates should be sent upstream: "
+                b"https://gerrit.googlesource.com/gerrit/"
+            ),
+            f"# This is synced from commit: {commit}".encode("utf-8"),
+            b"# DO NOT EDIT THIS FILE",
+        ]
+        + lines[1:]
+    )
+    data = b"\n".join(lines)
+
+    # Update the hook.
+    hook.write_bytes(data)
+    hook.chmod(0o755)
+
+
+def update_hook_pre_auto_gc() -> None:
+    """Update pre-auto-gc hook from git."""
+    hook = HOOKS_DIR / "pre-auto-gc"
+    print(
+        f"{hook.name}: Updating from https://github.com/git/git/"
+        "HEAD/contrib/hooks/pre-auto-gc-battery"
+    )
+
+    # Get the current commit.
+    headers = {
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    url = "https://api.github.com/repos/git/git/git/refs/heads/master"
+    req = urllib.request.Request(url, headers=headers)
+    with urllib.request.urlopen(req) as fp:
+        data = fp.read()
+    data = json.loads(data)
+
+    # Fetch the data for that commit.
+    commit = data["object"]["sha"]
+    url = (
+        f"https://raw.githubusercontent.com/git/git/{commit}/"
+        "contrib/hooks/pre-auto-gc-battery"
+    )
+    with urllib.request.urlopen(url) as fp:
+        data = fp.read()
+
+    # Inject header into the hook.
+    lines = data.split(b"\n")
+    lines = (
+        lines[:1]
+        + [
+            b"# DO NOT EDIT THIS FILE",
+            (
+                b"# All updates should be sent upstream: "
+                b"https://github.com/git/git/"
+            ),
+            f"# This is synced from commit: {commit}".encode("utf-8"),
+            b"# DO NOT EDIT THIS FILE",
+        ]
+        + lines[1:]
+    )
+    data = b"\n".join(lines)
+
+    # Update the hook.
+    hook.write_bytes(data)
+    hook.chmod(0o755)
+
+
+def main(argv: Optional[List[str]] = None) -> Optional[int]:
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.parse_args(argv)
+
+    update_hook_commit_msg()
+    update_hook_pre_auto_gc()
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))

repo  (21 lines changed)
@@ -124,7 +124,7 @@ if not REPO_REV:
 BUG_URL = "https://issues.gerritcodereview.com/issues/new?component=1370071"

 # increment this whenever we make important changes to this script
-VERSION = (2, 45)
+VERSION = (2, 48)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
@@ -210,7 +210,6 @@ GIT = "git"  # our git command
 # NB: The version of git that the repo launcher requires may be much older than
 # the version of git that the main repo source tree requires. Keeping this at
 # an older version also makes it easier for users to upgrade/rollback as needed.
-# See requirements.json for versions.
 MIN_GIT_VERSION = (1, 7, 9)  # minimum supported git version
 repodir = ".repo"  # name of repo's private directory
 S_repo = "repo"  # special repo repository
@@ -283,6 +282,12 @@ def InitParser(parser):
         metavar="REVISION",
         help="manifest branch or revision (use HEAD for default)",
     )
+    group.add_option(
+        "--manifest-upstream-branch",
+        help="when a commit is provided to --manifest-branch, this "
+        "is the name of the git ref in which the commit can be found",
+        metavar="BRANCH",
+    )
     group.add_option(
         "-m",
         "--manifest-name",
@@ -1237,13 +1242,13 @@ class Requirements:

         return cls(json_data)

-    def _get_soft_ver(self, pkg):
+    def get_soft_ver(self, pkg):
         """Return the soft version for |pkg| if it exists."""
-        return self.requirements.get(pkg, {}).get("soft", ())
+        return tuple(self.requirements.get(pkg, {}).get("soft", ()))

-    def _get_hard_ver(self, pkg):
+    def get_hard_ver(self, pkg):
         """Return the hard version for |pkg| if it exists."""
-        return self.requirements.get(pkg, {}).get("hard", ())
+        return tuple(self.requirements.get(pkg, {}).get("hard", ()))

     @staticmethod
     def _format_ver(ver):
@@ -1253,8 +1258,8 @@ class Requirements:
     def assert_ver(self, pkg, curr_ver):
         """Verify |pkg|'s |curr_ver| is new enough."""
         curr_ver = tuple(curr_ver)
-        soft_ver = tuple(self._get_soft_ver(pkg))
+        soft_ver = tuple(self.get_soft_ver(pkg))
-        hard_ver = tuple(self._get_hard_ver(pkg))
+        hard_ver = tuple(self.get_hard_ver(pkg))
         if curr_ver < hard_ver:
             print(
                 f'repo: error: Your version of "{pkg}" '
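A short sketch of how the now-public Requirements accessors can be consumed; the directory handling is an assumption, and the version tuples correspond to the requirements.json hunk below:

# Sketch, assuming requirements.json sits next to this launcher script.
import os

reqs = Requirements.from_dir(os.path.dirname(os.path.abspath(__file__)))
reqs.get_hard_ver("git")            # -> (1, 9, 1) per the requirements.json change below
reqs.get_soft_ver("git")            # -> (2, 7, 4)
reqs.assert_ver("git", (2, 30, 0))  # ok: newer than both thresholds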

@@ -39,8 +39,8 @@ class _LogColoring(Coloring):

     def __init__(self, config):
         super().__init__(config, "logs")
-        self.error = self.colorer("error", fg="red")
+        self.error = self.nofmt_colorer("error", fg="red")
-        self.warning = self.colorer("warn", fg="yellow")
+        self.warning = self.nofmt_colorer("warn", fg="yellow")
         self.levelMap = {
             "WARNING": self.warning,
             "ERROR": self.error,

@@ -46,8 +46,6 @@

 # Supported git versions.
 #
-# git-1.7.9 is in Ubuntu Precise.
-# git-1.7.10 is in Debian Wheezy.
 # git-1.9.1 is in Ubuntu Trusty.
 # git-2.1.4 is in Debian Jessie.
 # git-2.7.4 is in Ubuntu Xenial.
@@ -55,7 +53,7 @@
 # git-2.17.0 is in Ubuntu Bionic.
 # git-2.20.1 is in Debian Buster.
   "git": {
-    "hard": [1, 7, 9],
+    "hard": [1, 9, 1],
     "soft": [2, 7, 4]
   }
 }

@@ -32,6 +32,7 @@ def run_black():
     extra_programs = [
         "repo",
         "run_tests",
+        "release/update-hooks",
         "release/update-manpages",
     ]
     return subprocess.run(

ssh.py  (58 lines changed)
@@ -24,6 +24,7 @@ import sys
 import tempfile
 import time

+from git_command import git
 import platform_utils
 from repo_trace import Trace

@@ -211,7 +212,33 @@ class ProxyManager:
             # and print to the log there.
             pass

-        command = command_base[:1] + ["-M", "-N"] + command_base[1:]
+        # Git protocol V2 is a new feature in git 2.18.0, made default in
+        # git 2.26.0
+        # It is faster and more efficient than V1.
+        # To enable it when using SSH, the environment variable GIT_PROTOCOL
+        # must be set in the SSH side channel when establishing the connection
+        # to the git server.
+        # See https://git-scm.com/docs/protocol-v2#_ssh_and_file_transport
+        # Normally git does this by itself. But here, where the SSH connection
+        # is established manually over ControlMaster via the repo-tool, it must
+        # be passed in explicitly instead.
+        # Based on https://git-scm.com/docs/gitprotocol-pack#_extra_parameters,
+        # GIT_PROTOCOL is considered an "Extra Parameter" and must be ignored
+        # by servers that do not understand it. This means that it is safe to
+        # set it even when connecting to older servers.
+        # It should also be safe to set the environment variable for older
+        # local git versions, since it is only part of the ssh side channel.
+        git_protocol_version = _get_git_protocol_version()
+        ssh_git_protocol_args = [
+            "-o",
+            f"SetEnv GIT_PROTOCOL=version={git_protocol_version}",
+        ]
+
+        command = (
+            command_base[:1]
+            + ["-M", "-N", *ssh_git_protocol_args]
+            + command_base[1:]
+        )
         p = None
         try:
             with Trace("Call to ssh: %s", " ".join(command)):
@@ -293,3 +320,32 @@ class ProxyManager:
             tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
         )
         return self._sock_path
+
+
+@functools.lru_cache(maxsize=1)
+def _get_git_protocol_version() -> str:
+    """Return the git protocol version.
+
+    The version is found by first reading the global git config.
+    If no git config for protocol version exists, try to deduce the default
+    protocol version based on the git version.
+
+    See https://git-scm.com/docs/gitprotocol-v2 for details.
+    """
+    try:
+        return subprocess.check_output(
+            ["git", "config", "--get", "--global", "protocol.version"],
+            encoding="utf-8",
+            stderr=subprocess.PIPE,
+        ).strip()
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 1:
+            # Exit code 1 means that the git config key was not found.
+            # Try to imitate the defaults that git would have used.
+            git_version = git.version_tuple()
+            if git_version >= (2, 26, 0):
+                # Since git version 2.26, protocol v2 is the default.
+                return "2"
+            return "1"
+        # Other exit codes indicate error with reading the config.
+        raise
|
|||||||
else:
|
else:
|
||||||
args.insert(0, "'All local branches'")
|
args.insert(0, "'All local branches'")
|
||||||
|
|
||||||
def _ExecuteOne(self, all_branches, nb, project):
|
@classmethod
|
||||||
|
def _ExecuteOne(cls, all_branches, nb, project_idx):
|
||||||
"""Abandon one project."""
|
"""Abandon one project."""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
if all_branches:
|
if all_branches:
|
||||||
branches = project.GetBranches()
|
branches = project.GetBranches()
|
||||||
else:
|
else:
|
||||||
@ -89,7 +91,7 @@ It is equivalent to "git branch -D <branchname>".
|
|||||||
if status is not None:
|
if status is not None:
|
||||||
ret[name] = status
|
ret[name] = status
|
||||||
|
|
||||||
return (ret, project, errors)
|
return (ret, project_idx, errors)
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
nb = args[0].split()
|
nb = args[0].split()
|
||||||
@ -102,7 +104,8 @@ It is equivalent to "git branch -D <branchname>".
|
|||||||
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
||||||
|
|
||||||
def _ProcessResults(_pool, pm, states):
|
def _ProcessResults(_pool, pm, states):
|
||||||
for results, project, errors in states:
|
for results, project_idx, errors in states:
|
||||||
|
project = all_projects[project_idx]
|
||||||
for branch, status in results.items():
|
for branch, status in results.items():
|
||||||
if status:
|
if status:
|
||||||
success[branch].append(project)
|
success[branch].append(project)
|
||||||
@ -111,14 +114,17 @@ It is equivalent to "git branch -D <branchname>".
|
|||||||
aggregate_errors.extend(errors)
|
aggregate_errors.extend(errors)
|
||||||
pm.update(msg="")
|
pm.update(msg="")
|
||||||
|
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = all_projects
|
||||||
self.ExecuteInParallel(
|
self.ExecuteInParallel(
|
||||||
opt.jobs,
|
opt.jobs,
|
||||||
functools.partial(self._ExecuteOne, opt.all, nb),
|
functools.partial(self._ExecuteOne, opt.all, nb),
|
||||||
all_projects,
|
range(len(all_projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
output=Progress(
|
output=Progress(
|
||||||
f"Abandon {nb}", len(all_projects), quiet=opt.quiet
|
f"Abandon {nb}", len(all_projects), quiet=opt.quiet
|
||||||
),
|
),
|
||||||
|
chunksize=1,
|
||||||
)
|
)
|
||||||
|
|
||||||
width = max(
|
width = max(
|
||||||
|
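The abandon change above is representative of the pattern applied across the subcommands in this range: the project list is stashed in a shared parallel context before the workers fork, and only integer indices travel through the pool. A minimal sketch of that shape, under stated assumptions (the Example class and its use of PruneHeads are illustrative, not taken from the tree):

# Sketch only: how a subcommand is expected to use the shared context.
from command import Command


class Example(Command):
    @classmethod
    def _ExecuteOne(cls, project_idx):
        # Workers look the project up by index in the shared context
        # instead of receiving a pickled Project object.
        project = cls.get_parallel_context()["projects"][project_idx]
        return (project_idx, project.PruneHeads())

    def Execute(self, opt, args):
        projects = self.GetProjects(args)

        def _ProcessResults(_pool, _output, results):
            return list(results)

        with self.ParallelContext():
            # Data stored here is visible to the worker processes.
            self.get_parallel_context()["projects"] = projects
            return self.ExecuteInParallel(
                opt.jobs,
                self._ExecuteOne,
                range(len(projects)),
                callback=_ProcessResults,
            )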

@@ -98,6 +98,22 @@ is shown, then the branch appears in all projects.
     """
     PARALLEL_JOBS = DEFAULT_LOCAL_JOBS

+    @classmethod
+    def _ExpandProjectToBranches(cls, project_idx):
+        """Expands a project into a list of branch names & associated info.
+
+        Args:
+            project_idx: project.Project index
+
+        Returns:
+            List[Tuple[str, git_config.Branch, int]]
+        """
+        branches = []
+        project = cls.get_parallel_context()["projects"][project_idx]
+        for name, b in project.GetBranches().items():
+            branches.append((name, b, project_idx))
+        return branches
+
     def Execute(self, opt, args):
         projects = self.GetProjects(
             args, all_manifests=not opt.this_manifest_only
@@ -107,15 +123,18 @@ is shown, then the branch appears in all projects.
         project_cnt = len(projects)

         def _ProcessResults(_pool, _output, results):
-            for name, b in itertools.chain.from_iterable(results):
+            for name, b, project_idx in itertools.chain.from_iterable(results):
+                b.project = projects[project_idx]
                 if name not in all_branches:
                     all_branches[name] = BranchInfo(name)
                 all_branches[name].add(b)

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             self.ExecuteInParallel(
                 opt.jobs,
-                expand_project_to_branches,
+                self._ExpandProjectToBranches,
-                projects,
+                range(len(projects)),
                 callback=_ProcessResults,
             )

@@ -191,19 +210,3 @@ is shown, then the branch appears in all projects.
         else:
             out.write(" in all projects")
         out.nl()
-
-
-def expand_project_to_branches(project):
-    """Expands a project into a list of branch names & associated information.
-
-    Args:
-        project: project.Project
-
-    Returns:
-        List[Tuple[str, git_config.Branch]]
-    """
-    branches = []
-    for name, b in project.GetBranches().items():
-        b.project = project
-        branches.append((name, b))
-    return branches

@@ -20,7 +20,6 @@ from command import DEFAULT_LOCAL_JOBS
 from error import GitError
 from error import RepoExitError
 from progress import Progress
-from project import Project
 from repo_logging import RepoLogger


@@ -30,7 +29,7 @@ logger = RepoLogger(__file__)
 class CheckoutBranchResult(NamedTuple):
     # Whether the Project is on the branch (i.e. branch exists and no errors)
     result: bool
-    project: Project
+    project_idx: int
     error: Exception


@@ -62,15 +61,17 @@ The command is equivalent to:
         if not args:
             self.Usage()

-    def _ExecuteOne(self, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, nb, project_idx):
         """Checkout one project."""
         error = None
         result = None
+        project = cls.get_parallel_context()["projects"][project_idx]
         try:
             result = project.CheckoutBranch(nb)
         except GitError as e:
             error = e
-        return CheckoutBranchResult(result, project, error)
+        return CheckoutBranchResult(result, project_idx, error)

     def Execute(self, opt, args):
         nb = args[0]
@@ -83,17 +84,20 @@ The command is equivalent to:

         def _ProcessResults(_pool, pm, results):
             for result in results:
+                project = all_projects[result.project_idx]
                 if result.error is not None:
                     err.append(result.error)
-                    err_projects.append(result.project)
+                    err_projects.append(project)
                 elif result.result:
-                    success.append(result.project)
+                    success.append(project)
                 pm.update(msg="")

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(self._ExecuteOne, nb),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 output=Progress(
                     f"Checkout {nb}", len(all_projects), quiet=opt.quiet

@@ -40,7 +40,8 @@ to the Unix 'patch' command.
             help="paths are relative to the repository root",
         )

-    def _ExecuteOne(self, absolute, local, project):
+    @classmethod
+    def _ExecuteOne(cls, absolute, local, project_idx):
         """Obtains the diff for a specific project.

         Args:
@@ -48,12 +49,13 @@ to the Unix 'patch' command.
             local: a boolean, if True, the path is relative to the local
                 (sub)manifest. If false, the path is relative to the outermost
                 manifest.
-            project: Project to get status of.
+            project_idx: Project index to get status of.

         Returns:
             The status of the project.
         """
         buf = io.StringIO()
+        project = cls.get_parallel_context()["projects"][project_idx]
         ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
         return (ret, buf.getvalue())

@@ -71,12 +73,15 @@ to the Unix 'patch' command.
                 ret = 1
             return ret

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             return self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(
                     self._ExecuteOne, opt.absolute, opt.this_manifest_only
                 ),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 ordered=True,
+                chunksize=1,
             )

@@ -15,7 +15,6 @@
 import errno
 import functools
 import io
-import multiprocessing
 import os
 import re
 import signal
@@ -26,7 +25,6 @@ from color import Coloring
 from command import Command
 from command import DEFAULT_LOCAL_JOBS
 from command import MirrorSafeCommand
-from command import WORKER_BATCH_SIZE
 from error import ManifestInvalidRevisionError
 from repo_logging import RepoLogger

@@ -241,7 +239,6 @@ without iterating through the remaining projects.
             cmd.insert(cmd.index(cn) + 1, "--color")

         mirror = self.manifest.IsMirror
-        rc = 0

         smart_sync_manifest_name = "smart_sync_override.xml"
         smart_sync_manifest_path = os.path.join(
@@ -264,18 +261,10 @@ without iterating through the remaining projects.

         os.environ["REPO_COUNT"] = str(len(projects))

-        try:
-            config = self.manifest.manifestProject.config
-            with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
-                results_it = pool.imap(
-                    functools.partial(
-                        DoWorkWrapper, mirror, opt, cmd, shell, config
-                    ),
-                    enumerate(projects),
-                    chunksize=WORKER_BATCH_SIZE,
-                )
+        def _ProcessResults(_pool, _output, results):
+            rc = 0
             first = True
-                for r, output in results_it:
+            for r, output in results:
                 if output:
                     if first:
                         first = False
@@ -290,6 +279,23 @@ without iterating through the remaining projects.
                 rc = rc or r
                 if r != 0 and opt.abort_on_errors:
                     raise Exception("Aborting due to previous error")
+            return rc
+
+        try:
+            config = self.manifest.manifestProject.config
+            with self.ParallelContext():
+                self.get_parallel_context()["projects"] = projects
+                rc = self.ExecuteInParallel(
+                    opt.jobs,
+                    functools.partial(
+                        self.DoWorkWrapper, mirror, opt, cmd, shell, config
+                    ),
+                    range(len(projects)),
+                    callback=_ProcessResults,
+                    ordered=True,
+                    initializer=self.InitWorker,
+                    chunksize=1,
+                )
         except (KeyboardInterrupt, WorkerKeyboardInterrupt):
             # Catch KeyboardInterrupt raised inside and outside of workers
             rc = rc or errno.EINTR
@@ -304,16 +310,12 @@ without iterating through the remaining projects.
         if rc != 0:
             sys.exit(rc)

-
-class WorkerKeyboardInterrupt(Exception):
-    """Keyboard interrupt exception for worker processes."""
-
-
-def InitWorker():
+    @classmethod
+    def InitWorker(cls):
         signal.signal(signal.SIGINT, signal.SIG_IGN)

-
-def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
+    @classmethod
+    def DoWorkWrapper(cls, mirror, opt, cmd, shell, config, project_idx):
         """A wrapper around the DoWork() method.

         Catch the KeyboardInterrupt exceptions here and re-raise them as a
@@ -321,14 +323,18 @@ def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
         with stacktraces and making the parent hang indefinitely.

         """
-    cnt, project = args
+        project = cls.get_parallel_context()["projects"][project_idx]
         try:
-        return DoWork(project, mirror, opt, cmd, shell, cnt, config)
+            return DoWork(project, mirror, opt, cmd, shell, project_idx, config)
         except KeyboardInterrupt:
             print("%s: Worker interrupted" % project.name)
             raise WorkerKeyboardInterrupt()


+class WorkerKeyboardInterrupt(Exception):
+    """Keyboard interrupt exception for worker processes."""
+
+
 def DoWork(project, mirror, opt, cmd, shell, cnt, config):
     env = os.environ.copy()

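A standalone sketch (not repo code) of why the forall change above passes initializer=self.InitWorker: each pool worker starts by ignoring SIGINT, so Ctrl-C is handled once by the parent instead of every child printing its own traceback.

import multiprocessing
import signal


def _ignore_sigint():
    # Same effect as InitWorker above, but outside any command class.
    signal.signal(signal.SIGINT, signal.SIG_IGN)


if __name__ == "__main__":
    with multiprocessing.Pool(2, initializer=_ignore_sigint) as pool:
        print(pool.map(str.upper, ["a", "b"]))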

@@ -23,7 +23,6 @@ from error import GitError
 from error import InvalidArgumentsError
 from error import SilentRepoExitError
 from git_command import GitCommand
-from project import Project
 from repo_logging import RepoLogger


@@ -40,7 +39,7 @@ class GrepColoring(Coloring):
 class ExecuteOneResult(NamedTuple):
     """Result from an execute instance."""

-    project: Project
+    project_idx: int
     rc: int
     stdout: str
     stderr: str
@@ -262,8 +261,10 @@ contain a line that matches both expressions:
             help="Show only file names not containing matching lines",
         )

-    def _ExecuteOne(self, cmd_argv, project):
+    @classmethod
+    def _ExecuteOne(cls, cmd_argv, project_idx):
         """Process one project."""
+        project = cls.get_parallel_context()["projects"][project_idx]
         try:
             p = GitCommand(
                 project,
@@ -274,7 +275,7 @@ contain a line that matches both expressions:
                 verify_command=True,
             )
         except GitError as e:
-            return ExecuteOneResult(project, -1, None, str(e), e)
+            return ExecuteOneResult(project_idx, -1, None, str(e), e)

         try:
             error = None
@@ -282,10 +283,12 @@ contain a line that matches both expressions:
         except GitError as e:
             rc = 1
             error = e
-        return ExecuteOneResult(project, rc, p.stdout, p.stderr, error)
+        return ExecuteOneResult(project_idx, rc, p.stdout, p.stderr, error)

     @staticmethod
-    def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
+    def _ProcessResults(
+        full_name, have_rev, opt, projects, _pool, out, results
+    ):
         git_failed = False
         bad_rev = False
         have_match = False
@@ -293,9 +296,10 @@ contain a line that matches both expressions:
         errors = []

         for result in results:
+            project = projects[result.project_idx]
             if result.rc < 0:
                 git_failed = True
-                out.project("--- project %s ---" % _RelPath(result.project))
+                out.project("--- project %s ---" % _RelPath(project))
                 out.nl()
                 out.fail("%s", result.stderr)
                 out.nl()
@@ -311,9 +315,7 @@ contain a line that matches both expressions:
                 ):
                     bad_rev = True
                 else:
-                    out.project(
-                        "--- project %s ---" % _RelPath(result.project)
-                    )
+                    out.project("--- project %s ---" % _RelPath(project))
                     out.nl()
                     out.fail("%s", result.stderr.strip())
                     out.nl()
@@ -331,13 +333,13 @@ contain a line that matches both expressions:
                         rev, line = line.split(":", 1)
                         out.write("%s", rev)
                         out.write(":")
-                        out.project(_RelPath(result.project))
+                        out.project(_RelPath(project))
                         out.write("/")
                         out.write("%s", line)
                         out.nl()
                 elif full_name:
                     for line in r:
-                        out.project(_RelPath(result.project))
+                        out.project(_RelPath(project))
                         out.write("/")
                         out.write("%s", line)
                         out.nl()
@@ -381,15 +383,18 @@ contain a line that matches both expressions:
             cmd_argv.extend(opt.revision)
         cmd_argv.append("--")

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             git_failed, bad_rev, have_match, errors = self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(self._ExecuteOne, cmd_argv),
-                projects,
+                range(len(projects)),
                 callback=functools.partial(
-                    self._ProcessResults, full_name, have_rev, opt
+                    self._ProcessResults, full_name, have_rev, opt, projects
                 ),
                 output=out,
                 ordered=True,
+                chunksize=1,
             )

         if git_failed:

@@ -21,10 +21,9 @@ from command import MirrorSafeCommand
 from error import RepoUnhandledExceptionError
 from error import UpdateManifestError
 from git_command import git_require
-from git_command import MIN_GIT_VERSION_HARD
-from git_command import MIN_GIT_VERSION_SOFT
 from repo_logging import RepoLogger
 from wrapper import Wrapper
+from wrapper import WrapperDir


 logger = RepoLogger(__file__)
@@ -53,6 +52,10 @@ The optional -b argument can be used to select the manifest branch
 to checkout and use. If no branch is specified, the remote's default
 branch is used. This is equivalent to using -b HEAD.

+The optional --manifest-upstream-branch argument can be used when a commit is
+provided to --manifest-branch (or -b), to specify the name of the git ref in
+which the commit can be found.
+
 The optional -m argument can be used to specify an alternate manifest
 to be used. If no manifest is specified, the manifest default.xml
 will be used.
@@ -136,6 +139,7 @@ to update the working directory files.
         # manifest project is special and is created when instantiating the
         # manifest which happens before we parse options.
         self.manifest.manifestProject.clone_depth = opt.manifest_depth
+        self.manifest.manifestProject.upstream = opt.manifest_upstream_branch
         clone_filter_for_depth = (
             "blob:none" if (_REPO_ALLOW_SHALLOW == "0") else None
         )
@@ -318,6 +322,12 @@ to update the working directory files.
                 " be used with --standalone-manifest."
             )

+        if opt.manifest_upstream_branch and opt.manifest_branch is None:
+            self.OptionParser.error(
+                "--manifest-upstream-branch cannot be used without "
+                "--manifest-branch."
+            )
+
         if args:
             if opt.manifest_url:
                 self.OptionParser.error(
@@ -331,13 +341,17 @@ to update the working directory files.
             self.OptionParser.error("too many arguments to init")

     def Execute(self, opt, args):
-        git_require(MIN_GIT_VERSION_HARD, fail=True)
-        if not git_require(MIN_GIT_VERSION_SOFT):
+        wrapper = Wrapper()
+
+        reqs = wrapper.Requirements.from_dir(WrapperDir())
+        git_require(reqs.get_hard_ver("git"), fail=True)
+        min_git_version_soft = reqs.get_soft_ver("git")
+        if not git_require(min_git_version_soft):
             logger.warning(
                 "repo: warning: git-%s+ will soon be required; "
                 "please upgrade your version of git to maintain "
                 "support.",
-                ".".join(str(x) for x in MIN_GIT_VERSION_SOFT),
+                ".".join(str(x) for x in min_git_version_soft),
             )

         rp = self.manifest.repoProject
@@ -350,7 +364,6 @@ to update the working directory files.

         # Handle new --repo-rev requests.
         if opt.repo_rev:
-            wrapper = Wrapper()
             try:
                 remote_ref, rev = wrapper.check_repo_rev(
                     rp.worktree,
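For reference, a hedged example of the new flag in use; the URL and commit below are placeholders, not values from this change:

    repo init -u https://example.com/platform/manifest -b 1a2b3c4d --manifest-upstream-branch main

Here -b names a commit rather than a branch, and --manifest-upstream-branch tells repo which ref on the remote contains that commit.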

@@ -27,8 +27,10 @@ class Prune(PagedCommand):
     """
     PARALLEL_JOBS = DEFAULT_LOCAL_JOBS

-    def _ExecuteOne(self, project):
+    @classmethod
+    def _ExecuteOne(cls, project_idx):
         """Process one project."""
+        project = cls.get_parallel_context()["projects"][project_idx]
         return project.PruneHeads()

     def Execute(self, opt, args):
@@ -41,10 +43,12 @@ class Prune(PagedCommand):
         def _ProcessResults(_pool, _output, results):
             return list(itertools.chain.from_iterable(results))

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             all_branches = self.ExecuteInParallel(
                 opt.jobs,
                 self._ExecuteOne,
-                projects,
+                range(len(projects)),
                 callback=_ProcessResults,
                 ordered=True,
             )

@@ -21,7 +21,6 @@ from error import RepoExitError
 from git_command import git
 from git_config import IsImmutable
 from progress import Progress
-from project import Project
 from repo_logging import RepoLogger


@@ -29,7 +28,7 @@ logger = RepoLogger(__file__)


 class ExecuteOneResult(NamedTuple):
-    project: Project
+    project_idx: int
     error: Exception


@@ -80,18 +79,20 @@ revision specified in the manifest.
         if not git.check_ref_format("heads/%s" % nb):
             self.OptionParser.error("'%s' is not a valid name" % nb)

-    def _ExecuteOne(self, revision, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, revision, nb, default_revisionExpr, project_idx):
         """Start one project."""
         # If the current revision is immutable, such as a SHA1, a tag or
         # a change, then we can't push back to it. Substitute with
         # dest_branch, if defined; or with manifest default revision instead.
         branch_merge = ""
         error = None
+        project = cls.get_parallel_context()["projects"][project_idx]
         if IsImmutable(project.revisionExpr):
             if project.dest_branch:
                 branch_merge = project.dest_branch
             else:
-                branch_merge = self.manifest.default.revisionExpr
+                branch_merge = default_revisionExpr

         try:
             project.StartBranch(
@@ -100,7 +101,7 @@ revision specified in the manifest.
         except Exception as e:
             logger.error("error: unable to checkout %s: %s", project.name, e)
             error = e
-        return ExecuteOneResult(project, error)
+        return ExecuteOneResult(project_idx, error)

     def Execute(self, opt, args):
         nb = args[0]
@@ -120,18 +121,27 @@ revision specified in the manifest.
         def _ProcessResults(_pool, pm, results):
             for result in results:
                 if result.error:
-                    err_projects.append(result.project)
+                    project = all_projects[result.project_idx]
+                    err_projects.append(project)
                     err.append(result.error)
                 pm.update(msg="")

+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             self.ExecuteInParallel(
                 opt.jobs,
-                functools.partial(self._ExecuteOne, opt.revision, nb),
-                all_projects,
+                functools.partial(
+                    self._ExecuteOne,
+                    opt.revision,
+                    nb,
+                    self.manifest.default.revisionExpr,
+                ),
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 output=Progress(
                     f"Starting {nb}", len(all_projects), quiet=opt.quiet
                 ),
+                chunksize=1,
             )

         if err_projects:
@ -88,7 +88,8 @@ the following meanings:
|
|||||||
"projects",
|
"projects",
|
||||||
)
|
)
|
||||||
|
|
||||||
def _StatusHelper(self, quiet, local, project):
|
@classmethod
|
||||||
|
def _StatusHelper(cls, quiet, local, project_idx):
|
||||||
"""Obtains the status for a specific project.
|
"""Obtains the status for a specific project.
|
||||||
|
|
||||||
Obtains the status for a project, redirecting the output to
|
Obtains the status for a project, redirecting the output to
|
||||||
@ -99,12 +100,13 @@ the following meanings:
|
|||||||
local: a boolean, if True, the path is relative to the local
|
local: a boolean, if True, the path is relative to the local
|
||||||
(sub)manifest. If false, the path is relative to the outermost
|
(sub)manifest. If false, the path is relative to the outermost
|
||||||
manifest.
|
manifest.
|
||||||
project: Project to get status of.
|
project_idx: Project index to get status of.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
The status of the project.
|
The status of the project.
|
||||||
"""
|
"""
|
||||||
buf = io.StringIO()
|
buf = io.StringIO()
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
ret = project.PrintWorkTreeStatus(
|
ret = project.PrintWorkTreeStatus(
|
||||||
quiet=quiet, output_redir=buf, local=local
|
quiet=quiet, output_redir=buf, local=local
|
||||||
)
|
)
|
||||||
@ -143,14 +145,17 @@ the following meanings:
|
|||||||
ret += 1
|
ret += 1
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = all_projects
|
||||||
counter = self.ExecuteInParallel(
|
counter = self.ExecuteInParallel(
|
||||||
opt.jobs,
|
opt.jobs,
|
||||||
functools.partial(
|
functools.partial(
|
||||||
self._StatusHelper, opt.quiet, opt.this_manifest_only
|
self._StatusHelper, opt.quiet, opt.this_manifest_only
|
||||||
),
|
),
|
||||||
all_projects,
|
range(len(all_projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
ordered=True,
|
ordered=True,
|
||||||
|
chunksize=1,
|
||||||
)
|
)
|
||||||
|
|
||||||
if not opt.quiet and len(all_projects) == counter:
|
if not opt.quiet and len(all_projects) == counter:
|
||||||
|
187
subcmds/sync.py
187
subcmds/sync.py
@ -131,12 +131,17 @@ def _SafeCheckoutOrder(checkouts: List[Project]) -> List[List[Project]]:
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
def _chunksize(projects: int, jobs: int) -> int:
|
||||||
|
"""Calculate chunk size for the given number of projects and jobs."""
|
||||||
|
return min(max(1, projects // jobs), WORKER_BATCH_SIZE)
|
||||||
|
|
||||||
|
|
||||||
class _FetchOneResult(NamedTuple):
|
class _FetchOneResult(NamedTuple):
|
||||||
"""_FetchOne return value.
|
"""_FetchOne return value.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
success (bool): True if successful.
|
success (bool): True if successful.
|
||||||
project (Project): The fetched project.
|
project_idx (int): The fetched project index.
|
||||||
start (float): The starting time.time().
|
start (float): The starting time.time().
|
||||||
finish (float): The ending time.time().
|
finish (float): The ending time.time().
|
||||||
remote_fetched (bool): True if the remote was actually queried.
|
remote_fetched (bool): True if the remote was actually queried.
|
||||||
@ -144,7 +149,7 @@ class _FetchOneResult(NamedTuple):
|
|||||||
|
|
||||||
success: bool
|
success: bool
|
||||||
errors: List[Exception]
|
errors: List[Exception]
|
||||||
project: Project
|
project_idx: int
|
||||||
start: float
|
start: float
|
||||||
finish: float
|
finish: float
|
||||||
remote_fetched: bool
|
remote_fetched: bool
|
||||||
@ -177,14 +182,14 @@ class _CheckoutOneResult(NamedTuple):
|
|||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
success (bool): True if successful.
|
success (bool): True if successful.
|
||||||
project (Project): The project.
|
project_idx (int): The project index.
|
||||||
start (float): The starting time.time().
|
start (float): The starting time.time().
|
||||||
finish (float): The ending time.time().
|
finish (float): The ending time.time().
|
||||||
"""
|
"""
|
||||||
|
|
||||||
success: bool
|
success: bool
|
||||||
errors: List[Exception]
|
errors: List[Exception]
|
||||||
project: Project
|
project_idx: int
|
||||||
start: float
|
start: float
|
||||||
finish: float
|
finish: float
|
||||||
|
|
||||||
@ -400,6 +405,13 @@ later is required to fix a server side protocol bug.
|
|||||||
"projects no longer exist in the manifest. "
|
"projects no longer exist in the manifest. "
|
||||||
"WARNING: this may cause loss of data",
|
"WARNING: this may cause loss of data",
|
||||||
)
|
)
|
||||||
|
p.add_option(
|
||||||
|
"--rebase",
|
||||||
|
dest="rebase",
|
||||||
|
action="store_true",
|
||||||
|
help="rebase local commits regardless of whether they are "
|
||||||
|
"published",
|
||||||
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"-l",
|
"-l",
|
||||||
"--local-only",
|
"--local-only",
|
||||||
@ -580,7 +592,8 @@ later is required to fix a server side protocol bug.
|
|||||||
branch = branch[len(R_HEADS) :]
|
branch = branch[len(R_HEADS) :]
|
||||||
return branch
|
return branch
|
||||||
|
|
||||||
def _GetCurrentBranchOnly(self, opt, manifest):
|
@classmethod
|
||||||
|
def _GetCurrentBranchOnly(cls, opt, manifest):
|
||||||
"""Returns whether current-branch or use-superproject options are
|
"""Returns whether current-branch or use-superproject options are
|
||||||
enabled.
|
enabled.
|
||||||
|
|
||||||
@ -698,7 +711,8 @@ later is required to fix a server side protocol bug.
|
|||||||
if need_unload:
|
if need_unload:
|
||||||
m.outer_client.manifest.Unload()
|
m.outer_client.manifest.Unload()
|
||||||
|
|
||||||
def _FetchProjectList(self, opt, projects):
|
@classmethod
|
||||||
|
def _FetchProjectList(cls, opt, projects):
|
||||||
"""Main function of the fetch worker.
|
"""Main function of the fetch worker.
|
||||||
|
|
||||||
The projects we're given share the same underlying git object store, so
|
The projects we're given share the same underlying git object store, so
|
||||||
@ -710,21 +724,23 @@ later is required to fix a server side protocol bug.
|
|||||||
opt: Program options returned from optparse. See _Options().
|
opt: Program options returned from optparse. See _Options().
|
||||||
projects: Projects to fetch.
|
projects: Projects to fetch.
|
||||||
"""
|
"""
|
||||||
return [self._FetchOne(opt, x) for x in projects]
|
return [cls._FetchOne(opt, x) for x in projects]
|
||||||
|
|
||||||
def _FetchOne(self, opt, project):
|
@classmethod
|
||||||
|
def _FetchOne(cls, opt, project_idx):
|
||||||
"""Fetch git objects for a single project.
|
"""Fetch git objects for a single project.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
opt: Program options returned from optparse. See _Options().
|
opt: Program options returned from optparse. See _Options().
|
||||||
project: Project object for the project to fetch.
|
project_idx: Project index for the project to fetch.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Whether the fetch was successful.
|
Whether the fetch was successful.
|
||||||
"""
|
"""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
start = time.time()
|
start = time.time()
|
||||||
k = f"{project.name} @ {project.relpath}"
|
k = f"{project.name} @ {project.relpath}"
|
||||||
self._sync_dict[k] = start
|
cls.get_parallel_context()["sync_dict"][k] = start
|
||||||
success = False
|
success = False
|
||||||
remote_fetched = False
|
remote_fetched = False
|
||||||
errors = []
|
errors = []
|
||||||
@ -734,7 +750,7 @@ later is required to fix a server side protocol bug.
|
|||||||
quiet=opt.quiet,
|
quiet=opt.quiet,
|
||||||
verbose=opt.verbose,
|
verbose=opt.verbose,
|
||||||
output_redir=buf,
|
output_redir=buf,
|
||||||
current_branch_only=self._GetCurrentBranchOnly(
|
current_branch_only=cls._GetCurrentBranchOnly(
|
||||||
opt, project.manifest
|
opt, project.manifest
|
||||||
),
|
),
|
||||||
force_sync=opt.force_sync,
|
force_sync=opt.force_sync,
|
||||||
@ -744,7 +760,7 @@ later is required to fix a server side protocol bug.
|
|||||||
optimized_fetch=opt.optimized_fetch,
|
optimized_fetch=opt.optimized_fetch,
|
||||||
retry_fetches=opt.retry_fetches,
|
retry_fetches=opt.retry_fetches,
|
||||||
prune=opt.prune,
|
prune=opt.prune,
|
||||||
ssh_proxy=self.ssh_proxy,
|
ssh_proxy=cls.get_parallel_context()["ssh_proxy"],
|
||||||
clone_filter=project.manifest.CloneFilter,
|
clone_filter=project.manifest.CloneFilter,
|
||||||
partial_clone_exclude=project.manifest.PartialCloneExclude,
|
partial_clone_exclude=project.manifest.PartialCloneExclude,
|
||||||
clone_filter_for_depth=project.manifest.CloneFilterForDepth,
|
clone_filter_for_depth=project.manifest.CloneFilterForDepth,
|
||||||
@ -776,24 +792,20 @@ later is required to fix a server side protocol bug.
|
|||||||
type(e).__name__,
|
type(e).__name__,
|
||||||
e,
|
e,
|
||||||
)
|
)
|
||||||
del self._sync_dict[k]
|
|
||||||
errors.append(e)
|
errors.append(e)
|
||||||
raise
|
raise
|
||||||
|
finally:
|
||||||
|
del cls.get_parallel_context()["sync_dict"][k]
|
||||||
|
|
||||||
finish = time.time()
|
finish = time.time()
|
||||||
del self._sync_dict[k]
|
|
||||||
return _FetchOneResult(
|
return _FetchOneResult(
|
||||||
success, errors, project, start, finish, remote_fetched
|
success, errors, project_idx, start, finish, remote_fetched
|
||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _FetchInitChild(cls, ssh_proxy):
|
|
||||||
cls.ssh_proxy = ssh_proxy
|
|
||||||
|
|
||||||
def _GetSyncProgressMessage(self):
|
def _GetSyncProgressMessage(self):
|
||||||
earliest_time = float("inf")
|
earliest_time = float("inf")
|
||||||
earliest_proj = None
|
earliest_proj = None
|
||||||
items = self._sync_dict.items()
|
items = self.get_parallel_context()["sync_dict"].items()
|
||||||
for project, t in items:
|
for project, t in items:
|
||||||
if t < earliest_time:
|
if t < earliest_time:
|
||||||
earliest_time = t
|
earliest_time = t
|
||||||
@ -801,7 +813,7 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
if not earliest_proj:
|
if not earliest_proj:
|
||||||
# This function is called when sync is still running but in some
|
# This function is called when sync is still running but in some
|
||||||
# cases (by chance), _sync_dict can contain no entries. Return some
|
# cases (by chance), sync_dict can contain no entries. Return some
|
||||||
# text to indicate that sync is still working.
|
# text to indicate that sync is still working.
|
||||||
return "..working.."
|
return "..working.."
|
||||||
|
|
||||||
@ -812,7 +824,6 @@ later is required to fix a server side protocol bug.
|
|||||||
def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
|
def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
|
||||||
ret = True
|
ret = True
|
||||||
|
|
||||||
jobs = opt.jobs_network
|
|
||||||
fetched = set()
|
fetched = set()
|
||||||
remote_fetched = set()
|
remote_fetched = set()
|
||||||
pm = Progress(
|
pm = Progress(
|
||||||
@ -824,7 +835,6 @@ later is required to fix a server side protocol bug.
|
|||||||
elide=True,
|
elide=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
self._sync_dict = multiprocessing.Manager().dict()
|
|
||||||
sync_event = _threading.Event()
|
sync_event = _threading.Event()
|
||||||
|
|
||||||
def _MonitorSyncLoop():
|
def _MonitorSyncLoop():
|
||||||
@ -835,19 +845,13 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
|
sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
|
||||||
sync_progress_thread.daemon = True
|
sync_progress_thread.daemon = True
|
||||||
sync_progress_thread.start()
|
|
||||||
|
|
||||||
objdir_project_map = dict()
|
def _ProcessResults(pool, pm, results_sets):
|
||||||
for project in projects:
|
|
||||||
objdir_project_map.setdefault(project.objdir, []).append(project)
|
|
||||||
projects_list = list(objdir_project_map.values())
|
|
||||||
|
|
||||||
def _ProcessResults(results_sets):
|
|
||||||
ret = True
|
ret = True
|
||||||
for results in results_sets:
|
for results in results_sets:
|
||||||
for result in results:
|
for result in results:
|
||||||
success = result.success
|
success = result.success
|
||||||
project = result.project
|
project = projects[result.project_idx]
|
||||||
start = result.start
|
start = result.start
|
||||||
finish = result.finish
|
finish = result.finish
|
||||||
self._fetch_times.Set(project, finish - start)
|
self._fetch_times.Set(project, finish - start)
|
||||||
@ -871,58 +875,49 @@ later is required to fix a server side protocol bug.
|
|||||||
fetched.add(project.gitdir)
|
fetched.add(project.gitdir)
|
||||||
pm.update()
|
pm.update()
|
||||||
if not ret and opt.fail_fast:
|
if not ret and opt.fail_fast:
|
||||||
|
if pool:
|
||||||
|
pool.close()
|
||||||
break
|
break
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
# We pass the ssh proxy settings via the class. This allows
|
with self.ParallelContext():
|
||||||
# multiprocessing to pickle it up when spawning children. We can't pass
|
self.get_parallel_context()["projects"] = projects
|
||||||
# it as an argument to _FetchProjectList below as multiprocessing is
|
self.get_parallel_context()[
|
||||||
# unable to pickle those.
|
"sync_dict"
|
||||||
Sync.ssh_proxy = None
|
] = multiprocessing.Manager().dict()
|
||||||
|
|
||||||
# NB: Multiprocessing is heavy, so don't spin it up for one job.
|
objdir_project_map = dict()
|
||||||
if len(projects_list) == 1 or jobs == 1:
|
for index, project in enumerate(projects):
|
||||||
self._FetchInitChild(ssh_proxy)
|
objdir_project_map.setdefault(project.objdir, []).append(index)
|
||||||
if not _ProcessResults(
|
projects_list = list(objdir_project_map.values())
|
||||||
self._FetchProjectList(opt, x) for x in projects_list
|
|
||||||
):
|
jobs = min(opt.jobs_network, len(projects_list))
|
||||||
ret = False
|
|
||||||
else:
|
# We pass the ssh proxy settings via the class. This allows
|
||||||
# Favor throughput over responsiveness when quiet. It seems that
|
# multiprocessing to pickle it up when spawning children. We can't
|
||||||
# imap() will yield results in batches relative to chunksize, so
|
# pass it as an argument to _FetchProjectList below as
|
||||||
# even as the children finish a sync, we won't see the result until
|
# multiprocessing is unable to pickle those.
|
||||||
# one child finishes ~chunksize jobs. When using a large --jobs
|
self.get_parallel_context()["ssh_proxy"] = ssh_proxy
|
||||||
# with large chunksize, this can be jarring as there will be a large
|
|
||||||
# initial delay where repo looks like it isn't doing anything and
|
sync_progress_thread.start()
|
||||||
# sits at 0%, but then suddenly completes a lot of jobs all at once.
|
if not opt.quiet:
|
||||||
# Since this code is more network bound, we can accept a bit more
|
|
||||||
# CPU overhead with a smaller chunksize so that the user sees more
|
|
||||||
# immediate & continuous feedback.
|
|
||||||
if opt.quiet:
|
|
||||||
chunksize = WORKER_BATCH_SIZE
|
|
||||||
else:
|
|
||||||
pm.update(inc=0, msg="warming up")
|
pm.update(inc=0, msg="warming up")
|
||||||
chunksize = 4
|
try:
|
||||||
with multiprocessing.Pool(
|
ret = self.ExecuteInParallel(
|
||||||
jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
|
jobs,
|
||||||
) as pool:
|
|
||||||
results = pool.imap_unordered(
|
|
||||||
functools.partial(self._FetchProjectList, opt),
|
functools.partial(self._FetchProjectList, opt),
|
||||||
projects_list,
|
projects_list,
|
||||||
chunksize=chunksize,
|
callback=_ProcessResults,
|
||||||
|
output=pm,
|
||||||
|
# Use chunksize=1 to avoid the chance that some workers are
|
||||||
|
# idle while other workers still have more than one job in
|
||||||
|
# their chunk queue.
|
||||||
|
chunksize=1,
|
||||||
)
|
)
|
||||||
if not _ProcessResults(results):
|
finally:
|
||||||
ret = False
|
|
||||||
pool.close()
|
|
||||||
|
|
||||||
# Cleanup the reference now that we're done with it, and we're going to
|
|
||||||
# release any resources it points to. If we don't, later
|
|
||||||
# multiprocessing usage (e.g. checkouts) will try to pickle and then
|
|
||||||
# crash.
|
|
||||||
del Sync.ssh_proxy
|
|
||||||
|
|
||||||
sync_event.set()
|
sync_event.set()
|
||||||
pm.end()
|
sync_progress_thread.join()
|
||||||
|
|
||||||
self._fetch_times.Save()
|
self._fetch_times.Save()
|
||||||
self._local_sync_state.Save()
|
self._local_sync_state.Save()
|
||||||
|
|
||||||
@ -963,6 +958,8 @@ later is required to fix a server side protocol bug.
|
|||||||
if not success:
|
if not success:
|
||||||
err_event.set()
|
err_event.set()
|
||||||
|
|
||||||
|
# Call self update, unless requested not to
|
||||||
|
if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
|
||||||
_PostRepoFetch(rp, opt.repo_verify)
|
_PostRepoFetch(rp, opt.repo_verify)
|
||||||
if opt.network_only:
|
if opt.network_only:
|
||||||
# Bail out now; the rest touches the working tree.
|
# Bail out now; the rest touches the working tree.
|
||||||
@ -1008,8 +1005,15 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
return _FetchMainResult(all_projects)
|
return _FetchMainResult(all_projects)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
def _CheckoutOne(
|
def _CheckoutOne(
|
||||||
self, detach_head, force_sync, force_checkout, verbose, project
|
cls,
|
||||||
|
detach_head,
|
||||||
|
force_sync,
|
||||||
|
force_checkout,
|
||||||
|
force_rebase,
|
||||||
|
verbose,
|
||||||
|
project_idx,
|
||||||
):
|
):
|
||||||
"""Checkout work tree for one project
|
"""Checkout work tree for one project
|
||||||
|
|
||||||
@ -1019,12 +1023,14 @@ later is required to fix a server side protocol bug.
|
|||||||
existing git directory that was previously linked to a different
|
existing git directory that was previously linked to a different
|
||||||
object directory).
|
object directory).
|
||||||
force_checkout: Force checking out of the repo content.
|
force_checkout: Force checking out of the repo content.
|
||||||
|
force_rebase: Force rebase.
|
||||||
verbose: Whether to show verbose messages.
|
verbose: Whether to show verbose messages.
|
||||||
project: Project object for the project to checkout.
|
project_idx: Project index for the project to checkout.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Whether the fetch was successful.
|
Whether the fetch was successful.
|
||||||
"""
|
"""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
start = time.time()
|
start = time.time()
|
||||||
syncbuf = SyncBuffer(
|
syncbuf = SyncBuffer(
|
||||||
project.manifest.manifestProject.config, detach_head=detach_head
|
project.manifest.manifestProject.config, detach_head=detach_head
|
||||||
@ -1036,6 +1042,7 @@ later is required to fix a server side protocol bug.
|
|||||||
syncbuf,
|
syncbuf,
|
||||||
force_sync=force_sync,
|
force_sync=force_sync,
|
||||||
force_checkout=force_checkout,
|
force_checkout=force_checkout,
|
||||||
|
force_rebase=force_rebase,
|
||||||
errors=errors,
|
errors=errors,
|
||||||
verbose=verbose,
|
verbose=verbose,
|
||||||
)
|
)
|
||||||
@ -1057,7 +1064,7 @@ later is required to fix a server side protocol bug.
|
|||||||
if not success:
|
if not success:
|
||||||
logger.error("error: Cannot checkout %s", project.name)
|
logger.error("error: Cannot checkout %s", project.name)
|
||||||
finish = time.time()
|
finish = time.time()
|
||||||
return _CheckoutOneResult(success, errors, project, start, finish)
|
return _CheckoutOneResult(success, errors, project_idx, start, finish)
|
||||||
|
|
||||||
def _Checkout(self, all_projects, opt, err_results, checkout_errors):
|
def _Checkout(self, all_projects, opt, err_results, checkout_errors):
|
||||||
"""Checkout projects listed in all_projects
|
"""Checkout projects listed in all_projects
|
||||||
@ -1075,7 +1082,9 @@ later is required to fix a server side protocol bug.
|
|||||||
ret = True
|
ret = True
|
||||||
for result in results:
|
for result in results:
|
||||||
success = result.success
|
success = result.success
|
||||||
project = result.project
|
project = self.get_parallel_context()["projects"][
|
||||||
|
result.project_idx
|
||||||
|
]
|
||||||
start = result.start
|
start = result.start
|
||||||
finish = result.finish
|
finish = result.finish
|
||||||
self.event_log.AddSync(
|
self.event_log.AddSync(
|
||||||
@ -1102,6 +1111,8 @@ later is required to fix a server side protocol bug.
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
for projects in _SafeCheckoutOrder(all_projects):
|
for projects in _SafeCheckoutOrder(all_projects):
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = projects
|
||||||
proc_res = self.ExecuteInParallel(
|
proc_res = self.ExecuteInParallel(
|
||||||
opt.jobs_checkout,
|
opt.jobs_checkout,
|
||||||
functools.partial(
|
functools.partial(
|
||||||
@ -1109,13 +1120,18 @@ later is required to fix a server side protocol bug.
|
|||||||
opt.detach_head,
|
opt.detach_head,
|
||||||
opt.force_sync,
|
opt.force_sync,
|
||||||
opt.force_checkout,
|
opt.force_checkout,
|
||||||
|
opt.rebase,
|
||||||
opt.verbose,
|
opt.verbose,
|
||||||
),
|
),
|
||||||
projects,
|
range(len(projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
output=Progress(
|
output=Progress(
|
||||||
"Checking out", len(all_projects), quiet=opt.quiet
|
"Checking out", len(all_projects), quiet=opt.quiet
|
||||||
),
|
),
|
||||||
|
# Use chunksize=1 to avoid the chance that some workers are
|
||||||
|
# idle while other workers still have more than one job in
|
||||||
|
# their chunk queue.
|
||||||
|
chunksize=1,
|
||||||
)
|
)
|
||||||
|
|
||||||
self._local_sync_state.Save()
|
self._local_sync_state.Save()
|
||||||
@ -1480,6 +1496,19 @@ later is required to fix a server side protocol bug.
|
|||||||
[success, manifest_str] = server.GetApprovedManifest(
|
[success, manifest_str] = server.GetApprovedManifest(
|
||||||
branch, target
|
branch, target
|
||||||
)
|
)
|
||||||
|
elif (
|
||||||
|
"TARGET_PRODUCT" in os.environ
|
||||||
|
and "TARGET_BUILD_VARIANT" in os.environ
|
||||||
|
and "TARGET_RELEASE" in os.environ
|
||||||
|
):
|
||||||
|
target = "%s-%s-%s" % (
|
||||||
|
os.environ["TARGET_PRODUCT"],
|
||||||
|
os.environ["TARGET_RELEASE"],
|
||||||
|
os.environ["TARGET_BUILD_VARIANT"],
|
||||||
|
)
|
||||||
|
[success, manifest_str] = server.GetApprovedManifest(
|
||||||
|
branch, target
|
||||||
|
)
|
||||||
elif (
|
elif (
|
||||||
"TARGET_PRODUCT" in os.environ
|
"TARGET_PRODUCT" in os.environ
|
||||||
and "TARGET_BUILD_VARIANT" in os.environ
|
and "TARGET_BUILD_VARIANT" in os.environ
|
||||||
|
@ -218,9 +218,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
def _Options(self, p):
|
def _Options(self, p):
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"-t",
|
"-t",
|
||||||
|
"--topic-branch",
|
||||||
dest="auto_topic",
|
dest="auto_topic",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="send local branch name to Gerrit Code Review",
|
help="set the topic to the local branch name",
|
||||||
|
)
|
||||||
|
p.add_option(
|
||||||
|
"--topic",
|
||||||
|
help="set topic for the change",
|
||||||
)
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"--hashtag",
|
"--hashtag",
|
||||||
@ -549,42 +554,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
people = copy.deepcopy(original_people)
|
people = copy.deepcopy(original_people)
|
||||||
self._AppendAutoList(branch, people)
|
self._AppendAutoList(branch, people)
|
||||||
|
|
||||||
# Check if there are local changes that may have been forgotten.
|
|
||||||
changes = branch.project.UncommitedFiles()
|
|
||||||
if opt.ignore_untracked_files:
|
|
||||||
untracked = set(branch.project.UntrackedFiles())
|
|
||||||
changes = [x for x in changes if x not in untracked]
|
|
||||||
|
|
||||||
if changes:
|
|
||||||
key = "review.%s.autoupload" % branch.project.remote.review
|
|
||||||
answer = branch.project.config.GetBoolean(key)
|
|
||||||
|
|
||||||
# If they want to auto upload, let's not ask because it
|
|
||||||
# could be automated.
|
|
||||||
if answer is None:
|
|
||||||
print()
|
|
||||||
print(
|
|
||||||
"Uncommitted changes in %s (did you forget to "
|
|
||||||
"amend?):" % branch.project.name
|
|
||||||
)
|
|
||||||
print("\n".join(changes))
|
|
||||||
print("Continue uploading? (y/N) ", end="", flush=True)
|
|
||||||
if opt.yes:
|
|
||||||
print("<--yes>")
|
|
||||||
a = "yes"
|
|
||||||
else:
|
|
||||||
a = sys.stdin.readline().strip().lower()
|
|
||||||
if a not in ("y", "yes", "t", "true", "on"):
|
|
||||||
print("skipping upload", file=sys.stderr)
|
|
||||||
branch.uploaded = False
|
|
||||||
branch.error = "User aborted"
|
|
||||||
return
|
|
||||||
|
|
||||||
# Check if topic branches should be sent to the server during
|
# Check if topic branches should be sent to the server during
|
||||||
# upload.
|
# upload.
|
||||||
|
if opt.topic is None:
|
||||||
if opt.auto_topic is not True:
|
if opt.auto_topic is not True:
|
||||||
key = "review.%s.uploadtopic" % branch.project.remote.review
|
key = "review.%s.uploadtopic" % branch.project.remote.review
|
||||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||||
|
if opt.auto_topic:
|
||||||
|
opt.topic = branch.name
|
||||||
|
|
||||||
def _ExpandCommaList(value):
|
def _ExpandCommaList(value):
|
||||||
"""Split |value| up into comma delimited entries."""
|
"""Split |value| up into comma delimited entries."""
|
||||||
@ -626,19 +603,22 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
full_dest = destination
|
full_dest = destination
|
||||||
if not full_dest.startswith(R_HEADS):
|
if not full_dest.startswith(R_HEADS):
|
||||||
full_dest = R_HEADS + full_dest
|
full_dest = R_HEADS + full_dest
|
||||||
|
full_revision = branch.project.revisionExpr
|
||||||
|
if not full_revision.startswith(R_HEADS):
|
||||||
|
full_revision = R_HEADS + full_revision
|
||||||
|
|
||||||
# If the merge branch of the local branch is different from
|
# If the merge branch of the local branch is different from
|
||||||
# the project's revision AND destination, this might not be
|
# the project's revision AND destination, this might not be
|
||||||
# intentional.
|
# intentional.
|
||||||
if (
|
if (
|
||||||
merge_branch
|
merge_branch
|
||||||
and merge_branch != branch.project.revisionExpr
|
and merge_branch != full_revision
|
||||||
and merge_branch != full_dest
|
and merge_branch != full_dest
|
||||||
):
|
):
|
||||||
print(
|
print(
|
||||||
f"For local branch {branch.name}: merge branch "
|
f"For local branch {branch.name}: merge branch "
|
||||||
f"{merge_branch} does not match destination branch "
|
f"{merge_branch} does not match destination branch "
|
||||||
f"{destination}"
|
f"{destination} and revision {branch.project.revisionExpr}"
|
||||||
)
|
)
|
||||||
print("skipping upload.")
|
print("skipping upload.")
|
||||||
print(
|
print(
|
||||||
@ -651,7 +631,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
branch.UploadForReview(
|
branch.UploadForReview(
|
||||||
people,
|
people,
|
||||||
dryrun=opt.dryrun,
|
dryrun=opt.dryrun,
|
||||||
auto_topic=opt.auto_topic,
|
topic=opt.topic,
|
||||||
hashtags=hashtags,
|
hashtags=hashtags,
|
||||||
labels=labels,
|
labels=labels,
|
||||||
private=opt.private,
|
private=opt.private,
|
||||||
@ -736,16 +716,17 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
merge_branch = p.stdout.strip()
|
merge_branch = p.stdout.strip()
|
||||||
return merge_branch
|
return merge_branch
|
||||||
|
|
||||||
@staticmethod
|
@classmethod
|
||||||
def _GatherOne(opt, project):
|
def _GatherOne(cls, opt, project_idx):
|
||||||
"""Figure out the upload status for |project|."""
|
"""Figure out the upload status for |project|."""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
if opt.current_branch:
|
if opt.current_branch:
|
||||||
cbr = project.CurrentBranch
|
cbr = project.CurrentBranch
|
||||||
up_branch = project.GetUploadableBranch(cbr)
|
up_branch = project.GetUploadableBranch(cbr)
|
||||||
avail = [up_branch] if up_branch else None
|
avail = [up_branch] if up_branch else None
|
||||||
else:
|
else:
|
||||||
avail = project.GetUploadableBranches(opt.branch)
|
avail = project.GetUploadableBranches(opt.branch)
|
||||||
return (project, avail)
|
return (project_idx, avail)
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
projects = self.GetProjects(
|
projects = self.GetProjects(
|
||||||
@ -755,8 +736,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
def _ProcessResults(_pool, _out, results):
|
def _ProcessResults(_pool, _out, results):
|
||||||
pending = []
|
pending = []
|
||||||
for result in results:
|
for result in results:
|
||||||
project, avail = result
|
project_idx, avail = result
|
||||||
if avail is None:
|
if avail is None:
|
||||||
|
project = projects[project_idx]
|
||||||
logger.error(
|
logger.error(
|
||||||
'repo: error: %s: Unable to upload branch "%s". '
|
'repo: error: %s: Unable to upload branch "%s". '
|
||||||
"You might be able to fix the branch by running:\n"
|
"You might be able to fix the branch by running:\n"
|
||||||
@ -769,10 +751,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
pending.append(result)
|
pending.append(result)
|
||||||
return pending
|
return pending
|
||||||
|
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = projects
|
||||||
pending = self.ExecuteInParallel(
|
pending = self.ExecuteInParallel(
|
||||||
opt.jobs,
|
opt.jobs,
|
||||||
functools.partial(self._GatherOne, opt),
|
functools.partial(self._GatherOne, opt),
|
||||||
projects,
|
range(len(projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
8
tests/fixtures/test.gitconfig
vendored
8
tests/fixtures/test.gitconfig
vendored
@ -11,3 +11,11 @@
|
|||||||
intk = 10k
|
intk = 10k
|
||||||
intm = 10m
|
intm = 10m
|
||||||
intg = 10g
|
intg = 10g
|
||||||
|
|
||||||
|
[color "status"]
|
||||||
|
one = yellow
|
||||||
|
two = magenta cyan
|
||||||
|
three = black red ul
|
||||||
|
reset = reset
|
||||||
|
none
|
||||||
|
empty =
|
||||||
|
74
tests/test_color.py
Normal file
74
tests/test_color.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# Copyright (C) 2024 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the color.py module."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import color
|
||||||
|
import git_config
|
||||||
|
|
||||||
|
|
||||||
|
def fixture(*paths):
|
||||||
|
"""Return a path relative to test/fixtures."""
|
||||||
|
return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
|
||||||
|
|
||||||
|
|
||||||
|
class ColoringTests(unittest.TestCase):
|
||||||
|
"""tests of the Coloring class."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Create a GitConfig object using the test.gitconfig fixture."""
|
||||||
|
config_fixture = fixture("test.gitconfig")
|
||||||
|
self.config = git_config.GitConfig(config_fixture)
|
||||||
|
color.SetDefaultColoring("true")
|
||||||
|
self.color = color.Coloring(self.config, "status")
|
||||||
|
|
||||||
|
def test_Color_Parse_all_params_none(self):
|
||||||
|
"""all params are None"""
|
||||||
|
val = self.color._parse(None, None, None, None)
|
||||||
|
self.assertEqual("", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_first_parameter_none(self):
|
||||||
|
"""check fg & bg & attr"""
|
||||||
|
val = self.color._parse(None, "black", "red", "ul")
|
||||||
|
self.assertEqual("\x1b[4;30;41m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_one_entry(self):
|
||||||
|
"""check fg"""
|
||||||
|
val = self.color._parse("one", None, None, None)
|
||||||
|
self.assertEqual("\033[33m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_two_entry(self):
|
||||||
|
"""check fg & bg"""
|
||||||
|
val = self.color._parse("two", None, None, None)
|
||||||
|
self.assertEqual("\033[35;46m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_three_entry(self):
|
||||||
|
"""check fg & bg & attr"""
|
||||||
|
val = self.color._parse("three", None, None, None)
|
||||||
|
self.assertEqual("\033[4;30;41m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_reset_entry(self):
|
||||||
|
"""check reset entry"""
|
||||||
|
val = self.color._parse("reset", None, None, None)
|
||||||
|
self.assertEqual("\033[m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_empty_entry(self):
|
||||||
|
"""check empty entry"""
|
||||||
|
val = self.color._parse("none", "blue", "white", "dim")
|
||||||
|
self.assertEqual("\033[2;34;47m", val)
|
||||||
|
val = self.color._parse("empty", "green", "white", "bold")
|
||||||
|
self.assertEqual("\033[1;32;47m", val)
|
@ -1049,6 +1049,91 @@ class RemoveProjectElementTests(ManifestParseTestCase):
|
|||||||
self.assertTrue(found_proj1_path1)
|
self.assertTrue(found_proj1_path1)
|
||||||
self.assertTrue(found_proj2)
|
self.assertTrue(found_proj2)
|
||||||
|
|
||||||
|
def test_base_revision_checks_on_patching(self):
|
||||||
|
manifest_fail_wrong_tag = self.getXmlManifest(
|
||||||
|
"""
|
||||||
|
<manifest>
|
||||||
|
<remote name="default-remote" fetch="http://localhost" />
|
||||||
|
<default remote="default-remote" revision="tag.002" />
|
||||||
|
<project name="project1" path="tests/path1" />
|
||||||
|
<extend-project name="project1" revision="new_hash" base-rev="tag.001" />
|
||||||
|
</manifest>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with self.assertRaises(error.ManifestParseError):
|
||||||
|
manifest_fail_wrong_tag.ToXml()
|
||||||
|
|
||||||
|
manifest_fail_remove = self.getXmlManifest(
|
||||||
|
"""
|
||||||
|
<manifest>
|
||||||
|
<remote name="default-remote" fetch="http://localhost" />
|
||||||
|
<default remote="default-remote" revision="refs/heads/main" />
|
||||||
|
<project name="project1" path="tests/path1" revision="hash1" />
|
||||||
|
<remove-project name="project1" base-rev="wrong_hash" />
|
||||||
|
</manifest>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with self.assertRaises(error.ManifestParseError):
|
||||||
|
manifest_fail_remove.ToXml()
|
||||||
|
|
||||||
|
manifest_fail_extend = self.getXmlManifest(
|
||||||
|
"""
|
||||||
|
<manifest>
|
||||||
|
<remote name="default-remote" fetch="http://localhost" />
|
||||||
|
<default remote="default-remote" revision="refs/heads/main" />
|
||||||
|
<project name="project1" path="tests/path1" revision="hash1" />
|
||||||
|
<extend-project name="project1" revision="new_hash" base-rev="wrong_hash" />
|
||||||
|
</manifest>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with self.assertRaises(error.ManifestParseError):
|
||||||
|
manifest_fail_extend.ToXml()
|
||||||
|
|
||||||
|
manifest_fail_unknown = self.getXmlManifest(
|
||||||
|
"""
|
||||||
|
<manifest>
|
||||||
|
<remote name="default-remote" fetch="http://localhost" />
|
||||||
|
<default remote="default-remote" revision="refs/heads/main" />
|
||||||
|
<project name="project1" path="tests/path1" />
|
||||||
|
<extend-project name="project1" revision="new_hash" base-rev="any_hash" />
|
||||||
|
</manifest>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with self.assertRaises(error.ManifestParseError):
|
||||||
|
manifest_fail_unknown.ToXml()
|
||||||
|
|
||||||
|
manifest_ok = self.getXmlManifest(
|
||||||
|
"""
|
||||||
|
<manifest>
|
||||||
|
<remote name="default-remote" fetch="http://localhost" />
|
||||||
|
<default remote="default-remote" revision="refs/heads/main" />
|
||||||
|
<project name="project1" path="tests/path1" revision="hash1" />
|
||||||
|
<project name="project2" path="tests/path2" revision="hash2" />
|
||||||
|
<project name="project3" path="tests/path3" revision="hash3" />
|
||||||
|
<project name="project4" path="tests/path4" revision="hash4" />
|
||||||
|
|
||||||
|
<remove-project name="project1" />
|
||||||
|
<remove-project name="project2" base-rev="hash2" />
|
||||||
|
<project name="project2" path="tests/path2" revision="new_hash2" />
|
||||||
|
<extend-project name="project3" base-rev="hash3" revision="new_hash3" />
|
||||||
|
<extend-project name="project3" base-rev="new_hash3" revision="newer_hash3" />
|
||||||
|
<remove-project path="tests/path4" base-rev="hash4" />
|
||||||
|
</manifest>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
found_proj2 = False
|
||||||
|
found_proj3 = False
|
||||||
|
for proj in manifest_ok.projects:
|
||||||
|
if proj.name == "project2":
|
||||||
|
found_proj2 = True
|
||||||
|
if proj.name == "project3":
|
||||||
|
found_proj3 = True
|
||||||
|
self.assertNotEqual(proj.name, "project1")
|
||||||
|
self.assertNotEqual(proj.name, "project4")
|
||||||
|
self.assertTrue(found_proj2)
|
||||||
|
self.assertTrue(found_proj3)
|
||||||
|
self.assertTrue(len(manifest_ok.projects) == 2)
|
||||||
|
|
||||||
|
|
||||||
class ExtendProjectElementTests(ManifestParseTestCase):
|
class ExtendProjectElementTests(ManifestParseTestCase):
|
||||||
"""Tests for <extend-project>."""
|
"""Tests for <extend-project>."""
|
||||||
|
@ -13,9 +13,14 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
"""Unit test for repo_logging module."""
|
"""Unit test for repo_logging module."""
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import io
|
||||||
|
import logging
|
||||||
import unittest
|
import unittest
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
|
from color import SetDefaultColoring
|
||||||
from error import RepoExitError
|
from error import RepoExitError
|
||||||
from repo_logging import RepoLogger
|
from repo_logging import RepoLogger
|
||||||
|
|
||||||
@ -62,3 +67,35 @@ class TestRepoLogger(unittest.TestCase):
|
|||||||
mock.call("Repo command failed: %s", "RepoExitError"),
|
mock.call("Repo command failed: %s", "RepoExitError"),
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_log_with_format_string(self):
|
||||||
|
"""Test different log levels with format strings."""
|
||||||
|
|
||||||
|
# Set color output to "always" for consistent test results.
|
||||||
|
# This ensures the logger's behavior is uniform across different
|
||||||
|
# environments and git configurations.
|
||||||
|
SetDefaultColoring("always")
|
||||||
|
|
||||||
|
# Regex pattern to match optional ANSI color codes.
|
||||||
|
# \033 - Escape character
|
||||||
|
# \[ - Opening square bracket
|
||||||
|
# [0-9;]* - Zero or more digits or semicolons
|
||||||
|
# m - Ending 'm' character
|
||||||
|
# ? - Makes the entire group optional
|
||||||
|
opt_color = r"(\033\[[0-9;]*m)?"
|
||||||
|
|
||||||
|
for level in (logging.INFO, logging.WARN, logging.ERROR):
|
||||||
|
name = logging.getLevelName(level)
|
||||||
|
|
||||||
|
with self.subTest(level=level, name=name):
|
||||||
|
output = io.StringIO()
|
||||||
|
|
||||||
|
with contextlib.redirect_stderr(output):
|
||||||
|
logger = RepoLogger(__name__)
|
||||||
|
logger.log(level, "%s", "100% pass")
|
||||||
|
|
||||||
|
self.assertRegex(
|
||||||
|
output.getvalue().strip(),
|
||||||
|
f"^{opt_color}100% pass{opt_color}$",
|
||||||
|
f"failed for level {name}",
|
||||||
|
)
|
||||||
|
@ -355,6 +355,30 @@ class SafeCheckoutOrder(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Chunksize(unittest.TestCase):
|
||||||
|
"""Tests for _chunksize."""
|
||||||
|
|
||||||
|
def test_single_project(self):
|
||||||
|
"""Single project."""
|
||||||
|
self.assertEqual(sync._chunksize(1, 1), 1)
|
||||||
|
|
||||||
|
def test_low_project_count(self):
|
||||||
|
"""Multiple projects, low number of projects to sync."""
|
||||||
|
self.assertEqual(sync._chunksize(10, 1), 10)
|
||||||
|
self.assertEqual(sync._chunksize(10, 2), 5)
|
||||||
|
self.assertEqual(sync._chunksize(10, 4), 2)
|
||||||
|
self.assertEqual(sync._chunksize(10, 8), 1)
|
||||||
|
self.assertEqual(sync._chunksize(10, 16), 1)
|
||||||
|
|
||||||
|
def test_high_project_count(self):
|
||||||
|
"""Multiple projects, high number of projects to sync."""
|
||||||
|
self.assertEqual(sync._chunksize(2800, 1), 32)
|
||||||
|
self.assertEqual(sync._chunksize(2800, 16), 32)
|
||||||
|
self.assertEqual(sync._chunksize(2800, 32), 32)
|
||||||
|
self.assertEqual(sync._chunksize(2800, 64), 32)
|
||||||
|
self.assertEqual(sync._chunksize(2800, 128), 21)
|
||||||
|
|
||||||
|
|
||||||
class GetPreciousObjectsState(unittest.TestCase):
|
class GetPreciousObjectsState(unittest.TestCase):
|
||||||
"""Tests for _GetPreciousObjectsState."""
|
"""Tests for _GetPreciousObjectsState."""
|
||||||
|
|
||||||
|
7
tox.ini
7
tox.ini
@ -30,6 +30,7 @@ python =
|
|||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
deps =
|
deps =
|
||||||
|
-c constraints.txt
|
||||||
black
|
black
|
||||||
flake8
|
flake8
|
||||||
isort
|
isort
|
||||||
@ -44,17 +45,19 @@ setenv =
|
|||||||
[testenv:lint]
|
[testenv:lint]
|
||||||
skip_install = true
|
skip_install = true
|
||||||
deps =
|
deps =
|
||||||
|
-c constraints.txt
|
||||||
black
|
black
|
||||||
flake8
|
flake8
|
||||||
commands =
|
commands =
|
||||||
black --check {posargs:.}
|
black --check {posargs:. repo run_tests release/update-hooks release/update-manpages}
|
||||||
flake8
|
flake8
|
||||||
|
|
||||||
[testenv:format]
|
[testenv:format]
|
||||||
skip_install = true
|
skip_install = true
|
||||||
deps =
|
deps =
|
||||||
|
-c constraints.txt
|
||||||
black
|
black
|
||||||
flake8
|
flake8
|
||||||
commands =
|
commands =
|
||||||
black {posargs:.}
|
black {posargs:. repo run_tests release/update-hooks release/update-manpages}
|
||||||
flake8
|
flake8
|
||||||
|
@ -18,8 +18,12 @@ import importlib.util
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
def WrapperDir():
|
||||||
|
return os.path.dirname(__file__)
|
||||||
|
|
||||||
|
|
||||||
def WrapperPath():
|
def WrapperPath():
|
||||||
return os.path.join(os.path.dirname(__file__), "repo")
|
return os.path.join(WrapperDir(), "repo")
|
||||||
|
|
||||||
|
|
||||||
@functools.lru_cache(maxsize=None)
|
@functools.lru_cache(maxsize=None)
|
||||||
|
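The _chunksize() helper added to subcmds/sync.py above (and pinned down by the Chunksize tests) trades scheduling overhead against responsiveness: small project counts get at least one project per task, large counts are capped to one worker batch. A quick check of the arithmetic, assuming WORKER_BATCH_SIZE is 32 as the test expectations imply:

WORKER_BATCH_SIZE = 32  # assumed from the test expectations above


def _chunksize(projects: int, jobs: int) -> int:
    """Same formula as the sync.py hunk: at least 1, at most one batch."""
    return min(max(1, projects // jobs), WORKER_BATCH_SIZE)


# Matches the expectations in the Chunksize tests:
assert _chunksize(1, 1) == 1
assert _chunksize(10, 4) == 2       # 10 // 4 == 2
assert _chunksize(2800, 64) == 32   # 2800 // 64 == 43, capped at 32
assert _chunksize(2800, 128) == 21  # 2800 // 128 == 21, under the cap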