mirror of https://gerrit.googlesource.com/git-repo
synced 2025-04-20 14:09:30 +00:00

Compare commits — 121 commits (SHA1):

a94457d1ce 97dc5c1bd9 0214730c9a daebd6cbc2 3667de1d0f 85ee1738e6
f070331a4c 9ecb80ba26 dc8185f2a9 59b81c84de 507d463600 cd391e77d0
8310436be0 d5087392ed 91f428058d 243df2042e 4b94e773ef fc901b92bb
8d5f032611 99eca45eb2 66685f07ec cf9a2a2a76 5ae8292fea dfdf577e98
747ec83f58 1711bc23c0 db111d3924 3405446a4e 41a27eb854 d93fe60e89
61224d01fa 13d6588bf6 9500aca754 e8a7b9d596 cf411b3f03 1feecbd91e
616e314902 fafd1ec23e b1613d741e ab2d321104 aada468916 1d5098617e
e219c78fe5 f9f4df62e0 ebdf0409d2 303bd963d5 ae384f8623 70a4e643e6
8da4861b38 39ffd9977e 584863fb5e 454fdaf119 f7f9dd4deb 70ee4dd313
cfe3095e50 621de7ed12 d7ebdf56be fabab4e245 b577444a90 1e19f7dd61
d8b4101eae 1c53b0fa44 e5ae870a2f e59e2ae757 c44ad09309 4592a63de5
0444ddf78e 9bf8236c24 87f52f308c 562cea7758 eede374e3e 2c5fb84d35
12f6dc49e9 5591d99ee2 9d865454aa cbd78a9194 46819a78a1 159389f0da
4406642e20 73356f1d5c 09fc214a79 3762b17e98 ae419e1e01 a3a7372612
fff1d2d74c 4b01a242d8 46790229fc edadb25c02 96edb9b573 5554572f02
97ca50f5f9 8896b68926 fec8cd6704 b8139bdcf8 26fa3180fb d379e77f44
4217a82bec 208f344950 138c8a9ff5 9b57aa00f6 b1d1ece2fb 449b23b698
e5fb6e585f 48e4137eba 172c58398b aa506db8a7 14c61d2c9d 4c80921d22
f56484c05b a50c4e3bc0 0dd0a830b0 9f0ef5d926 c287428b37 c984e8d4f6
6d821124e0 560a79727f 8a6d1724d9 3652b497bb 89f761cfef d32b2dcd15
b32ccbb66b
color.py (3 changed lines)

@@ -194,7 +194,7 @@ class Coloring:
         if not opt:
             return _Color(fg, bg, attr)

-        v = self._config.GetString("%s.%s" % (self._section, opt))
+        v = self._config.GetString(f"{self._section}.{opt}")
         if v is None:
             return _Color(fg, bg, attr)

@@ -210,6 +210,7 @@ class Coloring:
                 if have_fg:
                     bg = a
                 else:
+                    have_fg = True
                     fg = a
             elif is_attr(a):
                 attr = a
command.py (58 changed lines)

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import contextlib
 import multiprocessing
 import optparse
 import os

@@ -70,6 +71,14 @@ class Command:
     # migrated subcommands can set it to False.
     MULTI_MANIFEST_SUPPORT = True

+    # Shared data across parallel execution workers.
+    _parallel_context = None
+
+    @classmethod
+    def get_parallel_context(cls):
+        assert cls._parallel_context is not None
+        return cls._parallel_context
+
     def __init__(
         self,
         repodir=None,

@@ -242,9 +251,39 @@ class Command:
         """Perform the action, after option parsing is complete."""
         raise NotImplementedError

-    @staticmethod
+    @classmethod
+    @contextlib.contextmanager
+    def ParallelContext(cls):
+        """Obtains the context, which is shared to ExecuteInParallel workers.
+
+        Callers can store data in the context dict before invocation of
+        ExecuteInParallel. The dict will then be shared to child workers of
+        ExecuteInParallel.
+        """
+        assert cls._parallel_context is None
+        cls._parallel_context = {}
+        try:
+            yield
+        finally:
+            cls._parallel_context = None
+
+    @classmethod
+    def _InitParallelWorker(cls, context, initializer):
+        cls._parallel_context = context
+        if initializer:
+            initializer()
+
+    @classmethod
     def ExecuteInParallel(
-        jobs, func, inputs, callback, output=None, ordered=False
+        cls,
+        jobs,
+        func,
+        inputs,
+        callback,
+        output=None,
+        ordered=False,
+        chunksize=WORKER_BATCH_SIZE,
+        initializer=None,
     ):
         """Helper for managing parallel execution boiler plate.

@@ -269,6 +308,9 @@ class Command:
             output: An output manager. May be progress.Progess or
                 color.Coloring.
             ordered: Whether the jobs should be processed in order.
+            chunksize: The number of jobs processed in batch by parallel
+                workers.
+            initializer: Worker initializer.

         Returns:
             The |callback| function's results are returned.

@@ -278,12 +320,16 @@ class Command:
             if len(inputs) == 1 or jobs == 1:
                 return callback(None, output, (func(x) for x in inputs))
             else:
-                with multiprocessing.Pool(jobs) as pool:
+                with multiprocessing.Pool(
+                    jobs,
+                    initializer=cls._InitParallelWorker,
+                    initargs=(cls._parallel_context, initializer),
+                ) as pool:
                     submit = pool.imap if ordered else pool.imap_unordered
                     return callback(
                         pool,
                         output,
-                        submit(func, inputs, chunksize=WORKER_BATCH_SIZE),
+                        submit(func, inputs, chunksize=chunksize),
                     )
         finally:
             if isinstance(output, progress.Progress):

@@ -501,7 +547,3 @@ class MirrorSafeCommand:
     """Command permits itself to run within a mirror, and does not require a
     working directory.
     """
-
-
-class GitcClientCommand:
-    """Command that requires the local client to be a GITC client."""
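The hunks above add a class-level parallel context to `Command`: callers seed a dict inside `ParallelContext()`, and `_InitParallelWorker` re-installs that dict in every pool worker so `get_parallel_context()` works on both sides of the fork. The sketch below is a hypothetical illustration of that flow, not code from this change; the subcommand name, the `_worker` helper, the `"projects"` key, and the fixed job count are invented for the example, and `GetProjects` is assumed to be the existing helper on `Command`.

```python
from command import Command


class ListNames(Command):  # hypothetical subcommand, for illustration only
    """Print project names using the shared parallel context."""

    @classmethod
    def _worker(cls, idx):
        # Runs inside a pool process. _InitParallelWorker has already
        # installed the shared dict there, so only the small index needs
        # to be pickled per work item.
        return cls.get_parallel_context()["projects"][idx].name

    def Execute(self, opt, args):
        projects = self.GetProjects(args)  # assumed existing helper
        with self.ParallelContext():
            # Seed the context once; ExecuteInParallel hands it to every
            # worker via the pool initializer added in this change.
            self.get_parallel_context()["projects"] = projects
            names = self.ExecuteInParallel(
                jobs=4,  # illustrative fixed job count
                func=self._worker,
                inputs=range(len(projects)),
                callback=lambda pool, output, results: sorted(results),
            )
        for name in names:
            print(name)
```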
constraints.txt (new file, 2 lines)

@@ -0,0 +1,2 @@
+# NB: Keep in sync with run_tests.vpython3.
+black<26
@@ -141,7 +141,7 @@ Instead, you should use standard Git workflows like [git worktree] or
   (e.g. a local mirror & a public review server) while avoiding duplicating
   the content. However, this can run into problems if different remotes use
   the same path on their respective servers. Best to avoid that.
-* `subprojects/`: Like `projects/`, but for git submodules.
+* `modules/`: Like `projects/`, but for git submodules.
 * `subproject-objects/`: Like `project-objects/`, but for git submodules.
 * `worktrees/`: Bare checkouts of every project synced by the manifest. The
   filesystem layout matches the `<project name=...` setting in the manifest
@@ -107,11 +107,13 @@ following DTD:
     <!ATTLIST extend-project remote CDATA #IMPLIED>
     <!ATTLIST extend-project dest-branch CDATA #IMPLIED>
     <!ATTLIST extend-project upstream CDATA #IMPLIED>
+    <!ATTLIST extend-project base-rev CDATA #IMPLIED>

     <!ELEMENT remove-project EMPTY>
     <!ATTLIST remove-project name CDATA #IMPLIED>
     <!ATTLIST remove-project path CDATA #IMPLIED>
     <!ATTLIST remove-project optional CDATA #IMPLIED>
+    <!ATTLIST remove-project base-rev CDATA #IMPLIED>

     <!ELEMENT repo-hooks EMPTY>
     <!ATTLIST repo-hooks in-project CDATA #REQUIRED>

@@ -229,26 +231,7 @@ At most one manifest-server may be specified. The url attribute
 is used to specify the URL of a manifest server, which is an
 XML RPC service.

-The manifest server should implement the following RPC methods:
-
-  GetApprovedManifest(branch, target)
-
-Return a manifest in which each project is pegged to a known good revision
-for the current branch and target. This is used by repo sync when the
---smart-sync option is given.
-
-The target to use is defined by environment variables TARGET_PRODUCT
-and TARGET_BUILD_VARIANT. These variables are used to create a string
-of the form $TARGET_PRODUCT-$TARGET_BUILD_VARIANT, e.g. passion-userdebug.
-If one of those variables or both are not present, the program will call
-GetApprovedManifest without the target parameter and the manifest server
-should choose a reasonable default target.
-
-  GetManifest(tag)
-
-Return a manifest in which each project is pegged to the revision at
-the specified tag. This is used by repo sync when the --smart-tag option
-is given.
+See the [smart sync documentation](./smart-sync.md) for more details.


 ### Element submanifest

@@ -433,6 +416,14 @@ project. Same syntax as the corresponding element of `project`.
 Attribute `upstream`: If specified, overrides the upstream of the original
 project. Same syntax as the corresponding element of `project`.

+Attribute `base-rev`: If specified, adds a check against the revision
+to be extended. Manifest parse will fail and give a list of mismatch extends
+if the revisions being extended have changed since base-rev was set.
+Intended for use with layered manifests using hash revisions to prevent
+patch branches hiding newer upstream revisions. Also compares named refs
+like branches or tags but is misleading if branches are used as base-rev.
+Same syntax as the corresponding element of `project`.
+
 ### Element annotation

 Zero or more annotation elements may be specified as children of a

@@ -496,6 +487,14 @@ name. Logic otherwise behaves like both are specified.
 Attribute `optional`: Set to true to ignore remove-project elements with no
 matching `project` element.

+Attribute `base-rev`: If specified, adds a check against the revision
+to be removed. Manifest parse will fail and give a list of mismatch removes
+if the revisions being removed have changed since base-rev was set.
+Intended for use with layered manifests using hash revisions to prevent
+patch branches hiding newer upstream revisions. Also compares named refs
+like branches or tags but is misleading if branches are used as base-rev.
+Same syntax as the corresponding element of `project`.
+
 ### Element repo-hooks

 NB: See the [practical documentation](./repo-hooks.md) for using repo hooks.
@@ -33,9 +33,8 @@ you have newer versions installed, your choices are:

 * Modify the [repo launcher]'s shebang to suite your environment.
 * Download an older version of the [repo launcher] and don't upgrade it.
-  Be aware that there is no guarantee old repo launchers are WILL work with
-  current versions of repo. Bug reports using old launchers will not be
-  accepted.
+  Be aware that we do not guarantee old repo launchers will work with current
+  versions of repo. Bug reports using old launchers will not be accepted.

 ## When to drop support

@@ -96,6 +96,9 @@ If that tag is valid, then repo will warn and use that commit instead.

 If that tag cannot be verified, it gives up and forces the user to resolve.

+If env variable `REPO_SKIP_SELF_UPDATE` is defined, this will
+bypass the self update algorithm.
+
 ### Force an update

 The `repo selfupdate` command can be used to force an immediate update.
@@ -202,7 +205,7 @@ still support them.
 Things in italics are things we used to care about but probably don't anymore.

 | Date | EOL | [Git][rel-g] | [Python][rel-p] | [SSH][rel-o] | [Ubuntu][rel-u] / [Debian][rel-d] | Git | Python | SSH |
-|:--------:|:------------:|:------------:|:---------------:|:------------:|-----------------------------------|-----|--------|-----|
+|:--------:|:------------:|:------------:|:---------------:|:------------:|-----------------------------------|:---:|:------:|:---:|
 | Apr 2008 | | | | 5.0 |
 | Jun 2008 | | | | 5.1 |
 | Oct 2008 | *Oct 2013* | | 2.6.0 | | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |

@@ -241,7 +244,7 @@ Things in italics are things we used to care about but probably don't anymore.
 | Feb 2014 | *Dec 2014* | **1.9.0** | | | *14.04 Trusty* |
 | Mar 2014 | *Mar 2019* | | *3.4.0* | | *14.04 Trusty* - 15.10 Wily / *Jessie* |
 | Mar 2014 | | | | 6.6 | *14.04 Trusty* - 14.10 Utopic |
-| Apr 2014 | *Apr 2022* | | | | *14.04 Trusty* | 1.9.1 | 2.7.5 3.4.0 | 6.6 |
+| Apr 2014 | *Apr 2024* | | | | *14.04 Trusty* | 1.9.1 | 2.7.5 3.4.0 | 6.6 |
 | May 2014 | *Dec 2014* | 2.0.0 |
 | Aug 2014 | *Dec 2014* | *2.1.0* | | | 14.10 Utopic - 15.04 Vivid / *Jessie* |
 | Oct 2014 | | | | 6.7 | 15.04 Vivid |

@@ -262,7 +265,7 @@ Things in italics are things we used to care about but probably don't anymore.
 | Jan 2016 | *Jul 2017* | *2.7.0* | | | *16.04 Xenial* |
 | Feb 2016 | | | | 7.2 | *16.04 Xenial* |
 | Mar 2016 | *Jul 2017* | 2.8.0 |
-| Apr 2016 | *Apr 2024* | | | | *16.04 Xenial* | 2.7.4 | 2.7.11 3.5.1 | 7.2 |
+| Apr 2016 | *Apr 2026* | | | | *16.04 Xenial* | 2.7.4 | 2.7.11 3.5.1 | 7.2 |
 | Jun 2016 | *Jul 2017* | 2.9.0 | | | 16.10 Yakkety |
 | Jul 2016 | | | | 7.3 | 16.10 Yakkety |
 | Sep 2016 | *Sep 2017* | 2.10.0 |

@@ -312,14 +315,33 @@ Things in italics are things we used to care about but probably don't anymore.
 | Oct 2020 | | | | | 20.10 Groovy | 2.27.0 | 2.7.18 3.8.6 | 8.3 |
 | Oct 2020 | **Oct 2025** | | 3.9.0 | | 21.04 Hirsute / **Bullseye** |
 | Dec 2020 | *Mar 2021* | 2.30.0 | | | 21.04 Hirsute / **Bullseye** |
-| Mar 2021 | | 2.31.0 |
-| Mar 2021 | | | | 8.5 |
+| Mar 2021 | | 2.31.0 | | 8.5 |
 | Apr 2021 | | | | 8.6 |
 | Apr 2021 | *Jan 2022* | | | | 21.04 Hirsute | 2.30.2 | 2.7.18 3.9.4 | 8.4 |
 | Jun 2021 | | 2.32.0 |
-| Aug 2021 | | 2.33.0 |
-| Aug 2021 | | | | 8.7 |
+| Aug 2021 | | 2.33.0 | | 8.7 |
 | Aug 2021 | **Aug 2026** | | | | **Debian 11 Bullseye** | 2.30.2 | 2.7.18 3.9.2 | 8.4 |
+| Sep 2021 | | | | 8.8 |
+| Oct 2021 | | 2.34.0 | 3.10.0 | | **22.04 Jammy** |
+| Jan 2022 | | 2.35.0 |
+| Feb 2022 | | | | 8.9 | **22.04 Jammy** |
+| Apr 2022 | | 2.36.0 | | 9.0 |
+| Apr 2022 | **Apr 2032** | | | | **22.04 Jammy** | 2.34.1 | 2.7.18 3.10.6 | 8.9 |
+| Jun 2022 | | 2.37.0 |
+| Oct 2022 | | 2.38.0 | | 9.1 |
+| Oct 2022 | | | 3.11.0 | | **Bookworm** |
+| Dec 2022 | | 2.39.0 | | | **Bookworm** |
+| Feb 2023 | | | | 9.2 | **Bookworm** |
+| Mar 2023 | | 2.40.0 | | 9.3 |
+| Jun 2023 | | 2.41.0 |
+| Jun 2023 | **Jun 2028** | | | | **Debian 12 Bookworm** | 2.39.2 | 3.11.2 | 9.2 |
+| Aug 2023 | | 2.42.0 | | 9.4 |
+| Oct 2023 | | | 3.12.0 | 9.5 |
+| Nov 2022 | | 2.43.0 |
+| Dec 2023 | | | | 9.6 |
+| Feb 2024 | | 2.44.0 |
+| Mar 2024 | | | | 9.7 |
+| Oct 2024 | | | 3.13.0 |
 | **Date** | **EOL** | **[Git][rel-g]** | **[Python][rel-p]** | **[SSH][rel-o]** | **[Ubuntu][rel-u] / [Debian][rel-d]** | **Git** | **Python** | **SSH** |


@@ -328,7 +350,7 @@ Things in italics are things we used to care about but probably don't anymore.
 [rel-g]: https://en.wikipedia.org/wiki/Git#Releases
 [rel-o]: https://www.openssh.com/releasenotes.html
 [rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
-[rel-u]: https://en.wikipedia.org/wiki/Ubuntu_version_history#Table_of_versions
+[rel-u]: https://wiki.ubuntu.com/Releases
 [example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion
 [repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss
 [go/repo-release]: https://goto.google.com/repo-release
docs/smart-sync.md (new file, 129 lines)

@@ -0,0 +1,129 @@
+# repo Smart Syncing
+
+Repo normally fetches & syncs manifests from the same URL specified during
+`repo init`, and that often fetches the latest revisions of all projects in
+the manifest. This flow works well for tracking and developing with the
+latest code, but often it's desirable to sync to other points. For example,
+to get a local build matching a specific release or build to reproduce bugs
+reported by other people.
+
+Repo's sync subcommand has support for fetching manifests from a server over
+an XML-RPC connection. The local configuration and network API are defined by
+repo, but individual projects have to host their own server for the client to
+communicate with.
+
+This process is called "smart syncing" -- instead of blindly fetching the latest
+revision of all projects and getting an unknown state to develop against, the
+client passes a request to the server and is given a matching manifest that
+typically specifies specific commits for every project to fetch a known source
+state.
+
+[TOC]
+
+## Manifest Configuration
+
+The manifest specifies the server to communicate with via the
+the [`<manifest-server>` element](manifest-format.md#Element-manifest_server)
+element. This is how the client knows what service to talk to.
+
+```xml
+<manifest-server url="https://example.com/your/manifest/server/url" />
+```
+
+If the URL starts with `persistent-`, then the
+[`git-remote-persistent-https` helper](https://github.com/git/git/blob/HEAD/contrib/persistent-https/README)
+is used to communicate with the server.
+
+## Credentials
+
+Credentials may be specified directly in typical `username:password`
+[URI syntax](https://en.wikipedia.org/wiki/URI#Syntax) in the
+`<manifest-server>` element directly in the manifest.
+
+If they are not specified, `repo sync` has `--manifest-server-username=USERNAME`
+and `--manifest-server-password=PASSWORD` options.
+
+If those are not used, then repo will look up the host in your
+[`~/.netrc`](https://docs.python.org/3/library/netrc.html) database.
+
+When making the connection, cookies matching the host are automatically loaded
+from the cookiejar specified in
+[Git's `http.cookiefile` setting](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httpcookieFile).
+
+## Manifest Server
+
+Unfortunately, there are no public reference implementations. Google has an
+internal one for Android, but it is written using Google's internal systems,
+so wouldn't be that helpful as a reference.
+
+That said, the XML-RPC API is pretty simple, so any standard XML-RPC server
+example would do. Google's internal server uses Python's
+[xmlrpc.server.SimpleXMLRPCDispatcher](https://docs.python.org/3/library/xmlrpc.server.html).
+
+## Network API
+
+The manifest server should implement the following RPC methods.
+
+### GetApprovedManifest
+
+> `GetApprovedManifest(branch: str, target: Optional[str]) -> str`
+
+The meaning of `branch` and `target` is not strictly defined. The server may
+interpret them however it wants. The recommended interpretation is that the
+`branch` matches the manifest branch, and `target` is an identifier for your
+project that matches something users would build.
+
+See the client section below for how repo typically generates these values.
+
+The server will return a manifest or an error. If it's an error, repo will
+show the output directly to the user to provide a limited feedback channel.
+
+If the user's request is ambiguous and could match multiple manifests, the
+server has to decide whether to pick one automatically (and silently such that
+the user won't know there were multiple matches), or return an error and force
+the user to be more specific.
+
+### GetManifest
+
+> `GetManifest(tag: str) -> str`
+
+The meaning of `tag` is not strictly defined. Projects are encouraged to use
+a system where the tag matches a unique source state.
+
+See the client section below for how repo typically generates these values.
+
+The server will return a manifest or an error. If it's an error, repo will
+show the output directly to the user to provide a limited feedback channel.
+
+If the user's request is ambiguous and could match multiple manifests, the
+server has to decide whether to pick one automatically (and silently such that
+the user won't know there were multiple matches), or return an error and force
+the user to be more specific.
+
+## Client Options
+
+Once repo has successfully downloaded the manifest from the server, it saves a
+copy into `.repo/manifests/smart_sync_override.xml` so users can examine it.
+The next time `repo sync` is run, this file is automatically replaced or removed
+based on the current set of options.
+
+### --smart-sync
+
+Repo will call `GetApprovedManifest(branch[, target])`.
+
+The `branch` is determined by the current manifest branch as specified by
+`--manifest-branch=BRANCH` when running `repo init`.
+
+The `target` is defined by environment variables in the order below. If none
+of them match, then `target` is omitted. These variables were decided as they
+match the settings Android build environments automatically setup.
+
+1. `${SYNC_TARGET}`: If defined, the value is used directly.
+2. `${TARGET_PRODUCT}-${TARGET_RELEASE}-${TARGET_BUILD_VARIANT}`: If these
+   variables are all defined, then they are merged with `-` and used.
+3. `${TARGET_PRODUCT}-${TARGET_BUILD_VARIANT}`: If these variables are all
+   defined, then they are merged with `-` and used.
+
+### --smart-tag=TAG
+
+Repo will call `GetManifest(TAG)`.
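For readers who want to prototype the server side described in the new smart-sync document, the sketch below is one possible minimal implementation built on the standard-library `xmlrpc.server` module that the document points at. It is not the (non-public) reference server: the manifest directory layout, file naming, and port are assumptions for illustration, and raising an exception is simply how `SimpleXMLRPCServer` turns a failure into an XML-RPC fault that `repo sync` can report to the user.

```python
from pathlib import Path
from xmlrpc.server import SimpleXMLRPCServer

# Assumed layout: pre-generated, pinned manifests stored on disk.
MANIFEST_DIR = Path("/srv/manifests")


def GetApprovedManifest(branch, target=None):
    """Return a manifest pinned to known-good revisions for branch/target."""
    name = f"{branch}-{target}.xml" if target else f"{branch}.xml"
    path = MANIFEST_DIR / "approved" / name
    if not path.is_file():
        # Unhandled exceptions become XML-RPC faults; repo shows the text.
        raise ValueError(f"no approved manifest for {branch!r} / {target!r}")
    return path.read_text()


def GetManifest(tag):
    """Return the manifest pinned at the given tag."""
    path = MANIFEST_DIR / "tags" / f"{tag}.xml"
    if not path.is_file():
        raise ValueError(f"unknown tag {tag!r}")
    return path.read_text()


def main():
    server = SimpleXMLRPCServer(("0.0.0.0", 8000), allow_none=True)
    server.register_function(GetApprovedManifest)
    server.register_function(GetManifest)
    server.serve_forever()


if __name__ == "__main__":
    main()
```

A manifest pointing `<manifest-server url="http://HOST:8000/" />` at such a service could then be exercised with `repo sync --smart-sync` or `repo sync --smart-tag=TAG`.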
@@ -104,9 +104,7 @@ least one of these before using this command.""", # noqa: E501
         try:
             rc = subprocess.Popen(args, shell=shell).wait()
         except OSError as e:
-            raise EditorError(
-                "editor failed, %s: %s %s" % (str(e), editor, path)
-            )
+            raise EditorError(f"editor failed, {str(e)}: {editor} {path}")
         if rc != 0:
             raise EditorError(
                 "editor failed with exit status %d: %s %s"
error.py (4 changed lines)

@@ -107,8 +107,8 @@ class GitError(RepoError):
         return self.message


-class GitcUnsupportedError(RepoExitError):
-    """Gitc no longer supported."""
+class GitAuthError(RepoExitError):
+    """Cannot talk to remote due to auth issue."""


 class UploadError(RepoError):
event_log.py (12 changed lines)

@@ -168,8 +168,10 @@ class EventLog:
             f.write("\n")


-# An integer id that is unique across this invocation of the program.
-_EVENT_ID = multiprocessing.Value("i", 1)
+# An integer id that is unique across this invocation of the program, to be set
+# by the first Add event. We can't set it here since it results in leaked
+# resources (see: https://issues.gerritcodereview.com/353656374).
+_EVENT_ID = None


 def _NextEventId():

@@ -178,6 +180,12 @@ def _NextEventId():
     Returns:
         A unique, to this invocation of the program, integer id.
     """
+    global _EVENT_ID
+    if _EVENT_ID is None:
+        # There is a small chance of race condition - two parallel processes
+        # setting up _EVENT_ID. However, we expect TASK_COMMAND to happen before
+        # mp kicks in.
+        _EVENT_ID = multiprocessing.Value("i", 1)
     with _EVENT_ID.get_lock():
         val = _EVENT_ID.value
         _EVENT_ID.value += 1
@@ -33,17 +33,6 @@ from wrapper import Wrapper


 GIT = "git"
-# NB: These do not need to be kept in sync with the repo launcher script.
-# These may be much newer as it allows the repo launcher to roll between
-# different repo releases while source versions might require a newer git.
-#
-# The soft version is when we start warning users that the version is old and
-# we'll be dropping support for it. We'll refuse to work with versions older
-# than the hard version.
-#
-# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
-MIN_GIT_VERSION_SOFT = (1, 9, 1)
-MIN_GIT_VERSION_HARD = (1, 7, 2)
 GIT_DIR = "GIT_DIR"

 LAST_GITDIR = None

@@ -135,6 +124,8 @@ def GetEventTargetPath():
     if retval == 0:
         # Strip trailing carriage-return in path.
         path = p.stdout.rstrip("\n")
+        if path == "":
+            return None
     elif retval != 1:
         # `git config --get` is documented to produce an exit status of `1`
         # if the requested variable is not present in the configuration.

@@ -196,12 +187,10 @@ class UserAgent:
     def git(self):
         """The UA when running git."""
         if self._git_ua is None:
-            self._git_ua = "git/%s (%s) git-repo/%s" % (
-                git.version_tuple().full,
-                self.os,
-                RepoSourceVersion(),
+            self._git_ua = (
+                f"git/{git.version_tuple().full} ({self.os}) "
+                f"git-repo/{RepoSourceVersion()}"
             )

         return self._git_ua

@@ -216,7 +205,7 @@ def git_require(min_version, fail=False, msg=""):
         need = ".".join(map(str, min_version))
         if msg:
             msg = " for " + msg
-        error_msg = "fatal: git %s or later required%s" % (need, msg)
+        error_msg = f"fatal: git {need} or later required{msg}"
         logger.error(error_msg)
         raise GitRequireError(error_msg)
     return False

@@ -243,15 +232,15 @@ def _build_env(
         env["GIT_SSH"] = ssh_proxy.proxy
         env["GIT_SSH_VARIANT"] = "ssh"
     if "http_proxy" in env and "darwin" == sys.platform:
-        s = "'http.proxy=%s'" % (env["http_proxy"],)
+        s = f"'http.proxy={env['http_proxy']}'"
         p = env.get("GIT_CONFIG_PARAMETERS")
         if p is not None:
             s = p + " " + s
         env["GIT_CONFIG_PARAMETERS"] = s
     if "GIT_ALLOW_PROTOCOL" not in env:
-        env[
-            "GIT_ALLOW_PROTOCOL"
-        ] = "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
+        env["GIT_ALLOW_PROTOCOL"] = (
+            "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
+        )
     env["GIT_HTTP_USER_AGENT"] = user_agent.git

     if objdir:

@@ -324,10 +313,13 @@ class GitCommand:
             cwd = None
         command_name = cmdv[0]
         command.append(command_name)

-        # Need to use the --progress flag for fetch/clone so output will be
-        # displayed as by default git only does progress output if stderr is a
-        # TTY.
-        if sys.stderr.isatty() and command_name in ("fetch", "clone"):
-            if "--progress" not in cmdv and "--quiet" not in cmdv:
-                command.append("--progress")
+        if command_name in ("fetch", "clone"):
+            env["GIT_TERMINAL_PROMPT"] = "0"
+            # Need to use the --progress flag for fetch/clone so output will be
+            # displayed as by default git only does progress output if stderr is
+            # a TTY.
+            if sys.stderr.isatty():
+                if "--progress" not in cmdv and "--quiet" not in cmdv:
+                    command.append("--progress")
         command.extend(cmdv[1:])

@@ -358,9 +350,9 @@ class GitCommand:
                     "Project": e.project,
                     "CommandName": command_name,
                     "Message": str(e),
-                    "ReturnCode": str(e.git_rc)
-                    if e.git_rc is not None
-                    else None,
+                    "ReturnCode": (
+                        str(e.git_rc) if e.git_rc is not None else None
+                    ),
                     "IsError": log_as_error,
                 }
             )

@@ -468,7 +460,7 @@ class GitCommand:
             )
         except Exception as e:
             raise GitPopenCommandError(
-                message="%s: %s" % (command[1], e),
+                message=f"{command[1]}: {e}",
                 project=self.project.name if self.project else None,
                 command_args=self.cmdv,
             )
@@ -90,6 +90,20 @@ class GitConfig:

     @staticmethod
     def _getUserConfig():
+        """Get the user-specific config file.
+
+        Prefers the XDG config location if available, with fallback to
+        ~/.gitconfig
+
+        This matches git behavior:
+        https://git-scm.com/docs/git-config#FILES
+        """
+        xdg_config_home = os.getenv(
+            "XDG_CONFIG_HOME", os.path.expanduser("~/.config")
+        )
+        xdg_config_file = os.path.join(xdg_config_home, "git", "config")
+        if os.path.exists(xdg_config_file):
+            return xdg_config_file
         return os.path.expanduser("~/.gitconfig")

     @classmethod

@@ -418,7 +432,7 @@ class GitConfig:
         if p.Wait() == 0:
             return p.stdout
         else:
-            raise GitError("git config %s: %s" % (str(args), p.stderr))
+            raise GitError(f"git config {str(args)}: {p.stderr}")


 class RepoConfig(GitConfig):

@@ -651,13 +665,11 @@ class Remote:
                     userEmail, host, port
                 )
             except urllib.error.HTTPError as e:
-                raise UploadError("%s: %s" % (self.review, str(e)))
+                raise UploadError(f"{self.review}: {str(e)}")
             except urllib.error.URLError as e:
-                raise UploadError("%s: %s" % (self.review, str(e)))
+                raise UploadError(f"{self.review}: {str(e)}")
             except http.client.HTTPException as e:
-                raise UploadError(
-                    "%s: %s" % (self.review, e.__class__.__name__)
-                )
+                raise UploadError(f"{self.review}: {e.__class__.__name__}")

             REVIEW_CACHE[u] = self._review_url
         return self._review_url + self.projectname

@@ -666,7 +678,7 @@ class Remote:
         username = self._config.GetString("review.%s.username" % self.review)
         if username is None:
             username = userEmail.split("@")[0]
-        return "ssh://%s@%s:%s/" % (username, host, port)
+        return f"ssh://{username}@{host}:{port}/"

     def ToLocal(self, rev):
         """Convert a remote revision string to something we have locally."""

@@ -715,11 +727,11 @@ class Remote:
         self._Set("fetch", list(map(str, self.fetch)))

     def _Set(self, key, value):
-        key = "remote.%s.%s" % (self.name, key)
+        key = f"remote.{self.name}.{key}"
         return self._config.SetString(key, value)

     def _Get(self, key, all_keys=False):
-        key = "remote.%s.%s" % (self.name, key)
+        key = f"remote.{self.name}.{key}"
         return self._config.GetString(key, all_keys=all_keys)


@@ -762,11 +774,11 @@ class Branch:
         fd.write("\tmerge = %s\n" % self.merge)

     def _Set(self, key, value):
-        key = "branch.%s.%s" % (self.name, key)
+        key = f"branch.{self.name}.{key}"
         return self._config.SetString(key, value)

     def _Get(self, key, all_keys=False):
-        key = "branch.%s.%s" % (self.name, key)
+        key = f"branch.{self.name}.{key}"
         return self._config.GetString(key, all_keys=all_keys)
@@ -69,9 +69,9 @@ class UpdateProjectsResult(NamedTuple):
 class Superproject:
     """Get commit ids from superproject.

-    Initializes a local copy of a superproject for the manifest. This allows
-    lookup of commit ids for all projects. It contains _project_commit_ids which
-    is a dictionary with project/commit id entries.
+    Initializes a bare local copy of a superproject for the manifest. This
+    allows lookup of commit ids for all projects. It contains
+    _project_commit_ids which is a dictionary with project/commit id entries.
     """

     def __init__(

@@ -235,7 +235,8 @@ class Superproject:
         p = GitCommand(
             None,
             cmd,
-            cwd=self._work_git,
+            gitdir=self._work_git,
+            bare=True,
             capture_stdout=True,
             capture_stderr=True,
         )

@@ -271,7 +272,8 @@ class Superproject:
         p = GitCommand(
             None,
             cmd,
-            cwd=self._work_git,
+            gitdir=self._work_git,
+            bare=True,
             capture_stdout=True,
             capture_stderr=True,
         )

@@ -305,8 +307,6 @@ class Superproject:
             )
             return SyncResult(False, False)

-        _PrintBetaNotice()
-
         should_exit = True
         if not self._remote_url:
             self._LogWarning(

@@ -450,16 +450,6 @@ class Superproject:
         return UpdateProjectsResult(manifest_path, False)


-@functools.lru_cache(maxsize=10)
-def _PrintBetaNotice():
-    """Print the notice of beta status."""
-    print(
-        "NOTICE: --use-superproject is in beta; report any issues to the "
-        "address described in `repo version`",
-        file=sys.stderr,
-    )
-
-
 @functools.lru_cache(maxsize=None)
 def _UseSuperprojectFromConfiguration():
     """Returns the user choice of whether to use superproject."""
@@ -38,6 +38,8 @@ import tempfile
 import threading


+# Timeout when sending events via socket (applies to connect, send)
+SOCK_TIMEOUT = 0.5  # in seconds
 # BaseEventLog __init__ Counter that is consistent within the same process
 p_init_count = 0

@@ -76,9 +78,8 @@ class BaseEventLog:
         # Save both our sid component and the complete sid.
         # We use our sid component (self._sid) as the unique filename prefix and
         # the full sid (self._full_sid) in the log itself.
-        self._sid = "repo-%s-P%08x" % (
-            self.start.strftime("%Y%m%dT%H%M%SZ"),
-            os.getpid(),
+        self._sid = (
+            f"repo-{self.start.strftime('%Y%m%dT%H%M%SZ')}-P{os.getpid():08x}"
         )

         if add_init_count:

@@ -129,10 +130,10 @@ class BaseEventLog:
             "time": datetime.datetime.now(datetime.timezone.utc).isoformat(),
         }

-    def StartEvent(self):
+    def StartEvent(self, argv):
         """Append a 'start' event to the current log."""
         start_event = self._CreateEventDict("start")
-        start_event["argv"] = sys.argv
+        start_event["argv"] = argv
         self._log.append(start_event)

     def ExitEvent(self, result):

@@ -158,9 +159,11 @@ class BaseEventLog:
             name: Name of the primary command (ex: repo, git)
             subcommands: List of the sub-commands (ex: version, init, sync)
         """
-        command_event = self._CreateEventDict("command")
+        command_event = self._CreateEventDict("cmd_name")
+        name = f"{name}-"
+        name += "-".join(subcommands)
         command_event["name"] = name
-        command_event["subcommands"] = subcommands
+        command_event["hierarchy"] = name
         self._log.append(command_event)

     def LogConfigEvents(self, config, event_dict_name):

@@ -297,6 +300,7 @@ class BaseEventLog:
                 with socket.socket(
                     socket.AF_UNIX, socket.SOCK_STREAM
                 ) as sock:
+                    sock.settimeout(SOCK_TIMEOUT)
                     sock.connect(path)
                     self._WriteLog(sock.sendall)
                     return f"af_unix:stream:{path}"
hooks.py (11 changed lines)

@@ -180,7 +180,7 @@ class RepoHook:
             abort_if_user_denies was passed to the consturctor.
         """
         hooks_config = self._hooks_project.config
-        git_approval_key = "repo.hooks.%s.%s" % (self._hook_type, subkey)
+        git_approval_key = f"repo.hooks.{self._hook_type}.{subkey}"

         # Get the last value that the user approved for this hook; may be None.
         old_val = hooks_config.GetString(git_approval_key)

@@ -193,7 +193,7 @@ class RepoHook:
             else:
                 # Give the user a reason why we're prompting, since they last
                 # told us to "never ask again".
-                prompt = "WARNING: %s\n\n" % (changed_prompt,)
+                prompt = f"WARNING: {changed_prompt}\n\n"
         else:
             prompt = ""

@@ -241,9 +241,8 @@ class RepoHook:
         return self._CheckForHookApprovalHelper(
             "approvedmanifest",
             self._manifest_url,
-            "Run hook scripts from %s" % (self._manifest_url,),
-            "Manifest URL has changed since %s was allowed."
-            % (self._hook_type,),
+            f"Run hook scripts from {self._manifest_url}",
+            f"Manifest URL has changed since {self._hook_type} was allowed.",
         )

     def _CheckForHookApprovalHash(self):

@@ -262,7 +261,7 @@ class RepoHook:
             "approvedhash",
             self._GetHash(),
             prompt % (self._GetMustVerb(), self._script_fullpath),
-            "Scripts have changed since %s was allowed." % (self._hook_type,),
+            f"Scripts have changed since {self._hook_type} was allowed.",
         )

     @staticmethod
@@ -1,5 +1,8 @@
 #!/bin/sh
-# From Gerrit Code Review 3.6.1 c67916dbdc07555c44e32a68f92ffc484b9b34f0
+# DO NOT EDIT THIS FILE
+# All updates should be sent upstream: https://gerrit.googlesource.com/gerrit/
+# This is synced from commit: 62f5bbea67f6dafa6e22a601a0c298214c510caf
+# DO NOT EDIT THIS FILE
 #
 # Part of Gerrit Code Review (https://www.gerritcodereview.com/)
 #

@@ -31,14 +34,20 @@ if test ! -f "$1" ; then
 fi

 # Do not create a change id if requested
-if test "false" = "$(git config --bool --get gerrit.createChangeId)" ; then
-  exit 0
-fi
-
-# Do not create a change id for squash commits.
-if head -n1 "$1" | grep -q '^squash! '; then
-  exit 0
-fi
+case "$(git config --get gerrit.createChangeId)" in
+  false)
+    exit 0
+    ;;
+  always)
+    ;;
+  *)
+    # Do not create a change id for squash/fixup commits.
+    if head -n1 "$1" | LC_ALL=C grep -q '^[a-z][a-z]*! '; then
+      exit 0
+    fi
+    ;;
+esac

 if git rev-parse --verify HEAD >/dev/null 2>&1; then
   refhash="$(git rev-parse HEAD)"

@@ -51,7 +60,7 @@ dest="$1.tmp.${random}"

 trap 'rm -f "$dest" "$dest-2"' EXIT

-if ! git stripspace --strip-comments < "$1" > "${dest}" ; then
+if ! cat "$1" | sed -e '/>8/q' | git stripspace --strip-comments > "${dest}" ; then
   echo "cannot strip comments from $1"
   exit 1
 fi

@@ -65,7 +74,7 @@ reviewurl="$(git config --get gerrit.reviewUrl)"
 if test -n "${reviewurl}" ; then
   token="Link"
   value="${reviewurl%/}/id/I$random"
-  pattern=".*/id/I[0-9a-f]\{40\}$"
+  pattern=".*/id/I[0-9a-f]\{40\}"
 else
   token="Change-Id"
   value="I$random"

@@ -92,7 +101,7 @@ fi
 # Avoid the --where option which only appeared in Git 2.15
 if ! git -c trailer.where=before interpret-trailers \
       --trailer "Signed-off-by: $token: $value" < "$dest-2" |
-    sed -re "s/^Signed-off-by: ($token: )/\1/" \
+    sed -e "s/^Signed-off-by: \($token: \)/\1/" \
       -e "/^Signed-off-by: SENTINEL/d" > "$dest" ; then
   echo "cannot insert $token line in $1"
   exit 1
@@ -1,33 +1,25 @@
 #!/bin/sh
+# DO NOT EDIT THIS FILE
+# All updates should be sent upstream: https://github.com/git/git
+# This is synced from commit: 00e10ef10e161a913893b8cb33aa080d4ca5baa6
+# DO NOT EDIT THIS FILE
 #
 # An example hook script to verify if you are on battery, in case you
-# are running Windows, Linux or OS X. Called by git-gc --auto with no
-# arguments. The hook should exit with non-zero status after issuing an
-# appropriate message if it wants to stop the auto repacking.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
+# are running Linux or OS X. Called by git-gc --auto with no arguments.
+# The hook should exit with non-zero status after issuing an appropriate
+# message if it wants to stop the auto repacking.
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# This hook is stored in the contrib/hooks directory. Your distribution
+# may have put this somewhere else. If you want to use this hook, you
+# should make this script executable then link to it in the repository
+# you would like to use it in.
 #
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-if uname -s | grep -q "_NT-"
-then
-  if test -x $SYSTEMROOT/System32/Wbem/wmic
-  then
-    STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n')
-    [ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1
-  fi
-  exit 0
-fi
+# For example, if the hook is stored in
+# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
+#
+# cd /path/to/your/repository.git
+# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
+#    hooks/pre-auto-gc

 if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
 then

@@ -48,11 +40,6 @@ elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
      grep -q "drawing from 'AC Power'"
 then
   exit 0
-elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
-  "$(find /sys/bus/acpi/drivers/battery/ -type l | wc -l)";
-then
-  # No battery exists.
-  exit 0
 fi

 echo "Auto packing deferred; not on AC"
main.py (29 changed lines)

@@ -45,9 +45,9 @@ from command import InteractiveCommand
 from command import MirrorSafeCommand
 from editor import Editor
 from error import DownloadError
-from error import GitcUnsupportedError
 from error import InvalidProjectGroupsError
 from error import ManifestInvalidRevisionError
+from error import ManifestParseError
 from error import NoManifestException
 from error import NoSuchProjectError
 from error import RepoChangedException

@@ -198,9 +198,8 @@ class _Repo:
         if short:
             commands = " ".join(sorted(self.commands))
             wrapped_commands = textwrap.wrap(commands, width=77)
-            print(
-                "Available commands:\n  %s" % ("\n  ".join(wrapped_commands),)
-            )
+            help_commands = "".join(f"\n  {x}" for x in wrapped_commands)
+            print(f"Available commands:{help_commands}")
             print("\nRun `repo help <command>` for command-specific details.")
             print("Bug reports:", Wrapper().BUG_URL)
         else:

@@ -236,7 +235,7 @@ class _Repo:
         if name in self.commands:
             return name, []

-        key = "alias.%s" % (name,)
+        key = f"alias.{name}"
         alias = RepoConfig.ForRepository(self.repodir).GetString(key)
         if alias is None:
             alias = RepoConfig.ForUser().GetString(key)

@@ -270,10 +269,14 @@ class _Repo:
             self._PrintHelp(short=True)
             return 1

-        run = lambda: self._RunLong(name, gopts, argv) or 0
+        git_trace2_event_log = EventLog()
+        run = (
+            lambda: self._RunLong(name, gopts, argv, git_trace2_event_log) or 0
+        )
         with Trace(
-            "starting new command: %s",
+            "starting new command: %s [sid=%s]",
             ", ".join([name] + argv),
+            git_trace2_event_log.full_sid,
             first_trace=True,
         ):
             if gopts.trace_python:

@@ -290,12 +293,11 @@ class _Repo:
             result = run()
         return result

-    def _RunLong(self, name, gopts, argv):
+    def _RunLong(self, name, gopts, argv, git_trace2_event_log):
         """Execute the (longer running) requested subcommand."""
         result = 0
         SetDefaultColoring(gopts.color)

-        git_trace2_event_log = EventLog()
         outer_client = RepoClient(self.repodir)
         repo_client = outer_client
         if gopts.submanifest_path:

@@ -305,10 +307,6 @@ class _Repo:
                 outer_client=outer_client,
             )

-        if Wrapper().gitc_parse_clientdir(os.getcwd()):
-            logger.error("GITC is not supported.")
-            raise GitcUnsupportedError()
-
         try:
             cmd = self.commands[name](
                 repodir=self.repodir,

@@ -354,7 +352,7 @@ class _Repo:
         start = time.time()
         cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
         cmd.event_log.SetParent(cmd_event)
-        git_trace2_event_log.StartEvent()
+        git_trace2_event_log.StartEvent(["repo", name] + argv)
         git_trace2_event_log.CommandEvent(name="repo", subcommands=[name])

         def execute_command_helper():

@@ -422,7 +420,7 @@ class _Repo:
                 error_info = json.dumps(
                     {
                         "ErrorType": type(error).__name__,
-                        "Project": project,
+                        "Project": str(project),
                         "Message": str(error),
                     }
                 )
|
)
|
||||||
@ -440,6 +438,7 @@ class _Repo:
|
|||||||
except (
|
except (
|
||||||
DownloadError,
|
DownloadError,
|
||||||
ManifestInvalidRevisionError,
|
ManifestInvalidRevisionError,
|
||||||
|
ManifestParseError,
|
||||||
NoManifestException,
|
NoManifestException,
|
||||||
) as e:
|
) as e:
|
||||||
logger.error("error: in `%s`: %s", " ".join([name] + argv), e)
|
logger.error("error: in `%s`: %s", " ".join([name] + argv), e)
|
||||||
|
@ -1,21 +1,24 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "July 2022" "repo gitc-delete" "Repo Manual"
|
.TH REPO "1" "December 2024" "repo gc" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo gitc-delete - manual page for repo gitc-delete
|
repo \- repo gc - manual page for repo gc
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
.B repo
|
.B repo
|
||||||
\fI\,gitc-delete\/\fR
|
\fI\,gc\/\fR
|
||||||
.SH DESCRIPTION
|
.SH DESCRIPTION
|
||||||
Summary
|
Summary
|
||||||
.PP
|
.PP
|
||||||
Delete a GITC Client.
|
Cleaning up internal repo state.
|
||||||
.SH OPTIONS
|
.SH OPTIONS
|
||||||
.TP
|
.TP
|
||||||
\fB\-h\fR, \fB\-\-help\fR
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
show this help message and exit
|
show this help message and exit
|
||||||
.TP
|
.TP
|
||||||
\fB\-f\fR, \fB\-\-force\fR
|
\fB\-n\fR, \fB\-\-dry\-run\fR
|
||||||
force the deletion (no prompt)
|
do everything except actually delete
|
||||||
|
.TP
|
||||||
|
\fB\-y\fR, \fB\-\-yes\fR
|
||||||
|
answer yes to all safe prompts
|
||||||
.SS Logging options:
|
.SS Logging options:
|
||||||
.TP
|
.TP
|
||||||
\fB\-v\fR, \fB\-\-verbose\fR
|
\fB\-v\fR, \fB\-\-verbose\fR
|
||||||
@ -37,8 +40,4 @@ only operate on this (sub)manifest
|
|||||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||||
operate on this manifest and its submanifests
|
operate on this manifest and its submanifests
|
||||||
.PP
|
.PP
|
||||||
Run `repo help gitc\-delete` to view the detailed manual.
|
Run `repo help gc` to view the detailed manual.
|
||||||
.SH DETAILS
|
|
||||||
.PP
|
|
||||||
This subcommand deletes the current GITC client, deleting the GITC manifest and
|
|
||||||
all locally downloaded sources.
|
|
@ -1,175 +0,0 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
|
||||||
.TH REPO "1" "October 2022" "repo gitc-init" "Repo Manual"
|
|
||||||
.SH NAME
|
|
||||||
repo \- repo gitc-init - manual page for repo gitc-init
|
|
||||||
.SH SYNOPSIS
|
|
||||||
.B repo
|
|
||||||
\fI\,gitc-init \/\fR[\fI\,options\/\fR] [\fI\,client name\/\fR]
|
|
||||||
.SH DESCRIPTION
|
|
||||||
Summary
|
|
||||||
.PP
|
|
||||||
Initialize a GITC Client.
|
|
||||||
.SH OPTIONS
|
|
||||||
.TP
|
|
||||||
\fB\-h\fR, \fB\-\-help\fR
|
|
||||||
show this help message and exit
|
|
||||||
.SS Logging options:
|
|
||||||
.TP
|
|
||||||
\fB\-v\fR, \fB\-\-verbose\fR
|
|
||||||
show all output
|
|
||||||
.TP
|
|
||||||
\fB\-q\fR, \fB\-\-quiet\fR
|
|
||||||
only show errors
|
|
||||||
.SS Manifest options:
|
|
||||||
.TP
|
|
||||||
\fB\-u\fR URL, \fB\-\-manifest\-url\fR=\fI\,URL\/\fR
|
|
||||||
manifest repository location
|
|
||||||
.TP
|
|
||||||
\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
|
|
||||||
manifest branch or revision (use HEAD for default)
|
|
||||||
.TP
|
|
||||||
\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
|
|
||||||
initial manifest file
|
|
||||||
.TP
|
|
||||||
\fB\-g\fR GROUP, \fB\-\-groups\fR=\fI\,GROUP\/\fR
|
|
||||||
restrict manifest projects to ones with specified
|
|
||||||
group(s) [default|all|G1,G2,G3|G4,\-G5,\-G6]
|
|
||||||
.TP
|
|
||||||
\fB\-p\fR PLATFORM, \fB\-\-platform\fR=\fI\,PLATFORM\/\fR
|
|
||||||
restrict manifest projects to ones with a specified
|
|
||||||
platform group [auto|all|none|linux|darwin|...]
|
|
||||||
.TP
|
|
||||||
\fB\-\-submodules\fR
|
|
||||||
sync any submodules associated with the manifest repo
|
|
||||||
.TP
|
|
||||||
\fB\-\-standalone\-manifest\fR
|
|
||||||
download the manifest as a static file rather then
|
|
||||||
create a git checkout of the manifest repo
|
|
||||||
.TP
|
|
||||||
\fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
|
|
||||||
create a shallow clone of the manifest repo with given
|
|
||||||
depth (0 for full clone); see git clone (default: 0)
|
|
||||||
.SS Manifest (only) checkout options:
|
|
||||||
.TP
|
|
||||||
\fB\-\-current\-branch\fR
|
|
||||||
fetch only current manifest branch from server
|
|
||||||
(default)
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-current\-branch\fR
|
|
||||||
fetch all manifest branches from server
|
|
||||||
.TP
|
|
||||||
\fB\-\-tags\fR
|
|
||||||
fetch tags in the manifest
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-tags\fR
|
|
||||||
don't fetch tags in the manifest
|
|
||||||
.SS Checkout modes:
|
|
||||||
.TP
|
|
||||||
\fB\-\-mirror\fR
|
|
||||||
create a replica of the remote repositories rather
|
|
||||||
than a client working directory
|
|
||||||
.TP
|
|
||||||
\fB\-\-archive\fR
|
|
||||||
checkout an archive instead of a git repository for
|
|
||||||
each project. See git archive.
|
|
||||||
.TP
|
|
||||||
\fB\-\-worktree\fR
|
|
||||||
use git\-worktree to manage projects
|
|
||||||
.SS Project checkout optimizations:
|
|
||||||
.TP
|
|
||||||
\fB\-\-reference\fR=\fI\,DIR\/\fR
|
|
||||||
location of mirror directory
|
|
||||||
.TP
|
|
||||||
\fB\-\-dissociate\fR
|
|
||||||
dissociate from reference mirrors after clone
|
|
||||||
.TP
|
|
||||||
\fB\-\-depth\fR=\fI\,DEPTH\/\fR
|
|
||||||
create a shallow clone with given depth; see git clone
|
|
||||||
.TP
|
|
||||||
\fB\-\-partial\-clone\fR
|
|
||||||
perform partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-partial\-clone\fR
|
|
||||||
disable use of partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
|
|
||||||
.TP
|
|
||||||
\fB\-\-partial\-clone\-exclude\fR=\fI\,PARTIAL_CLONE_EXCLUDE\/\fR
|
|
||||||
exclude the specified projects (a comma\-delimited
|
|
||||||
project names) from partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
|
|
||||||
.TP
|
|
||||||
\fB\-\-clone\-filter\fR=\fI\,CLONE_FILTER\/\fR
|
|
||||||
filter for use with \fB\-\-partial\-clone\fR [default:
|
|
||||||
blob:none]
|
|
||||||
.TP
|
|
||||||
\fB\-\-use\-superproject\fR
|
|
||||||
use the manifest superproject to sync projects;
|
|
||||||
implies \fB\-c\fR
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-use\-superproject\fR
|
|
||||||
disable use of manifest superprojects
|
|
||||||
.TP
|
|
||||||
\fB\-\-clone\-bundle\fR
|
|
||||||
enable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
|
|
||||||
not \fB\-\-partial\-clone\fR)
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-clone\-bundle\fR
|
|
||||||
disable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
|
|
||||||
\fB\-\-partial\-clone\fR)
|
|
||||||
.TP
|
|
||||||
\fB\-\-git\-lfs\fR
|
|
||||||
enable Git LFS support
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-git\-lfs\fR
|
|
||||||
disable Git LFS support
|
|
||||||
.SS repo Version options:
|
|
||||||
.TP
|
|
||||||
\fB\-\-repo\-url\fR=\fI\,URL\/\fR
|
|
||||||
repo repository location ($REPO_URL)
|
|
||||||
.TP
|
|
||||||
\fB\-\-repo\-rev\fR=\fI\,REV\/\fR
|
|
||||||
repo branch or revision ($REPO_REV)
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-repo\-verify\fR
|
|
||||||
do not verify repo source code
|
|
||||||
.SS Other options:
|
|
||||||
.TP
|
|
||||||
\fB\-\-config\-name\fR
|
|
||||||
Always prompt for name/e\-mail
|
|
||||||
.SS GITC options:
|
|
||||||
.TP
|
|
||||||
\fB\-f\fR MANIFEST_FILE, \fB\-\-manifest\-file\fR=\fI\,MANIFEST_FILE\/\fR
|
|
||||||
Optional manifest file to use for this GITC client.
|
|
||||||
.TP
|
|
||||||
\fB\-c\fR GITC_CLIENT, \fB\-\-gitc\-client\fR=\fI\,GITC_CLIENT\/\fR
|
|
||||||
Name of the gitc_client instance to create or modify.
|
|
||||||
.SS Multi\-manifest:
|
|
||||||
.TP
|
|
||||||
\fB\-\-outer\-manifest\fR
|
|
||||||
operate starting at the outermost manifest
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-outer\-manifest\fR
|
|
||||||
do not operate on outer manifests
|
|
||||||
.TP
|
|
||||||
\fB\-\-this\-manifest\-only\fR
|
|
||||||
only operate on this (sub)manifest
|
|
||||||
.TP
|
|
||||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
|
||||||
operate on this manifest and its submanifests
|
|
||||||
.PP
|
|
||||||
Run `repo help gitc\-init` to view the detailed manual.
|
|
||||||
.SH DETAILS
|
|
||||||
.PP
|
|
||||||
The 'repo gitc\-init' command is ran to initialize a new GITC client for use with
|
|
||||||
the GITC file system.
|
|
||||||
.PP
|
|
||||||
This command will setup the client directory, initialize repo, just like repo
|
|
||||||
init does, and then downloads the manifest collection and installs it in the
|
|
||||||
\&.repo/directory of the GITC client.
|
|
||||||
.PP
|
|
||||||
Once this is done, a GITC manifest is generated by pulling the HEAD SHA for each
|
|
||||||
project and generates the properly formatted XML file and installs it as
|
|
||||||
\&.manifest in the GITC client directory.
|
|
||||||
.PP
|
|
||||||
The \fB\-c\fR argument is required to specify the GITC client name.
|
|
||||||
.PP
|
|
||||||
The optional \fB\-f\fR argument can be used to specify the manifest file to use for
|
|
||||||
this GITC client.
|
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "October 2022" "repo init" "Repo Manual"
|
.TH REPO "1" "September 2024" "repo init" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo init - manual page for repo init
|
repo \- repo init - manual page for repo init
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -28,6 +28,11 @@ manifest repository location
|
|||||||
\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
|
\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
|
||||||
manifest branch or revision (use HEAD for default)
|
manifest branch or revision (use HEAD for default)
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-manifest\-upstream\-branch\fR=\fI\,BRANCH\/\fR
|
||||||
|
when a commit is provided to \fB\-\-manifest\-branch\fR, this
|
||||||
|
is the name of the git ref in which the commit can be
|
||||||
|
found
|
||||||
|
.TP
|
||||||
\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
|
\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
|
||||||
initial manifest file
|
initial manifest file
|
||||||
.TP
|
.TP
|
||||||
@ -163,6 +168,10 @@ The optional \fB\-b\fR argument can be used to select the manifest branch to che
|
|||||||
and use. If no branch is specified, the remote's default branch is used. This is
|
and use. If no branch is specified, the remote's default branch is used. This is
|
||||||
equivalent to using \fB\-b\fR HEAD.
|
equivalent to using \fB\-b\fR HEAD.
|
||||||
.PP
|
.PP
|
||||||
|
The optional \fB\-\-manifest\-upstream\-branch\fR argument can be used when a commit is
|
||||||
|
provided to \fB\-\-manifest\-branch\fR (or \fB\-b\fR), to specify the name of the git ref in
|
||||||
|
which the commit can be found.
|
||||||
|
.PP
|
||||||
The optional \fB\-m\fR argument can be used to specify an alternate manifest to be
|
The optional \fB\-m\fR argument can be used to specify an alternate manifest to be
|
||||||
used. If no manifest is specified, the manifest default.xml will be used.
|
used. If no manifest is specified, the manifest default.xml will be used.
|
||||||
.PP
|
.PP
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "October 2022" "repo manifest" "Repo Manual"
|
.TH REPO "1" "December 2024" "repo manifest" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo manifest - manual page for repo manifest
|
repo \- repo manifest - manual page for repo manifest
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -192,10 +192,13 @@ CDATA #IMPLIED>
|
|||||||
<!ATTLIST extend\-project remote CDATA #IMPLIED>
|
<!ATTLIST extend\-project remote CDATA #IMPLIED>
|
||||||
<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
|
<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
|
||||||
<!ATTLIST extend\-project upstream CDATA #IMPLIED>
|
<!ATTLIST extend\-project upstream CDATA #IMPLIED>
|
||||||
|
<!ATTLIST extend\-project base\-rev CDATA #IMPLIED>
|
||||||
.IP
|
.IP
|
||||||
<!ELEMENT remove\-project EMPTY>
|
<!ELEMENT remove\-project EMPTY>
|
||||||
<!ATTLIST remove\-project name CDATA #REQUIRED>
|
<!ATTLIST remove\-project name CDATA #IMPLIED>
|
||||||
|
<!ATTLIST remove\-project path CDATA #IMPLIED>
|
||||||
<!ATTLIST remove\-project optional CDATA #IMPLIED>
|
<!ATTLIST remove\-project optional CDATA #IMPLIED>
|
||||||
|
<!ATTLIST remove\-project base\-rev CDATA #IMPLIED>
|
||||||
.IP
|
.IP
|
||||||
<!ELEMENT repo\-hooks EMPTY>
|
<!ELEMENT repo\-hooks EMPTY>
|
||||||
<!ATTLIST repo\-hooks in\-project CDATA #REQUIRED>
|
<!ATTLIST repo\-hooks in\-project CDATA #REQUIRED>
|
||||||
@ -212,6 +215,7 @@ CDATA #IMPLIED>
|
|||||||
<!ELEMENT include EMPTY>
|
<!ELEMENT include EMPTY>
|
||||||
<!ATTLIST include name CDATA #REQUIRED>
|
<!ATTLIST include name CDATA #REQUIRED>
|
||||||
<!ATTLIST include groups CDATA #IMPLIED>
|
<!ATTLIST include groups CDATA #IMPLIED>
|
||||||
|
<!ATTLIST include revision CDATA #IMPLIED>
|
||||||
.PP
|
.PP
|
||||||
]>
|
]>
|
||||||
```
|
```
|
||||||
@ -493,6 +497,14 @@ project. Same syntax as the corresponding element of `project`.
|
|||||||
Attribute `upstream`: If specified, overrides the upstream of the original
|
Attribute `upstream`: If specified, overrides the upstream of the original
|
||||||
project. Same syntax as the corresponding element of `project`.
|
project. Same syntax as the corresponding element of `project`.
|
||||||
.PP
|
.PP
|
||||||
|
Attribute `base\-rev`: If specified, adds a check against the revision to be
|
||||||
|
extended. Manifest parse will fail and give a list of mismatch extends if the
|
||||||
|
revisions being extended have changed since base\-rev was set. Intended for use
|
||||||
|
with layered manifests using hash revisions to prevent patch branches hiding
|
||||||
|
newer upstream revisions. Also compares named refs like branches or tags but is
|
||||||
|
misleading if branches are used as base\-rev. Same syntax as the corresponding
|
||||||
|
element of `project`.
|
||||||
|
.PP
|
||||||
Element annotation
|
Element annotation
|
||||||
.PP
|
.PP
|
||||||
Zero or more annotation elements may be specified as children of a project or
|
Zero or more annotation elements may be specified as children of a project or
|
||||||
@ -533,16 +545,35 @@ the repo client.
|
|||||||
.PP
|
.PP
|
||||||
Element remove\-project
|
Element remove\-project
|
||||||
.PP
|
.PP
|
||||||
Deletes the named project from the internal manifest table, possibly allowing a
|
Deletes a project from the internal manifest table, possibly allowing a
|
||||||
subsequent project element in the same manifest file to replace the project with
|
subsequent project element in the same manifest file to replace the project with
|
||||||
a different source.
|
a different source.
|
||||||
.PP
|
.PP
|
||||||
This element is mostly useful in a local manifest file, where the user can
|
This element is mostly useful in a local manifest file, where the user can
|
||||||
remove a project, and possibly replace it with their own definition.
|
remove a project, and possibly replace it with their own definition.
|
||||||
.PP
|
.PP
|
||||||
|
The project `name` or project `path` can be used to specify the remove target
|
||||||
|
meaning one of them is required. If only name is specified, all projects with
|
||||||
|
that name are removed.
|
||||||
|
.PP
|
||||||
|
If both name and path are specified, only projects with the same name and path
|
||||||
|
are removed, meaning projects with the same name but in other locations are
|
||||||
|
kept.
|
||||||
|
.PP
|
||||||
|
If only path is specified, a matching project is removed regardless of its name.
|
||||||
|
Logic otherwise behaves like both are specified.
|
||||||
|
.PP
|
||||||
Attribute `optional`: Set to true to ignore remove\-project elements with no
|
Attribute `optional`: Set to true to ignore remove\-project elements with no
|
||||||
matching `project` element.
|
matching `project` element.
|
||||||
.PP
|
.PP
|
||||||
|
Attribute `base\-rev`: If specified, adds a check against the revision to be
|
||||||
|
removed. Manifest parse will fail and give a list of mismatch removes if the
|
||||||
|
revisions being removed have changed since base\-rev was set. Intended for use
|
||||||
|
with layered manifests using hash revisions to prevent patch branches hiding
|
||||||
|
newer upstream revisions. Also compares named refs like branches or tags but is
|
||||||
|
misleading if branches are used as base\-rev. Same syntax as the corresponding
|
||||||
|
element of `project`.
|
||||||
|
.PP
|
||||||
Element repo\-hooks
|
Element repo\-hooks
|
||||||
.PP
|
.PP
|
||||||
NB: See the [practical documentation](./repo\-hooks.md) for using repo hooks.
|
NB: See the [practical documentation](./repo\-hooks.md) for using repo hooks.
|
||||||
@ -608,6 +639,9 @@ included manifest belong. This appends and recurses, meaning all projects in
|
|||||||
included manifests carry all parent include groups. Same syntax as the
|
included manifests carry all parent include groups. Same syntax as the
|
||||||
corresponding element of `project`.
|
corresponding element of `project`.
|
||||||
.PP
|
.PP
|
||||||
|
Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`)
|
||||||
|
default to which all projects in the included manifest belong.
|
||||||
|
.PP
|
||||||
Local Manifests
|
Local Manifests
|
||||||
.PP
|
.PP
|
||||||
Additional remotes and projects may be added through local manifest files stored
|
Additional remotes and projects may be added through local manifest files stored
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
|
.TH REPO "1" "September 2024" "repo smartsync" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo smartsync - manual page for repo smartsync
|
repo \- repo smartsync - manual page for repo smartsync
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -37,11 +37,20 @@ overwrite an existing git directory if it needs to
|
|||||||
point to a different object directory. WARNING: this
|
point to a different object directory. WARNING: this
|
||||||
may cause loss of data
|
may cause loss of data
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-force\-checkout\fR
|
||||||
|
force checkout even if it results in throwing away
|
||||||
|
uncommitted modifications. WARNING: this may cause
|
||||||
|
loss of data
|
||||||
|
.TP
|
||||||
\fB\-\-force\-remove\-dirty\fR
|
\fB\-\-force\-remove\-dirty\fR
|
||||||
force remove projects with uncommitted modifications
|
force remove projects with uncommitted modifications
|
||||||
if projects no longer exist in the manifest. WARNING:
|
if projects no longer exist in the manifest. WARNING:
|
||||||
this may cause loss of data
|
this may cause loss of data
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-rebase\fR
|
||||||
|
rebase local commits regardless of whether they are
|
||||||
|
published
|
||||||
|
.TP
|
||||||
\fB\-l\fR, \fB\-\-local\-only\fR
|
\fB\-l\fR, \fB\-\-local\-only\fR
|
||||||
only update working tree, don't fetch
|
only update working tree, don't fetch
|
||||||
.TP
|
.TP
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
|
.TH REPO "1" "September 2024" "repo sync" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo sync - manual page for repo sync
|
repo \- repo sync - manual page for repo sync
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -37,11 +37,20 @@ overwrite an existing git directory if it needs to
|
|||||||
point to a different object directory. WARNING: this
|
point to a different object directory. WARNING: this
|
||||||
may cause loss of data
|
may cause loss of data
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-force\-checkout\fR
|
||||||
|
force checkout even if it results in throwing away
|
||||||
|
uncommitted modifications. WARNING: this may cause
|
||||||
|
loss of data
|
||||||
|
.TP
|
||||||
\fB\-\-force\-remove\-dirty\fR
|
\fB\-\-force\-remove\-dirty\fR
|
||||||
force remove projects with uncommitted modifications
|
force remove projects with uncommitted modifications
|
||||||
if projects no longer exist in the manifest. WARNING:
|
if projects no longer exist in the manifest. WARNING:
|
||||||
this may cause loss of data
|
this may cause loss of data
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-rebase\fR
|
||||||
|
rebase local commits regardless of whether they are
|
||||||
|
published
|
||||||
|
.TP
|
||||||
\fB\-l\fR, \fB\-\-local\-only\fR
|
\fB\-l\fR, \fB\-\-local\-only\fR
|
||||||
only update working tree, don't fetch
|
only update working tree, don't fetch
|
||||||
.TP
|
.TP
|
||||||
@ -185,6 +194,11 @@ The \fB\-\-force\-sync\fR option can be used to overwrite existing git directori
|
|||||||
they have previously been linked to a different object directory. WARNING: This
|
they have previously been linked to a different object directory. WARNING: This
|
||||||
may cause data to be lost since refs may be removed when overwriting.
|
may cause data to be lost since refs may be removed when overwriting.
|
||||||
.PP
|
.PP
|
||||||
|
The \fB\-\-force\-checkout\fR option can be used to force git to switch revs even if the
|
||||||
|
index or the working tree differs from HEAD, and if there are untracked files.
|
||||||
|
WARNING: This may cause data to be lost since uncommitted changes may be
|
||||||
|
removed.
|
||||||
|
.PP
|
||||||
The \fB\-\-force\-remove\-dirty\fR option can be used to remove previously used projects
|
The \fB\-\-force\-remove\-dirty\fR option can be used to remove previously used projects
|
||||||
with uncommitted changes. WARNING: This may cause data to be lost since
|
with uncommitted changes. WARNING: This may cause data to be lost since
|
||||||
uncommitted changes may be removed with projects that no longer exist in the
|
uncommitted changes may be removed with projects that no longer exist in the
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "August 2022" "repo upload" "Repo Manual"
|
.TH REPO "1" "June 2024" "repo upload" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repo upload - manual page for repo upload
|
repo \- repo upload - manual page for repo upload
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -18,8 +18,11 @@ show this help message and exit
|
|||||||
number of jobs to run in parallel (default: based on
|
number of jobs to run in parallel (default: based on
|
||||||
number of CPU cores)
|
number of CPU cores)
|
||||||
.TP
|
.TP
|
||||||
\fB\-t\fR
|
\fB\-t\fR, \fB\-\-topic\-branch\fR
|
||||||
send local branch name to Gerrit Code Review
|
set the topic to the local branch name
|
||||||
|
.TP
|
||||||
|
\fB\-\-topic\fR=\fI\,TOPIC\/\fR
|
||||||
|
set topic for the change
|
||||||
.TP
|
.TP
|
||||||
\fB\-\-hashtag\fR=\fI\,HASHTAGS\/\fR, \fB\-\-ht\fR=\fI\,HASHTAGS\/\fR
|
\fB\-\-hashtag\fR=\fI\,HASHTAGS\/\fR, \fB\-\-ht\fR=\fI\,HASHTAGS\/\fR
|
||||||
add hashtags (comma delimited) to the review
|
add hashtags (comma delimited) to the review
|
||||||
@ -30,6 +33,9 @@ add local branch name as a hashtag
|
|||||||
\fB\-l\fR LABELS, \fB\-\-label\fR=\fI\,LABELS\/\fR
|
\fB\-l\fR LABELS, \fB\-\-label\fR=\fI\,LABELS\/\fR
|
||||||
add a label when uploading
|
add a label when uploading
|
||||||
.TP
|
.TP
|
||||||
|
\fB\-\-pd\fR=\fI\,PATCHSET_DESCRIPTION\/\fR, \fB\-\-patchset\-description\fR=\fI\,PATCHSET_DESCRIPTION\/\fR
|
||||||
|
description for patchset
|
||||||
|
.TP
|
||||||
\fB\-\-re\fR=\fI\,REVIEWERS\/\fR, \fB\-\-reviewers\fR=\fI\,REVIEWERS\/\fR
|
\fB\-\-re\fR=\fI\,REVIEWERS\/\fR, \fB\-\-reviewers\fR=\fI\,REVIEWERS\/\fR
|
||||||
request reviews from these people
|
request reviews from these people
|
||||||
.TP
|
.TP
|
||||||
@ -198,6 +204,12 @@ review.URL.uploadnotify:
|
|||||||
Control e\-mail notifications when uploading.
|
Control e\-mail notifications when uploading.
|
||||||
https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#notify
|
https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#notify
|
||||||
.PP
|
.PP
|
||||||
|
review.URL.uploadwarningthreshold:
|
||||||
|
.PP
|
||||||
|
Repo will warn you if you are attempting to upload a large number of commits in
|
||||||
|
one or more branches. By default, the threshold is five commits. This option
|
||||||
|
allows you to override the warning threshold to a different value.
|
||||||
|
.PP
|
||||||
References
|
References
|
||||||
.PP
|
.PP
|
||||||
Gerrit Code Review: https://www.gerritcodereview.com/
|
Gerrit Code Review: https://www.gerritcodereview.com/
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||||
.TH REPO "1" "June 2023" "repo" "Repo Manual"
|
.TH REPO "1" "December 2024" "repo" "Repo Manual"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
repo \- repository management tool built on top of git
|
repo \- repository management tool built on top of git
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
@ -79,11 +79,8 @@ Download and checkout a change
|
|||||||
forall
|
forall
|
||||||
Run a shell command in each project
|
Run a shell command in each project
|
||||||
.TP
|
.TP
|
||||||
gitc\-delete
|
gc
|
||||||
Delete a GITC Client.
|
Cleaning up internal repo state.
|
||||||
.TP
|
|
||||||
gitc\-init
|
|
||||||
Initialize a GITC Client.
|
|
||||||
.TP
|
.TP
|
||||||
grep
|
grep
|
||||||
Print lines matching a pattern
|
Print lines matching a pattern
|
||||||
|
180
manifest_xml.py
180
manifest_xml.py
@ -114,9 +114,37 @@ def XmlInt(node, attr, default=None):
|
|||||||
try:
|
try:
|
||||||
return int(value)
|
return int(value)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(f'manifest: invalid {attr}="{value}" integer')
|
||||||
'manifest: invalid %s="%s" integer' % (attr, value)
|
|
||||||
)
|
|
||||||
|
def normalize_url(url: str) -> str:
|
||||||
|
"""Mutate input 'url' into normalized form:
|
||||||
|
|
||||||
|
* remove trailing slashes
|
||||||
|
* convert SCP-like syntax to SSH URL
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: URL to modify
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The normalized URL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
url = url.rstrip("/")
|
||||||
|
parsed_url = urllib.parse.urlparse(url)
|
||||||
|
|
||||||
|
# This matches patterns like "git@github.com:foo".
|
||||||
|
scp_like_url_re = r"^[^/:]+@[^/:]+:[^/]+"
|
||||||
|
|
||||||
|
# If our URL is missing a schema and matches git's
|
||||||
|
# SCP-like syntax we should convert it to a proper
|
||||||
|
# SSH URL instead to make urljoin() happier.
|
||||||
|
#
|
||||||
|
# See: https://git-scm.com/docs/git-clone#URLS
|
||||||
|
if not parsed_url.scheme and re.match(scp_like_url_re, url):
|
||||||
|
return "ssh://" + url.replace(":", "/", 1)
|
||||||
|
|
||||||
|
return url
|
||||||
|
|
||||||
|
|
||||||
class _Default:
|
class _Default:
|
||||||
@ -182,20 +210,22 @@ class _XmlRemote:
|
|||||||
def _resolveFetchUrl(self):
|
def _resolveFetchUrl(self):
|
||||||
if self.fetchUrl is None:
|
if self.fetchUrl is None:
|
||||||
return ""
|
return ""
|
||||||
url = self.fetchUrl.rstrip("/")
|
|
||||||
manifestUrl = self.manifestUrl.rstrip("/")
|
|
||||||
# urljoin will gets confused over quite a few things. The ones we care
|
|
||||||
# about here are:
|
|
||||||
# * no scheme in the base url, like <hostname:port>
|
|
||||||
# We handle no scheme by replacing it with an obscure protocol, gopher
|
|
||||||
# and then replacing it with the original when we are done.
|
|
||||||
|
|
||||||
if manifestUrl.find(":") != manifestUrl.find("/") - 1:
|
fetch_url = normalize_url(self.fetchUrl)
|
||||||
url = urllib.parse.urljoin("gopher://" + manifestUrl, url)
|
manifest_url = normalize_url(self.manifestUrl)
|
||||||
url = re.sub(r"^gopher://", "", url)
|
|
||||||
|
# urljoin doesn't like URLs with no scheme in the base URL
|
||||||
|
# such as file paths. We handle this by prefixing it with
|
||||||
|
# an obscure protocol, gopher, and replacing it with the
|
||||||
|
# original after urljoin
|
||||||
|
if manifest_url.find(":") != manifest_url.find("/") - 1:
|
||||||
|
fetch_url = urllib.parse.urljoin(
|
||||||
|
"gopher://" + manifest_url, fetch_url
|
||||||
|
)
|
||||||
|
fetch_url = re.sub(r"^gopher://", "", fetch_url)
|
||||||
else:
|
else:
|
||||||
url = urllib.parse.urljoin(manifestUrl, url)
|
fetch_url = urllib.parse.urljoin(manifest_url, fetch_url)
|
||||||
return url
|
return fetch_url
|
||||||
|
|
||||||
def ToRemoteSpec(self, projectName):
|
def ToRemoteSpec(self, projectName):
|
||||||
fetchUrl = self.resolvedFetchUrl.rstrip("/")
|
fetchUrl = self.resolvedFetchUrl.rstrip("/")
|
||||||
@ -275,7 +305,7 @@ class _XmlSubmanifest:
|
|||||||
parent.repodir,
|
parent.repodir,
|
||||||
linkFile,
|
linkFile,
|
||||||
parent_groups=",".join(groups) or "",
|
parent_groups=",".join(groups) or "",
|
||||||
submanifest_path=self.relpath,
|
submanifest_path=os.path.join(parent.path_prefix, self.relpath),
|
||||||
outer_client=outer_client,
|
outer_client=outer_client,
|
||||||
default_groups=default_groups,
|
default_groups=default_groups,
|
||||||
)
|
)
|
||||||
@ -405,11 +435,6 @@ class XmlManifest:
|
|||||||
self.parent_groups = parent_groups
|
self.parent_groups = parent_groups
|
||||||
self.default_groups = default_groups
|
self.default_groups = default_groups
|
||||||
|
|
||||||
if outer_client and self.isGitcClient:
|
|
||||||
raise ManifestParseError(
|
|
||||||
"Multi-manifest is incompatible with `gitc-init`"
|
|
||||||
)
|
|
||||||
|
|
||||||
if submanifest_path and not outer_client:
|
if submanifest_path and not outer_client:
|
||||||
# If passing a submanifest_path, there must be an outer_client.
|
# If passing a submanifest_path, there must be an outer_client.
|
||||||
raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
|
raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
|
||||||
@ -810,7 +835,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
ret.setdefault(child.nodeName, []).append(element)
|
ret.setdefault(child.nodeName, []).append(element)
|
||||||
else:
|
else:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(
|
||||||
'Unhandled element "%s"' % (child.nodeName,)
|
f'Unhandled element "{child.nodeName}"'
|
||||||
)
|
)
|
||||||
|
|
||||||
append_children(element, child)
|
append_children(element, child)
|
||||||
@ -989,9 +1014,9 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
|
|
||||||
def SetManifestOverride(self, path):
|
def SetManifestOverride(self, path):
|
||||||
"""Override manifestFile. The caller must call Unload()"""
|
"""Override manifestFile. The caller must call Unload()"""
|
||||||
self._outer_client.manifest.manifestFileOverrides[
|
self._outer_client.manifest.manifestFileOverrides[self.path_prefix] = (
|
||||||
self.path_prefix
|
path
|
||||||
] = path
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def UseLocalManifests(self):
|
def UseLocalManifests(self):
|
||||||
@ -1258,12 +1283,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
try:
|
try:
|
||||||
root = xml.dom.minidom.parse(path)
|
root = xml.dom.minidom.parse(path)
|
||||||
except (OSError, xml.parsers.expat.ExpatError) as e:
|
except (OSError, xml.parsers.expat.ExpatError) as e:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(f"error parsing manifest {path}: {e}")
|
||||||
"error parsing manifest %s: %s" % (path, e)
|
|
||||||
)
|
|
||||||
|
|
||||||
if not root or not root.childNodes:
|
if not root or not root.childNodes:
|
||||||
raise ManifestParseError("no root node in %s" % (path,))
|
raise ManifestParseError(f"no root node in {path}")
|
||||||
|
|
||||||
for manifest in root.childNodes:
|
for manifest in root.childNodes:
|
||||||
if (
|
if (
|
||||||
@ -1272,7 +1295,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
):
|
):
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
raise ManifestParseError("no <manifest> in %s" % (path,))
|
raise ManifestParseError(f"no <manifest> in {path}")
|
||||||
|
|
||||||
nodes = []
|
nodes = []
|
||||||
for node in manifest.childNodes:
|
for node in manifest.childNodes:
|
||||||
@ -1282,7 +1305,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
msg = self._CheckLocalPath(name)
|
msg = self._CheckLocalPath(name)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<include> invalid "name": %s: %s' % (name, msg)
|
f'<include> invalid "name": {name}: {msg}'
|
||||||
)
|
)
|
||||||
include_groups = ""
|
include_groups = ""
|
||||||
if parent_groups:
|
if parent_groups:
|
||||||
@ -1314,7 +1337,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
raise
|
raise
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(
|
||||||
"failed parsing included manifest %s: %s" % (name, e)
|
f"failed parsing included manifest {name}: {e}"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
if parent_groups and node.nodeName == "project":
|
if parent_groups and node.nodeName == "project":
|
||||||
@ -1422,6 +1445,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
|
|
||||||
repo_hooks_project = None
|
repo_hooks_project = None
|
||||||
enabled_repo_hooks = None
|
enabled_repo_hooks = None
|
||||||
|
failed_revision_changes = []
|
||||||
for node in itertools.chain(*node_list):
|
for node in itertools.chain(*node_list):
|
||||||
if node.nodeName == "project":
|
if node.nodeName == "project":
|
||||||
project = self._ParseProject(node)
|
project = self._ParseProject(node)
|
||||||
@ -1448,6 +1472,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
remote = self._get_remote(node)
|
remote = self._get_remote(node)
|
||||||
dest_branch = node.getAttribute("dest-branch")
|
dest_branch = node.getAttribute("dest-branch")
|
||||||
upstream = node.getAttribute("upstream")
|
upstream = node.getAttribute("upstream")
|
||||||
|
base_revision = node.getAttribute("base-rev")
|
||||||
|
|
||||||
named_projects = self._projects[name]
|
named_projects = self._projects[name]
|
||||||
if dest_path and not path and len(named_projects) > 1:
|
if dest_path and not path and len(named_projects) > 1:
|
||||||
@ -1461,6 +1486,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
if groups:
|
if groups:
|
||||||
p.groups.extend(groups)
|
p.groups.extend(groups)
|
||||||
if revision:
|
if revision:
|
||||||
|
if base_revision:
|
||||||
|
if p.revisionExpr != base_revision:
|
||||||
|
failed_revision_changes.append(
|
||||||
|
"extend-project name %s mismatch base "
|
||||||
|
"%s vs revision %s"
|
||||||
|
% (name, base_revision, p.revisionExpr)
|
||||||
|
)
|
||||||
p.SetRevision(revision)
|
p.SetRevision(revision)
|
||||||
|
|
||||||
if remote_name:
|
if remote_name:
|
||||||
@ -1535,6 +1567,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
if node.nodeName == "remove-project":
|
if node.nodeName == "remove-project":
|
||||||
name = node.getAttribute("name")
|
name = node.getAttribute("name")
|
||||||
path = node.getAttribute("path")
|
path = node.getAttribute("path")
|
||||||
|
base_revision = node.getAttribute("base-rev")
|
||||||
|
|
||||||
# Name or path needed.
|
# Name or path needed.
|
||||||
if not name and not path:
|
if not name and not path:
|
||||||
@ -1548,6 +1581,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
for projname, projects in list(self._projects.items()):
|
for projname, projects in list(self._projects.items()):
|
||||||
for p in projects:
|
for p in projects:
|
||||||
if name == projname and not path:
|
if name == projname and not path:
|
||||||
|
if base_revision:
|
||||||
|
if p.revisionExpr != base_revision:
|
||||||
|
failed_revision_changes.append(
|
||||||
|
"remove-project name %s mismatch base "
|
||||||
|
"%s vs revision %s"
|
||||||
|
% (name, base_revision, p.revisionExpr)
|
||||||
|
)
|
||||||
del self._paths[p.relpath]
|
del self._paths[p.relpath]
|
||||||
if not removed_project:
|
if not removed_project:
|
||||||
del self._projects[name]
|
del self._projects[name]
|
||||||
@ -1555,6 +1595,17 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
elif path == p.relpath and (
|
elif path == p.relpath and (
|
||||||
name == projname or not name
|
name == projname or not name
|
||||||
):
|
):
|
||||||
|
if base_revision:
|
||||||
|
if p.revisionExpr != base_revision:
|
||||||
|
failed_revision_changes.append(
|
||||||
|
"remove-project path %s mismatch base "
|
||||||
|
"%s vs revision %s"
|
||||||
|
% (
|
||||||
|
p.relpath,
|
||||||
|
base_revision,
|
||||||
|
p.revisionExpr,
|
||||||
|
)
|
||||||
|
)
|
||||||
self._projects[projname].remove(p)
|
self._projects[projname].remove(p)
|
||||||
del self._paths[p.relpath]
|
del self._paths[p.relpath]
|
||||||
removed_project = p.name
|
removed_project = p.name
|
||||||
@ -1574,6 +1625,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
"project: %s" % node.toxml()
|
"project: %s" % node.toxml()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if failed_revision_changes:
|
||||||
|
raise ManifestParseError(
|
||||||
|
"revision base check failed, rebase patches and update "
|
||||||
|
"base revs for: ",
|
||||||
|
failed_revision_changes,
|
||||||
|
)
|
||||||
|
|
||||||
# Store repo hooks project information.
|
# Store repo hooks project information.
|
||||||
if repo_hooks_project:
|
if repo_hooks_project:
|
||||||
# Store a reference to the Project.
|
# Store a reference to the Project.
|
||||||
@ -1765,13 +1823,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
msg = self._CheckLocalPath(name)
|
msg = self._CheckLocalPath(name)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<submanifest> invalid "name": %s: %s' % (name, msg)
|
f'<submanifest> invalid "name": {name}: {msg}'
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
msg = self._CheckLocalPath(path)
|
msg = self._CheckLocalPath(path)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<submanifest> invalid "path": %s: %s' % (path, msg)
|
f'<submanifest> invalid "path": {path}: {msg}'
|
||||||
)
|
)
|
||||||
|
|
||||||
submanifest = _XmlSubmanifest(
|
submanifest = _XmlSubmanifest(
|
||||||
@ -1806,7 +1864,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
msg = self._CheckLocalPath(name, dir_ok=True)
|
msg = self._CheckLocalPath(name, dir_ok=True)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<project> invalid "name": %s: %s' % (name, msg)
|
f'<project> invalid "name": {name}: {msg}'
|
||||||
)
|
)
|
||||||
if parent:
|
if parent:
|
||||||
name = self._JoinName(parent.name, name)
|
name = self._JoinName(parent.name, name)
|
||||||
@ -1816,7 +1874,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
remote = self._default.remote
|
remote = self._default.remote
|
||||||
if remote is None:
|
if remote is None:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(
|
||||||
"no remote for project %s within %s" % (name, self.manifestFile)
|
f"no remote for project {name} within {self.manifestFile}"
|
||||||
)
|
)
|
||||||
|
|
||||||
revisionExpr = node.getAttribute("revision") or remote.revision
|
revisionExpr = node.getAttribute("revision") or remote.revision
|
||||||
@ -1837,7 +1895,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
|
msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<project> invalid "path": %s: %s' % (path, msg)
|
f'<project> invalid "path": {path}: {msg}'
|
||||||
)
|
)
|
||||||
|
|
||||||
rebase = XmlBool(node, "rebase", True)
|
rebase = XmlBool(node, "rebase", True)
|
||||||
@ -1998,7 +2056,12 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
path = path.rstrip("/")
|
path = path.rstrip("/")
|
||||||
name = name.rstrip("/")
|
name = name.rstrip("/")
|
||||||
relpath = self._JoinRelpath(parent.relpath, path)
|
relpath = self._JoinRelpath(parent.relpath, path)
|
||||||
gitdir = os.path.join(parent.gitdir, "subprojects", "%s.git" % path)
|
subprojects = os.path.join(parent.gitdir, "subprojects", f"{path}.git")
|
||||||
|
modules = os.path.join(parent.gitdir, "modules", path)
|
||||||
|
if platform_utils.isdir(subprojects):
|
||||||
|
gitdir = subprojects
|
||||||
|
else:
|
||||||
|
gitdir = modules
|
||||||
objdir = os.path.join(
|
objdir = os.path.join(
|
||||||
parent.gitdir, "subproject-objects", "%s.git" % name
|
parent.gitdir, "subproject-objects", "%s.git" % name
|
||||||
)
|
)
|
||||||
@ -2049,22 +2112,22 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
# implementation:
|
# implementation:
|
||||||
# https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
|
# https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
|
||||||
BAD_CODEPOINTS = {
|
BAD_CODEPOINTS = {
|
||||||
"\u200C", # ZERO WIDTH NON-JOINER
|
"\u200c", # ZERO WIDTH NON-JOINER
|
||||||
"\u200D", # ZERO WIDTH JOINER
|
"\u200d", # ZERO WIDTH JOINER
|
||||||
"\u200E", # LEFT-TO-RIGHT MARK
|
"\u200e", # LEFT-TO-RIGHT MARK
|
||||||
"\u200F", # RIGHT-TO-LEFT MARK
|
"\u200f", # RIGHT-TO-LEFT MARK
|
||||||
"\u202A", # LEFT-TO-RIGHT EMBEDDING
|
"\u202a", # LEFT-TO-RIGHT EMBEDDING
|
||||||
"\u202B", # RIGHT-TO-LEFT EMBEDDING
|
"\u202b", # RIGHT-TO-LEFT EMBEDDING
|
||||||
"\u202C", # POP DIRECTIONAL FORMATTING
|
"\u202c", # POP DIRECTIONAL FORMATTING
|
||||||
"\u202D", # LEFT-TO-RIGHT OVERRIDE
|
"\u202d", # LEFT-TO-RIGHT OVERRIDE
|
||||||
"\u202E", # RIGHT-TO-LEFT OVERRIDE
|
"\u202e", # RIGHT-TO-LEFT OVERRIDE
|
||||||
"\u206A", # INHIBIT SYMMETRIC SWAPPING
|
"\u206a", # INHIBIT SYMMETRIC SWAPPING
|
||||||
"\u206B", # ACTIVATE SYMMETRIC SWAPPING
|
"\u206b", # ACTIVATE SYMMETRIC SWAPPING
|
||||||
"\u206C", # INHIBIT ARABIC FORM SHAPING
|
"\u206c", # INHIBIT ARABIC FORM SHAPING
|
||||||
"\u206D", # ACTIVATE ARABIC FORM SHAPING
|
"\u206d", # ACTIVATE ARABIC FORM SHAPING
|
||||||
"\u206E", # NATIONAL DIGIT SHAPES
|
"\u206e", # NATIONAL DIGIT SHAPES
|
||||||
"\u206F", # NOMINAL DIGIT SHAPES
|
"\u206f", # NOMINAL DIGIT SHAPES
|
||||||
"\uFEFF", # ZERO WIDTH NO-BREAK SPACE
|
"\ufeff", # ZERO WIDTH NO-BREAK SPACE
|
||||||
}
|
}
|
||||||
if BAD_CODEPOINTS & path_codepoints:
|
if BAD_CODEPOINTS & path_codepoints:
|
||||||
# This message is more expansive than reality, but should be fine.
|
# This message is more expansive than reality, but should be fine.
|
||||||
@ -2094,7 +2157,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
if not cwd_dot_ok or parts != ["."]:
|
if not cwd_dot_ok or parts != ["."]:
|
||||||
for part in set(parts):
|
for part in set(parts):
|
||||||
if part in {".", "..", ".git"} or part.startswith(".repo"):
|
if part in {".", "..", ".git"} or part.startswith(".repo"):
|
||||||
return "bad component: %s" % (part,)
|
return f"bad component: {part}"
|
||||||
|
|
||||||
if not dir_ok and resep.match(path[-1]):
|
if not dir_ok and resep.match(path[-1]):
|
||||||
return "dirs not allowed"
|
return "dirs not allowed"
|
||||||
@ -2130,7 +2193,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
msg = cls._CheckLocalPath(dest)
|
msg = cls._CheckLocalPath(dest)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<%s> invalid "dest": %s: %s' % (element, dest, msg)
|
f'<{element}> invalid "dest": {dest}: {msg}'
|
||||||
)
|
)
|
||||||
|
|
||||||
# |src| is the file we read from or path we point to for symlinks.
|
# |src| is the file we read from or path we point to for symlinks.
|
||||||
@ -2141,7 +2204,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
)
|
)
|
||||||
if msg:
|
if msg:
|
||||||
raise ManifestInvalidPathError(
|
raise ManifestInvalidPathError(
|
||||||
'<%s> invalid "src": %s: %s' % (element, src, msg)
|
f'<{element}> invalid "src": {src}: {msg}'
|
||||||
)
|
)
|
||||||
|
|
||||||
def _ParseCopyFile(self, project, node):
|
def _ParseCopyFile(self, project, node):
|
||||||
@ -2185,7 +2248,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
|||||||
v = self._remotes.get(name)
|
v = self._remotes.get(name)
|
||||||
if not v:
|
if not v:
|
||||||
raise ManifestParseError(
|
raise ManifestParseError(
|
||||||
"remote %s not defined in %s" % (name, self.manifestFile)
|
f"remote {name} not defined in {self.manifestFile}"
|
||||||
)
|
)
|
||||||
return v
|
return v
|
||||||
|
|
||||||
@ -2262,7 +2325,6 @@ class RepoClient(XmlManifest):
|
|||||||
submanifest_path: The submanifest root relative to the repo root.
|
submanifest_path: The submanifest root relative to the repo root.
|
||||||
**kwargs: Additional keyword arguments, passed to XmlManifest.
|
**kwargs: Additional keyword arguments, passed to XmlManifest.
|
||||||
"""
|
"""
|
||||||
self.isGitcClient = False
|
|
||||||
submanifest_path = submanifest_path or ""
|
submanifest_path = submanifest_path or ""
|
||||||
if submanifest_path:
|
if submanifest_path:
|
||||||
self._CheckLocalPath(submanifest_path)
|
self._CheckLocalPath(submanifest_path)
|
||||||
|
2
pager.py
2
pager.py
@ -40,7 +40,7 @@ def RunPager(globalConfig):
|
|||||||
|
|
||||||
|
|
||||||
def TerminatePager():
|
def TerminatePager():
|
||||||
global pager_process, old_stdout, old_stderr
|
global pager_process
|
||||||
if pager_process:
|
if pager_process:
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
@ -57,8 +57,8 @@ def _validate_winpath(path):
|
|||||||
if _winpath_is_valid(path):
|
if _winpath_is_valid(path):
|
||||||
return path
|
return path
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'Path "{}" must be a relative path or an absolute '
|
f'Path "{path}" must be a relative path or an absolute '
|
||||||
"path starting with a drive letter".format(path)
|
"path starting with a drive letter"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -156,6 +156,12 @@ def remove(path, missing_ok=False):
|
|||||||
os.rmdir(longpath)
|
os.rmdir(longpath)
|
||||||
else:
|
else:
|
||||||
os.remove(longpath)
|
os.remove(longpath)
|
||||||
|
elif (
|
||||||
|
e.errno == errno.EROFS
|
||||||
|
and missing_ok
|
||||||
|
and not os.path.exists(longpath)
|
||||||
|
):
|
||||||
|
pass
|
||||||
elif missing_ok and e.errno == errno.ENOENT:
|
elif missing_ok and e.errno == errno.ENOENT:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
@ -251,32 +257,3 @@ def readlink(path):
|
|||||||
return platform_utils_win32.readlink(_makelongpath(path))
|
return platform_utils_win32.readlink(_makelongpath(path))
|
||||||
else:
|
else:
|
||||||
return os.readlink(path)
|
return os.readlink(path)
|
||||||
|
|
||||||
|
|
||||||
def realpath(path):
|
|
||||||
"""Return the canonical path of the specified filename, eliminating
|
|
||||||
any symbolic links encountered in the path.
|
|
||||||
|
|
||||||
Availability: Windows, Unix.
|
|
||||||
"""
|
|
||||||
if isWindows():
|
|
||||||
current_path = os.path.abspath(path)
|
|
||||||
path_tail = []
|
|
||||||
for c in range(0, 100): # Avoid cycles
|
|
||||||
if islink(current_path):
|
|
||||||
target = readlink(current_path)
|
|
||||||
current_path = os.path.join(
|
|
||||||
os.path.dirname(current_path), target
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
basename = os.path.basename(current_path)
|
|
||||||
if basename == "":
|
|
||||||
path_tail.append(current_path)
|
|
||||||
break
|
|
||||||
path_tail.append(basename)
|
|
||||||
current_path = os.path.dirname(current_path)
|
|
||||||
path_tail.reverse()
|
|
||||||
result = os.path.normpath(os.path.join(*path_tail))
|
|
||||||
return result
|
|
||||||
else:
|
|
||||||
return os.path.realpath(path)
|
|
||||||
|
@ -186,9 +186,7 @@ def _create_symlink(source, link_name, dwFlags):
|
|||||||
error_desc = FormatError(code).strip()
|
error_desc = FormatError(code).strip()
|
||||||
if code == ERROR_PRIVILEGE_NOT_HELD:
|
if code == ERROR_PRIVILEGE_NOT_HELD:
|
||||||
raise OSError(errno.EPERM, error_desc, link_name)
|
raise OSError(errno.EPERM, error_desc, link_name)
|
||||||
_raise_winerror(
|
_raise_winerror(code, f'Error creating symbolic link "{link_name}"')
|
||||||
code, 'Error creating symbolic link "{}"'.format(link_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def islink(path):
|
def islink(path):
|
||||||
@ -210,7 +208,7 @@ def readlink(path):
|
|||||||
)
|
)
|
||||||
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
||||||
_raise_winerror(
|
_raise_winerror(
|
||||||
get_last_error(), 'Error opening symbolic link "{}"'.format(path)
|
get_last_error(), f'Error opening symbolic link "{path}"'
|
||||||
)
|
)
|
||||||
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
||||||
n_bytes_returned = DWORD()
|
n_bytes_returned = DWORD()
|
||||||
@ -227,7 +225,7 @@ def readlink(path):
|
|||||||
CloseHandle(reparse_point_handle)
|
CloseHandle(reparse_point_handle)
|
||||||
if not io_result:
|
if not io_result:
|
||||||
_raise_winerror(
|
_raise_winerror(
|
||||||
get_last_error(), 'Error reading symbolic link "{}"'.format(path)
|
get_last_error(), f'Error reading symbolic link "{path}"'
|
||||||
)
|
)
|
||||||
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
||||||
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
||||||
@ -236,11 +234,11 @@ def readlink(path):
|
|||||||
return rdb.MountPointReparseBuffer.PrintName
|
return rdb.MountPointReparseBuffer.PrintName
|
||||||
# Unsupported reparse point type.
|
# Unsupported reparse point type.
|
||||||
_raise_winerror(
|
_raise_winerror(
|
||||||
ERROR_NOT_SUPPORTED, 'Error reading symbolic link "{}"'.format(path)
|
ERROR_NOT_SUPPORTED, f'Error reading symbolic link "{path}"'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _raise_winerror(code, error_desc):
|
def _raise_winerror(code, error_desc):
|
||||||
win_error_desc = FormatError(code).strip()
|
win_error_desc = FormatError(code).strip()
|
||||||
error_desc = "{0}: {1}".format(error_desc, win_error_desc)
|
error_desc = f"{error_desc}: {win_error_desc}"
|
||||||
raise WinError(code, error_desc)
|
raise WinError(code, error_desc)
|
||||||

progress.py (19 lines changed)

@@ -52,11 +52,11 @@ def duration_str(total):
     uses microsecond resolution. This makes for noisy output.
     """
     hours, mins, secs = convert_to_hms(total)
-    ret = "%.3fs" % (secs,)
+    ret = f"{secs:.3f}s"
     if mins:
-        ret = "%im%s" % (mins, ret)
+        ret = f"{mins}m{ret}"
     if hours:
-        ret = "%ih%s" % (hours, ret)
+        ret = f"{hours}h{ret}"
     return ret


@@ -100,6 +100,7 @@ class Progress:
         self._show = not delay
         self._units = units
         self._elide = elide and _TTY
+        self._quiet = quiet

         # Only show the active jobs section if we run more than one in parallel.
         self._show_jobs = False

@@ -114,13 +115,7 @@ class Progress:
         )
         self._update_thread.daemon = True

-        # When quiet, never show any output. It's a bit hacky, but reusing the
-        # existing logic that delays initial output keeps the rest of the class
-        # clean. Basically we set the start time to years in the future.
-        if quiet:
-            self._show = False
-            self._start += 2**32
-        elif show_elapsed:
+        if not quiet and show_elapsed:
             self._update_thread.start()

     def _update_loop(self):

@@ -160,7 +155,7 @@ class Progress:
             msg = self._last_msg
         self._last_msg = msg

-        if not _TTY or IsTraceToStderr():
+        if not _TTY or IsTraceToStderr() or self._quiet:
             return

         elapsed_sec = time.time() - self._start

@@ -202,7 +197,7 @@ class Progress:

     def end(self):
         self._update_event.set()
-        if not _TTY or IsTraceToStderr() or not self._show:
+        if not _TTY or IsTraceToStderr() or self._quiet:
             return

         duration = duration_str(time.time() - self._start)
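
The duration_str() change above only swaps %-formatting for f-strings, so the rendered format is unchanged: fractional seconds always, minutes and hours only when non-zero. A minimal standalone sketch of the same logic (convert_to_hms() is replaced here by inline arithmetic, which is an assumption about its behaviour):

    def duration_str(total):
        # Split a float number of seconds into hours/minutes/seconds.
        hours = int(total // 3600)
        mins = int((total % 3600) // 60)
        secs = total - hours * 3600 - mins * 60
        ret = f"{secs:.3f}s"
        if mins:
            ret = f"{mins}m{ret}"
        if hours:
            ret = f"{hours}h{ret}"
        return ret

    print(duration_str(3723.5))  # -> 1h2m3.500s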

project.py (541 lines changed; file diff suppressed because it is too large)

@@ -16,3 +16,8 @@
 line-length = 80
 # NB: Keep in sync with tox.ini.
 target-version = ['py36', 'py37', 'py38', 'py39', 'py310', 'py311'] #, 'py312'
+
+[tool.pytest.ini_options]
+markers = """
+    skip_cq: Skip tests in the CQ. Should be rarely used!
+"""
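
The [tool.pytest.ini_options] block above registers a skip_cq marker, which the updated run_tests wrapper further below deselects in CI with -m "not skip_cq". A hypothetical test using the marker could look like this (the test name and body are illustrative only, not from the source):

    import pytest

    @pytest.mark.skip_cq
    def test_requires_real_network():
        # Deselected in the CQ via: pytest -m "not skip_cq"
        ...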

release/update-hooks (new executable file, 143 lines)

#!/usr/bin/env python3
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Helper tool for updating hooks from their various upstreams."""

import argparse
import base64
import json
from pathlib import Path
import sys
from typing import List, Optional
import urllib.request


assert sys.version_info >= (3, 8), "Python 3.8+ required"


TOPDIR = Path(__file__).resolve().parent.parent
HOOKS_DIR = TOPDIR / "hooks"


def update_hook_commit_msg() -> None:
    """Update commit-msg hook from Gerrit."""
    hook = HOOKS_DIR / "commit-msg"
    print(
        f"{hook.name}: Updating from https://gerrit.googlesource.com/gerrit/"
        "+/HEAD/resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
    )

    # Get the current commit.
    url = "https://gerrit.googlesource.com/gerrit/+/HEAD?format=JSON"
    with urllib.request.urlopen(url) as fp:
        data = fp.read()
    # Discard the xss protection.
    data = data.split(b"\n", 1)[1]
    data = json.loads(data)
    commit = data["commit"]

    # Fetch the data for that commit.
    url = (
        f"https://gerrit.googlesource.com/gerrit/+/{commit}/"
        "resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
    )
    with urllib.request.urlopen(f"{url}?format=TEXT") as fp:
        data = fp.read()

    # gitiles base64 encodes text data.
    data = base64.b64decode(data)

    # Inject header into the hook.
    lines = data.split(b"\n")
    lines = (
        lines[:1]
        + [
            b"# DO NOT EDIT THIS FILE",
            (
                b"# All updates should be sent upstream: "
                b"https://gerrit.googlesource.com/gerrit/"
            ),
            f"# This is synced from commit: {commit}".encode("utf-8"),
            b"# DO NOT EDIT THIS FILE",
        ]
        + lines[1:]
    )
    data = b"\n".join(lines)

    # Update the hook.
    hook.write_bytes(data)
    hook.chmod(0o755)


def update_hook_pre_auto_gc() -> None:
    """Update pre-auto-gc hook from git."""
    hook = HOOKS_DIR / "pre-auto-gc"
    print(
        f"{hook.name}: Updating from https://github.com/git/git/"
        "HEAD/contrib/hooks/pre-auto-gc-battery"
    )

    # Get the current commit.
    headers = {
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    url = "https://api.github.com/repos/git/git/git/refs/heads/master"
    req = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(req) as fp:
        data = fp.read()
    data = json.loads(data)

    # Fetch the data for that commit.
    commit = data["object"]["sha"]
    url = (
        f"https://raw.githubusercontent.com/git/git/{commit}/"
        "contrib/hooks/pre-auto-gc-battery"
    )
    with urllib.request.urlopen(url) as fp:
        data = fp.read()

    # Inject header into the hook.
    lines = data.split(b"\n")
    lines = (
        lines[:1]
        + [
            b"# DO NOT EDIT THIS FILE",
            (
                b"# All updates should be sent upstream: "
                b"https://github.com/git/git/"
            ),
            f"# This is synced from commit: {commit}".encode("utf-8"),
            b"# DO NOT EDIT THIS FILE",
        ]
        + lines[1:]
    )
    data = b"\n".join(lines)

    # Update the hook.
    hook.write_bytes(data)
    hook.chmod(0o755)


def main(argv: Optional[List[str]] = None) -> Optional[int]:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.parse_args(argv)

    update_hook_commit_msg()
    update_hook_pre_auto_gc()


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
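
Given the header-injection logic above, which keeps only the first upstream line verbatim and inserts its own marker lines after it, a synced hook would start roughly like this (assuming the upstream hook begins with a #!/bin/sh line; the commit hash is a placeholder):

    #!/bin/sh
    # DO NOT EDIT THIS FILE
    # All updates should be sent upstream: https://gerrit.googlesource.com/gerrit/
    # This is synced from commit: <commit sha>
    # DO NOT EDIT THIS FILE
    ...rest of the upstream hook...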

@@ -16,6 +16,7 @@

 import os
 import re
+import shlex
 import subprocess
 import sys


@@ -35,12 +36,7 @@ KEYID_ECC = "E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39"

 def cmdstr(cmd):
     """Get a nicely quoted shell command."""
-    ret = []
-    for arg in cmd:
-        if not re.match(r"^[a-zA-Z0-9/_.=-]+$", arg):
-            arg = f'"{arg}"'
-        ret.append(arg)
-    return " ".join(ret)
+    return " ".join(shlex.quote(x) for x in cmd)


 def run(opts, cmd, check=True, **kwargs):

repo (338 lines changed)

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2008 The Android Open Source Project
 #

@@ -27,13 +27,14 @@ import platform
 import shlex
 import subprocess
 import sys
+from typing import NamedTuple


 # These should never be newer than the main.py version since this needs to be a
 # bit more flexible with older systems. See that file for more details on the
 # versions we select.
 MIN_PYTHON_VERSION_SOFT = (3, 6)
-MIN_PYTHON_VERSION_HARD = (3, 5)
+MIN_PYTHON_VERSION_HARD = (3, 6)


 # Keep basic logic in sync with repo_trace.py.

@@ -56,9 +57,14 @@ class Trace:
 trace = Trace()


+def cmdstr(cmd):
+    """Get a nicely quoted shell command."""
+    return " ".join(shlex.quote(x) for x in cmd)
+
+
 def exec_command(cmd):
     """Execute |cmd| or return None on failure."""
-    trace.print(":", " ".join(cmd))
+    trace.print(":", cmdstr(cmd))
     try:
         if platform.system() == "Windows":
             ret = subprocess.call(cmd)

@@ -79,24 +85,13 @@ def check_python_version():
     major = ver.major
     minor = ver.minor

-    # Abort on very old Python 2 versions.
-    if (major, minor) < (2, 7):
-        print(
-            "repo: error: Your Python version is too old. "
-            "Please use Python {}.{} or newer instead.".format(
-                *MIN_PYTHON_VERSION_SOFT
-            ),
-            file=sys.stderr,
-        )
-        sys.exit(1)
-
-    # Try to re-exec the version specific Python 3 if needed.
+    # Try to re-exec the version specific Python if needed.
     if (major, minor) < MIN_PYTHON_VERSION_SOFT:
         # Python makes releases ~once a year, so try our min version +10 to help
         # bridge the gap. This is the fallback anyways so perf isn't critical.
         min_major, min_minor = MIN_PYTHON_VERSION_SOFT
         for inc in range(0, 10):
-            reexec("python{}.{}".format(min_major, min_minor + inc))
+            reexec(f"python{min_major}.{min_minor + inc}")

         # Fallback to older versions if possible.
         for inc in range(

@@ -105,47 +100,12 @@ def check_python_version():
         # Don't downgrade, and don't reexec ourselves (which would infinite loop).
         if (min_major, min_minor - inc) <= (major, minor):
             break
-        reexec("python{}.{}".format(min_major, min_minor - inc))
-
-    # Try the generic Python 3 wrapper, but only if it's new enough. If it
-    # isn't, we want to just give up below and make the user resolve things.
-    try:
-        proc = subprocess.Popen(
-            [
-                "python3",
-                "-c",
-                "import sys; "
-                "print(sys.version_info.major, sys.version_info.minor)",
-            ],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
-        (output, _) = proc.communicate()
-        python3_ver = tuple(int(x) for x in output.decode("utf-8").split())
-    except (OSError, subprocess.CalledProcessError):
-        python3_ver = None
-
-    # If the python3 version looks like it's new enough, give it a try.
-    if (
-        python3_ver
-        and python3_ver >= MIN_PYTHON_VERSION_HARD
-        and python3_ver != (major, minor)
-    ):
-        reexec("python3")
+        reexec(f"python{min_major}.{min_minor - inc}")

     # We're still here, so diagnose things for the user.
-    if major < 3:
-        print(
-            "repo: error: Python 2 is no longer supported; "
-            "Please upgrade to Python {}.{}+.".format(
-                *MIN_PYTHON_VERSION_HARD
-            ),
-            file=sys.stderr,
-        )
-        sys.exit(1)
-    elif (major, minor) < MIN_PYTHON_VERSION_HARD:
-        print(
-            "repo: error: Python 3 version is too old; "
+    if (major, minor) < MIN_PYTHON_VERSION_HARD:
+        print(
+            "repo: error: Python version is too old; "
             "Please use Python {}.{} or newer.".format(
                 *MIN_PYTHON_VERSION_HARD
             ),

@@ -170,7 +130,7 @@ if not REPO_REV:
 BUG_URL = "https://issues.gerritcodereview.com/issues/new?component=1370071"

 # increment this whenever we make important changes to this script
-VERSION = (2, 39)
+VERSION = (2, 54)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)

@@ -256,37 +216,21 @@ GIT = "git"  # our git command
 # NB: The version of git that the repo launcher requires may be much older than
 # the version of git that the main repo source tree requires. Keeping this at
 # an older version also makes it easier for users to upgrade/rollback as needed.
-#
-# git-1.7 is in (EOL) Ubuntu Precise.
-MIN_GIT_VERSION = (1, 7, 2)  # minimum supported git version
+MIN_GIT_VERSION = (1, 7, 9)  # minimum supported git version
 repodir = ".repo"  # name of repo's private directory
 S_repo = "repo"  # special repo repository
 S_manifests = "manifests"  # special manifest repository
 REPO_MAIN = S_repo + "/main.py"  # main script
-GITC_CONFIG_FILE = "/gitc/.config"
-GITC_FS_ROOT_DIR = "/gitc/manifest-rw/"


-import collections
 import errno
 import json
 import optparse
 import re
 import shutil
 import stat
-if sys.version_info[0] == 3:
-    import urllib.error
-    import urllib.request
-else:
-    import imp
-
-    import urllib2
-
-    urllib = imp.new_module("urllib")
-    urllib.request = urllib2
-    urllib.error = urllib2
+import urllib.error
+import urllib.request


 repo_config_dir = os.getenv("REPO_CONFIG_DIR", os.path.expanduser("~"))

@@ -294,11 +238,8 @@ home_dot_repo = os.path.join(repo_config_dir, ".repoconfig")
 gpg_dir = os.path.join(home_dot_repo, "gnupg")


-def GetParser(gitc_init=False):
+def GetParser():
     """Setup the CLI parser."""
-    if gitc_init:
-        sys.exit("repo: fatal: GITC not supported.")
-    else:
-        usage = "repo init [options] [-u] url"
+    usage = "repo init [options] [-u] url"

     parser = optparse.OptionParser(usage=usage)

@@ -341,6 +282,12 @@ def InitParser(parser):
         metavar="REVISION",
         help="manifest branch or revision (use HEAD for default)",
     )
+    group.add_option(
+        "--manifest-upstream-branch",
+        help="when a commit is provided to --manifest-branch, this "
+        "is the name of the git ref in which the commit can be found",
+        metavar="BRANCH",
+    )
     group.add_option(
         "-m",
         "--manifest-name",

@@ -540,16 +487,6 @@ def InitParser(parser):
     return parser


-# This is a poor replacement for subprocess.run until we require Python 3.6+.
-RunResult = collections.namedtuple(
-    "RunResult", ("returncode", "stdout", "stderr")
-)
-
-
-class RunError(Exception):
-    """Error when running a command failed."""
-
-
 def run_command(cmd, **kwargs):
     """Run |cmd| and return its output."""
     check = kwargs.pop("check", False)

@@ -566,8 +503,7 @@ def run_command(cmd, **kwargs):
             return output.decode("utf-8")
         except UnicodeError:
             print(
-                "repo: warning: Invalid UTF-8 output:\ncmd: %r\n%r"
-                % (cmd, output),
+                f"repo: warning: Invalid UTF-8 output:\ncmd: {cmd!r}\n{output}",
                 file=sys.stderr,
             )
             return output.decode("utf-8", "backslashreplace")

@@ -575,7 +511,7 @@ def run_command(cmd, **kwargs):
     # Run & package the results.
     proc = subprocess.Popen(cmd, **kwargs)
     (stdout, stderr) = proc.communicate(input=cmd_input)
-    dbg = ": " + " ".join(cmd)
+    dbg = ": " + cmdstr(cmd)
     if cmd_input is not None:
         dbg += " 0<|"
     if stdout == subprocess.PIPE:

@@ -585,80 +521,36 @@ def run_command(cmd, **kwargs):
     elif stderr == subprocess.STDOUT:
         dbg += " 2>&1"
     trace.print(dbg)
-    ret = RunResult(proc.returncode, decode(stdout), decode(stderr))
+    ret = subprocess.CompletedProcess(
+        cmd, proc.returncode, decode(stdout), decode(stderr)
+    )

     # If things failed, print useful debugging output.
     if check and ret.returncode:
         print(
-            'repo: error: "%s" failed with exit status %s'
-            % (cmd[0], ret.returncode),
-            file=sys.stderr,
-        )
-        print(
-            " cwd: %s\n cmd: %r" % (kwargs.get("cwd", os.getcwd()), cmd),
+            f'repo: error: "{cmd[0]}" failed with exit status {ret.returncode}',
             file=sys.stderr,
         )
+        cwd = kwargs.get("cwd", os.getcwd())
+        print(f" cwd: {cwd}\n cmd: {cmd!r}", file=sys.stderr)

         def _print_output(name, output):
             if output:
                 print(
-                    " %s:\n >> %s"
-                    % (name, "\n >> ".join(output.splitlines())),
+                    f" {name}:"
+                    + "".join(f"\n >> {x}" for x in output.splitlines()),
                     file=sys.stderr,
                 )

         _print_output("stdout", ret.stdout)
         _print_output("stderr", ret.stderr)
-        raise RunError(ret)
+        # This will raise subprocess.CalledProcessError for us.
+        ret.check_returncode()

     return ret


-_gitc_manifest_dir = None
-
-
-def get_gitc_manifest_dir():
-    global _gitc_manifest_dir
-    if _gitc_manifest_dir is None:
-        _gitc_manifest_dir = ""
-        try:
-            with open(GITC_CONFIG_FILE) as gitc_config:
-                for line in gitc_config:
-                    match = re.match("gitc_dir=(?P<gitc_manifest_dir>.*)", line)
-                    if match:
-                        _gitc_manifest_dir = match.group("gitc_manifest_dir")
-        except OSError:
-            pass
-    return _gitc_manifest_dir
-
-
-def gitc_parse_clientdir(gitc_fs_path):
-    """Parse a path in the GITC FS and return its client name.
-
-    Args:
-        gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
-
-    Returns:
-        The GITC client name.
-    """
-    if gitc_fs_path == GITC_FS_ROOT_DIR:
-        return None
-    if not gitc_fs_path.startswith(GITC_FS_ROOT_DIR):
-        manifest_dir = get_gitc_manifest_dir()
-        if manifest_dir == "":
-            return None
-        if manifest_dir[-1] != "/":
-            manifest_dir += "/"
-        if gitc_fs_path == manifest_dir:
-            return None
-        if not gitc_fs_path.startswith(manifest_dir):
-            return None
-        return gitc_fs_path.split(manifest_dir)[1].split("/")[0]
-    return gitc_fs_path.split(GITC_FS_ROOT_DIR)[1].split("/")[0]
-
-
 class CloneFailure(Exception):

     """Indicate the remote clone of repo itself failed."""

@@ -695,9 +587,9 @@ def check_repo_rev(dst, rev, repo_verify=True, quiet=False):
     return (remote_ref, rev)


-def _Init(args, gitc_init=False):
+def _Init(args):
     """Installs repo by cloning it over the network."""
-    parser = GetParser(gitc_init=gitc_init)
+    parser = GetParser()
     opt, args = parser.parse_args(args)
     if args:
         if not opt.manifest_url:

@@ -719,7 +611,7 @@ def _Init(args):
     except OSError as e:
         if e.errno != errno.EEXIST:
             print(
-                "fatal: cannot make %s directory: %s" % (repodir, e.strerror),
+                f"fatal: cannot make {repodir} directory: {e.strerror}",
                 file=sys.stderr,
             )
             # Don't raise CloneFailure; that would delete the

@@ -777,15 +669,20 @@ def run_git(*args, **kwargs):
             file=sys.stderr,
         )
         sys.exit(1)
-    except RunError:
+    except subprocess.CalledProcessError:
        raise CloneFailure()


-# The git version info broken down into components for easy analysis.
-# Similar to Python's sys.version_info.
-GitVersion = collections.namedtuple(
-    "GitVersion", ("major", "minor", "micro", "full")
-)
+class GitVersion(NamedTuple):
+    """The git version info broken down into components for easy analysis.
+
+    Similar to Python's sys.version_info.
+    """
+
+    major: int
+    minor: int
+    micro: int
+    full: int


 def ParseGitVersion(ver_str=None):

@@ -817,7 +714,7 @@ def _CheckGitVersion():
     if ver_act < MIN_GIT_VERSION:
         need = ".".join(map(str, MIN_GIT_VERSION))
         print(
-            "fatal: git %s or later required; found %s" % (need, ver_act.full),
+            f"fatal: git {need} or later required; found {ver_act.full}",
             file=sys.stderr,
         )
         raise CloneFailure()

@@ -836,7 +733,8 @@ def SetGitTrace2ParentSid(env=None):
     KEY = "GIT_TRACE2_PARENT_SID"

     now = datetime.datetime.now(datetime.timezone.utc)
-    value = "repo-%s-P%08x" % (now.strftime("%Y%m%dT%H%M%SZ"), os.getpid())
+    timestamp = now.strftime("%Y%m%dT%H%M%SZ")
+    value = f"repo-{timestamp}-P{os.getpid():08x}"

     # If it's already set, then append ourselves.
     if KEY in env:

@@ -880,8 +778,7 @@ def SetupGnuPG(quiet):
     except OSError as e:
         if e.errno != errno.EEXIST:
             print(
-                "fatal: cannot make %s directory: %s"
-                % (home_dot_repo, e.strerror),
+                f"fatal: cannot make {home_dot_repo} directory: {e.strerror}",
                 file=sys.stderr,
             )
             sys.exit(1)

@@ -891,15 +788,15 @@ def SetupGnuPG(quiet):
     except OSError as e:
         if e.errno != errno.EEXIST:
             print(
-                "fatal: cannot make %s directory: %s" % (gpg_dir, e.strerror),
+                f"fatal: cannot make {gpg_dir} directory: {e.strerror}",
                 file=sys.stderr,
             )
             sys.exit(1)

     if not quiet:
         print(
-            "repo: Updating release signing keys to keyset ver %s"
-            % (".".join(str(x) for x in KEYRING_VERSION),)
+            "repo: Updating release signing keys to keyset ver "
+            + ".".join(str(x) for x in KEYRING_VERSION),
         )
     # NB: We use --homedir (and cwd below) because some environments (Windows) do
     # not correctly handle full native paths. We avoid the issue by changing to

@@ -951,10 +848,11 @@ def _GetRepoConfig(name):
         return None
     else:
         print(
-            "repo: error: git %s failed:\n%s" % (" ".join(cmd), ret.stderr),
+            f"repo: error: git {cmdstr(cmd)} failed:\n{ret.stderr}",
             file=sys.stderr,
         )
-        raise RunError()
+        # This will raise subprocess.CalledProcessError for us.
+        ret.check_returncode()


 def _InitHttp():

@@ -1064,7 +962,7 @@ def _Clone(url, cwd, clone_bundle, quiet, verbose):
         os.mkdir(cwd)
     except OSError as e:
         print(
-            "fatal: cannot make %s directory: %s" % (cwd, e.strerror),
+            f"fatal: cannot make {cwd} directory: {e.strerror}",
             file=sys.stderr,
         )
         raise CloneFailure()

@@ -1104,7 +1002,7 @@ def resolve_repo_rev(cwd, committish):
         ret = run_git(
             "rev-parse",
             "--verify",
-            "%s^{commit}" % (committish,),
+            f"{committish}^{{commit}}",
             cwd=cwd,
             check=False,
         )

@@ -1117,7 +1015,7 @@ def resolve_repo_rev(cwd, committish):
         rev = resolve("refs/remotes/origin/%s" % committish)
         if rev is None:
             print(
-                'repo: error: unknown branch "%s"' % (committish,),
+                f'repo: error: unknown branch "{committish}"',
                 file=sys.stderr,
             )
             raise CloneFailure()

@@ -1130,7 +1028,8 @@ def resolve_repo_rev(cwd, committish):
         rev = resolve(remote_ref)
         if rev is None:
             print(
-                'repo: error: unknown tag "%s"' % (committish,), file=sys.stderr
+                f'repo: error: unknown tag "{committish}"',
+                file=sys.stderr,
             )
             raise CloneFailure()
         return (remote_ref, rev)

@@ -1138,12 +1037,12 @@ def resolve_repo_rev(cwd, committish):
     # See if it's a short branch name.
     rev = resolve("refs/remotes/origin/%s" % committish)
     if rev:
-        return ("refs/heads/%s" % (committish,), rev)
+        return (f"refs/heads/{committish}", rev)

     # See if it's a tag.
-    rev = resolve("refs/tags/%s" % committish)
+    rev = resolve(f"refs/tags/{committish}")
     if rev:
-        return ("refs/tags/%s" % (committish,), rev)
+        return (f"refs/tags/{committish}", rev)

     # See if it's a commit.
     rev = resolve(committish)

@@ -1152,7 +1051,8 @@ def resolve_repo_rev(cwd, committish):

     # Give up!
     print(
-        'repo: error: unable to resolve "%s"' % (committish,), file=sys.stderr
+        f'repo: error: unable to resolve "{committish}"',
+        file=sys.stderr,
     )
     raise CloneFailure()

@@ -1168,8 +1068,8 @@ def verify_rev(cwd, remote_ref, rev, quiet):
     if not quiet:
         print(file=sys.stderr)
         print(
-            "warning: '%s' is not signed; falling back to signed release '%s'"
-            % (remote_ref, cur),
+            f"warning: '{remote_ref}' is not signed; "
+            f"falling back to signed release '{cur}'",
             file=sys.stderr,
         )
         print(file=sys.stderr)

@@ -1219,10 +1119,10 @@ class _Options:
 def _ExpandAlias(name):
     """Look up user registered aliases."""
     # We don't resolve aliases for existing subcommands. This matches git.
-    if name in {"gitc-init", "help", "init"}:
+    if name in {"help", "init"}:
         return name, []

-    alias = _GetRepoConfig("alias.%s" % (name,))
+    alias = _GetRepoConfig(f"alias.{name}")
     if alias is None:
         return name, []

@@ -1278,7 +1178,6 @@ class Requirements:
             with open(path, "rb") as f:
                 data = f.read()
         except OSError:
-            # NB: EnvironmentError is used for Python 2 & 3 compatibility.
             # If we couldn't open the file, assume it's an old source tree.
             return None

@@ -1298,13 +1197,13 @@ class Requirements:

         return cls(json_data)

-    def _get_soft_ver(self, pkg):
+    def get_soft_ver(self, pkg):
         """Return the soft version for |pkg| if it exists."""
-        return self.requirements.get(pkg, {}).get("soft", ())
+        return tuple(self.requirements.get(pkg, {}).get("soft", ()))

-    def _get_hard_ver(self, pkg):
+    def get_hard_ver(self, pkg):
         """Return the hard version for |pkg| if it exists."""
-        return self.requirements.get(pkg, {}).get("hard", ())
+        return tuple(self.requirements.get(pkg, {}).get("hard", ()))

     @staticmethod
     def _format_ver(ver):

@@ -1314,22 +1213,24 @@ class Requirements:
     def assert_ver(self, pkg, curr_ver):
         """Verify |pkg|'s |curr_ver| is new enough."""
         curr_ver = tuple(curr_ver)
-        soft_ver = tuple(self._get_soft_ver(pkg))
-        hard_ver = tuple(self._get_hard_ver(pkg))
+        soft_ver = tuple(self.get_soft_ver(pkg))
+        hard_ver = tuple(self.get_hard_ver(pkg))
         if curr_ver < hard_ver:
             print(
-                'repo: error: Your version of "%s" (%s) is unsupported; '
-                "Please upgrade to at least version %s to continue."
-                % (pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
+                f'repo: error: Your version of "{pkg}" '
+                f"({self._format_ver(curr_ver)}) is unsupported; "
+                "Please upgrade to at least version "
+                f"{self._format_ver(soft_ver)} to continue.",
                 file=sys.stderr,
             )
             sys.exit(1)

         if curr_ver < soft_ver:
             print(
-                'repo: warning: Your version of "%s" (%s) is no longer supported; '
-                "Please upgrade to at least version %s to avoid breakage."
-                % (pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
+                f'repo: error: Your version of "{pkg}" '
+                f"({self._format_ver(curr_ver)}) is no longer supported; "
+                "Please upgrade to at least version "
+                f"{self._format_ver(soft_ver)} to continue.",
                 file=sys.stderr,
             )

@@ -1346,10 +1247,6 @@ class Requirements:


 def _Usage():
-    gitc_usage = ""
-    if get_gitc_manifest_dir():
-        gitc_usage = "  gitc-init Initialize a GITC Client.\n"
-
     print(
         """usage: repo COMMAND [ARGS]

@@ -1358,9 +1255,7 @@ repo is not yet installed. Use "repo init" to install it here.
 The most commonly used repo commands are:

   init      Install repo in the current working directory
-"""
-        + gitc_usage
-        + """  help      Display detailed help on a command
+  help      Display detailed help on a command

 For access to the full online help, install repo ("repo init").
 """

@@ -1371,8 +1266,8 @@ For access to the full online help, install repo ("repo init").

 def _Help(args):
     if args:
-        if args[0] in {"init", "gitc-init"}:
-            parser = GetParser(gitc_init=args[0] == "gitc-init")
+        if args[0] in {"init"}:
+            parser = GetParser()
             parser.print_help()
             sys.exit(0)
         else:

@@ -1389,21 +1284,16 @@ def _Help(args):

 def _Version():
     """Show version information."""
+    git_version = ParseGitVersion()
     print("<repo not installed>")
-    print("repo launcher version %s" % (".".join(str(x) for x in VERSION),))
-    print(" (from %s)" % (__file__,))
-    print("git %s" % (ParseGitVersion().full,))
-    print("Python %s" % sys.version)
+    print(f"repo launcher version {'.'.join(str(x) for x in VERSION)}")
+    print(f" (from {__file__})")
+    print(f"git {git_version.full}" if git_version else "git not installed")
+    print(f"Python {sys.version}")
     uname = platform.uname()
-    if sys.version_info.major < 3:
-        # Python 3 returns a named tuple, but Python 2 is simpler.
-        print(uname)
-    else:
-        print("OS %s %s (%s)" % (uname.system, uname.release, uname.version))
-        print(
-            "CPU %s (%s)"
-            % (uname.machine, uname.processor if uname.processor else "unknown")
-        )
+    print(f"OS {uname.system} {uname.release} ({uname.version})")
+    processor = uname.processor if uname.processor else "unknown"
+    print(f"CPU {uname.machine} ({processor})")
     print("Bug reports:", BUG_URL)
     sys.exit(0)

@@ -1431,11 +1321,11 @@ def _RunSelf(wrapper_path):
     my_main = os.path.join(my_dir, "main.py")
     my_git = os.path.join(my_dir, ".git")

-    if os.path.isfile(my_main) and os.path.isdir(my_git):
+    if os.path.isfile(my_main):
         for name in ["git_config.py", "project.py", "subcmds"]:
             if not os.path.exists(os.path.join(my_dir, name)):
                 return None, None
-        return my_main, my_git
+        return my_main, my_git if os.path.isdir(my_git) else None
     return None, None

@@ -1466,23 +1356,11 @@ def main(orig_args):
     # We run this early as we run some git commands ourselves.
     SetGitTrace2ParentSid()

-    repo_main, rel_repo_dir = None, None
-    # Don't use the local repo copy, make sure to switch to the gitc client first.
-    if cmd != "gitc-init":
-        repo_main, rel_repo_dir = _FindRepo()
+    repo_main, rel_repo_dir = _FindRepo()

     wrapper_path = os.path.abspath(__file__)
     my_main, my_git = _RunSelf(wrapper_path)

-    cwd = os.getcwd()
-    if get_gitc_manifest_dir() and cwd.startswith(get_gitc_manifest_dir()):
-        print(
-            "error: repo cannot be used in the GITC local manifest directory."
-            "\nIf you want to work on this GITC client please rerun this "
-            "command from the corresponding client under /gitc/",
-            file=sys.stderr,
-        )
-        sys.exit(1)
     if not repo_main:
         # Only expand aliases here since we'll be parsing the CLI ourselves.
         # If we had repo_main, alias expansion would happen in main.py.

@@ -1497,11 +1375,11 @@ def main(orig_args):
             _Version()
         if not cmd:
             _NotInstalled()
-        if cmd == "init" or cmd == "gitc-init":
+        if cmd == "init":
             if my_git:
                 _SetDefaultsTo(my_git)
             try:
-                _Init(args, gitc_init=(cmd == "gitc-init"))
+                _Init(args)
             except CloneFailure:
                 path = os.path.join(repodir, S_repo)
                 print(

@@ -1527,6 +1405,14 @@ def main(orig_args):
         if reqs:
             reqs.assert_all()

+    # Python 3.11 introduces PYTHONSAFEPATH and the -P flag which, if enabled,
+    # does not prepend the script's directory to sys.path by default.
+    # repo relies on this import path, so add directory of REPO_MAIN to
+    # PYTHONPATH so that this continues to work when PYTHONSAFEPATH is enabled.
+    python_paths = os.environ.get("PYTHONPATH", "").split(os.pathsep)
+    new_python_paths = [os.path.join(rel_repo_dir, S_repo)] + python_paths
+    os.environ["PYTHONPATH"] = os.pathsep.join(new_python_paths)
+
     ver_str = ".".join(map(str, VERSION))
     me = [
         sys.executable,
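
For reference, the re-exec fallback in check_python_version() above simply probes interpreter names upward from the soft minimum before giving up. A standalone sketch of the candidate list it walks (names only; no exec happens here):

    MIN_PYTHON_VERSION_SOFT = (3, 6)

    min_major, min_minor = MIN_PYTHON_VERSION_SOFT
    candidates = [f"python{min_major}.{min_minor + inc}" for inc in range(0, 10)]
    print(candidates)  # ['python3.6', 'python3.7', ..., 'python3.15']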

@@ -39,8 +39,8 @@ class _LogColoring(Coloring):

     def __init__(self, config):
         super().__init__(config, "logs")
-        self.error = self.colorer("error", fg="red")
-        self.warning = self.colorer("warn", fg="yellow")
+        self.error = self.nofmt_colorer("error", fg="red")
+        self.warning = self.nofmt_colorer("warn", fg="yellow")
         self.levelMap = {
             "WARNING": self.warning,
             "ERROR": self.error,

@@ -77,6 +77,7 @@ class RepoLogger(logging.Logger):

         if not err.aggregate_errors:
             self.error("Repo command failed: %s", type(err).__name__)
+            self.error("\t%s", str(err))
             return

         self.error(

@@ -46,12 +46,14 @@

     # Supported git versions.
     #
-    # git-1.7.2 is in Debian Squeeze.
-    # git-1.7.9 is in Ubuntu Precise.
     # git-1.9.1 is in Ubuntu Trusty.
-    # git-1.7.10 is in Debian Wheezy.
+    # git-2.1.4 is in Debian Jessie.
+    # git-2.7.4 is in Ubuntu Xenial.
+    # git-2.11.0 is in Debian Stretch.
+    # git-2.17.0 is in Ubuntu Bionic.
+    # git-2.20.1 is in Debian Buster.
     "git": {
-        "hard": [1, 7, 2],
-        "soft": [1, 9, 1]
+        "hard": [1, 9, 1],
+        "soft": [2, 7, 4]
     }
 }
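
The hard/soft pairs above are compared as plain tuples by the launcher's Requirements.assert_ver(): below the hard floor repo refuses to run, between the two thresholds it keeps working but prints a notice. A quick illustration (the "current" git version here is an arbitrary example):

    hard = (1, 9, 1)
    soft = (2, 7, 4)
    current = (2, 1, 4)  # e.g. an older distro git

    print(current < hard)  # False: not rejected outright
    print(current < soft)  # True: repo continues but warns about the old version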

run_tests (62 lines changed)

@@ -15,16 +15,57 @@

 """Wrapper to run linters and pytest with the right settings."""

+import functools
 import os
 import subprocess
 import sys
+from typing import List
-import pytest


 ROOT_DIR = os.path.dirname(os.path.realpath(__file__))


+@functools.lru_cache()
+def is_ci() -> bool:
+    """Whether we're running in our CI system."""
+    return os.getenv("LUCI_CQ") == "yes"
+
+
+def run_pytest(argv: List[str]) -> int:
+    """Returns the exit code from pytest."""
+    if is_ci():
+        argv = ["-m", "not skip_cq"] + argv
+
+    return subprocess.run(
+        [sys.executable, "-m", "pytest"] + argv,
+        check=False,
+        cwd=ROOT_DIR,
+    ).returncode
+
+
+def run_pytest_py38(argv: List[str]) -> int:
+    """Returns the exit code from pytest under Python 3.8."""
+    if is_ci():
+        argv = ["-m", "not skip_cq"] + argv
+
+    try:
+        return subprocess.run(
+            [
+                "vpython3",
+                "-vpython-spec",
+                "run_tests.vpython3.8",
+                "-m",
+                "pytest",
+            ]
+            + argv,
+            check=False,
+            cwd=ROOT_DIR,
+        ).returncode
+    except FileNotFoundError:
+        # Skip if the user doesn't have vpython from depot_tools.
+        return 0
+
+
 def run_black():
     """Returns the exit code from black."""
     # Black by default only matches .py files. We have to list standalone

@@ -32,37 +73,46 @@ def run_black():
     extra_programs = [
         "repo",
         "run_tests",
+        "release/update-hooks",
         "release/update-manpages",
     ]
     return subprocess.run(
         [sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs,
         check=False,
+        cwd=ROOT_DIR,
     ).returncode


 def run_flake8():
     """Returns the exit code from flake8."""
     return subprocess.run(
-        [sys.executable, "-m", "flake8", ROOT_DIR], check=False
+        [sys.executable, "-m", "flake8", ROOT_DIR],
+        check=False,
+        cwd=ROOT_DIR,
     ).returncode


 def run_isort():
     """Returns the exit code from isort."""
     return subprocess.run(
-        [sys.executable, "-m", "isort", "--check", ROOT_DIR], check=False
+        [sys.executable, "-m", "isort", "--check", ROOT_DIR],
+        check=False,
+        cwd=ROOT_DIR,
     ).returncode


 def main(argv):
     """The main entry."""
     checks = (
-        lambda: pytest.main(argv),
+        functools.partial(run_pytest, argv),
+        functools.partial(run_pytest_py38, argv),
         run_black,
         run_flake8,
         run_isort,
     )
-    return 0 if all(not c() for c in checks) else 1
+    # Run all the tests all the time to get full feedback. Don't exit on the
+    # first error as that makes it more difficult to iterate in the CQ.
+    return 1 if sum(c() for c in checks) else 0


 if __name__ == "__main__":

@@ -5,97 +5,92 @@
 # List of available wheels:
 # https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md

-python_version: "3.8"
+python_version: "3.11"

 wheel: <
   name: "infra/python/wheels/pytest-py3"
-  version: "version:6.2.2"
+  version: "version:8.3.4"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/py-py2_py3"
-  version: "version:1.10.0"
+  version: "version:1.11.0"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/iniconfig-py3"
   version: "version:1.1.1"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/packaging-py3"
   version: "version:23.0"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/pluggy-py3"
-  version: "version:0.13.1"
+  version: "version:1.5.0"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/toml-py3"
   version: "version:0.10.1"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/pyparsing-py3"
   version: "version:3.0.7"
 >

-# Required by pytest==6.2.2
+# Required by pytest==8.3.4
 wheel: <
   name: "infra/python/wheels/attrs-py2_py3"
   version: "version:21.4.0"
 >

-# Required by packaging==16.8
-wheel: <
-  name: "infra/python/wheels/six-py2_py3"
-  version: "version:1.16.0"
->
+# NB: Keep in sync with constraints.txt.

 wheel: <
   name: "infra/python/wheels/black-py3"
-  version: "version:23.1.0"
+  version: "version:25.1.0"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/mypy-extensions-py3"
   version: "version:0.4.3"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/tomli-py3"
   version: "version:2.0.1"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/platformdirs-py3"
   version: "version:2.5.2"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/pathspec-py3"
   version: "version:0.9.0"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/typing-extensions-py3"
   version: "version:4.3.0"
 >

-# Required by black==23.1.0
+# Required by black==25.1.0
 wheel: <
   name: "infra/python/wheels/click-py3"
   version: "version:8.0.3"

run_tests.vpython3.8 (new file, 67 lines)

# This is a vpython "spec" file.
#
# Read more about `vpython` and how to modify this file here:
# https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md
# List of available wheels:
# https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md

python_version: "3.8"

wheel: <
  name: "infra/python/wheels/pytest-py3"
  version: "version:8.3.4"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/py-py2_py3"
  version: "version:1.11.0"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/iniconfig-py3"
  version: "version:1.1.1"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/packaging-py3"
  version: "version:23.0"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/pluggy-py3"
  version: "version:1.5.0"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/toml-py3"
  version: "version:0.10.1"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/tomli-py3"
  version: "version:2.1.0"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/pyparsing-py3"
  version: "version:3.0.7"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/attrs-py2_py3"
  version: "version:21.4.0"
>

# Required by pytest==8.3.4
wheel: <
  name: "infra/python/wheels/exceptiongroup-py3"
  version: "version:1.1.2"
>

ssh.py (68 lines changed)

@@ -24,6 +24,7 @@ import sys
 import tempfile
 import time
 
+from git_command import git
 import platform_utils
 from repo_trace import Trace
 
@@ -57,8 +58,12 @@ def version():
     except FileNotFoundError:
         print("fatal: ssh not installed", file=sys.stderr)
         sys.exit(1)
-    except subprocess.CalledProcessError:
-        print("fatal: unable to detect ssh version", file=sys.stderr)
+    except subprocess.CalledProcessError as e:
+        print(
+            "fatal: unable to detect ssh version"
+            f" (code={e.returncode}, output={e.stdout})",
+            file=sys.stderr,
+        )
         sys.exit(1)
 
 
@@ -165,7 +170,7 @@ class ProxyManager:
        # Check to see whether we already think that the master is running; if
        # we think it's already running, return right away.
        if port is not None:
-           key = "%s:%s" % (host, port)
+           key = f"{host}:{port}"
        else:
            key = host
 
@@ -207,7 +212,33 @@ class ProxyManager:
            # and print to the log there.
            pass
 
-       command = command_base[:1] + ["-M", "-N"] + command_base[1:]
+       # Git protocol V2 is a new feature in git 2.18.0, made default in
+       # git 2.26.0
+       # It is faster and more efficient than V1.
+       # To enable it when using SSH, the environment variable GIT_PROTOCOL
+       # must be set in the SSH side channel when establishing the connection
+       # to the git server.
+       # See https://git-scm.com/docs/protocol-v2#_ssh_and_file_transport
+       # Normally git does this by itself. But here, where the SSH connection
+       # is established manually over ControlMaster via the repo-tool, it must
+       # be passed in explicitly instead.
+       # Based on https://git-scm.com/docs/gitprotocol-pack#_extra_parameters,
+       # GIT_PROTOCOL is considered an "Extra Parameter" and must be ignored
+       # by servers that do not understand it. This means that it is safe to
+       # set it even when connecting to older servers.
+       # It should also be safe to set the environment variable for older
+       # local git versions, since it is only part of the ssh side channel.
+       git_protocol_version = _get_git_protocol_version()
+       ssh_git_protocol_args = [
+           "-o",
+           f"SetEnv GIT_PROTOCOL=version={git_protocol_version}",
+       ]
+
+       command = (
+           command_base[:1]
+           + ["-M", "-N", *ssh_git_protocol_args]
+           + command_base[1:]
+       )
        p = None
        try:
            with Trace("Call to ssh: %s", " ".join(command)):
@@ -289,3 +320,32 @@ class ProxyManager:
            tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
        )
        return self._sock_path
+
+
+@functools.lru_cache(maxsize=1)
+def _get_git_protocol_version() -> str:
+    """Return the git protocol version.
+
+    The version is found by first reading the global git config.
+    If no git config for protocol version exists, try to deduce the default
+    protocol version based on the git version.
+
+    See https://git-scm.com/docs/gitprotocol-v2 for details.
+    """
+    try:
+        return subprocess.check_output(
+            ["git", "config", "--get", "--global", "protocol.version"],
+            encoding="utf-8",
+            stderr=subprocess.PIPE,
+        ).strip()
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 1:
+            # Exit code 1 means that the git config key was not found.
+            # Try to imitate the defaults that git would have used.
+            git_version = git.version_tuple()
+            if git_version >= (2, 26, 0):
+                # Since git version 2.26, protocol v2 is the default.
+                return "2"
+            return "1"
+        # Other exit codes indicate error with reading the config.
+        raise
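
As a reading aid for the ssh.py hunks above: the new code passes GIT_PROTOCOL through the SSH side channel by adding a SetEnv client option to the ControlMaster command (SetEnv is only understood by reasonably recent OpenSSH clients). The following is a minimal, illustrative sketch of the command line that results; the hostname and ControlPath are made-up placeholders, not values used by repo itself.

    # Illustrative sketch only; host and control path are invented.
    git_protocol_version = "2"  # what _get_git_protocol_version() returns on git >= 2.26
    command_base = [
        "ssh",
        "-oControlPath=/tmp/ssh-example/master-git.example.com",
        "git.example.com",
    ]
    ssh_git_protocol_args = [
        "-o",
        f"SetEnv GIT_PROTOCOL=version={git_protocol_version}",
    ]
    command = (
        command_base[:1]
        + ["-M", "-N", *ssh_git_protocol_args]
        + command_base[1:]
    )
    print(" ".join(command))
    # -> ssh -M -N -o SetEnv GIT_PROTOCOL=version=2 -oControlPath=... git.example.com
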
subcmds/__init__.py

@@ -37,9 +37,7 @@ for py in os.listdir(my_dir):
         try:
             cmd = getattr(mod, clsn)
         except AttributeError:
-            raise SyntaxError(
-                "%s/%s does not define class %s" % (__name__, py, clsn)
-            )
+            raise SyntaxError(f"{__name__}/{py} does not define class {clsn}")
 
         name = name.replace("_", "-")
         cmd.NAME = name

subcmds/abandon.py

@@ -70,8 +70,10 @@ It is equivalent to "git branch -D <branchname>".
         else:
             args.insert(0, "'All local branches'")
 
-    def _ExecuteOne(self, all_branches, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, all_branches, nb, project_idx):
         """Abandon one project."""
+        project = cls.get_parallel_context()["projects"][project_idx]
         if all_branches:
             branches = project.GetBranches()
         else:
@@ -89,7 +91,7 @@ It is equivalent to "git branch -D <branchname>".
             if status is not None:
                 ret[name] = status
 
-        return (ret, project, errors)
+        return (ret, project_idx, errors)
 
     def Execute(self, opt, args):
         nb = args[0].split()
@@ -102,7 +104,8 @@ It is equivalent to "git branch -D <branchname>".
         _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
 
         def _ProcessResults(_pool, pm, states):
-            for results, project, errors in states:
+            for results, project_idx, errors in states:
+                project = all_projects[project_idx]
                 for branch, status in results.items():
                     if status:
                         success[branch].append(project)
@@ -111,14 +114,17 @@ It is equivalent to "git branch -D <branchname>".
                 aggregate_errors.extend(errors)
                 pm.update(msg="")
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(self._ExecuteOne, opt.all, nb),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 output=Progress(
-                    "Abandon %s" % (nb,), len(all_projects), quiet=opt.quiet
+                    f"Abandon {nb}", len(all_projects), quiet=opt.quiet
                 ),
+                chunksize=1,
             )
 
         width = max(
@@ -152,4 +158,4 @@ It is equivalent to "git branch -D <branchname>".
                         _RelPath(p) for p in success[br]
                     )
                 )
-                print("%s%s| %s\n" % (br, " " * (width - len(br)), result))
+                print(f"{br}{' ' * (width - len(br))}| {result}\n")
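
The abandon hunks above are the first instance of a pattern repeated in the branches, checkout, diff, forall, grep, prune, start and status diffs that follow: the per-project worker becomes a classmethod that receives only a project index, and the list of Project objects is published once through the parallel context instead of being pickled into every task. The condensed sketch below shows the shape of that pattern; the helper names (ParallelContext, get_parallel_context, ExecuteInParallel, GetProjects, PruneHeads) are taken from these hunks, while the command class itself is a made-up example rather than actual repo code.

    from command import Command
    from command import DEFAULT_LOCAL_JOBS


    class ExampleCommand(Command):
        PARALLEL_JOBS = DEFAULT_LOCAL_JOBS

        @classmethod
        def _ExecuteOne(cls, project_idx):
            # The worker receives an int; the Project is looked up in the
            # shared parallel context rather than being pickled per task.
            project = cls.get_parallel_context()["projects"][project_idx]
            return (project_idx, project.PruneHeads())

        def Execute(self, opt, args):
            projects = self.GetProjects(args)

            def _ProcessResults(_pool, _output, results):
                return list(results)

            with self.ParallelContext():
                self.get_parallel_context()["projects"] = projects
                return self.ExecuteInParallel(
                    opt.jobs,
                    self._ExecuteOne,
                    range(len(projects)),  # indices, not Project objects
                    callback=_ProcessResults,
                    chunksize=1,
                )
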
subcmds/branches.py

@@ -98,6 +98,22 @@ is shown, then the branch appears in all projects.
     """
     PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
 
+    @classmethod
+    def _ExpandProjectToBranches(cls, project_idx):
+        """Expands a project into a list of branch names & associated info.
+
+        Args:
+            project_idx: project.Project index
+
+        Returns:
+            List[Tuple[str, git_config.Branch, int]]
+        """
+        branches = []
+        project = cls.get_parallel_context()["projects"][project_idx]
+        for name, b in project.GetBranches().items():
+            branches.append((name, b, project_idx))
+        return branches
+
     def Execute(self, opt, args):
         projects = self.GetProjects(
             args, all_manifests=not opt.this_manifest_only
@@ -107,15 +123,18 @@ is shown, then the branch appears in all projects.
         project_cnt = len(projects)
 
         def _ProcessResults(_pool, _output, results):
-            for name, b in itertools.chain.from_iterable(results):
+            for name, b, project_idx in itertools.chain.from_iterable(results):
+                b.project = projects[project_idx]
                 if name not in all_branches:
                     all_branches[name] = BranchInfo(name)
                 all_branches[name].add(b)
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             self.ExecuteInParallel(
                 opt.jobs,
-                expand_project_to_branches,
-                projects,
+                self._ExpandProjectToBranches,
+                range(len(projects)),
                 callback=_ProcessResults,
             )
 
@@ -148,7 +167,10 @@ is shown, then the branch appears in all projects.
             else:
                 published = " "
 
-            hdr("%c%c %-*s" % (current, published, width, name))
+            # A branch name can contain a percent sign, so we need to escape it.
+            # Escape after f-string formatting to properly account for leading
+            # spaces.
+            hdr(f"{current}{published} {name:{width}}".replace("%", "%%"))
             out.write(" |")
 
             _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
@@ -174,7 +196,7 @@ is shown, then the branch appears in all projects.
                     if _RelPath(p) not in have:
                         paths.append(_RelPath(p))
 
-                s = " %s %s" % (in_type, ", ".join(paths))
+                s = f" {in_type} {', '.join(paths)}"
                 if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
                     fmt = out.current if i.IsCurrent else fmt
                     fmt(s)
@@ -191,19 +213,3 @@ is shown, then the branch appears in all projects.
             else:
                 out.write(" in all projects")
             out.nl()
-
-
-def expand_project_to_branches(project):
-    """Expands a project into a list of branch names & associated information.
-
-    Args:
-        project: project.Project
-
-    Returns:
-        List[Tuple[str, git_config.Branch]]
-    """
-    branches = []
-    for name, b in project.GetBranches().items():
-        b.project = project
-        branches.append((name, b))
-    return branches

subcmds/checkout.py

@@ -20,7 +20,6 @@ from command import DEFAULT_LOCAL_JOBS
 from error import GitError
 from error import RepoExitError
 from progress import Progress
-from project import Project
 from repo_logging import RepoLogger
 
 
@@ -30,7 +29,7 @@ logger = RepoLogger(__file__)
 class CheckoutBranchResult(NamedTuple):
     # Whether the Project is on the branch (i.e. branch exists and no errors)
     result: bool
-    project: Project
+    project_idx: int
     error: Exception
 
 
@@ -62,15 +61,17 @@ The command is equivalent to:
         if not args:
             self.Usage()
 
-    def _ExecuteOne(self, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, nb, project_idx):
         """Checkout one project."""
         error = None
         result = None
+        project = cls.get_parallel_context()["projects"][project_idx]
         try:
             result = project.CheckoutBranch(nb)
         except GitError as e:
             error = e
-        return CheckoutBranchResult(result, project, error)
+        return CheckoutBranchResult(result, project_idx, error)
 
     def Execute(self, opt, args):
         nb = args[0]
@@ -83,20 +84,23 @@ The command is equivalent to:
 
         def _ProcessResults(_pool, pm, results):
             for result in results:
+                project = all_projects[result.project_idx]
                 if result.error is not None:
                     err.append(result.error)
-                    err_projects.append(result.project)
+                    err_projects.append(project)
                 elif result.result:
-                    success.append(result.project)
+                    success.append(project)
                 pm.update(msg="")
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(self._ExecuteOne, nb),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 output=Progress(
-                    "Checkout %s" % (nb,), len(all_projects), quiet=opt.quiet
+                    f"Checkout {nb}", len(all_projects), quiet=opt.quiet
                 ),
             )
 

subcmds/diff.py

@@ -40,7 +40,8 @@ to the Unix 'patch' command.
             help="paths are relative to the repository root",
         )
 
-    def _ExecuteOne(self, absolute, local, project):
+    @classmethod
+    def _ExecuteOne(cls, absolute, local, project_idx):
         """Obtains the diff for a specific project.
 
         Args:
@@ -48,12 +49,13 @@ to the Unix 'patch' command.
             local: a boolean, if True, the path is relative to the local
                 (sub)manifest.  If false, the path is relative to the outermost
                 manifest.
-            project: Project to get status of.
+            project_idx: Project index to get status of.
 
         Returns:
             The status of the project.
         """
         buf = io.StringIO()
+        project = cls.get_parallel_context()["projects"][project_idx]
         ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
         return (ret, buf.getvalue())
 
@@ -71,12 +73,15 @@ to the Unix 'patch' command.
                 ret = 1
             return ret
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             return self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(
                     self._ExecuteOne, opt.absolute, opt.this_manifest_only
                 ),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 ordered=True,
+                chunksize=1,
             )

subcmds/diffmanifests.py

@@ -87,25 +87,17 @@ synced and their revisions won't be found.
     def _printRawDiff(self, diff, pretty_format=None, local=False):
         _RelPath = lambda p: p.RelPath(local=local)
         for project in diff["added"]:
-            self.printText(
-                "A %s %s" % (_RelPath(project), project.revisionExpr)
-            )
+            self.printText(f"A {_RelPath(project)} {project.revisionExpr}")
             self.out.nl()
 
         for project in diff["removed"]:
-            self.printText(
-                "R %s %s" % (_RelPath(project), project.revisionExpr)
-            )
+            self.printText(f"R {_RelPath(project)} {project.revisionExpr}")
             self.out.nl()
 
         for project, otherProject in diff["changed"]:
             self.printText(
-                "C %s %s %s"
-                % (
-                    _RelPath(project),
-                    project.revisionExpr,
-                    otherProject.revisionExpr,
-                )
+                f"C {_RelPath(project)} {project.revisionExpr} "
+                f"{otherProject.revisionExpr}"
             )
             self.out.nl()
             self._printLogs(
@@ -118,12 +110,8 @@ synced and their revisions won't be found.
 
         for project, otherProject in diff["unreachable"]:
             self.printText(
-                "U %s %s %s"
-                % (
-                    _RelPath(project),
-                    project.revisionExpr,
-                    otherProject.revisionExpr,
-                )
+                f"U {_RelPath(project)} {project.revisionExpr} "
+                f"{otherProject.revisionExpr}"
             )
             self.out.nl()
 
@@ -245,9 +233,9 @@ synced and their revisions won't be found.
             )
             self.printRevision = self.out.nofmt_printer("revision", fg="yellow")
         else:
-            self.printProject = (
-                self.printAdded
-            ) = self.printRemoved = self.printRevision = self.printText
+            self.printProject = self.printAdded = self.printRemoved = (
+                self.printRevision
+            ) = self.printText
 
         manifest1 = RepoClient(self.repodir)
         manifest1.Override(args[0], load_local_manifests=False)

subcmds/forall.py

@@ -15,7 +15,6 @@
 import errno
 import functools
 import io
-import multiprocessing
 import os
 import re
 import signal
@@ -26,7 +25,6 @@ from color import Coloring
 from command import Command
 from command import DEFAULT_LOCAL_JOBS
 from command import MirrorSafeCommand
-from command import WORKER_BATCH_SIZE
 from error import ManifestInvalidRevisionError
 from repo_logging import RepoLogger
 
@@ -241,7 +239,6 @@ without iterating through the remaining projects.
             cmd.insert(cmd.index(cn) + 1, "--color")
 
         mirror = self.manifest.IsMirror
-        rc = 0
 
         smart_sync_manifest_name = "smart_sync_override.xml"
         smart_sync_manifest_path = os.path.join(
@@ -264,18 +261,10 @@ without iterating through the remaining projects.
 
         os.environ["REPO_COUNT"] = str(len(projects))
 
-        try:
-            config = self.manifest.manifestProject.config
-            with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
-                results_it = pool.imap(
-                    functools.partial(
-                        DoWorkWrapper, mirror, opt, cmd, shell, config
-                    ),
-                    enumerate(projects),
-                    chunksize=WORKER_BATCH_SIZE,
-                )
+        def _ProcessResults(_pool, _output, results):
+            rc = 0
             first = True
-            for r, output in results_it:
+            for r, output in results:
                 if output:
                     if first:
                         first = False
@@ -290,9 +279,26 @@ without iterating through the remaining projects.
                 rc = rc or r
                 if r != 0 and opt.abort_on_errors:
                     raise Exception("Aborting due to previous error")
+            return rc
+
+        try:
+            config = self.manifest.manifestProject.config
+            with self.ParallelContext():
+                self.get_parallel_context()["projects"] = projects
+                rc = self.ExecuteInParallel(
+                    opt.jobs,
+                    functools.partial(
+                        self.DoWorkWrapper, mirror, opt, cmd, shell, config
+                    ),
+                    range(len(projects)),
+                    callback=_ProcessResults,
+                    ordered=True,
+                    initializer=self.InitWorker,
+                    chunksize=1,
+                )
         except (KeyboardInterrupt, WorkerKeyboardInterrupt):
             # Catch KeyboardInterrupt raised inside and outside of workers
-            rc = rc or errno.EINTR
+            rc = errno.EINTR
         except Exception as e:
             # Catch any other exceptions raised
             logger.error(
@@ -300,20 +306,16 @@ without iterating through the remaining projects.
                 type(e).__name__,
                 e,
             )
-            rc = rc or getattr(e, "errno", 1)
+            rc = getattr(e, "errno", 1)
         if rc != 0:
             sys.exit(rc)
 
-
-class WorkerKeyboardInterrupt(Exception):
-    """Keyboard interrupt exception for worker processes."""
-
-
-def InitWorker():
-    signal.signal(signal.SIGINT, signal.SIG_IGN)
+    @classmethod
+    def InitWorker(cls):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
 
-
-def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
+    @classmethod
+    def DoWorkWrapper(cls, mirror, opt, cmd, shell, config, project_idx):
     """A wrapper around the DoWork() method.
 
     Catch the KeyboardInterrupt exceptions here and re-raise them as a
@@ -321,14 +323,18 @@ def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
     with stacktraces and making the parent hang indefinitely.
 
     """
-    cnt, project = args
+        project = cls.get_parallel_context()["projects"][project_idx]
     try:
-        return DoWork(project, mirror, opt, cmd, shell, cnt, config)
+            return DoWork(project, mirror, opt, cmd, shell, project_idx, config)
     except KeyboardInterrupt:
         print("%s: Worker interrupted" % project.name)
         raise WorkerKeyboardInterrupt()
 
 
+class WorkerKeyboardInterrupt(Exception):
+    """Keyboard interrupt exception for worker processes."""
+
+
 def DoWork(project, mirror, opt, cmd, shell, cnt, config):
     env = os.environ.copy()
 
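
The forall rework above folds the hand-rolled multiprocessing.Pool into the same ExecuteInParallel helper while keeping two details of the old behaviour: results are reduced to a single return code by the _ProcessResults callback, and initializer=self.InitWorker still masks SIGINT inside workers so that Ctrl-C is handled once by the parent. The following is a standalone illustration of that initializer idea using plain multiprocessing and a made-up worker function; it is not repo code.

    import multiprocessing
    import signal


    def _init_worker():
        # Workers ignore SIGINT; the parent receives KeyboardInterrupt once
        # and can shut the pool down cleanly instead of every child dumping
        # a stack trace.
        signal.signal(signal.SIGINT, signal.SIG_IGN)


    def _work(idx):  # hypothetical per-project task
        return idx * idx


    if __name__ == "__main__":
        with multiprocessing.Pool(4, initializer=_init_worker) as pool:
            try:
                print(sum(pool.imap(_work, range(8), chunksize=1)))
            except KeyboardInterrupt:
                pool.terminate()
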
subcmds/gc.py (new file, 294 lines)

@@ -0,0 +1,294 @@
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from typing import List, Set

from command import Command
from git_command import GitCommand
import platform_utils
from progress import Progress
from project import Project


class Gc(Command):
    COMMON = True
    helpSummary = "Cleaning up internal repo and Git state."
    helpUsage = """
%prog
"""

    def _Options(self, p):
        p.add_option(
            "-n",
            "--dry-run",
            dest="dryrun",
            default=False,
            action="store_true",
            help="do everything except actually delete",
        )
        p.add_option(
            "-y",
            "--yes",
            default=False,
            action="store_true",
            help="answer yes to all safe prompts",
        )
        p.add_option(
            "--repack",
            default=False,
            action="store_true",
            help="repack all projects that use partial clone with "
            "filter=blob:none",
        )

    def _find_git_to_delete(
        self, to_keep: Set[str], start_dir: str
    ) -> Set[str]:
        """Searches no longer needed ".git" directories.

        Scans the file system starting from `start_dir` and removes all
        directories that end with ".git" that are not in the `to_keep` set.
        """
        to_delete = set()
        for root, dirs, _ in platform_utils.walk(start_dir):
            for directory in dirs:
                if not directory.endswith(".git"):
                    continue

                path = os.path.join(root, directory)
                if path not in to_keep:
                    to_delete.add(path)

        return to_delete

    def delete_unused_projects(self, projects: List[Project], opt):
        print(f"Scanning filesystem under {self.repodir}...")

        project_paths = set()
        project_object_paths = set()

        for project in projects:
            project_paths.add(project.gitdir)
            project_object_paths.add(project.objdir)

        to_delete = self._find_git_to_delete(
            project_paths, os.path.join(self.repodir, "projects")
        )

        to_delete.update(
            self._find_git_to_delete(
                project_object_paths,
                os.path.join(self.repodir, "project-objects"),
            )
        )

        if not to_delete:
            print("Nothing to clean up.")
            return 0

        print("Identified the following projects are no longer used:")
        print("\n".join(to_delete))
        print("")
        if not opt.yes:
            print(
                "If you proceed, any local commits in those projects will be "
                "destroyed!"
            )
            ask = input("Proceed? [y/N] ")
            if ask.lower() != "y":
                return 1

        pm = Progress(
            "Deleting",
            len(to_delete),
            delay=False,
            quiet=opt.quiet,
            show_elapsed=True,
            elide=True,
        )

        for path in to_delete:
            if opt.dryrun:
                print(f"\nWould have deleted ${path}")
            else:
                tmp_path = os.path.join(
                    os.path.dirname(path),
                    f"to_be_deleted_{os.path.basename(path)}",
                )
                platform_utils.rename(path, tmp_path)
                platform_utils.rmtree(tmp_path)
            pm.update(msg=path)
        pm.end()

        return 0

    def _generate_promisor_files(self, pack_dir: str):
        """Generates promisor files for all pack files in the given directory.

        Promisor files are empty files with the same name as the corresponding
        pack file but with the ".promisor" extension. They are used by Git.
        """
        for root, _, files in platform_utils.walk(pack_dir):
            for file in files:
                if not file.endswith(".pack"):
                    continue
                with open(os.path.join(root, f"{file[:-4]}promisor"), "w"):
                    pass

    def repack_projects(self, projects: List[Project], opt):
        repack_projects = []
        # Find all projects eligible for repacking:
        # - can't be shared
        # - have a specific fetch filter
        for project in projects:
            if project.config.GetBoolean("extensions.preciousObjects"):
                continue
            if not project.clone_depth:
                continue
            if project.manifest.CloneFilterForDepth != "blob:none":
                continue

            repack_projects.append(project)

        if opt.dryrun:
            print(f"Would have repacked {len(repack_projects)} projects.")
            return 0

        pm = Progress(
            "Repacking (this will take a while)",
            len(repack_projects),
            delay=False,
            quiet=opt.quiet,
            show_elapsed=True,
            elide=True,
        )

        for project in repack_projects:
            pm.update(msg=f"{project.name}")

            pack_dir = os.path.join(project.gitdir, "tmp_repo_repack")
            if os.path.isdir(pack_dir):
                platform_utils.rmtree(pack_dir)
            os.mkdir(pack_dir)

            # Prepare workspace for repacking - remove all unreachable refs and
            # their objects.
            GitCommand(
                project,
                ["reflog", "expire", "--expire-unreachable=all"],
                verify_command=True,
            ).Wait()
            pm.update(msg=f"{project.name} | gc", inc=0)
            GitCommand(
                project,
                ["gc"],
                verify_command=True,
            ).Wait()

            # Get all objects that are reachable from the remote, and pack them.
            pm.update(msg=f"{project.name} | generating list of objects", inc=0)
            remote_objects_cmd = GitCommand(
                project,
                [
                    "rev-list",
                    "--objects",
                    f"--remotes={project.remote.name}",
                    "--filter=blob:none",
                    "--tags",
                ],
                capture_stdout=True,
                verify_command=True,
            )

            # Get all local objects and pack them.
            local_head_objects_cmd = GitCommand(
                project,
                ["rev-list", "--objects", "HEAD^{tree}"],
                capture_stdout=True,
                verify_command=True,
            )
            local_objects_cmd = GitCommand(
                project,
                [
                    "rev-list",
                    "--objects",
                    "--all",
                    "--reflog",
                    "--indexed-objects",
                    "--not",
                    f"--remotes={project.remote.name}",
                    "--tags",
                ],
                capture_stdout=True,
                verify_command=True,
            )

            remote_objects_cmd.Wait()

            pm.update(msg=f"{project.name} | remote repack", inc=0)
            GitCommand(
                project,
                ["pack-objects", os.path.join(pack_dir, "pack")],
                input=remote_objects_cmd.stdout,
                capture_stderr=True,
                capture_stdout=True,
                verify_command=True,
            ).Wait()

            # create promisor file for each pack file
            self._generate_promisor_files(pack_dir)

            local_head_objects_cmd.Wait()
            local_objects_cmd.Wait()

            pm.update(msg=f"{project.name} | local repack", inc=0)
            GitCommand(
                project,
                ["pack-objects", os.path.join(pack_dir, "pack")],
                input=local_head_objects_cmd.stdout + local_objects_cmd.stdout,
                capture_stderr=True,
                capture_stdout=True,
                verify_command=True,
            ).Wait()

            # Swap the old pack directory with the new one.
            platform_utils.rename(
                os.path.join(project.objdir, "objects", "pack"),
                os.path.join(project.objdir, "objects", "pack_old"),
            )
            platform_utils.rename(
                pack_dir,
                os.path.join(project.objdir, "objects", "pack"),
            )
            platform_utils.rmtree(
                os.path.join(project.objdir, "objects", "pack_old")
            )

        pm.end()
        return 0

    def Execute(self, opt, args):
        projects: List[Project] = self.GetProjects(
            args, all_manifests=not opt.this_manifest_only
        )

        ret = self.delete_unused_projects(projects, opt)
        if ret != 0:
            return ret

        if not opt.repack:
            return

        return self.repack_projects(projects, opt)

subcmds/grep.py

@@ -23,7 +23,6 @@ from error import GitError
 from error import InvalidArgumentsError
 from error import SilentRepoExitError
 from git_command import GitCommand
-from project import Project
 from repo_logging import RepoLogger
 
 
@@ -40,7 +39,7 @@ class GrepColoring(Coloring):
 class ExecuteOneResult(NamedTuple):
     """Result from an execute instance."""
 
-    project: Project
+    project_idx: int
     rc: int
     stdout: str
     stderr: str
@@ -262,8 +261,10 @@ contain a line that matches both expressions:
             help="Show only file names not containing matching lines",
         )
 
-    def _ExecuteOne(self, cmd_argv, project):
+    @classmethod
+    def _ExecuteOne(cls, cmd_argv, project_idx):
         """Process one project."""
+        project = cls.get_parallel_context()["projects"][project_idx]
         try:
             p = GitCommand(
                 project,
@@ -274,7 +275,7 @@ contain a line that matches both expressions:
                 verify_command=True,
             )
         except GitError as e:
-            return ExecuteOneResult(project, -1, None, str(e), e)
+            return ExecuteOneResult(project_idx, -1, None, str(e), e)
 
         try:
             error = None
@@ -282,10 +283,12 @@ contain a line that matches both expressions:
         except GitError as e:
             rc = 1
             error = e
-        return ExecuteOneResult(project, rc, p.stdout, p.stderr, error)
+        return ExecuteOneResult(project_idx, rc, p.stdout, p.stderr, error)
 
     @staticmethod
-    def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
+    def _ProcessResults(
+        full_name, have_rev, opt, projects, _pool, out, results
+    ):
         git_failed = False
         bad_rev = False
         have_match = False
@@ -293,9 +296,10 @@ contain a line that matches both expressions:
         errors = []
 
         for result in results:
+            project = projects[result.project_idx]
             if result.rc < 0:
                 git_failed = True
-                out.project("--- project %s ---" % _RelPath(result.project))
+                out.project("--- project %s ---" % _RelPath(project))
                 out.nl()
                 out.fail("%s", result.stderr)
                 out.nl()
@@ -311,9 +315,7 @@ contain a line that matches both expressions:
                 ):
                     bad_rev = True
                 else:
-                    out.project(
-                        "--- project %s ---" % _RelPath(result.project)
-                    )
+                    out.project("--- project %s ---" % _RelPath(project))
                     out.nl()
                     out.fail("%s", result.stderr.strip())
                     out.nl()
@@ -331,13 +333,13 @@ contain a line that matches both expressions:
                         rev, line = line.split(":", 1)
                         out.write("%s", rev)
                         out.write(":")
-                        out.project(_RelPath(result.project))
+                        out.project(_RelPath(project))
                         out.write("/")
                         out.write("%s", line)
                         out.nl()
                 elif full_name:
                     for line in r:
-                        out.project(_RelPath(result.project))
+                        out.project(_RelPath(project))
                         out.write("/")
                         out.write("%s", line)
                         out.nl()
@@ -381,15 +383,18 @@ contain a line that matches both expressions:
             cmd_argv.extend(opt.revision)
         cmd_argv.append("--")
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             git_failed, bad_rev, have_match, errors = self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(self._ExecuteOne, cmd_argv),
-                projects,
+                range(len(projects)),
                 callback=functools.partial(
-                    self._ProcessResults, full_name, have_rev, opt
+                    self._ProcessResults, full_name, have_rev, opt, projects
                 ),
                 output=out,
                 ordered=True,
+                chunksize=1,
             )
 
         if git_failed:

subcmds/help.py

@@ -150,7 +150,7 @@ Displays detailed usage information about a command.
     def _PrintAllCommandHelp(self):
         for name in sorted(all_commands):
             cmd = all_commands[name](manifest=self.manifest)
-            self._PrintCommandHelp(cmd, header_prefix="[%s] " % (name,))
+            self._PrintCommandHelp(cmd, header_prefix=f"[{name}] ")
 
     def _Options(self, p):
         p.add_option(

subcmds/info.py

@@ -97,7 +97,9 @@ class Info(PagedCommand):
         self.headtext(self.manifest.default.revisionExpr)
         self.out.nl()
         self.heading("Manifest merge branch: ")
-        self.headtext(mergeBranch)
+        # The manifest might not have a merge branch if it isn't in a git repo,
+        # e.g. if `repo init --standalone-manifest` is used.
+        self.headtext(mergeBranch or "")
         self.out.nl()
         self.heading("Manifest groups: ")
         self.headtext(manifestGroups)
@@ -248,7 +250,7 @@ class Info(PagedCommand):
 
         for commit in commits:
             split = commit.split()
-            self.text("{0:38}{1} ".format("", "-"))
+            self.text(f"{'':38}{'-'} ")
             self.sha(split[0] + " ")
             self.text(" ".join(split[1:]))
             self.out.nl()

subcmds/init.py

@@ -21,10 +21,9 @@ from command import MirrorSafeCommand
 from error import RepoUnhandledExceptionError
 from error import UpdateManifestError
 from git_command import git_require
-from git_command import MIN_GIT_VERSION_HARD
-from git_command import MIN_GIT_VERSION_SOFT
 from repo_logging import RepoLogger
 from wrapper import Wrapper
+from wrapper import WrapperDir
 
 
 logger = RepoLogger(__file__)
@@ -53,6 +52,10 @@ The optional -b argument can be used to select the manifest branch
 to checkout and use. If no branch is specified, the remote's default
 branch is used. This is equivalent to using -b HEAD.
 
+The optional --manifest-upstream-branch argument can be used when a commit is
+provided to --manifest-branch (or -b), to specify the name of the git ref in
+which the commit can be found.
+
 The optional -m argument can be used to specify an alternate manifest
 to be used. If no manifest is specified, the manifest default.xml
 will be used.
@@ -136,6 +139,7 @@ to update the working directory files.
         # manifest project is special and is created when instantiating the
         # manifest which happens before we parse options.
         self.manifest.manifestProject.clone_depth = opt.manifest_depth
+        self.manifest.manifestProject.upstream = opt.manifest_upstream_branch
         clone_filter_for_depth = (
            "blob:none" if (_REPO_ALLOW_SHALLOW == "0") else None
        )
@@ -215,7 +219,7 @@ to update the working directory files.
 
         if not opt.quiet:
             print()
-            print("Your identity is: %s <%s>" % (name, email))
+            print(f"Your identity is: {name} <{email}>")
             print("is this correct [y/N]? ", end="", flush=True)
             a = sys.stdin.readline().strip().lower()
             if a in ("yes", "y", "t", "true"):
@@ -318,6 +322,12 @@ to update the working directory files.
                     " be used with --standalone-manifest."
                 )
 
+        if opt.manifest_upstream_branch and opt.manifest_branch is None:
+            self.OptionParser.error(
+                "--manifest-upstream-branch cannot be used without "
+                "--manifest-branch."
+            )
+
         if args:
             if opt.manifest_url:
                 self.OptionParser.error(
@@ -331,13 +341,17 @@ to update the working directory files.
                 self.OptionParser.error("too many arguments to init")
 
     def Execute(self, opt, args):
-        git_require(MIN_GIT_VERSION_HARD, fail=True)
-        if not git_require(MIN_GIT_VERSION_SOFT):
+        wrapper = Wrapper()
+
+        reqs = wrapper.Requirements.from_dir(WrapperDir())
+        git_require(reqs.get_hard_ver("git"), fail=True)
+        min_git_version_soft = reqs.get_soft_ver("git")
+        if not git_require(min_git_version_soft):
             logger.warning(
                 "repo: warning: git-%s+ will soon be required; "
                 "please upgrade your version of git to maintain "
                 "support.",
-                ".".join(str(x) for x in MIN_GIT_VERSION_SOFT),
+                ".".join(str(x) for x in min_git_version_soft),
             )
 
         rp = self.manifest.repoProject
@@ -350,10 +364,9 @@ to update the working directory files.
 
         # Handle new --repo-rev requests.
         if opt.repo_rev:
-            wrapper = Wrapper()
             try:
                 remote_ref, rev = wrapper.check_repo_rev(
-                    rp.gitdir,
+                    rp.worktree,
                     opt.repo_rev,
                     repo_verify=opt.repo_verify,
                     quiet=opt.quiet,

subcmds/list.py

@@ -131,7 +131,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
             elif opt.path_only and not opt.name_only:
                 lines.append("%s" % (_getpath(project)))
             else:
-                lines.append("%s : %s" % (_getpath(project), project.name))
+                lines.append(f"{_getpath(project)} : {project.name}")
 
         if lines:
             lines.sort()

subcmds/prune.py

@@ -27,8 +27,10 @@ class Prune(PagedCommand):
     """
     PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
 
-    def _ExecuteOne(self, project):
+    @classmethod
+    def _ExecuteOne(cls, project_idx):
         """Process one project."""
+        project = cls.get_parallel_context()["projects"][project_idx]
         return project.PruneHeads()
 
     def Execute(self, opt, args):
@@ -41,10 +43,12 @@ class Prune(PagedCommand):
         def _ProcessResults(_pool, _output, results):
             return list(itertools.chain.from_iterable(results))
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = projects
             all_branches = self.ExecuteInParallel(
                 opt.jobs,
                 self._ExecuteOne,
-                projects,
+                range(len(projects)),
                 callback=_ProcessResults,
                 ordered=True,
             )
@@ -83,9 +87,7 @@ class Prune(PagedCommand):
                 )
 
             if not branch.base_exists:
-                print(
-                    "(ignoring: tracking branch is gone: %s)" % (branch.base,)
-                )
+                print(f"(ignoring: tracking branch is gone: {branch.base})")
             else:
                 commits = branch.commits
                 date = branch.date

subcmds/start.py

@@ -21,7 +21,6 @@ from error import RepoExitError
 from git_command import git
 from git_config import IsImmutable
 from progress import Progress
-from project import Project
 from repo_logging import RepoLogger
 
 
@@ -29,7 +28,7 @@ logger = RepoLogger(__file__)
 
 
 class ExecuteOneResult(NamedTuple):
-    project: Project
+    project_idx: int
     error: Exception
 
 
@@ -80,18 +79,20 @@ revision specified in the manifest.
         if not git.check_ref_format("heads/%s" % nb):
             self.OptionParser.error("'%s' is not a valid name" % nb)
 
-    def _ExecuteOne(self, revision, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, revision, nb, default_revisionExpr, project_idx):
         """Start one project."""
         # If the current revision is immutable, such as a SHA1, a tag or
         # a change, then we can't push back to it. Substitute with
         # dest_branch, if defined; or with manifest default revision instead.
         branch_merge = ""
         error = None
+        project = cls.get_parallel_context()["projects"][project_idx]
         if IsImmutable(project.revisionExpr):
             if project.dest_branch:
                 branch_merge = project.dest_branch
             else:
-                branch_merge = self.manifest.default.revisionExpr
+                branch_merge = default_revisionExpr
 
         try:
             project.StartBranch(
@@ -100,7 +101,7 @@ revision specified in the manifest.
         except Exception as e:
             logger.error("error: unable to checkout %s: %s", project.name, e)
             error = e
-        return ExecuteOneResult(project, error)
+        return ExecuteOneResult(project_idx, error)
 
     def Execute(self, opt, args):
         nb = args[0]
@@ -120,18 +121,27 @@ revision specified in the manifest.
         def _ProcessResults(_pool, pm, results):
             for result in results:
                 if result.error:
-                    err_projects.append(result.project)
+                    project = all_projects[result.project_idx]
+                    err_projects.append(project)
                     err.append(result.error)
                 pm.update(msg="")
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             self.ExecuteInParallel(
                 opt.jobs,
-                functools.partial(self._ExecuteOne, opt.revision, nb),
-                all_projects,
+                functools.partial(
+                    self._ExecuteOne,
+                    opt.revision,
+                    nb,
+                    self.manifest.default.revisionExpr,
+                ),
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 output=Progress(
-                    "Starting %s" % (nb,), len(all_projects), quiet=opt.quiet
+                    f"Starting {nb}", len(all_projects), quiet=opt.quiet
                 ),
+                chunksize=1,
             )
 
         if err_projects:

subcmds/status.py

@@ -88,7 +88,8 @@ the following meanings:
             "projects",
         )
 
-    def _StatusHelper(self, quiet, local, project):
+    @classmethod
+    def _StatusHelper(cls, quiet, local, project_idx):
         """Obtains the status for a specific project.
 
         Obtains the status for a project, redirecting the output to
@@ -99,12 +100,13 @@ the following meanings:
             local: a boolean, if True, the path is relative to the local
                 (sub)manifest.  If false, the path is relative to the outermost
                 manifest.
-            project: Project to get status of.
+            project_idx: Project index to get status of.
 
         Returns:
             The status of the project.
         """
         buf = io.StringIO()
+        project = cls.get_parallel_context()["projects"][project_idx]
         ret = project.PrintWorkTreeStatus(
             quiet=quiet, output_redir=buf, local=local
         )
@@ -143,14 +145,17 @@ the following meanings:
                 ret += 1
             return ret
 
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
             counter = self.ExecuteInParallel(
                 opt.jobs,
                 functools.partial(
                     self._StatusHelper, opt.quiet, opt.this_manifest_only
                 ),
-                all_projects,
+                range(len(all_projects)),
                 callback=_ProcessResults,
                 ordered=True,
+                chunksize=1,
             )
 
         if not opt.quiet and len(all_projects) == counter:
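
For reference, the new gc subcommand added in subcmds/gc.py above is loaded through the usual subcmds machinery, so it is invoked as "repo gc"; the flags below come from its _Options hook, and the descriptions paraphrase the help strings rather than showing real output.

    repo gc -n          # --dry-run: report what would be deleted, without deleting
    repo gc -y          # --yes: skip the "Proceed? [y/N]" prompt before deleting
    repo gc --repack    # additionally repack projects that use partial clone
                        # with filter=blob:none
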
subcmds/sync.py (346 lines changed)

@@ -21,6 +21,7 @@ import multiprocessing
 import netrc
 import optparse
 import os
+from pathlib import Path
 import sys
 import tempfile
 import time
@@ -82,22 +83,65 @@ from wrapper import Wrapper
 
 _ONE_DAY_S = 24 * 60 * 60
 
-# Env var to implicitly turn auto-gc back on. This was added to allow a user to
-# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
-_REPO_AUTO_GC = "REPO_AUTO_GC"
-_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
-
 _REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")
 
 logger = RepoLogger(__file__)
 
 
+def _SafeCheckoutOrder(checkouts: List[Project]) -> List[List[Project]]:
+    """Generate a sequence of checkouts that is safe to perform. The client
+    should checkout everything from n-th index before moving to n+1.
+
+    This is only useful if manifest contains nested projects.
+
+    E.g. if foo, foo/bar and foo/bar/baz are project paths, then foo needs to
+    finish before foo/bar can proceed, and foo/bar needs to finish before
+    foo/bar/baz."""
+    res = [[]]
+    current = res[0]
+
+    # depth_stack contains a current stack of parent paths.
+    depth_stack = []
+    # Checkouts are iterated in the hierarchical order. That way, it can easily
+    # be determined if the previous checkout is parent of the current checkout.
+    # We are splitting by the path separator so the final result is
+    # hierarchical, and not just lexicographical. For example, if the projects
+    # are: foo, foo/bar, foo-bar, lexicographical order produces foo, foo-bar
+    # and foo/bar, which doesn't work.
+    for checkout in sorted(checkouts, key=lambda x: x.relpath.split("/")):
+        checkout_path = Path(checkout.relpath)
+        while depth_stack:
+            try:
+                checkout_path.relative_to(depth_stack[-1])
+            except ValueError:
+                # Path.relative_to returns ValueError if paths are not relative.
+                # TODO(sokcevic): Switch to is_relative_to once min supported
+                # version is py3.9.
+                depth_stack.pop()
+            else:
+                if len(depth_stack) >= len(res):
+                    # Another depth created.
+                    res.append([])
+                break
+
+        current = res[len(depth_stack)]
+        current.append(checkout)
+        depth_stack.append(checkout_path)
+
+    return res
+
+
+def _chunksize(projects: int, jobs: int) -> int:
+    """Calculate chunk size for the given number of projects and jobs."""
+    return min(max(1, projects // jobs), WORKER_BATCH_SIZE)
+
+
 class _FetchOneResult(NamedTuple):
     """_FetchOne return value.
 
     Attributes:
         success (bool): True if successful.
-        project (Project): The fetched project.
+        project_idx (int): The fetched project index.
         start (float): The starting time.time().
         finish (float): The ending time.time().
         remote_fetched (bool): True if the remote was actually queried.
@@ -105,7 +149,7 @@ class _FetchOneResult(NamedTuple):
 
     success: bool
     errors: List[Exception]
-    project: Project
+    project_idx: int
     start: float
     finish: float
     remote_fetched: bool
@@ -138,14 +182,14 @@ class _CheckoutOneResult(NamedTuple):
 
     Attributes:
         success (bool): True if successful.
-        project (Project): The project.
+        project_idx (int): The project index.
         start (float): The starting time.time().
         finish (float): The ending time.time().
     """
 
     success: bool
     errors: List[Exception]
-    project: Project
+    project_idx: int
     start: float
     finish: float
 
@@ -243,6 +287,11 @@ directories if they have previously been linked to a different
 object directory. WARNING: This may cause data to be lost since
 refs may be removed when overwriting.
 
+The --force-checkout option can be used to force git to switch revs even if the
+index or the working tree differs from HEAD, and if there are untracked files.
+WARNING: This may cause data to be lost since uncommitted changes may be
+removed.
+
 The --force-remove-dirty option can be used to remove previously used
 projects with uncommitted changes. WARNING: This may cause data to be
 lost since uncommitted changes may be removed with projects that no longer
@@ -301,6 +350,8 @@ later is required to fix a server side protocol bug.
     # value later on.
     PARALLEL_JOBS = 0
 
+    _JOBS_WARN_THRESHOLD = 100
+
     def _Options(self, p, show_smart=True):
         p.add_option(
             "--jobs-network",
@@ -340,6 +391,14 @@ later is required to fix a server side protocol bug.
             "point to a different object directory. WARNING: this "
             "may cause loss of data",
         )
+        p.add_option(
+            "--force-checkout",
+            dest="force_checkout",
+            action="store_true",
+            help="force checkout even if it results in throwing away "
+            "uncommitted modifications. "
|
"WARNING: this may cause loss of data",
|
||||||
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"--force-remove-dirty",
|
"--force-remove-dirty",
|
||||||
dest="force_remove_dirty",
|
dest="force_remove_dirty",
|
||||||
@ -348,6 +407,13 @@ later is required to fix a server side protocol bug.
|
|||||||
"projects no longer exist in the manifest. "
|
"projects no longer exist in the manifest. "
|
||||||
"WARNING: this may cause loss of data",
|
"WARNING: this may cause loss of data",
|
||||||
)
|
)
|
||||||
|
p.add_option(
|
||||||
|
"--rebase",
|
||||||
|
dest="rebase",
|
||||||
|
action="store_true",
|
||||||
|
help="rebase local commits regardless of whether they are "
|
||||||
|
"published",
|
||||||
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"-l",
|
"-l",
|
||||||
"--local-only",
|
"--local-only",
|
||||||
@ -528,7 +594,8 @@ later is required to fix a server side protocol bug.
|
|||||||
branch = branch[len(R_HEADS) :]
|
branch = branch[len(R_HEADS) :]
|
||||||
return branch
|
return branch
|
||||||
|
|
||||||
def _GetCurrentBranchOnly(self, opt, manifest):
|
@classmethod
|
||||||
|
def _GetCurrentBranchOnly(cls, opt, manifest):
|
||||||
"""Returns whether current-branch or use-superproject options are
|
"""Returns whether current-branch or use-superproject options are
|
||||||
enabled.
|
enabled.
|
||||||
|
|
||||||
@ -618,7 +685,7 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
if not use_super:
|
if not use_super:
|
||||||
continue
|
continue
|
||||||
m.superproject.SetQuiet(opt.quiet)
|
m.superproject.SetQuiet(not opt.verbose)
|
||||||
print_messages = git_superproject.PrintMessages(
|
print_messages = git_superproject.PrintMessages(
|
||||||
opt.use_superproject, m
|
opt.use_superproject, m
|
||||||
)
|
)
|
||||||
@ -646,7 +713,8 @@ later is required to fix a server side protocol bug.
|
|||||||
if need_unload:
|
if need_unload:
|
||||||
m.outer_client.manifest.Unload()
|
m.outer_client.manifest.Unload()
|
||||||
|
|
||||||
def _FetchProjectList(self, opt, projects):
|
@classmethod
|
||||||
|
def _FetchProjectList(cls, opt, projects):
|
||||||
"""Main function of the fetch worker.
|
"""Main function of the fetch worker.
|
||||||
|
|
||||||
The projects we're given share the same underlying git object store, so
|
The projects we're given share the same underlying git object store, so
|
||||||
@ -658,21 +726,23 @@ later is required to fix a server side protocol bug.
|
|||||||
opt: Program options returned from optparse. See _Options().
|
opt: Program options returned from optparse. See _Options().
|
||||||
projects: Projects to fetch.
|
projects: Projects to fetch.
|
||||||
"""
|
"""
|
||||||
return [self._FetchOne(opt, x) for x in projects]
|
return [cls._FetchOne(opt, x) for x in projects]
|
||||||
|
|
||||||
def _FetchOne(self, opt, project):
|
@classmethod
|
||||||
|
def _FetchOne(cls, opt, project_idx):
|
||||||
"""Fetch git objects for a single project.
|
"""Fetch git objects for a single project.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
opt: Program options returned from optparse. See _Options().
|
opt: Program options returned from optparse. See _Options().
|
||||||
project: Project object for the project to fetch.
|
project_idx: Project index for the project to fetch.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Whether the fetch was successful.
|
Whether the fetch was successful.
|
||||||
"""
|
"""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
start = time.time()
|
start = time.time()
|
||||||
k = f"{project.name} @ {project.relpath}"
|
k = f"{project.name} @ {project.relpath}"
|
||||||
self._sync_dict[k] = start
|
cls.get_parallel_context()["sync_dict"][k] = start
|
||||||
success = False
|
success = False
|
||||||
remote_fetched = False
|
remote_fetched = False
|
||||||
errors = []
|
errors = []
|
||||||
@ -682,7 +752,7 @@ later is required to fix a server side protocol bug.
|
|||||||
quiet=opt.quiet,
|
quiet=opt.quiet,
|
||||||
verbose=opt.verbose,
|
verbose=opt.verbose,
|
||||||
output_redir=buf,
|
output_redir=buf,
|
||||||
current_branch_only=self._GetCurrentBranchOnly(
|
current_branch_only=cls._GetCurrentBranchOnly(
|
||||||
opt, project.manifest
|
opt, project.manifest
|
||||||
),
|
),
|
||||||
force_sync=opt.force_sync,
|
force_sync=opt.force_sync,
|
||||||
@ -692,7 +762,7 @@ later is required to fix a server side protocol bug.
|
|||||||
optimized_fetch=opt.optimized_fetch,
|
optimized_fetch=opt.optimized_fetch,
|
||||||
retry_fetches=opt.retry_fetches,
|
retry_fetches=opt.retry_fetches,
|
||||||
prune=opt.prune,
|
prune=opt.prune,
|
||||||
ssh_proxy=self.ssh_proxy,
|
ssh_proxy=cls.get_parallel_context()["ssh_proxy"],
|
||||||
clone_filter=project.manifest.CloneFilter,
|
clone_filter=project.manifest.CloneFilter,
|
||||||
partial_clone_exclude=project.manifest.PartialCloneExclude,
|
partial_clone_exclude=project.manifest.PartialCloneExclude,
|
||||||
clone_filter_for_depth=project.manifest.CloneFilterForDepth,
|
clone_filter_for_depth=project.manifest.CloneFilterForDepth,
|
||||||
@ -724,24 +794,20 @@ later is required to fix a server side protocol bug.
|
|||||||
type(e).__name__,
|
type(e).__name__,
|
||||||
e,
|
e,
|
||||||
)
|
)
|
||||||
del self._sync_dict[k]
|
|
||||||
errors.append(e)
|
errors.append(e)
|
||||||
raise
|
raise
|
||||||
|
finally:
|
||||||
|
del cls.get_parallel_context()["sync_dict"][k]
|
||||||
|
|
||||||
finish = time.time()
|
finish = time.time()
|
||||||
del self._sync_dict[k]
|
|
||||||
return _FetchOneResult(
|
return _FetchOneResult(
|
||||||
success, errors, project, start, finish, remote_fetched
|
success, errors, project_idx, start, finish, remote_fetched
|
||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _FetchInitChild(cls, ssh_proxy):
|
|
||||||
cls.ssh_proxy = ssh_proxy
|
|
||||||
|
|
||||||
def _GetSyncProgressMessage(self):
|
def _GetSyncProgressMessage(self):
|
||||||
earliest_time = float("inf")
|
earliest_time = float("inf")
|
||||||
earliest_proj = None
|
earliest_proj = None
|
||||||
items = self._sync_dict.items()
|
items = self.get_parallel_context()["sync_dict"].items()
|
||||||
for project, t in items:
|
for project, t in items:
|
||||||
if t < earliest_time:
|
if t < earliest_time:
|
||||||
earliest_time = t
|
earliest_time = t
|
||||||
@ -749,7 +815,7 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
if not earliest_proj:
|
if not earliest_proj:
|
||||||
# This function is called when sync is still running but in some
|
# This function is called when sync is still running but in some
|
||||||
# cases (by chance), _sync_dict can contain no entries. Return some
|
# cases (by chance), sync_dict can contain no entries. Return some
|
||||||
# text to indicate that sync is still working.
|
# text to indicate that sync is still working.
|
||||||
return "..working.."
|
return "..working.."
|
||||||
|
|
||||||
@ -757,10 +823,19 @@ later is required to fix a server side protocol bug.
|
|||||||
jobs = jobs_str(len(items))
|
jobs = jobs_str(len(items))
|
||||||
return f"{jobs} | {elapsed_str(elapsed)} {earliest_proj}"
|
return f"{jobs} | {elapsed_str(elapsed)} {earliest_proj}"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def InitWorker(cls):
|
||||||
|
# Force connect to the manager server now.
|
||||||
|
# This is good because workers are initialized one by one. Without this,
|
||||||
|
# multiple workers may connect to the manager when handling the first
|
||||||
|
# job at the same time. Then the connection may fail if too many
|
||||||
|
# connections are pending and execeeded the socket listening backlog,
|
||||||
|
# especially on MacOS.
|
||||||
|
len(cls.get_parallel_context()["sync_dict"])
|
||||||
|
|
||||||
def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
|
def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
|
||||||
ret = True
|
ret = True
|
||||||
|
|
||||||
jobs = opt.jobs_network
|
|
||||||
fetched = set()
|
fetched = set()
|
||||||
remote_fetched = set()
|
remote_fetched = set()
|
||||||
pm = Progress(
|
pm = Progress(
|
||||||
@ -772,7 +847,6 @@ later is required to fix a server side protocol bug.
|
|||||||
elide=True,
|
elide=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
self._sync_dict = multiprocessing.Manager().dict()
|
|
||||||
sync_event = _threading.Event()
|
sync_event = _threading.Event()
|
||||||
|
|
||||||
def _MonitorSyncLoop():
|
def _MonitorSyncLoop():
|
||||||
@ -783,19 +857,13 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
|
sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
|
||||||
sync_progress_thread.daemon = True
|
sync_progress_thread.daemon = True
|
||||||
sync_progress_thread.start()
|
|
||||||
|
|
||||||
objdir_project_map = dict()
|
def _ProcessResults(pool, pm, results_sets):
|
||||||
for project in projects:
|
|
||||||
objdir_project_map.setdefault(project.objdir, []).append(project)
|
|
||||||
projects_list = list(objdir_project_map.values())
|
|
||||||
|
|
||||||
def _ProcessResults(results_sets):
|
|
||||||
ret = True
|
ret = True
|
||||||
for results in results_sets:
|
for results in results_sets:
|
||||||
for result in results:
|
for result in results:
|
||||||
success = result.success
|
success = result.success
|
||||||
project = result.project
|
project = projects[result.project_idx]
|
||||||
start = result.start
|
start = result.start
|
||||||
finish = result.finish
|
finish = result.finish
|
||||||
self._fetch_times.Set(project, finish - start)
|
self._fetch_times.Set(project, finish - start)
|
||||||
@ -819,58 +887,50 @@ later is required to fix a server side protocol bug.
|
|||||||
fetched.add(project.gitdir)
|
fetched.add(project.gitdir)
|
||||||
pm.update()
|
pm.update()
|
||||||
if not ret and opt.fail_fast:
|
if not ret and opt.fail_fast:
|
||||||
|
if pool:
|
||||||
|
pool.close()
|
||||||
break
|
break
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
# We pass the ssh proxy settings via the class. This allows
|
with self.ParallelContext():
|
||||||
# multiprocessing to pickle it up when spawning children. We can't pass
|
self.get_parallel_context()["projects"] = projects
|
||||||
# it as an argument to _FetchProjectList below as multiprocessing is
|
self.get_parallel_context()[
|
||||||
# unable to pickle those.
|
"sync_dict"
|
||||||
Sync.ssh_proxy = None
|
] = multiprocessing.Manager().dict()
|
||||||
|
|
||||||
# NB: Multiprocessing is heavy, so don't spin it up for one job.
|
objdir_project_map = dict()
|
||||||
if len(projects_list) == 1 or jobs == 1:
|
for index, project in enumerate(projects):
|
||||||
self._FetchInitChild(ssh_proxy)
|
objdir_project_map.setdefault(project.objdir, []).append(index)
|
||||||
if not _ProcessResults(
|
projects_list = list(objdir_project_map.values())
|
||||||
self._FetchProjectList(opt, x) for x in projects_list
|
|
||||||
):
|
jobs = max(1, min(opt.jobs_network, len(projects_list)))
|
||||||
ret = False
|
|
||||||
else:
|
# We pass the ssh proxy settings via the class. This allows
|
||||||
# Favor throughput over responsiveness when quiet. It seems that
|
# multiprocessing to pickle it up when spawning children. We can't
|
||||||
# imap() will yield results in batches relative to chunksize, so
|
# pass it as an argument to _FetchProjectList below as
|
||||||
# even as the children finish a sync, we won't see the result until
|
# multiprocessing is unable to pickle those.
|
||||||
# one child finishes ~chunksize jobs. When using a large --jobs
|
self.get_parallel_context()["ssh_proxy"] = ssh_proxy
|
||||||
# with large chunksize, this can be jarring as there will be a large
|
|
||||||
# initial delay where repo looks like it isn't doing anything and
|
sync_progress_thread.start()
|
||||||
# sits at 0%, but then suddenly completes a lot of jobs all at once.
|
if not opt.quiet:
|
||||||
# Since this code is more network bound, we can accept a bit more
|
|
||||||
# CPU overhead with a smaller chunksize so that the user sees more
|
|
||||||
# immediate & continuous feedback.
|
|
||||||
if opt.quiet:
|
|
||||||
chunksize = WORKER_BATCH_SIZE
|
|
||||||
else:
|
|
||||||
pm.update(inc=0, msg="warming up")
|
pm.update(inc=0, msg="warming up")
|
||||||
chunksize = 4
|
try:
|
||||||
with multiprocessing.Pool(
|
ret = self.ExecuteInParallel(
|
||||||
jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
|
jobs,
|
||||||
) as pool:
|
|
||||||
results = pool.imap_unordered(
|
|
||||||
functools.partial(self._FetchProjectList, opt),
|
functools.partial(self._FetchProjectList, opt),
|
||||||
projects_list,
|
projects_list,
|
||||||
chunksize=chunksize,
|
callback=_ProcessResults,
|
||||||
|
output=pm,
|
||||||
|
# Use chunksize=1 to avoid the chance that some workers are
|
||||||
|
# idle while other workers still have more than one job in
|
||||||
|
# their chunk queue.
|
||||||
|
chunksize=1,
|
||||||
|
initializer=self.InitWorker,
|
||||||
)
|
)
|
||||||
if not _ProcessResults(results):
|
finally:
|
||||||
ret = False
|
|
||||||
pool.close()
|
|
||||||
|
|
||||||
# Cleanup the reference now that we're done with it, and we're going to
|
|
||||||
# release any resources it points to. If we don't, later
|
|
||||||
# multiprocessing usage (e.g. checkouts) will try to pickle and then
|
|
||||||
# crash.
|
|
||||||
del Sync.ssh_proxy
|
|
||||||
|
|
||||||
sync_event.set()
|
sync_event.set()
|
||||||
pm.end()
|
sync_progress_thread.join()
|
||||||
|
|
||||||
self._fetch_times.Save()
|
self._fetch_times.Save()
|
||||||
self._local_sync_state.Save()
|
self._local_sync_state.Save()
|
||||||
|
|
||||||
@ -911,6 +971,8 @@ later is required to fix a server side protocol bug.
|
|||||||
if not success:
|
if not success:
|
||||||
err_event.set()
|
err_event.set()
|
||||||
|
|
||||||
|
# Call self update, unless requested not to
|
||||||
|
if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
|
||||||
_PostRepoFetch(rp, opt.repo_verify)
|
_PostRepoFetch(rp, opt.repo_verify)
|
||||||
if opt.network_only:
|
if opt.network_only:
|
||||||
# Bail out now; the rest touches the working tree.
|
# Bail out now; the rest touches the working tree.
|
||||||
@ -956,17 +1018,32 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
return _FetchMainResult(all_projects)
|
return _FetchMainResult(all_projects)
|
||||||
|
|
||||||
def _CheckoutOne(self, detach_head, force_sync, project):
|
@classmethod
|
||||||
|
def _CheckoutOne(
|
||||||
|
cls,
|
||||||
|
detach_head,
|
||||||
|
force_sync,
|
||||||
|
force_checkout,
|
||||||
|
force_rebase,
|
||||||
|
verbose,
|
||||||
|
project_idx,
|
||||||
|
):
|
||||||
"""Checkout work tree for one project
|
"""Checkout work tree for one project
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
detach_head: Whether to leave a detached HEAD.
|
detach_head: Whether to leave a detached HEAD.
|
||||||
force_sync: Force checking out of the repo.
|
force_sync: Force checking out of .git directory (e.g. overwrite
|
||||||
project: Project object for the project to checkout.
|
existing git directory that was previously linked to a different
|
||||||
|
object directory).
|
||||||
|
force_checkout: Force checking out of the repo content.
|
||||||
|
force_rebase: Force rebase.
|
||||||
|
verbose: Whether to show verbose messages.
|
||||||
|
project_idx: Project index for the project to checkout.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Whether the fetch was successful.
|
Whether the fetch was successful.
|
||||||
"""
|
"""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
start = time.time()
|
start = time.time()
|
||||||
syncbuf = SyncBuffer(
|
syncbuf = SyncBuffer(
|
||||||
project.manifest.manifestProject.config, detach_head=detach_head
|
project.manifest.manifestProject.config, detach_head=detach_head
|
||||||
@ -975,9 +1052,16 @@ later is required to fix a server side protocol bug.
|
|||||||
errors = []
|
errors = []
|
||||||
try:
|
try:
|
||||||
project.Sync_LocalHalf(
|
project.Sync_LocalHalf(
|
||||||
syncbuf, force_sync=force_sync, errors=errors
|
syncbuf,
|
||||||
|
force_sync=force_sync,
|
||||||
|
force_checkout=force_checkout,
|
||||||
|
force_rebase=force_rebase,
|
||||||
|
errors=errors,
|
||||||
|
verbose=verbose,
|
||||||
)
|
)
|
||||||
success = syncbuf.Finish()
|
success = syncbuf.Finish()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
logger.error("Keyboard interrupt while processing %s", project.name)
|
||||||
except GitError as e:
|
except GitError as e:
|
||||||
logger.error(
|
logger.error(
|
||||||
"error.GitError: Cannot checkout %s: %s", project.name, e
|
"error.GitError: Cannot checkout %s: %s", project.name, e
|
||||||
@ -995,7 +1079,7 @@ later is required to fix a server side protocol bug.
|
|||||||
if not success:
|
if not success:
|
||||||
logger.error("error: Cannot checkout %s", project.name)
|
logger.error("error: Cannot checkout %s", project.name)
|
||||||
finish = time.time()
|
finish = time.time()
|
||||||
return _CheckoutOneResult(success, errors, project, start, finish)
|
return _CheckoutOneResult(success, errors, project_idx, start, finish)
|
||||||
|
|
||||||
def _Checkout(self, all_projects, opt, err_results, checkout_errors):
|
def _Checkout(self, all_projects, opt, err_results, checkout_errors):
|
||||||
"""Checkout projects listed in all_projects
|
"""Checkout projects listed in all_projects
|
||||||
@ -1013,7 +1097,9 @@ later is required to fix a server side protocol bug.
|
|||||||
ret = True
|
ret = True
|
||||||
for result in results:
|
for result in results:
|
||||||
success = result.success
|
success = result.success
|
||||||
project = result.project
|
project = self.get_parallel_context()["projects"][
|
||||||
|
result.project_idx
|
||||||
|
]
|
||||||
start = result.start
|
start = result.start
|
||||||
finish = result.finish
|
finish = result.finish
|
||||||
self.event_log.AddSync(
|
self.event_log.AddSync(
|
||||||
@ -1039,14 +1125,28 @@ later is required to fix a server side protocol bug.
|
|||||||
pm.update(msg=project.name)
|
pm.update(msg=project.name)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
for projects in _SafeCheckoutOrder(all_projects):
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = projects
|
||||||
proc_res = self.ExecuteInParallel(
|
proc_res = self.ExecuteInParallel(
|
||||||
opt.jobs_checkout,
|
opt.jobs_checkout,
|
||||||
functools.partial(
|
functools.partial(
|
||||||
self._CheckoutOne, opt.detach_head, opt.force_sync
|
self._CheckoutOne,
|
||||||
|
opt.detach_head,
|
||||||
|
opt.force_sync,
|
||||||
|
opt.force_checkout,
|
||||||
|
opt.rebase,
|
||||||
|
opt.verbose,
|
||||||
),
|
),
|
||||||
all_projects,
|
range(len(projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
output=Progress("Checking out", len(all_projects), quiet=opt.quiet),
|
output=Progress(
|
||||||
|
"Checking out", len(all_projects), quiet=opt.quiet
|
||||||
|
),
|
||||||
|
# Use chunksize=1 to avoid the chance that some workers are
|
||||||
|
# idle while other workers still have more than one job in
|
||||||
|
# their chunk queue.
|
||||||
|
chunksize=1,
|
||||||
)
|
)
|
||||||
|
|
||||||
self._local_sync_state.Save()
|
self._local_sync_state.Save()
|
||||||
@ -1288,7 +1388,7 @@ later is required to fix a server side protocol bug.
|
|||||||
groups=None,
|
groups=None,
|
||||||
)
|
)
|
||||||
project.DeleteWorktree(
|
project.DeleteWorktree(
|
||||||
quiet=opt.quiet, force=opt.force_remove_dirty
|
verbose=opt.verbose, force=opt.force_remove_dirty
|
||||||
)
|
)
|
||||||
|
|
||||||
new_project_paths.sort()
|
new_project_paths.sort()
|
||||||
@ -1346,7 +1446,10 @@ later is required to fix a server side protocol bug.
|
|||||||
for need_remove_file in need_remove_files:
|
for need_remove_file in need_remove_files:
|
||||||
# Try to remove the updated copyfile or linkfile.
|
# Try to remove the updated copyfile or linkfile.
|
||||||
# So, if the file is not exist, nothing need to do.
|
# So, if the file is not exist, nothing need to do.
|
||||||
platform_utils.remove(need_remove_file, missing_ok=True)
|
platform_utils.remove(
|
||||||
|
os.path.join(self.client.topdir, need_remove_file),
|
||||||
|
missing_ok=True,
|
||||||
|
)
|
||||||
|
|
||||||
# Create copy-link-files.json, save dest path of "copyfile" and
|
# Create copy-link-files.json, save dest path of "copyfile" and
|
||||||
# "linkfile".
|
# "linkfile".
|
||||||
@ -1394,13 +1497,14 @@ later is required to fix a server side protocol bug.
|
|||||||
|
|
||||||
if username and password:
|
if username and password:
|
||||||
manifest_server = manifest_server.replace(
|
manifest_server = manifest_server.replace(
|
||||||
"://", "://%s:%s@" % (username, password), 1
|
"://", f"://{username}:{password}@", 1
|
||||||
)
|
)
|
||||||
|
|
||||||
transport = PersistentTransport(manifest_server)
|
transport = PersistentTransport(manifest_server)
|
||||||
if manifest_server.startswith("persistent-"):
|
if manifest_server.startswith("persistent-"):
|
||||||
manifest_server = manifest_server[len("persistent-") :]
|
manifest_server = manifest_server[len("persistent-") :]
|
||||||
|
|
||||||
|
# Changes in behavior should update docs/smart-sync.md accordingly.
|
||||||
try:
|
try:
|
||||||
server = xmlrpc.client.Server(manifest_server, transport=transport)
|
server = xmlrpc.client.Server(manifest_server, transport=transport)
|
||||||
if opt.smart_sync:
|
if opt.smart_sync:
|
||||||
@ -1411,6 +1515,19 @@ later is required to fix a server side protocol bug.
|
|||||||
[success, manifest_str] = server.GetApprovedManifest(
|
[success, manifest_str] = server.GetApprovedManifest(
|
||||||
branch, target
|
branch, target
|
||||||
)
|
)
|
||||||
|
elif (
|
||||||
|
"TARGET_PRODUCT" in os.environ
|
||||||
|
and "TARGET_BUILD_VARIANT" in os.environ
|
||||||
|
and "TARGET_RELEASE" in os.environ
|
||||||
|
):
|
||||||
|
target = "%s-%s-%s" % (
|
||||||
|
os.environ["TARGET_PRODUCT"],
|
||||||
|
os.environ["TARGET_RELEASE"],
|
||||||
|
os.environ["TARGET_BUILD_VARIANT"],
|
||||||
|
)
|
||||||
|
[success, manifest_str] = server.GetApprovedManifest(
|
||||||
|
branch, target
|
||||||
|
)
|
||||||
elif (
|
elif (
|
||||||
"TARGET_PRODUCT" in os.environ
|
"TARGET_PRODUCT" in os.environ
|
||||||
and "TARGET_BUILD_VARIANT" in os.environ
|
and "TARGET_BUILD_VARIANT" in os.environ
|
||||||
@ -1500,7 +1617,7 @@ later is required to fix a server side protocol bug.
|
|||||||
buf = TeeStringIO(sys.stdout)
|
buf = TeeStringIO(sys.stdout)
|
||||||
try:
|
try:
|
||||||
result = mp.Sync_NetworkHalf(
|
result = mp.Sync_NetworkHalf(
|
||||||
quiet=opt.quiet,
|
quiet=not opt.verbose,
|
||||||
output_redir=buf,
|
output_redir=buf,
|
||||||
verbose=opt.verbose,
|
verbose=opt.verbose,
|
||||||
current_branch_only=self._GetCurrentBranchOnly(
|
current_branch_only=self._GetCurrentBranchOnly(
|
||||||
@ -1533,16 +1650,17 @@ later is required to fix a server side protocol bug.
|
|||||||
syncbuf = SyncBuffer(mp.config)
|
syncbuf = SyncBuffer(mp.config)
|
||||||
start = time.time()
|
start = time.time()
|
||||||
mp.Sync_LocalHalf(
|
mp.Sync_LocalHalf(
|
||||||
syncbuf, submodules=mp.manifest.HasSubmodules, errors=errors
|
syncbuf,
|
||||||
|
submodules=mp.manifest.HasSubmodules,
|
||||||
|
errors=errors,
|
||||||
|
verbose=opt.verbose,
|
||||||
)
|
)
|
||||||
clean = syncbuf.Finish()
|
clean = syncbuf.Finish()
|
||||||
self.event_log.AddSync(
|
self.event_log.AddSync(
|
||||||
mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
|
mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
|
||||||
)
|
)
|
||||||
if not clean:
|
if not clean:
|
||||||
raise UpdateManifestError(
|
raise UpdateManifestError(aggregate_errors=errors)
|
||||||
aggregate_errors=errors, project=mp.name
|
|
||||||
)
|
|
||||||
self._ReloadManifest(manifest_name, mp.manifest)
|
self._ReloadManifest(manifest_name, mp.manifest)
|
||||||
|
|
||||||
def ValidateOptions(self, opt, args):
|
def ValidateOptions(self, opt, args):
|
||||||
@ -1573,16 +1691,6 @@ later is required to fix a server side protocol bug.
|
|||||||
if opt.prune is None:
|
if opt.prune is None:
|
||||||
opt.prune = True
|
opt.prune = True
|
||||||
|
|
||||||
if opt.auto_gc is None and _AUTO_GC:
|
|
||||||
logger.error(
|
|
||||||
"Will run `git gc --auto` because %s is set. %s is deprecated "
|
|
||||||
"and will be removed in a future release. Use `--auto-gc` "
|
|
||||||
"instead.",
|
|
||||||
_REPO_AUTO_GC,
|
|
||||||
_REPO_AUTO_GC,
|
|
||||||
)
|
|
||||||
opt.auto_gc = True
|
|
||||||
|
|
||||||
def _ValidateOptionsWithManifest(self, opt, mp):
|
def _ValidateOptionsWithManifest(self, opt, mp):
|
||||||
"""Like ValidateOptions, but after we've updated the manifest.
|
"""Like ValidateOptions, but after we've updated the manifest.
|
||||||
|
|
||||||
@ -1622,11 +1730,29 @@ later is required to fix a server side protocol bug.
|
|||||||
opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
|
opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
|
||||||
opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
|
opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
|
||||||
|
|
||||||
|
# Warn once if effective job counts seem excessively high.
|
||||||
|
# Prioritize --jobs, then --jobs-network, then --jobs-checkout.
|
||||||
|
job_options_to_check = (
|
||||||
|
("--jobs", opt.jobs),
|
||||||
|
("--jobs-network", opt.jobs_network),
|
||||||
|
("--jobs-checkout", opt.jobs_checkout),
|
||||||
|
)
|
||||||
|
for name, value in job_options_to_check:
|
||||||
|
if value > self._JOBS_WARN_THRESHOLD:
|
||||||
|
logger.warning(
|
||||||
|
"High job count (%d > %d) specified for %s; this may "
|
||||||
|
"lead to excessive resource usage or diminishing returns.",
|
||||||
|
value,
|
||||||
|
self._JOBS_WARN_THRESHOLD,
|
||||||
|
name,
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
errors = []
|
errors = []
|
||||||
try:
|
try:
|
||||||
self._ExecuteHelper(opt, args, errors)
|
self._ExecuteHelper(opt, args, errors)
|
||||||
except RepoExitError:
|
except (RepoExitError, RepoChangedException):
|
||||||
raise
|
raise
|
||||||
except (KeyboardInterrupt, Exception) as e:
|
except (KeyboardInterrupt, Exception) as e:
|
||||||
raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
|
raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
|
||||||
@ -1893,6 +2019,8 @@ def _PostRepoFetch(rp, repo_verify=True, verbose=False):
|
|||||||
# We also have to make sure this will switch to an older commit if
|
# We also have to make sure this will switch to an older commit if
|
||||||
# that's the latest tag in order to support release rollback.
|
# that's the latest tag in order to support release rollback.
|
||||||
try:
|
try:
|
||||||
|
# Refresh index since reset --keep won't do it.
|
||||||
|
rp.work_git.update_index("-q", "--refresh")
|
||||||
rp.work_git.reset("--keep", new_rev)
|
rp.work_git.reset("--keep", new_rev)
|
||||||
except GitError as e:
|
except GitError as e:
|
||||||
raise RepoUnhandledExceptionError(e)
|
raise RepoUnhandledExceptionError(e)
|
||||||
@ -2011,7 +2139,7 @@ class LocalSyncState:
|
|||||||
delete = set()
|
delete = set()
|
||||||
for path in self._state:
|
for path in self._state:
|
||||||
gitdir = os.path.join(self._manifest.topdir, path, ".git")
|
gitdir = os.path.join(self._manifest.topdir, path, ".git")
|
||||||
if not os.path.exists(gitdir):
|
if not os.path.exists(gitdir) or os.path.islink(gitdir):
|
||||||
delete.add(path)
|
delete.add(path)
|
||||||
if not delete:
|
if not delete:
|
||||||
return
|
return
|
||||||
|
@ -218,9 +218,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
def _Options(self, p):
|
def _Options(self, p):
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"-t",
|
"-t",
|
||||||
|
"--topic-branch",
|
||||||
dest="auto_topic",
|
dest="auto_topic",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="send local branch name to Gerrit Code Review",
|
help="set the topic to the local branch name",
|
||||||
|
)
|
||||||
|
p.add_option(
|
||||||
|
"--topic",
|
||||||
|
help="set topic for the change",
|
||||||
)
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"--hashtag",
|
"--hashtag",
|
||||||
@ -244,6 +249,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
default=[],
|
default=[],
|
||||||
help="add a label when uploading",
|
help="add a label when uploading",
|
||||||
)
|
)
|
||||||
|
p.add_option(
|
||||||
|
"--pd",
|
||||||
|
"--patchset-description",
|
||||||
|
dest="patchset_description",
|
||||||
|
help="description for patchset",
|
||||||
|
)
|
||||||
p.add_option(
|
p.add_option(
|
||||||
"--re",
|
"--re",
|
||||||
"--reviewers",
|
"--reviewers",
|
||||||
@ -543,42 +554,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
people = copy.deepcopy(original_people)
|
people = copy.deepcopy(original_people)
|
||||||
self._AppendAutoList(branch, people)
|
self._AppendAutoList(branch, people)
|
||||||
|
|
||||||
# Check if there are local changes that may have been forgotten.
|
|
||||||
changes = branch.project.UncommitedFiles()
|
|
||||||
if opt.ignore_untracked_files:
|
|
||||||
untracked = set(branch.project.UntrackedFiles())
|
|
||||||
changes = [x for x in changes if x not in untracked]
|
|
||||||
|
|
||||||
if changes:
|
|
||||||
key = "review.%s.autoupload" % branch.project.remote.review
|
|
||||||
answer = branch.project.config.GetBoolean(key)
|
|
||||||
|
|
||||||
# If they want to auto upload, let's not ask because it
|
|
||||||
# could be automated.
|
|
||||||
if answer is None:
|
|
||||||
print()
|
|
||||||
print(
|
|
||||||
"Uncommitted changes in %s (did you forget to "
|
|
||||||
"amend?):" % branch.project.name
|
|
||||||
)
|
|
||||||
print("\n".join(changes))
|
|
||||||
print("Continue uploading? (y/N) ", end="", flush=True)
|
|
||||||
if opt.yes:
|
|
||||||
print("<--yes>")
|
|
||||||
a = "yes"
|
|
||||||
else:
|
|
||||||
a = sys.stdin.readline().strip().lower()
|
|
||||||
if a not in ("y", "yes", "t", "true", "on"):
|
|
||||||
print("skipping upload", file=sys.stderr)
|
|
||||||
branch.uploaded = False
|
|
||||||
branch.error = "User aborted"
|
|
||||||
return
|
|
||||||
|
|
||||||
# Check if topic branches should be sent to the server during
|
# Check if topic branches should be sent to the server during
|
||||||
# upload.
|
# upload.
|
||||||
|
if opt.topic is None:
|
||||||
if opt.auto_topic is not True:
|
if opt.auto_topic is not True:
|
||||||
key = "review.%s.uploadtopic" % branch.project.remote.review
|
key = "review.%s.uploadtopic" % branch.project.remote.review
|
||||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||||
|
if opt.auto_topic:
|
||||||
|
opt.topic = branch.name
|
||||||
|
|
||||||
def _ExpandCommaList(value):
|
def _ExpandCommaList(value):
|
||||||
"""Split |value| up into comma delimited entries."""
|
"""Split |value| up into comma delimited entries."""
|
||||||
@ -620,19 +603,22 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
full_dest = destination
|
full_dest = destination
|
||||||
if not full_dest.startswith(R_HEADS):
|
if not full_dest.startswith(R_HEADS):
|
||||||
full_dest = R_HEADS + full_dest
|
full_dest = R_HEADS + full_dest
|
||||||
|
full_revision = branch.project.revisionExpr
|
||||||
|
if not full_revision.startswith(R_HEADS):
|
||||||
|
full_revision = R_HEADS + full_revision
|
||||||
|
|
||||||
# If the merge branch of the local branch is different from
|
# If the merge branch of the local branch is different from
|
||||||
# the project's revision AND destination, this might not be
|
# the project's revision AND destination, this might not be
|
||||||
# intentional.
|
# intentional.
|
||||||
if (
|
if (
|
||||||
merge_branch
|
merge_branch
|
||||||
and merge_branch != branch.project.revisionExpr
|
and merge_branch != full_revision
|
||||||
and merge_branch != full_dest
|
and merge_branch != full_dest
|
||||||
):
|
):
|
||||||
print(
|
print(
|
||||||
f"For local branch {branch.name}: merge branch "
|
f"For local branch {branch.name}: merge branch "
|
||||||
f"{merge_branch} does not match destination branch "
|
f"{merge_branch} does not match destination branch "
|
||||||
f"{destination}"
|
f"{destination} and revision {branch.project.revisionExpr}"
|
||||||
)
|
)
|
||||||
print("skipping upload.")
|
print("skipping upload.")
|
||||||
print(
|
print(
|
||||||
@ -645,7 +631,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
branch.UploadForReview(
|
branch.UploadForReview(
|
||||||
people,
|
people,
|
||||||
dryrun=opt.dryrun,
|
dryrun=opt.dryrun,
|
||||||
auto_topic=opt.auto_topic,
|
topic=opt.topic,
|
||||||
hashtags=hashtags,
|
hashtags=hashtags,
|
||||||
labels=labels,
|
labels=labels,
|
||||||
private=opt.private,
|
private=opt.private,
|
||||||
@ -655,6 +641,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
dest_branch=destination,
|
dest_branch=destination,
|
||||||
validate_certs=opt.validate_certs,
|
validate_certs=opt.validate_certs,
|
||||||
push_options=opt.push_options,
|
push_options=opt.push_options,
|
||||||
|
patchset_description=opt.patchset_description,
|
||||||
)
|
)
|
||||||
|
|
||||||
branch.uploaded = True
|
branch.uploaded = True
|
||||||
@ -729,16 +716,17 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
merge_branch = p.stdout.strip()
|
merge_branch = p.stdout.strip()
|
||||||
return merge_branch
|
return merge_branch
|
||||||
|
|
||||||
@staticmethod
|
@classmethod
|
||||||
def _GatherOne(opt, project):
|
def _GatherOne(cls, opt, project_idx):
|
||||||
"""Figure out the upload status for |project|."""
|
"""Figure out the upload status for |project|."""
|
||||||
|
project = cls.get_parallel_context()["projects"][project_idx]
|
||||||
if opt.current_branch:
|
if opt.current_branch:
|
||||||
cbr = project.CurrentBranch
|
cbr = project.CurrentBranch
|
||||||
up_branch = project.GetUploadableBranch(cbr)
|
up_branch = project.GetUploadableBranch(cbr)
|
||||||
avail = [up_branch] if up_branch else None
|
avail = [up_branch] if up_branch else None
|
||||||
else:
|
else:
|
||||||
avail = project.GetUploadableBranches(opt.branch)
|
avail = project.GetUploadableBranches(opt.branch)
|
||||||
return (project, avail)
|
return (project_idx, avail)
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
projects = self.GetProjects(
|
projects = self.GetProjects(
|
||||||
@ -748,7 +736,8 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
def _ProcessResults(_pool, _out, results):
|
def _ProcessResults(_pool, _out, results):
|
||||||
pending = []
|
pending = []
|
||||||
for result in results:
|
for result in results:
|
||||||
project, avail = result
|
project_idx, avail = result
|
||||||
|
project = projects[project_idx]
|
||||||
if avail is None:
|
if avail is None:
|
||||||
logger.error(
|
logger.error(
|
||||||
'repo: error: %s: Unable to upload branch "%s". '
|
'repo: error: %s: Unable to upload branch "%s". '
|
||||||
@ -759,13 +748,15 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
|||||||
project.manifest.branch,
|
project.manifest.branch,
|
||||||
)
|
)
|
||||||
elif avail:
|
elif avail:
|
||||||
pending.append(result)
|
pending.append((project, avail))
|
||||||
return pending
|
return pending
|
||||||
|
|
||||||
|
with self.ParallelContext():
|
||||||
|
self.get_parallel_context()["projects"] = projects
|
||||||
pending = self.ExecuteInParallel(
|
pending = self.ExecuteInParallel(
|
||||||
opt.jobs,
|
opt.jobs,
|
||||||
functools.partial(self._GatherOne, opt),
|
functools.partial(self._GatherOne, opt),
|
||||||
projects,
|
range(len(projects)),
|
||||||
callback=_ProcessResults,
|
callback=_ProcessResults,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -42,35 +42,28 @@ class Version(Command, MirrorSafeCommand):
|
|||||||
# These might not be the same. Report them both.
|
# These might not be the same. Report them both.
|
||||||
src_ver = RepoSourceVersion()
|
src_ver = RepoSourceVersion()
|
||||||
rp_ver = rp.bare_git.describe(HEAD)
|
rp_ver = rp.bare_git.describe(HEAD)
|
||||||
print("repo version %s" % rp_ver)
|
print(f"repo version {rp_ver}")
|
||||||
print(" (from %s)" % rem.url)
|
print(f" (from {rem.url})")
|
||||||
print(" (tracking %s)" % branch.merge)
|
print(f" (tracking {branch.merge})")
|
||||||
print(" (%s)" % rp.bare_git.log("-1", "--format=%cD", HEAD))
|
print(f" ({rp.bare_git.log('-1', '--format=%cD', HEAD)})")
|
||||||
|
|
||||||
if self.wrapper_path is not None:
|
if self.wrapper_path is not None:
|
||||||
print("repo launcher version %s" % self.wrapper_version)
|
print(f"repo launcher version {self.wrapper_version}")
|
||||||
print(" (from %s)" % self.wrapper_path)
|
print(f" (from {self.wrapper_path})")
|
||||||
|
|
||||||
if src_ver != rp_ver:
|
if src_ver != rp_ver:
|
||||||
print(" (currently at %s)" % src_ver)
|
print(f" (currently at {src_ver})")
|
||||||
|
|
||||||
print("repo User-Agent %s" % user_agent.repo)
|
print(f"repo User-Agent {user_agent.repo}")
|
||||||
print("git %s" % git.version_tuple().full)
|
print(f"git {git.version_tuple().full}")
|
||||||
print("git User-Agent %s" % user_agent.git)
|
print(f"git User-Agent {user_agent.git}")
|
||||||
print("Python %s" % sys.version)
|
print(f"Python {sys.version}")
|
||||||
uname = platform.uname()
|
uname = platform.uname()
|
||||||
if sys.version_info.major < 3:
|
if sys.version_info.major < 3:
|
||||||
# Python 3 returns a named tuple, but Python 2 is simpler.
|
# Python 3 returns a named tuple, but Python 2 is simpler.
|
||||||
print(uname)
|
print(uname)
|
||||||
else:
|
else:
|
||||||
print(
|
print(f"OS {uname.system} {uname.release} ({uname.version})")
|
||||||
"OS %s %s (%s)" % (uname.system, uname.release, uname.version)
|
processor = uname.processor if uname.processor else "unknown"
|
||||||
)
|
print(f"CPU {uname.machine} ({processor})")
|
||||||
print(
|
|
||||||
"CPU %s (%s)"
|
|
||||||
% (
|
|
||||||
uname.machine,
|
|
||||||
uname.processor if uname.processor else "unknown",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
print("Bug reports:", Wrapper().BUG_URL)
|
print("Bug reports:", Wrapper().BUG_URL)
|
||||||
|
@ -72,3 +72,12 @@ def tmp_home_dir(monkeypatch, tmp_path_factory):
|
|||||||
the function scope.
|
the function scope.
|
||||||
"""
|
"""
|
||||||
return _set_home(monkeypatch, tmp_path_factory.mktemp("home"))
|
return _set_home(monkeypatch, tmp_path_factory.mktemp("home"))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def setup_user_identity(monkeysession, scope="session"):
|
||||||
|
"""Set env variables for author and committer name and email."""
|
||||||
|
monkeysession.setenv("GIT_AUTHOR_NAME", "Foo Bar")
|
||||||
|
monkeysession.setenv("GIT_COMMITTER_NAME", "Foo Bar")
|
||||||
|
monkeysession.setenv("GIT_AUTHOR_EMAIL", "foo@bar.baz")
|
||||||
|
monkeysession.setenv("GIT_COMMITTER_EMAIL", "foo@bar.baz")
|
||||||
|
1
tests/fixtures/gitc_config
vendored
1
tests/fixtures/gitc_config
vendored
@ -1 +0,0 @@
|
|||||||
gitc_dir=/test/usr/local/google/gitc
|
|
8
tests/fixtures/test.gitconfig
vendored
8
tests/fixtures/test.gitconfig
vendored
@ -11,3 +11,11 @@
|
|||||||
intk = 10k
|
intk = 10k
|
||||||
intm = 10m
|
intm = 10m
|
||||||
intg = 10g
|
intg = 10g
|
||||||
|
|
||||||
|
[color "status"]
|
||||||
|
one = yellow
|
||||||
|
two = magenta cyan
|
||||||
|
three = black red ul
|
||||||
|
reset = reset
|
||||||
|
none
|
||||||
|
empty =
|
||||||
|
74
tests/test_color.py
Normal file
74
tests/test_color.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# Copyright (C) 2024 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the color.py module."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import color
|
||||||
|
import git_config
|
||||||
|
|
||||||
|
|
||||||
|
def fixture(*paths):
|
||||||
|
"""Return a path relative to test/fixtures."""
|
||||||
|
return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
|
||||||
|
|
||||||
|
|
||||||
|
class ColoringTests(unittest.TestCase):
|
||||||
|
"""tests of the Coloring class."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Create a GitConfig object using the test.gitconfig fixture."""
|
||||||
|
config_fixture = fixture("test.gitconfig")
|
||||||
|
self.config = git_config.GitConfig(config_fixture)
|
||||||
|
color.SetDefaultColoring("true")
|
||||||
|
self.color = color.Coloring(self.config, "status")
|
||||||
|
|
||||||
|
def test_Color_Parse_all_params_none(self):
|
||||||
|
"""all params are None"""
|
||||||
|
val = self.color._parse(None, None, None, None)
|
||||||
|
self.assertEqual("", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_first_parameter_none(self):
|
||||||
|
"""check fg & bg & attr"""
|
||||||
|
val = self.color._parse(None, "black", "red", "ul")
|
||||||
|
self.assertEqual("\x1b[4;30;41m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_one_entry(self):
|
||||||
|
"""check fg"""
|
||||||
|
val = self.color._parse("one", None, None, None)
|
||||||
|
self.assertEqual("\033[33m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_two_entry(self):
|
||||||
|
"""check fg & bg"""
|
||||||
|
val = self.color._parse("two", None, None, None)
|
||||||
|
self.assertEqual("\033[35;46m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_three_entry(self):
|
||||||
|
"""check fg & bg & attr"""
|
||||||
|
val = self.color._parse("three", None, None, None)
|
||||||
|
self.assertEqual("\033[4;30;41m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_reset_entry(self):
|
||||||
|
"""check reset entry"""
|
||||||
|
val = self.color._parse("reset", None, None, None)
|
||||||
|
self.assertEqual("\033[m", val)
|
||||||
|
|
||||||
|
def test_Color_Parse_empty_entry(self):
|
||||||
|
"""check empty entry"""
|
||||||
|
val = self.color._parse("none", "blue", "white", "dim")
|
||||||
|
self.assertEqual("\033[2;34;47m", val)
|
||||||
|
val = self.color._parse("empty", "green", "white", "bold")
|
||||||
|
self.assertEqual("\033[1;32;47m", val)
|
@ -19,12 +19,9 @@ import os
|
|||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
import unittest
|
import unittest
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
try:
|
|
||||||
from unittest import mock
|
|
||||||
except ImportError:
|
|
||||||
import mock
|
|
||||||
|
|
||||||
import git_command
|
import git_command
|
||||||
import wrapper
|
import wrapper
|
||||||
@ -268,6 +265,7 @@ class UserAgentUnitTest(unittest.TestCase):
|
|||||||
m = re.match(r"^[^ ]+$", os_name)
|
m = re.match(r"^[^ ]+$", os_name)
|
||||||
self.assertIsNotNone(m)
|
self.assertIsNotNone(m)
|
||||||
|
|
||||||
|
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this fails in CQ")
|
||||||
def test_smoke_repo(self):
|
def test_smoke_repo(self):
|
||||||
"""Make sure repo UA returns something useful."""
|
"""Make sure repo UA returns something useful."""
|
||||||
ua = git_command.user_agent.repo
|
ua = git_command.user_agent.repo
|
||||||
@ -276,6 +274,7 @@ class UserAgentUnitTest(unittest.TestCase):
|
|||||||
m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
|
m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
|
||||||
self.assertIsNotNone(m)
|
self.assertIsNotNone(m)
|
||||||
|
|
||||||
|
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this fails in CQ")
|
||||||
def test_smoke_git(self):
|
def test_smoke_git(self):
|
||||||
"""Make sure git UA returns something useful."""
|
"""Make sure git UA returns something useful."""
|
||||||
ua = git_command.user_agent.git
|
ua = git_command.user_agent.git
|
||||||
|
@ -100,7 +100,7 @@ class GitConfigReadOnlyTests(unittest.TestCase):
|
|||||||
("intg", 10737418240),
|
("intg", 10737418240),
|
||||||
)
|
)
|
||||||
for key, value in TESTS:
|
for key, value in TESTS:
|
||||||
self.assertEqual(value, self.config.GetInt("section.%s" % (key,)))
|
self.assertEqual(value, self.config.GetInt(f"section.{key}"))
|
||||||
|
|
||||||
|
|
||||||
class GitConfigReadWriteTests(unittest.TestCase):
|
class GitConfigReadWriteTests(unittest.TestCase):
|
||||||
|
@ -21,6 +21,7 @@ import tempfile
|
|||||||
import unittest
|
import unittest
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
from test_manifest_xml import sort_attributes
|
from test_manifest_xml import sort_attributes
|
||||||
|
|
||||||
import git_superproject
|
import git_superproject
|
||||||
@ -34,7 +35,7 @@ class SuperprojectTestCase(unittest.TestCase):
|
|||||||
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
||||||
PARENT_SID_VALUE = "parent_sid"
|
PARENT_SID_VALUE = "parent_sid"
|
||||||
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
||||||
FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
FULL_SID_REGEX = rf"^{PARENT_SID_VALUE}/{SELF_SID_REGEX}"
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
"""Set up superproject every time."""
|
"""Set up superproject every time."""
|
||||||
@ -145,6 +146,7 @@ class SuperprojectTestCase(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
self.assertIsNone(manifest.superproject)
|
self.assertIsNone(manifest.superproject)
|
||||||
|
|
||||||
|
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this takes 8m+ in CQ")
|
||||||
def test_superproject_get_superproject_invalid_url(self):
|
def test_superproject_get_superproject_invalid_url(self):
|
||||||
"""Test with an invalid url."""
|
"""Test with an invalid url."""
|
||||||
manifest = self.getXmlManifest(
|
manifest = self.getXmlManifest(
|
||||||
@ -168,6 +170,7 @@ class SuperprojectTestCase(unittest.TestCase):
|
|||||||
self.assertFalse(sync_result.success)
|
self.assertFalse(sync_result.success)
|
||||||
self.assertTrue(sync_result.fatal)
|
self.assertTrue(sync_result.fatal)
|
||||||
|
|
||||||
|
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this takes 8m+ in CQ")
|
||||||
def test_superproject_get_superproject_invalid_branch(self):
|
def test_superproject_get_superproject_invalid_branch(self):
|
||||||
"""Test with an invalid branch."""
|
"""Test with an invalid branch."""
|
||||||
manifest = self.getXmlManifest(
|
manifest = self.getXmlManifest(
|
||||||
|
@ -61,7 +61,7 @@ class EventLogTestCase(unittest.TestCase):
|
|||||||
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
||||||
PARENT_SID_VALUE = "parent_sid"
|
PARENT_SID_VALUE = "parent_sid"
|
||||||
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
||||||
FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
FULL_SID_REGEX = rf"^{PARENT_SID_VALUE}/{SELF_SID_REGEX}"
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
"""Load the event_log module every time."""
|
"""Load the event_log module every time."""
|
||||||
@ -150,7 +150,7 @@ class EventLogTestCase(unittest.TestCase):
|
|||||||
<version event>
|
<version event>
|
||||||
<start event>
|
<start event>
|
||||||
"""
|
"""
|
||||||
self._event_log_module.StartEvent()
|
self._event_log_module.StartEvent([])
|
||||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||||
log_path = self._event_log_module.Write(path=tempdir)
|
log_path = self._event_log_module.Write(path=tempdir)
|
||||||
self._log_data = self.readLog(log_path)
|
self._log_data = self.readLog(log_path)
|
||||||
@ -213,10 +213,8 @@ class EventLogTestCase(unittest.TestCase):
|
|||||||
<version event>
|
<version event>
|
||||||
<command event>
|
<command event>
|
||||||
"""
|
"""
|
||||||
name = "repo"
|
|
||||||
subcommands = ["init" "this"]
|
|
||||||
self._event_log_module.CommandEvent(
|
self._event_log_module.CommandEvent(
|
||||||
name="repo", subcommands=subcommands
|
name="repo", subcommands=["init", "this"]
|
||||||
)
|
)
|
||||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||||
log_path = self._event_log_module.Write(path=tempdir)
|
log_path = self._event_log_module.Write(path=tempdir)
|
||||||
@ -225,12 +223,10 @@ class EventLogTestCase(unittest.TestCase):
|
|||||||
self.assertEqual(len(self._log_data), 2)
|
self.assertEqual(len(self._log_data), 2)
|
||||||
command_event = self._log_data[1]
|
command_event = self._log_data[1]
|
||||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||||
self.verifyCommonKeys(command_event, expected_event_name="command")
|
self.verifyCommonKeys(command_event, expected_event_name="cmd_name")
|
||||||
# Check for 'command' event specific fields.
|
# Check for 'command' event specific fields.
|
||||||
self.assertIn("name", command_event)
|
self.assertIn("name", command_event)
|
||||||
self.assertIn("subcommands", command_event)
|
self.assertEqual(command_event["name"], "repo-init-this")
|
||||||
self.assertEqual(command_event["name"], name)
|
|
||||||
self.assertEqual(command_event["subcommands"], subcommands)
|
|
||||||
|
|
||||||
def test_def_params_event_repo_config(self):
|
def test_def_params_event_repo_config(self):
|
||||||
"""Test 'def_params' event data outputs only repo config keys.
|
"""Test 'def_params' event data outputs only repo config keys.
|
||||||
@ -382,17 +378,17 @@ class EventLogTestCase(unittest.TestCase):
|
|||||||
socket_path = os.path.join(tempdir, "server.sock")
|
socket_path = os.path.join(tempdir, "server.sock")
|
||||||
server_ready = threading.Condition()
|
server_ready = threading.Condition()
|
||||||
# Start "server" listening on Unix domain socket at socket_path.
|
# Start "server" listening on Unix domain socket at socket_path.
|
||||||
try:
|
|
||||||
server_thread = threading.Thread(
|
server_thread = threading.Thread(
|
||||||
target=serverLoggingThread,
|
target=serverLoggingThread,
|
||||||
args=(socket_path, server_ready, received_traces),
|
args=(socket_path, server_ready, received_traces),
|
||||||
)
|
)
|
||||||
|
try:
|
||||||
server_thread.start()
|
server_thread.start()
|
||||||
|
|
||||||
with server_ready:
|
with server_ready:
|
||||||
server_ready.wait(timeout=120)
|
server_ready.wait(timeout=120)
|
||||||
|
|
||||||
self._event_log_module.StartEvent()
|
self._event_log_module.StartEvent([])
|
||||||
path = self._event_log_module.Write(
|
path = self._event_log_module.Write(
|
||||||
path=f"af_unix:{socket_path}"
|
path=f"af_unix:{socket_path}"
|
||||||
)
|
)
|
||||||
|
tests/test_manifest_xml.py
@@ -51,7 +51,7 @@ INVALID_FS_PATHS = (
     "foo~",
     "blah/foo~",
     # Block Unicode characters that get normalized out by filesystems.
-    "foo\u200Cbar",
+    "foo\u200cbar",
     # Block newlines.
     "f\n/bar",
     "f\r/bar",
@@ -198,13 +198,13 @@ class ValueTests(unittest.TestCase):
     def test_bool_true(self):
         """Check XmlBool true values."""
         for value in ("yes", "true", "1"):
-            node = self._get_node('<node a="%s"/>' % (value,))
+            node = self._get_node(f'<node a="{value}"/>')
             self.assertTrue(manifest_xml.XmlBool(node, "a"))

     def test_bool_false(self):
         """Check XmlBool false values."""
         for value in ("no", "false", "0"):
-            node = self._get_node('<node a="%s"/>' % (value,))
+            node = self._get_node(f'<node a="{value}"/>')
             self.assertFalse(manifest_xml.XmlBool(node, "a"))

     def test_int_default(self):
@@ -220,7 +220,7 @@ class ValueTests(unittest.TestCase):
     def test_int_good(self):
         """Check XmlInt numeric handling."""
         for value in (-1, 0, 1, 50000):
-            node = self._get_node('<node a="%s"/>' % (value,))
+            node = self._get_node(f'<node a="{value}"/>')
             self.assertEqual(value, manifest_xml.XmlInt(node, "a"))

     def test_int_invalid(self):
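The helpers exercised here, manifest_xml.XmlBool and manifest_xml.XmlInt, read an attribute off a parsed manifest node. A rough sketch consistent with the values these tests accept; the real implementations in manifest_xml.py (default handling, error type) may differ:

    def XmlBool(node, attr, default=None):
        """Parse an XML attribute as a boolean (sketch)."""
        value = node.getAttribute(attr)
        if not value:
            return default
        value = value.lower()
        if value in ("yes", "true", "1"):
            return True
        if value in ("no", "false", "0"):
            return False
        return default

    def XmlInt(node, attr, default=None):
        """Parse an XML attribute as an integer (sketch)."""
        value = node.getAttribute(attr)
        if not value:
            return default
        try:
            return int(value, 10)
        except ValueError:
            raise ValueError(f"manifest: invalid {attr}={value!r}")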
@@ -1049,6 +1049,91 @@ class RemoveProjectElementTests(ManifestParseTestCase):
         self.assertTrue(found_proj1_path1)
         self.assertTrue(found_proj2)

+    def test_base_revision_checks_on_patching(self):
+        manifest_fail_wrong_tag = self.getXmlManifest(
+            """
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="tag.002" />
+  <project name="project1" path="tests/path1" />
+  <extend-project name="project1" revision="new_hash" base-rev="tag.001" />
+</manifest>
+"""
+        )
+        with self.assertRaises(error.ManifestParseError):
+            manifest_fail_wrong_tag.ToXml()
+
+        manifest_fail_remove = self.getXmlManifest(
+            """
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="project1" path="tests/path1" revision="hash1" />
+  <remove-project name="project1" base-rev="wrong_hash" />
+</manifest>
+"""
+        )
+        with self.assertRaises(error.ManifestParseError):
+            manifest_fail_remove.ToXml()
+
+        manifest_fail_extend = self.getXmlManifest(
+            """
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="project1" path="tests/path1" revision="hash1" />
+  <extend-project name="project1" revision="new_hash" base-rev="wrong_hash" />
+</manifest>
+"""
+        )
+        with self.assertRaises(error.ManifestParseError):
+            manifest_fail_extend.ToXml()
+
+        manifest_fail_unknown = self.getXmlManifest(
+            """
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="project1" path="tests/path1" />
+  <extend-project name="project1" revision="new_hash" base-rev="any_hash" />
+</manifest>
+"""
+        )
+        with self.assertRaises(error.ManifestParseError):
+            manifest_fail_unknown.ToXml()
+
+        manifest_ok = self.getXmlManifest(
+            """
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <project name="project1" path="tests/path1" revision="hash1" />
+  <project name="project2" path="tests/path2" revision="hash2" />
+  <project name="project3" path="tests/path3" revision="hash3" />
+  <project name="project4" path="tests/path4" revision="hash4" />
+
+  <remove-project name="project1" />
+  <remove-project name="project2" base-rev="hash2" />
+  <project name="project2" path="tests/path2" revision="new_hash2" />
+  <extend-project name="project3" base-rev="hash3" revision="new_hash3" />
+  <extend-project name="project3" base-rev="new_hash3" revision="newer_hash3" />
+  <remove-project path="tests/path4" base-rev="hash4" />
+</manifest>
+"""
+        )
+        found_proj2 = False
+        found_proj3 = False
+        for proj in manifest_ok.projects:
+            if proj.name == "project2":
+                found_proj2 = True
+            if proj.name == "project3":
+                found_proj3 = True
+            self.assertNotEqual(proj.name, "project1")
+            self.assertNotEqual(proj.name, "project4")
+        self.assertTrue(found_proj2)
+        self.assertTrue(found_proj3)
+        self.assertTrue(len(manifest_ok.projects) == 2)
+

 class ExtendProjectElementTests(ManifestParseTestCase):
     """Tests for <extend-project>."""
@@ -1128,3 +1213,79 @@ class ExtendProjectElementTests(ManifestParseTestCase):
         )
         self.assertEqual(len(manifest.projects), 1)
         self.assertEqual(manifest.projects[0].upstream, "bar")
+
+
+class NormalizeUrlTests(ManifestParseTestCase):
+    """Tests for normalize_url() in manifest_xml.py"""
+
+    def test_has_trailing_slash(self):
+        url = "http://foo.com/bar/baz/"
+        self.assertEqual(
+            "http://foo.com/bar/baz", manifest_xml.normalize_url(url)
+        )
+
+        url = "http://foo.com/bar/"
+        self.assertEqual("http://foo.com/bar", manifest_xml.normalize_url(url))
+
+    def test_has_leading_slash(self):
+        """SCP-like syntax except a / comes before the : which git disallows."""
+        url = "/git@foo.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "gi/t@foo.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "git@fo/o.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+    def test_has_no_scheme(self):
+        """Deal with cases where we have no scheme, but we also
+        aren't dealing with the git SCP-like syntax
+        """
+        url = "foo.com/baf/bat"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "foo.com/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "git@foo.com/baf/bat"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "git@foo.com/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "/file/path/here"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+    def test_has_no_scheme_matches_scp_like_syntax(self):
+        url = "git@foo.com:bar/baf"
+        self.assertEqual(
+            "ssh://git@foo.com/bar/baf", manifest_xml.normalize_url(url)
+        )
+
+        url = "git@foo.com:bar/"
+        self.assertEqual(
+            "ssh://git@foo.com/bar", manifest_xml.normalize_url(url)
+        )
+
+    def test_remote_url_resolution(self):
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="git@github.com:org2/",
+            manifestUrl="git@github.com:org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
+
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="ssh://git@github.com/org2/",
+            manifestUrl="git@github.com:org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
+
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="git@github.com:org2/",
+            manifestUrl="ssh://git@github.com/org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
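Taken together, these tests pin down normalize_url(): strip a trailing slash, rewrite git's SCP-like user@host:path form to ssh://user@host/path, and leave URLs with an explicit scheme, a leading slash, or a "/" before the ":" untouched. A sketch that satisfies the cases above (the real function in manifest_xml.py may be written differently):

    import re

    def normalize_url(url):
        """Normalize a manifest fetch URL (sketch inferred from the tests above)."""
        url = url.rstrip("/")
        # An explicit scheme (http://, ssh://, ...) is left as-is.
        if "://" in url:
            return url
        # SCP-like syntax: user@host:path with no "/" before the ":".
        match = re.match(r"^([^/:]+@[^/:]+):(.+)$", url)
        if match:
            host, path = match.groups()
            return f"ssh://{host}/{path}"
        return url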
tests/test_project.py
@@ -107,6 +107,16 @@ class ReviewableBranchTests(unittest.TestCase):
         self.assertTrue(rb.date)


+class ProjectTests(unittest.TestCase):
+    """Check Project behavior."""
+
+    def test_encode_patchset_description(self):
+        self.assertEqual(
+            project.Project._encode_patchset_description("abcd00!! +"),
+            "abcd00%21%21_%2b",
+        )
+
+
 class CopyLinkTestCase(unittest.TestCase):
     """TestCase for stub repo client checkouts.

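The single assertion above ("abcd00!! +" becomes "abcd00%21%21_%2b") implies the encoder keeps alphanumerics, maps spaces to "_", and percent-encodes everything else in lowercase hex. A sketch with that behavior; the real Project._encode_patchset_description may handle non-ASCII input differently:

    def _encode_patchset_description(original):
        """Percent-encode a patchset description (sketch)."""
        encoded = []
        for ch in original:
            if ch.isalnum():
                encoded.append(ch)
            elif ch == " ":
                encoded.append("_")
            else:
                encoded.append("%{:02x}".format(ord(ch)))
        return "".join(encoded)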
@@ -151,7 +161,7 @@ class CopyLinkTestCase(unittest.TestCase):
                 # "".
                 break
             result = os.path.exists(path)
-            msg.append("\tos.path.exists(%s): %s" % (path, result))
+            msg.append(f"\tos.path.exists({path}): {result}")
             if result:
                 msg.append("\tcontents: %r" % os.listdir(path))
                 break
tests/test_repo_logging.py
@@ -13,9 +13,14 @@
 # limitations under the License.

 """Unit test for repo_logging module."""

+import contextlib
+import io
+import logging
 import unittest
 from unittest import mock

+from color import SetDefaultColoring
 from error import RepoExitError
 from repo_logging import RepoLogger

@@ -62,3 +67,35 @@ class TestRepoLogger(unittest.TestCase):
                 mock.call("Repo command failed: %s", "RepoExitError"),
             ]
         )
+
+    def test_log_with_format_string(self):
+        """Test different log levels with format strings."""
+
+        # Set color output to "always" for consistent test results.
+        # This ensures the logger's behavior is uniform across different
+        # environments and git configurations.
+        SetDefaultColoring("always")
+
+        # Regex pattern to match optional ANSI color codes.
+        # \033    - Escape character
+        # \[      - Opening square bracket
+        # [0-9;]* - Zero or more digits or semicolons
+        # m       - Ending 'm' character
+        # ?       - Makes the entire group optional
+        opt_color = r"(\033\[[0-9;]*m)?"
+
+        for level in (logging.INFO, logging.WARN, logging.ERROR):
+            name = logging.getLevelName(level)
+
+            with self.subTest(level=level, name=name):
+                output = io.StringIO()
+
+                with contextlib.redirect_stderr(output):
+                    logger = RepoLogger(__name__)
+                    logger.log(level, "%s", "100% pass")
+
+                self.assertRegex(
+                    output.getvalue().strip(),
+                    f"^{opt_color}100% pass{opt_color}$",
+                    f"failed for level {name}",
+                )
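The new test leans on logging's printf-style deferred formatting: because the literal text is passed as an argument rather than embedded in the format string, a stray "%" in it is never interpreted as a conversion specifier. A standalone illustration with the stock logging module:

    import logging

    logging.basicConfig(format="%(message)s")
    log = logging.getLogger("example")

    # Only the format string "%s" is scanned for conversion specifiers,
    # so the "%" inside the argument is printed literally.
    log.warning("%s", "100% pass")  # prints: 100% pass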
tests/test_subcmds_forall.py (new file, 156 lines)
@@ -0,0 +1,156 @@
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the forall subcmd."""

from io import StringIO
import os
from shutil import rmtree
import subprocess
import tempfile
import unittest
from unittest import mock

import git_command
import manifest_xml
import project
import subcmds


class AllCommands(unittest.TestCase):
    """Check registered all_commands."""

    def setUp(self):
        """Common setup."""
        self.tempdirobj = tempfile.TemporaryDirectory(prefix="forall_tests")
        self.tempdir = self.tempdirobj.name
        self.repodir = os.path.join(self.tempdir, ".repo")
        self.manifest_dir = os.path.join(self.repodir, "manifests")
        self.manifest_file = os.path.join(
            self.repodir, manifest_xml.MANIFEST_FILE_NAME
        )
        self.local_manifest_dir = os.path.join(
            self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME
        )
        os.mkdir(self.repodir)
        os.mkdir(self.manifest_dir)

    def tearDown(self):
        """Common teardown."""
        rmtree(self.tempdir, ignore_errors=True)

    def initTempGitTree(self, git_dir):
        """Create a new empty git checkout for testing."""

        # Tests need to assume, that main is default branch at init,
        # which is not supported in config until 2.28.
        cmd = ["git", "init", "-q"]
        if git_command.git_require((2, 28, 0)):
            cmd += ["--initial-branch=main"]
        else:
            # Use template dir for init
            templatedir = os.path.join(self.tempdirobj.name, ".test-template")
            os.makedirs(templatedir)
            with open(os.path.join(templatedir, "HEAD"), "w") as fp:
                fp.write("ref: refs/heads/main\n")
            cmd += ["--template", templatedir]
        cmd += [git_dir]
        subprocess.check_call(cmd)

    def getXmlManifestWith8Projects(self):
        """Create and return a setup of 8 projects with enough dummy
        files and setup to execute forall."""

        # Set up a manifest git dir for parsing to work
        gitdir = os.path.join(self.repodir, "manifests.git")
        os.mkdir(gitdir)
        with open(os.path.join(gitdir, "config"), "w") as fp:
            fp.write(
                """[remote "origin"]
url = https://localhost:0/manifest
verbose = false
"""
            )

        # Add the manifest data
        manifest_data = """
        <manifest>
            <remote name="origin" fetch="http://localhost" />
            <default remote="origin" revision="refs/heads/main" />
            <project name="project1" path="tests/path1" />
            <project name="project2" path="tests/path2" />
            <project name="project3" path="tests/path3" />
            <project name="project4" path="tests/path4" />
            <project name="project5" path="tests/path5" />
            <project name="project6" path="tests/path6" />
            <project name="project7" path="tests/path7" />
            <project name="project8" path="tests/path8" />
        </manifest>
        """
        with open(self.manifest_file, "w", encoding="utf-8") as fp:
            fp.write(manifest_data)

        # Set up 8 empty projects to match the manifest
        for x in range(1, 9):
            os.makedirs(
                os.path.join(
                    self.repodir, "projects/tests/path" + str(x) + ".git"
                )
            )
            os.makedirs(
                os.path.join(
                    self.repodir, "project-objects/project" + str(x) + ".git"
                )
            )
            git_path = os.path.join(self.tempdir, "tests/path" + str(x))
            self.initTempGitTree(git_path)

        return manifest_xml.XmlManifest(self.repodir, self.manifest_file)

    # Use mock to capture stdout from the forall run
    @unittest.mock.patch("sys.stdout", new_callable=StringIO)
    def test_forall_all_projects_called_once(self, mock_stdout):
        """Test that all projects get a command run once each."""

        manifest_with_8_projects = self.getXmlManifestWith8Projects()

        cmd = subcmds.forall.Forall()
        cmd.manifest = manifest_with_8_projects

        # Use echo project names as the test of forall
        opts, args = cmd.OptionParser.parse_args(["-c", "echo $REPO_PROJECT"])
        opts.verbose = False

        # Mock to not have the Execute fail on remote check
        with mock.patch.object(
            project.Project, "GetRevisionId", return_value="refs/heads/main"
        ):
            # Run the forall command
            cmd.Execute(opts, args)

        # Verify that we got every project name in the prints
        for x in range(1, 9):
            self.assertIn("project" + str(x), mock_stdout.getvalue())

        # Split the captured output into lines to count them
        line_count = 0
        for line in mock_stdout.getvalue().split("\n"):
            # A commented out print to stderr as a reminder
            # that stdout is mocked, include sys and uncomment if needed
            # print(line, file=sys.stderr)
            if len(line) > 0:
                line_count += 1

        # Verify that we didn't get more lines than expected
        assert line_count == 8
tests/test_sync.py
@@ -265,6 +265,119 @@ class LocalSyncState(unittest.TestCase):
         self.assertIsNone(self.state.GetFetchTime(projA))
         self.assertEqual(self.state.GetFetchTime(projB), 7)

+    def test_prune_removed_and_symlinked_projects(self):
+        """Removed projects that still exists on disk as symlink are pruned."""
+        with open(self.state._path, "w") as f:
+            f.write(
+                """
+                {
+                  "projA": {
+                    "last_fetch": 5
+                  },
+                  "projB": {
+                    "last_fetch": 7
+                  }
+                }
+                """
+            )
+
+        def mock_exists(path):
+            return True
+
+        def mock_islink(path):
+            if "projB" in path:
+                return True
+            return False
+
+        projA = mock.MagicMock(relpath="projA")
+        projB = mock.MagicMock(relpath="projB")
+        self.state = self._new_state()
+        self.assertEqual(self.state.GetFetchTime(projA), 5)
+        self.assertEqual(self.state.GetFetchTime(projB), 7)
+        with mock.patch("os.path.exists", side_effect=mock_exists):
+            with mock.patch("os.path.islink", side_effect=mock_islink):
+                self.state.PruneRemovedProjects()
+                self.assertIsNone(self.state.GetFetchTime(projB))
+
+        self.state = self._new_state()
+        self.assertIsNone(self.state.GetFetchTime(projB))
+        self.assertEqual(self.state.GetFetchTime(projA), 5)
+
+
+class FakeProject:
+    def __init__(self, relpath):
+        self.relpath = relpath
+
+    def __str__(self):
+        return f"project: {self.relpath}"
+
+    def __repr__(self):
+        return str(self)
+
+
+class SafeCheckoutOrder(unittest.TestCase):
+    def test_no_nested(self):
+        p_f = FakeProject("f")
+        p_foo = FakeProject("foo")
+        out = sync._SafeCheckoutOrder([p_f, p_foo])
+        self.assertEqual(out, [[p_f, p_foo]])
+
+    def test_basic_nested(self):
+        p_foo = p_foo = FakeProject("foo")
+        p_foo_bar = FakeProject("foo/bar")
+        out = sync._SafeCheckoutOrder([p_foo, p_foo_bar])
+        self.assertEqual(out, [[p_foo], [p_foo_bar]])
+
+    def test_complex_nested(self):
+        p_foo = FakeProject("foo")
+        p_foobar = FakeProject("foobar")
+        p_foo_dash_bar = FakeProject("foo-bar")
+        p_foo_bar = FakeProject("foo/bar")
+        p_foo_bar_baz_baq = FakeProject("foo/bar/baz/baq")
+        p_bar = FakeProject("bar")
+        out = sync._SafeCheckoutOrder(
+            [
+                p_foo_bar_baz_baq,
+                p_foo,
+                p_foobar,
+                p_foo_dash_bar,
+                p_foo_bar,
+                p_bar,
+            ]
+        )
+        self.assertEqual(
+            out,
+            [
+                [p_bar, p_foo, p_foo_dash_bar, p_foobar],
+                [p_foo_bar],
+                [p_foo_bar_baz_baq],
+            ],
+        )
+
+
+class Chunksize(unittest.TestCase):
+    """Tests for _chunksize."""
+
+    def test_single_project(self):
+        """Single project."""
+        self.assertEqual(sync._chunksize(1, 1), 1)
+
+    def test_low_project_count(self):
+        """Multiple projects, low number of projects to sync."""
+        self.assertEqual(sync._chunksize(10, 1), 10)
+        self.assertEqual(sync._chunksize(10, 2), 5)
+        self.assertEqual(sync._chunksize(10, 4), 2)
+        self.assertEqual(sync._chunksize(10, 8), 1)
+        self.assertEqual(sync._chunksize(10, 16), 1)
+
+    def test_high_project_count(self):
+        """Multiple projects, high number of projects to sync."""
+        self.assertEqual(sync._chunksize(2800, 1), 32)
+        self.assertEqual(sync._chunksize(2800, 16), 32)
+        self.assertEqual(sync._chunksize(2800, 32), 32)
+        self.assertEqual(sync._chunksize(2800, 64), 32)
+        self.assertEqual(sync._chunksize(2800, 128), 21)
+

 class GetPreciousObjectsState(unittest.TestCase):
     """Tests for _GetPreciousObjectsState."""
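The Chunksize expectations above fully constrain sync._chunksize for these inputs: roughly one share of the projects per job, clamped to the range [1, 32]. A sketch that reproduces every asserted value (the actual helper in sync.py may be expressed differently):

    def _chunksize(projects, jobs):
        """Projects to hand each worker at a time, clamped to [1, 32] (sketch)."""
        return max(1, min(projects // jobs, 32))

The _SafeCheckoutOrder tests likewise document the invariant that an enclosing checkout ("foo") must land in an earlier batch than anything nested under it ("foo/bar", "foo/bar/baz/baq"), while unrelated paths ("foo-bar", "foobar", "bar") can share the first batch.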
tests/test_wrapper.py
@@ -17,6 +17,7 @@
 import io
 import os
 import re
+import subprocess
 import sys
 import tempfile
 import unittest
@@ -72,84 +73,11 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):

     def test_init_parser(self):
         """Make sure 'init' GetParser works."""
-        parser = self.wrapper.GetParser(gitc_init=False)
+        parser = self.wrapper.GetParser()
         opts, args = parser.parse_args([])
         self.assertEqual([], args)
         self.assertIsNone(opts.manifest_url)

-    def test_gitc_init_parser(self):
-        """Make sure 'gitc-init' GetParser raises."""
-        with self.assertRaises(SystemExit):
-            self.wrapper.GetParser(gitc_init=True)
-
-    def test_get_gitc_manifest_dir_no_gitc(self):
-        """
-        Test reading a missing gitc config file
-        """
-        self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
-        val = self.wrapper.get_gitc_manifest_dir()
-        self.assertEqual(val, "")
-
-    def test_get_gitc_manifest_dir(self):
-        """
-        Test reading the gitc config file and parsing the directory
-        """
-        self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
-        val = self.wrapper.get_gitc_manifest_dir()
-        self.assertEqual(val, "/test/usr/local/google/gitc")
-
-    def test_gitc_parse_clientdir_no_gitc(self):
-        """
-        Test parsing the gitc clientdir without gitc running
-        """
-        self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
-        self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
-        )
-
-    def test_gitc_parse_clientdir(self):
-        """
-        Test parsing the gitc clientdir
-        """
-        self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
-        self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/"), "test"
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/extra"),
-            "test",
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir(
-                "/test/usr/local/google/gitc/test"
-            ),
-            "test",
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir(
-                "/test/usr/local/google/gitc/test/"
-            ),
-            "test",
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir(
-                "/test/usr/local/google/gitc/test/extra"
-            ),
-            "test",
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/"), None
-        )
-        self.assertEqual(
-            self.wrapper.gitc_parse_clientdir("/test/usr/local/google/gitc/"),
-            None,
-        )
-

 class SetGitTrace2ParentSid(RepoWrapperTestCase):
     """Check SetGitTrace2ParentSid behavior."""
@@ -198,7 +126,7 @@ class RunCommand(RepoWrapperTestCase):
         self.wrapper.run_command(["true"], check=False)
         self.wrapper.run_command(["true"], check=True)
         self.wrapper.run_command(["false"], check=False)
-        with self.assertRaises(self.wrapper.RunError):
+        with self.assertRaises(subprocess.CalledProcessError):
            self.wrapper.run_command(["false"], check=True)


@@ -431,8 +359,8 @@ class VerifyRev(RepoWrapperTestCase):

     def test_verify_passes(self):
         """Check when we have a valid signed tag."""
-        desc_result = self.wrapper.RunResult(0, "v1.0\n", "")
-        gpg_result = self.wrapper.RunResult(0, "", "")
+        desc_result = subprocess.CompletedProcess([], 0, "v1.0\n", "")
+        gpg_result = subprocess.CompletedProcess([], 0, "", "")
         with mock.patch.object(
             self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
         ):
@@ -443,8 +371,8 @@ class VerifyRev(RepoWrapperTestCase):

     def test_unsigned_commit(self):
         """Check we fall back to signed tag when we have an unsigned commit."""
-        desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
-        gpg_result = self.wrapper.RunResult(0, "", "")
+        desc_result = subprocess.CompletedProcess([], 0, "v1.0-10-g1234\n", "")
+        gpg_result = subprocess.CompletedProcess([], 0, "", "")
         with mock.patch.object(
             self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
         ):
@@ -455,7 +383,7 @@ class VerifyRev(RepoWrapperTestCase):

     def test_verify_fails(self):
         """Check we fall back to signed tag when we have an unsigned commit."""
-        desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
+        desc_result = subprocess.CompletedProcess([], 0, "v1.0-10-g1234\n", "")
         gpg_result = Exception
         with mock.patch.object(
             self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
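Replacing the wrapper-specific RunError/RunResult types with subprocess.CalledProcessError and subprocess.CompletedProcess suggests run_command is now a thin layer over subprocess.run. A sketch of that shape, with the capture and encoding defaults assumed rather than taken from this diff:

    import subprocess

    def run_command(cmd, **kwargs):
        """Run |cmd| and return a subprocess.CompletedProcess (sketch).

        With check=True a non-zero exit raises subprocess.CalledProcessError,
        which is what the updated RunCommand test expects.
        """
        kwargs.setdefault("stdout", subprocess.PIPE)
        kwargs.setdefault("stderr", subprocess.PIPE)
        kwargs.setdefault("encoding", "utf-8")
        return subprocess.run(cmd, **kwargs)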
tox.ini
@@ -30,6 +30,7 @@ python =

 [testenv]
 deps =
+    -c constraints.txt
     black
     flake8
     isort
@@ -44,20 +45,19 @@ setenv =
 [testenv:lint]
 skip_install = true
 deps =
+    -c constraints.txt
     black
     flake8
 commands =
-    black --check {posargs:.}
+    black --check {posargs:. repo run_tests release/update-hooks release/update-manpages}
     flake8

 [testenv:format]
 skip_install = true
 deps =
+    -c constraints.txt
     black
     flake8
 commands =
-    black {posargs:.}
+    black {posargs:. repo run_tests release/update-hooks release/update-manpages}
     flake8
-
-[pytest]
-timeout = 300
wrapper.py
@@ -18,8 +18,12 @@ import importlib.util
 import os


+def WrapperDir():
+    return os.path.dirname(__file__)
+
+
 def WrapperPath():
-    return os.path.join(os.path.dirname(__file__), "repo")
+    return os.path.join(WrapperDir(), "repo")


 @functools.lru_cache(maxsize=None)
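The import importlib.util visible in the hunk header is there because the "repo" launcher script has no .py extension and has to be loaded explicitly. A sketch of that loading pattern, assuming details (module name, caching) not shown in this diff:

    import importlib.machinery
    import importlib.util

    def load_wrapper_module(path):
        """Import a Python script that lacks a .py extension (sketch)."""
        loader = importlib.machinery.SourceFileLoader("wrapper", path)
        spec = importlib.util.spec_from_loader("wrapper", loader)
        module = importlib.util.module_from_spec(spec)
        loader.exec_module(module)
        return module

    # e.g. load_wrapper_module(WrapperPath()) after the change above.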