Compare commits

..

No commits in common. "main" and "v2.39" have entirely different histories.
main ... v2.39

78 changed files with 1442 additions and 2976 deletions

View File

@ -194,7 +194,7 @@ class Coloring:
if not opt: if not opt:
return _Color(fg, bg, attr) return _Color(fg, bg, attr)
v = self._config.GetString(f"{self._section}.{opt}") v = self._config.GetString("%s.%s" % (self._section, opt))
if v is None: if v is None:
return _Color(fg, bg, attr) return _Color(fg, bg, attr)
@ -210,7 +210,6 @@ class Coloring:
if have_fg: if have_fg:
bg = a bg = a
else: else:
have_fg = True
fg = a fg = a
elif is_attr(a): elif is_attr(a):
attr = a attr = a

View File

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import contextlib
import multiprocessing import multiprocessing
import optparse import optparse
import os import os
@ -71,14 +70,6 @@ class Command:
# migrated subcommands can set it to False. # migrated subcommands can set it to False.
MULTI_MANIFEST_SUPPORT = True MULTI_MANIFEST_SUPPORT = True
# Shared data across parallel execution workers.
_parallel_context = None
@classmethod
def get_parallel_context(cls):
assert cls._parallel_context is not None
return cls._parallel_context
def __init__( def __init__(
self, self,
repodir=None, repodir=None,
@ -251,39 +242,9 @@ class Command:
"""Perform the action, after option parsing is complete.""" """Perform the action, after option parsing is complete."""
raise NotImplementedError raise NotImplementedError
@classmethod @staticmethod
@contextlib.contextmanager
def ParallelContext(cls):
"""Obtains the context, which is shared to ExecuteInParallel workers.
Callers can store data in the context dict before invocation of
ExecuteInParallel. The dict will then be shared to child workers of
ExecuteInParallel.
"""
assert cls._parallel_context is None
cls._parallel_context = {}
try:
yield
finally:
cls._parallel_context = None
@classmethod
def _InitParallelWorker(cls, context, initializer):
cls._parallel_context = context
if initializer:
initializer()
@classmethod
def ExecuteInParallel( def ExecuteInParallel(
cls, jobs, func, inputs, callback, output=None, ordered=False
jobs,
func,
inputs,
callback,
output=None,
ordered=False,
chunksize=WORKER_BATCH_SIZE,
initializer=None,
): ):
"""Helper for managing parallel execution boiler plate. """Helper for managing parallel execution boiler plate.
@ -308,9 +269,6 @@ class Command:
output: An output manager. May be progress.Progess or output: An output manager. May be progress.Progess or
color.Coloring. color.Coloring.
ordered: Whether the jobs should be processed in order. ordered: Whether the jobs should be processed in order.
chunksize: The number of jobs processed in batch by parallel
workers.
initializer: Worker initializer.
Returns: Returns:
The |callback| function's results are returned. The |callback| function's results are returned.
@ -320,16 +278,12 @@ class Command:
if len(inputs) == 1 or jobs == 1: if len(inputs) == 1 or jobs == 1:
return callback(None, output, (func(x) for x in inputs)) return callback(None, output, (func(x) for x in inputs))
else: else:
with multiprocessing.Pool( with multiprocessing.Pool(jobs) as pool:
jobs,
initializer=cls._InitParallelWorker,
initargs=(cls._parallel_context, initializer),
) as pool:
submit = pool.imap if ordered else pool.imap_unordered submit = pool.imap if ordered else pool.imap_unordered
return callback( return callback(
pool, pool,
output, output,
submit(func, inputs, chunksize=chunksize), submit(func, inputs, chunksize=WORKER_BATCH_SIZE),
) )
finally: finally:
if isinstance(output, progress.Progress): if isinstance(output, progress.Progress):
@ -547,3 +501,7 @@ class MirrorSafeCommand:
"""Command permits itself to run within a mirror, and does not require a """Command permits itself to run within a mirror, and does not require a
working directory. working directory.
""" """
class GitcClientCommand:
"""Command that requires the local client to be a GITC client."""

View File

@ -1,2 +0,0 @@
# NB: Keep in sync with run_tests.vpython3.
black<26

View File

@ -141,7 +141,7 @@ Instead, you should use standard Git workflows like [git worktree] or
(e.g. a local mirror & a public review server) while avoiding duplicating (e.g. a local mirror & a public review server) while avoiding duplicating
the content. However, this can run into problems if different remotes use the content. However, this can run into problems if different remotes use
the same path on their respective servers. Best to avoid that. the same path on their respective servers. Best to avoid that.
* `modules/`: Like `projects/`, but for git submodules. * `subprojects/`: Like `projects/`, but for git submodules.
* `subproject-objects/`: Like `project-objects/`, but for git submodules. * `subproject-objects/`: Like `project-objects/`, but for git submodules.
* `worktrees/`: Bare checkouts of every project synced by the manifest. The * `worktrees/`: Bare checkouts of every project synced by the manifest. The
filesystem layout matches the `<project name=...` setting in the manifest filesystem layout matches the `<project name=...` setting in the manifest

View File

@ -107,13 +107,11 @@ following DTD:
<!ATTLIST extend-project remote CDATA #IMPLIED> <!ATTLIST extend-project remote CDATA #IMPLIED>
<!ATTLIST extend-project dest-branch CDATA #IMPLIED> <!ATTLIST extend-project dest-branch CDATA #IMPLIED>
<!ATTLIST extend-project upstream CDATA #IMPLIED> <!ATTLIST extend-project upstream CDATA #IMPLIED>
<!ATTLIST extend-project base-rev CDATA #IMPLIED>
<!ELEMENT remove-project EMPTY> <!ELEMENT remove-project EMPTY>
<!ATTLIST remove-project name CDATA #IMPLIED> <!ATTLIST remove-project name CDATA #IMPLIED>
<!ATTLIST remove-project path CDATA #IMPLIED> <!ATTLIST remove-project path CDATA #IMPLIED>
<!ATTLIST remove-project optional CDATA #IMPLIED> <!ATTLIST remove-project optional CDATA #IMPLIED>
<!ATTLIST remove-project base-rev CDATA #IMPLIED>
<!ELEMENT repo-hooks EMPTY> <!ELEMENT repo-hooks EMPTY>
<!ATTLIST repo-hooks in-project CDATA #REQUIRED> <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
@ -231,7 +229,26 @@ At most one manifest-server may be specified. The url attribute
is used to specify the URL of a manifest server, which is an is used to specify the URL of a manifest server, which is an
XML RPC service. XML RPC service.
See the [smart sync documentation](./smart-sync.md) for more details. The manifest server should implement the following RPC methods:
GetApprovedManifest(branch, target)
Return a manifest in which each project is pegged to a known good revision
for the current branch and target. This is used by repo sync when the
--smart-sync option is given.
The target to use is defined by environment variables TARGET_PRODUCT
and TARGET_BUILD_VARIANT. These variables are used to create a string
of the form $TARGET_PRODUCT-$TARGET_BUILD_VARIANT, e.g. passion-userdebug.
If one of those variables or both are not present, the program will call
GetApprovedManifest without the target parameter and the manifest server
should choose a reasonable default target.
GetManifest(tag)
Return a manifest in which each project is pegged to the revision at
the specified tag. This is used by repo sync when the --smart-tag option
is given.
### Element submanifest ### Element submanifest
@ -416,14 +433,6 @@ project. Same syntax as the corresponding element of `project`.
Attribute `upstream`: If specified, overrides the upstream of the original Attribute `upstream`: If specified, overrides the upstream of the original
project. Same syntax as the corresponding element of `project`. project. Same syntax as the corresponding element of `project`.
Attribute `base-rev`: If specified, adds a check against the revision
to be extended. Manifest parse will fail and give a list of mismatch extends
if the revisions being extended have changed since base-rev was set.
Intended for use with layered manifests using hash revisions to prevent
patch branches hiding newer upstream revisions. Also compares named refs
like branches or tags but is misleading if branches are used as base-rev.
Same syntax as the corresponding element of `project`.
### Element annotation ### Element annotation
Zero or more annotation elements may be specified as children of a Zero or more annotation elements may be specified as children of a
@ -487,14 +496,6 @@ name. Logic otherwise behaves like both are specified.
Attribute `optional`: Set to true to ignore remove-project elements with no Attribute `optional`: Set to true to ignore remove-project elements with no
matching `project` element. matching `project` element.
Attribute `base-rev`: If specified, adds a check against the revision
to be removed. Manifest parse will fail and give a list of mismatch removes
if the revisions being removed have changed since base-rev was set.
Intended for use with layered manifests using hash revisions to prevent
patch branches hiding newer upstream revisions. Also compares named refs
like branches or tags but is misleading if branches are used as base-rev.
Same syntax as the corresponding element of `project`.
### Element repo-hooks ### Element repo-hooks
NB: See the [practical documentation](./repo-hooks.md) for using repo hooks. NB: See the [practical documentation](./repo-hooks.md) for using repo hooks.

View File

@ -33,8 +33,9 @@ you have newer versions installed, your choices are:
* Modify the [repo launcher]'s shebang to suite your environment. * Modify the [repo launcher]'s shebang to suite your environment.
* Download an older version of the [repo launcher] and don't upgrade it. * Download an older version of the [repo launcher] and don't upgrade it.
Be aware that we do not guarantee old repo launchers will work with current Be aware that there is no guarantee old repo launchers are WILL work with
versions of repo. Bug reports using old launchers will not be accepted. current versions of repo. Bug reports using old launchers will not be
accepted.
## When to drop support ## When to drop support

View File

@ -96,9 +96,6 @@ If that tag is valid, then repo will warn and use that commit instead.
If that tag cannot be verified, it gives up and forces the user to resolve. If that tag cannot be verified, it gives up and forces the user to resolve.
If env variable `REPO_SKIP_SELF_UPDATE` is defined, this will
bypass the self update algorithm.
### Force an update ### Force an update
The `repo selfupdate` command can be used to force an immediate update. The `repo selfupdate` command can be used to force an immediate update.
@ -205,7 +202,7 @@ still support them.
Things in italics are things we used to care about but probably don't anymore. Things in italics are things we used to care about but probably don't anymore.
| Date | EOL | [Git][rel-g] | [Python][rel-p] | [SSH][rel-o] | [Ubuntu][rel-u] / [Debian][rel-d] | Git | Python | SSH | | Date | EOL | [Git][rel-g] | [Python][rel-p] | [SSH][rel-o] | [Ubuntu][rel-u] / [Debian][rel-d] | Git | Python | SSH |
|:--------:|:------------:|:------------:|:---------------:|:------------:|-----------------------------------|:---:|:------:|:---:| |:--------:|:------------:|:------------:|:---------------:|:------------:|-----------------------------------|-----|--------|-----|
| Apr 2008 | | | | 5.0 | | Apr 2008 | | | | 5.0 |
| Jun 2008 | | | | 5.1 | | Jun 2008 | | | | 5.1 |
| Oct 2008 | *Oct 2013* | | 2.6.0 | | *10.04 Lucid* - 10.10 Maverick / *Squeeze* | | Oct 2008 | *Oct 2013* | | 2.6.0 | | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |
@ -244,7 +241,7 @@ Things in italics are things we used to care about but probably don't anymore.
| Feb 2014 | *Dec 2014* | **1.9.0** | | | *14.04 Trusty* | | Feb 2014 | *Dec 2014* | **1.9.0** | | | *14.04 Trusty* |
| Mar 2014 | *Mar 2019* | | *3.4.0* | | *14.04 Trusty* - 15.10 Wily / *Jessie* | | Mar 2014 | *Mar 2019* | | *3.4.0* | | *14.04 Trusty* - 15.10 Wily / *Jessie* |
| Mar 2014 | | | | 6.6 | *14.04 Trusty* - 14.10 Utopic | | Mar 2014 | | | | 6.6 | *14.04 Trusty* - 14.10 Utopic |
| Apr 2014 | *Apr 2024* | | | | *14.04 Trusty* | 1.9.1 | 2.7.5 3.4.0 | 6.6 | | Apr 2014 | *Apr 2022* | | | | *14.04 Trusty* | 1.9.1 | 2.7.5 3.4.0 | 6.6 |
| May 2014 | *Dec 2014* | 2.0.0 | | May 2014 | *Dec 2014* | 2.0.0 |
| Aug 2014 | *Dec 2014* | *2.1.0* | | | 14.10 Utopic - 15.04 Vivid / *Jessie* | | Aug 2014 | *Dec 2014* | *2.1.0* | | | 14.10 Utopic - 15.04 Vivid / *Jessie* |
| Oct 2014 | | | | 6.7 | 15.04 Vivid | | Oct 2014 | | | | 6.7 | 15.04 Vivid |
@ -265,7 +262,7 @@ Things in italics are things we used to care about but probably don't anymore.
| Jan 2016 | *Jul 2017* | *2.7.0* | | | *16.04 Xenial* | | Jan 2016 | *Jul 2017* | *2.7.0* | | | *16.04 Xenial* |
| Feb 2016 | | | | 7.2 | *16.04 Xenial* | | Feb 2016 | | | | 7.2 | *16.04 Xenial* |
| Mar 2016 | *Jul 2017* | 2.8.0 | | Mar 2016 | *Jul 2017* | 2.8.0 |
| Apr 2016 | *Apr 2026* | | | | *16.04 Xenial* | 2.7.4 | 2.7.11 3.5.1 | 7.2 | | Apr 2016 | *Apr 2024* | | | | *16.04 Xenial* | 2.7.4 | 2.7.11 3.5.1 | 7.2 |
| Jun 2016 | *Jul 2017* | 2.9.0 | | | 16.10 Yakkety | | Jun 2016 | *Jul 2017* | 2.9.0 | | | 16.10 Yakkety |
| Jul 2016 | | | | 7.3 | 16.10 Yakkety | | Jul 2016 | | | | 7.3 | 16.10 Yakkety |
| Sep 2016 | *Sep 2017* | 2.10.0 | | Sep 2016 | *Sep 2017* | 2.10.0 |
@ -315,33 +312,14 @@ Things in italics are things we used to care about but probably don't anymore.
| Oct 2020 | | | | | 20.10 Groovy | 2.27.0 | 2.7.18 3.8.6 | 8.3 | | Oct 2020 | | | | | 20.10 Groovy | 2.27.0 | 2.7.18 3.8.6 | 8.3 |
| Oct 2020 | **Oct 2025** | | 3.9.0 | | 21.04 Hirsute / **Bullseye** | | Oct 2020 | **Oct 2025** | | 3.9.0 | | 21.04 Hirsute / **Bullseye** |
| Dec 2020 | *Mar 2021* | 2.30.0 | | | 21.04 Hirsute / **Bullseye** | | Dec 2020 | *Mar 2021* | 2.30.0 | | | 21.04 Hirsute / **Bullseye** |
| Mar 2021 | | 2.31.0 | | 8.5 | | Mar 2021 | | 2.31.0 |
| Mar 2021 | | | | 8.5 |
| Apr 2021 | | | | 8.6 | | Apr 2021 | | | | 8.6 |
| Apr 2021 | *Jan 2022* | | | | 21.04 Hirsute | 2.30.2 | 2.7.18 3.9.4 | 8.4 | | Apr 2021 | *Jan 2022* | | | | 21.04 Hirsute | 2.30.2 | 2.7.18 3.9.4 | 8.4 |
| Jun 2021 | | 2.32.0 | | Jun 2021 | | 2.32.0 |
| Aug 2021 | | 2.33.0 | | 8.7 | | Aug 2021 | | 2.33.0 |
| Aug 2021 | | | | 8.7 |
| Aug 2021 | **Aug 2026** | | | | **Debian 11 Bullseye** | 2.30.2 | 2.7.18 3.9.2 | 8.4 | | Aug 2021 | **Aug 2026** | | | | **Debian 11 Bullseye** | 2.30.2 | 2.7.18 3.9.2 | 8.4 |
| Sep 2021 | | | | 8.8 |
| Oct 2021 | | 2.34.0 | 3.10.0 | | **22.04 Jammy** |
| Jan 2022 | | 2.35.0 |
| Feb 2022 | | | | 8.9 | **22.04 Jammy** |
| Apr 2022 | | 2.36.0 | | 9.0 |
| Apr 2022 | **Apr 2032** | | | | **22.04 Jammy** | 2.34.1 | 2.7.18 3.10.6 | 8.9 |
| Jun 2022 | | 2.37.0 |
| Oct 2022 | | 2.38.0 | | 9.1 |
| Oct 2022 | | | 3.11.0 | | **Bookworm** |
| Dec 2022 | | 2.39.0 | | | **Bookworm** |
| Feb 2023 | | | | 9.2 | **Bookworm** |
| Mar 2023 | | 2.40.0 | | 9.3 |
| Jun 2023 | | 2.41.0 |
| Jun 2023 | **Jun 2028** | | | | **Debian 12 Bookworm** | 2.39.2 | 3.11.2 | 9.2 |
| Aug 2023 | | 2.42.0 | | 9.4 |
| Oct 2023 | | | 3.12.0 | 9.5 |
| Nov 2022 | | 2.43.0 |
| Dec 2023 | | | | 9.6 |
| Feb 2024 | | 2.44.0 |
| Mar 2024 | | | | 9.7 |
| Oct 2024 | | | 3.13.0 |
| **Date** | **EOL** | **[Git][rel-g]** | **[Python][rel-p]** | **[SSH][rel-o]** | **[Ubuntu][rel-u] / [Debian][rel-d]** | **Git** | **Python** | **SSH** | | **Date** | **EOL** | **[Git][rel-g]** | **[Python][rel-p]** | **[SSH][rel-o]** | **[Ubuntu][rel-u] / [Debian][rel-d]** | **Git** | **Python** | **SSH** |
@ -350,7 +328,7 @@ Things in italics are things we used to care about but probably don't anymore.
[rel-g]: https://en.wikipedia.org/wiki/Git#Releases [rel-g]: https://en.wikipedia.org/wiki/Git#Releases
[rel-o]: https://www.openssh.com/releasenotes.html [rel-o]: https://www.openssh.com/releasenotes.html
[rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions [rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
[rel-u]: https://wiki.ubuntu.com/Releases [rel-u]: https://en.wikipedia.org/wiki/Ubuntu_version_history#Table_of_versions
[example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion [example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion
[repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss [repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss
[go/repo-release]: https://goto.google.com/repo-release [go/repo-release]: https://goto.google.com/repo-release

View File

@ -1,129 +0,0 @@
# repo Smart Syncing
Repo normally fetches & syncs manifests from the same URL specified during
`repo init`, and that often fetches the latest revisions of all projects in
the manifest. This flow works well for tracking and developing with the
latest code, but often it's desirable to sync to other points. For example,
to get a local build matching a specific release or build to reproduce bugs
reported by other people.
Repo's sync subcommand has support for fetching manifests from a server over
an XML-RPC connection. The local configuration and network API are defined by
repo, but individual projects have to host their own server for the client to
communicate with.
This process is called "smart syncing" -- instead of blindly fetching the latest
revision of all projects and getting an unknown state to develop against, the
client passes a request to the server and is given a matching manifest that
typically specifies specific commits for every project to fetch a known source
state.
[TOC]
## Manifest Configuration
The manifest specifies the server to communicate with via the
the [`<manifest-server>` element](manifest-format.md#Element-manifest_server)
element. This is how the client knows what service to talk to.
```xml
<manifest-server url="https://example.com/your/manifest/server/url" />
```
If the URL starts with `persistent-`, then the
[`git-remote-persistent-https` helper](https://github.com/git/git/blob/HEAD/contrib/persistent-https/README)
is used to communicate with the server.
## Credentials
Credentials may be specified directly in typical `username:password`
[URI syntax](https://en.wikipedia.org/wiki/URI#Syntax) in the
`<manifest-server>` element directly in the manifest.
If they are not specified, `repo sync` has `--manifest-server-username=USERNAME`
and `--manifest-server-password=PASSWORD` options.
If those are not used, then repo will look up the host in your
[`~/.netrc`](https://docs.python.org/3/library/netrc.html) database.
When making the connection, cookies matching the host are automatically loaded
from the cookiejar specified in
[Git's `http.cookiefile` setting](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httpcookieFile).
## Manifest Server
Unfortunately, there are no public reference implementations. Google has an
internal one for Android, but it is written using Google's internal systems,
so wouldn't be that helpful as a reference.
That said, the XML-RPC API is pretty simple, so any standard XML-RPC server
example would do. Google's internal server uses Python's
[xmlrpc.server.SimpleXMLRPCDispatcher](https://docs.python.org/3/library/xmlrpc.server.html).
## Network API
The manifest server should implement the following RPC methods.
### GetApprovedManifest
> `GetApprovedManifest(branch: str, target: Optional[str]) -> str`
The meaning of `branch` and `target` is not strictly defined. The server may
interpret them however it wants. The recommended interpretation is that the
`branch` matches the manifest branch, and `target` is an identifier for your
project that matches something users would build.
See the client section below for how repo typically generates these values.
The server will return a manifest or an error. If it's an error, repo will
show the output directly to the user to provide a limited feedback channel.
If the user's request is ambiguous and could match multiple manifests, the
server has to decide whether to pick one automatically (and silently such that
the user won't know there were multiple matches), or return an error and force
the user to be more specific.
### GetManifest
> `GetManifest(tag: str) -> str`
The meaning of `tag` is not strictly defined. Projects are encouraged to use
a system where the tag matches a unique source state.
See the client section below for how repo typically generates these values.
The server will return a manifest or an error. If it's an error, repo will
show the output directly to the user to provide a limited feedback channel.
If the user's request is ambiguous and could match multiple manifests, the
server has to decide whether to pick one automatically (and silently such that
the user won't know there were multiple matches), or return an error and force
the user to be more specific.
## Client Options
Once repo has successfully downloaded the manifest from the server, it saves a
copy into `.repo/manifests/smart_sync_override.xml` so users can examine it.
The next time `repo sync` is run, this file is automatically replaced or removed
based on the current set of options.
### --smart-sync
Repo will call `GetApprovedManifest(branch[, target])`.
The `branch` is determined by the current manifest branch as specified by
`--manifest-branch=BRANCH` when running `repo init`.
The `target` is defined by environment variables in the order below. If none
of them match, then `target` is omitted. These variables were decided as they
match the settings Android build environments automatically setup.
1. `${SYNC_TARGET}`: If defined, the value is used directly.
2. `${TARGET_PRODUCT}-${TARGET_RELEASE}-${TARGET_BUILD_VARIANT}`: If these
variables are all defined, then they are merged with `-` and used.
3. `${TARGET_PRODUCT}-${TARGET_BUILD_VARIANT}`: If these variables are all
defined, then they are merged with `-` and used.
### --smart-tag=TAG
Repo will call `GetManifest(TAG)`.

View File

@ -104,7 +104,9 @@ least one of these before using this command.""", # noqa: E501
try: try:
rc = subprocess.Popen(args, shell=shell).wait() rc = subprocess.Popen(args, shell=shell).wait()
except OSError as e: except OSError as e:
raise EditorError(f"editor failed, {str(e)}: {editor} {path}") raise EditorError(
"editor failed, %s: %s %s" % (str(e), editor, path)
)
if rc != 0: if rc != 0:
raise EditorError( raise EditorError(
"editor failed with exit status %d: %s %s" "editor failed with exit status %d: %s %s"

View File

@ -107,8 +107,8 @@ class GitError(RepoError):
return self.message return self.message
class GitAuthError(RepoExitError): class GitcUnsupportedError(RepoExitError):
"""Cannot talk to remote due to auth issue.""" """Gitc no longer supported."""
class UploadError(RepoError): class UploadError(RepoError):

View File

@ -168,10 +168,8 @@ class EventLog:
f.write("\n") f.write("\n")
# An integer id that is unique across this invocation of the program, to be set # An integer id that is unique across this invocation of the program.
# by the first Add event. We can't set it here since it results in leaked _EVENT_ID = multiprocessing.Value("i", 1)
# resources (see: https://issues.gerritcodereview.com/353656374).
_EVENT_ID = None
def _NextEventId(): def _NextEventId():
@ -180,12 +178,6 @@ def _NextEventId():
Returns: Returns:
A unique, to this invocation of the program, integer id. A unique, to this invocation of the program, integer id.
""" """
global _EVENT_ID
if _EVENT_ID is None:
# There is a small chance of race condition - two parallel processes
# setting up _EVENT_ID. However, we expect TASK_COMMAND to happen before
# mp kicks in.
_EVENT_ID = multiprocessing.Value("i", 1)
with _EVENT_ID.get_lock(): with _EVENT_ID.get_lock():
val = _EVENT_ID.value val = _EVENT_ID.value
_EVENT_ID.value += 1 _EVENT_ID.value += 1

View File

@ -33,6 +33,17 @@ from wrapper import Wrapper
GIT = "git" GIT = "git"
# NB: These do not need to be kept in sync with the repo launcher script.
# These may be much newer as it allows the repo launcher to roll between
# different repo releases while source versions might require a newer git.
#
# The soft version is when we start warning users that the version is old and
# we'll be dropping support for it. We'll refuse to work with versions older
# than the hard version.
#
# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
MIN_GIT_VERSION_SOFT = (1, 9, 1)
MIN_GIT_VERSION_HARD = (1, 7, 2)
GIT_DIR = "GIT_DIR" GIT_DIR = "GIT_DIR"
LAST_GITDIR = None LAST_GITDIR = None
@ -124,8 +135,6 @@ def GetEventTargetPath():
if retval == 0: if retval == 0:
# Strip trailing carriage-return in path. # Strip trailing carriage-return in path.
path = p.stdout.rstrip("\n") path = p.stdout.rstrip("\n")
if path == "":
return None
elif retval != 1: elif retval != 1:
# `git config --get` is documented to produce an exit status of `1` # `git config --get` is documented to produce an exit status of `1`
# if the requested variable is not present in the configuration. # if the requested variable is not present in the configuration.
@ -187,10 +196,12 @@ class UserAgent:
def git(self): def git(self):
"""The UA when running git.""" """The UA when running git."""
if self._git_ua is None: if self._git_ua is None:
self._git_ua = ( self._git_ua = "git/%s (%s) git-repo/%s" % (
f"git/{git.version_tuple().full} ({self.os}) " git.version_tuple().full,
f"git-repo/{RepoSourceVersion()}" self.os,
RepoSourceVersion(),
) )
return self._git_ua return self._git_ua
@ -205,7 +216,7 @@ def git_require(min_version, fail=False, msg=""):
need = ".".join(map(str, min_version)) need = ".".join(map(str, min_version))
if msg: if msg:
msg = " for " + msg msg = " for " + msg
error_msg = f"fatal: git {need} or later required{msg}" error_msg = "fatal: git %s or later required%s" % (need, msg)
logger.error(error_msg) logger.error(error_msg)
raise GitRequireError(error_msg) raise GitRequireError(error_msg)
return False return False
@ -232,15 +243,15 @@ def _build_env(
env["GIT_SSH"] = ssh_proxy.proxy env["GIT_SSH"] = ssh_proxy.proxy
env["GIT_SSH_VARIANT"] = "ssh" env["GIT_SSH_VARIANT"] = "ssh"
if "http_proxy" in env and "darwin" == sys.platform: if "http_proxy" in env and "darwin" == sys.platform:
s = f"'http.proxy={env['http_proxy']}'" s = "'http.proxy=%s'" % (env["http_proxy"],)
p = env.get("GIT_CONFIG_PARAMETERS") p = env.get("GIT_CONFIG_PARAMETERS")
if p is not None: if p is not None:
s = p + " " + s s = p + " " + s
env["GIT_CONFIG_PARAMETERS"] = s env["GIT_CONFIG_PARAMETERS"] = s
if "GIT_ALLOW_PROTOCOL" not in env: if "GIT_ALLOW_PROTOCOL" not in env:
env["GIT_ALLOW_PROTOCOL"] = ( env[
"file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc" "GIT_ALLOW_PROTOCOL"
) ] = "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
env["GIT_HTTP_USER_AGENT"] = user_agent.git env["GIT_HTTP_USER_AGENT"] = user_agent.git
if objdir: if objdir:
@ -313,13 +324,10 @@ class GitCommand:
cwd = None cwd = None
command_name = cmdv[0] command_name = cmdv[0]
command.append(command_name) command.append(command_name)
if command_name in ("fetch", "clone"):
env["GIT_TERMINAL_PROMPT"] = "0"
# Need to use the --progress flag for fetch/clone so output will be # Need to use the --progress flag for fetch/clone so output will be
# displayed as by default git only does progress output if stderr is # displayed as by default git only does progress output if stderr is a
# a TTY. # TTY.
if sys.stderr.isatty(): if sys.stderr.isatty() and command_name in ("fetch", "clone"):
if "--progress" not in cmdv and "--quiet" not in cmdv: if "--progress" not in cmdv and "--quiet" not in cmdv:
command.append("--progress") command.append("--progress")
command.extend(cmdv[1:]) command.extend(cmdv[1:])
@ -350,9 +358,9 @@ class GitCommand:
"Project": e.project, "Project": e.project,
"CommandName": command_name, "CommandName": command_name,
"Message": str(e), "Message": str(e),
"ReturnCode": ( "ReturnCode": str(e.git_rc)
str(e.git_rc) if e.git_rc is not None else None if e.git_rc is not None
), else None,
"IsError": log_as_error, "IsError": log_as_error,
} }
) )
@ -460,7 +468,7 @@ class GitCommand:
) )
except Exception as e: except Exception as e:
raise GitPopenCommandError( raise GitPopenCommandError(
message=f"{command[1]}: {e}", message="%s: %s" % (command[1], e),
project=self.project.name if self.project else None, project=self.project.name if self.project else None,
command_args=self.cmdv, command_args=self.cmdv,
) )

View File

@ -90,20 +90,6 @@ class GitConfig:
@staticmethod @staticmethod
def _getUserConfig(): def _getUserConfig():
"""Get the user-specific config file.
Prefers the XDG config location if available, with fallback to
~/.gitconfig
This matches git behavior:
https://git-scm.com/docs/git-config#FILES
"""
xdg_config_home = os.getenv(
"XDG_CONFIG_HOME", os.path.expanduser("~/.config")
)
xdg_config_file = os.path.join(xdg_config_home, "git", "config")
if os.path.exists(xdg_config_file):
return xdg_config_file
return os.path.expanduser("~/.gitconfig") return os.path.expanduser("~/.gitconfig")
@classmethod @classmethod
@ -432,7 +418,7 @@ class GitConfig:
if p.Wait() == 0: if p.Wait() == 0:
return p.stdout return p.stdout
else: else:
raise GitError(f"git config {str(args)}: {p.stderr}") raise GitError("git config %s: %s" % (str(args), p.stderr))
class RepoConfig(GitConfig): class RepoConfig(GitConfig):
@ -665,11 +651,13 @@ class Remote:
userEmail, host, port userEmail, host, port
) )
except urllib.error.HTTPError as e: except urllib.error.HTTPError as e:
raise UploadError(f"{self.review}: {str(e)}") raise UploadError("%s: %s" % (self.review, str(e)))
except urllib.error.URLError as e: except urllib.error.URLError as e:
raise UploadError(f"{self.review}: {str(e)}") raise UploadError("%s: %s" % (self.review, str(e)))
except http.client.HTTPException as e: except http.client.HTTPException as e:
raise UploadError(f"{self.review}: {e.__class__.__name__}") raise UploadError(
"%s: %s" % (self.review, e.__class__.__name__)
)
REVIEW_CACHE[u] = self._review_url REVIEW_CACHE[u] = self._review_url
return self._review_url + self.projectname return self._review_url + self.projectname
@ -678,7 +666,7 @@ class Remote:
username = self._config.GetString("review.%s.username" % self.review) username = self._config.GetString("review.%s.username" % self.review)
if username is None: if username is None:
username = userEmail.split("@")[0] username = userEmail.split("@")[0]
return f"ssh://{username}@{host}:{port}/" return "ssh://%s@%s:%s/" % (username, host, port)
def ToLocal(self, rev): def ToLocal(self, rev):
"""Convert a remote revision string to something we have locally.""" """Convert a remote revision string to something we have locally."""
@ -727,11 +715,11 @@ class Remote:
self._Set("fetch", list(map(str, self.fetch))) self._Set("fetch", list(map(str, self.fetch)))
def _Set(self, key, value): def _Set(self, key, value):
key = f"remote.{self.name}.{key}" key = "remote.%s.%s" % (self.name, key)
return self._config.SetString(key, value) return self._config.SetString(key, value)
def _Get(self, key, all_keys=False): def _Get(self, key, all_keys=False):
key = f"remote.{self.name}.{key}" key = "remote.%s.%s" % (self.name, key)
return self._config.GetString(key, all_keys=all_keys) return self._config.GetString(key, all_keys=all_keys)
@ -774,11 +762,11 @@ class Branch:
fd.write("\tmerge = %s\n" % self.merge) fd.write("\tmerge = %s\n" % self.merge)
def _Set(self, key, value): def _Set(self, key, value):
key = f"branch.{self.name}.{key}" key = "branch.%s.%s" % (self.name, key)
return self._config.SetString(key, value) return self._config.SetString(key, value)
def _Get(self, key, all_keys=False): def _Get(self, key, all_keys=False):
key = f"branch.{self.name}.{key}" key = "branch.%s.%s" % (self.name, key)
return self._config.GetString(key, all_keys=all_keys) return self._config.GetString(key, all_keys=all_keys)

View File

@ -69,9 +69,9 @@ class UpdateProjectsResult(NamedTuple):
class Superproject: class Superproject:
"""Get commit ids from superproject. """Get commit ids from superproject.
Initializes a bare local copy of a superproject for the manifest. This Initializes a local copy of a superproject for the manifest. This allows
allows lookup of commit ids for all projects. It contains lookup of commit ids for all projects. It contains _project_commit_ids which
_project_commit_ids which is a dictionary with project/commit id entries. is a dictionary with project/commit id entries.
""" """
def __init__( def __init__(
@ -235,8 +235,7 @@ class Superproject:
p = GitCommand( p = GitCommand(
None, None,
cmd, cmd,
gitdir=self._work_git, cwd=self._work_git,
bare=True,
capture_stdout=True, capture_stdout=True,
capture_stderr=True, capture_stderr=True,
) )
@ -272,8 +271,7 @@ class Superproject:
p = GitCommand( p = GitCommand(
None, None,
cmd, cmd,
gitdir=self._work_git, cwd=self._work_git,
bare=True,
capture_stdout=True, capture_stdout=True,
capture_stderr=True, capture_stderr=True,
) )
@ -307,6 +305,8 @@ class Superproject:
) )
return SyncResult(False, False) return SyncResult(False, False)
_PrintBetaNotice()
should_exit = True should_exit = True
if not self._remote_url: if not self._remote_url:
self._LogWarning( self._LogWarning(
@ -450,6 +450,16 @@ class Superproject:
return UpdateProjectsResult(manifest_path, False) return UpdateProjectsResult(manifest_path, False)
@functools.lru_cache(maxsize=10)
def _PrintBetaNotice():
"""Print the notice of beta status."""
print(
"NOTICE: --use-superproject is in beta; report any issues to the "
"address described in `repo version`",
file=sys.stderr,
)
@functools.lru_cache(maxsize=None) @functools.lru_cache(maxsize=None)
def _UseSuperprojectFromConfiguration(): def _UseSuperprojectFromConfiguration():
"""Returns the user choice of whether to use superproject.""" """Returns the user choice of whether to use superproject."""

View File

@ -38,8 +38,6 @@ import tempfile
import threading import threading
# Timeout when sending events via socket (applies to connect, send)
SOCK_TIMEOUT = 0.5 # in seconds
# BaseEventLog __init__ Counter that is consistent within the same process # BaseEventLog __init__ Counter that is consistent within the same process
p_init_count = 0 p_init_count = 0
@ -78,8 +76,9 @@ class BaseEventLog:
# Save both our sid component and the complete sid. # Save both our sid component and the complete sid.
# We use our sid component (self._sid) as the unique filename prefix and # We use our sid component (self._sid) as the unique filename prefix and
# the full sid (self._full_sid) in the log itself. # the full sid (self._full_sid) in the log itself.
self._sid = ( self._sid = "repo-%s-P%08x" % (
f"repo-{self.start.strftime('%Y%m%dT%H%M%SZ')}-P{os.getpid():08x}" self.start.strftime("%Y%m%dT%H%M%SZ"),
os.getpid(),
) )
if add_init_count: if add_init_count:
@ -130,10 +129,10 @@ class BaseEventLog:
"time": datetime.datetime.now(datetime.timezone.utc).isoformat(), "time": datetime.datetime.now(datetime.timezone.utc).isoformat(),
} }
def StartEvent(self, argv): def StartEvent(self):
"""Append a 'start' event to the current log.""" """Append a 'start' event to the current log."""
start_event = self._CreateEventDict("start") start_event = self._CreateEventDict("start")
start_event["argv"] = argv start_event["argv"] = sys.argv
self._log.append(start_event) self._log.append(start_event)
def ExitEvent(self, result): def ExitEvent(self, result):
@ -159,11 +158,9 @@ class BaseEventLog:
name: Name of the primary command (ex: repo, git) name: Name of the primary command (ex: repo, git)
subcommands: List of the sub-commands (ex: version, init, sync) subcommands: List of the sub-commands (ex: version, init, sync)
""" """
command_event = self._CreateEventDict("cmd_name") command_event = self._CreateEventDict("command")
name = f"{name}-"
name += "-".join(subcommands)
command_event["name"] = name command_event["name"] = name
command_event["hierarchy"] = name command_event["subcommands"] = subcommands
self._log.append(command_event) self._log.append(command_event)
def LogConfigEvents(self, config, event_dict_name): def LogConfigEvents(self, config, event_dict_name):
@ -300,7 +297,6 @@ class BaseEventLog:
with socket.socket( with socket.socket(
socket.AF_UNIX, socket.SOCK_STREAM socket.AF_UNIX, socket.SOCK_STREAM
) as sock: ) as sock:
sock.settimeout(SOCK_TIMEOUT)
sock.connect(path) sock.connect(path)
self._WriteLog(sock.sendall) self._WriteLog(sock.sendall)
return f"af_unix:stream:{path}" return f"af_unix:stream:{path}"

View File

@ -180,7 +180,7 @@ class RepoHook:
abort_if_user_denies was passed to the consturctor. abort_if_user_denies was passed to the consturctor.
""" """
hooks_config = self._hooks_project.config hooks_config = self._hooks_project.config
git_approval_key = f"repo.hooks.{self._hook_type}.{subkey}" git_approval_key = "repo.hooks.%s.%s" % (self._hook_type, subkey)
# Get the last value that the user approved for this hook; may be None. # Get the last value that the user approved for this hook; may be None.
old_val = hooks_config.GetString(git_approval_key) old_val = hooks_config.GetString(git_approval_key)
@ -193,7 +193,7 @@ class RepoHook:
else: else:
# Give the user a reason why we're prompting, since they last # Give the user a reason why we're prompting, since they last
# told us to "never ask again". # told us to "never ask again".
prompt = f"WARNING: {changed_prompt}\n\n" prompt = "WARNING: %s\n\n" % (changed_prompt,)
else: else:
prompt = "" prompt = ""
@ -241,8 +241,9 @@ class RepoHook:
return self._CheckForHookApprovalHelper( return self._CheckForHookApprovalHelper(
"approvedmanifest", "approvedmanifest",
self._manifest_url, self._manifest_url,
f"Run hook scripts from {self._manifest_url}", "Run hook scripts from %s" % (self._manifest_url,),
f"Manifest URL has changed since {self._hook_type} was allowed.", "Manifest URL has changed since %s was allowed."
% (self._hook_type,),
) )
def _CheckForHookApprovalHash(self): def _CheckForHookApprovalHash(self):
@ -261,7 +262,7 @@ class RepoHook:
"approvedhash", "approvedhash",
self._GetHash(), self._GetHash(),
prompt % (self._GetMustVerb(), self._script_fullpath), prompt % (self._GetMustVerb(), self._script_fullpath),
f"Scripts have changed since {self._hook_type} was allowed.", "Scripts have changed since %s was allowed." % (self._hook_type,),
) )
@staticmethod @staticmethod

View File

@ -1,8 +1,5 @@
#!/bin/sh #!/bin/sh
# DO NOT EDIT THIS FILE # From Gerrit Code Review 3.6.1 c67916dbdc07555c44e32a68f92ffc484b9b34f0
# All updates should be sent upstream: https://gerrit.googlesource.com/gerrit/
# This is synced from commit: 62f5bbea67f6dafa6e22a601a0c298214c510caf
# DO NOT EDIT THIS FILE
# #
# Part of Gerrit Code Review (https://www.gerritcodereview.com/) # Part of Gerrit Code Review (https://www.gerritcodereview.com/)
# #
@ -34,20 +31,14 @@ if test ! -f "$1" ; then
fi fi
# Do not create a change id if requested # Do not create a change id if requested
case "$(git config --get gerrit.createChangeId)" in if test "false" = "$(git config --bool --get gerrit.createChangeId)" ; then
false)
exit 0
;;
always)
;;
*)
# Do not create a change id for squash/fixup commits.
if head -n1 "$1" | LC_ALL=C grep -q '^[a-z][a-z]*! '; then
exit 0 exit 0
fi fi
;;
esac
# Do not create a change id for squash commits.
if head -n1 "$1" | grep -q '^squash! '; then
exit 0
fi
if git rev-parse --verify HEAD >/dev/null 2>&1; then if git rev-parse --verify HEAD >/dev/null 2>&1; then
refhash="$(git rev-parse HEAD)" refhash="$(git rev-parse HEAD)"
@ -60,7 +51,7 @@ dest="$1.tmp.${random}"
trap 'rm -f "$dest" "$dest-2"' EXIT trap 'rm -f "$dest" "$dest-2"' EXIT
if ! cat "$1" | sed -e '/>8/q' | git stripspace --strip-comments > "${dest}" ; then if ! git stripspace --strip-comments < "$1" > "${dest}" ; then
echo "cannot strip comments from $1" echo "cannot strip comments from $1"
exit 1 exit 1
fi fi
@ -74,7 +65,7 @@ reviewurl="$(git config --get gerrit.reviewUrl)"
if test -n "${reviewurl}" ; then if test -n "${reviewurl}" ; then
token="Link" token="Link"
value="${reviewurl%/}/id/I$random" value="${reviewurl%/}/id/I$random"
pattern=".*/id/I[0-9a-f]\{40\}" pattern=".*/id/I[0-9a-f]\{40\}$"
else else
token="Change-Id" token="Change-Id"
value="I$random" value="I$random"
@ -101,7 +92,7 @@ fi
# Avoid the --where option which only appeared in Git 2.15 # Avoid the --where option which only appeared in Git 2.15
if ! git -c trailer.where=before interpret-trailers \ if ! git -c trailer.where=before interpret-trailers \
--trailer "Signed-off-by: $token: $value" < "$dest-2" | --trailer "Signed-off-by: $token: $value" < "$dest-2" |
sed -e "s/^Signed-off-by: \($token: \)/\1/" \ sed -re "s/^Signed-off-by: ($token: )/\1/" \
-e "/^Signed-off-by: SENTINEL/d" > "$dest" ; then -e "/^Signed-off-by: SENTINEL/d" > "$dest" ; then
echo "cannot insert $token line in $1" echo "cannot insert $token line in $1"
exit 1 exit 1

View File

@ -1,25 +1,33 @@
#!/bin/sh #!/bin/sh
# DO NOT EDIT THIS FILE
# All updates should be sent upstream: https://github.com/git/git
# This is synced from commit: 00e10ef10e161a913893b8cb33aa080d4ca5baa6
# DO NOT EDIT THIS FILE
# #
# An example hook script to verify if you are on battery, in case you # An example hook script to verify if you are on battery, in case you
# are running Linux or OS X. Called by git-gc --auto with no arguments. # are running Windows, Linux or OS X. Called by git-gc --auto with no
# The hook should exit with non-zero status after issuing an appropriate # arguments. The hook should exit with non-zero status after issuing an
# message if it wants to stop the auto repacking. # appropriate message if it wants to stop the auto repacking.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# #
# This hook is stored in the contrib/hooks directory. Your distribution # This program is distributed in the hope that it will be useful,
# may have put this somewhere else. If you want to use this hook, you # but WITHOUT ANY WARRANTY; without even the implied warranty of
# should make this script executable then link to it in the repository # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# you would like to use it in. # GNU General Public License for more details.
# #
# For example, if the hook is stored in # You should have received a copy of the GNU General Public License
# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery: # along with this program; if not, write to the Free Software
# # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# cd /path/to/your/repository.git
# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \ if uname -s | grep -q "_NT-"
# hooks/pre-auto-gc then
if test -x $SYSTEMROOT/System32/Wbem/wmic
then
STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n')
[ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1
fi
exit 0
fi
if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1) if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
then then
@ -40,6 +48,11 @@ elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
grep -q "drawing from 'AC Power'" grep -q "drawing from 'AC Power'"
then then
exit 0 exit 0
elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
"$(find /sys/bus/acpi/drivers/battery/ -type l | wc -l)";
then
# No battery exists.
exit 0
fi fi
echo "Auto packing deferred; not on AC" echo "Auto packing deferred; not on AC"

29
main.py
View File

@ -45,9 +45,9 @@ from command import InteractiveCommand
from command import MirrorSafeCommand from command import MirrorSafeCommand
from editor import Editor from editor import Editor
from error import DownloadError from error import DownloadError
from error import GitcUnsupportedError
from error import InvalidProjectGroupsError from error import InvalidProjectGroupsError
from error import ManifestInvalidRevisionError from error import ManifestInvalidRevisionError
from error import ManifestParseError
from error import NoManifestException from error import NoManifestException
from error import NoSuchProjectError from error import NoSuchProjectError
from error import RepoChangedException from error import RepoChangedException
@ -198,8 +198,9 @@ class _Repo:
if short: if short:
commands = " ".join(sorted(self.commands)) commands = " ".join(sorted(self.commands))
wrapped_commands = textwrap.wrap(commands, width=77) wrapped_commands = textwrap.wrap(commands, width=77)
help_commands = "".join(f"\n {x}" for x in wrapped_commands) print(
print(f"Available commands:{help_commands}") "Available commands:\n %s" % ("\n ".join(wrapped_commands),)
)
print("\nRun `repo help <command>` for command-specific details.") print("\nRun `repo help <command>` for command-specific details.")
print("Bug reports:", Wrapper().BUG_URL) print("Bug reports:", Wrapper().BUG_URL)
else: else:
@ -235,7 +236,7 @@ class _Repo:
if name in self.commands: if name in self.commands:
return name, [] return name, []
key = f"alias.{name}" key = "alias.%s" % (name,)
alias = RepoConfig.ForRepository(self.repodir).GetString(key) alias = RepoConfig.ForRepository(self.repodir).GetString(key)
if alias is None: if alias is None:
alias = RepoConfig.ForUser().GetString(key) alias = RepoConfig.ForUser().GetString(key)
@ -269,14 +270,10 @@ class _Repo:
self._PrintHelp(short=True) self._PrintHelp(short=True)
return 1 return 1
git_trace2_event_log = EventLog() run = lambda: self._RunLong(name, gopts, argv) or 0
run = (
lambda: self._RunLong(name, gopts, argv, git_trace2_event_log) or 0
)
with Trace( with Trace(
"starting new command: %s [sid=%s]", "starting new command: %s",
", ".join([name] + argv), ", ".join([name] + argv),
git_trace2_event_log.full_sid,
first_trace=True, first_trace=True,
): ):
if gopts.trace_python: if gopts.trace_python:
@ -293,11 +290,12 @@ class _Repo:
result = run() result = run()
return result return result
def _RunLong(self, name, gopts, argv, git_trace2_event_log): def _RunLong(self, name, gopts, argv):
"""Execute the (longer running) requested subcommand.""" """Execute the (longer running) requested subcommand."""
result = 0 result = 0
SetDefaultColoring(gopts.color) SetDefaultColoring(gopts.color)
git_trace2_event_log = EventLog()
outer_client = RepoClient(self.repodir) outer_client = RepoClient(self.repodir)
repo_client = outer_client repo_client = outer_client
if gopts.submanifest_path: if gopts.submanifest_path:
@ -307,6 +305,10 @@ class _Repo:
outer_client=outer_client, outer_client=outer_client,
) )
if Wrapper().gitc_parse_clientdir(os.getcwd()):
logger.error("GITC is not supported.")
raise GitcUnsupportedError()
try: try:
cmd = self.commands[name]( cmd = self.commands[name](
repodir=self.repodir, repodir=self.repodir,
@ -352,7 +354,7 @@ class _Repo:
start = time.time() start = time.time()
cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start) cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
cmd.event_log.SetParent(cmd_event) cmd.event_log.SetParent(cmd_event)
git_trace2_event_log.StartEvent(["repo", name] + argv) git_trace2_event_log.StartEvent()
git_trace2_event_log.CommandEvent(name="repo", subcommands=[name]) git_trace2_event_log.CommandEvent(name="repo", subcommands=[name])
def execute_command_helper(): def execute_command_helper():
@ -420,7 +422,7 @@ class _Repo:
error_info = json.dumps( error_info = json.dumps(
{ {
"ErrorType": type(error).__name__, "ErrorType": type(error).__name__,
"Project": str(project), "Project": project,
"Message": str(error), "Message": str(error),
} }
) )
@ -438,7 +440,6 @@ class _Repo:
except ( except (
DownloadError, DownloadError,
ManifestInvalidRevisionError, ManifestInvalidRevisionError,
ManifestParseError,
NoManifestException, NoManifestException,
) as e: ) as e:
logger.error("error: in `%s`: %s", " ".join([name] + argv), e) logger.error("error: in `%s`: %s", " ".join([name] + argv), e)

View File

@ -1,24 +1,21 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "December 2024" "repo gc" "Repo Manual" .TH REPO "1" "July 2022" "repo gitc-delete" "Repo Manual"
.SH NAME .SH NAME
repo \- repo gc - manual page for repo gc repo \- repo gitc-delete - manual page for repo gitc-delete
.SH SYNOPSIS .SH SYNOPSIS
.B repo .B repo
\fI\,gc\/\fR \fI\,gitc-delete\/\fR
.SH DESCRIPTION .SH DESCRIPTION
Summary Summary
.PP .PP
Cleaning up internal repo state. Delete a GITC Client.
.SH OPTIONS .SH OPTIONS
.TP .TP
\fB\-h\fR, \fB\-\-help\fR \fB\-h\fR, \fB\-\-help\fR
show this help message and exit show this help message and exit
.TP .TP
\fB\-n\fR, \fB\-\-dry\-run\fR \fB\-f\fR, \fB\-\-force\fR
do everything except actually delete force the deletion (no prompt)
.TP
\fB\-y\fR, \fB\-\-yes\fR
answer yes to all safe prompts
.SS Logging options: .SS Logging options:
.TP .TP
\fB\-v\fR, \fB\-\-verbose\fR \fB\-v\fR, \fB\-\-verbose\fR
@ -40,4 +37,8 @@ only operate on this (sub)manifest
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR \fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
operate on this manifest and its submanifests operate on this manifest and its submanifests
.PP .PP
Run `repo help gc` to view the detailed manual. Run `repo help gitc\-delete` to view the detailed manual.
.SH DETAILS
.PP
This subcommand deletes the current GITC client, deleting the GITC manifest and
all locally downloaded sources.

175
man/repo-gitc-init.1 Normal file
View File

@ -0,0 +1,175 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "October 2022" "repo gitc-init" "Repo Manual"
.SH NAME
repo \- repo gitc-init - manual page for repo gitc-init
.SH SYNOPSIS
.B repo
\fI\,gitc-init \/\fR[\fI\,options\/\fR] [\fI\,client name\/\fR]
.SH DESCRIPTION
Summary
.PP
Initialize a GITC Client.
.SH OPTIONS
.TP
\fB\-h\fR, \fB\-\-help\fR
show this help message and exit
.SS Logging options:
.TP
\fB\-v\fR, \fB\-\-verbose\fR
show all output
.TP
\fB\-q\fR, \fB\-\-quiet\fR
only show errors
.SS Manifest options:
.TP
\fB\-u\fR URL, \fB\-\-manifest\-url\fR=\fI\,URL\/\fR
manifest repository location
.TP
\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
manifest branch or revision (use HEAD for default)
.TP
\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
initial manifest file
.TP
\fB\-g\fR GROUP, \fB\-\-groups\fR=\fI\,GROUP\/\fR
restrict manifest projects to ones with specified
group(s) [default|all|G1,G2,G3|G4,\-G5,\-G6]
.TP
\fB\-p\fR PLATFORM, \fB\-\-platform\fR=\fI\,PLATFORM\/\fR
restrict manifest projects to ones with a specified
platform group [auto|all|none|linux|darwin|...]
.TP
\fB\-\-submodules\fR
sync any submodules associated with the manifest repo
.TP
\fB\-\-standalone\-manifest\fR
download the manifest as a static file rather then
create a git checkout of the manifest repo
.TP
\fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
create a shallow clone of the manifest repo with given
depth (0 for full clone); see git clone (default: 0)
.SS Manifest (only) checkout options:
.TP
\fB\-\-current\-branch\fR
fetch only current manifest branch from server
(default)
.TP
\fB\-\-no\-current\-branch\fR
fetch all manifest branches from server
.TP
\fB\-\-tags\fR
fetch tags in the manifest
.TP
\fB\-\-no\-tags\fR
don't fetch tags in the manifest
.SS Checkout modes:
.TP
\fB\-\-mirror\fR
create a replica of the remote repositories rather
than a client working directory
.TP
\fB\-\-archive\fR
checkout an archive instead of a git repository for
each project. See git archive.
.TP
\fB\-\-worktree\fR
use git\-worktree to manage projects
.SS Project checkout optimizations:
.TP
\fB\-\-reference\fR=\fI\,DIR\/\fR
location of mirror directory
.TP
\fB\-\-dissociate\fR
dissociate from reference mirrors after clone
.TP
\fB\-\-depth\fR=\fI\,DEPTH\/\fR
create a shallow clone with given depth; see git clone
.TP
\fB\-\-partial\-clone\fR
perform partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
.TP
\fB\-\-no\-partial\-clone\fR
disable use of partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
.TP
\fB\-\-partial\-clone\-exclude\fR=\fI\,PARTIAL_CLONE_EXCLUDE\/\fR
exclude the specified projects (a comma\-delimited
project names) from partial clone (https://gitscm.com/docs/gitrepositorylayout#_code_partialclone_code)
.TP
\fB\-\-clone\-filter\fR=\fI\,CLONE_FILTER\/\fR
filter for use with \fB\-\-partial\-clone\fR [default:
blob:none]
.TP
\fB\-\-use\-superproject\fR
use the manifest superproject to sync projects;
implies \fB\-c\fR
.TP
\fB\-\-no\-use\-superproject\fR
disable use of manifest superprojects
.TP
\fB\-\-clone\-bundle\fR
enable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
not \fB\-\-partial\-clone\fR)
.TP
\fB\-\-no\-clone\-bundle\fR
disable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
\fB\-\-partial\-clone\fR)
.TP
\fB\-\-git\-lfs\fR
enable Git LFS support
.TP
\fB\-\-no\-git\-lfs\fR
disable Git LFS support
.SS repo Version options:
.TP
\fB\-\-repo\-url\fR=\fI\,URL\/\fR
repo repository location ($REPO_URL)
.TP
\fB\-\-repo\-rev\fR=\fI\,REV\/\fR
repo branch or revision ($REPO_REV)
.TP
\fB\-\-no\-repo\-verify\fR
do not verify repo source code
.SS Other options:
.TP
\fB\-\-config\-name\fR
Always prompt for name/e\-mail
.SS GITC options:
.TP
\fB\-f\fR MANIFEST_FILE, \fB\-\-manifest\-file\fR=\fI\,MANIFEST_FILE\/\fR
Optional manifest file to use for this GITC client.
.TP
\fB\-c\fR GITC_CLIENT, \fB\-\-gitc\-client\fR=\fI\,GITC_CLIENT\/\fR
Name of the gitc_client instance to create or modify.
.SS Multi\-manifest:
.TP
\fB\-\-outer\-manifest\fR
operate starting at the outermost manifest
.TP
\fB\-\-no\-outer\-manifest\fR
do not operate on outer manifests
.TP
\fB\-\-this\-manifest\-only\fR
only operate on this (sub)manifest
.TP
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
operate on this manifest and its submanifests
.PP
Run `repo help gitc\-init` to view the detailed manual.
.SH DETAILS
.PP
The 'repo gitc\-init' command is ran to initialize a new GITC client for use with
the GITC file system.
.PP
This command will setup the client directory, initialize repo, just like repo
init does, and then downloads the manifest collection and installs it in the
\&.repo/directory of the GITC client.
.PP
Once this is done, a GITC manifest is generated by pulling the HEAD SHA for each
project and generates the properly formatted XML file and installs it as
\&.manifest in the GITC client directory.
.PP
The \fB\-c\fR argument is required to specify the GITC client name.
.PP
The optional \fB\-f\fR argument can be used to specify the manifest file to use for
this GITC client.

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "September 2024" "repo init" "Repo Manual" .TH REPO "1" "October 2022" "repo init" "Repo Manual"
.SH NAME .SH NAME
repo \- repo init - manual page for repo init repo \- repo init - manual page for repo init
.SH SYNOPSIS .SH SYNOPSIS
@ -28,11 +28,6 @@ manifest repository location
\fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR \fB\-b\fR REVISION, \fB\-\-manifest\-branch\fR=\fI\,REVISION\/\fR
manifest branch or revision (use HEAD for default) manifest branch or revision (use HEAD for default)
.TP .TP
\fB\-\-manifest\-upstream\-branch\fR=\fI\,BRANCH\/\fR
when a commit is provided to \fB\-\-manifest\-branch\fR, this
is the name of the git ref in which the commit can be
found
.TP
\fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml \fB\-m\fR NAME.xml, \fB\-\-manifest\-name\fR=\fI\,NAME\/\fR.xml
initial manifest file initial manifest file
.TP .TP
@ -168,10 +163,6 @@ The optional \fB\-b\fR argument can be used to select the manifest branch to che
and use. If no branch is specified, the remote's default branch is used. This is and use. If no branch is specified, the remote's default branch is used. This is
equivalent to using \fB\-b\fR HEAD. equivalent to using \fB\-b\fR HEAD.
.PP .PP
The optional \fB\-\-manifest\-upstream\-branch\fR argument can be used when a commit is
provided to \fB\-\-manifest\-branch\fR (or \fB\-b\fR), to specify the name of the git ref in
which the commit can be found.
.PP
The optional \fB\-m\fR argument can be used to specify an alternate manifest to be The optional \fB\-m\fR argument can be used to specify an alternate manifest to be
used. If no manifest is specified, the manifest default.xml will be used. used. If no manifest is specified, the manifest default.xml will be used.
.PP .PP

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "December 2024" "repo manifest" "Repo Manual" .TH REPO "1" "October 2022" "repo manifest" "Repo Manual"
.SH NAME .SH NAME
repo \- repo manifest - manual page for repo manifest repo \- repo manifest - manual page for repo manifest
.SH SYNOPSIS .SH SYNOPSIS
@ -192,13 +192,10 @@ CDATA #IMPLIED>
<!ATTLIST extend\-project remote CDATA #IMPLIED> <!ATTLIST extend\-project remote CDATA #IMPLIED>
<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED> <!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
<!ATTLIST extend\-project upstream CDATA #IMPLIED> <!ATTLIST extend\-project upstream CDATA #IMPLIED>
<!ATTLIST extend\-project base\-rev CDATA #IMPLIED>
.IP .IP
<!ELEMENT remove\-project EMPTY> <!ELEMENT remove\-project EMPTY>
<!ATTLIST remove\-project name CDATA #IMPLIED> <!ATTLIST remove\-project name CDATA #REQUIRED>
<!ATTLIST remove\-project path CDATA #IMPLIED>
<!ATTLIST remove\-project optional CDATA #IMPLIED> <!ATTLIST remove\-project optional CDATA #IMPLIED>
<!ATTLIST remove\-project base\-rev CDATA #IMPLIED>
.IP .IP
<!ELEMENT repo\-hooks EMPTY> <!ELEMENT repo\-hooks EMPTY>
<!ATTLIST repo\-hooks in\-project CDATA #REQUIRED> <!ATTLIST repo\-hooks in\-project CDATA #REQUIRED>
@ -215,7 +212,6 @@ CDATA #IMPLIED>
<!ELEMENT include EMPTY> <!ELEMENT include EMPTY>
<!ATTLIST include name CDATA #REQUIRED> <!ATTLIST include name CDATA #REQUIRED>
<!ATTLIST include groups CDATA #IMPLIED> <!ATTLIST include groups CDATA #IMPLIED>
<!ATTLIST include revision CDATA #IMPLIED>
.PP .PP
]> ]>
``` ```
@ -497,14 +493,6 @@ project. Same syntax as the corresponding element of `project`.
Attribute `upstream`: If specified, overrides the upstream of the original Attribute `upstream`: If specified, overrides the upstream of the original
project. Same syntax as the corresponding element of `project`. project. Same syntax as the corresponding element of `project`.
.PP .PP
Attribute `base\-rev`: If specified, adds a check against the revision to be
extended. Manifest parse will fail and give a list of mismatch extends if the
revisions being extended have changed since base\-rev was set. Intended for use
with layered manifests using hash revisions to prevent patch branches hiding
newer upstream revisions. Also compares named refs like branches or tags but is
misleading if branches are used as base\-rev. Same syntax as the corresponding
element of `project`.
.PP
Element annotation Element annotation
.PP .PP
Zero or more annotation elements may be specified as children of a project or Zero or more annotation elements may be specified as children of a project or
@ -545,35 +533,16 @@ the repo client.
.PP .PP
Element remove\-project Element remove\-project
.PP .PP
Deletes a project from the internal manifest table, possibly allowing a Deletes the named project from the internal manifest table, possibly allowing a
subsequent project element in the same manifest file to replace the project with subsequent project element in the same manifest file to replace the project with
a different source. a different source.
.PP .PP
This element is mostly useful in a local manifest file, where the user can This element is mostly useful in a local manifest file, where the user can
remove a project, and possibly replace it with their own definition. remove a project, and possibly replace it with their own definition.
.PP .PP
The project `name` or project `path` can be used to specify the remove target
meaning one of them is required. If only name is specified, all projects with
that name are removed.
.PP
If both name and path are specified, only projects with the same name and path
are removed, meaning projects with the same name but in other locations are
kept.
.PP
If only path is specified, a matching project is removed regardless of its name.
Logic otherwise behaves like both are specified.
.PP
Attribute `optional`: Set to true to ignore remove\-project elements with no Attribute `optional`: Set to true to ignore remove\-project elements with no
matching `project` element. matching `project` element.
.PP .PP
Attribute `base\-rev`: If specified, adds a check against the revision to be
removed. Manifest parse will fail and give a list of mismatch removes if the
revisions being removed have changed since base\-rev was set. Intended for use
with layered manifests using hash revisions to prevent patch branches hiding
newer upstream revisions. Also compares named refs like branches or tags but is
misleading if branches are used as base\-rev. Same syntax as the corresponding
element of `project`.
.PP
Element repo\-hooks Element repo\-hooks
.PP .PP
NB: See the [practical documentation](./repo\-hooks.md) for using repo hooks. NB: See the [practical documentation](./repo\-hooks.md) for using repo hooks.
@ -639,9 +608,6 @@ included manifest belong. This appends and recurses, meaning all projects in
included manifests carry all parent include groups. Same syntax as the included manifests carry all parent include groups. Same syntax as the
corresponding element of `project`. corresponding element of `project`.
.PP .PP
Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`)
default to which all projects in the included manifest belong.
.PP
Local Manifests Local Manifests
.PP .PP
Additional remotes and projects may be added through local manifest files stored Additional remotes and projects may be added through local manifest files stored

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "September 2024" "repo smartsync" "Repo Manual" .TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
.SH NAME .SH NAME
repo \- repo smartsync - manual page for repo smartsync repo \- repo smartsync - manual page for repo smartsync
.SH SYNOPSIS .SH SYNOPSIS
@ -37,20 +37,11 @@ overwrite an existing git directory if it needs to
point to a different object directory. WARNING: this point to a different object directory. WARNING: this
may cause loss of data may cause loss of data
.TP .TP
\fB\-\-force\-checkout\fR
force checkout even if it results in throwing away
uncommitted modifications. WARNING: this may cause
loss of data
.TP
\fB\-\-force\-remove\-dirty\fR \fB\-\-force\-remove\-dirty\fR
force remove projects with uncommitted modifications force remove projects with uncommitted modifications
if projects no longer exist in the manifest. WARNING: if projects no longer exist in the manifest. WARNING:
this may cause loss of data this may cause loss of data
.TP .TP
\fB\-\-rebase\fR
rebase local commits regardless of whether they are
published
.TP
\fB\-l\fR, \fB\-\-local\-only\fR \fB\-l\fR, \fB\-\-local\-only\fR
only update working tree, don't fetch only update working tree, don't fetch
.TP .TP

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "September 2024" "repo sync" "Repo Manual" .TH REPO "1" "November 2022" "repo sync" "Repo Manual"
.SH NAME .SH NAME
repo \- repo sync - manual page for repo sync repo \- repo sync - manual page for repo sync
.SH SYNOPSIS .SH SYNOPSIS
@ -37,20 +37,11 @@ overwrite an existing git directory if it needs to
point to a different object directory. WARNING: this point to a different object directory. WARNING: this
may cause loss of data may cause loss of data
.TP .TP
\fB\-\-force\-checkout\fR
force checkout even if it results in throwing away
uncommitted modifications. WARNING: this may cause
loss of data
.TP
\fB\-\-force\-remove\-dirty\fR \fB\-\-force\-remove\-dirty\fR
force remove projects with uncommitted modifications force remove projects with uncommitted modifications
if projects no longer exist in the manifest. WARNING: if projects no longer exist in the manifest. WARNING:
this may cause loss of data this may cause loss of data
.TP .TP
\fB\-\-rebase\fR
rebase local commits regardless of whether they are
published
.TP
\fB\-l\fR, \fB\-\-local\-only\fR \fB\-l\fR, \fB\-\-local\-only\fR
only update working tree, don't fetch only update working tree, don't fetch
.TP .TP
@ -194,11 +185,6 @@ The \fB\-\-force\-sync\fR option can be used to overwrite existing git directori
they have previously been linked to a different object directory. WARNING: This they have previously been linked to a different object directory. WARNING: This
may cause data to be lost since refs may be removed when overwriting. may cause data to be lost since refs may be removed when overwriting.
.PP .PP
The \fB\-\-force\-checkout\fR option can be used to force git to switch revs even if the
index or the working tree differs from HEAD, and if there are untracked files.
WARNING: This may cause data to be lost since uncommitted changes may be
removed.
.PP
The \fB\-\-force\-remove\-dirty\fR option can be used to remove previously used projects The \fB\-\-force\-remove\-dirty\fR option can be used to remove previously used projects
with uncommitted changes. WARNING: This may cause data to be lost since with uncommitted changes. WARNING: This may cause data to be lost since
uncommitted changes may be removed with projects that no longer exist in the uncommitted changes may be removed with projects that no longer exist in the

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "June 2024" "repo upload" "Repo Manual" .TH REPO "1" "August 2022" "repo upload" "Repo Manual"
.SH NAME .SH NAME
repo \- repo upload - manual page for repo upload repo \- repo upload - manual page for repo upload
.SH SYNOPSIS .SH SYNOPSIS
@ -18,11 +18,8 @@ show this help message and exit
number of jobs to run in parallel (default: based on number of jobs to run in parallel (default: based on
number of CPU cores) number of CPU cores)
.TP .TP
\fB\-t\fR, \fB\-\-topic\-branch\fR \fB\-t\fR
set the topic to the local branch name send local branch name to Gerrit Code Review
.TP
\fB\-\-topic\fR=\fI\,TOPIC\/\fR
set topic for the change
.TP .TP
\fB\-\-hashtag\fR=\fI\,HASHTAGS\/\fR, \fB\-\-ht\fR=\fI\,HASHTAGS\/\fR \fB\-\-hashtag\fR=\fI\,HASHTAGS\/\fR, \fB\-\-ht\fR=\fI\,HASHTAGS\/\fR
add hashtags (comma delimited) to the review add hashtags (comma delimited) to the review
@ -33,9 +30,6 @@ add local branch name as a hashtag
\fB\-l\fR LABELS, \fB\-\-label\fR=\fI\,LABELS\/\fR \fB\-l\fR LABELS, \fB\-\-label\fR=\fI\,LABELS\/\fR
add a label when uploading add a label when uploading
.TP .TP
\fB\-\-pd\fR=\fI\,PATCHSET_DESCRIPTION\/\fR, \fB\-\-patchset\-description\fR=\fI\,PATCHSET_DESCRIPTION\/\fR
description for patchset
.TP
\fB\-\-re\fR=\fI\,REVIEWERS\/\fR, \fB\-\-reviewers\fR=\fI\,REVIEWERS\/\fR \fB\-\-re\fR=\fI\,REVIEWERS\/\fR, \fB\-\-reviewers\fR=\fI\,REVIEWERS\/\fR
request reviews from these people request reviews from these people
.TP .TP
@ -204,12 +198,6 @@ review.URL.uploadnotify:
Control e\-mail notifications when uploading. Control e\-mail notifications when uploading.
https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#notify https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#notify
.PP .PP
review.URL.uploadwarningthreshold:
.PP
Repo will warn you if you are attempting to upload a large number of commits in
one or more branches. By default, the threshold is five commits. This option
allows you to override the warning threshold to a different value.
.PP
References References
.PP .PP
Gerrit Code Review: https://www.gerritcodereview.com/ Gerrit Code Review: https://www.gerritcodereview.com/

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man. .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
.TH REPO "1" "December 2024" "repo" "Repo Manual" .TH REPO "1" "June 2023" "repo" "Repo Manual"
.SH NAME .SH NAME
repo \- repository management tool built on top of git repo \- repository management tool built on top of git
.SH SYNOPSIS .SH SYNOPSIS
@ -79,8 +79,11 @@ Download and checkout a change
forall forall
Run a shell command in each project Run a shell command in each project
.TP .TP
gc gitc\-delete
Cleaning up internal repo state. Delete a GITC Client.
.TP
gitc\-init
Initialize a GITC Client.
.TP .TP
grep grep
Print lines matching a pattern Print lines matching a pattern

View File

@ -114,37 +114,9 @@ def XmlInt(node, attr, default=None):
try: try:
return int(value) return int(value)
except ValueError: except ValueError:
raise ManifestParseError(f'manifest: invalid {attr}="{value}" integer') raise ManifestParseError(
'manifest: invalid %s="%s" integer' % (attr, value)
)
def normalize_url(url: str) -> str:
    """Return 'url' in a normalized form.

    Normalization consists of:
      * removing trailing slashes
      * rewriting git's SCP-like syntax (user@host:path) as an SSH URL

    Args:
        url: URL to modify

    Returns:
        The normalized URL.
    """
    url = url.rstrip("/")
    # Patterns such as "git@github.com:foo" carry no scheme, which makes
    # urljoin() misbehave.  Per git's URL rules
    # (https://git-scm.com/docs/git-clone#URLS) these are SSH remotes, so
    # rewrite them with an explicit ssh:// scheme.
    has_scheme = bool(urllib.parse.urlparse(url).scheme)
    looks_scp_like = re.match(r"^[^/:]+@[^/:]+:[^/]+", url) is not None
    if not has_scheme and looks_scp_like:
        return "ssh://" + url.replace(":", "/", 1)
    return url
class _Default: class _Default:
@ -210,22 +182,20 @@ class _XmlRemote:
def _resolveFetchUrl(self): def _resolveFetchUrl(self):
if self.fetchUrl is None: if self.fetchUrl is None:
return "" return ""
url = self.fetchUrl.rstrip("/")
manifestUrl = self.manifestUrl.rstrip("/")
# urljoin will gets confused over quite a few things. The ones we care
# about here are:
# * no scheme in the base url, like <hostname:port>
# We handle no scheme by replacing it with an obscure protocol, gopher
# and then replacing it with the original when we are done.
fetch_url = normalize_url(self.fetchUrl) if manifestUrl.find(":") != manifestUrl.find("/") - 1:
manifest_url = normalize_url(self.manifestUrl) url = urllib.parse.urljoin("gopher://" + manifestUrl, url)
url = re.sub(r"^gopher://", "", url)
# urljoin doesn't like URLs with no scheme in the base URL
# such as file paths. We handle this by prefixing it with
# an obscure protocol, gopher, and replacing it with the
# original after urljoin
if manifest_url.find(":") != manifest_url.find("/") - 1:
fetch_url = urllib.parse.urljoin(
"gopher://" + manifest_url, fetch_url
)
fetch_url = re.sub(r"^gopher://", "", fetch_url)
else: else:
fetch_url = urllib.parse.urljoin(manifest_url, fetch_url) url = urllib.parse.urljoin(manifestUrl, url)
return fetch_url return url
def ToRemoteSpec(self, projectName): def ToRemoteSpec(self, projectName):
fetchUrl = self.resolvedFetchUrl.rstrip("/") fetchUrl = self.resolvedFetchUrl.rstrip("/")
@ -305,7 +275,7 @@ class _XmlSubmanifest:
parent.repodir, parent.repodir,
linkFile, linkFile,
parent_groups=",".join(groups) or "", parent_groups=",".join(groups) or "",
submanifest_path=os.path.join(parent.path_prefix, self.relpath), submanifest_path=self.relpath,
outer_client=outer_client, outer_client=outer_client,
default_groups=default_groups, default_groups=default_groups,
) )
@ -435,6 +405,11 @@ class XmlManifest:
self.parent_groups = parent_groups self.parent_groups = parent_groups
self.default_groups = default_groups self.default_groups = default_groups
if outer_client and self.isGitcClient:
raise ManifestParseError(
"Multi-manifest is incompatible with `gitc-init`"
)
if submanifest_path and not outer_client: if submanifest_path and not outer_client:
# If passing a submanifest_path, there must be an outer_client. # If passing a submanifest_path, there must be an outer_client.
raise ManifestParseError(f"Bad call to {self.__class__.__name__}") raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
@ -835,7 +810,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
ret.setdefault(child.nodeName, []).append(element) ret.setdefault(child.nodeName, []).append(element)
else: else:
raise ManifestParseError( raise ManifestParseError(
f'Unhandled element "{child.nodeName}"' 'Unhandled element "%s"' % (child.nodeName,)
) )
append_children(element, child) append_children(element, child)
@ -1014,9 +989,9 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
def SetManifestOverride(self, path): def SetManifestOverride(self, path):
"""Override manifestFile. The caller must call Unload()""" """Override manifestFile. The caller must call Unload()"""
self._outer_client.manifest.manifestFileOverrides[self.path_prefix] = ( self._outer_client.manifest.manifestFileOverrides[
path self.path_prefix
) ] = path
@property @property
def UseLocalManifests(self): def UseLocalManifests(self):
@ -1283,10 +1258,12 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
try: try:
root = xml.dom.minidom.parse(path) root = xml.dom.minidom.parse(path)
except (OSError, xml.parsers.expat.ExpatError) as e: except (OSError, xml.parsers.expat.ExpatError) as e:
raise ManifestParseError(f"error parsing manifest {path}: {e}") raise ManifestParseError(
"error parsing manifest %s: %s" % (path, e)
)
if not root or not root.childNodes: if not root or not root.childNodes:
raise ManifestParseError(f"no root node in {path}") raise ManifestParseError("no root node in %s" % (path,))
for manifest in root.childNodes: for manifest in root.childNodes:
if ( if (
@ -1295,7 +1272,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
): ):
break break
else: else:
raise ManifestParseError(f"no <manifest> in {path}") raise ManifestParseError("no <manifest> in %s" % (path,))
nodes = [] nodes = []
for node in manifest.childNodes: for node in manifest.childNodes:
@ -1305,7 +1282,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
msg = self._CheckLocalPath(name) msg = self._CheckLocalPath(name)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<include> invalid "name": {name}: {msg}' '<include> invalid "name": %s: %s' % (name, msg)
) )
include_groups = "" include_groups = ""
if parent_groups: if parent_groups:
@ -1337,7 +1314,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
raise raise
except Exception as e: except Exception as e:
raise ManifestParseError( raise ManifestParseError(
f"failed parsing included manifest {name}: {e}" "failed parsing included manifest %s: %s" % (name, e)
) )
else: else:
if parent_groups and node.nodeName == "project": if parent_groups and node.nodeName == "project":
@ -1445,7 +1422,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
repo_hooks_project = None repo_hooks_project = None
enabled_repo_hooks = None enabled_repo_hooks = None
failed_revision_changes = []
for node in itertools.chain(*node_list): for node in itertools.chain(*node_list):
if node.nodeName == "project": if node.nodeName == "project":
project = self._ParseProject(node) project = self._ParseProject(node)
@ -1472,7 +1448,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
remote = self._get_remote(node) remote = self._get_remote(node)
dest_branch = node.getAttribute("dest-branch") dest_branch = node.getAttribute("dest-branch")
upstream = node.getAttribute("upstream") upstream = node.getAttribute("upstream")
base_revision = node.getAttribute("base-rev")
named_projects = self._projects[name] named_projects = self._projects[name]
if dest_path and not path and len(named_projects) > 1: if dest_path and not path and len(named_projects) > 1:
@ -1486,13 +1461,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
if groups: if groups:
p.groups.extend(groups) p.groups.extend(groups)
if revision: if revision:
if base_revision:
if p.revisionExpr != base_revision:
failed_revision_changes.append(
"extend-project name %s mismatch base "
"%s vs revision %s"
% (name, base_revision, p.revisionExpr)
)
p.SetRevision(revision) p.SetRevision(revision)
if remote_name: if remote_name:
@ -1567,7 +1535,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
if node.nodeName == "remove-project": if node.nodeName == "remove-project":
name = node.getAttribute("name") name = node.getAttribute("name")
path = node.getAttribute("path") path = node.getAttribute("path")
base_revision = node.getAttribute("base-rev")
# Name or path needed. # Name or path needed.
if not name and not path: if not name and not path:
@ -1581,13 +1548,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
for projname, projects in list(self._projects.items()): for projname, projects in list(self._projects.items()):
for p in projects: for p in projects:
if name == projname and not path: if name == projname and not path:
if base_revision:
if p.revisionExpr != base_revision:
failed_revision_changes.append(
"remove-project name %s mismatch base "
"%s vs revision %s"
% (name, base_revision, p.revisionExpr)
)
del self._paths[p.relpath] del self._paths[p.relpath]
if not removed_project: if not removed_project:
del self._projects[name] del self._projects[name]
@ -1595,17 +1555,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
elif path == p.relpath and ( elif path == p.relpath and (
name == projname or not name name == projname or not name
): ):
if base_revision:
if p.revisionExpr != base_revision:
failed_revision_changes.append(
"remove-project path %s mismatch base "
"%s vs revision %s"
% (
p.relpath,
base_revision,
p.revisionExpr,
)
)
self._projects[projname].remove(p) self._projects[projname].remove(p)
del self._paths[p.relpath] del self._paths[p.relpath]
removed_project = p.name removed_project = p.name
@ -1625,13 +1574,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
"project: %s" % node.toxml() "project: %s" % node.toxml()
) )
if failed_revision_changes:
raise ManifestParseError(
"revision base check failed, rebase patches and update "
"base revs for: ",
failed_revision_changes,
)
# Store repo hooks project information. # Store repo hooks project information.
if repo_hooks_project: if repo_hooks_project:
# Store a reference to the Project. # Store a reference to the Project.
@ -1823,13 +1765,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
msg = self._CheckLocalPath(name) msg = self._CheckLocalPath(name)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<submanifest> invalid "name": {name}: {msg}' '<submanifest> invalid "name": %s: %s' % (name, msg)
) )
else: else:
msg = self._CheckLocalPath(path) msg = self._CheckLocalPath(path)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<submanifest> invalid "path": {path}: {msg}' '<submanifest> invalid "path": %s: %s' % (path, msg)
) )
submanifest = _XmlSubmanifest( submanifest = _XmlSubmanifest(
@ -1864,7 +1806,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
msg = self._CheckLocalPath(name, dir_ok=True) msg = self._CheckLocalPath(name, dir_ok=True)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<project> invalid "name": {name}: {msg}' '<project> invalid "name": %s: %s' % (name, msg)
) )
if parent: if parent:
name = self._JoinName(parent.name, name) name = self._JoinName(parent.name, name)
@ -1874,7 +1816,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
remote = self._default.remote remote = self._default.remote
if remote is None: if remote is None:
raise ManifestParseError( raise ManifestParseError(
f"no remote for project {name} within {self.manifestFile}" "no remote for project %s within %s" % (name, self.manifestFile)
) )
revisionExpr = node.getAttribute("revision") or remote.revision revisionExpr = node.getAttribute("revision") or remote.revision
@ -1895,7 +1837,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True) msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<project> invalid "path": {path}: {msg}' '<project> invalid "path": %s: %s' % (path, msg)
) )
rebase = XmlBool(node, "rebase", True) rebase = XmlBool(node, "rebase", True)
@ -2056,12 +1998,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
path = path.rstrip("/") path = path.rstrip("/")
name = name.rstrip("/") name = name.rstrip("/")
relpath = self._JoinRelpath(parent.relpath, path) relpath = self._JoinRelpath(parent.relpath, path)
subprojects = os.path.join(parent.gitdir, "subprojects", f"{path}.git") gitdir = os.path.join(parent.gitdir, "subprojects", "%s.git" % path)
modules = os.path.join(parent.gitdir, "modules", path)
if platform_utils.isdir(subprojects):
gitdir = subprojects
else:
gitdir = modules
objdir = os.path.join( objdir = os.path.join(
parent.gitdir, "subproject-objects", "%s.git" % name parent.gitdir, "subproject-objects", "%s.git" % name
) )
@ -2112,22 +2049,22 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
# implementation: # implementation:
# https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884 # https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
BAD_CODEPOINTS = { BAD_CODEPOINTS = {
"\u200c", # ZERO WIDTH NON-JOINER "\u200C", # ZERO WIDTH NON-JOINER
"\u200d", # ZERO WIDTH JOINER "\u200D", # ZERO WIDTH JOINER
"\u200e", # LEFT-TO-RIGHT MARK "\u200E", # LEFT-TO-RIGHT MARK
"\u200f", # RIGHT-TO-LEFT MARK "\u200F", # RIGHT-TO-LEFT MARK
"\u202a", # LEFT-TO-RIGHT EMBEDDING "\u202A", # LEFT-TO-RIGHT EMBEDDING
"\u202b", # RIGHT-TO-LEFT EMBEDDING "\u202B", # RIGHT-TO-LEFT EMBEDDING
"\u202c", # POP DIRECTIONAL FORMATTING "\u202C", # POP DIRECTIONAL FORMATTING
"\u202d", # LEFT-TO-RIGHT OVERRIDE "\u202D", # LEFT-TO-RIGHT OVERRIDE
"\u202e", # RIGHT-TO-LEFT OVERRIDE "\u202E", # RIGHT-TO-LEFT OVERRIDE
"\u206a", # INHIBIT SYMMETRIC SWAPPING "\u206A", # INHIBIT SYMMETRIC SWAPPING
"\u206b", # ACTIVATE SYMMETRIC SWAPPING "\u206B", # ACTIVATE SYMMETRIC SWAPPING
"\u206c", # INHIBIT ARABIC FORM SHAPING "\u206C", # INHIBIT ARABIC FORM SHAPING
"\u206d", # ACTIVATE ARABIC FORM SHAPING "\u206D", # ACTIVATE ARABIC FORM SHAPING
"\u206e", # NATIONAL DIGIT SHAPES "\u206E", # NATIONAL DIGIT SHAPES
"\u206f", # NOMINAL DIGIT SHAPES "\u206F", # NOMINAL DIGIT SHAPES
"\ufeff", # ZERO WIDTH NO-BREAK SPACE "\uFEFF", # ZERO WIDTH NO-BREAK SPACE
} }
if BAD_CODEPOINTS & path_codepoints: if BAD_CODEPOINTS & path_codepoints:
# This message is more expansive than reality, but should be fine. # This message is more expansive than reality, but should be fine.
@ -2157,7 +2094,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
if not cwd_dot_ok or parts != ["."]: if not cwd_dot_ok or parts != ["."]:
for part in set(parts): for part in set(parts):
if part in {".", "..", ".git"} or part.startswith(".repo"): if part in {".", "..", ".git"} or part.startswith(".repo"):
return f"bad component: {part}" return "bad component: %s" % (part,)
if not dir_ok and resep.match(path[-1]): if not dir_ok and resep.match(path[-1]):
return "dirs not allowed" return "dirs not allowed"
@ -2193,7 +2130,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
msg = cls._CheckLocalPath(dest) msg = cls._CheckLocalPath(dest)
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<{element}> invalid "dest": {dest}: {msg}' '<%s> invalid "dest": %s: %s' % (element, dest, msg)
) )
# |src| is the file we read from or path we point to for symlinks. # |src| is the file we read from or path we point to for symlinks.
@ -2204,7 +2141,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
) )
if msg: if msg:
raise ManifestInvalidPathError( raise ManifestInvalidPathError(
f'<{element}> invalid "src": {src}: {msg}' '<%s> invalid "src": %s: %s' % (element, src, msg)
) )
def _ParseCopyFile(self, project, node): def _ParseCopyFile(self, project, node):
@ -2248,7 +2185,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
v = self._remotes.get(name) v = self._remotes.get(name)
if not v: if not v:
raise ManifestParseError( raise ManifestParseError(
f"remote {name} not defined in {self.manifestFile}" "remote %s not defined in %s" % (name, self.manifestFile)
) )
return v return v
@ -2325,6 +2262,7 @@ class RepoClient(XmlManifest):
submanifest_path: The submanifest root relative to the repo root. submanifest_path: The submanifest root relative to the repo root.
**kwargs: Additional keyword arguments, passed to XmlManifest. **kwargs: Additional keyword arguments, passed to XmlManifest.
""" """
self.isGitcClient = False
submanifest_path = submanifest_path or "" submanifest_path = submanifest_path or ""
if submanifest_path: if submanifest_path:
self._CheckLocalPath(submanifest_path) self._CheckLocalPath(submanifest_path)

View File

@ -40,7 +40,7 @@ def RunPager(globalConfig):
def TerminatePager(): def TerminatePager():
global pager_process global pager_process, old_stdout, old_stderr
if pager_process: if pager_process:
sys.stdout.flush() sys.stdout.flush()
sys.stderr.flush() sys.stderr.flush()

View File

@ -57,8 +57,8 @@ def _validate_winpath(path):
if _winpath_is_valid(path): if _winpath_is_valid(path):
return path return path
raise ValueError( raise ValueError(
f'Path "{path}" must be a relative path or an absolute ' 'Path "{}" must be a relative path or an absolute '
"path starting with a drive letter" "path starting with a drive letter".format(path)
) )
@ -156,12 +156,6 @@ def remove(path, missing_ok=False):
os.rmdir(longpath) os.rmdir(longpath)
else: else:
os.remove(longpath) os.remove(longpath)
elif (
e.errno == errno.EROFS
and missing_ok
and not os.path.exists(longpath)
):
pass
elif missing_ok and e.errno == errno.ENOENT: elif missing_ok and e.errno == errno.ENOENT:
pass pass
else: else:
@ -257,3 +251,32 @@ def readlink(path):
return platform_utils_win32.readlink(_makelongpath(path)) return platform_utils_win32.readlink(_makelongpath(path))
else: else:
return os.readlink(path) return os.readlink(path)
def realpath(path):
    """Return the canonical path of the specified filename, eliminating
    any symbolic links encountered in the path.
    Availability: Windows, Unix.
    """
    if not isWindows():
        return os.path.realpath(path)
    # Walk from the leaf toward the root, expanding each symlink we meet.
    # Components are collected leaf-first and re-joined at the end.
    cursor = os.path.abspath(path)
    components = []
    for _ in range(100):  # Bound the walk to avoid symlink cycles.
        if islink(cursor):
            # Resolve the link target relative to its containing directory.
            cursor = os.path.join(os.path.dirname(cursor), readlink(cursor))
            continue
        name = os.path.basename(cursor)
        if not name:
            # Hit the drive/root component; keep it and stop.
            components.append(cursor)
            break
        components.append(name)
        cursor = os.path.dirname(cursor)
    return os.path.normpath(os.path.join(*reversed(components)))

View File

@ -186,7 +186,9 @@ def _create_symlink(source, link_name, dwFlags):
error_desc = FormatError(code).strip() error_desc = FormatError(code).strip()
if code == ERROR_PRIVILEGE_NOT_HELD: if code == ERROR_PRIVILEGE_NOT_HELD:
raise OSError(errno.EPERM, error_desc, link_name) raise OSError(errno.EPERM, error_desc, link_name)
_raise_winerror(code, f'Error creating symbolic link "{link_name}"') _raise_winerror(
code, 'Error creating symbolic link "{}"'.format(link_name)
)
def islink(path): def islink(path):
@ -208,7 +210,7 @@ def readlink(path):
) )
if reparse_point_handle == INVALID_HANDLE_VALUE: if reparse_point_handle == INVALID_HANDLE_VALUE:
_raise_winerror( _raise_winerror(
get_last_error(), f'Error opening symbolic link "{path}"' get_last_error(), 'Error opening symbolic link "{}"'.format(path)
) )
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE) target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
n_bytes_returned = DWORD() n_bytes_returned = DWORD()
@ -225,7 +227,7 @@ def readlink(path):
CloseHandle(reparse_point_handle) CloseHandle(reparse_point_handle)
if not io_result: if not io_result:
_raise_winerror( _raise_winerror(
get_last_error(), f'Error reading symbolic link "{path}"' get_last_error(), 'Error reading symbolic link "{}"'.format(path)
) )
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer) rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK: if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
@ -234,11 +236,11 @@ def readlink(path):
return rdb.MountPointReparseBuffer.PrintName return rdb.MountPointReparseBuffer.PrintName
# Unsupported reparse point type. # Unsupported reparse point type.
_raise_winerror( _raise_winerror(
ERROR_NOT_SUPPORTED, f'Error reading symbolic link "{path}"' ERROR_NOT_SUPPORTED, 'Error reading symbolic link "{}"'.format(path)
) )
def _raise_winerror(code, error_desc): def _raise_winerror(code, error_desc):
win_error_desc = FormatError(code).strip() win_error_desc = FormatError(code).strip()
error_desc = f"{error_desc}: {win_error_desc}" error_desc = "{0}: {1}".format(error_desc, win_error_desc)
raise WinError(code, error_desc) raise WinError(code, error_desc)

View File

@ -52,11 +52,11 @@ def duration_str(total):
uses microsecond resolution. This makes for noisy output. uses microsecond resolution. This makes for noisy output.
""" """
hours, mins, secs = convert_to_hms(total) hours, mins, secs = convert_to_hms(total)
ret = f"{secs:.3f}s" ret = "%.3fs" % (secs,)
if mins: if mins:
ret = f"{mins}m{ret}" ret = "%im%s" % (mins, ret)
if hours: if hours:
ret = f"{hours}h{ret}" ret = "%ih%s" % (hours, ret)
return ret return ret
@ -100,7 +100,6 @@ class Progress:
self._show = not delay self._show = not delay
self._units = units self._units = units
self._elide = elide and _TTY self._elide = elide and _TTY
self._quiet = quiet
# Only show the active jobs section if we run more than one in parallel. # Only show the active jobs section if we run more than one in parallel.
self._show_jobs = False self._show_jobs = False
@ -115,7 +114,13 @@ class Progress:
) )
self._update_thread.daemon = True self._update_thread.daemon = True
if not quiet and show_elapsed: # When quiet, never show any output. It's a bit hacky, but reusing the
# existing logic that delays initial output keeps the rest of the class
# clean. Basically we set the start time to years in the future.
if quiet:
self._show = False
self._start += 2**32
elif show_elapsed:
self._update_thread.start() self._update_thread.start()
def _update_loop(self): def _update_loop(self):
@ -155,7 +160,7 @@ class Progress:
msg = self._last_msg msg = self._last_msg
self._last_msg = msg self._last_msg = msg
if not _TTY or IsTraceToStderr() or self._quiet: if not _TTY or IsTraceToStderr():
return return
elapsed_sec = time.time() - self._start elapsed_sec = time.time() - self._start
@ -197,7 +202,7 @@ class Progress:
def end(self): def end(self):
self._update_event.set() self._update_event.set()
if not _TTY or IsTraceToStderr() or self._quiet: if not _TTY or IsTraceToStderr() or not self._show:
return return
duration = duration_str(time.time() - self._start) duration = duration_str(time.time() - self._start)

File diff suppressed because it is too large Load Diff

View File

@ -16,8 +16,3 @@
line-length = 80 line-length = 80
# NB: Keep in sync with tox.ini. # NB: Keep in sync with tox.ini.
target-version = ['py36', 'py37', 'py38', 'py39', 'py310', 'py311'] #, 'py312' target-version = ['py36', 'py37', 'py38', 'py39', 'py310', 'py311'] #, 'py312'
[tool.pytest.ini_options]
markers = """
skip_cq: Skip tests in the CQ. Should be rarely used!
"""

View File

@ -1,143 +0,0 @@
#!/usr/bin/env python3
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper tool for updating hooks from their various upstreams."""
import argparse
import base64
import json
from pathlib import Path
import sys
from typing import List, Optional
import urllib.request
# Fail fast on interpreters missing the language/library features used below.
assert sys.version_info >= (3, 8), "Python 3.8+ required"
# Repo checkout root: two directory levels above this file.
TOPDIR = Path(__file__).resolve().parent.parent
# Location of the hook scripts this tool refreshes.
HOOKS_DIR = TOPDIR / "hooks"
def update_hook_commit_msg() -> None:
    """Update commit-msg hook from Gerrit."""
    hook = HOOKS_DIR / "commit-msg"
    print(
        f"{hook.name}: Updating from https://gerrit.googlesource.com/gerrit/"
        "+/HEAD/resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
    )
    # Resolve HEAD to a concrete commit so the injected header records
    # exactly which revision we synced from.
    with urllib.request.urlopen(
        "https://gerrit.googlesource.com/gerrit/+/HEAD?format=JSON"
    ) as fp:
        raw = fp.read()
    # Discard the xss protection.
    commit = json.loads(raw.split(b"\n", 1)[1])["commit"]
    # Fetch the hook contents at that commit.
    url = (
        f"https://gerrit.googlesource.com/gerrit/+/{commit}/"
        "resources/com/google/gerrit/server/tools/root/hooks/commit-msg"
    )
    with urllib.request.urlopen(f"{url}?format=TEXT") as fp:
        # gitiles base64 encodes text data.
        body = base64.b64decode(fp.read())
    # Splice a do-not-edit header in right after the shebang line.
    pieces = body.split(b"\n")
    header = [
        b"# DO NOT EDIT THIS FILE",
        (
            b"# All updates should be sent upstream: "
            b"https://gerrit.googlesource.com/gerrit/"
        ),
        f"# This is synced from commit: {commit}".encode("utf-8"),
        b"# DO NOT EDIT THIS FILE",
    ]
    body = b"\n".join(pieces[:1] + header + pieces[1:])
    # Install the refreshed hook and make it executable.
    hook.write_bytes(body)
    hook.chmod(0o755)
def update_hook_pre_auto_gc() -> None:
    """Update pre-auto-gc hook from git."""
    hook = HOOKS_DIR / "pre-auto-gc"
    print(
        f"{hook.name}: Updating from https://github.com/git/git/"
        "HEAD/contrib/hooks/pre-auto-gc-battery"
    )
    # Resolve master to a concrete commit via the GitHub API so the
    # injected header records exactly which revision we synced from.
    request = urllib.request.Request(
        "https://api.github.com/repos/git/git/git/refs/heads/master",
        headers={
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        },
    )
    with urllib.request.urlopen(request) as fp:
        commit = json.loads(fp.read())["object"]["sha"]
    # Fetch the hook contents at that commit.
    url = (
        f"https://raw.githubusercontent.com/git/git/{commit}/"
        "contrib/hooks/pre-auto-gc-battery"
    )
    with urllib.request.urlopen(url) as fp:
        body = fp.read()
    # Splice a do-not-edit header in right after the shebang line.
    pieces = body.split(b"\n")
    header = [
        b"# DO NOT EDIT THIS FILE",
        (
            b"# All updates should be sent upstream: "
            b"https://github.com/git/git/"
        ),
        f"# This is synced from commit: {commit}".encode("utf-8"),
        b"# DO NOT EDIT THIS FILE",
    ]
    body = b"\n".join(pieces[:1] + header + pieces[1:])
    # Install the refreshed hook and make it executable.
    hook.write_bytes(body)
    hook.chmod(0o755)
def main(argv: Optional[List[str]] = None) -> Optional[int]:
    """Entry point: parse CLI args, then refresh every managed hook.

    Args:
        argv: Command-line arguments (without the program name); defaults
            to sys.argv[1:] via argparse when None.

    Returns:
        None (interpreted as a zero exit status by sys.exit).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.parse_args(argv)

    for updater in (update_hook_commit_msg, update_hook_pre_auto_gc):
        updater()
# Script entry point: propagate main()'s return value as the exit status
# (None maps to a zero/success exit code).
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))

View File

@ -16,7 +16,6 @@
import os import os
import re import re
import shlex
import subprocess import subprocess
import sys import sys
@ -36,7 +35,12 @@ KEYID_ECC = "E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39"
def cmdstr(cmd): def cmdstr(cmd):
"""Get a nicely quoted shell command.""" """Get a nicely quoted shell command."""
return " ".join(shlex.quote(x) for x in cmd) ret = []
for arg in cmd:
if not re.match(r"^[a-zA-Z0-9/_.=-]+$", arg):
arg = f'"{arg}"'
ret.append(arg)
return " ".join(ret)
def run(opts, cmd, check=True, **kwargs): def run(opts, cmd, check=True, **kwargs):

334
repo
View File

@ -1,4 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python
# #
# Copyright (C) 2008 The Android Open Source Project # Copyright (C) 2008 The Android Open Source Project
# #
@ -27,14 +27,13 @@ import platform
import shlex import shlex
import subprocess import subprocess
import sys import sys
from typing import NamedTuple
# These should never be newer than the main.py version since this needs to be a # These should never be newer than the main.py version since this needs to be a
# bit more flexible with older systems. See that file for more details on the # bit more flexible with older systems. See that file for more details on the
# versions we select. # versions we select.
MIN_PYTHON_VERSION_SOFT = (3, 6) MIN_PYTHON_VERSION_SOFT = (3, 6)
MIN_PYTHON_VERSION_HARD = (3, 6) MIN_PYTHON_VERSION_HARD = (3, 5)
# Keep basic logic in sync with repo_trace.py. # Keep basic logic in sync with repo_trace.py.
@ -57,14 +56,9 @@ class Trace:
trace = Trace() trace = Trace()
def cmdstr(cmd):
"""Get a nicely quoted shell command."""
return " ".join(shlex.quote(x) for x in cmd)
def exec_command(cmd): def exec_command(cmd):
"""Execute |cmd| or return None on failure.""" """Execute |cmd| or return None on failure."""
trace.print(":", cmdstr(cmd)) trace.print(":", " ".join(cmd))
try: try:
if platform.system() == "Windows": if platform.system() == "Windows":
ret = subprocess.call(cmd) ret = subprocess.call(cmd)
@ -85,13 +79,24 @@ def check_python_version():
major = ver.major major = ver.major
minor = ver.minor minor = ver.minor
# Try to re-exec the version specific Python if needed. # Abort on very old Python 2 versions.
if (major, minor) < (2, 7):
print(
"repo: error: Your Python version is too old. "
"Please use Python {}.{} or newer instead.".format(
*MIN_PYTHON_VERSION_SOFT
),
file=sys.stderr,
)
sys.exit(1)
# Try to re-exec the version specific Python 3 if needed.
if (major, minor) < MIN_PYTHON_VERSION_SOFT: if (major, minor) < MIN_PYTHON_VERSION_SOFT:
# Python makes releases ~once a year, so try our min version +10 to help # Python makes releases ~once a year, so try our min version +10 to help
# bridge the gap. This is the fallback anyways so perf isn't critical. # bridge the gap. This is the fallback anyways so perf isn't critical.
min_major, min_minor = MIN_PYTHON_VERSION_SOFT min_major, min_minor = MIN_PYTHON_VERSION_SOFT
for inc in range(0, 10): for inc in range(0, 10):
reexec(f"python{min_major}.{min_minor + inc}") reexec("python{}.{}".format(min_major, min_minor + inc))
# Fallback to older versions if possible. # Fallback to older versions if possible.
for inc in range( for inc in range(
@ -100,12 +105,47 @@ def check_python_version():
# Don't downgrade, and don't reexec ourselves (which would infinite loop). # Don't downgrade, and don't reexec ourselves (which would infinite loop).
if (min_major, min_minor - inc) <= (major, minor): if (min_major, min_minor - inc) <= (major, minor):
break break
reexec(f"python{min_major}.{min_minor - inc}") reexec("python{}.{}".format(min_major, min_minor - inc))
# Try the generic Python 3 wrapper, but only if it's new enough. If it
# isn't, we want to just give up below and make the user resolve things.
try:
proc = subprocess.Popen(
[
"python3",
"-c",
"import sys; "
"print(sys.version_info.major, sys.version_info.minor)",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, _) = proc.communicate()
python3_ver = tuple(int(x) for x in output.decode("utf-8").split())
except (OSError, subprocess.CalledProcessError):
python3_ver = None
# If the python3 version looks like it's new enough, give it a try.
if (
python3_ver
and python3_ver >= MIN_PYTHON_VERSION_HARD
and python3_ver != (major, minor)
):
reexec("python3")
# We're still here, so diagnose things for the user. # We're still here, so diagnose things for the user.
if (major, minor) < MIN_PYTHON_VERSION_HARD: if major < 3:
print( print(
"repo: error: Python version is too old; " "repo: error: Python 2 is no longer supported; "
"Please upgrade to Python {}.{}+.".format(
*MIN_PYTHON_VERSION_HARD
),
file=sys.stderr,
)
sys.exit(1)
elif (major, minor) < MIN_PYTHON_VERSION_HARD:
print(
"repo: error: Python 3 version is too old; "
"Please use Python {}.{} or newer.".format( "Please use Python {}.{} or newer.".format(
*MIN_PYTHON_VERSION_HARD *MIN_PYTHON_VERSION_HARD
), ),
@ -130,7 +170,7 @@ if not REPO_REV:
BUG_URL = "https://issues.gerritcodereview.com/issues/new?component=1370071" BUG_URL = "https://issues.gerritcodereview.com/issues/new?component=1370071"
# increment this whenever we make important changes to this script # increment this whenever we make important changes to this script
VERSION = (2, 54) VERSION = (2, 39)
# increment this if the MAINTAINER_KEYS block is modified # increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (2, 3) KEYRING_VERSION = (2, 3)
@ -216,21 +256,37 @@ GIT = "git" # our git command
# NB: The version of git that the repo launcher requires may be much older than # NB: The version of git that the repo launcher requires may be much older than
# the version of git that the main repo source tree requires. Keeping this at # the version of git that the main repo source tree requires. Keeping this at
# an older version also makes it easier for users to upgrade/rollback as needed. # an older version also makes it easier for users to upgrade/rollback as needed.
MIN_GIT_VERSION = (1, 7, 9) # minimum supported git version #
# git-1.7 is in (EOL) Ubuntu Precise.
MIN_GIT_VERSION = (1, 7, 2) # minimum supported git version
repodir = ".repo" # name of repo's private directory repodir = ".repo" # name of repo's private directory
S_repo = "repo" # special repo repository S_repo = "repo" # special repo repository
S_manifests = "manifests" # special manifest repository S_manifests = "manifests" # special manifest repository
REPO_MAIN = S_repo + "/main.py" # main script REPO_MAIN = S_repo + "/main.py" # main script
GITC_CONFIG_FILE = "/gitc/.config"
GITC_FS_ROOT_DIR = "/gitc/manifest-rw/"
import collections
import errno import errno
import json import json
import optparse import optparse
import re import re
import shutil import shutil
import stat import stat
if sys.version_info[0] == 3:
import urllib.error import urllib.error
import urllib.request import urllib.request
else:
import imp
import urllib2
urllib = imp.new_module("urllib")
urllib.request = urllib2
urllib.error = urllib2
repo_config_dir = os.getenv("REPO_CONFIG_DIR", os.path.expanduser("~")) repo_config_dir = os.getenv("REPO_CONFIG_DIR", os.path.expanduser("~"))
@ -238,8 +294,11 @@ home_dot_repo = os.path.join(repo_config_dir, ".repoconfig")
gpg_dir = os.path.join(home_dot_repo, "gnupg") gpg_dir = os.path.join(home_dot_repo, "gnupg")
def GetParser(): def GetParser(gitc_init=False):
"""Setup the CLI parser.""" """Setup the CLI parser."""
if gitc_init:
sys.exit("repo: fatal: GITC not supported.")
else:
usage = "repo init [options] [-u] url" usage = "repo init [options] [-u] url"
parser = optparse.OptionParser(usage=usage) parser = optparse.OptionParser(usage=usage)
@ -282,12 +341,6 @@ def InitParser(parser):
metavar="REVISION", metavar="REVISION",
help="manifest branch or revision (use HEAD for default)", help="manifest branch or revision (use HEAD for default)",
) )
group.add_option(
"--manifest-upstream-branch",
help="when a commit is provided to --manifest-branch, this "
"is the name of the git ref in which the commit can be found",
metavar="BRANCH",
)
group.add_option( group.add_option(
"-m", "-m",
"--manifest-name", "--manifest-name",
@ -487,6 +540,16 @@ def InitParser(parser):
return parser return parser
# This is a poor replacement for subprocess.run until we require Python 3.6+.
RunResult = collections.namedtuple(
"RunResult", ("returncode", "stdout", "stderr")
)
class RunError(Exception):
"""Error when running a command failed."""
def run_command(cmd, **kwargs): def run_command(cmd, **kwargs):
"""Run |cmd| and return its output.""" """Run |cmd| and return its output."""
check = kwargs.pop("check", False) check = kwargs.pop("check", False)
@ -503,7 +566,8 @@ def run_command(cmd, **kwargs):
return output.decode("utf-8") return output.decode("utf-8")
except UnicodeError: except UnicodeError:
print( print(
f"repo: warning: Invalid UTF-8 output:\ncmd: {cmd!r}\n{output}", "repo: warning: Invalid UTF-8 output:\ncmd: %r\n%r"
% (cmd, output),
file=sys.stderr, file=sys.stderr,
) )
return output.decode("utf-8", "backslashreplace") return output.decode("utf-8", "backslashreplace")
@ -511,7 +575,7 @@ def run_command(cmd, **kwargs):
# Run & package the results. # Run & package the results.
proc = subprocess.Popen(cmd, **kwargs) proc = subprocess.Popen(cmd, **kwargs)
(stdout, stderr) = proc.communicate(input=cmd_input) (stdout, stderr) = proc.communicate(input=cmd_input)
dbg = ": " + cmdstr(cmd) dbg = ": " + " ".join(cmd)
if cmd_input is not None: if cmd_input is not None:
dbg += " 0<|" dbg += " 0<|"
if stdout == subprocess.PIPE: if stdout == subprocess.PIPE:
@ -521,36 +585,80 @@ def run_command(cmd, **kwargs):
elif stderr == subprocess.STDOUT: elif stderr == subprocess.STDOUT:
dbg += " 2>&1" dbg += " 2>&1"
trace.print(dbg) trace.print(dbg)
ret = subprocess.CompletedProcess( ret = RunResult(proc.returncode, decode(stdout), decode(stderr))
cmd, proc.returncode, decode(stdout), decode(stderr)
)
# If things failed, print useful debugging output. # If things failed, print useful debugging output.
if check and ret.returncode: if check and ret.returncode:
print( print(
f'repo: error: "{cmd[0]}" failed with exit status {ret.returncode}', 'repo: error: "%s" failed with exit status %s'
% (cmd[0], ret.returncode),
file=sys.stderr,
)
print(
" cwd: %s\n cmd: %r" % (kwargs.get("cwd", os.getcwd()), cmd),
file=sys.stderr, file=sys.stderr,
) )
cwd = kwargs.get("cwd", os.getcwd())
print(f" cwd: {cwd}\n cmd: {cmd!r}", file=sys.stderr)
def _print_output(name, output): def _print_output(name, output):
if output: if output:
print( print(
f" {name}:" " %s:\n >> %s"
+ "".join(f"\n >> {x}" for x in output.splitlines()), % (name, "\n >> ".join(output.splitlines())),
file=sys.stderr, file=sys.stderr,
) )
_print_output("stdout", ret.stdout) _print_output("stdout", ret.stdout)
_print_output("stderr", ret.stderr) _print_output("stderr", ret.stderr)
# This will raise subprocess.CalledProcessError for us. raise RunError(ret)
ret.check_returncode()
return ret return ret
_gitc_manifest_dir = None
def get_gitc_manifest_dir():
global _gitc_manifest_dir
if _gitc_manifest_dir is None:
_gitc_manifest_dir = ""
try:
with open(GITC_CONFIG_FILE) as gitc_config:
for line in gitc_config:
match = re.match("gitc_dir=(?P<gitc_manifest_dir>.*)", line)
if match:
_gitc_manifest_dir = match.group("gitc_manifest_dir")
except OSError:
pass
return _gitc_manifest_dir
def gitc_parse_clientdir(gitc_fs_path):
"""Parse a path in the GITC FS and return its client name.
Args:
gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
Returns:
The GITC client name.
"""
if gitc_fs_path == GITC_FS_ROOT_DIR:
return None
if not gitc_fs_path.startswith(GITC_FS_ROOT_DIR):
manifest_dir = get_gitc_manifest_dir()
if manifest_dir == "":
return None
if manifest_dir[-1] != "/":
manifest_dir += "/"
if gitc_fs_path == manifest_dir:
return None
if not gitc_fs_path.startswith(manifest_dir):
return None
return gitc_fs_path.split(manifest_dir)[1].split("/")[0]
return gitc_fs_path.split(GITC_FS_ROOT_DIR)[1].split("/")[0]
class CloneFailure(Exception): class CloneFailure(Exception):
"""Indicate the remote clone of repo itself failed.""" """Indicate the remote clone of repo itself failed."""
@ -587,9 +695,9 @@ def check_repo_rev(dst, rev, repo_verify=True, quiet=False):
return (remote_ref, rev) return (remote_ref, rev)
def _Init(args): def _Init(args, gitc_init=False):
"""Installs repo by cloning it over the network.""" """Installs repo by cloning it over the network."""
parser = GetParser() parser = GetParser(gitc_init=gitc_init)
opt, args = parser.parse_args(args) opt, args = parser.parse_args(args)
if args: if args:
if not opt.manifest_url: if not opt.manifest_url:
@ -611,7 +719,7 @@ def _Init(args):
except OSError as e: except OSError as e:
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
print( print(
f"fatal: cannot make {repodir} directory: {e.strerror}", "fatal: cannot make %s directory: %s" % (repodir, e.strerror),
file=sys.stderr, file=sys.stderr,
) )
# Don't raise CloneFailure; that would delete the # Don't raise CloneFailure; that would delete the
@ -669,20 +777,15 @@ def run_git(*args, **kwargs):
file=sys.stderr, file=sys.stderr,
) )
sys.exit(1) sys.exit(1)
except subprocess.CalledProcessError: except RunError:
raise CloneFailure() raise CloneFailure()
class GitVersion(NamedTuple): # The git version info broken down into components for easy analysis.
"""The git version info broken down into components for easy analysis. # Similar to Python's sys.version_info.
GitVersion = collections.namedtuple(
Similar to Python's sys.version_info. "GitVersion", ("major", "minor", "micro", "full")
""" )
major: int
minor: int
micro: int
full: int
def ParseGitVersion(ver_str=None): def ParseGitVersion(ver_str=None):
@ -714,7 +817,7 @@ def _CheckGitVersion():
if ver_act < MIN_GIT_VERSION: if ver_act < MIN_GIT_VERSION:
need = ".".join(map(str, MIN_GIT_VERSION)) need = ".".join(map(str, MIN_GIT_VERSION))
print( print(
f"fatal: git {need} or later required; found {ver_act.full}", "fatal: git %s or later required; found %s" % (need, ver_act.full),
file=sys.stderr, file=sys.stderr,
) )
raise CloneFailure() raise CloneFailure()
@ -733,8 +836,7 @@ def SetGitTrace2ParentSid(env=None):
KEY = "GIT_TRACE2_PARENT_SID" KEY = "GIT_TRACE2_PARENT_SID"
now = datetime.datetime.now(datetime.timezone.utc) now = datetime.datetime.now(datetime.timezone.utc)
timestamp = now.strftime("%Y%m%dT%H%M%SZ") value = "repo-%s-P%08x" % (now.strftime("%Y%m%dT%H%M%SZ"), os.getpid())
value = f"repo-{timestamp}-P{os.getpid():08x}"
# If it's already set, then append ourselves. # If it's already set, then append ourselves.
if KEY in env: if KEY in env:
@ -778,7 +880,8 @@ def SetupGnuPG(quiet):
except OSError as e: except OSError as e:
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
print( print(
f"fatal: cannot make {home_dot_repo} directory: {e.strerror}", "fatal: cannot make %s directory: %s"
% (home_dot_repo, e.strerror),
file=sys.stderr, file=sys.stderr,
) )
sys.exit(1) sys.exit(1)
@ -788,15 +891,15 @@ def SetupGnuPG(quiet):
except OSError as e: except OSError as e:
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
print( print(
f"fatal: cannot make {gpg_dir} directory: {e.strerror}", "fatal: cannot make %s directory: %s" % (gpg_dir, e.strerror),
file=sys.stderr, file=sys.stderr,
) )
sys.exit(1) sys.exit(1)
if not quiet: if not quiet:
print( print(
"repo: Updating release signing keys to keyset ver " "repo: Updating release signing keys to keyset ver %s"
+ ".".join(str(x) for x in KEYRING_VERSION), % (".".join(str(x) for x in KEYRING_VERSION),)
) )
# NB: We use --homedir (and cwd below) because some environments (Windows) do # NB: We use --homedir (and cwd below) because some environments (Windows) do
# not correctly handle full native paths. We avoid the issue by changing to # not correctly handle full native paths. We avoid the issue by changing to
@ -848,11 +951,10 @@ def _GetRepoConfig(name):
return None return None
else: else:
print( print(
f"repo: error: git {cmdstr(cmd)} failed:\n{ret.stderr}", "repo: error: git %s failed:\n%s" % (" ".join(cmd), ret.stderr),
file=sys.stderr, file=sys.stderr,
) )
# This will raise subprocess.CalledProcessError for us. raise RunError()
ret.check_returncode()
def _InitHttp(): def _InitHttp():
@ -962,7 +1064,7 @@ def _Clone(url, cwd, clone_bundle, quiet, verbose):
os.mkdir(cwd) os.mkdir(cwd)
except OSError as e: except OSError as e:
print( print(
f"fatal: cannot make {cwd} directory: {e.strerror}", "fatal: cannot make %s directory: %s" % (cwd, e.strerror),
file=sys.stderr, file=sys.stderr,
) )
raise CloneFailure() raise CloneFailure()
@ -1002,7 +1104,7 @@ def resolve_repo_rev(cwd, committish):
ret = run_git( ret = run_git(
"rev-parse", "rev-parse",
"--verify", "--verify",
f"{committish}^{{commit}}", "%s^{commit}" % (committish,),
cwd=cwd, cwd=cwd,
check=False, check=False,
) )
@ -1015,7 +1117,7 @@ def resolve_repo_rev(cwd, committish):
rev = resolve("refs/remotes/origin/%s" % committish) rev = resolve("refs/remotes/origin/%s" % committish)
if rev is None: if rev is None:
print( print(
f'repo: error: unknown branch "{committish}"', 'repo: error: unknown branch "%s"' % (committish,),
file=sys.stderr, file=sys.stderr,
) )
raise CloneFailure() raise CloneFailure()
@ -1028,8 +1130,7 @@ def resolve_repo_rev(cwd, committish):
rev = resolve(remote_ref) rev = resolve(remote_ref)
if rev is None: if rev is None:
print( print(
f'repo: error: unknown tag "{committish}"', 'repo: error: unknown tag "%s"' % (committish,), file=sys.stderr
file=sys.stderr,
) )
raise CloneFailure() raise CloneFailure()
return (remote_ref, rev) return (remote_ref, rev)
@ -1037,12 +1138,12 @@ def resolve_repo_rev(cwd, committish):
# See if it's a short branch name. # See if it's a short branch name.
rev = resolve("refs/remotes/origin/%s" % committish) rev = resolve("refs/remotes/origin/%s" % committish)
if rev: if rev:
return (f"refs/heads/{committish}", rev) return ("refs/heads/%s" % (committish,), rev)
# See if it's a tag. # See if it's a tag.
rev = resolve(f"refs/tags/{committish}") rev = resolve("refs/tags/%s" % committish)
if rev: if rev:
return (f"refs/tags/{committish}", rev) return ("refs/tags/%s" % (committish,), rev)
# See if it's a commit. # See if it's a commit.
rev = resolve(committish) rev = resolve(committish)
@ -1051,8 +1152,7 @@ def resolve_repo_rev(cwd, committish):
# Give up! # Give up!
print( print(
f'repo: error: unable to resolve "{committish}"', 'repo: error: unable to resolve "%s"' % (committish,), file=sys.stderr
file=sys.stderr,
) )
raise CloneFailure() raise CloneFailure()
@ -1068,8 +1168,8 @@ def verify_rev(cwd, remote_ref, rev, quiet):
if not quiet: if not quiet:
print(file=sys.stderr) print(file=sys.stderr)
print( print(
f"warning: '{remote_ref}' is not signed; " "warning: '%s' is not signed; falling back to signed release '%s'"
f"falling back to signed release '{cur}'", % (remote_ref, cur),
file=sys.stderr, file=sys.stderr,
) )
print(file=sys.stderr) print(file=sys.stderr)
@ -1119,10 +1219,10 @@ class _Options:
def _ExpandAlias(name): def _ExpandAlias(name):
"""Look up user registered aliases.""" """Look up user registered aliases."""
# We don't resolve aliases for existing subcommands. This matches git. # We don't resolve aliases for existing subcommands. This matches git.
if name in {"help", "init"}: if name in {"gitc-init", "help", "init"}:
return name, [] return name, []
alias = _GetRepoConfig(f"alias.{name}") alias = _GetRepoConfig("alias.%s" % (name,))
if alias is None: if alias is None:
return name, [] return name, []
@ -1178,6 +1278,7 @@ class Requirements:
with open(path, "rb") as f: with open(path, "rb") as f:
data = f.read() data = f.read()
except OSError: except OSError:
# NB: EnvironmentError is used for Python 2 & 3 compatibility.
# If we couldn't open the file, assume it's an old source tree. # If we couldn't open the file, assume it's an old source tree.
return None return None
@ -1197,13 +1298,13 @@ class Requirements:
return cls(json_data) return cls(json_data)
def get_soft_ver(self, pkg): def _get_soft_ver(self, pkg):
"""Return the soft version for |pkg| if it exists.""" """Return the soft version for |pkg| if it exists."""
return tuple(self.requirements.get(pkg, {}).get("soft", ())) return self.requirements.get(pkg, {}).get("soft", ())
def get_hard_ver(self, pkg): def _get_hard_ver(self, pkg):
"""Return the hard version for |pkg| if it exists.""" """Return the hard version for |pkg| if it exists."""
return tuple(self.requirements.get(pkg, {}).get("hard", ())) return self.requirements.get(pkg, {}).get("hard", ())
@staticmethod @staticmethod
def _format_ver(ver): def _format_ver(ver):
@ -1213,24 +1314,22 @@ class Requirements:
def assert_ver(self, pkg, curr_ver): def assert_ver(self, pkg, curr_ver):
"""Verify |pkg|'s |curr_ver| is new enough.""" """Verify |pkg|'s |curr_ver| is new enough."""
curr_ver = tuple(curr_ver) curr_ver = tuple(curr_ver)
soft_ver = tuple(self.get_soft_ver(pkg)) soft_ver = tuple(self._get_soft_ver(pkg))
hard_ver = tuple(self.get_hard_ver(pkg)) hard_ver = tuple(self._get_hard_ver(pkg))
if curr_ver < hard_ver: if curr_ver < hard_ver:
print( print(
f'repo: error: Your version of "{pkg}" ' 'repo: error: Your version of "%s" (%s) is unsupported; '
f"({self._format_ver(curr_ver)}) is unsupported; " "Please upgrade to at least version %s to continue."
"Please upgrade to at least version " % (pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
f"{self._format_ver(soft_ver)} to continue.",
file=sys.stderr, file=sys.stderr,
) )
sys.exit(1) sys.exit(1)
if curr_ver < soft_ver: if curr_ver < soft_ver:
print( print(
f'repo: error: Your version of "{pkg}" ' 'repo: warning: Your version of "%s" (%s) is no longer supported; '
f"({self._format_ver(curr_ver)}) is no longer supported; " "Please upgrade to at least version %s to avoid breakage."
"Please upgrade to at least version " % (pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
f"{self._format_ver(soft_ver)} to continue.",
file=sys.stderr, file=sys.stderr,
) )
@ -1247,6 +1346,10 @@ class Requirements:
def _Usage(): def _Usage():
gitc_usage = ""
if get_gitc_manifest_dir():
gitc_usage = " gitc-init Initialize a GITC Client.\n"
print( print(
"""usage: repo COMMAND [ARGS] """usage: repo COMMAND [ARGS]
@ -1255,7 +1358,9 @@ repo is not yet installed. Use "repo init" to install it here.
The most commonly used repo commands are: The most commonly used repo commands are:
init Install repo in the current working directory init Install repo in the current working directory
help Display detailed help on a command """
+ gitc_usage
+ """ help Display detailed help on a command
For access to the full online help, install repo ("repo init"). For access to the full online help, install repo ("repo init").
""" """
@ -1266,8 +1371,8 @@ For access to the full online help, install repo ("repo init").
def _Help(args): def _Help(args):
if args: if args:
if args[0] in {"init"}: if args[0] in {"init", "gitc-init"}:
parser = GetParser() parser = GetParser(gitc_init=args[0] == "gitc-init")
parser.print_help() parser.print_help()
sys.exit(0) sys.exit(0)
else: else:
@ -1284,16 +1389,21 @@ def _Help(args):
def _Version(): def _Version():
"""Show version information.""" """Show version information."""
git_version = ParseGitVersion()
print("<repo not installed>") print("<repo not installed>")
print(f"repo launcher version {'.'.join(str(x) for x in VERSION)}") print("repo launcher version %s" % (".".join(str(x) for x in VERSION),))
print(f" (from {__file__})") print(" (from %s)" % (__file__,))
print(f"git {git_version.full}" if git_version else "git not installed") print("git %s" % (ParseGitVersion().full,))
print(f"Python {sys.version}") print("Python %s" % sys.version)
uname = platform.uname() uname = platform.uname()
print(f"OS {uname.system} {uname.release} ({uname.version})") if sys.version_info.major < 3:
processor = uname.processor if uname.processor else "unknown" # Python 3 returns a named tuple, but Python 2 is simpler.
print(f"CPU {uname.machine} ({processor})") print(uname)
else:
print("OS %s %s (%s)" % (uname.system, uname.release, uname.version))
print(
"CPU %s (%s)"
% (uname.machine, uname.processor if uname.processor else "unknown")
)
print("Bug reports:", BUG_URL) print("Bug reports:", BUG_URL)
sys.exit(0) sys.exit(0)
@ -1321,11 +1431,11 @@ def _RunSelf(wrapper_path):
my_main = os.path.join(my_dir, "main.py") my_main = os.path.join(my_dir, "main.py")
my_git = os.path.join(my_dir, ".git") my_git = os.path.join(my_dir, ".git")
if os.path.isfile(my_main): if os.path.isfile(my_main) and os.path.isdir(my_git):
for name in ["git_config.py", "project.py", "subcmds"]: for name in ["git_config.py", "project.py", "subcmds"]:
if not os.path.exists(os.path.join(my_dir, name)): if not os.path.exists(os.path.join(my_dir, name)):
return None, None return None, None
return my_main, my_git if os.path.isdir(my_git) else None return my_main, my_git
return None, None return None, None
@ -1356,11 +1466,23 @@ def main(orig_args):
# We run this early as we run some git commands ourselves. # We run this early as we run some git commands ourselves.
SetGitTrace2ParentSid() SetGitTrace2ParentSid()
repo_main, rel_repo_dir = None, None
# Don't use the local repo copy, make sure to switch to the gitc client first.
if cmd != "gitc-init":
repo_main, rel_repo_dir = _FindRepo() repo_main, rel_repo_dir = _FindRepo()
wrapper_path = os.path.abspath(__file__) wrapper_path = os.path.abspath(__file__)
my_main, my_git = _RunSelf(wrapper_path) my_main, my_git = _RunSelf(wrapper_path)
cwd = os.getcwd()
if get_gitc_manifest_dir() and cwd.startswith(get_gitc_manifest_dir()):
print(
"error: repo cannot be used in the GITC local manifest directory."
"\nIf you want to work on this GITC client please rerun this "
"command from the corresponding client under /gitc/",
file=sys.stderr,
)
sys.exit(1)
if not repo_main: if not repo_main:
# Only expand aliases here since we'll be parsing the CLI ourselves. # Only expand aliases here since we'll be parsing the CLI ourselves.
# If we had repo_main, alias expansion would happen in main.py. # If we had repo_main, alias expansion would happen in main.py.
@ -1375,11 +1497,11 @@ def main(orig_args):
_Version() _Version()
if not cmd: if not cmd:
_NotInstalled() _NotInstalled()
if cmd == "init": if cmd == "init" or cmd == "gitc-init":
if my_git: if my_git:
_SetDefaultsTo(my_git) _SetDefaultsTo(my_git)
try: try:
_Init(args) _Init(args, gitc_init=(cmd == "gitc-init"))
except CloneFailure: except CloneFailure:
path = os.path.join(repodir, S_repo) path = os.path.join(repodir, S_repo)
print( print(
@ -1405,14 +1527,6 @@ def main(orig_args):
if reqs: if reqs:
reqs.assert_all() reqs.assert_all()
# Python 3.11 introduces PYTHONSAFEPATH and the -P flag which, if enabled,
# does not prepend the script's directory to sys.path by default.
# repo relies on this import path, so add directory of REPO_MAIN to
# PYTHONPATH so that this continues to work when PYTHONSAFEPATH is enabled.
python_paths = os.environ.get("PYTHONPATH", "").split(os.pathsep)
new_python_paths = [os.path.join(rel_repo_dir, S_repo)] + python_paths
os.environ["PYTHONPATH"] = os.pathsep.join(new_python_paths)
ver_str = ".".join(map(str, VERSION)) ver_str = ".".join(map(str, VERSION))
me = [ me = [
sys.executable, sys.executable,

View File

@ -39,8 +39,8 @@ class _LogColoring(Coloring):
def __init__(self, config): def __init__(self, config):
super().__init__(config, "logs") super().__init__(config, "logs")
self.error = self.nofmt_colorer("error", fg="red") self.error = self.colorer("error", fg="red")
self.warning = self.nofmt_colorer("warn", fg="yellow") self.warning = self.colorer("warn", fg="yellow")
self.levelMap = { self.levelMap = {
"WARNING": self.warning, "WARNING": self.warning,
"ERROR": self.error, "ERROR": self.error,
@ -77,7 +77,6 @@ class RepoLogger(logging.Logger):
if not err.aggregate_errors: if not err.aggregate_errors:
self.error("Repo command failed: %s", type(err).__name__) self.error("Repo command failed: %s", type(err).__name__)
self.error("\t%s", str(err))
return return
self.error( self.error(

View File

@ -46,14 +46,12 @@
# Supported git versions. # Supported git versions.
# #
# git-1.7.2 is in Debian Squeeze.
# git-1.7.9 is in Ubuntu Precise.
# git-1.9.1 is in Ubuntu Trusty. # git-1.9.1 is in Ubuntu Trusty.
# git-2.1.4 is in Debian Jessie. # git-1.7.10 is in Debian Wheezy.
# git-2.7.4 is in Ubuntu Xenial.
# git-2.11.0 is in Debian Stretch.
# git-2.17.0 is in Ubuntu Bionic.
# git-2.20.1 is in Debian Buster.
"git": { "git": {
"hard": [1, 9, 1], "hard": [1, 7, 2],
"soft": [2, 7, 4] "soft": [1, 9, 1]
} }
} }

View File

@ -15,57 +15,16 @@
"""Wrapper to run linters and pytest with the right settings.""" """Wrapper to run linters and pytest with the right settings."""
import functools
import os import os
import subprocess import subprocess
import sys import sys
from typing import List
import pytest
ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
@functools.lru_cache()
def is_ci() -> bool:
"""Whether we're running in our CI system."""
return os.getenv("LUCI_CQ") == "yes"
def run_pytest(argv: List[str]) -> int:
"""Returns the exit code from pytest."""
if is_ci():
argv = ["-m", "not skip_cq"] + argv
return subprocess.run(
[sys.executable, "-m", "pytest"] + argv,
check=False,
cwd=ROOT_DIR,
).returncode
def run_pytest_py38(argv: List[str]) -> int:
"""Returns the exit code from pytest under Python 3.8."""
if is_ci():
argv = ["-m", "not skip_cq"] + argv
try:
return subprocess.run(
[
"vpython3",
"-vpython-spec",
"run_tests.vpython3.8",
"-m",
"pytest",
]
+ argv,
check=False,
cwd=ROOT_DIR,
).returncode
except FileNotFoundError:
# Skip if the user doesn't have vpython from depot_tools.
return 0
def run_black(): def run_black():
"""Returns the exit code from black.""" """Returns the exit code from black."""
# Black by default only matches .py files. We have to list standalone # Black by default only matches .py files. We have to list standalone
@ -73,46 +32,37 @@ def run_black():
extra_programs = [ extra_programs = [
"repo", "repo",
"run_tests", "run_tests",
"release/update-hooks",
"release/update-manpages", "release/update-manpages",
] ]
return subprocess.run( return subprocess.run(
[sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs, [sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs,
check=False, check=False,
cwd=ROOT_DIR,
).returncode ).returncode
def run_flake8(): def run_flake8():
"""Returns the exit code from flake8.""" """Returns the exit code from flake8."""
return subprocess.run( return subprocess.run(
[sys.executable, "-m", "flake8", ROOT_DIR], [sys.executable, "-m", "flake8", ROOT_DIR], check=False
check=False,
cwd=ROOT_DIR,
).returncode ).returncode
def run_isort(): def run_isort():
"""Returns the exit code from isort.""" """Returns the exit code from isort."""
return subprocess.run( return subprocess.run(
[sys.executable, "-m", "isort", "--check", ROOT_DIR], [sys.executable, "-m", "isort", "--check", ROOT_DIR], check=False
check=False,
cwd=ROOT_DIR,
).returncode ).returncode
def main(argv): def main(argv):
"""The main entry.""" """The main entry."""
checks = ( checks = (
functools.partial(run_pytest, argv), lambda: pytest.main(argv),
functools.partial(run_pytest_py38, argv),
run_black, run_black,
run_flake8, run_flake8,
run_isort, run_isort,
) )
# Run all the tests all the time to get full feedback. Don't exit on the return 0 if all(not c() for c in checks) else 1
# first error as that makes it more difficult to iterate in the CQ.
return 1 if sum(c() for c in checks) else 0
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -5,92 +5,97 @@
# List of available wheels: # List of available wheels:
# https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md # https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md
python_version: "3.11" python_version: "3.8"
wheel: < wheel: <
name: "infra/python/wheels/pytest-py3" name: "infra/python/wheels/pytest-py3"
version: "version:8.3.4" version: "version:6.2.2"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/py-py2_py3" name: "infra/python/wheels/py-py2_py3"
version: "version:1.11.0" version: "version:1.10.0"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/iniconfig-py3" name: "infra/python/wheels/iniconfig-py3"
version: "version:1.1.1" version: "version:1.1.1"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/packaging-py3" name: "infra/python/wheels/packaging-py3"
version: "version:23.0" version: "version:23.0"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/pluggy-py3" name: "infra/python/wheels/pluggy-py3"
version: "version:1.5.0" version: "version:0.13.1"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/toml-py3" name: "infra/python/wheels/toml-py3"
version: "version:0.10.1" version: "version:0.10.1"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/pyparsing-py3" name: "infra/python/wheels/pyparsing-py3"
version: "version:3.0.7" version: "version:3.0.7"
> >
# Required by pytest==8.3.4 # Required by pytest==6.2.2
wheel: < wheel: <
name: "infra/python/wheels/attrs-py2_py3" name: "infra/python/wheels/attrs-py2_py3"
version: "version:21.4.0" version: "version:21.4.0"
> >
# NB: Keep in sync with constraints.txt. # Required by packaging==16.8
wheel: < wheel: <
name: "infra/python/wheels/black-py3" name: "infra/python/wheels/six-py2_py3"
version: "version:25.1.0" version: "version:1.16.0"
> >
# Required by black==25.1.0 wheel: <
name: "infra/python/wheels/black-py3"
version: "version:23.1.0"
>
# Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/mypy-extensions-py3" name: "infra/python/wheels/mypy-extensions-py3"
version: "version:0.4.3" version: "version:0.4.3"
> >
# Required by black==25.1.0 # Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/tomli-py3" name: "infra/python/wheels/tomli-py3"
version: "version:2.0.1" version: "version:2.0.1"
> >
# Required by black==25.1.0 # Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/platformdirs-py3" name: "infra/python/wheels/platformdirs-py3"
version: "version:2.5.2" version: "version:2.5.2"
> >
# Required by black==25.1.0 # Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/pathspec-py3" name: "infra/python/wheels/pathspec-py3"
version: "version:0.9.0" version: "version:0.9.0"
> >
# Required by black==25.1.0 # Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/typing-extensions-py3" name: "infra/python/wheels/typing-extensions-py3"
version: "version:4.3.0" version: "version:4.3.0"
> >
# Required by black==25.1.0 # Required by black==23.1.0
wheel: < wheel: <
name: "infra/python/wheels/click-py3" name: "infra/python/wheels/click-py3"
version: "version:8.0.3" version: "version:8.0.3"

View File

@ -1,67 +0,0 @@
# This is a vpython "spec" file.
#
# Read more about `vpython` and how to modify this file here:
# https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md
# List of available wheels:
# https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md
python_version: "3.8"
wheel: <
name: "infra/python/wheels/pytest-py3"
version: "version:8.3.4"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/py-py2_py3"
version: "version:1.11.0"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/iniconfig-py3"
version: "version:1.1.1"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/packaging-py3"
version: "version:23.0"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/pluggy-py3"
version: "version:1.5.0"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/toml-py3"
version: "version:0.10.1"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/tomli-py3"
version: "version:2.1.0"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/pyparsing-py3"
version: "version:3.0.7"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/attrs-py2_py3"
version: "version:21.4.0"
>
# Required by pytest==8.3.4
wheel: <
name: "infra/python/wheels/exceptiongroup-py3"
version: "version:1.1.2"
>

68
ssh.py
View File

@ -24,7 +24,6 @@ import sys
import tempfile import tempfile
import time import time
from git_command import git
import platform_utils import platform_utils
from repo_trace import Trace from repo_trace import Trace
@ -58,12 +57,8 @@ def version():
except FileNotFoundError: except FileNotFoundError:
print("fatal: ssh not installed", file=sys.stderr) print("fatal: ssh not installed", file=sys.stderr)
sys.exit(1) sys.exit(1)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError:
print( print("fatal: unable to detect ssh version", file=sys.stderr)
"fatal: unable to detect ssh version"
f" (code={e.returncode}, output={e.stdout})",
file=sys.stderr,
)
sys.exit(1) sys.exit(1)
@ -170,7 +165,7 @@ class ProxyManager:
# Check to see whether we already think that the master is running; if # Check to see whether we already think that the master is running; if
# we think it's already running, return right away. # we think it's already running, return right away.
if port is not None: if port is not None:
key = f"{host}:{port}" key = "%s:%s" % (host, port)
else: else:
key = host key = host
@ -212,33 +207,7 @@ class ProxyManager:
# and print to the log there. # and print to the log there.
pass pass
# Git protocol V2 is a new feature in git 2.18.0, made default in command = command_base[:1] + ["-M", "-N"] + command_base[1:]
# git 2.26.0
# It is faster and more efficient than V1.
# To enable it when using SSH, the environment variable GIT_PROTOCOL
# must be set in the SSH side channel when establishing the connection
# to the git server.
# See https://git-scm.com/docs/protocol-v2#_ssh_and_file_transport
# Normally git does this by itself. But here, where the SSH connection
# is established manually over ControlMaster via the repo-tool, it must
# be passed in explicitly instead.
# Based on https://git-scm.com/docs/gitprotocol-pack#_extra_parameters,
# GIT_PROTOCOL is considered an "Extra Parameter" and must be ignored
# by servers that do not understand it. This means that it is safe to
# set it even when connecting to older servers.
# It should also be safe to set the environment variable for older
# local git versions, since it is only part of the ssh side channel.
git_protocol_version = _get_git_protocol_version()
ssh_git_protocol_args = [
"-o",
f"SetEnv GIT_PROTOCOL=version={git_protocol_version}",
]
command = (
command_base[:1]
+ ["-M", "-N", *ssh_git_protocol_args]
+ command_base[1:]
)
p = None p = None
try: try:
with Trace("Call to ssh: %s", " ".join(command)): with Trace("Call to ssh: %s", " ".join(command)):
@ -320,32 +289,3 @@ class ProxyManager:
tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
) )
return self._sock_path return self._sock_path
@functools.lru_cache(maxsize=1)
def _get_git_protocol_version() -> str:
"""Return the git protocol version.
The version is found by first reading the global git config.
If no git config for protocol version exists, try to deduce the default
protocol version based on the git version.
See https://git-scm.com/docs/gitprotocol-v2 for details.
"""
try:
return subprocess.check_output(
["git", "config", "--get", "--global", "protocol.version"],
encoding="utf-8",
stderr=subprocess.PIPE,
).strip()
except subprocess.CalledProcessError as e:
if e.returncode == 1:
# Exit code 1 means that the git config key was not found.
# Try to imitate the defaults that git would have used.
git_version = git.version_tuple()
if git_version >= (2, 26, 0):
# Since git version 2.26, protocol v2 is the default.
return "2"
return "1"
# Other exit codes indicate error with reading the config.
raise

View File

@ -37,7 +37,9 @@ for py in os.listdir(my_dir):
try: try:
cmd = getattr(mod, clsn) cmd = getattr(mod, clsn)
except AttributeError: except AttributeError:
raise SyntaxError(f"{__name__}/{py} does not define class {clsn}") raise SyntaxError(
"%s/%s does not define class %s" % (__name__, py, clsn)
)
name = name.replace("_", "-") name = name.replace("_", "-")
cmd.NAME = name cmd.NAME = name

View File

@ -70,10 +70,8 @@ It is equivalent to "git branch -D <branchname>".
else: else:
args.insert(0, "'All local branches'") args.insert(0, "'All local branches'")
@classmethod def _ExecuteOne(self, all_branches, nb, project):
def _ExecuteOne(cls, all_branches, nb, project_idx):
"""Abandon one project.""" """Abandon one project."""
project = cls.get_parallel_context()["projects"][project_idx]
if all_branches: if all_branches:
branches = project.GetBranches() branches = project.GetBranches()
else: else:
@ -91,7 +89,7 @@ It is equivalent to "git branch -D <branchname>".
if status is not None: if status is not None:
ret[name] = status ret[name] = status
return (ret, project_idx, errors) return (ret, project, errors)
def Execute(self, opt, args): def Execute(self, opt, args):
nb = args[0].split() nb = args[0].split()
@ -104,8 +102,7 @@ It is equivalent to "git branch -D <branchname>".
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
def _ProcessResults(_pool, pm, states): def _ProcessResults(_pool, pm, states):
for results, project_idx, errors in states: for results, project, errors in states:
project = all_projects[project_idx]
for branch, status in results.items(): for branch, status in results.items():
if status: if status:
success[branch].append(project) success[branch].append(project)
@ -114,17 +111,14 @@ It is equivalent to "git branch -D <branchname>".
aggregate_errors.extend(errors) aggregate_errors.extend(errors)
pm.update(msg="") pm.update(msg="")
with self.ParallelContext():
self.get_parallel_context()["projects"] = all_projects
self.ExecuteInParallel( self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial(self._ExecuteOne, opt.all, nb), functools.partial(self._ExecuteOne, opt.all, nb),
range(len(all_projects)), all_projects,
callback=_ProcessResults, callback=_ProcessResults,
output=Progress( output=Progress(
f"Abandon {nb}", len(all_projects), quiet=opt.quiet "Abandon %s" % (nb,), len(all_projects), quiet=opt.quiet
), ),
chunksize=1,
) )
width = max( width = max(
@ -158,4 +152,4 @@ It is equivalent to "git branch -D <branchname>".
_RelPath(p) for p in success[br] _RelPath(p) for p in success[br]
) )
) )
print(f"{br}{' ' * (width - len(br))}| {result}\n") print("%s%s| %s\n" % (br, " " * (width - len(br)), result))

View File

@ -98,22 +98,6 @@ is shown, then the branch appears in all projects.
""" """
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
@classmethod
def _ExpandProjectToBranches(cls, project_idx):
"""Expands a project into a list of branch names & associated info.
Args:
project_idx: project.Project index
Returns:
List[Tuple[str, git_config.Branch, int]]
"""
branches = []
project = cls.get_parallel_context()["projects"][project_idx]
for name, b in project.GetBranches().items():
branches.append((name, b, project_idx))
return branches
def Execute(self, opt, args): def Execute(self, opt, args):
projects = self.GetProjects( projects = self.GetProjects(
args, all_manifests=not opt.this_manifest_only args, all_manifests=not opt.this_manifest_only
@ -123,18 +107,15 @@ is shown, then the branch appears in all projects.
project_cnt = len(projects) project_cnt = len(projects)
def _ProcessResults(_pool, _output, results): def _ProcessResults(_pool, _output, results):
for name, b, project_idx in itertools.chain.from_iterable(results): for name, b in itertools.chain.from_iterable(results):
b.project = projects[project_idx]
if name not in all_branches: if name not in all_branches:
all_branches[name] = BranchInfo(name) all_branches[name] = BranchInfo(name)
all_branches[name].add(b) all_branches[name].add(b)
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
self.ExecuteInParallel( self.ExecuteInParallel(
opt.jobs, opt.jobs,
self._ExpandProjectToBranches, expand_project_to_branches,
range(len(projects)), projects,
callback=_ProcessResults, callback=_ProcessResults,
) )
@ -167,10 +148,7 @@ is shown, then the branch appears in all projects.
else: else:
published = " " published = " "
# A branch name can contain a percent sign, so we need to escape it. hdr("%c%c %-*s" % (current, published, width, name))
# Escape after f-string formatting to properly account for leading
# spaces.
hdr(f"{current}{published} {name:{width}}".replace("%", "%%"))
out.write(" |") out.write(" |")
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
@ -196,7 +174,7 @@ is shown, then the branch appears in all projects.
if _RelPath(p) not in have: if _RelPath(p) not in have:
paths.append(_RelPath(p)) paths.append(_RelPath(p))
s = f" {in_type} {', '.join(paths)}" s = " %s %s" % (in_type, ", ".join(paths))
if not i.IsSplitCurrent and (width + 7 + len(s) < 80): if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
fmt = out.current if i.IsCurrent else fmt fmt = out.current if i.IsCurrent else fmt
fmt(s) fmt(s)
@ -213,3 +191,19 @@ is shown, then the branch appears in all projects.
else: else:
out.write(" in all projects") out.write(" in all projects")
out.nl() out.nl()
def expand_project_to_branches(project):
"""Expands a project into a list of branch names & associated information.
Args:
project: project.Project
Returns:
List[Tuple[str, git_config.Branch]]
"""
branches = []
for name, b in project.GetBranches().items():
b.project = project
branches.append((name, b))
return branches

View File

@ -20,6 +20,7 @@ from command import DEFAULT_LOCAL_JOBS
from error import GitError from error import GitError
from error import RepoExitError from error import RepoExitError
from progress import Progress from progress import Progress
from project import Project
from repo_logging import RepoLogger from repo_logging import RepoLogger
@ -29,7 +30,7 @@ logger = RepoLogger(__file__)
class CheckoutBranchResult(NamedTuple): class CheckoutBranchResult(NamedTuple):
# Whether the Project is on the branch (i.e. branch exists and no errors) # Whether the Project is on the branch (i.e. branch exists and no errors)
result: bool result: bool
project_idx: int project: Project
error: Exception error: Exception
@ -61,17 +62,15 @@ The command is equivalent to:
if not args: if not args:
self.Usage() self.Usage()
@classmethod def _ExecuteOne(self, nb, project):
def _ExecuteOne(cls, nb, project_idx):
"""Checkout one project.""" """Checkout one project."""
error = None error = None
result = None result = None
project = cls.get_parallel_context()["projects"][project_idx]
try: try:
result = project.CheckoutBranch(nb) result = project.CheckoutBranch(nb)
except GitError as e: except GitError as e:
error = e error = e
return CheckoutBranchResult(result, project_idx, error) return CheckoutBranchResult(result, project, error)
def Execute(self, opt, args): def Execute(self, opt, args):
nb = args[0] nb = args[0]
@ -84,23 +83,20 @@ The command is equivalent to:
def _ProcessResults(_pool, pm, results): def _ProcessResults(_pool, pm, results):
for result in results: for result in results:
project = all_projects[result.project_idx]
if result.error is not None: if result.error is not None:
err.append(result.error) err.append(result.error)
err_projects.append(project) err_projects.append(result.project)
elif result.result: elif result.result:
success.append(project) success.append(result.project)
pm.update(msg="") pm.update(msg="")
with self.ParallelContext():
self.get_parallel_context()["projects"] = all_projects
self.ExecuteInParallel( self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial(self._ExecuteOne, nb), functools.partial(self._ExecuteOne, nb),
range(len(all_projects)), all_projects,
callback=_ProcessResults, callback=_ProcessResults,
output=Progress( output=Progress(
f"Checkout {nb}", len(all_projects), quiet=opt.quiet "Checkout %s" % (nb,), len(all_projects), quiet=opt.quiet
), ),
) )

View File

@ -40,8 +40,7 @@ to the Unix 'patch' command.
help="paths are relative to the repository root", help="paths are relative to the repository root",
) )
@classmethod def _ExecuteOne(self, absolute, local, project):
def _ExecuteOne(cls, absolute, local, project_idx):
"""Obtains the diff for a specific project. """Obtains the diff for a specific project.
Args: Args:
@ -49,13 +48,12 @@ to the Unix 'patch' command.
local: a boolean, if True, the path is relative to the local local: a boolean, if True, the path is relative to the local
(sub)manifest. If false, the path is relative to the outermost (sub)manifest. If false, the path is relative to the outermost
manifest. manifest.
project_idx: Project index to get status of. project: Project to get status of.
Returns: Returns:
The status of the project. The status of the project.
""" """
buf = io.StringIO() buf = io.StringIO()
project = cls.get_parallel_context()["projects"][project_idx]
ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local) ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
return (ret, buf.getvalue()) return (ret, buf.getvalue())
@ -73,15 +71,12 @@ to the Unix 'patch' command.
ret = 1 ret = 1
return ret return ret
with self.ParallelContext():
self.get_parallel_context()["projects"] = all_projects
return self.ExecuteInParallel( return self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial( functools.partial(
self._ExecuteOne, opt.absolute, opt.this_manifest_only self._ExecuteOne, opt.absolute, opt.this_manifest_only
), ),
range(len(all_projects)), all_projects,
callback=_ProcessResults, callback=_ProcessResults,
ordered=True, ordered=True,
chunksize=1,
) )

View File

@ -87,17 +87,25 @@ synced and their revisions won't be found.
def _printRawDiff(self, diff, pretty_format=None, local=False): def _printRawDiff(self, diff, pretty_format=None, local=False):
_RelPath = lambda p: p.RelPath(local=local) _RelPath = lambda p: p.RelPath(local=local)
for project in diff["added"]: for project in diff["added"]:
self.printText(f"A {_RelPath(project)} {project.revisionExpr}") self.printText(
"A %s %s" % (_RelPath(project), project.revisionExpr)
)
self.out.nl() self.out.nl()
for project in diff["removed"]: for project in diff["removed"]:
self.printText(f"R {_RelPath(project)} {project.revisionExpr}") self.printText(
"R %s %s" % (_RelPath(project), project.revisionExpr)
)
self.out.nl() self.out.nl()
for project, otherProject in diff["changed"]: for project, otherProject in diff["changed"]:
self.printText( self.printText(
f"C {_RelPath(project)} {project.revisionExpr} " "C %s %s %s"
f"{otherProject.revisionExpr}" % (
_RelPath(project),
project.revisionExpr,
otherProject.revisionExpr,
)
) )
self.out.nl() self.out.nl()
self._printLogs( self._printLogs(
@ -110,8 +118,12 @@ synced and their revisions won't be found.
for project, otherProject in diff["unreachable"]: for project, otherProject in diff["unreachable"]:
self.printText( self.printText(
f"U {_RelPath(project)} {project.revisionExpr} " "U %s %s %s"
f"{otherProject.revisionExpr}" % (
_RelPath(project),
project.revisionExpr,
otherProject.revisionExpr,
)
) )
self.out.nl() self.out.nl()
@ -233,9 +245,9 @@ synced and their revisions won't be found.
) )
self.printRevision = self.out.nofmt_printer("revision", fg="yellow") self.printRevision = self.out.nofmt_printer("revision", fg="yellow")
else: else:
self.printProject = self.printAdded = self.printRemoved = ( self.printProject = (
self.printRevision self.printAdded
) = self.printText ) = self.printRemoved = self.printRevision = self.printText
manifest1 = RepoClient(self.repodir) manifest1 = RepoClient(self.repodir)
manifest1.Override(args[0], load_local_manifests=False) manifest1.Override(args[0], load_local_manifests=False)

View File

@ -15,6 +15,7 @@
import errno import errno
import functools import functools
import io import io
import multiprocessing
import os import os
import re import re
import signal import signal
@ -25,6 +26,7 @@ from color import Coloring
from command import Command from command import Command
from command import DEFAULT_LOCAL_JOBS from command import DEFAULT_LOCAL_JOBS
from command import MirrorSafeCommand from command import MirrorSafeCommand
from command import WORKER_BATCH_SIZE
from error import ManifestInvalidRevisionError from error import ManifestInvalidRevisionError
from repo_logging import RepoLogger from repo_logging import RepoLogger
@ -239,6 +241,7 @@ without iterating through the remaining projects.
cmd.insert(cmd.index(cn) + 1, "--color") cmd.insert(cmd.index(cn) + 1, "--color")
mirror = self.manifest.IsMirror mirror = self.manifest.IsMirror
rc = 0
smart_sync_manifest_name = "smart_sync_override.xml" smart_sync_manifest_name = "smart_sync_override.xml"
smart_sync_manifest_path = os.path.join( smart_sync_manifest_path = os.path.join(
@ -261,10 +264,18 @@ without iterating through the remaining projects.
os.environ["REPO_COUNT"] = str(len(projects)) os.environ["REPO_COUNT"] = str(len(projects))
def _ProcessResults(_pool, _output, results): try:
rc = 0 config = self.manifest.manifestProject.config
with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
results_it = pool.imap(
functools.partial(
DoWorkWrapper, mirror, opt, cmd, shell, config
),
enumerate(projects),
chunksize=WORKER_BATCH_SIZE,
)
first = True first = True
for r, output in results: for r, output in results_it:
if output: if output:
if first: if first:
first = False first = False
@ -279,26 +290,9 @@ without iterating through the remaining projects.
rc = rc or r rc = rc or r
if r != 0 and opt.abort_on_errors: if r != 0 and opt.abort_on_errors:
raise Exception("Aborting due to previous error") raise Exception("Aborting due to previous error")
return rc
try:
config = self.manifest.manifestProject.config
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
rc = self.ExecuteInParallel(
opt.jobs,
functools.partial(
self.DoWorkWrapper, mirror, opt, cmd, shell, config
),
range(len(projects)),
callback=_ProcessResults,
ordered=True,
initializer=self.InitWorker,
chunksize=1,
)
except (KeyboardInterrupt, WorkerKeyboardInterrupt): except (KeyboardInterrupt, WorkerKeyboardInterrupt):
# Catch KeyboardInterrupt raised inside and outside of workers # Catch KeyboardInterrupt raised inside and outside of workers
rc = errno.EINTR rc = rc or errno.EINTR
except Exception as e: except Exception as e:
# Catch any other exceptions raised # Catch any other exceptions raised
logger.error( logger.error(
@ -306,16 +300,20 @@ without iterating through the remaining projects.
type(e).__name__, type(e).__name__,
e, e,
) )
rc = getattr(e, "errno", 1) rc = rc or getattr(e, "errno", 1)
if rc != 0: if rc != 0:
sys.exit(rc) sys.exit(rc)
@classmethod
def InitWorker(cls): class WorkerKeyboardInterrupt(Exception):
"""Keyboard interrupt exception for worker processes."""
def InitWorker():
signal.signal(signal.SIGINT, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN)
@classmethod
def DoWorkWrapper(cls, mirror, opt, cmd, shell, config, project_idx): def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
"""A wrapper around the DoWork() method. """A wrapper around the DoWork() method.
Catch the KeyboardInterrupt exceptions here and re-raise them as a Catch the KeyboardInterrupt exceptions here and re-raise them as a
@ -323,18 +321,14 @@ without iterating through the remaining projects.
with stacktraces and making the parent hang indefinitely. with stacktraces and making the parent hang indefinitely.
""" """
project = cls.get_parallel_context()["projects"][project_idx] cnt, project = args
try: try:
return DoWork(project, mirror, opt, cmd, shell, project_idx, config) return DoWork(project, mirror, opt, cmd, shell, cnt, config)
except KeyboardInterrupt: except KeyboardInterrupt:
print("%s: Worker interrupted" % project.name) print("%s: Worker interrupted" % project.name)
raise WorkerKeyboardInterrupt() raise WorkerKeyboardInterrupt()
class WorkerKeyboardInterrupt(Exception):
"""Keyboard interrupt exception for worker processes."""
def DoWork(project, mirror, opt, cmd, shell, cnt, config): def DoWork(project, mirror, opt, cmd, shell, cnt, config):
env = os.environ.copy() env = os.environ.copy()

View File

@ -1,294 +0,0 @@
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import List, Set
from command import Command
from git_command import GitCommand
import platform_utils
from progress import Progress
from project import Project
class Gc(Command):
COMMON = True
helpSummary = "Cleaning up internal repo and Git state."
helpUsage = """
%prog
"""
def _Options(self, p):
p.add_option(
"-n",
"--dry-run",
dest="dryrun",
default=False,
action="store_true",
help="do everything except actually delete",
)
p.add_option(
"-y",
"--yes",
default=False,
action="store_true",
help="answer yes to all safe prompts",
)
p.add_option(
"--repack",
default=False,
action="store_true",
help="repack all projects that use partial clone with "
"filter=blob:none",
)
def _find_git_to_delete(
self, to_keep: Set[str], start_dir: str
) -> Set[str]:
"""Searches no longer needed ".git" directories.
Scans the file system starting from `start_dir` and removes all
directories that end with ".git" that are not in the `to_keep` set.
"""
to_delete = set()
for root, dirs, _ in platform_utils.walk(start_dir):
for directory in dirs:
if not directory.endswith(".git"):
continue
path = os.path.join(root, directory)
if path not in to_keep:
to_delete.add(path)
return to_delete
def delete_unused_projects(self, projects: List[Project], opt):
print(f"Scanning filesystem under {self.repodir}...")
project_paths = set()
project_object_paths = set()
for project in projects:
project_paths.add(project.gitdir)
project_object_paths.add(project.objdir)
to_delete = self._find_git_to_delete(
project_paths, os.path.join(self.repodir, "projects")
)
to_delete.update(
self._find_git_to_delete(
project_object_paths,
os.path.join(self.repodir, "project-objects"),
)
)
if not to_delete:
print("Nothing to clean up.")
return 0
print("Identified the following projects are no longer used:")
print("\n".join(to_delete))
print("")
if not opt.yes:
print(
"If you proceed, any local commits in those projects will be "
"destroyed!"
)
ask = input("Proceed? [y/N] ")
if ask.lower() != "y":
return 1
pm = Progress(
"Deleting",
len(to_delete),
delay=False,
quiet=opt.quiet,
show_elapsed=True,
elide=True,
)
for path in to_delete:
if opt.dryrun:
print(f"\nWould have deleted ${path}")
else:
tmp_path = os.path.join(
os.path.dirname(path),
f"to_be_deleted_{os.path.basename(path)}",
)
platform_utils.rename(path, tmp_path)
platform_utils.rmtree(tmp_path)
pm.update(msg=path)
pm.end()
return 0
def _generate_promisor_files(self, pack_dir: str):
"""Generates promisor files for all pack files in the given directory.
Promisor files are empty files with the same name as the corresponding
pack file but with the ".promisor" extension. They are used by Git.
"""
for root, _, files in platform_utils.walk(pack_dir):
for file in files:
if not file.endswith(".pack"):
continue
with open(os.path.join(root, f"{file[:-4]}promisor"), "w"):
pass
def repack_projects(self, projects: List[Project], opt):
repack_projects = []
# Find all projects eligible for repacking:
# - can't be shared
# - have a specific fetch filter
for project in projects:
if project.config.GetBoolean("extensions.preciousObjects"):
continue
if not project.clone_depth:
continue
if project.manifest.CloneFilterForDepth != "blob:none":
continue
repack_projects.append(project)
if opt.dryrun:
print(f"Would have repacked {len(repack_projects)} projects.")
return 0
pm = Progress(
"Repacking (this will take a while)",
len(repack_projects),
delay=False,
quiet=opt.quiet,
show_elapsed=True,
elide=True,
)
for project in repack_projects:
pm.update(msg=f"{project.name}")
pack_dir = os.path.join(project.gitdir, "tmp_repo_repack")
if os.path.isdir(pack_dir):
platform_utils.rmtree(pack_dir)
os.mkdir(pack_dir)
# Prepare workspace for repacking - remove all unreachable refs and
# their objects.
GitCommand(
project,
["reflog", "expire", "--expire-unreachable=all"],
verify_command=True,
).Wait()
pm.update(msg=f"{project.name} | gc", inc=0)
GitCommand(
project,
["gc"],
verify_command=True,
).Wait()
# Get all objects that are reachable from the remote, and pack them.
pm.update(msg=f"{project.name} | generating list of objects", inc=0)
remote_objects_cmd = GitCommand(
project,
[
"rev-list",
"--objects",
f"--remotes={project.remote.name}",
"--filter=blob:none",
"--tags",
],
capture_stdout=True,
verify_command=True,
)
# Get all local objects and pack them.
local_head_objects_cmd = GitCommand(
project,
["rev-list", "--objects", "HEAD^{tree}"],
capture_stdout=True,
verify_command=True,
)
local_objects_cmd = GitCommand(
project,
[
"rev-list",
"--objects",
"--all",
"--reflog",
"--indexed-objects",
"--not",
f"--remotes={project.remote.name}",
"--tags",
],
capture_stdout=True,
verify_command=True,
)
remote_objects_cmd.Wait()
pm.update(msg=f"{project.name} | remote repack", inc=0)
GitCommand(
project,
["pack-objects", os.path.join(pack_dir, "pack")],
input=remote_objects_cmd.stdout,
capture_stderr=True,
capture_stdout=True,
verify_command=True,
).Wait()
# create promisor file for each pack file
self._generate_promisor_files(pack_dir)
local_head_objects_cmd.Wait()
local_objects_cmd.Wait()
pm.update(msg=f"{project.name} | local repack", inc=0)
GitCommand(
project,
["pack-objects", os.path.join(pack_dir, "pack")],
input=local_head_objects_cmd.stdout + local_objects_cmd.stdout,
capture_stderr=True,
capture_stdout=True,
verify_command=True,
).Wait()
# Swap the old pack directory with the new one.
platform_utils.rename(
os.path.join(project.objdir, "objects", "pack"),
os.path.join(project.objdir, "objects", "pack_old"),
)
platform_utils.rename(
pack_dir,
os.path.join(project.objdir, "objects", "pack"),
)
platform_utils.rmtree(
os.path.join(project.objdir, "objects", "pack_old")
)
pm.end()
return 0
def Execute(self, opt, args):
projects: List[Project] = self.GetProjects(
args, all_manifests=not opt.this_manifest_only
)
ret = self.delete_unused_projects(projects, opt)
if ret != 0:
return ret
if not opt.repack:
return
return self.repack_projects(projects, opt)

View File

@ -23,6 +23,7 @@ from error import GitError
from error import InvalidArgumentsError from error import InvalidArgumentsError
from error import SilentRepoExitError from error import SilentRepoExitError
from git_command import GitCommand from git_command import GitCommand
from project import Project
from repo_logging import RepoLogger from repo_logging import RepoLogger
@ -39,7 +40,7 @@ class GrepColoring(Coloring):
class ExecuteOneResult(NamedTuple): class ExecuteOneResult(NamedTuple):
"""Result from an execute instance.""" """Result from an execute instance."""
project_idx: int project: Project
rc: int rc: int
stdout: str stdout: str
stderr: str stderr: str
@ -261,10 +262,8 @@ contain a line that matches both expressions:
help="Show only file names not containing matching lines", help="Show only file names not containing matching lines",
) )
@classmethod def _ExecuteOne(self, cmd_argv, project):
def _ExecuteOne(cls, cmd_argv, project_idx):
"""Process one project.""" """Process one project."""
project = cls.get_parallel_context()["projects"][project_idx]
try: try:
p = GitCommand( p = GitCommand(
project, project,
@ -275,7 +274,7 @@ contain a line that matches both expressions:
verify_command=True, verify_command=True,
) )
except GitError as e: except GitError as e:
return ExecuteOneResult(project_idx, -1, None, str(e), e) return ExecuteOneResult(project, -1, None, str(e), e)
try: try:
error = None error = None
@ -283,12 +282,10 @@ contain a line that matches both expressions:
except GitError as e: except GitError as e:
rc = 1 rc = 1
error = e error = e
return ExecuteOneResult(project_idx, rc, p.stdout, p.stderr, error) return ExecuteOneResult(project, rc, p.stdout, p.stderr, error)
@staticmethod @staticmethod
def _ProcessResults( def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
full_name, have_rev, opt, projects, _pool, out, results
):
git_failed = False git_failed = False
bad_rev = False bad_rev = False
have_match = False have_match = False
@ -296,10 +293,9 @@ contain a line that matches both expressions:
errors = [] errors = []
for result in results: for result in results:
project = projects[result.project_idx]
if result.rc < 0: if result.rc < 0:
git_failed = True git_failed = True
out.project("--- project %s ---" % _RelPath(project)) out.project("--- project %s ---" % _RelPath(result.project))
out.nl() out.nl()
out.fail("%s", result.stderr) out.fail("%s", result.stderr)
out.nl() out.nl()
@ -315,7 +311,9 @@ contain a line that matches both expressions:
): ):
bad_rev = True bad_rev = True
else: else:
out.project("--- project %s ---" % _RelPath(project)) out.project(
"--- project %s ---" % _RelPath(result.project)
)
out.nl() out.nl()
out.fail("%s", result.stderr.strip()) out.fail("%s", result.stderr.strip())
out.nl() out.nl()
@ -333,13 +331,13 @@ contain a line that matches both expressions:
rev, line = line.split(":", 1) rev, line = line.split(":", 1)
out.write("%s", rev) out.write("%s", rev)
out.write(":") out.write(":")
out.project(_RelPath(project)) out.project(_RelPath(result.project))
out.write("/") out.write("/")
out.write("%s", line) out.write("%s", line)
out.nl() out.nl()
elif full_name: elif full_name:
for line in r: for line in r:
out.project(_RelPath(project)) out.project(_RelPath(result.project))
out.write("/") out.write("/")
out.write("%s", line) out.write("%s", line)
out.nl() out.nl()
@ -383,18 +381,15 @@ contain a line that matches both expressions:
cmd_argv.extend(opt.revision) cmd_argv.extend(opt.revision)
cmd_argv.append("--") cmd_argv.append("--")
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
git_failed, bad_rev, have_match, errors = self.ExecuteInParallel( git_failed, bad_rev, have_match, errors = self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial(self._ExecuteOne, cmd_argv), functools.partial(self._ExecuteOne, cmd_argv),
range(len(projects)), projects,
callback=functools.partial( callback=functools.partial(
self._ProcessResults, full_name, have_rev, opt, projects self._ProcessResults, full_name, have_rev, opt
), ),
output=out, output=out,
ordered=True, ordered=True,
chunksize=1,
) )
if git_failed: if git_failed:

View File

@ -150,7 +150,7 @@ Displays detailed usage information about a command.
def _PrintAllCommandHelp(self): def _PrintAllCommandHelp(self):
for name in sorted(all_commands): for name in sorted(all_commands):
cmd = all_commands[name](manifest=self.manifest) cmd = all_commands[name](manifest=self.manifest)
self._PrintCommandHelp(cmd, header_prefix=f"[{name}] ") self._PrintCommandHelp(cmd, header_prefix="[%s] " % (name,))
def _Options(self, p): def _Options(self, p):
p.add_option( p.add_option(

View File

@ -97,9 +97,7 @@ class Info(PagedCommand):
self.headtext(self.manifest.default.revisionExpr) self.headtext(self.manifest.default.revisionExpr)
self.out.nl() self.out.nl()
self.heading("Manifest merge branch: ") self.heading("Manifest merge branch: ")
# The manifest might not have a merge branch if it isn't in a git repo, self.headtext(mergeBranch)
# e.g. if `repo init --standalone-manifest` is used.
self.headtext(mergeBranch or "")
self.out.nl() self.out.nl()
self.heading("Manifest groups: ") self.heading("Manifest groups: ")
self.headtext(manifestGroups) self.headtext(manifestGroups)
@ -250,7 +248,7 @@ class Info(PagedCommand):
for commit in commits: for commit in commits:
split = commit.split() split = commit.split()
self.text(f"{'':38}{'-'} ") self.text("{0:38}{1} ".format("", "-"))
self.sha(split[0] + " ") self.sha(split[0] + " ")
self.text(" ".join(split[1:])) self.text(" ".join(split[1:]))
self.out.nl() self.out.nl()

View File

@ -21,9 +21,10 @@ from command import MirrorSafeCommand
from error import RepoUnhandledExceptionError from error import RepoUnhandledExceptionError
from error import UpdateManifestError from error import UpdateManifestError
from git_command import git_require from git_command import git_require
from git_command import MIN_GIT_VERSION_HARD
from git_command import MIN_GIT_VERSION_SOFT
from repo_logging import RepoLogger from repo_logging import RepoLogger
from wrapper import Wrapper from wrapper import Wrapper
from wrapper import WrapperDir
logger = RepoLogger(__file__) logger = RepoLogger(__file__)
@ -52,10 +53,6 @@ The optional -b argument can be used to select the manifest branch
to checkout and use. If no branch is specified, the remote's default to checkout and use. If no branch is specified, the remote's default
branch is used. This is equivalent to using -b HEAD. branch is used. This is equivalent to using -b HEAD.
The optional --manifest-upstream-branch argument can be used when a commit is
provided to --manifest-branch (or -b), to specify the name of the git ref in
which the commit can be found.
The optional -m argument can be used to specify an alternate manifest The optional -m argument can be used to specify an alternate manifest
to be used. If no manifest is specified, the manifest default.xml to be used. If no manifest is specified, the manifest default.xml
will be used. will be used.
@ -139,7 +136,6 @@ to update the working directory files.
# manifest project is special and is created when instantiating the # manifest project is special and is created when instantiating the
# manifest which happens before we parse options. # manifest which happens before we parse options.
self.manifest.manifestProject.clone_depth = opt.manifest_depth self.manifest.manifestProject.clone_depth = opt.manifest_depth
self.manifest.manifestProject.upstream = opt.manifest_upstream_branch
clone_filter_for_depth = ( clone_filter_for_depth = (
"blob:none" if (_REPO_ALLOW_SHALLOW == "0") else None "blob:none" if (_REPO_ALLOW_SHALLOW == "0") else None
) )
@ -219,7 +215,7 @@ to update the working directory files.
if not opt.quiet: if not opt.quiet:
print() print()
print(f"Your identity is: {name} <{email}>") print("Your identity is: %s <%s>" % (name, email))
print("is this correct [y/N]? ", end="", flush=True) print("is this correct [y/N]? ", end="", flush=True)
a = sys.stdin.readline().strip().lower() a = sys.stdin.readline().strip().lower()
if a in ("yes", "y", "t", "true"): if a in ("yes", "y", "t", "true"):
@ -322,12 +318,6 @@ to update the working directory files.
" be used with --standalone-manifest." " be used with --standalone-manifest."
) )
if opt.manifest_upstream_branch and opt.manifest_branch is None:
self.OptionParser.error(
"--manifest-upstream-branch cannot be used without "
"--manifest-branch."
)
if args: if args:
if opt.manifest_url: if opt.manifest_url:
self.OptionParser.error( self.OptionParser.error(
@ -341,17 +331,13 @@ to update the working directory files.
self.OptionParser.error("too many arguments to init") self.OptionParser.error("too many arguments to init")
def Execute(self, opt, args): def Execute(self, opt, args):
wrapper = Wrapper() git_require(MIN_GIT_VERSION_HARD, fail=True)
if not git_require(MIN_GIT_VERSION_SOFT):
reqs = wrapper.Requirements.from_dir(WrapperDir())
git_require(reqs.get_hard_ver("git"), fail=True)
min_git_version_soft = reqs.get_soft_ver("git")
if not git_require(min_git_version_soft):
logger.warning( logger.warning(
"repo: warning: git-%s+ will soon be required; " "repo: warning: git-%s+ will soon be required; "
"please upgrade your version of git to maintain " "please upgrade your version of git to maintain "
"support.", "support.",
".".join(str(x) for x in min_git_version_soft), ".".join(str(x) for x in MIN_GIT_VERSION_SOFT),
) )
rp = self.manifest.repoProject rp = self.manifest.repoProject
@ -364,9 +350,10 @@ to update the working directory files.
# Handle new --repo-rev requests. # Handle new --repo-rev requests.
if opt.repo_rev: if opt.repo_rev:
wrapper = Wrapper()
try: try:
remote_ref, rev = wrapper.check_repo_rev( remote_ref, rev = wrapper.check_repo_rev(
rp.worktree, rp.gitdir,
opt.repo_rev, opt.repo_rev,
repo_verify=opt.repo_verify, repo_verify=opt.repo_verify,
quiet=opt.quiet, quiet=opt.quiet,

View File

@ -131,7 +131,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
elif opt.path_only and not opt.name_only: elif opt.path_only and not opt.name_only:
lines.append("%s" % (_getpath(project))) lines.append("%s" % (_getpath(project)))
else: else:
lines.append(f"{_getpath(project)} : {project.name}") lines.append("%s : %s" % (_getpath(project), project.name))
if lines: if lines:
lines.sort() lines.sort()

View File

@ -27,10 +27,8 @@ class Prune(PagedCommand):
""" """
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
@classmethod def _ExecuteOne(self, project):
def _ExecuteOne(cls, project_idx):
"""Process one project.""" """Process one project."""
project = cls.get_parallel_context()["projects"][project_idx]
return project.PruneHeads() return project.PruneHeads()
def Execute(self, opt, args): def Execute(self, opt, args):
@ -43,12 +41,10 @@ class Prune(PagedCommand):
def _ProcessResults(_pool, _output, results): def _ProcessResults(_pool, _output, results):
return list(itertools.chain.from_iterable(results)) return list(itertools.chain.from_iterable(results))
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
all_branches = self.ExecuteInParallel( all_branches = self.ExecuteInParallel(
opt.jobs, opt.jobs,
self._ExecuteOne, self._ExecuteOne,
range(len(projects)), projects,
callback=_ProcessResults, callback=_ProcessResults,
ordered=True, ordered=True,
) )
@ -87,7 +83,9 @@ class Prune(PagedCommand):
) )
if not branch.base_exists: if not branch.base_exists:
print(f"(ignoring: tracking branch is gone: {branch.base})") print(
"(ignoring: tracking branch is gone: %s)" % (branch.base,)
)
else: else:
commits = branch.commits commits = branch.commits
date = branch.date date = branch.date

View File

@ -21,6 +21,7 @@ from error import RepoExitError
from git_command import git from git_command import git
from git_config import IsImmutable from git_config import IsImmutable
from progress import Progress from progress import Progress
from project import Project
from repo_logging import RepoLogger from repo_logging import RepoLogger
@ -28,7 +29,7 @@ logger = RepoLogger(__file__)
class ExecuteOneResult(NamedTuple): class ExecuteOneResult(NamedTuple):
project_idx: int project: Project
error: Exception error: Exception
@ -79,20 +80,18 @@ revision specified in the manifest.
if not git.check_ref_format("heads/%s" % nb): if not git.check_ref_format("heads/%s" % nb):
self.OptionParser.error("'%s' is not a valid name" % nb) self.OptionParser.error("'%s' is not a valid name" % nb)
@classmethod def _ExecuteOne(self, revision, nb, project):
def _ExecuteOne(cls, revision, nb, default_revisionExpr, project_idx):
"""Start one project.""" """Start one project."""
# If the current revision is immutable, such as a SHA1, a tag or # If the current revision is immutable, such as a SHA1, a tag or
# a change, then we can't push back to it. Substitute with # a change, then we can't push back to it. Substitute with
# dest_branch, if defined; or with manifest default revision instead. # dest_branch, if defined; or with manifest default revision instead.
branch_merge = "" branch_merge = ""
error = None error = None
project = cls.get_parallel_context()["projects"][project_idx]
if IsImmutable(project.revisionExpr): if IsImmutable(project.revisionExpr):
if project.dest_branch: if project.dest_branch:
branch_merge = project.dest_branch branch_merge = project.dest_branch
else: else:
branch_merge = default_revisionExpr branch_merge = self.manifest.default.revisionExpr
try: try:
project.StartBranch( project.StartBranch(
@ -101,7 +100,7 @@ revision specified in the manifest.
except Exception as e: except Exception as e:
logger.error("error: unable to checkout %s: %s", project.name, e) logger.error("error: unable to checkout %s: %s", project.name, e)
error = e error = e
return ExecuteOneResult(project_idx, error) return ExecuteOneResult(project, error)
def Execute(self, opt, args): def Execute(self, opt, args):
nb = args[0] nb = args[0]
@ -121,27 +120,18 @@ revision specified in the manifest.
def _ProcessResults(_pool, pm, results): def _ProcessResults(_pool, pm, results):
for result in results: for result in results:
if result.error: if result.error:
project = all_projects[result.project_idx] err_projects.append(result.project)
err_projects.append(project)
err.append(result.error) err.append(result.error)
pm.update(msg="") pm.update(msg="")
with self.ParallelContext():
self.get_parallel_context()["projects"] = all_projects
self.ExecuteInParallel( self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial( functools.partial(self._ExecuteOne, opt.revision, nb),
self._ExecuteOne, all_projects,
opt.revision,
nb,
self.manifest.default.revisionExpr,
),
range(len(all_projects)),
callback=_ProcessResults, callback=_ProcessResults,
output=Progress( output=Progress(
f"Starting {nb}", len(all_projects), quiet=opt.quiet "Starting %s" % (nb,), len(all_projects), quiet=opt.quiet
), ),
chunksize=1,
) )
if err_projects: if err_projects:

View File

@ -88,8 +88,7 @@ the following meanings:
"projects", "projects",
) )
@classmethod def _StatusHelper(self, quiet, local, project):
def _StatusHelper(cls, quiet, local, project_idx):
"""Obtains the status for a specific project. """Obtains the status for a specific project.
Obtains the status for a project, redirecting the output to Obtains the status for a project, redirecting the output to
@ -100,13 +99,12 @@ the following meanings:
local: a boolean, if True, the path is relative to the local local: a boolean, if True, the path is relative to the local
(sub)manifest. If false, the path is relative to the outermost (sub)manifest. If false, the path is relative to the outermost
manifest. manifest.
project_idx: Project index to get status of. project: Project to get status of.
Returns: Returns:
The status of the project. The status of the project.
""" """
buf = io.StringIO() buf = io.StringIO()
project = cls.get_parallel_context()["projects"][project_idx]
ret = project.PrintWorkTreeStatus( ret = project.PrintWorkTreeStatus(
quiet=quiet, output_redir=buf, local=local quiet=quiet, output_redir=buf, local=local
) )
@ -145,17 +143,14 @@ the following meanings:
ret += 1 ret += 1
return ret return ret
with self.ParallelContext():
self.get_parallel_context()["projects"] = all_projects
counter = self.ExecuteInParallel( counter = self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial( functools.partial(
self._StatusHelper, opt.quiet, opt.this_manifest_only self._StatusHelper, opt.quiet, opt.this_manifest_only
), ),
range(len(all_projects)), all_projects,
callback=_ProcessResults, callback=_ProcessResults,
ordered=True, ordered=True,
chunksize=1,
) )
if not opt.quiet and len(all_projects) == counter: if not opt.quiet and len(all_projects) == counter:

View File

@ -21,7 +21,6 @@ import multiprocessing
import netrc import netrc
import optparse import optparse
import os import os
from pathlib import Path
import sys import sys
import tempfile import tempfile
import time import time
@ -83,65 +82,22 @@ from wrapper import Wrapper
_ONE_DAY_S = 24 * 60 * 60 _ONE_DAY_S = 24 * 60 * 60
# Env var to implicitly turn auto-gc back on. This was added to allow a user to
# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
_REPO_AUTO_GC = "REPO_AUTO_GC"
_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
_REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW") _REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")
logger = RepoLogger(__file__) logger = RepoLogger(__file__)
def _SafeCheckoutOrder(checkouts: List[Project]) -> List[List[Project]]:
"""Generate a sequence of checkouts that is safe to perform. The client
should checkout everything from n-th index before moving to n+1.
This is only useful if manifest contains nested projects.
E.g. if foo, foo/bar and foo/bar/baz are project paths, then foo needs to
finish before foo/bar can proceed, and foo/bar needs to finish before
foo/bar/baz."""
res = [[]]
current = res[0]
# depth_stack contains a current stack of parent paths.
depth_stack = []
# Checkouts are iterated in the hierarchical order. That way, it can easily
# be determined if the previous checkout is parent of the current checkout.
# We are splitting by the path separator so the final result is
# hierarchical, and not just lexicographical. For example, if the projects
# are: foo, foo/bar, foo-bar, lexicographical order produces foo, foo-bar
# and foo/bar, which doesn't work.
for checkout in sorted(checkouts, key=lambda x: x.relpath.split("/")):
checkout_path = Path(checkout.relpath)
while depth_stack:
try:
checkout_path.relative_to(depth_stack[-1])
except ValueError:
# Path.relative_to returns ValueError if paths are not relative.
# TODO(sokcevic): Switch to is_relative_to once min supported
# version is py3.9.
depth_stack.pop()
else:
if len(depth_stack) >= len(res):
# Another depth created.
res.append([])
break
current = res[len(depth_stack)]
current.append(checkout)
depth_stack.append(checkout_path)
return res
def _chunksize(projects: int, jobs: int) -> int:
"""Calculate chunk size for the given number of projects and jobs."""
return min(max(1, projects // jobs), WORKER_BATCH_SIZE)
class _FetchOneResult(NamedTuple): class _FetchOneResult(NamedTuple):
"""_FetchOne return value. """_FetchOne return value.
Attributes: Attributes:
success (bool): True if successful. success (bool): True if successful.
project_idx (int): The fetched project index. project (Project): The fetched project.
start (float): The starting time.time(). start (float): The starting time.time().
finish (float): The ending time.time(). finish (float): The ending time.time().
remote_fetched (bool): True if the remote was actually queried. remote_fetched (bool): True if the remote was actually queried.
@ -149,7 +105,7 @@ class _FetchOneResult(NamedTuple):
success: bool success: bool
errors: List[Exception] errors: List[Exception]
project_idx: int project: Project
start: float start: float
finish: float finish: float
remote_fetched: bool remote_fetched: bool
@ -182,14 +138,14 @@ class _CheckoutOneResult(NamedTuple):
Attributes: Attributes:
success (bool): True if successful. success (bool): True if successful.
project_idx (int): The project index. project (Project): The project.
start (float): The starting time.time(). start (float): The starting time.time().
finish (float): The ending time.time(). finish (float): The ending time.time().
""" """
success: bool success: bool
errors: List[Exception] errors: List[Exception]
project_idx: int project: Project
start: float start: float
finish: float finish: float
@ -287,11 +243,6 @@ directories if they have previously been linked to a different
object directory. WARNING: This may cause data to be lost since object directory. WARNING: This may cause data to be lost since
refs may be removed when overwriting. refs may be removed when overwriting.
The --force-checkout option can be used to force git to switch revs even if the
index or the working tree differs from HEAD, and if there are untracked files.
WARNING: This may cause data to be lost since uncommitted changes may be
removed.
The --force-remove-dirty option can be used to remove previously used The --force-remove-dirty option can be used to remove previously used
projects with uncommitted changes. WARNING: This may cause data to be projects with uncommitted changes. WARNING: This may cause data to be
lost since uncommitted changes may be removed with projects that no longer lost since uncommitted changes may be removed with projects that no longer
@ -350,8 +301,6 @@ later is required to fix a server side protocol bug.
# value later on. # value later on.
PARALLEL_JOBS = 0 PARALLEL_JOBS = 0
_JOBS_WARN_THRESHOLD = 100
def _Options(self, p, show_smart=True): def _Options(self, p, show_smart=True):
p.add_option( p.add_option(
"--jobs-network", "--jobs-network",
@ -391,14 +340,6 @@ later is required to fix a server side protocol bug.
"point to a different object directory. WARNING: this " "point to a different object directory. WARNING: this "
"may cause loss of data", "may cause loss of data",
) )
p.add_option(
"--force-checkout",
dest="force_checkout",
action="store_true",
help="force checkout even if it results in throwing away "
"uncommitted modifications. "
"WARNING: this may cause loss of data",
)
p.add_option( p.add_option(
"--force-remove-dirty", "--force-remove-dirty",
dest="force_remove_dirty", dest="force_remove_dirty",
@ -407,13 +348,6 @@ later is required to fix a server side protocol bug.
"projects no longer exist in the manifest. " "projects no longer exist in the manifest. "
"WARNING: this may cause loss of data", "WARNING: this may cause loss of data",
) )
p.add_option(
"--rebase",
dest="rebase",
action="store_true",
help="rebase local commits regardless of whether they are "
"published",
)
p.add_option( p.add_option(
"-l", "-l",
"--local-only", "--local-only",
@ -594,8 +528,7 @@ later is required to fix a server side protocol bug.
branch = branch[len(R_HEADS) :] branch = branch[len(R_HEADS) :]
return branch return branch
@classmethod def _GetCurrentBranchOnly(self, opt, manifest):
def _GetCurrentBranchOnly(cls, opt, manifest):
"""Returns whether current-branch or use-superproject options are """Returns whether current-branch or use-superproject options are
enabled. enabled.
@ -685,7 +618,7 @@ later is required to fix a server side protocol bug.
if not use_super: if not use_super:
continue continue
m.superproject.SetQuiet(not opt.verbose) m.superproject.SetQuiet(opt.quiet)
print_messages = git_superproject.PrintMessages( print_messages = git_superproject.PrintMessages(
opt.use_superproject, m opt.use_superproject, m
) )
@ -713,8 +646,7 @@ later is required to fix a server side protocol bug.
if need_unload: if need_unload:
m.outer_client.manifest.Unload() m.outer_client.manifest.Unload()
@classmethod def _FetchProjectList(self, opt, projects):
def _FetchProjectList(cls, opt, projects):
"""Main function of the fetch worker. """Main function of the fetch worker.
The projects we're given share the same underlying git object store, so The projects we're given share the same underlying git object store, so
@ -726,23 +658,21 @@ later is required to fix a server side protocol bug.
opt: Program options returned from optparse. See _Options(). opt: Program options returned from optparse. See _Options().
projects: Projects to fetch. projects: Projects to fetch.
""" """
return [cls._FetchOne(opt, x) for x in projects] return [self._FetchOne(opt, x) for x in projects]
@classmethod def _FetchOne(self, opt, project):
def _FetchOne(cls, opt, project_idx):
"""Fetch git objects for a single project. """Fetch git objects for a single project.
Args: Args:
opt: Program options returned from optparse. See _Options(). opt: Program options returned from optparse. See _Options().
project_idx: Project index for the project to fetch. project: Project object for the project to fetch.
Returns: Returns:
Whether the fetch was successful. Whether the fetch was successful.
""" """
project = cls.get_parallel_context()["projects"][project_idx]
start = time.time() start = time.time()
k = f"{project.name} @ {project.relpath}" k = f"{project.name} @ {project.relpath}"
cls.get_parallel_context()["sync_dict"][k] = start self._sync_dict[k] = start
success = False success = False
remote_fetched = False remote_fetched = False
errors = [] errors = []
@ -752,7 +682,7 @@ later is required to fix a server side protocol bug.
quiet=opt.quiet, quiet=opt.quiet,
verbose=opt.verbose, verbose=opt.verbose,
output_redir=buf, output_redir=buf,
current_branch_only=cls._GetCurrentBranchOnly( current_branch_only=self._GetCurrentBranchOnly(
opt, project.manifest opt, project.manifest
), ),
force_sync=opt.force_sync, force_sync=opt.force_sync,
@ -762,7 +692,7 @@ later is required to fix a server side protocol bug.
optimized_fetch=opt.optimized_fetch, optimized_fetch=opt.optimized_fetch,
retry_fetches=opt.retry_fetches, retry_fetches=opt.retry_fetches,
prune=opt.prune, prune=opt.prune,
ssh_proxy=cls.get_parallel_context()["ssh_proxy"], ssh_proxy=self.ssh_proxy,
clone_filter=project.manifest.CloneFilter, clone_filter=project.manifest.CloneFilter,
partial_clone_exclude=project.manifest.PartialCloneExclude, partial_clone_exclude=project.manifest.PartialCloneExclude,
clone_filter_for_depth=project.manifest.CloneFilterForDepth, clone_filter_for_depth=project.manifest.CloneFilterForDepth,
@ -794,20 +724,24 @@ later is required to fix a server side protocol bug.
type(e).__name__, type(e).__name__,
e, e,
) )
del self._sync_dict[k]
errors.append(e) errors.append(e)
raise raise
finally:
del cls.get_parallel_context()["sync_dict"][k]
finish = time.time() finish = time.time()
del self._sync_dict[k]
return _FetchOneResult( return _FetchOneResult(
success, errors, project_idx, start, finish, remote_fetched success, errors, project, start, finish, remote_fetched
) )
@classmethod
def _FetchInitChild(cls, ssh_proxy):
cls.ssh_proxy = ssh_proxy
def _GetSyncProgressMessage(self): def _GetSyncProgressMessage(self):
earliest_time = float("inf") earliest_time = float("inf")
earliest_proj = None earliest_proj = None
items = self.get_parallel_context()["sync_dict"].items() items = self._sync_dict.items()
for project, t in items: for project, t in items:
if t < earliest_time: if t < earliest_time:
earliest_time = t earliest_time = t
@ -815,7 +749,7 @@ later is required to fix a server side protocol bug.
if not earliest_proj: if not earliest_proj:
# This function is called when sync is still running but in some # This function is called when sync is still running but in some
# cases (by chance), sync_dict can contain no entries. Return some # cases (by chance), _sync_dict can contain no entries. Return some
# text to indicate that sync is still working. # text to indicate that sync is still working.
return "..working.." return "..working.."
@ -823,19 +757,10 @@ later is required to fix a server side protocol bug.
jobs = jobs_str(len(items)) jobs = jobs_str(len(items))
return f"{jobs} | {elapsed_str(elapsed)} {earliest_proj}" return f"{jobs} | {elapsed_str(elapsed)} {earliest_proj}"
@classmethod
def InitWorker(cls):
# Force connect to the manager server now.
# This is good because workers are initialized one by one. Without this,
# multiple workers may connect to the manager when handling the first
# job at the same time. Then the connection may fail if too many
# connections are pending and execeeded the socket listening backlog,
# especially on MacOS.
len(cls.get_parallel_context()["sync_dict"])
def _Fetch(self, projects, opt, err_event, ssh_proxy, errors): def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
ret = True ret = True
jobs = opt.jobs_network
fetched = set() fetched = set()
remote_fetched = set() remote_fetched = set()
pm = Progress( pm = Progress(
@ -847,6 +772,7 @@ later is required to fix a server side protocol bug.
elide=True, elide=True,
) )
self._sync_dict = multiprocessing.Manager().dict()
sync_event = _threading.Event() sync_event = _threading.Event()
def _MonitorSyncLoop(): def _MonitorSyncLoop():
@ -857,13 +783,19 @@ later is required to fix a server side protocol bug.
sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop) sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
sync_progress_thread.daemon = True sync_progress_thread.daemon = True
sync_progress_thread.start()
def _ProcessResults(pool, pm, results_sets): objdir_project_map = dict()
for project in projects:
objdir_project_map.setdefault(project.objdir, []).append(project)
projects_list = list(objdir_project_map.values())
def _ProcessResults(results_sets):
ret = True ret = True
for results in results_sets: for results in results_sets:
for result in results: for result in results:
success = result.success success = result.success
project = projects[result.project_idx] project = result.project
start = result.start start = result.start
finish = result.finish finish = result.finish
self._fetch_times.Set(project, finish - start) self._fetch_times.Set(project, finish - start)
@ -887,50 +819,58 @@ later is required to fix a server side protocol bug.
fetched.add(project.gitdir) fetched.add(project.gitdir)
pm.update() pm.update()
if not ret and opt.fail_fast: if not ret and opt.fail_fast:
if pool:
pool.close()
break break
return ret return ret
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
self.get_parallel_context()[
"sync_dict"
] = multiprocessing.Manager().dict()
objdir_project_map = dict()
for index, project in enumerate(projects):
objdir_project_map.setdefault(project.objdir, []).append(index)
projects_list = list(objdir_project_map.values())
jobs = max(1, min(opt.jobs_network, len(projects_list)))
# We pass the ssh proxy settings via the class. This allows # We pass the ssh proxy settings via the class. This allows
# multiprocessing to pickle it up when spawning children. We can't # multiprocessing to pickle it up when spawning children. We can't pass
# pass it as an argument to _FetchProjectList below as # it as an argument to _FetchProjectList below as multiprocessing is
# multiprocessing is unable to pickle those. # unable to pickle those.
self.get_parallel_context()["ssh_proxy"] = ssh_proxy Sync.ssh_proxy = None
sync_progress_thread.start() # NB: Multiprocessing is heavy, so don't spin it up for one job.
if not opt.quiet: if len(projects_list) == 1 or jobs == 1:
self._FetchInitChild(ssh_proxy)
if not _ProcessResults(
self._FetchProjectList(opt, x) for x in projects_list
):
ret = False
else:
# Favor throughput over responsiveness when quiet. It seems that
# imap() will yield results in batches relative to chunksize, so
# even as the children finish a sync, we won't see the result until
# one child finishes ~chunksize jobs. When using a large --jobs
# with large chunksize, this can be jarring as there will be a large
# initial delay where repo looks like it isn't doing anything and
# sits at 0%, but then suddenly completes a lot of jobs all at once.
# Since this code is more network bound, we can accept a bit more
# CPU overhead with a smaller chunksize so that the user sees more
# immediate & continuous feedback.
if opt.quiet:
chunksize = WORKER_BATCH_SIZE
else:
pm.update(inc=0, msg="warming up") pm.update(inc=0, msg="warming up")
try: chunksize = 4
ret = self.ExecuteInParallel( with multiprocessing.Pool(
jobs, jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
) as pool:
results = pool.imap_unordered(
functools.partial(self._FetchProjectList, opt), functools.partial(self._FetchProjectList, opt),
projects_list, projects_list,
callback=_ProcessResults, chunksize=chunksize,
output=pm,
# Use chunksize=1 to avoid the chance that some workers are
# idle while other workers still have more than one job in
# their chunk queue.
chunksize=1,
initializer=self.InitWorker,
) )
finally: if not _ProcessResults(results):
sync_event.set() ret = False
sync_progress_thread.join() pool.close()
# Cleanup the reference now that we're done with it, and we're going to
# release any resources it points to. If we don't, later
# multiprocessing usage (e.g. checkouts) will try to pickle and then
# crash.
del Sync.ssh_proxy
sync_event.set()
pm.end()
self._fetch_times.Save() self._fetch_times.Save()
self._local_sync_state.Save() self._local_sync_state.Save()
@ -971,8 +911,6 @@ later is required to fix a server side protocol bug.
if not success: if not success:
err_event.set() err_event.set()
# Call self update, unless requested not to
if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
_PostRepoFetch(rp, opt.repo_verify) _PostRepoFetch(rp, opt.repo_verify)
if opt.network_only: if opt.network_only:
# Bail out now; the rest touches the working tree. # Bail out now; the rest touches the working tree.
@ -1018,32 +956,17 @@ later is required to fix a server side protocol bug.
return _FetchMainResult(all_projects) return _FetchMainResult(all_projects)
@classmethod def _CheckoutOne(self, detach_head, force_sync, project):
def _CheckoutOne(
cls,
detach_head,
force_sync,
force_checkout,
force_rebase,
verbose,
project_idx,
):
"""Checkout work tree for one project """Checkout work tree for one project
Args: Args:
detach_head: Whether to leave a detached HEAD. detach_head: Whether to leave a detached HEAD.
force_sync: Force checking out of .git directory (e.g. overwrite force_sync: Force checking out of the repo.
existing git directory that was previously linked to a different project: Project object for the project to checkout.
object directory).
force_checkout: Force checking out of the repo content.
force_rebase: Force rebase.
verbose: Whether to show verbose messages.
project_idx: Project index for the project to checkout.
Returns: Returns:
Whether the fetch was successful. Whether the fetch was successful.
""" """
project = cls.get_parallel_context()["projects"][project_idx]
start = time.time() start = time.time()
syncbuf = SyncBuffer( syncbuf = SyncBuffer(
project.manifest.manifestProject.config, detach_head=detach_head project.manifest.manifestProject.config, detach_head=detach_head
@ -1052,16 +975,9 @@ later is required to fix a server side protocol bug.
errors = [] errors = []
try: try:
project.Sync_LocalHalf( project.Sync_LocalHalf(
syncbuf, syncbuf, force_sync=force_sync, errors=errors
force_sync=force_sync,
force_checkout=force_checkout,
force_rebase=force_rebase,
errors=errors,
verbose=verbose,
) )
success = syncbuf.Finish() success = syncbuf.Finish()
except KeyboardInterrupt:
logger.error("Keyboard interrupt while processing %s", project.name)
except GitError as e: except GitError as e:
logger.error( logger.error(
"error.GitError: Cannot checkout %s: %s", project.name, e "error.GitError: Cannot checkout %s: %s", project.name, e
@ -1079,7 +995,7 @@ later is required to fix a server side protocol bug.
if not success: if not success:
logger.error("error: Cannot checkout %s", project.name) logger.error("error: Cannot checkout %s", project.name)
finish = time.time() finish = time.time()
return _CheckoutOneResult(success, errors, project_idx, start, finish) return _CheckoutOneResult(success, errors, project, start, finish)
def _Checkout(self, all_projects, opt, err_results, checkout_errors): def _Checkout(self, all_projects, opt, err_results, checkout_errors):
"""Checkout projects listed in all_projects """Checkout projects listed in all_projects
@ -1097,9 +1013,7 @@ later is required to fix a server side protocol bug.
ret = True ret = True
for result in results: for result in results:
success = result.success success = result.success
project = self.get_parallel_context()["projects"][ project = result.project
result.project_idx
]
start = result.start start = result.start
finish = result.finish finish = result.finish
self.event_log.AddSync( self.event_log.AddSync(
@ -1125,28 +1039,14 @@ later is required to fix a server side protocol bug.
pm.update(msg=project.name) pm.update(msg=project.name)
return ret return ret
for projects in _SafeCheckoutOrder(all_projects):
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
proc_res = self.ExecuteInParallel( proc_res = self.ExecuteInParallel(
opt.jobs_checkout, opt.jobs_checkout,
functools.partial( functools.partial(
self._CheckoutOne, self._CheckoutOne, opt.detach_head, opt.force_sync
opt.detach_head,
opt.force_sync,
opt.force_checkout,
opt.rebase,
opt.verbose,
), ),
range(len(projects)), all_projects,
callback=_ProcessResults, callback=_ProcessResults,
output=Progress( output=Progress("Checking out", len(all_projects), quiet=opt.quiet),
"Checking out", len(all_projects), quiet=opt.quiet
),
# Use chunksize=1 to avoid the chance that some workers are
# idle while other workers still have more than one job in
# their chunk queue.
chunksize=1,
) )
self._local_sync_state.Save() self._local_sync_state.Save()
@ -1388,7 +1288,7 @@ later is required to fix a server side protocol bug.
groups=None, groups=None,
) )
project.DeleteWorktree( project.DeleteWorktree(
verbose=opt.verbose, force=opt.force_remove_dirty quiet=opt.quiet, force=opt.force_remove_dirty
) )
new_project_paths.sort() new_project_paths.sort()
@ -1446,10 +1346,7 @@ later is required to fix a server side protocol bug.
for need_remove_file in need_remove_files: for need_remove_file in need_remove_files:
# Try to remove the updated copyfile or linkfile. # Try to remove the updated copyfile or linkfile.
# So, if the file is not exist, nothing need to do. # So, if the file is not exist, nothing need to do.
platform_utils.remove( platform_utils.remove(need_remove_file, missing_ok=True)
os.path.join(self.client.topdir, need_remove_file),
missing_ok=True,
)
# Create copy-link-files.json, save dest path of "copyfile" and # Create copy-link-files.json, save dest path of "copyfile" and
# "linkfile". # "linkfile".
@ -1497,14 +1394,13 @@ later is required to fix a server side protocol bug.
if username and password: if username and password:
manifest_server = manifest_server.replace( manifest_server = manifest_server.replace(
"://", f"://{username}:{password}@", 1 "://", "://%s:%s@" % (username, password), 1
) )
transport = PersistentTransport(manifest_server) transport = PersistentTransport(manifest_server)
if manifest_server.startswith("persistent-"): if manifest_server.startswith("persistent-"):
manifest_server = manifest_server[len("persistent-") :] manifest_server = manifest_server[len("persistent-") :]
# Changes in behavior should update docs/smart-sync.md accordingly.
try: try:
server = xmlrpc.client.Server(manifest_server, transport=transport) server = xmlrpc.client.Server(manifest_server, transport=transport)
if opt.smart_sync: if opt.smart_sync:
@ -1515,19 +1411,6 @@ later is required to fix a server side protocol bug.
[success, manifest_str] = server.GetApprovedManifest( [success, manifest_str] = server.GetApprovedManifest(
branch, target branch, target
) )
elif (
"TARGET_PRODUCT" in os.environ
and "TARGET_BUILD_VARIANT" in os.environ
and "TARGET_RELEASE" in os.environ
):
target = "%s-%s-%s" % (
os.environ["TARGET_PRODUCT"],
os.environ["TARGET_RELEASE"],
os.environ["TARGET_BUILD_VARIANT"],
)
[success, manifest_str] = server.GetApprovedManifest(
branch, target
)
elif ( elif (
"TARGET_PRODUCT" in os.environ "TARGET_PRODUCT" in os.environ
and "TARGET_BUILD_VARIANT" in os.environ and "TARGET_BUILD_VARIANT" in os.environ
@ -1617,7 +1500,7 @@ later is required to fix a server side protocol bug.
buf = TeeStringIO(sys.stdout) buf = TeeStringIO(sys.stdout)
try: try:
result = mp.Sync_NetworkHalf( result = mp.Sync_NetworkHalf(
quiet=not opt.verbose, quiet=opt.quiet,
output_redir=buf, output_redir=buf,
verbose=opt.verbose, verbose=opt.verbose,
current_branch_only=self._GetCurrentBranchOnly( current_branch_only=self._GetCurrentBranchOnly(
@ -1650,17 +1533,16 @@ later is required to fix a server side protocol bug.
syncbuf = SyncBuffer(mp.config) syncbuf = SyncBuffer(mp.config)
start = time.time() start = time.time()
mp.Sync_LocalHalf( mp.Sync_LocalHalf(
syncbuf, syncbuf, submodules=mp.manifest.HasSubmodules, errors=errors
submodules=mp.manifest.HasSubmodules,
errors=errors,
verbose=opt.verbose,
) )
clean = syncbuf.Finish() clean = syncbuf.Finish()
self.event_log.AddSync( self.event_log.AddSync(
mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
) )
if not clean: if not clean:
raise UpdateManifestError(aggregate_errors=errors) raise UpdateManifestError(
aggregate_errors=errors, project=mp.name
)
self._ReloadManifest(manifest_name, mp.manifest) self._ReloadManifest(manifest_name, mp.manifest)
def ValidateOptions(self, opt, args): def ValidateOptions(self, opt, args):
@ -1691,6 +1573,16 @@ later is required to fix a server side protocol bug.
if opt.prune is None: if opt.prune is None:
opt.prune = True opt.prune = True
if opt.auto_gc is None and _AUTO_GC:
logger.error(
"Will run `git gc --auto` because %s is set. %s is deprecated "
"and will be removed in a future release. Use `--auto-gc` "
"instead.",
_REPO_AUTO_GC,
_REPO_AUTO_GC,
)
opt.auto_gc = True
def _ValidateOptionsWithManifest(self, opt, mp): def _ValidateOptionsWithManifest(self, opt, mp):
"""Like ValidateOptions, but after we've updated the manifest. """Like ValidateOptions, but after we've updated the manifest.
@ -1730,29 +1622,11 @@ later is required to fix a server side protocol bug.
opt.jobs_network = min(opt.jobs_network, jobs_soft_limit) opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit) opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
# Warn once if effective job counts seem excessively high.
# Prioritize --jobs, then --jobs-network, then --jobs-checkout.
job_options_to_check = (
("--jobs", opt.jobs),
("--jobs-network", opt.jobs_network),
("--jobs-checkout", opt.jobs_checkout),
)
for name, value in job_options_to_check:
if value > self._JOBS_WARN_THRESHOLD:
logger.warning(
"High job count (%d > %d) specified for %s; this may "
"lead to excessive resource usage or diminishing returns.",
value,
self._JOBS_WARN_THRESHOLD,
name,
)
break
def Execute(self, opt, args): def Execute(self, opt, args):
errors = [] errors = []
try: try:
self._ExecuteHelper(opt, args, errors) self._ExecuteHelper(opt, args, errors)
except (RepoExitError, RepoChangedException): except RepoExitError:
raise raise
except (KeyboardInterrupt, Exception) as e: except (KeyboardInterrupt, Exception) as e:
raise RepoUnhandledExceptionError(e, aggregate_errors=errors) raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
@ -2019,8 +1893,6 @@ def _PostRepoFetch(rp, repo_verify=True, verbose=False):
# We also have to make sure this will switch to an older commit if # We also have to make sure this will switch to an older commit if
# that's the latest tag in order to support release rollback. # that's the latest tag in order to support release rollback.
try: try:
# Refresh index since reset --keep won't do it.
rp.work_git.update_index("-q", "--refresh")
rp.work_git.reset("--keep", new_rev) rp.work_git.reset("--keep", new_rev)
except GitError as e: except GitError as e:
raise RepoUnhandledExceptionError(e) raise RepoUnhandledExceptionError(e)
@ -2139,7 +2011,7 @@ class LocalSyncState:
delete = set() delete = set()
for path in self._state: for path in self._state:
gitdir = os.path.join(self._manifest.topdir, path, ".git") gitdir = os.path.join(self._manifest.topdir, path, ".git")
if not os.path.exists(gitdir) or os.path.islink(gitdir): if not os.path.exists(gitdir):
delete.add(path) delete.add(path)
if not delete: if not delete:
return return

View File

@ -218,14 +218,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
def _Options(self, p): def _Options(self, p):
p.add_option( p.add_option(
"-t", "-t",
"--topic-branch",
dest="auto_topic", dest="auto_topic",
action="store_true", action="store_true",
help="set the topic to the local branch name", help="send local branch name to Gerrit Code Review",
)
p.add_option(
"--topic",
help="set topic for the change",
) )
p.add_option( p.add_option(
"--hashtag", "--hashtag",
@ -249,12 +244,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
default=[], default=[],
help="add a label when uploading", help="add a label when uploading",
) )
p.add_option(
"--pd",
"--patchset-description",
dest="patchset_description",
help="description for patchset",
)
p.add_option( p.add_option(
"--re", "--re",
"--reviewers", "--reviewers",
@ -554,14 +543,42 @@ Gerrit Code Review: https://www.gerritcodereview.com/
people = copy.deepcopy(original_people) people = copy.deepcopy(original_people)
self._AppendAutoList(branch, people) self._AppendAutoList(branch, people)
# Check if there are local changes that may have been forgotten.
changes = branch.project.UncommitedFiles()
if opt.ignore_untracked_files:
untracked = set(branch.project.UntrackedFiles())
changes = [x for x in changes if x not in untracked]
if changes:
key = "review.%s.autoupload" % branch.project.remote.review
answer = branch.project.config.GetBoolean(key)
# If they want to auto upload, let's not ask because it
# could be automated.
if answer is None:
print()
print(
"Uncommitted changes in %s (did you forget to "
"amend?):" % branch.project.name
)
print("\n".join(changes))
print("Continue uploading? (y/N) ", end="", flush=True)
if opt.yes:
print("<--yes>")
a = "yes"
else:
a = sys.stdin.readline().strip().lower()
if a not in ("y", "yes", "t", "true", "on"):
print("skipping upload", file=sys.stderr)
branch.uploaded = False
branch.error = "User aborted"
return
# Check if topic branches should be sent to the server during # Check if topic branches should be sent to the server during
# upload. # upload.
if opt.topic is None:
if opt.auto_topic is not True: if opt.auto_topic is not True:
key = "review.%s.uploadtopic" % branch.project.remote.review key = "review.%s.uploadtopic" % branch.project.remote.review
opt.auto_topic = branch.project.config.GetBoolean(key) opt.auto_topic = branch.project.config.GetBoolean(key)
if opt.auto_topic:
opt.topic = branch.name
def _ExpandCommaList(value): def _ExpandCommaList(value):
"""Split |value| up into comma delimited entries.""" """Split |value| up into comma delimited entries."""
@ -603,22 +620,19 @@ Gerrit Code Review: https://www.gerritcodereview.com/
full_dest = destination full_dest = destination
if not full_dest.startswith(R_HEADS): if not full_dest.startswith(R_HEADS):
full_dest = R_HEADS + full_dest full_dest = R_HEADS + full_dest
full_revision = branch.project.revisionExpr
if not full_revision.startswith(R_HEADS):
full_revision = R_HEADS + full_revision
# If the merge branch of the local branch is different from # If the merge branch of the local branch is different from
# the project's revision AND destination, this might not be # the project's revision AND destination, this might not be
# intentional. # intentional.
if ( if (
merge_branch merge_branch
and merge_branch != full_revision and merge_branch != branch.project.revisionExpr
and merge_branch != full_dest and merge_branch != full_dest
): ):
print( print(
f"For local branch {branch.name}: merge branch " f"For local branch {branch.name}: merge branch "
f"{merge_branch} does not match destination branch " f"{merge_branch} does not match destination branch "
f"{destination} and revision {branch.project.revisionExpr}" f"{destination}"
) )
print("skipping upload.") print("skipping upload.")
print( print(
@ -631,7 +645,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
branch.UploadForReview( branch.UploadForReview(
people, people,
dryrun=opt.dryrun, dryrun=opt.dryrun,
topic=opt.topic, auto_topic=opt.auto_topic,
hashtags=hashtags, hashtags=hashtags,
labels=labels, labels=labels,
private=opt.private, private=opt.private,
@ -641,7 +655,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
dest_branch=destination, dest_branch=destination,
validate_certs=opt.validate_certs, validate_certs=opt.validate_certs,
push_options=opt.push_options, push_options=opt.push_options,
patchset_description=opt.patchset_description,
) )
branch.uploaded = True branch.uploaded = True
@ -716,17 +729,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
merge_branch = p.stdout.strip() merge_branch = p.stdout.strip()
return merge_branch return merge_branch
@classmethod @staticmethod
def _GatherOne(cls, opt, project_idx): def _GatherOne(opt, project):
"""Figure out the upload status for |project|.""" """Figure out the upload status for |project|."""
project = cls.get_parallel_context()["projects"][project_idx]
if opt.current_branch: if opt.current_branch:
cbr = project.CurrentBranch cbr = project.CurrentBranch
up_branch = project.GetUploadableBranch(cbr) up_branch = project.GetUploadableBranch(cbr)
avail = [up_branch] if up_branch else None avail = [up_branch] if up_branch else None
else: else:
avail = project.GetUploadableBranches(opt.branch) avail = project.GetUploadableBranches(opt.branch)
return (project_idx, avail) return (project, avail)
def Execute(self, opt, args): def Execute(self, opt, args):
projects = self.GetProjects( projects = self.GetProjects(
@ -736,8 +748,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
def _ProcessResults(_pool, _out, results): def _ProcessResults(_pool, _out, results):
pending = [] pending = []
for result in results: for result in results:
project_idx, avail = result project, avail = result
project = projects[project_idx]
if avail is None: if avail is None:
logger.error( logger.error(
'repo: error: %s: Unable to upload branch "%s". ' 'repo: error: %s: Unable to upload branch "%s". '
@ -748,15 +759,13 @@ Gerrit Code Review: https://www.gerritcodereview.com/
project.manifest.branch, project.manifest.branch,
) )
elif avail: elif avail:
pending.append((project, avail)) pending.append(result)
return pending return pending
with self.ParallelContext():
self.get_parallel_context()["projects"] = projects
pending = self.ExecuteInParallel( pending = self.ExecuteInParallel(
opt.jobs, opt.jobs,
functools.partial(self._GatherOne, opt), functools.partial(self._GatherOne, opt),
range(len(projects)), projects,
callback=_ProcessResults, callback=_ProcessResults,
) )

View File

@ -42,28 +42,35 @@ class Version(Command, MirrorSafeCommand):
# These might not be the same. Report them both. # These might not be the same. Report them both.
src_ver = RepoSourceVersion() src_ver = RepoSourceVersion()
rp_ver = rp.bare_git.describe(HEAD) rp_ver = rp.bare_git.describe(HEAD)
print(f"repo version {rp_ver}") print("repo version %s" % rp_ver)
print(f" (from {rem.url})") print(" (from %s)" % rem.url)
print(f" (tracking {branch.merge})") print(" (tracking %s)" % branch.merge)
print(f" ({rp.bare_git.log('-1', '--format=%cD', HEAD)})") print(" (%s)" % rp.bare_git.log("-1", "--format=%cD", HEAD))
if self.wrapper_path is not None: if self.wrapper_path is not None:
print(f"repo launcher version {self.wrapper_version}") print("repo launcher version %s" % self.wrapper_version)
print(f" (from {self.wrapper_path})") print(" (from %s)" % self.wrapper_path)
if src_ver != rp_ver: if src_ver != rp_ver:
print(f" (currently at {src_ver})") print(" (currently at %s)" % src_ver)
print(f"repo User-Agent {user_agent.repo}") print("repo User-Agent %s" % user_agent.repo)
print(f"git {git.version_tuple().full}") print("git %s" % git.version_tuple().full)
print(f"git User-Agent {user_agent.git}") print("git User-Agent %s" % user_agent.git)
print(f"Python {sys.version}") print("Python %s" % sys.version)
uname = platform.uname() uname = platform.uname()
if sys.version_info.major < 3: if sys.version_info.major < 3:
# Python 3 returns a named tuple, but Python 2 is simpler. # Python 3 returns a named tuple, but Python 2 is simpler.
print(uname) print(uname)
else: else:
print(f"OS {uname.system} {uname.release} ({uname.version})") print(
processor = uname.processor if uname.processor else "unknown" "OS %s %s (%s)" % (uname.system, uname.release, uname.version)
print(f"CPU {uname.machine} ({processor})") )
print(
"CPU %s (%s)"
% (
uname.machine,
uname.processor if uname.processor else "unknown",
)
)
print("Bug reports:", Wrapper().BUG_URL) print("Bug reports:", Wrapper().BUG_URL)

View File

@ -72,12 +72,3 @@ def tmp_home_dir(monkeypatch, tmp_path_factory):
the function scope. the function scope.
""" """
return _set_home(monkeypatch, tmp_path_factory.mktemp("home")) return _set_home(monkeypatch, tmp_path_factory.mktemp("home"))
@pytest.fixture(autouse=True)
def setup_user_identity(monkeysession, scope="session"):
"""Set env variables for author and committer name and email."""
monkeysession.setenv("GIT_AUTHOR_NAME", "Foo Bar")
monkeysession.setenv("GIT_COMMITTER_NAME", "Foo Bar")
monkeysession.setenv("GIT_AUTHOR_EMAIL", "foo@bar.baz")
monkeysession.setenv("GIT_COMMITTER_EMAIL", "foo@bar.baz")

1
tests/fixtures/gitc_config vendored Normal file
View File

@ -0,0 +1 @@
gitc_dir=/test/usr/local/google/gitc

View File

@ -11,11 +11,3 @@
intk = 10k intk = 10k
intm = 10m intm = 10m
intg = 10g intg = 10g
[color "status"]
one = yellow
two = magenta cyan
three = black red ul
reset = reset
none
empty =

View File

@ -1,74 +0,0 @@
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the color.py module."""
import os
import unittest
import color
import git_config
def fixture(*paths):
"""Return a path relative to test/fixtures."""
return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
class ColoringTests(unittest.TestCase):
"""tests of the Coloring class."""
def setUp(self):
"""Create a GitConfig object using the test.gitconfig fixture."""
config_fixture = fixture("test.gitconfig")
self.config = git_config.GitConfig(config_fixture)
color.SetDefaultColoring("true")
self.color = color.Coloring(self.config, "status")
def test_Color_Parse_all_params_none(self):
"""all params are None"""
val = self.color._parse(None, None, None, None)
self.assertEqual("", val)
def test_Color_Parse_first_parameter_none(self):
"""check fg & bg & attr"""
val = self.color._parse(None, "black", "red", "ul")
self.assertEqual("\x1b[4;30;41m", val)
def test_Color_Parse_one_entry(self):
"""check fg"""
val = self.color._parse("one", None, None, None)
self.assertEqual("\033[33m", val)
def test_Color_Parse_two_entry(self):
"""check fg & bg"""
val = self.color._parse("two", None, None, None)
self.assertEqual("\033[35;46m", val)
def test_Color_Parse_three_entry(self):
"""check fg & bg & attr"""
val = self.color._parse("three", None, None, None)
self.assertEqual("\033[4;30;41m", val)
def test_Color_Parse_reset_entry(self):
"""check reset entry"""
val = self.color._parse("reset", None, None, None)
self.assertEqual("\033[m", val)
def test_Color_Parse_empty_entry(self):
"""check empty entry"""
val = self.color._parse("none", "blue", "white", "dim")
self.assertEqual("\033[2;34;47m", val)
val = self.color._parse("empty", "green", "white", "bold")
self.assertEqual("\033[1;32;47m", val)

View File

@ -19,9 +19,12 @@ import os
import re import re
import subprocess import subprocess
import unittest import unittest
from unittest import mock
import pytest
try:
from unittest import mock
except ImportError:
import mock
import git_command import git_command
import wrapper import wrapper
@ -265,7 +268,6 @@ class UserAgentUnitTest(unittest.TestCase):
m = re.match(r"^[^ ]+$", os_name) m = re.match(r"^[^ ]+$", os_name)
self.assertIsNotNone(m) self.assertIsNotNone(m)
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this fails in CQ")
def test_smoke_repo(self): def test_smoke_repo(self):
"""Make sure repo UA returns something useful.""" """Make sure repo UA returns something useful."""
ua = git_command.user_agent.repo ua = git_command.user_agent.repo
@ -274,7 +276,6 @@ class UserAgentUnitTest(unittest.TestCase):
m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua) m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
self.assertIsNotNone(m) self.assertIsNotNone(m)
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this fails in CQ")
def test_smoke_git(self): def test_smoke_git(self):
"""Make sure git UA returns something useful.""" """Make sure git UA returns something useful."""
ua = git_command.user_agent.git ua = git_command.user_agent.git

View File

@ -100,7 +100,7 @@ class GitConfigReadOnlyTests(unittest.TestCase):
("intg", 10737418240), ("intg", 10737418240),
) )
for key, value in TESTS: for key, value in TESTS:
self.assertEqual(value, self.config.GetInt(f"section.{key}")) self.assertEqual(value, self.config.GetInt("section.%s" % (key,)))
class GitConfigReadWriteTests(unittest.TestCase): class GitConfigReadWriteTests(unittest.TestCase):

View File

@ -21,7 +21,6 @@ import tempfile
import unittest import unittest
from unittest import mock from unittest import mock
import pytest
from test_manifest_xml import sort_attributes from test_manifest_xml import sort_attributes
import git_superproject import git_superproject
@ -35,7 +34,7 @@ class SuperprojectTestCase(unittest.TestCase):
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID" PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
PARENT_SID_VALUE = "parent_sid" PARENT_SID_VALUE = "parent_sid"
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*" SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
FULL_SID_REGEX = rf"^{PARENT_SID_VALUE}/{SELF_SID_REGEX}" FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
def setUp(self): def setUp(self):
"""Set up superproject every time.""" """Set up superproject every time."""
@ -146,7 +145,6 @@ class SuperprojectTestCase(unittest.TestCase):
) )
self.assertIsNone(manifest.superproject) self.assertIsNone(manifest.superproject)
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this takes 8m+ in CQ")
def test_superproject_get_superproject_invalid_url(self): def test_superproject_get_superproject_invalid_url(self):
"""Test with an invalid url.""" """Test with an invalid url."""
manifest = self.getXmlManifest( manifest = self.getXmlManifest(
@ -170,7 +168,6 @@ class SuperprojectTestCase(unittest.TestCase):
self.assertFalse(sync_result.success) self.assertFalse(sync_result.success)
self.assertTrue(sync_result.fatal) self.assertTrue(sync_result.fatal)
@pytest.mark.skip_cq("TODO(b/266734831): Find out why this takes 8m+ in CQ")
def test_superproject_get_superproject_invalid_branch(self): def test_superproject_get_superproject_invalid_branch(self):
"""Test with an invalid branch.""" """Test with an invalid branch."""
manifest = self.getXmlManifest( manifest = self.getXmlManifest(

View File

@ -61,7 +61,7 @@ class EventLogTestCase(unittest.TestCase):
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID" PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
PARENT_SID_VALUE = "parent_sid" PARENT_SID_VALUE = "parent_sid"
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*" SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
FULL_SID_REGEX = rf"^{PARENT_SID_VALUE}/{SELF_SID_REGEX}" FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
def setUp(self): def setUp(self):
"""Load the event_log module every time.""" """Load the event_log module every time."""
@ -150,7 +150,7 @@ class EventLogTestCase(unittest.TestCase):
<version event> <version event>
<start event> <start event>
""" """
self._event_log_module.StartEvent([]) self._event_log_module.StartEvent()
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir: with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
log_path = self._event_log_module.Write(path=tempdir) log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path) self._log_data = self.readLog(log_path)
@ -213,8 +213,10 @@ class EventLogTestCase(unittest.TestCase):
<version event> <version event>
<command event> <command event>
""" """
name = "repo"
subcommands = ["init" "this"]
self._event_log_module.CommandEvent( self._event_log_module.CommandEvent(
name="repo", subcommands=["init", "this"] name="repo", subcommands=subcommands
) )
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir: with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
log_path = self._event_log_module.Write(path=tempdir) log_path = self._event_log_module.Write(path=tempdir)
@ -223,10 +225,12 @@ class EventLogTestCase(unittest.TestCase):
self.assertEqual(len(self._log_data), 2) self.assertEqual(len(self._log_data), 2)
command_event = self._log_data[1] command_event = self._log_data[1]
self.verifyCommonKeys(self._log_data[0], expected_event_name="version") self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
self.verifyCommonKeys(command_event, expected_event_name="cmd_name") self.verifyCommonKeys(command_event, expected_event_name="command")
# Check for 'command' event specific fields. # Check for 'command' event specific fields.
self.assertIn("name", command_event) self.assertIn("name", command_event)
self.assertEqual(command_event["name"], "repo-init-this") self.assertIn("subcommands", command_event)
self.assertEqual(command_event["name"], name)
self.assertEqual(command_event["subcommands"], subcommands)
def test_def_params_event_repo_config(self): def test_def_params_event_repo_config(self):
"""Test 'def_params' event data outputs only repo config keys. """Test 'def_params' event data outputs only repo config keys.
@ -378,17 +382,17 @@ class EventLogTestCase(unittest.TestCase):
socket_path = os.path.join(tempdir, "server.sock") socket_path = os.path.join(tempdir, "server.sock")
server_ready = threading.Condition() server_ready = threading.Condition()
# Start "server" listening on Unix domain socket at socket_path. # Start "server" listening on Unix domain socket at socket_path.
try:
server_thread = threading.Thread( server_thread = threading.Thread(
target=serverLoggingThread, target=serverLoggingThread,
args=(socket_path, server_ready, received_traces), args=(socket_path, server_ready, received_traces),
) )
try:
server_thread.start() server_thread.start()
with server_ready: with server_ready:
server_ready.wait(timeout=120) server_ready.wait(timeout=120)
self._event_log_module.StartEvent([]) self._event_log_module.StartEvent()
path = self._event_log_module.Write( path = self._event_log_module.Write(
path=f"af_unix:{socket_path}" path=f"af_unix:{socket_path}"
) )

View File

@ -51,7 +51,7 @@ INVALID_FS_PATHS = (
"foo~", "foo~",
"blah/foo~", "blah/foo~",
# Block Unicode characters that get normalized out by filesystems. # Block Unicode characters that get normalized out by filesystems.
"foo\u200cbar", "foo\u200Cbar",
# Block newlines. # Block newlines.
"f\n/bar", "f\n/bar",
"f\r/bar", "f\r/bar",
@ -198,13 +198,13 @@ class ValueTests(unittest.TestCase):
def test_bool_true(self): def test_bool_true(self):
"""Check XmlBool true values.""" """Check XmlBool true values."""
for value in ("yes", "true", "1"): for value in ("yes", "true", "1"):
node = self._get_node(f'<node a="{value}"/>') node = self._get_node('<node a="%s"/>' % (value,))
self.assertTrue(manifest_xml.XmlBool(node, "a")) self.assertTrue(manifest_xml.XmlBool(node, "a"))
def test_bool_false(self): def test_bool_false(self):
"""Check XmlBool false values.""" """Check XmlBool false values."""
for value in ("no", "false", "0"): for value in ("no", "false", "0"):
node = self._get_node(f'<node a="{value}"/>') node = self._get_node('<node a="%s"/>' % (value,))
self.assertFalse(manifest_xml.XmlBool(node, "a")) self.assertFalse(manifest_xml.XmlBool(node, "a"))
def test_int_default(self): def test_int_default(self):
@ -220,7 +220,7 @@ class ValueTests(unittest.TestCase):
def test_int_good(self): def test_int_good(self):
"""Check XmlInt numeric handling.""" """Check XmlInt numeric handling."""
for value in (-1, 0, 1, 50000): for value in (-1, 0, 1, 50000):
node = self._get_node(f'<node a="{value}"/>') node = self._get_node('<node a="%s"/>' % (value,))
self.assertEqual(value, manifest_xml.XmlInt(node, "a")) self.assertEqual(value, manifest_xml.XmlInt(node, "a"))
def test_int_invalid(self): def test_int_invalid(self):
@ -1049,91 +1049,6 @@ class RemoveProjectElementTests(ManifestParseTestCase):
self.assertTrue(found_proj1_path1) self.assertTrue(found_proj1_path1)
self.assertTrue(found_proj2) self.assertTrue(found_proj2)
def test_base_revision_checks_on_patching(self):
manifest_fail_wrong_tag = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="tag.002" />
<project name="project1" path="tests/path1" />
<extend-project name="project1" revision="new_hash" base-rev="tag.001" />
</manifest>
"""
)
with self.assertRaises(error.ManifestParseError):
manifest_fail_wrong_tag.ToXml()
manifest_fail_remove = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="project1" path="tests/path1" revision="hash1" />
<remove-project name="project1" base-rev="wrong_hash" />
</manifest>
"""
)
with self.assertRaises(error.ManifestParseError):
manifest_fail_remove.ToXml()
manifest_fail_extend = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="project1" path="tests/path1" revision="hash1" />
<extend-project name="project1" revision="new_hash" base-rev="wrong_hash" />
</manifest>
"""
)
with self.assertRaises(error.ManifestParseError):
manifest_fail_extend.ToXml()
manifest_fail_unknown = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="project1" path="tests/path1" />
<extend-project name="project1" revision="new_hash" base-rev="any_hash" />
</manifest>
"""
)
with self.assertRaises(error.ManifestParseError):
manifest_fail_unknown.ToXml()
manifest_ok = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="project1" path="tests/path1" revision="hash1" />
<project name="project2" path="tests/path2" revision="hash2" />
<project name="project3" path="tests/path3" revision="hash3" />
<project name="project4" path="tests/path4" revision="hash4" />
<remove-project name="project1" />
<remove-project name="project2" base-rev="hash2" />
<project name="project2" path="tests/path2" revision="new_hash2" />
<extend-project name="project3" base-rev="hash3" revision="new_hash3" />
<extend-project name="project3" base-rev="new_hash3" revision="newer_hash3" />
<remove-project path="tests/path4" base-rev="hash4" />
</manifest>
"""
)
found_proj2 = False
found_proj3 = False
for proj in manifest_ok.projects:
if proj.name == "project2":
found_proj2 = True
if proj.name == "project3":
found_proj3 = True
self.assertNotEqual(proj.name, "project1")
self.assertNotEqual(proj.name, "project4")
self.assertTrue(found_proj2)
self.assertTrue(found_proj3)
self.assertTrue(len(manifest_ok.projects) == 2)
class ExtendProjectElementTests(ManifestParseTestCase): class ExtendProjectElementTests(ManifestParseTestCase):
"""Tests for <extend-project>.""" """Tests for <extend-project>."""
@ -1213,79 +1128,3 @@ class ExtendProjectElementTests(ManifestParseTestCase):
) )
self.assertEqual(len(manifest.projects), 1) self.assertEqual(len(manifest.projects), 1)
self.assertEqual(manifest.projects[0].upstream, "bar") self.assertEqual(manifest.projects[0].upstream, "bar")
class NormalizeUrlTests(ManifestParseTestCase):
"""Tests for normalize_url() in manifest_xml.py"""
def test_has_trailing_slash(self):
url = "http://foo.com/bar/baz/"
self.assertEqual(
"http://foo.com/bar/baz", manifest_xml.normalize_url(url)
)
url = "http://foo.com/bar/"
self.assertEqual("http://foo.com/bar", manifest_xml.normalize_url(url))
def test_has_leading_slash(self):
"""SCP-like syntax except a / comes before the : which git disallows."""
url = "/git@foo.com:bar/baf"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "gi/t@foo.com:bar/baf"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "git@fo/o.com:bar/baf"
self.assertEqual(url, manifest_xml.normalize_url(url))
def test_has_no_scheme(self):
"""Deal with cases where we have no scheme, but we also
aren't dealing with the git SCP-like syntax
"""
url = "foo.com/baf/bat"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "foo.com/baf"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "git@foo.com/baf/bat"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "git@foo.com/baf"
self.assertEqual(url, manifest_xml.normalize_url(url))
url = "/file/path/here"
self.assertEqual(url, manifest_xml.normalize_url(url))
def test_has_no_scheme_matches_scp_like_syntax(self):
url = "git@foo.com:bar/baf"
self.assertEqual(
"ssh://git@foo.com/bar/baf", manifest_xml.normalize_url(url)
)
url = "git@foo.com:bar/"
self.assertEqual(
"ssh://git@foo.com/bar", manifest_xml.normalize_url(url)
)
def test_remote_url_resolution(self):
remote = manifest_xml._XmlRemote(
name="foo",
fetch="git@github.com:org2/",
manifestUrl="git@github.com:org2/custom_manifest.git",
)
self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
remote = manifest_xml._XmlRemote(
name="foo",
fetch="ssh://git@github.com/org2/",
manifestUrl="git@github.com:org2/custom_manifest.git",
)
self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
remote = manifest_xml._XmlRemote(
name="foo",
fetch="git@github.com:org2/",
manifestUrl="ssh://git@github.com/org2/custom_manifest.git",
)
self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)

View File

@ -107,16 +107,6 @@ class ReviewableBranchTests(unittest.TestCase):
self.assertTrue(rb.date) self.assertTrue(rb.date)
class ProjectTests(unittest.TestCase):
"""Check Project behavior."""
def test_encode_patchset_description(self):
self.assertEqual(
project.Project._encode_patchset_description("abcd00!! +"),
"abcd00%21%21_%2b",
)
class CopyLinkTestCase(unittest.TestCase): class CopyLinkTestCase(unittest.TestCase):
"""TestCase for stub repo client checkouts. """TestCase for stub repo client checkouts.
@ -161,7 +151,7 @@ class CopyLinkTestCase(unittest.TestCase):
# "". # "".
break break
result = os.path.exists(path) result = os.path.exists(path)
msg.append(f"\tos.path.exists({path}): {result}") msg.append("\tos.path.exists(%s): %s" % (path, result))
if result: if result:
msg.append("\tcontents: %r" % os.listdir(path)) msg.append("\tcontents: %r" % os.listdir(path))
break break

View File

@ -13,14 +13,9 @@
# limitations under the License. # limitations under the License.
"""Unit test for repo_logging module.""" """Unit test for repo_logging module."""
import contextlib
import io
import logging
import unittest import unittest
from unittest import mock from unittest import mock
from color import SetDefaultColoring
from error import RepoExitError from error import RepoExitError
from repo_logging import RepoLogger from repo_logging import RepoLogger
@ -67,35 +62,3 @@ class TestRepoLogger(unittest.TestCase):
mock.call("Repo command failed: %s", "RepoExitError"), mock.call("Repo command failed: %s", "RepoExitError"),
] ]
) )
def test_log_with_format_string(self):
"""Test different log levels with format strings."""
# Set color output to "always" for consistent test results.
# This ensures the logger's behavior is uniform across different
# environments and git configurations.
SetDefaultColoring("always")
# Regex pattern to match optional ANSI color codes.
# \033 - Escape character
# \[ - Opening square bracket
# [0-9;]* - Zero or more digits or semicolons
# m - Ending 'm' character
# ? - Makes the entire group optional
opt_color = r"(\033\[[0-9;]*m)?"
for level in (logging.INFO, logging.WARN, logging.ERROR):
name = logging.getLevelName(level)
with self.subTest(level=level, name=name):
output = io.StringIO()
with contextlib.redirect_stderr(output):
logger = RepoLogger(__name__)
logger.log(level, "%s", "100% pass")
self.assertRegex(
output.getvalue().strip(),
f"^{opt_color}100% pass{opt_color}$",
f"failed for level {name}",
)

View File

@ -1,156 +0,0 @@
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the forall subcmd."""
from io import StringIO
import os
from shutil import rmtree
import subprocess
import tempfile
import unittest
from unittest import mock
import git_command
import manifest_xml
import project
import subcmds
class AllCommands(unittest.TestCase):
"""Check registered all_commands."""
def setUp(self):
"""Common setup."""
self.tempdirobj = tempfile.TemporaryDirectory(prefix="forall_tests")
self.tempdir = self.tempdirobj.name
self.repodir = os.path.join(self.tempdir, ".repo")
self.manifest_dir = os.path.join(self.repodir, "manifests")
self.manifest_file = os.path.join(
self.repodir, manifest_xml.MANIFEST_FILE_NAME
)
self.local_manifest_dir = os.path.join(
self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME
)
os.mkdir(self.repodir)
os.mkdir(self.manifest_dir)
def tearDown(self):
"""Common teardown."""
rmtree(self.tempdir, ignore_errors=True)
def initTempGitTree(self, git_dir):
"""Create a new empty git checkout for testing."""
# Tests need to assume, that main is default branch at init,
# which is not supported in config until 2.28.
cmd = ["git", "init", "-q"]
if git_command.git_require((2, 28, 0)):
cmd += ["--initial-branch=main"]
else:
# Use template dir for init
templatedir = os.path.join(self.tempdirobj.name, ".test-template")
os.makedirs(templatedir)
with open(os.path.join(templatedir, "HEAD"), "w") as fp:
fp.write("ref: refs/heads/main\n")
cmd += ["--template", templatedir]
cmd += [git_dir]
subprocess.check_call(cmd)
def getXmlManifestWith8Projects(self):
"""Create and return a setup of 8 projects with enough dummy
files and setup to execute forall."""
# Set up a manifest git dir for parsing to work
gitdir = os.path.join(self.repodir, "manifests.git")
os.mkdir(gitdir)
with open(os.path.join(gitdir, "config"), "w") as fp:
fp.write(
"""[remote "origin"]
url = https://localhost:0/manifest
verbose = false
"""
)
# Add the manifest data
manifest_data = """
<manifest>
<remote name="origin" fetch="http://localhost" />
<default remote="origin" revision="refs/heads/main" />
<project name="project1" path="tests/path1" />
<project name="project2" path="tests/path2" />
<project name="project3" path="tests/path3" />
<project name="project4" path="tests/path4" />
<project name="project5" path="tests/path5" />
<project name="project6" path="tests/path6" />
<project name="project7" path="tests/path7" />
<project name="project8" path="tests/path8" />
</manifest>
"""
with open(self.manifest_file, "w", encoding="utf-8") as fp:
fp.write(manifest_data)
# Set up 8 empty projects to match the manifest
for x in range(1, 9):
os.makedirs(
os.path.join(
self.repodir, "projects/tests/path" + str(x) + ".git"
)
)
os.makedirs(
os.path.join(
self.repodir, "project-objects/project" + str(x) + ".git"
)
)
git_path = os.path.join(self.tempdir, "tests/path" + str(x))
self.initTempGitTree(git_path)
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
# Use mock to capture stdout from the forall run
@unittest.mock.patch("sys.stdout", new_callable=StringIO)
def test_forall_all_projects_called_once(self, mock_stdout):
"""Test that all projects get a command run once each."""
manifest_with_8_projects = self.getXmlManifestWith8Projects()
cmd = subcmds.forall.Forall()
cmd.manifest = manifest_with_8_projects
# Use echo project names as the test of forall
opts, args = cmd.OptionParser.parse_args(["-c", "echo $REPO_PROJECT"])
opts.verbose = False
# Mock to not have the Execute fail on remote check
with mock.patch.object(
project.Project, "GetRevisionId", return_value="refs/heads/main"
):
# Run the forall command
cmd.Execute(opts, args)
# Verify that we got every project name in the prints
for x in range(1, 9):
self.assertIn("project" + str(x), mock_stdout.getvalue())
# Split the captured output into lines to count them
line_count = 0
for line in mock_stdout.getvalue().split("\n"):
# A commented out print to stderr as a reminder
# that stdout is mocked, include sys and uncomment if needed
# print(line, file=sys.stderr)
if len(line) > 0:
line_count += 1
# Verify that we didn't get more lines than expected
assert line_count == 8

View File

@ -265,119 +265,6 @@ class LocalSyncState(unittest.TestCase):
self.assertIsNone(self.state.GetFetchTime(projA)) self.assertIsNone(self.state.GetFetchTime(projA))
self.assertEqual(self.state.GetFetchTime(projB), 7) self.assertEqual(self.state.GetFetchTime(projB), 7)
def test_prune_removed_and_symlinked_projects(self):
"""Removed projects that still exists on disk as symlink are pruned."""
with open(self.state._path, "w") as f:
f.write(
"""
{
"projA": {
"last_fetch": 5
},
"projB": {
"last_fetch": 7
}
}
"""
)
def mock_exists(path):
return True
def mock_islink(path):
if "projB" in path:
return True
return False
projA = mock.MagicMock(relpath="projA")
projB = mock.MagicMock(relpath="projB")
self.state = self._new_state()
self.assertEqual(self.state.GetFetchTime(projA), 5)
self.assertEqual(self.state.GetFetchTime(projB), 7)
with mock.patch("os.path.exists", side_effect=mock_exists):
with mock.patch("os.path.islink", side_effect=mock_islink):
self.state.PruneRemovedProjects()
self.assertIsNone(self.state.GetFetchTime(projB))
self.state = self._new_state()
self.assertIsNone(self.state.GetFetchTime(projB))
self.assertEqual(self.state.GetFetchTime(projA), 5)
class FakeProject:
def __init__(self, relpath):
self.relpath = relpath
def __str__(self):
return f"project: {self.relpath}"
def __repr__(self):
return str(self)
class SafeCheckoutOrder(unittest.TestCase):
def test_no_nested(self):
p_f = FakeProject("f")
p_foo = FakeProject("foo")
out = sync._SafeCheckoutOrder([p_f, p_foo])
self.assertEqual(out, [[p_f, p_foo]])
def test_basic_nested(self):
p_foo = p_foo = FakeProject("foo")
p_foo_bar = FakeProject("foo/bar")
out = sync._SafeCheckoutOrder([p_foo, p_foo_bar])
self.assertEqual(out, [[p_foo], [p_foo_bar]])
def test_complex_nested(self):
p_foo = FakeProject("foo")
p_foobar = FakeProject("foobar")
p_foo_dash_bar = FakeProject("foo-bar")
p_foo_bar = FakeProject("foo/bar")
p_foo_bar_baz_baq = FakeProject("foo/bar/baz/baq")
p_bar = FakeProject("bar")
out = sync._SafeCheckoutOrder(
[
p_foo_bar_baz_baq,
p_foo,
p_foobar,
p_foo_dash_bar,
p_foo_bar,
p_bar,
]
)
self.assertEqual(
out,
[
[p_bar, p_foo, p_foo_dash_bar, p_foobar],
[p_foo_bar],
[p_foo_bar_baz_baq],
],
)
class Chunksize(unittest.TestCase):
"""Tests for _chunksize."""
def test_single_project(self):
"""Single project."""
self.assertEqual(sync._chunksize(1, 1), 1)
def test_low_project_count(self):
"""Multiple projects, low number of projects to sync."""
self.assertEqual(sync._chunksize(10, 1), 10)
self.assertEqual(sync._chunksize(10, 2), 5)
self.assertEqual(sync._chunksize(10, 4), 2)
self.assertEqual(sync._chunksize(10, 8), 1)
self.assertEqual(sync._chunksize(10, 16), 1)
def test_high_project_count(self):
"""Multiple projects, high number of projects to sync."""
self.assertEqual(sync._chunksize(2800, 1), 32)
self.assertEqual(sync._chunksize(2800, 16), 32)
self.assertEqual(sync._chunksize(2800, 32), 32)
self.assertEqual(sync._chunksize(2800, 64), 32)
self.assertEqual(sync._chunksize(2800, 128), 21)
class GetPreciousObjectsState(unittest.TestCase): class GetPreciousObjectsState(unittest.TestCase):
"""Tests for _GetPreciousObjectsState.""" """Tests for _GetPreciousObjectsState."""

View File

@ -17,7 +17,6 @@
import io import io
import os import os
import re import re
import subprocess
import sys import sys
import tempfile import tempfile
import unittest import unittest
@ -73,11 +72,84 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
def test_init_parser(self): def test_init_parser(self):
"""Make sure 'init' GetParser works.""" """Make sure 'init' GetParser works."""
parser = self.wrapper.GetParser() parser = self.wrapper.GetParser(gitc_init=False)
opts, args = parser.parse_args([]) opts, args = parser.parse_args([])
self.assertEqual([], args) self.assertEqual([], args)
self.assertIsNone(opts.manifest_url) self.assertIsNone(opts.manifest_url)
def test_gitc_init_parser(self):
"""Make sure 'gitc-init' GetParser raises."""
with self.assertRaises(SystemExit):
self.wrapper.GetParser(gitc_init=True)
def test_get_gitc_manifest_dir_no_gitc(self):
"""
Test reading a missing gitc config file
"""
self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
val = self.wrapper.get_gitc_manifest_dir()
self.assertEqual(val, "")
def test_get_gitc_manifest_dir(self):
"""
Test reading the gitc config file and parsing the directory
"""
self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
val = self.wrapper.get_gitc_manifest_dir()
self.assertEqual(val, "/test/usr/local/google/gitc")
def test_gitc_parse_clientdir_no_gitc(self):
"""
Test parsing the gitc clientdir without gitc running
"""
self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
)
def test_gitc_parse_clientdir(self):
"""
Test parsing the gitc clientdir
"""
self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/"), "test"
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/extra"),
"test",
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir(
"/test/usr/local/google/gitc/test"
),
"test",
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir(
"/test/usr/local/google/gitc/test/"
),
"test",
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir(
"/test/usr/local/google/gitc/test/extra"
),
"test",
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/"), None
)
self.assertEqual(
self.wrapper.gitc_parse_clientdir("/test/usr/local/google/gitc/"),
None,
)
class SetGitTrace2ParentSid(RepoWrapperTestCase): class SetGitTrace2ParentSid(RepoWrapperTestCase):
"""Check SetGitTrace2ParentSid behavior.""" """Check SetGitTrace2ParentSid behavior."""
@ -126,7 +198,7 @@ class RunCommand(RepoWrapperTestCase):
self.wrapper.run_command(["true"], check=False) self.wrapper.run_command(["true"], check=False)
self.wrapper.run_command(["true"], check=True) self.wrapper.run_command(["true"], check=True)
self.wrapper.run_command(["false"], check=False) self.wrapper.run_command(["false"], check=False)
with self.assertRaises(subprocess.CalledProcessError): with self.assertRaises(self.wrapper.RunError):
self.wrapper.run_command(["false"], check=True) self.wrapper.run_command(["false"], check=True)
@ -359,8 +431,8 @@ class VerifyRev(RepoWrapperTestCase):
def test_verify_passes(self): def test_verify_passes(self):
"""Check when we have a valid signed tag.""" """Check when we have a valid signed tag."""
desc_result = subprocess.CompletedProcess([], 0, "v1.0\n", "") desc_result = self.wrapper.RunResult(0, "v1.0\n", "")
gpg_result = subprocess.CompletedProcess([], 0, "", "") gpg_result = self.wrapper.RunResult(0, "", "")
with mock.patch.object( with mock.patch.object(
self.wrapper, "run_git", side_effect=(desc_result, gpg_result) self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
): ):
@ -371,8 +443,8 @@ class VerifyRev(RepoWrapperTestCase):
def test_unsigned_commit(self): def test_unsigned_commit(self):
"""Check we fall back to signed tag when we have an unsigned commit.""" """Check we fall back to signed tag when we have an unsigned commit."""
desc_result = subprocess.CompletedProcess([], 0, "v1.0-10-g1234\n", "") desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
gpg_result = subprocess.CompletedProcess([], 0, "", "") gpg_result = self.wrapper.RunResult(0, "", "")
with mock.patch.object( with mock.patch.object(
self.wrapper, "run_git", side_effect=(desc_result, gpg_result) self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
): ):
@ -383,7 +455,7 @@ class VerifyRev(RepoWrapperTestCase):
def test_verify_fails(self): def test_verify_fails(self):
"""Check we fall back to signed tag when we have an unsigned commit.""" """Check we fall back to signed tag when we have an unsigned commit."""
desc_result = subprocess.CompletedProcess([], 0, "v1.0-10-g1234\n", "") desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
gpg_result = Exception gpg_result = Exception
with mock.patch.object( with mock.patch.object(
self.wrapper, "run_git", side_effect=(desc_result, gpg_result) self.wrapper, "run_git", side_effect=(desc_result, gpg_result)

10
tox.ini
View File

@ -30,7 +30,6 @@ python =
[testenv] [testenv]
deps = deps =
-c constraints.txt
black black
flake8 flake8
isort isort
@ -45,19 +44,20 @@ setenv =
[testenv:lint] [testenv:lint]
skip_install = true skip_install = true
deps = deps =
-c constraints.txt
black black
flake8 flake8
commands = commands =
black --check {posargs:. repo run_tests release/update-hooks release/update-manpages} black --check {posargs:.}
flake8 flake8
[testenv:format] [testenv:format]
skip_install = true skip_install = true
deps = deps =
-c constraints.txt
black black
flake8 flake8
commands = commands =
black {posargs:. repo run_tests release/update-hooks release/update-manpages} black {posargs:.}
flake8 flake8
[pytest]
timeout = 300

View File

@ -18,12 +18,8 @@ import importlib.util
import os import os
def WrapperDir():
return os.path.dirname(__file__)
def WrapperPath(): def WrapperPath():
return os.path.join(WrapperDir(), "repo") return os.path.join(os.path.dirname(__file__), "repo")
@functools.lru_cache(maxsize=None) @functools.lru_cache(maxsize=None)