Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00).

Compare view: 127 commits.
Commits in this range (SHA1):

0b57eed8f0, 72b6dc8891, e19d9e1a65, 8ddff5c74f, 8409410aa2, dc63181fcd, f700ac79c3, 6f1c626a9b,
77479863da, 16a5c3ac51, 145e35b805, 819827a42d, abdf750061, 0ab95ba6d0, 5a2517f411, 54a4e6007a,
42339d7e52, 03ae99290a, 9090e804ab, eeff3537de, 8f78a83083, e5913ae410, 119085e6b1, 086710465e,
ed4f2113d2, 719675bcec, 21c1575ee4, 8f9e02231a, 348e218d5b, 4bbba7d627, dc1d0e0c7f, 82caef67a1,
3645bd2420, 5f2b045195, 163d42eb43, 07392ed326, 3285e4b436, ae62541005, 83a3227b62, 09dd9bda38,
f914edca53, e7c91889a6, 1b117db767, 563f1a6512, b4687ad862, ded477dbb9, 93293ca47f, dbd277ce50,
5a03308c5c, 3ba716f382, 655aedd7f3, cc960971f4, 66098f707a, f7b64e3350, bd0aae95f5, e6a202f790,
04122b7261, f5525fb310, ee451f035d, 91d9587e45, 0bcc2d28d4, ec0ba2777f, 9da67feecf, b0b164a87f,
b71d61d34e, 8f997b38cb, 0eb2d3c8a0, e4d20372b2, 1e01a74445, 7c321f1bf6, 7ac12a9b22, 0b304c06ff,
4997d1c838, 5b3a57c3ff, 6f8c85ce2a, 6856f98467, 34bc5712eb, 70c54dc255, 6da17751ca, 2ba5a1e963,
3538dd224d, b610b850ac, dff919493a, 3164d40e22, f454512619, b466854bed, d1e93dd58a, e778e57f11,
f1c5dd8a0f, 2058c63641, c8290ad49e, 9775a3d5d2, 9bfdfbe117, 2f0951b216, 72ab852ca5, 0a9265e2d6,
dc1b59d2c0, 71b0f312b1, 369814b4a7, e37aa5f331, 4a07798c82, fb527e3f52, 6be76337a0, a2cd6aeae8,
70d861fa29, 9100f7fadd, 01d6c3c0c5, 4c263b52e7, 60fdc5cad1, 46702eddc7, ae6cb08ae5, 3fc157285c,
8a11f6f24c, 898f4e6217, d9e5cf0ee7, 3069be2684, d5c306b404, a850ca2712, a34186e481, 600f49278a,
1f2462e0d2, 50d27639b5, c5b172ad6f, 87deaefd86, 5fbd1c6053, 1126c4ed86, f7c51606f0
.flake8 (14 lines changed)

@@ -1,3 +1,13 @@
 [flake8]
-max-line-length=80
-ignore=E111,E114,E402
+max-line-length=100
+ignore=
+    # E111: Indentation is not a multiple of four
+    E111,
+    # E114: Indentation is not a multiple of four (comment)
+    E114,
+    # E402: Module level import not at top of file
+    E402,
+    # W503: Line break before binary operator
+    W503,
+    # W504: Line break after binary operator
+    W504

.gitignore (8 lines changed)

@@ -1,3 +1,11 @@
+*.egg-info/
+*.log
 *.pyc
+__pycache__
+/dist
 .repopickle_*
 /repoc
+/.tox
+
+# PyCharm related
+/.idea/

MANIFEST.in (new file, 6 lines)

@@ -0,0 +1,6 @@
+graft docs hooks tests
+include *.py
+include LICENSE
+include git_ssh
+include repo
+include run_tests

README.md (20 lines changed)

@@ -14,3 +14,23 @@ that you can put anywhere in your path.
 * [repo Manifest Format](./docs/manifest-format.md)
 * [repo Hooks](./docs/repo-hooks.md)
 * [Submitting patches](./SUBMITTING_PATCHES.md)
+* Running Repo in [Microsoft Windows](./docs/windows.md)
+
+## Install
+
+Many distros include repo, so you might be able to install from there.
+```sh
+# Debian/Ubuntu.
+$ sudo apt-get install repo
+
+# Gentoo.
+$ sudo emerge dev-vcs/repo
+```
+
+You can install it manually as well as it's a single script.
+```sh
+$ mkdir -p ~/.bin
+$ PATH="${HOME}/.bin:${PATH}"
+$ curl https://storage.googleapis.com/git-repo-downloads/repo > ~/.bin/repo
+$ chmod a+rx ~/.bin/repo
+```

@@ -69,10 +69,38 @@ suppressed in the included `.flake8` file.
 
 ## Running tests
 
-There is a [`./run_tests`](./run_tests) helper script for quickly invoking all
-of our unittests.  The coverage isn't great currently, but it should still be
-run for all commits.
+We use [pytest](https://pytest.org/) and [tox](https://tox.readthedocs.io/) for
+running tests.  You should make sure to install those first.
+
+To run the full suite against all supported Python versions, simply execute:
+```sh
+$ tox -p auto
+```
+
+We have [`./run_tests`](./run_tests) which is a simple wrapper around `pytest`:
+```sh
+# Run the full suite against the default Python version.
+$ ./run_tests
+
+# List each test as it runs.
+$ ./run_tests -v
+
+# Run a specific unittest module (and all tests in it).
+$ ./run_tests tests/test_git_command.py
+
+# Run a specific testsuite in a specific unittest module.
+$ ./run_tests tests/test_editor.py::EditString
+
+# Run a single test.
+$ ./run_tests tests/test_editor.py::EditString::test_cat_editor
+
+# List all available tests.
+$ ./run_tests --collect-only
+
+# Run a single test using substring match.
+$ ./run_tests -k test_cat_editor
+```
+
+The coverage isn't great currently, but it should still be run for all commits.
 Adding more unittests for changes you make would be greatly appreciated :).
 Check out the [tests/](./tests/) subdirectory for more details.

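Since the section above only shows how to invoke the test runners, here is a minimal, hypothetical example of the kind of test module they would collect; the file name `tests/test_example.py` is made up for illustration and is not part of the repo tree:

```python
# tests/test_example.py -- hypothetical file name, for illustration only.
import unittest


class ExampleTests(unittest.TestCase):
  """Plain unittest classes like this one are collected by pytest/run_tests."""

  def test_truth(self):
    self.assertTrue(True)


if __name__ == '__main__':
  unittest.main()
```
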
color.py (1 line changed)

@@ -84,6 +84,7 @@ def _Color(fg=None, bg=None, attr=None):
     code = ''
   return code
 
+
 DEFAULT = None

command.py (25 lines changed)

@@ -98,6 +98,16 @@ class Command(object):
     self.OptionParser.print_usage()
     sys.exit(1)
 
+  def ValidateOptions(self, opt, args):
+    """Validate the user options & arguments before executing.
+
+    This is meant to help break the code up into logical steps.  Some tips:
+    * Use self.OptionParser.error to display CLI related errors.
+    * Adjust opt member defaults as makes sense.
+    * Adjust the args list, but do so inplace so the caller sees updates.
+    * Try to avoid updating self state.  Leave that to Execute.
+    """
+
   def Execute(self, opt, args):
     """Perform the action, after option parsing is complete.
     """
@@ -113,9 +123,9 @@ class Command(object):
     project = None
     if os.path.exists(path):
       oldpath = None
-      while path and \
-            path != oldpath and \
-            path != manifest.topdir:
+      while (path and
+             path != oldpath and
+             path != manifest.topdir):
         try:
           project = self._by_path[path]
           break
@@ -165,7 +175,10 @@ class Command(object):
       self._ResetPathToProjectMap(all_projects_list)
 
       for arg in args:
-        projects = manifest.GetProjectsWithName(arg)
+        # We have to filter by manifest groups in case the requested project is
+        # checked out multiple times or differently based on them.
+        projects = [project for project in manifest.GetProjectsWithName(arg)
+                    if project.MatchesGroups(groups)]
 
         if not projects:
           path = os.path.abspath(arg).replace('\\', '/')
@@ -190,7 +203,7 @@ class Command(object):
 
         for project in projects:
           if not missing_ok and not project.Exists:
-            raise NoSuchProjectError(arg)
+            raise NoSuchProjectError('%s (%s)' % (arg, project.relpath))
           if not project.MatchesGroups(groups):
             raise InvalidProjectGroupsError(arg)
 
@@ -223,6 +236,7 @@ class InteractiveCommand(Command):
   """Command which requires user interaction on the tty and
   must not run within a pager, even if the user asks to.
   """
+
   def WantPager(self, _opt):
     return False
 
@@ -231,6 +245,7 @@ class PagedCommand(Command):
   """Command which defaults to output in a pager, as its
   display tends to be larger than one screen full.
   """
+
   def WantPager(self, _opt):
     return True

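To make the new `ValidateOptions` step above concrete, here is a small, self-contained sketch of how a subcommand might split validation from execution. This is not taken from repo itself; `ExampleCommand` and its options are invented for illustration, and only the optparse-based structure mirrors the real class.

```python
# Illustrative sketch only; ExampleCommand and its options are hypothetical.
import optparse
import sys


class ExampleCommand(object):
  def __init__(self):
    self.OptionParser = optparse.OptionParser(usage='%prog [--jobs N] <project>...')
    self.OptionParser.add_option('--jobs', type='int', default=1)

  def ValidateOptions(self, opt, args):
    # CLI-related problems are reported via OptionParser.error, per the
    # docstring added above.
    if opt.jobs < 1:
      self.OptionParser.error('--jobs must be at least 1')
    if not args:
      self.OptionParser.error('at least one project is required')
    # Adjust args in place so the caller sees the update.
    args[:] = sorted(set(args))

  def Execute(self, opt, args):
    for project in args:
      print('processing %s with %d job(s)' % (project, opt.jobs))


if __name__ == '__main__':
  cmd = ExampleCommand()
  opt, args = cmd.OptionParser.parse_args(sys.argv[1:])
  cmd.ValidateOptions(opt, args)
  cmd.Execute(opt, args)
```
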
docs/internal-fs-layout.md (new file, 145 lines)

# Repo internal filesystem layout

A reference to the `.repo/` tree in repo client checkouts.
Hopefully it's complete & up-to-date, but who knows!

*** note
**Warning**:
This is meant for developers of the repo project itself as a quick reference.
**Nothing** in here must be construed as ABI, or that repo itself will never
change its internals in backwards incompatible ways.
***

[TOC]

## .repo/ layout

All content under `.repo/` is managed by `repo` itself with few exceptions.

In general, you should not make manual changes in here.
If a setting was initialized using an option to `repo init`, you should use that
command to change the setting later on.
It is always safe to re-run `repo init` in existing repo client checkouts.
For example, if you want to change the manifest branch, you can simply run
`repo init --manifest-branch=<new name>` and repo will take care of the rest.

### repo/ state

* `repo/`: A git checkout of the repo project.  This is how `repo` re-execs
  itself to get the latest released version.

  It tracks the git repository at `REPO_URL` using the `REPO_REV` branch.
  Those are specified at `repo init` time using the `--repo-url=<REPO_URL>`
  and `--repo-branch=<REPO_REV>` options.

  Any changes made to this directory will usually be automatically discarded
  by repo itself when it checks for updates.  If you want to update to the
  latest version of repo, use `repo selfupdate` instead.  If you want to
  change the git URL/branch that this tracks, re-run `repo init` with the new
  settings.

* `.repo_fetchtimes.json`: Used by `repo sync` to record stats when syncing
  the various projects.

### Manifests

For more documentation on the manifest format, including the local_manifests
support, see the [manifest-format.md] file.

* `manifests/`: A git checkout of the manifest project.  Its `.git/` state
  points to the `manifest.git` bare checkout (see below).  It tracks the git
  branch specified at `repo init` time via `--manifest-branch`.

  The local branch name is always `default` regardless of the remote tracking
  branch.  Do not get confused if the remote branch is not `default`, or if
  there is a remote `default` that is completely different!

  No manual changes should be made in here as it will just confuse repo and
  it won't automatically recover causing no new changes to be picked up.

* `manifests.git/`: A bare checkout of the manifest project.  It tracks the
  git repository specified at `repo init` time via `--manifest-url`.

  No manual changes should be made in here as it will just confuse repo.
  If you want to switch the tracking settings, re-run `repo init` with the
  new settings.

* `manifest.xml -> manifests/<manifest-name>.xml`: A symlink to the manifest
  that the user wishes to sync.  It is specified at `repo init` time via
  `--manifest-name`.

  Do not try to repoint this symlink to other files as it will confuse repo.
  If you want to switch manifest files, re-run `repo init` with the new
  setting.

* `manifests.git/.repo_config.json`: JSON cache of the `manifests.git/config`
  file for repo to read/process quickly.

* `local_manifest.xml` (*Deprecated*): User-authored tweaks to the manifest
  used to sync.  See [local manifests] for more details.
* `local_manifests/`: Directory of user-authored manifest fragments to tweak
  the manifest used to sync.  See [local manifests] for more details.

### Project objects

* `project.list`: Tracking file used by `repo sync` to determine when projects
  are added or removed and need corresponding updates in the checkout.
* `projects/`: Bare checkouts of every project synced by the manifest.  The
  filesystem layout matches the `<project path=...` setting in the manifest
  (i.e. where it's checked out in the repo client source tree).  Those
  checkouts will symlink their `.git/` state to paths under here.

  Some git state is further split out under `project-objects/`.
* `project-objects/`: Git objects that are safe to share across multiple
  git checkouts.  The filesystem layout matches the `<project name=...`
  setting in the manifest (i.e. the path on the remote server).  This allows
  for multiple checkouts of the same remote git repo to share their objects.
  For example, you could have different branches of `foo/bar.git` checked
  out to `foo/bar-master`, `foo/bar-release`, etc...  There will be multiple
  trees under `projects/` for each one, but only one under `project-objects/`.

  This can run into problems if different remotes use the same path on their
  respective servers ...
* `subprojects/`: Like `projects/`, but for git submodules.
* `subproject-objects/`: Like `project-objects/`, but for git submodules.

### Settings

The `.repo/manifests.git/config` file is used to track settings for the entire
repo client checkout.
Most settings use the `[repo]` section to avoid conflicts with git.
User controlled settings are initialized when running `repo init`.

| Setting           | `repo init` Option        | Use/Meaning |
|-------------------|---------------------------|-------------|
| manifest.groups   | `--groups` & `--platform` | The manifest groups to sync |
| repo.archive      | `--archive`               | Use `git archive` for checkouts |
| repo.clonefilter  | `--clone-filter`          | Filter setting when using [partial git clones] |
| repo.depth        | `--depth`                 | Create shallow checkouts when cloning |
| repo.dissociate   | `--dissociate`            | Dissociate from any reference/mirrors after initial clone |
| repo.mirror       | `--mirror`                | Checkout is a repo mirror |
| repo.partialclone | `--partial-clone`         | Create [partial git clones] |
| repo.reference    | `--reference`             | Reference repo client checkout |
| repo.submodules   | `--submodules`            | Sync git submodules |
| user.email        | `--config-name`           | User's e-mail address; Copied into `.git/config` when checking out a new project |
| user.name         | `--config-name`           | User's name; Copied into `.git/config` when checking out a new project |

[partial git clones]: https://git-scm.com/docs/gitrepository-layout#_code_partialclone_code

## ~/ dotconfig layout

Repo will create & maintain a few files in the user's home directory.

* `.repoconfig/`: Repo's per-user directory for all random config files/state.
* `.repoconfig/keyring-version`: Cache file for checking if the gnupg subdir
  has all the same keys as the repo launcher.  Used to avoid running gpg
  constantly as that can be quite slow.
* `.repoconfig/gnupg/`: GnuPG's internal state directory used when repo needs
  to run `gpg`.  This provides isolation from the user's normal `~/.gnupg/`.

* `.repo_.gitconfig.json`: JSON cache of the `.gitconfig` file for repo to
  read/process quickly.

[manifest-format.md]: ./manifest-format.md
[local manifests]: ./manifest-format.md#Local-Manifests

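As a rough illustration of the settings table above, the following standalone sketch (not part of repo) reads those keys out of `.repo/manifests.git/config` with plain `git config`. The helper name `get_repo_setting` and the client-root argument are assumptions for the example.

```python
# Illustrative sketch only; get_repo_setting() is not a repo API.
import os
import subprocess


def get_repo_setting(client_root, name):
  """Return a setting such as 'repo.depth' from the checkout, or None."""
  config = os.path.join(client_root, '.repo', 'manifests.git', 'config')
  result = subprocess.run(
      ['git', 'config', '--file', config, '--get', name],
      stdout=subprocess.PIPE, universal_newlines=True)
  return result.stdout.strip() if result.returncode == 0 else None


if __name__ == '__main__':
  for key in ('manifest.groups', 'repo.mirror', 'repo.depth'):
    print(key, '=', get_repo_setting(os.getcwd(), key))
```
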
docs/manifest-format.md

@@ -89,6 +89,7 @@ following DTD:
 <!ATTLIST extend-project path CDATA #IMPLIED>
 <!ATTLIST extend-project groups CDATA #IMPLIED>
 <!ATTLIST extend-project revision CDATA #IMPLIED>
+<!ATTLIST extend-project remote CDATA #IMPLIED>
 
 <!ELEMENT remove-project EMPTY>
 <!ATTLIST remove-project name CDATA #REQUIRED>
@@ -306,6 +307,9 @@ belongs. Same syntax as the corresponding element of `project`.
 Attribute `revision`: If specified, overrides the revision of the original
 project.  Same syntax as the corresponding element of `project`.
 
+Attribute `remote`: If specified, overrides the remote of the original
+project.  Same syntax as the corresponding element of `project`.
+
 ### Element annotation
 
 Zero or more annotation elements may be specified as children of a
@@ -322,13 +326,29 @@ Zero or more copyfile elements may be specified as children of a
 project element. Each element describes a src-dest pair of files;
 the "src" file will be copied to the "dest" place during `repo sync`
 command.
 
 "src" is project relative, "dest" is relative to the top of the tree.
+Copying from paths outside of the project or to paths outside of the repo
+client is not allowed.
+
+"src" and "dest" must be files.  Directories or symlinks are not allowed.
+Intermediate paths must not be symlinks either.
+
+Parent directories of "dest" will be automatically created if missing.
 
 ### Element linkfile
 
 It's just like copyfile and runs at the same time as copyfile but
 instead of copying it creates a symlink.
 
+The symlink is created at "dest" (relative to the top of the tree) and
+points to the path specified by "src" which is a path in the project.
+
+Parent directories of "dest" will be automatically created if missing.
+
+The symlink target may be a file or directory, but it may not point outside
+of the repo client.
+
 ### Element remove-project
 
 Deletes the named project from the internal manifest table, possibly

docs/python-support.md

@@ -7,9 +7,9 @@ their old LTS/corp systems and have little power to change the system.
 
 ## Summary
 
-* Python 3.6 (released Dec 2016) is required by default starting with repo-1.14.
+* Python 3.6 (released Dec 2016) is required by default starting with repo-2.x.
 * Older versions of Python (e.g. v2.7) may use the legacy feature-frozen branch
-  based on repo-1.13.
+  based on repo-1.x.
 
 ## Overview
 
@@ -28,5 +28,20 @@ The master branch will require Python 3.6 at a minimum.
 If the system has an older version of Python 3, then users will have to select
 the legacy Python 2 branch instead.
 
+### repo hooks
+
+Projects that use [repo hooks] run on independent schedules.
+They might migrate to Python 3 earlier or later than us.
+To support them, we'll probe the shebang of the hook script and if we find an
+interpreter in there that indicates a different version than repo is currently
+running under, we'll attempt to reexec ourselves under that.
+
+For example, a hook with a header like `#!/usr/bin/python2` will have repo
+execute `/usr/bin/python2` to execute the hook code specifically if repo is
+currently running Python 3.
+
+For more details, consult the [repo hooks] documentation.
+
+
+[repo hooks]: ./repo-hooks.md
 [repo launcher]: ../repo

docs/release-process.md

@@ -161,7 +161,91 @@ You can create a short changelog using the command:
 $ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
 ```
 
+## Project References
+
+Here's a table showing the relationship of major tools, their EOL dates, and
+their status in Ubuntu & Debian.
+Those distros tend to be good indicators of how long we need to support things.
+
+Things in bold indicate stuff to take note of, but does not guarantee that we
+still support them.
+Things in italics are things we used to care about but probably don't anymore.
+
+| Date | EOL | [Git][rel-g] | [Python][rel-p] | [Ubuntu][rel-u] / [Debian][rel-d] | Git | Python |
+|:--------:|:------------:|--------------|-----------------|-----------------------------------|-----|--------|
+| Oct 2008 | *Oct 2013* | | 2.6.0 | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |
+| Dec 2008 | *Feb 2009* | | 3.0.0 |
+| Feb 2009 | *Mar 2012* | | | Debian 5 Lenny | 1.5.6.5 | 2.5.2 |
+| Jun 2009 | *Jun 2016* | | 3.1.0 | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |
+| Feb 2010 | *Oct 2012* | 1.7.0 | | *10.04 Lucid* - *12.04 Precise* - 12.10 Quantal |
+| Apr 2010 | *Apr 2015* | | | *10.04 Lucid* | 1.7.0.4 | 2.6.5 3.1.2 |
+| Jul 2010 | *Dec 2019* | | **2.7.0** | 11.04 Natty - **<current>** |
+| Oct 2010 | | | | 10.10 Maverick | 1.7.1 | 2.6.6 3.1.3 |
+| Feb 2011 | *Feb 2016* | | | Debian 6 Squeeze | 1.7.2.5 | 2.6.6 3.1.3 |
+| Apr 2011 | | | | 11.04 Natty | 1.7.4 | 2.7.1 3.2.0 |
+| Oct 2011 | *Feb 2016* | | 3.2.0 | 11.04 Natty - 12.10 Quantal |
+| Oct 2011 | | | | 11.10 Ocelot | 1.7.5.4 | 2.7.2 3.2.2 |
+| Apr 2012 | *Apr 2019* | | | *12.04 Precise* | 1.7.9.5 | 2.7.3 3.2.3 |
+| Sep 2012 | *Sep 2017* | | 3.3.0 | 13.04 Raring - 13.10 Saucy |
+| Oct 2012 | *Dec 2014* | 1.8.0 | | 13.04 Raring - 13.10 Saucy |
+| Oct 2012 | | | | 12.10 Quantal | 1.7.10.4 | 2.7.3 3.2.3 |
+| Apr 2013 | | | | 13.04 Raring | 1.8.1.2 | 2.7.4 3.3.1 |
+| May 2013 | *May 2018* | | | Debian 7 Wheezy | 1.7.10.4 | 2.7.3 3.2.3 |
+| Oct 2013 | | | | 13.10 Saucy | 1.8.3.2 | 2.7.5 3.3.2 |
+| Feb 2014 | *Dec 2014* | **1.9.0** | | **14.04 Trusty** |
+| Mar 2014 | *Mar 2019* | | **3.4.0** | **14.04 Trusty** - 15.10 Wily / **Jessie** |
+| Apr 2014 | **Apr 2022** | | | **14.04 Trusty** | 1.9.1 | 2.7.5 3.4.0 |
+| May 2014 | *Dec 2014* | 2.0.0 |
+| Aug 2014 | *Dec 2014* | **2.1.0** | | 14.10 Utopic - 15.04 Vivid / **Jessie** |
+| Oct 2014 | | | | 14.10 Utopic | 2.1.0 | 2.7.8 3.4.2 |
+| Nov 2014 | *Sep 2015* | 2.2.0 |
+| Feb 2015 | *Sep 2015* | 2.3.0 |
+| Apr 2015 | *May 2017* | 2.4.0 |
+| Apr 2015 | **Jun 2020** | | | **Debian 8 Jessie** | 2.1.4 | 2.7.9 3.4.2 |
+| Apr 2015 | | | | 15.04 Vivid | 2.1.4 | 2.7.9 3.4.3 |
+| Jul 2015 | *May 2017* | 2.5.0 | | 15.10 Wily |
+| Sep 2015 | *May 2017* | 2.6.0 |
+| Sep 2015 | **Sep 2020** | | **3.5.0** | **16.04 Xenial** - 17.04 Zesty / **Stretch** |
+| Oct 2015 | | | | 15.10 Wily | 2.5.0 | 2.7.9 3.4.3 |
+| Jan 2016 | *Jul 2017* | **2.7.0** | | **16.04 Xenial** |
+| Mar 2016 | *Jul 2017* | 2.8.0 |
+| Apr 2016 | **Apr 2024** | | | **16.04 Xenial** | 2.7.4 | 2.7.11 3.5.1 |
+| Jun 2016 | *Jul 2017* | 2.9.0 | | 16.10 Yakkety |
+| Sep 2016 | *Sep 2017* | 2.10.0 |
+| Oct 2016 | | | | 16.10 Yakkety | 2.9.3 | 2.7.11 3.5.1 |
+| Nov 2016 | *Sep 2017* | **2.11.0** | | 17.04 Zesty / **Stretch** |
+| Dec 2016 | **Dec 2021** | | **3.6.0** | 17.10 Artful - **18.04 Bionic** - 18.10 Cosmic |
+| Feb 2017 | *Sep 2017* | 2.12.0 |
+| Apr 2017 | | | | 17.04 Zesty | 2.11.0 | 2.7.13 3.5.3 |
+| May 2017 | *May 2018* | 2.13.0 |
+| Jun 2017 | **Jun 2022** | | | **Debian 9 Stretch** | 2.11.0 | 2.7.13 3.5.3 |
+| Aug 2017 | *Dec 2019* | 2.14.0 | | 17.10 Artful |
+| Oct 2017 | *Dec 2019* | 2.15.0 |
+| Oct 2017 | | | | 17.10 Artful | 2.14.1 | 2.7.14 3.6.3 |
+| Jan 2018 | *Dec 2019* | 2.16.0 |
+| Apr 2018 | *Dec 2019* | 2.17.0 | | **18.04 Bionic** |
+| Apr 2018 | **Apr 2028** | | | **18.04 Bionic** | 2.17.0 | 2.7.15 3.6.5 |
+| Jun 2018 | *Dec 2019* | 2.18.0 |
+| Jun 2018 | **Jun 2023** | | 3.7.0 | 19.04 Disco - **20.04 Focal** / **Buster** |
+| Sep 2018 | *Dec 2019* | 2.19.0 | | 18.10 Cosmic |
+| Oct 2018 | | | | 18.10 Cosmic | 2.19.1 | 2.7.15 3.6.6 |
+| Dec 2018 | *Dec 2019* | **2.20.0** | | 19.04 Disco / **Buster** |
+| Feb 2019 | *Dec 2019* | 2.21.0 |
+| Apr 2019 | | | | 19.04 Disco | 2.20.1 | 2.7.16 3.7.3 |
+| Jun 2019 | | 2.22.0 |
+| Jul 2019 | **Jul 2024** | | | **Debian 10 Buster** | 2.20.1 | 2.7.16 3.7.3 |
+| Aug 2019 | | 2.23.0 |
+| Oct 2019 | **Oct 2024** | | 3.8.0 |
+| Oct 2019 | | | | 19.10 Eoan | 2.20.1 | 2.7.17 3.7.5 |
+| Nov 2019 | | 2.24.0 |
+| Jan 2020 | | 2.25.0 | | **20.04 Focal** |
+| Apr 2020 | **Apr 2030** | | | **20.04 Focal** | 2.25.0 | 2.7.17 3.7.5 |
+
+
+[rel-d]: https://en.wikipedia.org/wiki/Debian_version_history
+[rel-g]: https://en.wikipedia.org/wiki/Git#Releases
+[rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
+[rel-u]: https://en.wikipedia.org/wiki/Ubuntu_version_history#Table_of_versions
 [example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion
 [repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss
 [go/repo-release]: https://goto.google.com/repo-release

docs/repo-hooks.md

@@ -83,6 +83,31 @@ then check it directly.  Hooks should not normally modify the active git repo
 the user.  Although user interaction is discouraged in the common case, it can
 be useful when deploying automatic fixes.
 
+### Shebang Handling
+
+*** note
+This is intended as a transitional feature.  Hooks are expected to eventually
+migrate to Python 3 only as Python 2 is EOL & deprecated.
+***
+
+If the hook is written against a specific version of Python (either 2 or 3),
+the script can declare that explicitly.  Repo will then attempt to execute it
+under the right version of Python regardless of the version repo itself might
+be executing under.
+
+Here are the shebangs that are recognized.
+
+* `#!/usr/bin/env python` & `#!/usr/bin/python`: The hook is compatible with
+  Python 2 & Python 3.  For maximum compatibility, these are recommended.
+* `#!/usr/bin/env python2` & `#!/usr/bin/python2`: The hook requires Python 2.
+  Version specific names like `python2.7` are also recognized.
+* `#!/usr/bin/env python3` & `#!/usr/bin/python3`: The hook requires Python 3.
+  Version specific names like `python3.6` are also recognized.
+
+If no shebang is detected, or does not match the forms above, we assume that the
+hook is compatible with both Python 2 & Python 3 as if `#!/usr/bin/python` was
+used.
+
 ## Hooks
 
 Here are all the points available for hooking.

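A minimal sketch of the shebang probing described above, assuming we only care about mapping a hook's first line to "requires Python 2", "requires Python 3", or "no specific requirement". This is not repo's actual implementation.

```python
# Sketch only; repo's real logic lives elsewhere and handles more cases.
import re
import sys


def hook_interpreter(hook_path):
  """Return 'python2', 'python3', or None when no version is pinned."""
  try:
    with open(hook_path) as f:
      first = f.readline().strip()
  except IOError:
    return None
  m = re.match(r'^#!.*\bpython(2|3)(\.\d+)?\b', first)
  return 'python%s' % m.group(1) if m else None


if __name__ == '__main__':
  for path in sys.argv[1:]:
    print(path, '->', hook_interpreter(path) or 'python 2 & 3 compatible')
```
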
docs/windows.md (new file, 144 lines)

# Microsoft Windows Details

Repo is primarily developed on Linux with a lot of users on macOS.
Windows is, unfortunately, not a common platform.
There is support in repo for Windows, but there might be some rough edges.

Keep in mind that Windows in general is "best effort" and "community supported".
That means we don't actively test or verify behavior, but rely heavily on users
to report problems back to us, and to contribute fixes as needed.

[TOC]

## Windows

We only support Windows 10 or newer.
This is largely due to symlinks not being available in older versions, but it's
also due to most developers not using Windows.

We will never add code specific to older versions of Windows.
It might work, but it most likely won't, so please don't bother asking.

## Symlinks

Repo will use symlinks heavily internally.
On *NIX platforms, this isn't an issue, but Windows makes it a bit difficult.

There are some documents out there for how to do this, but usually the easiest
answer is to run your shell as an Administrator and invoke repo/git in that.

This isn't a great solution, but Windows doesn't make this easy, so here we are.

### Launch Git Bash

If you install Git Bash (see below), you can launch that with appropriate
permissions so that all programs "just work".

* Open the Start Menu (i.e. press the ⊞ key).
* Find/search for "Git Bash".
* Right click it and select "Run as administrator".

*** note
**NB**: This environment is only needed when running `repo`, or any specific `git`
command that might involve symlinks (e.g. `pull` or `checkout`).
You do not need to run all your commands in here such as your editor.
***

### Symlinks with GNU tools

If you want to use `ln -s` inside of the default Git/bash shell, you might need
to export this environment variable:
```sh
$ export MSYS="winsymlinks:nativestrict"
```

Otherwise `ln -s` will copy files and not actually create a symlink.
This also helps `tar` unpack symlinks, so that's nice.

### References

* https://github.com/git-for-windows/git/wiki/Symbolic-Links
* https://blogs.windows.com/windowsdeveloper/2016/12/02/symlinks-windows-10/

## Python

You should make sure to be running Python 3.6 or newer under Windows.
Python 2 might work, but due to already limited platform testing, you should
only run newer Python versions.
See our [Python Support](./python-support.md) document for more details.

You can grab the latest Windows installer here:<br>
https://www.python.org/downloads/release/python-3

## Git

You should install the most recent version of Git for Windows:<br>
https://git-scm.com/download/win

When installing, make sure to turn on "Enable symbolic links" when prompted.

If you've already installed Git for Windows, you can simply download the latest
installer from above and run it again.
It should safely upgrade things in situ for you.
This is useful if you want to switch the symbolic link option after the fact.

## Shell

We don't have a specific requirement for shell environments when running repo.
Most developers use MinTTY/bash that's included with the Git for Windows install
(so see above for installing Git).

Command & Powershell & the Windows Terminal probably work.
Who knows!

## FAQ

### repo upload always complains about allowing hooks or using --no-verify!

When using `repo upload` in projects that have custom repohooks, you might get
an error like the following:
```sh
$ repo upload
ERROR: You must allow the pre-upload hook or use --no-verify.
```

This can be confusing as you never get prompted.
[MinTTY has a bug][mintty] that breaks isatty checking inside of repo which
causes repo to never interactively prompt the user which means the upload check
always fails.

You can workaround this by manually granting consent when uploading.
Simply add the `--verify` option whenever uploading:
```sh
$ repo upload --verify
```

You will have to specify this flag every time you upload.

[mintty]: https://github.com/mintty/mintty/issues/56

### repohooks always fail with an close_fds error.

When using the [reference repohooks project][repohooks] included in AOSP,
you might see errors like this when running `repo upload`:
```sh
$ repo upload
ERROR: Traceback (most recent call last):
  ...
  File "C:\...\lib\subprocess.py", line 351, in __init__
    raise ValueError("close_fds is not supported on Windows "
ValueError: close_fds is not supported on Windows platforms if you redirect stdin/stderr/stdout

Failed to run main() for pre-upload hook; see traceback above.
```

This error shows up when using Python 2.
You should upgrade to Python 3 instead (see above).

If you already have Python 3 installed, make sure it's the default version.
Running `python --version` should say `Python 3`, not `Python 2`.
If you didn't install the Python versions, or don't have permission to change
the default version, you can probably workaround this by changing `$PATH` in
your shell so the Python 3 version is found first.

[repohooks]: https://android.googlesource.com/platform/tools/repohooks

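Related to the symlink discussion above, a quick self-check like the following (not part of repo, written here as an assumption-laden sketch) can tell you whether the current Windows session is actually allowed to create symlinks before you try a full sync:

```python
# Standalone helper sketch; not part of repo.
import os
import tempfile


def symlinks_work():
  """Try to create one symlink in a temp dir and report success."""
  with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, 'target.txt')
    link = os.path.join(tmp, 'link.txt')
    with open(target, 'w') as f:
      f.write('x')
    try:
      os.symlink(target, link)
      return True
    except (OSError, NotImplementedError):
      return False


if __name__ == '__main__':
  print('symlink creation works:', symlinks_work())
```
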
editor.py (27 lines changed)

@@ -24,6 +24,7 @@ import tempfile
 from error import EditorError
 import platform_utils
 
+
 class Editor(object):
   """Manages the user's preferred text editor."""
 
@@ -57,7 +58,7 @@ class Editor(object):
 
     if os.getenv('TERM') == 'dumb':
       print(
-"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
+          """No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
 Tried to fall back to vi but terminal is dumb.  Please configure at
 least one of these before using this command.""", file=sys.stderr)
       sys.exit(1)
@@ -68,11 +69,14 @@ least one of these before using this command.""", file=sys.stderr)
   def EditString(cls, data):
     """Opens an editor to edit the given content.
 
-       Args:
-          data : the text to edit
+    Args:
+      data: The text to edit.
 
-       Returns:
-          new value of edited text; None if editing did not succeed
+    Returns:
+      New value of edited text.
+
+    Raises:
+      EditorError: The editor failed to run.
     """
     editor = cls._GetEditor()
     if editor == ':':
@@ -80,7 +84,7 @@ least one of these before using this command.""", file=sys.stderr)
 
     fd, path = tempfile.mkstemp()
     try:
-      os.write(fd, data)
+      os.write(fd, data.encode('utf-8'))
       os.close(fd)
       fd = None
 
@@ -101,16 +105,13 @@ least one of these before using this command.""", file=sys.stderr)
       rc = subprocess.Popen(args, shell=shell).wait()
     except OSError as e:
       raise EditorError('editor failed, %s: %s %s'
-          % (str(e), editor, path))
+                        % (str(e), editor, path))
     if rc != 0:
       raise EditorError('editor failed with exit status %d: %s %s'
-          % (rc, editor, path))
+                        % (rc, editor, path))
 
-      fd2 = open(path)
-      try:
-        return fd2.read()
-      finally:
-        fd2.close()
+      with open(path, mode='rb') as fd2:
+        return fd2.read().decode('utf-8')
     finally:
       if fd:
         os.close(fd)

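The encode/decode changes above are about writing the editor's temp file as bytes under Python 3. Here is a standalone sketch of that round-trip, with `cat` standing in for a real editor (an assumption that only works on Unix-like systems):

```python
# Sketch of the bytes round-trip; 'cat' is a stand-in editor, Unix-only.
import os
import subprocess
import tempfile


def edit_string(data, editor='cat'):
  fd, path = tempfile.mkstemp()
  try:
    os.write(fd, data.encode('utf-8'))   # the editor sees UTF-8 bytes
    os.close(fd)
    fd = None
    subprocess.check_call([editor, path])
    with open(path, mode='rb') as f:
      return f.read().decode('utf-8')    # and we decode what comes back
  finally:
    if fd:
      os.close(fd)
    os.remove(path)


if __name__ == '__main__':
  print(edit_string(u'héllo wörld\n'), end='')
```
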
error.py (23 lines changed)

@@ -14,17 +14,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 class ManifestParseError(Exception):
   """Failed to parse the manifest file.
   """
 
+
 class ManifestInvalidRevisionError(Exception):
   """The revision value in a project is incorrect.
   """
 
+
+class ManifestInvalidPathError(Exception):
+  """A path used in <copyfile> or <linkfile> is incorrect.
+  """
+
+
 class NoManifestException(Exception):
   """The required manifest does not exist.
   """
+
   def __init__(self, path, reason):
     super(NoManifestException, self).__init__()
     self.path = path

The remaining hunks in this file (@@ -33,9 +42,11 @@ through @@ -98,15 +118,18 @@) make the same mechanical change for the other exception classes: EditorError, GitError, UploadError, DownloadError, NoSuchProjectError, InvalidProjectGroupsError, RepoChangedException, and HookError each gain a second blank line before the class and a blank line between the docstring and the first method.

event_log.py

@@ -23,6 +23,7 @@ TASK_COMMAND = 'command'
 TASK_SYNC_NETWORK = 'sync-network'
 TASK_SYNC_LOCAL = 'sync-local'
 
+
 class EventLog(object):
   """Event log that records events that occurred during a repo invocation.
 
@@ -138,7 +139,7 @@ class EventLog(object):
     Returns:
       A dictionary of the event added to the log.
     """
-    event['status'] = self.GetStatusString(success)
+    event['status'] = self.GetStatusString(success)
     event['finish_time'] = finish
     return event
 
@@ -165,6 +166,7 @@ class EventLog(object):
 # An integer id that is unique across this invocation of the program.
 _EVENT_ID = multiprocessing.Value('i', 1)
 
+
 def _NextEventId():
   """Helper function for grabbing the next unique id.

git_command.py (175 lines changed)

@@ -22,12 +22,23 @@ import tempfile
 from signal import SIGTERM
 
 from error import GitError
+from git_refs import HEAD
 import platform_utils
-from trace import REPO_TRACE, IsTrace, Trace
+from repo_trace import REPO_TRACE, IsTrace, Trace
 from wrapper import Wrapper
 
 GIT = 'git'
-MIN_GIT_VERSION = (1, 5, 4)
+# NB: These do not need to be kept in sync with the repo launcher script.
+# These may be much newer as it allows the repo launcher to roll between
+# different repo releases while source versions might require a newer git.
+#
+# The soft version is when we start warning users that the version is old and
+# we'll be dropping support for it.  We'll refuse to work with versions older
+# than the hard version.
+#
+# git-1.7 is in (EOL) Ubuntu Precise.  git-1.9 is in Ubuntu Trusty.
+MIN_GIT_VERSION_SOFT = (1, 9, 1)
+MIN_GIT_VERSION_HARD = (1, 7, 2)
 GIT_DIR = 'GIT_DIR'
 
 LAST_GITDIR = None

The hunks at @@ -37,6 +48,7 @@, @@ -46,27 +58,31 @@, and @@ -77,8 +93,10 @@ are mechanical cleanups of the ssh helper functions in this file: extra blank lines are added between module-level definitions and the continuation arguments of the `tempfile.mkdtemp()` and `os.path.join()` calls are re-indented.

@@ -90,14 +108,99 @@ class _GitCall(object):
   def __getattr__(self, name):
-    name = name.replace('_','-')
+    name = name.replace('_', '-')
+
     def fun(*cmdv):
       command = [name]
       command.extend(cmdv)
       return GitCommand(None, command).Wait() == 0
     return fun
+
+
 git = _GitCall()
+
+
+def RepoSourceVersion():
+  """Return the version of the repo.git tree."""
+  ver = getattr(RepoSourceVersion, 'version', None)
+
+  # We avoid GitCommand so we don't run into circular deps -- GitCommand needs
+  # to initialize version info we provide.
+  if ver is None:
+    env = GitCommand._GetBasicEnv()
+
+    proj = os.path.dirname(os.path.abspath(__file__))
+    env[GIT_DIR] = os.path.join(proj, '.git')
+
+    p = subprocess.Popen([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
+                         env=env)
+    if p.wait() == 0:
+      ver = p.stdout.read().strip().decode('utf-8')
+      if ver.startswith('v'):
+        ver = ver[1:]
+    else:
+      ver = 'unknown'
+    setattr(RepoSourceVersion, 'version', ver)
+
+  return ver
+
+
+class UserAgent(object):
+  """Mange User-Agent settings when talking to external services
+
+  We follow the style as documented here:
+  https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+  """
+
+  _os = None
+  _repo_ua = None
+  _git_ua = None
+
+  @property
+  def os(self):
+    """The operating system name."""
+    if self._os is None:
+      os_name = sys.platform
+      if os_name.lower().startswith('linux'):
+        os_name = 'Linux'
+      elif os_name == 'win32':
+        os_name = 'Win32'
+      elif os_name == 'cygwin':
+        os_name = 'Cygwin'
+      elif os_name == 'darwin':
+        os_name = 'Darwin'
+      self._os = os_name
+
+    return self._os
+
+  @property
+  def repo(self):
+    """The UA when connecting directly from repo."""
+    if self._repo_ua is None:
+      py_version = sys.version_info
+      self._repo_ua = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
+          RepoSourceVersion(),
+          self.os,
+          git.version_tuple().full,
+          py_version.major, py_version.minor, py_version.micro)
+
+    return self._repo_ua
+
+  @property
+  def git(self):
+    """The UA when running git."""
+    if self._git_ua is None:
+      self._git_ua = 'git/%s (%s) git-repo/%s' % (
+          git.version_tuple().full,
+          self.os,
+          RepoSourceVersion())
+
+    return self._git_ua
+
+
+user_agent = UserAgent()
+
+
 def git_require(min_version, fail=False, msg=''):
   git_version = git.version_tuple()
   if min_version <= git_version:
@@ -110,32 +213,24 @@ def git_require(min_version, fail=False, msg=''):
     sys.exit(1)
   return False
 
+
 def _setenv(env, name, value):
   env[name] = value.encode()
 
+
 class GitCommand(object):
   def __init__(self,
                project,
                cmdv,
-               bare = False,
-               provide_stdin = False,
-               capture_stdout = False,
-               capture_stderr = False,
-               disable_editor = False,
-               ssh_proxy = False,
-               cwd = None,
-               gitdir = None):
-    env = os.environ.copy()
-
-    for key in [REPO_TRACE,
-                GIT_DIR,
-                'GIT_ALTERNATE_OBJECT_DIRECTORIES',
-                'GIT_OBJECT_DIRECTORY',
-                'GIT_WORK_TREE',
-                'GIT_GRAFT_FILE',
-                'GIT_INDEX_FILE']:
-      if key in env:
-        del env[key]
+               bare=False,
+               provide_stdin=False,
+               capture_stdout=False,
+               capture_stderr=False,
+               disable_editor=False,
+               ssh_proxy=False,
+               cwd=None,
+               gitdir=None):
+    env = self._GetBasicEnv()
 
     # If we are not capturing std* then need to print it.
     self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
@@ -155,6 +250,7 @@ class GitCommand(object):
     if 'GIT_ALLOW_PROTOCOL' not in env:
       _setenv(env, 'GIT_ALLOW_PROTOCOL',
               'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
+    _setenv(env, 'GIT_HTTP_USER_AGENT', user_agent.git)
 
     if project:
       if not cwd:
@@ -213,11 +309,11 @@ class GitCommand(object):
 
     try:
       p = subprocess.Popen(command,
-                           cwd = cwd,
-                           env = env,
-                           stdin = stdin,
-                           stdout = stdout,
-                           stderr = stderr)
+                           cwd=cwd,
+                           env=env,
+                           stdin=stdin,
+                           stdout=stdout,
+                           stderr=stderr)
     except Exception as e:
       raise GitError('%s: %s' % (command[1], e))
 
@@ -227,6 +323,23 @@ class GitCommand(object):
     self.process = p
     self.stdin = p.stdin
 
+  @staticmethod
+  def _GetBasicEnv():
+    """Return a basic env for running git under.
+
+    This is guaranteed to be side-effect free.
+    """
+    env = os.environ.copy()
+    for key in (REPO_TRACE,
+                GIT_DIR,
+                'GIT_ALTERNATE_OBJECT_DIRECTORIES',
+                'GIT_OBJECT_DIRECTORY',
+                'GIT_WORK_TREE',
+                'GIT_GRAFT_FILE',
+                'GIT_INDEX_FILE'):
+      env.pop(key, None)
+    return env
+
   def Wait(self):
     try:
       p = self.process

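The `UserAgent` class added above builds strings of the form `git-repo/<version> (<os>) git/<git version> Python/<x.y.z>`. The following standalone sketch (not repo's code) shows roughly how such a string can be assembled from the standard library; the repo version is stubbed out and the git version parsing is a simplifying assumption:

```python
# Sketch only; repo_version is stubbed and the git version parsing is naive.
import subprocess
import sys


def repo_user_agent(repo_version='unknown'):
  os_name = {'win32': 'Win32', 'cygwin': 'Cygwin', 'darwin': 'Darwin'}.get(
      sys.platform, 'Linux' if sys.platform.startswith('linux') else sys.platform)
  try:
    out = subprocess.check_output(['git', 'version'], universal_newlines=True)
    git_version = out.split()[2]          # "git version X.Y.Z ..."
  except (OSError, subprocess.CalledProcessError, IndexError):
    git_version = 'unknown'
  py = sys.version_info
  return 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
      repo_version, os_name, git_version, py.major, py.minor, py.micro)


if __name__ == '__main__':
  print(repo_user_agent())
```
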
git_config.py

@@ -44,7 +44,7 @@ else:
 from signal import SIGTERM
 from error import GitError, UploadError
 import platform_utils
-from trace import Trace
+from repo_trace import Trace
 if is_python3():
   from http.client import HTTPException
 else:
@@ -268,30 +274,23 @@ class GitConfig(object):
 
   def _ReadJson(self):
     try:
-      if os.path.getmtime(self._json) \
-      <= os.path.getmtime(self.file):
+      if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
         platform_utils.remove(self._json)
         return None
     except OSError:
       return None
     try:
       Trace(': parsing %s', self.file)
-      fd = open(self._json)
-      try:
+      with open(self._json) as fd:
         return json.load(fd)
-      finally:
-        fd.close()
     except (IOError, ValueError):
       platform_utils.remove(self._json)
       return None
 
   def _SaveJson(self, cache):
     try:
-      fd = open(self._json, 'w')
-      try:
+      with open(self._json, 'w') as fd:
         json.dump(cache, fd, indent=2)
-      finally:
-        fd.close()
     except (IOError, TypeError):
       if os.path.exists(self._json):
         platform_utils.remove(self._json)
@@ -427,17 +428,17 @@ def _open_ssh(host, port=None):
   if key in _master_keys:
     return True
 
-  if not _ssh_master \
-  or 'GIT_SSH' in os.environ \
-  or sys.platform in ('win32', 'cygwin'):
+  if (not _ssh_master
+      or 'GIT_SSH' in os.environ
+      or sys.platform in ('win32', 'cygwin')):
     # failed earlier, or cygwin ssh can't do this
     #
     return False
 
   # We will make two calls to ssh; this is the common part of both calls.
   command_base = ['ssh',
-                  '-o','ControlPath %s' % ssh_sock(),
+                  '-o', 'ControlPath %s' % ssh_sock(),
                   host]
   if port is not None:
     command_base[1:1] = ['-p', str(port)]
@@ -464,16 +465,14 @@ def _open_ssh(host, port=None):
     # to the log there.
     pass
 
-  command = command_base[:1] + \
-            ['-M', '-N'] + \
-            command_base[1:]
+  command = command_base[:1] + ['-M', '-N'] + command_base[1:]
   try:
     Trace(': %s', ' '.join(command))
     p = subprocess.Popen(command)
   except Exception as e:
     _ssh_master = False
     print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
-          % (host,port, str(e)), file=sys.stderr)
+          % (host, port, str(e)), file=sys.stderr)
     return False
 
   time.sleep(1)
@@ -534,7 +537,7 @@ def GetUrlCookieFile(url, quiet):
     cookiefile = None
     proxy = None
     for line in p.stdout:
-      line = line.strip()
+      line = line.strip().decode('utf-8')
       if line.startswith(cookieprefix):
         cookiefile = os.path.expanduser(line[len(cookieprefix):])
       if line.startswith(proxyprefix):
@@ -546,7 +549,7 @@ def GetUrlCookieFile(url, quiet):
   finally:
     p.stdin.close()
     if p.wait():
-      err_msg = p.stderr.read()
+      err_msg = p.stderr.read().decode('utf-8')
       if ' -print_config' in err_msg:
         pass  # Persistent proxy doesn't support -print_config.
       elif not quiet:
@@ -699,7 +705,8 @@ class Remote(object):
       if not rev.startswith(R_HEADS):
         return rev
 
-    raise GitError('remote %s does not have %s' % (self.name, rev))
+    raise GitError('%s: remote %s does not have %s' %
+                   (self.projectname, self.name, rev))
@@ -772,15 +780,12 @@ class Branch(object):
       self._Set('merge', self.merge)
 
     else:
-      fd = open(self._config.file, 'a')
-      try:
+      with open(self._config.file, 'a') as fd:
         fd.write('[branch "%s"]\n' % self.name)
         if self.remote:
           fd.write('\tremote = %s\n' % self.remote.name)
         if self.merge:
           fd.write('\tmerge = %s\n' % self.merge)
-      finally:
-        fd.close()

The remaining hunks in this file are mechanical cleanups of the same kinds as above: keyword arguments lose the spaces around `=` (e.g. `all_keys = all_keys` becomes `all_keys=all_keys` in `Has`, `GetString`, `HasSection`, `ForUser`, `ForRepository`, `_Get`, and the `GitCommand` call), missing spaces after commas are added (e.g. `['-O','check']`), `parts[ 0]` becomes `parts[0]`, backslash line continuations become parenthesized conditions (e.g. in `_InsteadOf`), continuation lines are re-indented, and a second blank line is added between module-level definitions (`IsChange`, `IsId`, `IsTag`, `IsImmutable`, `_key`, `init_ssh`, `_open_ssh`, `close_ssh`, `GetSchemeFromUrl`, `GetUrlCookieFile`, `_preconnect`, `Remote`, `Branch`).

25
git_refs.py
@@ -15,15 +15,15 @@
# limitations under the License.

import os
from trace import Trace
from repo_trace import Trace
import platform_utils

HEAD = 'HEAD'
HEAD = 'HEAD'
R_CHANGES = 'refs/changes/'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
R_PUB = 'refs/published/'
R_M = 'refs/remotes/m/'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
R_PUB = 'refs/published/'
R_M = 'refs/remotes/m/'


class GitRefs(object):
@@ -141,18 +141,11 @@ class GitRefs(object):

def _ReadLoose1(self, path, name):
try:
fd = open(path)
except IOError:
return

try:
try:
with open(path) as fd:
mtime = os.path.getmtime(path)
ref_id = fd.readline()
except (IOError, OSError):
return
finally:
fd.close()
except (IOError, OSError):
return

try:
ref_id = ref_id.decode()
gitc_utils.py
@@ -29,12 +29,15 @@ from error import ManifestParseError
|
||||
|
||||
NUM_BATCH_RETRIEVE_REVISIONID = 32
|
||||
|
||||
|
||||
def get_gitc_manifest_dir():
|
||||
return wrapper.Wrapper().get_gitc_manifest_dir()
|
||||
|
||||
|
||||
def parse_clientdir(gitc_fs_path):
|
||||
return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
|
||||
|
||||
|
||||
def _set_project_revisions(projects):
|
||||
"""Sets the revisionExpr for a list of projects.
|
||||
|
||||
@ -52,7 +55,7 @@ def _set_project_revisions(projects):
|
||||
project.remote.url,
|
||||
project.revisionExpr],
|
||||
capture_stdout=True, cwd='/tmp'))
|
||||
for project in projects if not git_config.IsId(project.revisionExpr)]
|
||||
for project in projects if not git_config.IsId(project.revisionExpr)]
|
||||
for proj, gitcmd in project_gitcmds:
|
||||
if gitcmd.Wait():
|
||||
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
|
||||
@ -63,6 +66,7 @@ def _set_project_revisions(projects):
|
||||
(proj.remote.url, proj.revisionExpr))
|
||||
proj.revisionExpr = revisionExpr
|
||||
|
||||
|
||||
def _manifest_groups(manifest):
|
||||
"""Returns the manifest group string that should be synced
|
||||
|
||||
@ -77,6 +81,7 @@ def _manifest_groups(manifest):
|
||||
groups = 'default,platform-' + platform.system().lower()
|
||||
return groups
|
||||
|
||||
|
||||
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
"""Generate a manifest for shafsd to use for this GITC client.
|
||||
|
||||
@ -104,11 +109,11 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
if not proj.upstream and not git_config.IsId(proj.revisionExpr):
|
||||
proj.upstream = proj.revisionExpr
|
||||
|
||||
if not path in gitc_manifest.paths:
|
||||
if path not in gitc_manifest.paths:
|
||||
# Any new projects need their first revision, even if we weren't asked
|
||||
# for them.
|
||||
projects.append(proj)
|
||||
elif not path in paths:
|
||||
elif path not in paths:
|
||||
# And copy revisions from the previous manifest if we're not updating
|
||||
# them now.
|
||||
gitc_proj = gitc_manifest.paths[path]
|
||||
@ -121,7 +126,7 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
index = 0
|
||||
while index < len(projects):
|
||||
_set_project_revisions(
|
||||
projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
|
||||
projects[index:(index + NUM_BATCH_RETRIEVE_REVISIONID)])
|
||||
index += NUM_BATCH_RETRIEVE_REVISIONID
|
||||
|
||||
if gitc_manifest is not None:
|
||||
@ -140,6 +145,7 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
# Save the manifest.
|
||||
save_manifest(manifest)
|
||||
|
||||
|
||||
def save_manifest(manifest, client_dir=None):
|
||||
"""Save the manifest file in the client_dir.
|
||||
|
||||
|
156
main.py
@ -23,17 +23,18 @@ which takes care of execing this entry point.
|
||||
|
||||
from __future__ import print_function
|
||||
import getpass
|
||||
import imp
|
||||
import netrc
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
import time
|
||||
|
||||
from pyversion import is_python3
|
||||
if is_python3():
|
||||
import urllib.request
|
||||
else:
|
||||
import imp
|
||||
import urllib2
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
@ -45,8 +46,8 @@ except ImportError:
|
||||
|
||||
from color import SetDefaultColoring
|
||||
import event_log
|
||||
from trace import SetTrace
|
||||
from git_command import git, GitCommand
|
||||
from repo_trace import SetTrace
|
||||
from git_command import user_agent
|
||||
from git_config import init_ssh, close_ssh
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
@ -71,8 +72,10 @@ if not is_python3():
|
||||
input = raw_input
|
||||
|
||||
global_options = optparse.OptionParser(
|
||||
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
|
||||
)
|
||||
usage='repo [-p|--paginate|--no-pager] COMMAND [ARGS]',
|
||||
add_help_option=False)
|
||||
global_options.add_option('-h', '--help', action='store_true',
|
||||
help='show this help message and exit')
|
||||
global_options.add_option('-p', '--paginate',
|
||||
dest='pager', action='store_true',
|
||||
help='display command output in the pager')
|
||||
@ -84,7 +87,10 @@ global_options.add_option('--color',
|
||||
help='control color usage: auto, always, never')
|
||||
global_options.add_option('--trace',
|
||||
dest='trace', action='store_true',
|
||||
help='trace git command execution')
|
||||
help='trace git command execution (REPO_TRACE=1)')
|
||||
global_options.add_option('--trace-python',
|
||||
dest='trace_python', action='store_true',
|
||||
help='trace python command execution')
|
||||
global_options.add_option('--time',
|
||||
dest='time', action='store_true',
|
||||
help='time repo command execution')
|
||||
@ -95,6 +101,7 @@ global_options.add_option('--event-log',
|
||||
dest='event_log', action='store',
|
||||
help='filename of event log to append timeline to')
|
||||
|
||||
|
||||
class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
self.repodir = repodir
|
||||
@ -102,8 +109,8 @@ class _Repo(object):
|
||||
# add 'branch' as an alias for 'branches'
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
||||
def _Run(self, argv):
|
||||
result = 0
|
||||
def _ParseArgs(self, argv):
|
||||
"""Parse the main `repo` command line options."""
|
||||
name = None
|
||||
glob = []
|
||||
|
||||
@ -120,6 +127,20 @@ class _Repo(object):
|
||||
argv = []
|
||||
gopts, _gargs = global_options.parse_args(glob)
|
||||
|
||||
if gopts.help:
|
||||
global_options.print_help()
|
||||
commands = ' '.join(sorted(self.commands))
|
||||
wrapped_commands = textwrap.wrap(commands, width=77)
|
||||
print('\nAvailable commands:\n %s' % ('\n '.join(wrapped_commands),))
|
||||
print('\nRun `repo help <command>` for command-specific details.')
|
||||
global_options.exit()
|
||||
|
||||
return (name, gopts, argv)
|
||||
|
||||
def _Run(self, name, gopts, argv):
|
||||
"""Execute the requested subcommand."""
|
||||
result = 0
|
||||
|
||||
if gopts.trace:
|
||||
SetTrace()
|
||||
if gopts.show_version:
|
||||
@ -168,7 +189,7 @@ class _Repo(object):
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
except NoManifestException as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
file=sys.stderr)
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
@ -188,11 +209,12 @@ class _Repo(object):
|
||||
cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
|
||||
cmd.event_log.SetParent(cmd_event)
|
||||
try:
|
||||
cmd.ValidateOptions(copts, cargs)
|
||||
result = cmd.Execute(copts, cargs)
|
||||
except (DownloadError, ManifestInvalidRevisionError,
|
||||
NoManifestException) as e:
|
||||
NoManifestException) as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
file=sys.stderr)
|
||||
if isinstance(e, NoManifestException):
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
@ -234,31 +256,41 @@ class _Repo(object):
|
||||
return result
|
||||
|
||||
|
||||
def _MyRepoPath():
|
||||
return os.path.dirname(__file__)
|
||||
def _CheckWrapperVersion(ver_str, repo_path):
|
||||
"""Verify the repo launcher is new enough for this checkout.
|
||||
|
||||
Args:
|
||||
ver_str: The version string passed from the repo launcher when it ran us.
|
||||
repo_path: The path to the repo launcher that loaded us.
|
||||
"""
|
||||
# Refuse to work with really old wrapper versions. We don't test these,
|
||||
# so might as well require a somewhat recent sane version.
|
||||
# v1.15 of the repo launcher was released in ~Mar 2012.
|
||||
MIN_REPO_VERSION = (1, 15)
|
||||
min_str = '.'.join(str(x) for x in MIN_REPO_VERSION)
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
if not repo_path:
|
||||
repo_path = '~/bin/repo'
|
||||
|
||||
if not ver:
|
||||
if not ver_str:
|
||||
print('no --wrapper-version argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Pull out the version of the repo launcher we know about to compare.
|
||||
exp = Wrapper().VERSION
|
||||
ver = tuple(map(int, ver.split('.')))
|
||||
if len(ver) == 1:
|
||||
ver = (0, ver[0])
|
||||
ver = tuple(map(int, ver_str.split('.')))
|
||||
|
||||
exp_str = '.'.join(map(str, exp))
|
||||
if exp[0] > ver[0] or ver < (0, 4):
|
||||
if ver < MIN_REPO_VERSION:
|
||||
print("""
|
||||
!!! A new repo command (%5s) is available. !!!
|
||||
!!! You must upgrade before you can continue: !!!
|
||||
repo: error:
|
||||
!!! Your version of repo %s is too old.
|
||||
!!! We need at least version %s.
|
||||
!!! A new repo command (%s) is available.
|
||||
!!! You must upgrade before you can continue:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (ver_str, min_str, exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if exp > ver:
|
||||
@ -269,11 +301,13 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
cp %s %s
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
|
||||
|
||||
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
print('no --repo-dir argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _PruneOptions(argv, opt):
|
||||
i = 0
|
||||
while i < len(argv):
|
||||
@ -289,53 +323,17 @@ def _PruneOptions(argv, opt):
|
||||
continue
|
||||
i += 1
|
||||
|
||||
_user_agent = None
|
||||
|
||||
def _UserAgent():
|
||||
global _user_agent
|
||||
|
||||
if _user_agent is None:
|
||||
py_version = sys.version_info
|
||||
|
||||
os_name = sys.platform
|
||||
if os_name == 'linux2':
|
||||
os_name = 'Linux'
|
||||
elif os_name == 'win32':
|
||||
os_name = 'Win32'
|
||||
elif os_name == 'cygwin':
|
||||
os_name = 'Cygwin'
|
||||
elif os_name == 'darwin':
|
||||
os_name = 'Darwin'
|
||||
|
||||
p = GitCommand(
|
||||
None, ['describe', 'HEAD'],
|
||||
cwd = _MyRepoPath(),
|
||||
capture_stdout = True)
|
||||
if p.Wait() == 0:
|
||||
repo_version = p.stdout
|
||||
if len(repo_version) > 0 and repo_version[-1] == '\n':
|
||||
repo_version = repo_version[0:-1]
|
||||
if len(repo_version) > 0 and repo_version[0] == 'v':
|
||||
repo_version = repo_version[1:]
|
||||
else:
|
||||
repo_version = 'unknown'
|
||||
|
||||
_user_agent = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||
repo_version,
|
||||
os_name,
|
||||
git.version_tuple().full,
|
||||
py_version[0], py_version[1], py_version[2])
|
||||
return _user_agent
|
||||
|
||||
class _UserAgentHandler(urllib.request.BaseHandler):
|
||||
def http_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
req.add_header('User-Agent', user_agent.repo)
|
||||
return req
|
||||
|
||||
def https_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
req.add_header('User-Agent', user_agent.repo)
|
||||
return req
|
||||
|
||||
|
||||
def _AddPasswordFromUserInput(handler, msg, req):
|
||||
# If repo could not find auth info from netrc, try to get it from user input
|
||||
url = req.get_full_url()
|
||||
@ -349,22 +347,24 @@ def _AddPasswordFromUserInput(handler, msg, req):
|
||||
return
|
||||
handler.passwd.add_password(None, url, user, password)
|
||||
|
||||
|
||||
class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPBasicAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||
self, authreq, host, req, headers)
|
||||
except:
|
||||
self, authreq, host, req, headers)
|
||||
except Exception:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
@ -372,22 +372,24 @@ class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
|
||||
class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPDigestAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||
self, auth_header, host, req, headers)
|
||||
except:
|
||||
self, auth_header, host, req, headers)
|
||||
except Exception:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
@ -395,6 +397,7 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
|
||||
class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
def __init__(self):
|
||||
self.retried = 0
|
||||
@ -413,7 +416,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
|
||||
if self.retried > 3:
|
||||
raise urllib.request.HTTPError(req.get_full_url(), 401,
|
||||
"Negotiate auth failed", headers, None)
|
||||
"Negotiate auth failed", headers, None)
|
||||
else:
|
||||
self.retried += 1
|
||||
|
||||
@ -429,7 +432,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
return response
|
||||
except kerberos.GSSError:
|
||||
return None
|
||||
except:
|
||||
except Exception:
|
||||
self.reset_retry_count()
|
||||
raise
|
||||
finally:
|
||||
@ -475,6 +478,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
kerberos.authGSSClientClean(self.context)
|
||||
self.context = None
|
||||
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
@ -483,7 +487,7 @@ def init_http():
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||
except netrc.NetrcParseError:
|
||||
pass
|
||||
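The hunk above seeds the HTTP password manager from `~/.netrc`. A small illustration of the same idea using only the standard library (the `mgr` instance here is hypothetical, not repo's own object):

```python
import netrc
import urllib.request

mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
try:
    n = netrc.netrc()  # parses ~/.netrc by default
    for host, (login, _account, password) in n.hosts.items():
        mgr.add_password(None, 'http://%s/' % host, login, password)
        mgr.add_password(None, 'https://%s/' % host, login, password)
except (IOError, netrc.NetrcParseError):
    pass  # no usable .netrc; continue without stored credentials
```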
@ -502,6 +506,7 @@ def init_http():
|
||||
handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
|
||||
urllib.request.install_opener(urllib.request.build_opener(*handlers))
|
||||
|
||||
|
||||
def _Main(argv):
|
||||
result = 0
|
||||
|
||||
@@ -526,7 +531,15 @@ def _Main(argv):
try:
init_ssh()
init_http()
result = repo._Run(argv) or 0
name, gopts, argv = repo._ParseArgs(argv)
run = lambda: repo._Run(name, gopts, argv) or 0
if gopts.trace_python:
import trace
tracer = trace.Trace(count=False, trace=True, timing=True,
ignoredirs=set(sys.path[1:]))
result = tracer.runfunc(run)
else:
result = run()
finally:
close_ssh()
except KeyboardInterrupt:
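The new `--trace-python` path above wraps the whole subcommand in the standard library's `trace` module so every executed line is printed with timing. A self-contained sketch of that pattern, with a throwaway function standing in for the repo subcommand:

```python
import sys
import trace

def work():
    total = 0
    for i in range(3):
        total += i
    return total

# Trace executed lines with timestamps, but ignore everything under sys.path
# entries other than the script's own directory (mirrors the hunk above).
tracer = trace.Trace(count=False, trace=True, timing=True,
                     ignoredirs=set(sys.path[1:]))
result = tracer.runfunc(work)
print('result:', result)
```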
@ -550,5 +563,6 @@ def _Main(argv):
|
||||
TerminatePager()
|
||||
sys.exit(result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
_Main(sys.argv[1:])
|
||||
|
193
manifest_xml.py
@ -35,7 +35,8 @@ from git_config import GitConfig
|
||||
from git_refs import R_HEADS, HEAD
|
||||
import platform_utils
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError, ManifestInvalidRevisionError
|
||||
from error import (ManifestParseError, ManifestInvalidPathError,
|
||||
ManifestInvalidRevisionError)
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
@ -55,6 +56,7 @@ urllib.parse.uses_netloc.extend([
|
||||
'sso',
|
||||
'rpc'])
|
||||
|
||||
|
||||
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
@ -73,6 +75,7 @@ class _Default(object):
|
||||
def __ne__(self, other):
|
||||
return self.__dict__ != other.__dict__
|
||||
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
@ -126,6 +129,7 @@ class _XmlRemote(object):
|
||||
orig_name=self.name,
|
||||
fetchUrl=self.fetchUrl)
|
||||
|
||||
|
||||
class XmlManifest(object):
|
||||
"""manages the repo configuration file"""
|
||||
|
||||
@ -139,12 +143,12 @@ class XmlManifest(object):
|
||||
self._load_local_manifests = True
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
worktree = os.path.join(repodir, 'repo'))
|
||||
gitdir=os.path.join(repodir, 'repo/.git'),
|
||||
worktree=os.path.join(repodir, 'repo'))
|
||||
|
||||
self.manifestProject = MetaProject(self, 'manifests',
|
||||
gitdir = os.path.join(repodir, 'manifests.git'),
|
||||
worktree = os.path.join(repodir, 'manifests'))
|
||||
gitdir=os.path.join(repodir, 'manifests.git'),
|
||||
worktree=os.path.join(repodir, 'manifests'))
|
||||
|
||||
self._Unload()
|
||||
|
||||
@ -223,7 +227,7 @@ class XmlManifest(object):
|
||||
if self.notice:
|
||||
notice_element = root.appendChild(doc.createElement('notice'))
|
||||
notice_lines = self.notice.splitlines()
|
||||
indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
|
||||
indented_notice = ('\n'.join(" " * 4 + line for line in notice_lines))[4:]
|
||||
notice_element.appendChild(doc.createTextNode(indented_notice))
|
||||
|
||||
d = self.default
|
||||
@ -461,12 +465,12 @@ class XmlManifest(object):
|
||||
self.localManifestWarning = True
|
||||
print('warning: %s is deprecated; put local manifests '
|
||||
'in `%s` instead' % (LOCAL_MANIFEST_NAME,
|
||||
os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
file=sys.stderr)
|
||||
nodes.append(self._ParseManifestXml(local, self.repodir))
|
||||
|
||||
local_dir = os.path.abspath(os.path.join(self.repodir,
|
||||
LOCAL_MANIFESTS_DIR_NAME))
|
||||
LOCAL_MANIFESTS_DIR_NAME))
|
||||
try:
|
||||
for local_file in sorted(platform_utils.listdir(local_dir)):
|
||||
if local_file.endswith('.xml'):
|
||||
@ -511,7 +515,7 @@ class XmlManifest(object):
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError("include %s doesn't exist or isn't a file"
|
||||
% (name,))
|
||||
% (name,))
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
@ -598,6 +602,9 @@ class XmlManifest(object):
|
||||
if groups:
|
||||
groups = self._ParseGroups(groups)
|
||||
revision = node.getAttribute('revision')
|
||||
remote = node.getAttribute('remote')
|
||||
if remote:
|
||||
remote = self._get_remote(node)
|
||||
|
||||
for p in self._projects[name]:
|
||||
if path and p.relpath != path:
|
||||
@ -606,6 +613,8 @@ class XmlManifest(object):
|
||||
p.groups.extend(groups)
|
||||
if revision:
|
||||
p.revisionExpr = revision
|
||||
if remote:
|
||||
p.remote = remote.ToRemoteSpec(name)
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
@ -649,7 +658,6 @@ class XmlManifest(object):
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
self._repo_hooks_project = None
|
||||
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
@ -676,15 +684,15 @@ class XmlManifest(object):
|
||||
if name not in self._projects:
|
||||
m.PreSync()
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = name or None,
|
||||
revisionExpr = m.revisionExpr,
|
||||
revisionId = None)
|
||||
project = Project(manifest=self,
|
||||
name=name,
|
||||
remote=remote.ToRemoteSpec(name),
|
||||
gitdir=gitdir,
|
||||
objdir=gitdir,
|
||||
worktree=None,
|
||||
relpath=name or None,
|
||||
revisionExpr=m.revisionExpr,
|
||||
revisionId=None)
|
||||
self._projects[project.name] = [project]
|
||||
self._paths[project.relpath] = project
|
||||
|
||||
@ -792,7 +800,7 @@ class XmlManifest(object):
|
||||
def _UnjoinName(self, parent_name, name):
|
||||
return os.path.relpath(name, parent_name)
|
||||
|
||||
def _ParseProject(self, node, parent = None, **extra_proj_attrs):
|
||||
def _ParseProject(self, node, parent=None, **extra_proj_attrs):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
@ -805,21 +813,21 @@ class XmlManifest(object):
|
||||
remote = self._default.remote
|
||||
if remote is None:
|
||||
raise ManifestParseError("no remote for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
revisionExpr = node.getAttribute('revision') or remote.revision
|
||||
if not revisionExpr:
|
||||
revisionExpr = self._default.revisionExpr
|
||||
if not revisionExpr:
|
||||
raise ManifestParseError("no revision for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
path = node.getAttribute('path')
|
||||
if not path:
|
||||
path = name
|
||||
if path.startswith('/'):
|
||||
raise ManifestParseError("project %s path cannot be absolute in %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
rebase = node.getAttribute('rebase')
|
||||
if not rebase:
|
||||
@ -849,7 +857,7 @@ class XmlManifest(object):
|
||||
if clone_depth:
|
||||
try:
|
||||
clone_depth = int(clone_depth)
|
||||
if clone_depth <= 0:
|
||||
if clone_depth <= 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ManifestParseError('invalid clone-depth %s in %s' %
|
||||
@ -877,24 +885,24 @@ class XmlManifest(object):
|
||||
if node.getAttribute('force-path').lower() in ("yes", "true", "1"):
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = objdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
revisionId = None,
|
||||
rebase = rebase,
|
||||
groups = groups,
|
||||
sync_c = sync_c,
|
||||
sync_s = sync_s,
|
||||
sync_tags = sync_tags,
|
||||
clone_depth = clone_depth,
|
||||
upstream = upstream,
|
||||
parent = parent,
|
||||
dest_branch = dest_branch,
|
||||
project = Project(manifest=self,
|
||||
name=name,
|
||||
remote=remote.ToRemoteSpec(name),
|
||||
gitdir=gitdir,
|
||||
objdir=objdir,
|
||||
worktree=worktree,
|
||||
relpath=relpath,
|
||||
revisionExpr=revisionExpr,
|
||||
revisionId=None,
|
||||
rebase=rebase,
|
||||
groups=groups,
|
||||
sync_c=sync_c,
|
||||
sync_s=sync_s,
|
||||
sync_tags=sync_tags,
|
||||
clone_depth=clone_depth,
|
||||
upstream=upstream,
|
||||
parent=parent,
|
||||
dest_branch=dest_branch,
|
||||
**extra_proj_attrs)
|
||||
|
||||
for n in node.childNodes:
|
||||
@ -905,7 +913,7 @@ class XmlManifest(object):
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
project.subprojects.append(self._ParseProject(n, parent = project))
|
||||
project.subprojects.append(self._ParseProject(n, parent=project))
|
||||
|
||||
return project
|
||||
|
||||
@ -943,21 +951,101 @@ class XmlManifest(object):
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
@staticmethod
|
||||
def _CheckLocalPath(path, symlink=False):
|
||||
"""Verify |path| is reasonable for use in <copyfile> & <linkfile>."""
|
||||
if '~' in path:
|
||||
return '~ not allowed (due to 8.3 filenames on Windows filesystems)'
|
||||
|
||||
# Some filesystems (like Apple's HFS+) try to normalize Unicode codepoints
|
||||
# which means there are alternative names for ".git". Reject paths with
|
||||
# these in it as there shouldn't be any reasonable need for them here.
|
||||
# The set of codepoints here was cribbed from jgit's implementation:
|
||||
# https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
|
||||
BAD_CODEPOINTS = {
|
||||
u'\u200C', # ZERO WIDTH NON-JOINER
|
||||
u'\u200D', # ZERO WIDTH JOINER
|
||||
u'\u200E', # LEFT-TO-RIGHT MARK
|
||||
u'\u200F', # RIGHT-TO-LEFT MARK
|
||||
u'\u202A', # LEFT-TO-RIGHT EMBEDDING
|
||||
u'\u202B', # RIGHT-TO-LEFT EMBEDDING
|
||||
u'\u202C', # POP DIRECTIONAL FORMATTING
|
||||
u'\u202D', # LEFT-TO-RIGHT OVERRIDE
|
||||
u'\u202E', # RIGHT-TO-LEFT OVERRIDE
|
||||
u'\u206A', # INHIBIT SYMMETRIC SWAPPING
|
||||
u'\u206B', # ACTIVATE SYMMETRIC SWAPPING
|
||||
u'\u206C', # INHIBIT ARABIC FORM SHAPING
|
||||
u'\u206D', # ACTIVATE ARABIC FORM SHAPING
|
||||
u'\u206E', # NATIONAL DIGIT SHAPES
|
||||
u'\u206F', # NOMINAL DIGIT SHAPES
|
||||
u'\uFEFF', # ZERO WIDTH NO-BREAK SPACE
|
||||
}
|
||||
if BAD_CODEPOINTS & set(path):
|
||||
# This message is more expansive than reality, but should be fine.
|
||||
return 'Unicode combining characters not allowed'
|
||||
|
||||
# Assume paths might be used on case-insensitive filesystems.
|
||||
path = path.lower()
|
||||
|
||||
# Some people use src="." to create stable links to projects. Lets allow
|
||||
# that but reject all other uses of "." to keep things simple.
|
||||
parts = path.split(os.path.sep)
|
||||
if parts != ['.']:
|
||||
for part in set(parts):
|
||||
if part in {'.', '..', '.git'} or part.startswith('.repo'):
|
||||
return 'bad component: %s' % (part,)
|
||||
|
||||
if not symlink and path.endswith(os.path.sep):
|
||||
return 'dirs not allowed'
|
||||
|
||||
norm = os.path.normpath(path)
|
||||
if norm == '..' or norm.startswith('../') or norm.startswith(os.path.sep):
|
||||
return 'path cannot be outside'
|
||||
|
||||
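A condensed sketch of the per-component check `_CheckLocalPath` performs above, returning an error string for anything that could escape the checkout. This is a trimmed subset for illustration (it omits the Unicode codepoint rejection), with a hypothetical helper name:

```python
import os

def check_local_path(path, symlink=False):
    """Return an error string if |path| is not safe for <copyfile>/<linkfile>."""
    if '~' in path:
        return '~ not allowed'
    parts = path.lower().split(os.path.sep)
    if parts != ['.']:
        for part in set(parts):
            if part in {'.', '..', '.git'} or part.startswith('.repo'):
                return 'bad component: %s' % (part,)
    if not symlink and path.endswith(os.path.sep):
        return 'dirs not allowed'
    norm = os.path.normpath(path)
    if norm == '..' or norm.startswith('../') or norm.startswith(os.path.sep):
        return 'path cannot be outside'
    return None

assert check_local_path('etc/hosts') is None
assert check_local_path('../escape') is not None
assert check_local_path('sub/.git/config') is not None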
@classmethod
|
||||
def _ValidateFilePaths(cls, element, src, dest):
|
||||
"""Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
|
||||
|
||||
We verify the path independent of any filesystem state as we won't have a
|
||||
checkout available to compare to. i.e. This is for parsing validation
|
||||
purposes only.
|
||||
|
||||
We'll do full/live sanity checking before we do the actual filesystem
|
||||
modifications in _CopyFile/_LinkFile/etc...
|
||||
"""
|
||||
# |dest| is the file we write to or symlink we create.
|
||||
# It is relative to the top of the repo client checkout.
|
||||
msg = cls._CheckLocalPath(dest)
|
||||
if msg:
|
||||
raise ManifestInvalidPathError(
|
||||
'<%s> invalid "dest": %s: %s' % (element, dest, msg))
|
||||
|
||||
# |src| is the file we read from or path we point to for symlinks.
|
||||
# It is relative to the top of the git project checkout.
|
||||
msg = cls._CheckLocalPath(src, symlink=element == 'linkfile')
|
||||
if msg:
|
||||
raise ManifestInvalidPathError(
|
||||
'<%s> invalid "src": %s: %s' % (element, src, msg))
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
# dest is relative to the top of the tree.
|
||||
# We only validate paths if we actually plan to process them.
|
||||
self._ValidateFilePaths('copyfile', src, dest)
|
||||
project.AddCopyFile(src, dest, self.topdir)
|
||||
|
||||
def _ParseLinkFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
|
||||
# dest is relative to the top of the tree.
|
||||
# We only validate paths if we actually plan to process them.
|
||||
self._ValidateFilePaths('linkfile', src, dest)
|
||||
project.AddLinkFile(src, dest, self.topdir)
|
||||
|
||||
def _ParseAnnotation(self, project, node):
|
||||
name = self._reqatt(node, 'name')
|
||||
@ -968,7 +1056,7 @@ class XmlManifest(object):
|
||||
keep = "true"
|
||||
if keep != "true" and keep != "false":
|
||||
raise ManifestParseError('optional "keep" attribute must be '
|
||||
'"true" or "false"')
|
||||
'"true" or "false"')
|
||||
project.AddAnnotation(name, value, keep)
|
||||
|
||||
def _get_remote(self, node):
|
||||
@ -979,7 +1067,7 @@ class XmlManifest(object):
|
||||
v = self._remotes.get(name)
|
||||
if not v:
|
||||
raise ManifestParseError("remote %s not defined in %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
return v
|
||||
|
||||
def _reqatt(self, node, attname):
|
||||
@ -989,7 +1077,7 @@ class XmlManifest(object):
|
||||
v = node.getAttribute(attname)
|
||||
if not v:
|
||||
raise ManifestParseError("no %s in <%s> within %s" %
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
return v
|
||||
|
||||
def projectsDiff(self, manifest):
|
||||
@ -1007,7 +1095,7 @@ class XmlManifest(object):
|
||||
diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
|
||||
|
||||
for proj in fromKeys:
|
||||
if not proj in toKeys:
|
||||
if proj not in toKeys:
|
||||
diff['removed'].append(fromProjects[proj])
|
||||
else:
|
||||
fromProj = fromProjects[proj]
|
||||
@ -1039,7 +1127,7 @@ class GitcManifest(XmlManifest):
|
||||
gitc_client_name)
|
||||
self.manifestFile = os.path.join(self.gitc_client_dir, '.manifest')
|
||||
|
||||
def _ParseProject(self, node, parent = None):
|
||||
def _ParseProject(self, node, parent=None):
|
||||
"""Override _ParseProject and add support for GITC specific attributes."""
|
||||
return super(GitcManifest, self)._ParseProject(
|
||||
node, parent=parent, old_revision=node.getAttribute('old-revision'))
|
||||
@ -1048,4 +1136,3 @@ class GitcManifest(XmlManifest):
|
||||
"""Output GITC Specific Project attributes"""
|
||||
if p.old_revision:
|
||||
e.setAttribute('old-revision', str(p.old_revision))
|
||||
|
||||
|
10
pager.py
Executable file → Normal file
@@ -27,6 +27,7 @@ pager_process = None
old_stdout = None
old_stderr = None


def RunPager(globalConfig):
if not os.isatty(0) or not os.isatty(1):
return
@@ -35,23 +36,25 @@ def RunPager(globalConfig):
return

if platform_utils.isWindows():
_PipePager(pager);
_PipePager(pager)
else:
_ForkPager(pager)


def TerminatePager():
global pager_process, old_stdout, old_stderr
if pager_process:
sys.stdout.flush()
sys.stderr.flush()
pager_process.stdin.close()
pager_process.wait();
pager_process.wait()
pager_process = None
# Restore initial stdout/err in case there is more output in this process
# after shutting down the pager process
sys.stdout = old_stdout
sys.stderr = old_stderr


def _PipePager(pager):
global pager_process, old_stdout, old_stderr
assert pager_process is None, "Only one active pager process at a time"
@@ -62,6 +65,7 @@ def _PipePager(pager):
sys.stdout = pager_process.stdin
sys.stderr = pager_process.stdin


def _ForkPager(pager):
global active
# This process turns into the pager; a child it forks will
@@ -88,6 +92,7 @@ def _ForkPager(pager):
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
sys.exit(255)


def _SelectPager(globalConfig):
try:
return os.environ['GIT_PAGER']
@@ -105,6 +110,7 @@ def _SelectPager(globalConfig):

return 'less'


def _BecomePager(pager):
# Delaying execution of the pager until we have output
# ready works around a long-standing bug in popularly
platform_utils.py
@@ -80,7 +80,7 @@ class FileDescriptorStreams(object):
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _create_stream(fd, dest, std_name):
|
||||
def _create_stream(self, fd, dest, std_name):
|
||||
""" Creates a new stream wrapping an existing file descriptor.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
@ -92,6 +92,7 @@ class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
|
||||
"""
|
||||
class Stream(object):
|
||||
""" Encapsulates a file descriptor """
|
||||
|
||||
def __init__(self, fd, dest, std_name):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
@ -125,6 +126,7 @@ class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
non blocking I/O. This implementation requires creating threads issuing
|
||||
blocking read operations on file descriptors.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(_FileDescriptorStreamsThreads, self).__init__()
|
||||
# The queue is shared accross all threads so we can simulate the
|
||||
@ -144,12 +146,14 @@ class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
|
||||
class QueueItem(object):
|
||||
""" Item put in the shared queue """
|
||||
|
||||
def __init__(self, stream, data):
|
||||
self.stream = stream
|
||||
self.data = data
|
||||
|
||||
class Stream(object):
|
||||
""" Encapsulates a file descriptor """
|
||||
|
||||
def __init__(self, fd, dest, std_name, queue):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
@ -241,14 +245,15 @@ def _makelongpath(path):
|
||||
return path
|
||||
|
||||
|
||||
def rmtree(path):
|
||||
def rmtree(path, ignore_errors=False):
|
||||
"""shutil.rmtree(path) wrapper with support for long paths on Windows.
|
||||
|
||||
Availability: Unix, Windows."""
|
||||
onerror = None
|
||||
if isWindows():
|
||||
shutil.rmtree(_makelongpath(path), onerror=handle_rmtree_error)
|
||||
else:
|
||||
shutil.rmtree(path)
|
||||
path = _makelongpath(path)
|
||||
onerror = handle_rmtree_error
|
||||
shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)
|
||||
|
||||
|
||||
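The reworked `rmtree` above funnels Windows read-only failures through an `onerror` handler instead of branching per platform. A minimal sketch of that pattern; the handler name is illustrative, and the real handler lives in `platform_utils`:

```python
import os
import shutil
import stat

def _chmod_and_retry(func, path, excinfo):
    """shutil.rmtree onerror hook: clear the read-only bit and try again."""
    os.chmod(path, stat.S_IWRITE)
    func(path)

def rmtree(path, ignore_errors=False):
    shutil.rmtree(path, ignore_errors=ignore_errors, onerror=_chmod_and_retry)
```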
def handle_rmtree_error(function, path, excinfo):
|
||||
|
platform_utils_win32.py
@@ -16,15 +16,21 @@
|
||||
|
||||
import errno
|
||||
|
||||
from pyversion import is_python3
|
||||
from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
|
||||
from ctypes import c_buffer
|
||||
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE, POINTER, c_ubyte
|
||||
from ctypes.wintypes import WCHAR, USHORT, LPVOID, Structure, Union, ULONG
|
||||
from ctypes.wintypes import byref
|
||||
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
|
||||
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG
|
||||
if is_python3():
|
||||
from ctypes import c_ubyte, Structure, Union, byref
|
||||
from ctypes.wintypes import LPDWORD
|
||||
else:
|
||||
# For legacy Python2 different imports are needed.
|
||||
from ctypes.wintypes import POINTER, c_ubyte, Structure, Union, byref
|
||||
LPDWORD = POINTER(DWORD)
|
||||
|
||||
kernel32 = WinDLL('kernel32', use_last_error=True)
|
||||
|
||||
LPDWORD = POINTER(DWORD)
|
||||
UCHAR = c_ubyte
|
||||
|
||||
# Win32 error codes
|
||||
@ -179,7 +185,7 @@ def readlink(path):
|
||||
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
||||
_raise_winerror(
|
||||
get_last_error(),
|
||||
'Error opening symblic link \"%s\"'.format(path))
|
||||
'Error opening symbolic link \"%s\"'.format(path))
|
||||
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
||||
n_bytes_returned = DWORD()
|
||||
io_result = DeviceIoControl(reparse_point_handle,
|
||||
@ -194,7 +200,7 @@ def readlink(path):
|
||||
if not io_result:
|
||||
_raise_winerror(
|
||||
get_last_error(),
|
||||
'Error reading symblic link \"%s\"'.format(path))
|
||||
'Error reading symbolic link \"%s\"'.format(path))
|
||||
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
||||
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
||||
return _preserve_encoding(path, rdb.SymbolicLinkReparseBuffer.PrintName)
|
||||
@ -203,11 +209,15 @@ def readlink(path):
|
||||
# Unsupported reparse point type
|
||||
_raise_winerror(
|
||||
ERROR_NOT_SUPPORTED,
|
||||
'Error reading symblic link \"%s\"'.format(path))
|
||||
'Error reading symbolic link \"%s\"'.format(path))
|
||||
|
||||
|
||||
def _preserve_encoding(source, target):
|
||||
"""Ensures target is the same string type (i.e. unicode or str) as source."""
|
||||
|
||||
if is_python3():
|
||||
return target
|
||||
|
||||
if isinstance(source, unicode):
|
||||
return unicode(target)
|
||||
return str(target)
|
||||
|
49
progress.py
@ -17,10 +17,16 @@
|
||||
import os
|
||||
import sys
|
||||
from time import time
|
||||
from trace import IsTrace
|
||||
from repo_trace import IsTrace
|
||||
|
||||
_NOT_TTY = not os.isatty(2)
|
||||
|
||||
# This will erase all content in the current line (wherever the cursor is).
|
||||
# It does not move the cursor, so this is usually followed by \r to move to
|
||||
# column 0.
|
||||
CSI_ERASE_LINE = '\x1b[2K'
|
||||
|
||||
|
||||
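`CSI_ERASE_LINE` above is the ANSI escape sequence the progress bar now emits before every redraw so stale text never lingers on the line. In isolation the redraw loop looks roughly like this (purely illustrative output):

```python
import sys
import time

CSI_ERASE_LINE = '\x1b[2K'

for pct in range(0, 101, 20):
    # Erase the current line, return to column 0, then rewrite the status.
    sys.stderr.write('%s\rSyncing: %3d%%' % (CSI_ERASE_LINE, pct))
    sys.stderr.flush()
    time.sleep(0.1)
sys.stderr.write('\n')
```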
class Progress(object):
|
||||
def __init__(self, title, total=0, units='', print_newline=False,
|
||||
always_print_percentage=False):
|
||||
@ -34,7 +40,7 @@ class Progress(object):
|
||||
self._print_newline = print_newline
|
||||
self._always_print_percentage = always_print_percentage
|
||||
|
||||
def update(self, inc=1):
|
||||
def update(self, inc=1, msg=''):
|
||||
self._done += inc
|
||||
|
||||
if _NOT_TTY or IsTrace():
|
||||
@ -47,21 +53,24 @@ class Progress(object):
|
||||
return
|
||||
|
||||
if self._total <= 0:
|
||||
sys.stderr.write('\r%s: %d, ' % (
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.write('%s\r%s: %d,' % (
|
||||
CSI_ERASE_LINE,
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.flush()
|
||||
else:
|
||||
p = (100 * self._done) / self._total
|
||||
|
||||
if self._lastp != p or self._always_print_percentage:
|
||||
self._lastp = p
|
||||
sys.stderr.write('\r%s: %3d%% (%d%s/%d%s)%s' % (
|
||||
self._title,
|
||||
p,
|
||||
self._done, self._units,
|
||||
self._total, self._units,
|
||||
"\n" if self._print_newline else ""))
|
||||
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s)%s%s%s' % (
|
||||
CSI_ERASE_LINE,
|
||||
self._title,
|
||||
p,
|
||||
self._done, self._units,
|
||||
self._total, self._units,
|
||||
' ' if msg else '', msg,
|
||||
"\n" if self._print_newline else ""))
|
||||
sys.stderr.flush()
|
||||
|
||||
def end(self):
|
||||
@ -69,15 +78,17 @@ class Progress(object):
|
||||
return
|
||||
|
||||
if self._total <= 0:
|
||||
sys.stderr.write('\r%s: %d, done. \n' % (
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.write('%s\r%s: %d, done.\n' % (
|
||||
CSI_ERASE_LINE,
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.flush()
|
||||
else:
|
||||
p = (100 * self._done) / self._total
|
||||
sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done. \n' % (
|
||||
self._title,
|
||||
p,
|
||||
self._done, self._units,
|
||||
self._total, self._units))
|
||||
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s), done.\n' % (
|
||||
CSI_ERASE_LINE,
|
||||
self._title,
|
||||
p,
|
||||
self._done, self._units,
|
||||
self._total, self._units))
|
||||
sys.stderr.flush()
|
||||
|
615
project.py
Executable file → Normal file
@ -18,6 +18,7 @@ from __future__ import print_function
|
||||
import errno
|
||||
import filecmp
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
@ -35,10 +36,11 @@ from git_command import GitCommand, git_require
|
||||
from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \
|
||||
ID_RE
|
||||
from error import GitError, HookError, UploadError, DownloadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import ManifestInvalidRevisionError, ManifestInvalidPathError
|
||||
from error import NoManifestException
|
||||
import platform_utils
|
||||
from trace import IsTrace, Trace
|
||||
import progress
|
||||
from repo_trace import IsTrace, Trace
|
||||
|
||||
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
|
||||
|
||||
@ -56,11 +58,8 @@ else:
|
||||
def _lwrite(path, content):
|
||||
lock = '%s.lock' % path
|
||||
|
||||
fd = open(lock, 'w')
|
||||
try:
|
||||
with open(lock, 'w') as fd:
|
||||
fd.write(content)
|
||||
finally:
|
||||
fd.close()
|
||||
|
||||
try:
|
||||
platform_utils.rename(lock, path)
|
||||
@ -86,6 +85,7 @@ def not_rev(r):
|
||||
def sq(r):
|
||||
return "'" + r.replace("'", "'\''") + "'"
|
||||
|
||||
|
||||
_project_hook_list = None
|
||||
|
||||
|
||||
@ -135,6 +135,7 @@ class DownloadedChange(object):
|
||||
|
||||
class ReviewableBranch(object):
|
||||
_commit_cache = None
|
||||
_base_exists = None
|
||||
|
||||
def __init__(self, project, branch, base):
|
||||
self.project = project
|
||||
@ -148,14 +149,19 @@ class ReviewableBranch(object):
|
||||
@property
|
||||
def commits(self):
|
||||
if self._commit_cache is None:
|
||||
self._commit_cache = self.project.bare_git.rev_list('--abbrev=8',
|
||||
'--abbrev-commit',
|
||||
'--pretty=oneline',
|
||||
'--reverse',
|
||||
'--date-order',
|
||||
not_rev(self.base),
|
||||
R_HEADS + self.name,
|
||||
'--')
|
||||
args = ('--abbrev=8', '--abbrev-commit', '--pretty=oneline', '--reverse',
|
||||
'--date-order', not_rev(self.base), R_HEADS + self.name, '--')
|
||||
try:
|
||||
self._commit_cache = self.project.bare_git.rev_list(*args)
|
||||
except GitError:
|
||||
# We weren't able to probe the commits for this branch. Was it tracking
|
||||
# a branch that no longer exists? If so, return no commits. Otherwise,
|
||||
# rethrow the error as we don't know what's going on.
|
||||
if self.base_exists:
|
||||
raise
|
||||
|
||||
self._commit_cache = []
|
||||
|
||||
return self._commit_cache
|
||||
|
||||
@property
|
||||
@ -174,6 +180,23 @@ class ReviewableBranch(object):
|
||||
R_HEADS + self.name,
|
||||
'--')
|
||||
|
||||
@property
|
||||
def base_exists(self):
|
||||
"""Whether the branch we're tracking exists.
|
||||
|
||||
Normally it should, but sometimes branches we track can get deleted.
|
||||
"""
|
||||
if self._base_exists is None:
|
||||
try:
|
||||
self.project.bare_git.rev_parse('--verify', not_rev(self.base))
|
||||
# If we're still here, the base branch exists.
|
||||
self._base_exists = True
|
||||
except GitError:
|
||||
# If we failed to verify, the base branch doesn't exist.
|
||||
self._base_exists = False
|
||||
|
||||
return self._base_exists
|
||||
|
||||
def UploadForReview(self, people,
|
||||
auto_topic=False,
|
||||
draft=False,
|
||||
@ -228,6 +251,7 @@ class DiffColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'diff')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
self.fail = self.printer('fail', fg='red')
|
||||
|
||||
|
||||
class _Annotation(object):
|
||||
@ -238,17 +262,70 @@ class _Annotation(object):
|
||||
self.keep = keep
|
||||
|
||||
|
||||
class _CopyFile(object):
|
||||
def _SafeExpandPath(base, subpath, skipfinal=False):
|
||||
"""Make sure |subpath| is completely safe under |base|.
|
||||
|
||||
def __init__(self, src, dest, abssrc, absdest):
|
||||
We make sure no intermediate symlinks are traversed, and that the final path
|
||||
is not a special file (e.g. not a socket or fifo).
|
||||
|
||||
NB: We rely on a number of paths already being filtered out while parsing the
|
||||
manifest. See the validation logic in manifest_xml.py for more details.
|
||||
"""
|
||||
components = subpath.split(os.path.sep)
|
||||
if skipfinal:
|
||||
# Whether the caller handles the final component itself.
|
||||
finalpart = components.pop()
|
||||
|
||||
path = base
|
||||
for part in components:
|
||||
if part in {'.', '..'}:
|
||||
raise ManifestInvalidPathError(
|
||||
'%s: "%s" not allowed in paths' % (subpath, part))
|
||||
|
||||
path = os.path.join(path, part)
|
||||
if platform_utils.islink(path):
|
||||
raise ManifestInvalidPathError(
|
||||
'%s: traversing symlinks not allow' % (path,))
|
||||
|
||||
if os.path.exists(path):
|
||||
if not os.path.isfile(path) and not platform_utils.isdir(path):
|
||||
raise ManifestInvalidPathError(
|
||||
'%s: only regular files & directories allowed' % (path,))
|
||||
|
||||
if skipfinal:
|
||||
path = os.path.join(path, finalpart)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
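`_SafeExpandPath` above walks `subpath` one component at a time so a `<copyfile>`/`<linkfile>` target can never hop through a symlink or `..`. A trimmed sketch of the same walk, using `os.path.islink` where repo uses its `platform_utils` wrapper and omitting the special-file and `skipfinal` handling:

```python
import os

def safe_expand_path(base, subpath):
    """Join |subpath| onto |base|, refusing '.'/'..' and symlinked components."""
    path = base
    for part in subpath.split(os.path.sep):
        if part in {'.', '..'}:
            raise ValueError('%s: "%s" not allowed in paths' % (subpath, part))
        path = os.path.join(path, part)
        if os.path.islink(path):
            raise ValueError('%s: traversing symlinks not allowed' % (path,))
    return path
```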
class _CopyFile(object):
|
||||
"""Container for <copyfile> manifest element."""
|
||||
|
||||
def __init__(self, git_worktree, src, topdir, dest):
|
||||
"""Register a <copyfile> request.
|
||||
|
||||
Args:
|
||||
git_worktree: Absolute path to the git project checkout.
|
||||
src: Relative path under |git_worktree| of file to read.
|
||||
topdir: Absolute path to the top of the repo client checkout.
|
||||
dest: Relative path under |topdir| of file to write.
|
||||
"""
|
||||
self.git_worktree = git_worktree
|
||||
self.topdir = topdir
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
self.abs_src = abssrc
|
||||
self.abs_dest = absdest
|
||||
|
||||
def _Copy(self):
|
||||
src = self.abs_src
|
||||
dest = self.abs_dest
|
||||
src = _SafeExpandPath(self.git_worktree, self.src)
|
||||
dest = _SafeExpandPath(self.topdir, self.dest)
|
||||
|
||||
if platform_utils.isdir(src):
|
||||
raise ManifestInvalidPathError(
|
||||
'%s: copying from directory not supported' % (self.src,))
|
||||
if platform_utils.isdir(dest):
|
||||
raise ManifestInvalidPathError(
|
||||
'%s: copying to directory not allowed' % (self.dest,))
|
||||
|
||||
# copy file if it does not exist or is out of date
|
||||
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
|
||||
try:
|
||||
@ -269,13 +346,21 @@ class _CopyFile(object):
|
||||
|
||||
|
||||
class _LinkFile(object):
|
||||
"""Container for <linkfile> manifest element."""
|
||||
|
||||
def __init__(self, git_worktree, src, dest, relsrc, absdest):
|
||||
def __init__(self, git_worktree, src, topdir, dest):
|
||||
"""Register a <linkfile> request.
|
||||
|
||||
Args:
|
||||
git_worktree: Absolute path to the git project checkout.
|
||||
src: Target of symlink relative to path under |git_worktree|.
|
||||
topdir: Absolute path to the top of the repo client checkout.
|
||||
dest: Relative path under |topdir| of symlink to create.
|
||||
"""
|
||||
self.git_worktree = git_worktree
|
||||
self.topdir = topdir
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
self.src_rel_to_dest = relsrc
|
||||
self.abs_dest = absdest
|
||||
|
||||
def __linkIt(self, relSrc, absDest):
|
||||
# link file if it does not exist or is out of date
|
||||
@ -293,35 +378,42 @@ class _LinkFile(object):
|
||||
_error('Cannot link file %s to %s', relSrc, absDest)
|
||||
|
||||
def _Link(self):
|
||||
"""Link the self.rel_src_to_dest and self.abs_dest. Handles wild cards
|
||||
on the src linking all of the files in the source in to the destination
|
||||
directory.
|
||||
"""Link the self.src & self.dest paths.
|
||||
|
||||
Handles wild cards on the src linking all of the files in the source in to
|
||||
the destination directory.
|
||||
"""
|
||||
# We use the absSrc to handle the situation where the current directory
|
||||
# is not the root of the repo
|
||||
absSrc = os.path.join(self.git_worktree, self.src)
|
||||
if os.path.exists(absSrc):
|
||||
# Entity exists so just a simple one to one link operation
|
||||
self.__linkIt(self.src_rel_to_dest, self.abs_dest)
|
||||
# Some people use src="." to create stable links to projects. Lets allow
|
||||
# that but reject all other uses of "." to keep things simple.
|
||||
if self.src == '.':
|
||||
src = self.git_worktree
|
||||
else:
|
||||
src = _SafeExpandPath(self.git_worktree, self.src)
|
||||
|
||||
if os.path.exists(src):
|
||||
# Entity exists so just a simple one to one link operation.
|
||||
dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
|
||||
# dest & src are absolute paths at this point. Make sure the target of
|
||||
# the symlink is relative in the context of the repo client checkout.
|
||||
relpath = os.path.relpath(src, os.path.dirname(dest))
|
||||
self.__linkIt(relpath, dest)
|
||||
else:
|
||||
dest = _SafeExpandPath(self.topdir, self.dest)
|
||||
# Entity doesn't exist assume there is a wild card
|
||||
absDestDir = self.abs_dest
|
||||
if os.path.exists(absDestDir) and not platform_utils.isdir(absDestDir):
|
||||
_error('Link error: src with wildcard, %s must be a directory',
|
||||
absDestDir)
|
||||
if os.path.exists(dest) and not platform_utils.isdir(dest):
|
||||
_error('Link error: src with wildcard, %s must be a directory', dest)
|
||||
else:
|
||||
absSrcFiles = glob.glob(absSrc)
|
||||
for absSrcFile in absSrcFiles:
|
||||
for absSrcFile in glob.glob(src):
|
||||
# Create a releative path from source dir to destination dir
|
||||
absSrcDir = os.path.dirname(absSrcFile)
|
||||
relSrcDir = os.path.relpath(absSrcDir, absDestDir)
|
||||
relSrcDir = os.path.relpath(absSrcDir, dest)
|
||||
|
||||
# Get the source file name
|
||||
srcFile = os.path.basename(absSrcFile)
|
||||
|
||||
# Now form the final full paths to srcFile. They will be
|
||||
# absolute for the desintaiton and relative for the srouce.
|
||||
absDest = os.path.join(absDestDir, srcFile)
|
||||
absDest = os.path.join(dest, srcFile)
|
||||
relSrc = os.path.join(relSrcDir, srcFile)
|
||||
self.__linkIt(relSrc, absDest)
|
||||
|
||||
@ -544,6 +636,105 @@ class RepoHook(object):
|
||||
prompt % (self._GetMustVerb(), self._script_fullpath),
|
||||
'Scripts have changed since %s was allowed.' % (self._hook_type,))
|
||||
|
||||
@staticmethod
|
||||
def _ExtractInterpFromShebang(data):
|
||||
"""Extract the interpreter used in the shebang.
|
||||
|
||||
Try to locate the interpreter the script is using (ignoring `env`).
|
||||
|
||||
Args:
|
||||
data: The file content of the script.
|
||||
|
||||
Returns:
|
||||
The basename of the main script interpreter, or None if a shebang is not
|
||||
used or could not be parsed out.
|
||||
"""
|
||||
firstline = data.splitlines()[:1]
|
||||
if not firstline:
|
||||
return None
|
||||
|
||||
# The format here can be tricky.
|
||||
shebang = firstline[0].strip()
|
||||
m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang)
|
||||
if not m:
|
||||
return None
|
||||
|
||||
# If the using `env`, find the target program.
|
||||
interp = m.group(1)
|
||||
if os.path.basename(interp) == 'env':
|
||||
interp = m.group(2)
|
||||
|
||||
return interp
|
||||
|
||||
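`_ExtractInterpFromShebang` above only needs the interpreter basename, including the `env` indirection case. A small worked example of the same regex, with a hypothetical function name:

```python
import os
import re

def interp_from_shebang(data):
    """Return the interpreter named by a script's shebang line, if any."""
    firstline = data.splitlines()[:1]
    if not firstline:
        return None
    m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', firstline[0].strip())
    if not m:
        return None
    interp = m.group(1)
    if os.path.basename(interp) == 'env':
        interp = m.group(2)  # e.g. '#!/usr/bin/env python3' -> 'python3'
    return interp

assert interp_from_shebang('#!/usr/bin/env python3\nprint(1)\n') == 'python3'
assert interp_from_shebang('#!/usr/bin/python2.7\n') == '/usr/bin/python2.7'
```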
def _ExecuteHookViaReexec(self, interp, context, **kwargs):
|
||||
"""Execute the hook script through |interp|.
|
||||
|
||||
Note: Support for this feature should be dropped ~Jun 2021.
|
||||
|
||||
Args:
|
||||
interp: The Python program to run.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# This logic needs to be kept in sync with _ExecuteHookViaImport below.
|
||||
script = """
|
||||
import json, os, sys
|
||||
path = '''%(path)s'''
|
||||
kwargs = json.loads('''%(kwargs)s''')
|
||||
context = json.loads('''%(context)s''')
|
||||
sys.path.insert(0, os.path.dirname(path))
|
||||
data = open(path).read()
|
||||
exec(compile(data, path, 'exec'), context)
|
||||
context['main'](**kwargs)
|
||||
""" % {
|
||||
'path': self._script_fullpath,
|
||||
'kwargs': json.dumps(kwargs),
|
||||
'context': json.dumps(context),
|
||||
}
|
||||
|
||||
# We pass the script via stdin to avoid OS argv limits. It also makes
|
||||
# unhandled exception tracebacks less verbose/confusing for users.
|
||||
cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())']
|
||||
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
|
||||
proc.communicate(input=script.encode('utf-8'))
|
||||
if proc.returncode:
|
||||
raise HookError('Failed to run %s hook.' % (self._hook_type,))
|
||||
|
||||
def _ExecuteHookViaImport(self, data, context, **kwargs):
|
||||
"""Execute the hook code in |data| directly.
|
||||
|
||||
Args:
|
||||
data: The code of the hook to execute.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# Exec, storing global context in the context dict. We catch exceptions
|
||||
# and convert to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
exec(compile(data, self._script_fullpath, 'exec'), context)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to import %s hook; see traceback above.' %
|
||||
(traceback.format_exc(), self._hook_type))
|
||||
|
||||
# Running the script should have defined a main() function.
|
||||
if 'main' not in context:
|
||||
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
|
||||
|
||||
# Call the main function in the hook. If the hook should cause the
|
||||
# build to fail, it will raise an Exception. We'll catch that convert
|
||||
# to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
context['main'](**kwargs)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to run main() for %s hook; see traceback '
|
||||
'above.' % (traceback.format_exc(), self._hook_type))
|
||||
|
||||
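The import path instead compiles the hook source, executes it into a context dict, and then calls the `main()` it defined. A self-contained sketch with a hypothetical hook body:

```python
# Sketch of the import-style execution above; the hook source here is a
# made-up example, not a real repo hook.
hook_src = "def main(project=None, **kwargs):\n    print('hook ran for', project)\n"
context = {'__file__': '<example-hook>'}
exec(compile(hook_src, '<example-hook>', 'exec'), context)
if 'main' not in context:
    raise RuntimeError('Missing main() in hook')
context['main'](project='demo')
```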
def _ExecuteHook(self, **kwargs):
|
||||
"""Actually execute the given hook.
|
||||
|
||||
@ -568,19 +759,8 @@ class RepoHook(object):
|
||||
# hooks can't import repo files.
|
||||
sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
|
||||
|
||||
# Exec, storing global context in the context dict. We catch exceptions
|
||||
# and convert to a HookError w/ just the failing traceback.
|
||||
# Initial global context for the hook to run within.
|
||||
context = {'__file__': self._script_fullpath}
|
||||
try:
|
||||
exec(compile(open(self._script_fullpath).read(),
|
||||
self._script_fullpath, 'exec'), context)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to import %s hook; see traceback above.' %
|
||||
(traceback.format_exc(), self._hook_type))
|
||||
|
||||
# Running the script should have defined a main() function.
|
||||
if 'main' not in context:
|
||||
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
|
||||
|
||||
# Add 'hook_should_take_kwargs' to the arguments to be passed to main.
|
||||
# We don't actually want hooks to define their main with this argument--
|
||||
@ -592,15 +772,31 @@ class RepoHook(object):
|
||||
kwargs = kwargs.copy()
|
||||
kwargs['hook_should_take_kwargs'] = True
|
||||
|
||||
# Call the main function in the hook. If the hook should cause the
|
||||
# build to fail, it will raise an Exception. We'll catch that convert
|
||||
# to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
context['main'](**kwargs)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to run main() for %s hook; see traceback '
|
||||
'above.' % (traceback.format_exc(),
|
||||
self._hook_type))
|
||||
# See what version of python the hook has been written against.
|
||||
data = open(self._script_fullpath).read()
|
||||
interp = self._ExtractInterpFromShebang(data)
|
||||
reexec = False
|
||||
if interp:
|
||||
prog = os.path.basename(interp)
|
||||
if prog.startswith('python2') and sys.version_info.major != 2:
|
||||
reexec = True
|
||||
elif prog.startswith('python3') and sys.version_info.major == 2:
|
||||
reexec = True
|
||||
|
||||
# Attempt to execute the hooks through the requested version of Python.
|
||||
if reexec:
|
||||
try:
|
||||
self._ExecuteHookViaReexec(interp, context, **kwargs)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
# We couldn't find the interpreter, so fallback to importing.
|
||||
reexec = False
|
||||
else:
|
||||
raise
|
||||
|
||||
# Run the hook by importing directly.
|
||||
if not reexec:
|
||||
self._ExecuteHookViaImport(data, context, **kwargs)
|
||||
finally:
|
||||
# Restore sys.path and CWD.
|
||||
sys.path = orig_syspath
|
||||
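The decision above boils down to comparing the hook's shebang against the major version of the running interpreter, and only re-execing when they disagree. A standalone sketch of that check (function name is illustrative):

```python
# Illustrative version of the re-exec decision in the hunk above.
import os
import sys

def needs_reexec(interp):
    if not interp:
        return False
    prog = os.path.basename(interp)
    if prog.startswith('python2') and sys.version_info.major != 2:
        return True
    if prog.startswith('python3') and sys.version_info.major == 2:
        return True
    return False

print(needs_reexec('python2.7'))         # True when running under Python 3
print(needs_reexec('/usr/bin/python3'))  # False when running under Python 3
```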
@ -759,10 +955,17 @@ class Project(object):
|
||||
@property
|
||||
def CurrentBranch(self):
|
||||
"""Obtain the name of the currently checked out branch.
|
||||
The branch name omits the 'refs/heads/' prefix.
|
||||
None is returned if the project is on a detached HEAD.
|
||||
|
||||
The branch name omits the 'refs/heads/' prefix.
|
||||
None is returned if the project is on a detached HEAD, or if the work_git is
|
||||
otherwise inaccessible (e.g. an incomplete sync).
|
||||
"""
|
||||
b = self.work_git.GetHead()
|
||||
try:
|
||||
b = self.work_git.GetHead()
|
||||
except NoManifestException:
|
||||
# If the local checkout is in a bad state, don't barf. Let the callers
|
||||
# process this like the head is unreadable.
|
||||
return None
|
||||
if b.startswith(R_HEADS):
|
||||
return b[len(R_HEADS):]
|
||||
return None
|
||||
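A minimal sketch of the guarded head read above; the exception class is a stand-in for the repo one and `get_head` is a hypothetical callable, the point being that an unreadable HEAD is treated like a detached HEAD:

```python
# Sketch only: unreadable checkout state degrades to "no current branch".
class NoManifestException(Exception):
    pass

def current_branch(get_head, r_heads='refs/heads/'):
    try:
        b = get_head()
    except NoManifestException:
        return None
    return b[len(r_heads):] if b.startswith(r_heads) else None

print(current_branch(lambda: 'refs/heads/main'))  # -> 'main'
print(current_branch(lambda: 'f3a1c0ffee'))        # -> None (detached HEAD)
```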
@ -931,7 +1134,7 @@ class Project(object):
|
||||
"""Prints the status of the repository to stdout.
|
||||
|
||||
Args:
|
||||
output: If specified, redirect the output to this object.
|
||||
output_redir: If specified, redirect the output to this object.
|
||||
quiet: If True then only print the project name. Do not print
|
||||
the modified files, branch name, etc.
|
||||
"""
|
||||
@ -1030,23 +1233,31 @@ class Project(object):
|
||||
cmd.append('--src-prefix=a/%s/' % self.relpath)
|
||||
cmd.append('--dst-prefix=b/%s/' % self.relpath)
|
||||
cmd.append('--')
|
||||
p = GitCommand(self,
|
||||
cmd,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
try:
|
||||
p = GitCommand(self,
|
||||
cmd,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
except GitError as e:
|
||||
out.nl()
|
||||
out.project('project %s/' % self.relpath)
|
||||
out.nl()
|
||||
out.fail('%s', str(e))
|
||||
out.nl()
|
||||
return False
|
||||
has_diff = False
|
||||
for line in p.process.stdout:
|
||||
if not hasattr(line, 'encode'):
|
||||
line = line.decode()
|
||||
if not has_diff:
|
||||
out.nl()
|
||||
out.project('project %s/' % self.relpath)
|
||||
out.nl()
|
||||
has_diff = True
|
||||
print(line[:-1])
|
||||
p.Wait()
|
||||
|
||||
return p.Wait() == 0
|
||||
|
||||
# Publish / Upload ##
|
||||
|
||||
def WasPublished(self, branch, all_refs=None):
|
||||
"""Was the branch published (uploaded) for code review?
|
||||
If so, returns the SHA-1 hash of the last published
|
||||
@ -1198,9 +1409,7 @@ class Project(object):
|
||||
R_HEADS + branch.name,
|
||||
message=msg)
|
||||
|
||||
|
||||
# Sync ##
|
||||
|
||||
def _ExtractArchive(self, tarpath, path=None):
|
||||
"""Extract the given tar on its current location
|
||||
|
||||
@ -1269,12 +1478,9 @@ class Project(object):
|
||||
if is_new:
|
||||
alt = os.path.join(self.gitdir, 'objects/info/alternates')
|
||||
try:
|
||||
fd = open(alt)
|
||||
try:
|
||||
with open(alt) as fd:
|
||||
# This works for both absolute and relative alternate directories.
|
||||
alt_dir = os.path.join(self.objdir, 'objects', fd.readline().rstrip())
|
||||
finally:
|
||||
fd.close()
|
||||
except IOError:
|
||||
alt_dir = None
|
||||
else:
|
||||
@ -1381,6 +1587,13 @@ class Project(object):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
Network access is not required.
|
||||
"""
|
||||
if not os.path.exists(self.gitdir):
|
||||
syncbuf.fail(self,
|
||||
'Cannot checkout %s due to missing network sync; Run '
|
||||
'`repo sync -n %s` first.' %
|
||||
(self.name, self.name))
|
||||
return
|
||||
|
||||
self._InitWorkTree(force_sync=force_sync, submodules=submodules)
|
||||
all_refs = self.bare_ref.all
|
||||
self.CleanPublishedCache(all_refs)
|
||||
@ -1461,7 +1674,16 @@ class Project(object):
|
||||
return
|
||||
|
||||
upstream_gain = self._revlist(not_rev(HEAD), revid)
|
||||
pub = self.WasPublished(branch.name, all_refs)
|
||||
|
||||
# See if we can perform a fast forward merge. This can happen if our
|
||||
# branch isn't in the exact same state as we last published.
|
||||
try:
|
||||
self.work_git.merge_base('--is-ancestor', HEAD, revid)
|
||||
# Skip the published logic.
|
||||
pub = False
|
||||
except GitError:
|
||||
pub = self.WasPublished(branch.name, all_refs)
|
||||
|
||||
if pub:
|
||||
not_merged = self._revlist(not_rev(revid), pub)
|
||||
if not_merged:
|
||||
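The fast-forward test relies on `git merge-base --is-ancestor`, which exits 0 exactly when the first commit is an ancestor of the second. A hedged sketch of invoking it directly (repository path and refs are illustrative):

```python
# Sketch of the ancestry check used above; exit status 0 means "is ancestor".
import subprocess

def is_ancestor(repo_dir, ancestor, descendant):
    ret = subprocess.call(
        ['git', '-C', repo_dir, 'merge-base', '--is-ancestor', ancestor, descendant])
    return ret == 0

# e.g. is_ancestor('.', 'HEAD', 'origin/master') -> True when HEAD can fast-forward
```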
@ -1490,7 +1712,7 @@ class Project(object):
|
||||
last_mine = None
|
||||
cnt_mine = 0
|
||||
for commit in local_changes:
|
||||
commit_id, committer_email = commit.decode('utf-8').split(' ', 1)
|
||||
commit_id, committer_email = commit.split(' ', 1)
|
||||
if committer_email == self.UserEmail:
|
||||
last_mine = commit_id
|
||||
cnt_mine += 1
|
||||
@ -1555,18 +1777,25 @@ class Project(object):
|
||||
if submodules:
|
||||
syncbuf.later1(self, _dosubmodules)
|
||||
|
||||
def AddCopyFile(self, src, dest, absdest):
|
||||
# dest should already be an absolute path, but src is project relative
|
||||
# make src an absolute path
|
||||
abssrc = os.path.join(self.worktree, src)
|
||||
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
|
||||
def AddCopyFile(self, src, dest, topdir):
|
||||
"""Mark |src| for copying to |dest| (relative to |topdir|).
|
||||
|
||||
def AddLinkFile(self, src, dest, absdest):
|
||||
# dest should already be an absolute path, but src is project relative
|
||||
# make src relative path to dest
|
||||
absdestdir = os.path.dirname(absdest)
|
||||
relsrc = os.path.relpath(os.path.join(self.worktree, src), absdestdir)
|
||||
self.linkfiles.append(_LinkFile(self.worktree, src, dest, relsrc, absdest))
|
||||
No filesystem changes occur here. Actual copying happens later on.
|
||||
|
||||
Paths should have basic validation run on them before being queued.
|
||||
Further checking will be handled when the actual copy happens.
|
||||
"""
|
||||
self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest))
|
||||
|
||||
def AddLinkFile(self, src, dest, topdir):
|
||||
"""Mark |dest| to create a symlink (relative to |topdir|) pointing to |src|.
|
||||
|
||||
No filesystem changes occur here. Actual linking happens later on.
|
||||
|
||||
Paths should have basic validation run on them before being queued.
|
||||
Further checking will be handled when the actual link happens.
|
||||
"""
|
||||
self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest))
|
||||
|
||||
def AddAnnotation(self, name, value, keep):
|
||||
self.annotations.append(_Annotation(name, value, keep))
|
||||
@ -1587,10 +1816,20 @@ class Project(object):
|
||||
patch_id,
|
||||
self.bare_git.rev_parse('FETCH_HEAD'))
|
||||
|
||||
|
||||
# Branch Management ##
|
||||
def GetHeadPath(self):
|
||||
"""Return the full path to the HEAD ref."""
|
||||
dotgit = os.path.join(self.worktree, '.git')
|
||||
if os.path.isfile(dotgit):
|
||||
# Git worktrees use a "gitdir:" syntax to point to the scratch space.
|
||||
with open(dotgit) as fp:
|
||||
setting = fp.read()
|
||||
assert setting.startswith('gitdir:')
|
||||
gitdir = setting.split(':', 1)[1].strip()
|
||||
dotgit = os.path.join(self.worktree, gitdir)
|
||||
return os.path.join(dotgit, HEAD)
|
||||
|
||||
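The new `GetHeadPath()` handles the `.git` *file* that `git worktree` checkouts use, which contains a `gitdir: <path>` pointer instead of being a directory. A standalone sketch of the same resolution (worktree path assumed):

```python
# Sketch mirroring GetHeadPath() above: follow a "gitdir:" pointer if .git is a file.
import os

def head_path(worktree, head='HEAD'):
    dotgit = os.path.join(worktree, '.git')
    if os.path.isfile(dotgit):
        with open(dotgit) as fp:
            setting = fp.read()
        assert setting.startswith('gitdir:')
        gitdir = setting.split(':', 1)[1].strip()
        dotgit = os.path.join(worktree, gitdir)
    return os.path.join(dotgit, head)
```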
def StartBranch(self, name, branch_merge=''):
|
||||
def StartBranch(self, name, branch_merge='', revision=None):
|
||||
"""Create a new branch off the manifest's revision.
|
||||
"""
|
||||
if not branch_merge:
|
||||
@ -1611,7 +1850,11 @@ class Project(object):
|
||||
branch.merge = branch_merge
|
||||
if not branch.merge.startswith('refs/') and not ID_RE.match(branch_merge):
|
||||
branch.merge = R_HEADS + branch_merge
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
|
||||
if revision is None:
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
else:
|
||||
revid = self.work_git.rev_parse(revision)
|
||||
|
||||
if head.startswith(R_HEADS):
|
||||
try:
|
||||
@ -1625,8 +1868,7 @@ class Project(object):
|
||||
except OSError:
|
||||
pass
|
||||
_lwrite(ref, '%s\n' % revid)
|
||||
_lwrite(os.path.join(self.worktree, '.git', HEAD),
|
||||
'ref: %s%s\n' % (R_HEADS, name))
|
||||
_lwrite(self.GetHeadPath(), 'ref: %s%s\n' % (R_HEADS, name))
|
||||
branch.Save()
|
||||
return True
|
||||
|
||||
@ -1673,8 +1915,7 @@ class Project(object):
|
||||
# Same revision; just update HEAD to point to the new
|
||||
# target branch, but otherwise take no other action.
|
||||
#
|
||||
_lwrite(os.path.join(self.worktree, '.git', HEAD),
|
||||
'ref: %s%s\n' % (R_HEADS, name))
|
||||
_lwrite(self.GetHeadPath(), 'ref: %s%s\n' % (R_HEADS, name))
|
||||
return True
|
||||
|
||||
return GitCommand(self,
|
||||
@ -1707,8 +1948,7 @@ class Project(object):
|
||||
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
if head == revid:
|
||||
_lwrite(os.path.join(self.worktree, '.git', HEAD),
|
||||
'%s\n' % revid)
|
||||
_lwrite(self.GetHeadPath(), '%s\n' % revid)
|
||||
else:
|
||||
self._Checkout(revid, quiet=True)
|
||||
|
||||
@ -1774,9 +2014,7 @@ class Project(object):
|
||||
kept.append(ReviewableBranch(self, branch, base))
|
||||
return kept
|
||||
|
||||
|
||||
# Submodule Management ##
|
||||
|
||||
def GetRegisteredSubprojects(self):
|
||||
result = []
|
||||
|
||||
@ -1828,7 +2066,7 @@ class Project(object):
|
||||
gitmodules_lines = []
|
||||
fd, temp_gitmodules_path = tempfile.mkstemp()
|
||||
try:
|
||||
os.write(fd, p.stdout)
|
||||
os.write(fd, p.stdout.encode('utf-8'))
|
||||
os.close(fd)
|
||||
cmd = ['config', '--file', temp_gitmodules_path, '--list']
|
||||
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
|
||||
@ -1927,7 +2165,6 @@ class Project(object):
|
||||
result.extend(subproject.GetDerivedSubprojects())
|
||||
return result
|
||||
|
||||
|
||||
# Direct Git Commands ##
|
||||
def _CheckForImmutableRevision(self):
|
||||
try:
|
||||
@ -2074,13 +2311,6 @@ class Project(object):
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(name)
|
||||
|
||||
# If using depth then we should not get all the tags since they may
|
||||
# be outside of the depth.
|
||||
if no_tags or depth:
|
||||
cmd.append('--no-tags')
|
||||
else:
|
||||
cmd.append('--tags')
|
||||
|
||||
if force_sync:
|
||||
cmd.append('--force')
|
||||
|
||||
@ -2098,19 +2328,36 @@ class Project(object):
|
||||
spec.append('tag')
|
||||
spec.append(tag_name)
|
||||
|
||||
if not self.manifest.IsMirror:
|
||||
if self.manifest.IsMirror and not current_branch_only:
|
||||
branch = None
|
||||
else:
|
||||
branch = self.revisionExpr
|
||||
if is_sha1 and depth and git_require((1, 8, 3)):
|
||||
# Shallow checkout of a specific commit, fetch from that commit and not
|
||||
# the heads only as the commit might be deeper in the history.
|
||||
spec.append(branch)
|
||||
else:
|
||||
if is_sha1:
|
||||
branch = self.upstream
|
||||
if branch is not None and branch.strip():
|
||||
if not branch.startswith('refs/'):
|
||||
branch = R_HEADS + branch
|
||||
spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
|
||||
if (not self.manifest.IsMirror and is_sha1 and depth
|
||||
and git_require((1, 8, 3))):
|
||||
# Shallow checkout of a specific commit, fetch from that commit and not
|
||||
# the heads only as the commit might be deeper in the history.
|
||||
spec.append(branch)
|
||||
else:
|
||||
if is_sha1:
|
||||
branch = self.upstream
|
||||
if branch is not None and branch.strip():
|
||||
if not branch.startswith('refs/'):
|
||||
branch = R_HEADS + branch
|
||||
spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
|
||||
|
||||
# If mirroring repo and we cannot deduce the tag or branch to fetch, fetch
|
||||
# whole repo.
|
||||
if self.manifest.IsMirror and not spec:
|
||||
spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
|
||||
|
||||
# If using depth then we should not get all the tags since they may
|
||||
# be outside of the depth.
|
||||
if no_tags or depth:
|
||||
cmd.append('--no-tags')
|
||||
else:
|
||||
cmd.append('--tags')
|
||||
spec.append(str((u'+refs/tags/*:') + remote.ToLocal('refs/tags/*')))
|
||||
|
||||
cmd.extend(spec)
|
||||
|
||||
ok = False
|
||||
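The fetch logic above ultimately builds refspecs that map remote heads and tags into the local tracking namespace. A simplified sketch of that construction; the remote name and branch are illustrative, and `to_local` stands in for `remote.ToLocal`:

```python
# Illustrative refspec construction, not the repo Remote class.
remote = 'origin'

def to_local(ref, remote=remote):
    return ref.replace('refs/heads/', 'refs/remotes/%s/' % remote, 1)

spec = []
branch = 'refs/heads/master'
spec.append('+%s:%s' % (branch, to_local(branch)))
spec.append('+refs/tags/*:refs/tags/*')
print(spec)  # ['+refs/heads/master:refs/remotes/origin/master', '+refs/tags/*:refs/tags/*']
```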
@ -2227,7 +2474,7 @@ class Project(object):
|
||||
platform_utils.remove(tmpPath)
|
||||
with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
|
||||
if cookiefile:
|
||||
cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
|
||||
cmd += ['--cookie', cookiefile]
|
||||
if proxy:
|
||||
cmd += ['--proxy', proxy]
|
||||
elif 'http_proxy' in os.environ and 'darwin' == sys.platform:
|
||||
@ -2301,10 +2548,7 @@ class Project(object):
|
||||
cmd = ['ls-remote', self.remote.name, refs]
|
||||
p = GitCommand(self, cmd, capture_stdout=True)
|
||||
if p.Wait() == 0:
|
||||
if hasattr(p.stdout, 'decode'):
|
||||
return p.stdout.decode('utf-8')
|
||||
else:
|
||||
return p.stdout
|
||||
return p.stdout
|
||||
return None
|
||||
|
||||
def _Revert(self, rev):
|
||||
@ -2375,7 +2619,7 @@ class Project(object):
|
||||
(self.worktree)):
|
||||
platform_utils.rmtree(platform_utils.realpath(self.worktree))
|
||||
return self._InitGitDir(mirror_git=mirror_git, force_sync=False)
|
||||
except:
|
||||
except Exception:
|
||||
raise e
|
||||
raise e
|
||||
|
||||
@ -2508,9 +2752,31 @@ class Project(object):
|
||||
symlink_dirs += self.working_tree_dirs
|
||||
to_symlink = symlink_files + symlink_dirs
|
||||
for name in set(to_symlink):
|
||||
dst = platform_utils.realpath(os.path.join(destdir, name))
|
||||
# Try to self-heal a bit in simple cases.
|
||||
dst_path = os.path.join(destdir, name)
|
||||
src_path = os.path.join(srcdir, name)
|
||||
|
||||
if name in self.working_tree_dirs:
|
||||
# If the dir is missing under .repo/projects/, create it.
|
||||
if not os.path.exists(src_path):
|
||||
os.makedirs(src_path)
|
||||
|
||||
elif name in self.working_tree_files:
|
||||
# If it's a file under the checkout .git/ and the .repo/projects/ has
|
||||
# nothing, move the file under the .repo/projects/ tree.
|
||||
if not os.path.exists(src_path) and os.path.isfile(dst_path):
|
||||
platform_utils.rename(dst_path, src_path)
|
||||
|
||||
# If the path exists under the .repo/projects/ and there's no symlink
|
||||
# under the checkout .git/, recreate the symlink.
|
||||
if name in self.working_tree_dirs or name in self.working_tree_files:
|
||||
if os.path.exists(src_path) and not os.path.exists(dst_path):
|
||||
platform_utils.symlink(
|
||||
os.path.relpath(src_path, os.path.dirname(dst_path)), dst_path)
|
||||
|
||||
dst = platform_utils.realpath(dst_path)
|
||||
if os.path.lexists(dst):
|
||||
src = platform_utils.realpath(os.path.join(srcdir, name))
|
||||
src = platform_utils.realpath(src_path)
|
||||
# Fail if the links are pointing to the wrong place
|
||||
if src != dst:
|
||||
_error('%s is different in %s vs %s', name, destdir, srcdir)
|
||||
@ -2579,41 +2845,45 @@ class Project(object):
|
||||
raise
|
||||
|
||||
def _InitWorkTree(self, force_sync=False, submodules=False):
|
||||
dotgit = os.path.join(self.worktree, '.git')
|
||||
init_dotgit = not os.path.exists(dotgit)
|
||||
realdotgit = os.path.join(self.worktree, '.git')
|
||||
tmpdotgit = realdotgit + '.tmp'
|
||||
init_dotgit = not os.path.exists(realdotgit)
|
||||
if init_dotgit:
|
||||
dotgit = tmpdotgit
|
||||
platform_utils.rmtree(tmpdotgit, ignore_errors=True)
|
||||
os.makedirs(tmpdotgit)
|
||||
self._ReferenceGitDir(self.gitdir, tmpdotgit, share_refs=True,
|
||||
copy_all=False)
|
||||
else:
|
||||
dotgit = realdotgit
|
||||
|
||||
try:
|
||||
if init_dotgit:
|
||||
os.makedirs(dotgit)
|
||||
self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
|
||||
copy_all=False)
|
||||
self._CheckDirReference(self.gitdir, dotgit, share_refs=True)
|
||||
except GitError as e:
|
||||
if force_sync and not init_dotgit:
|
||||
try:
|
||||
platform_utils.rmtree(dotgit)
|
||||
return self._InitWorkTree(force_sync=False, submodules=submodules)
|
||||
except Exception:
|
||||
raise e
|
||||
raise e
|
||||
|
||||
try:
|
||||
self._CheckDirReference(self.gitdir, dotgit, share_refs=True)
|
||||
except GitError as e:
|
||||
if force_sync:
|
||||
try:
|
||||
platform_utils.rmtree(dotgit)
|
||||
return self._InitWorkTree(force_sync=False, submodules=submodules)
|
||||
except:
|
||||
raise e
|
||||
raise e
|
||||
if init_dotgit:
|
||||
_lwrite(os.path.join(tmpdotgit, HEAD), '%s\n' % self.GetRevisionId())
|
||||
|
||||
if init_dotgit:
|
||||
_lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
|
||||
# Now that the .git dir is fully set up, move it to its final home.
|
||||
platform_utils.rename(tmpdotgit, realdotgit)
|
||||
|
||||
cmd = ['read-tree', '--reset', '-u']
|
||||
cmd.append('-v')
|
||||
cmd.append(HEAD)
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError("cannot initialize work tree for " + self.name)
|
||||
# Finish checking out the worktree.
|
||||
cmd = ['read-tree', '--reset', '-u']
|
||||
cmd.append('-v')
|
||||
cmd.append(HEAD)
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError('Cannot initialize work tree for ' + self.name)
|
||||
|
||||
if submodules:
|
||||
self._SyncSubmodules(quiet=True)
|
||||
self._CopyAndLinkFiles()
|
||||
except Exception:
|
||||
if init_dotgit:
|
||||
platform_utils.rmtree(dotgit)
|
||||
raise
|
||||
if submodules:
|
||||
self._SyncSubmodules(quiet=True)
|
||||
self._CopyAndLinkFiles()
|
||||
|
||||
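The rewritten `_InitWorkTree()` builds the new `.git` in a `.tmp` scratch directory and only renames it into place once everything has succeeded, so an interrupted init cannot leave a half-populated `.git` behind. A generic sketch of that pattern (names are illustrative):

```python
# "Build in a temp dir, publish with one rename" pattern, sketched generically.
import os
import shutil

def init_atomically(final_dir, populate):
    tmp = final_dir + '.tmp'
    shutil.rmtree(tmp, ignore_errors=True)
    os.makedirs(tmp)
    try:
        populate(tmp)              # write everything into the scratch directory
        os.rename(tmp, final_dir)  # publish it in a single step
    except Exception:
        shutil.rmtree(tmp, ignore_errors=True)
        raise
```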
def _get_symlink_error_message(self):
|
||||
if platform_utils.isWindows():
|
||||
@ -2715,6 +2985,8 @@ class Project(object):
|
||||
capture_stderr=True)
|
||||
try:
|
||||
out = p.process.stdout.read()
|
||||
if not hasattr(out, 'encode'):
|
||||
out = out.decode()
|
||||
r = {}
|
||||
if out:
|
||||
out = iter(out[:-1].split('\0'))
|
||||
@ -2758,15 +3030,12 @@ class Project(object):
|
||||
if self._bare:
|
||||
path = os.path.join(self._project.gitdir, HEAD)
|
||||
else:
|
||||
path = os.path.join(self._project.worktree, '.git', HEAD)
|
||||
path = self._project.GetHeadPath()
|
||||
try:
|
||||
fd = open(path)
|
||||
with open(path) as fd:
|
||||
line = fd.readline()
|
||||
except IOError as e:
|
||||
raise NoManifestException(path, str(e))
|
||||
try:
|
||||
line = fd.readline()
|
||||
finally:
|
||||
fd.close()
|
||||
try:
|
||||
line = line.decode()
|
||||
except AttributeError:
|
||||
@ -2856,9 +3125,6 @@ class Project(object):
|
||||
raise TypeError('%s() got an unexpected keyword argument %r'
|
||||
% (name, k))
|
||||
if config is not None:
|
||||
if not git_require((1, 7, 2)):
|
||||
raise ValueError('cannot set config on command line for %s()'
|
||||
% name)
|
||||
for k, v in config.items():
|
||||
cmdv.append('-c')
|
||||
cmdv.append('%s=%s' % (k, v))
|
||||
@ -2874,10 +3140,6 @@ class Project(object):
|
||||
raise GitError('%s %s: %s' %
|
||||
(self._project.name, name, p.stderr))
|
||||
r = p.stdout
|
||||
try:
|
||||
r = r.decode('utf-8')
|
||||
except AttributeError:
|
||||
pass
|
||||
if r.endswith('\n') and r.index('\n') == len(r) - 1:
|
||||
return r[:-1]
|
||||
return r
|
||||
@ -3005,6 +3267,11 @@ class SyncBuffer(object):
|
||||
return True
|
||||
|
||||
def _PrintMessages(self):
|
||||
if self._messages or self._failures:
|
||||
if os.isatty(2):
|
||||
self.out.write(progress.CSI_ERASE_LINE)
|
||||
self.out.write('\r')
|
||||
|
||||
for m in self._messages:
|
||||
m.Print(self)
|
||||
for m in self._failures:
|
||||
|
@ -16,5 +16,6 @@
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def is_python3():
|
||||
return sys.version_info[0] == 3
|
||||
|
386
repo
@ -10,13 +10,94 @@ copy of repo in the checkout.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def exec_command(cmd):
|
||||
"""Execute |cmd| or return None on failure."""
|
||||
try:
|
||||
if platform.system() == 'Windows':
|
||||
ret = subprocess.call(cmd)
|
||||
sys.exit(ret)
|
||||
else:
|
||||
os.execvp(cmd[0], cmd)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
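`exec_command()` papers over the fact that Windows has no `exec` semantics: there it runs a child process and exits with its return code, while on POSIX it replaces the current process. A hedged standalone sketch of the same split:

```python
# Sketch of the platform split above (helper name is illustrative).
import os
import platform
import subprocess
import sys

def run_and_replace(cmd):
    if platform.system() == 'Windows':
        sys.exit(subprocess.call(cmd))   # no exec on Windows; mirror the exit code
    os.execvp(cmd[0], cmd)               # POSIX: replace this process

# run_and_replace(['git', '--version'])
```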
def check_python_version():
|
||||
"""Make sure the active Python version is recent enough."""
|
||||
def reexec(prog):
|
||||
exec_command([prog] + sys.argv)
|
||||
|
||||
MIN_PYTHON_VERSION = (3, 6)
|
||||
|
||||
ver = sys.version_info
|
||||
major = ver.major
|
||||
minor = ver.minor
|
||||
|
||||
# Abort on very old Python 2 versions.
|
||||
if (major, minor) < (2, 7):
|
||||
print('repo: error: Your Python version is too old. '
|
||||
'Please use Python {}.{} or newer instead.'.format(
|
||||
*MIN_PYTHON_VERSION), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Try to re-exec the version specific Python 3 if needed.
|
||||
if (major, minor) < MIN_PYTHON_VERSION:
|
||||
# Python makes releases ~once a year, so try our min version +10 to help
|
||||
# bridge the gap. This is the fallback anyway, so perf isn't critical.
|
||||
min_major, min_minor = MIN_PYTHON_VERSION
|
||||
for inc in range(0, 10):
|
||||
reexec('python{}.{}'.format(min_major, min_minor + inc))
|
||||
|
||||
# Try the generic Python 3 wrapper, but only if it's new enough. We don't
|
||||
# want to go from (still supported) Python 2.7 to (unsupported) Python 3.5.
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
['python3', '-c', 'import sys; '
|
||||
'print(sys.version_info.major, sys.version_info.minor)'],
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(output, _) = proc.communicate()
|
||||
python3_ver = tuple(int(x) for x in output.decode('utf-8').split())
|
||||
except (OSError, subprocess.CalledProcessError):
|
||||
python3_ver = None
|
||||
|
||||
# The python3 version looks like it's new enough, so give it a try.
|
||||
if python3_ver and python3_ver >= MIN_PYTHON_VERSION:
|
||||
reexec('python3')
|
||||
|
||||
# We're still here, so diagnose things for the user.
|
||||
if major < 3:
|
||||
print('repo: warning: Python 2 is no longer supported; '
|
||||
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION),
|
||||
file=sys.stderr)
|
||||
else:
|
||||
print('repo: error: Python 3 version is too old; '
|
||||
'Please use Python {}.{} or newer.'.format(*MIN_PYTHON_VERSION),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# TODO(vapier): Enable this on Windows once we have Python 3 issues fixed.
|
||||
if platform.system() != 'Windows':
|
||||
check_python_version()
|
||||
|
||||
|
||||
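The fallback probe above shells out to whatever `python3` is on `PATH` and parses its `(major, minor)` before deciding whether to re-exec under it, so an old Python 3 is never preferred over a still-supported 2.7. The same probe as a standalone sketch:

```python
# Standalone version of the python3 version probe shown above.
import subprocess

def python3_version():
    try:
        out = subprocess.check_output(
            ['python3', '-c',
             'import sys; print(sys.version_info.major, sys.version_info.minor)'])
        return tuple(int(x) for x in out.decode('utf-8').split())
    except (OSError, subprocess.CalledProcessError):
        return None

print(python3_version())  # e.g. (3, 8), or None if python3 is missing
```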
# repo default configuration
|
||||
#
|
||||
import os
|
||||
REPO_URL = os.environ.get('REPO_URL', None)
|
||||
if not REPO_URL:
|
||||
REPO_URL = 'https://gerrit.googlesource.com/git-repo'
|
||||
REPO_REV = 'stable'
|
||||
REPO_REV = os.environ.get('REPO_REV')
|
||||
if not REPO_REV:
|
||||
REPO_REV = 'stable'
|
||||
|
||||
# Copyright (C) 2008 Google Inc.
|
||||
#
|
||||
@ -33,10 +114,10 @@ REPO_REV = 'stable'
|
||||
# limitations under the License.
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 25)
|
||||
VERSION = (2, 3)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1, 2)
|
||||
KEYRING_VERSION = (2, 0)
|
||||
|
||||
# Each individual key entry is created by using:
|
||||
# gpg --armor --export keyid
|
||||
@ -82,48 +163,20 @@ HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W
|
||||
zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp
|
||||
TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
|
||||
=CMiZ
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
Conley Owens <cco3@android.com>
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: GnuPG v1.4.11 (GNU/Linux)
|
||||
|
||||
mQENBFHRvc8BCADFg45Xx/y6QDC+T7Y/gGc7vx0ww7qfOwIKlAZ9xG3qKunMxo+S
|
||||
hPCnzEl3cq+6I1Ww/ndop/HB3N3toPXRCoN8Vs4/Hc7by+SnaLFnacrm+tV5/OgT
|
||||
V37Lzt8lhay1Kl+YfpFwHYYpIEBLFV9knyfRXS/428W2qhdzYfvB15/AasRmwmor
|
||||
py4NIzSs8UD/SPr1ihqNCdZM76+MQyN5HMYXW/ALZXUFG0pwluHFA7hrfPG74i8C
|
||||
zMiP7qvMWIl/r/jtzHioH1dRKgbod+LZsrDJ8mBaqsZaDmNJMhss9g76XvfMyLra
|
||||
9DI9/iFuBpGzeqBv0hwOGQspLRrEoyTeR6n1ABEBAAG0H0NvbmxleSBPd2VucyA8
|
||||
Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlHRvc8CGwMGCwkIBwMCBhUIAgkK
|
||||
CwQWAgMBAh4BAheAAAoJEGe35EhpKzgsP6AIAJKJmNtn4l7hkYHKHFSo3egb6RjQ
|
||||
zEIP3MFTcu8HFX1kF1ZFbrp7xqurLaE53kEkKuAAvjJDAgI8mcZHP1JyplubqjQA
|
||||
xvv84gK+OGP3Xk+QK1ZjUQSbjOpjEiSZpRhWcHci3dgOUH4blJfByHw25hlgHowd
|
||||
a/2PrNKZVcJ92YienaxxGjcXEUcd0uYEG2+rwllQigFcnMFDhr9B71MfalRHjFKE
|
||||
fmdoypqLrri61YBc59P88Rw2/WUpTQjgNubSqa3A2+CKdaRyaRw+2fdF4TdR0h8W
|
||||
zbg+lbaPtJHsV+3mJC7fq26MiJDRJa5ZztpMn8su20gbLgi2ShBOaHAYDDi5AQ0E
|
||||
UdG9zwEIAMoOBq+QLNozAhxOOl5GL3StTStGRgPRXINfmViTsihrqGCWBBUfXlUE
|
||||
OytC0mYcrDUQev/8ToVoyqw+iGSwDkcSXkrEUCKFtHV/GECWtk1keyHgR10YKI1R
|
||||
mquSXoubWGqPeG1PAI74XWaRx8UrL8uCXUtmD8Q5J7mDjKR5NpxaXrwlA0bKsf2E
|
||||
Gp9tu1kKauuToZhWHMRMqYSOGikQJwWSFYKT1KdNcOXLQF6+bfoJ6sjVYdwfmNQL
|
||||
Ixn8QVhoTDedcqClSWB17VDEFDFa7MmqXZz2qtM3X1R/MUMHqPtegQzBGNhRdnI2
|
||||
V45+1Nnx/uuCxDbeI4RbHzujnxDiq70AEQEAAYkBHwQYAQIACQUCUdG9zwIbDAAK
|
||||
CRBnt+RIaSs4LNVeB/0Y2pZ8I7gAAcEM0Xw8drr4omg2fUoK1J33ozlA/RxeA/lJ
|
||||
I3KnyCDTpXuIeBKPGkdL8uMATC9Z8DnBBajRlftNDVZS3Hz4G09G9QpMojvJkFJV
|
||||
By+01Flw/X+eeN8NpqSuLV4W+AjEO8at/VvgKr1AFvBRdZ7GkpI1o6DgPe7ZqX+1
|
||||
dzQZt3e13W0rVBb/bUgx9iSLoeWP3aq/k+/GRGOR+S6F6BBSl0SQ2EF2+dIywb1x
|
||||
JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv
|
||||
+H/g3HsL2LOB9uoIm8/5p2TTU5ttYCXMHhQZ81AY
|
||||
=AUp4
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
"""
|
||||
|
||||
GIT = 'git' # our git command
|
||||
# NB: The version of git that the repo launcher requires may be much older than
|
||||
# the version of git that the main repo source tree requires. Keeping this at
|
||||
# an older version also makes it easier for users to upgrade/rollback as needed.
|
||||
#
|
||||
# git-1.7 is in (EOL) Ubuntu Precise.
|
||||
MIN_GIT_VERSION = (1, 7, 2) # minimum supported git version
|
||||
repodir = '.repo' # name of repo's private directory
|
||||
S_repo = 'repo' # special repo repository
|
||||
S_manifests = 'manifests' # special manifest repository
|
||||
REPO_MAIN = S_repo + '/main.py' # main script
|
||||
MIN_PYTHON_VERSION = (2, 7) # minimum supported python version
|
||||
GITC_CONFIG_FILE = '/gitc/.config'
|
||||
GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
|
||||
|
||||
@ -131,12 +184,9 @@ GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
|
||||
import collections
|
||||
import errno
|
||||
import optparse
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] == 3:
|
||||
import urllib.request
|
||||
@ -149,105 +199,79 @@ else:
|
||||
urllib.error = urllib2
|
||||
|
||||
|
||||
# Python version check
|
||||
ver = sys.version_info
|
||||
if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
|
||||
print('error: Python version {} unsupported.\n'
|
||||
'Please use Python {}.{} instead.'.format(
|
||||
sys.version.split(' ')[0],
|
||||
MIN_PYTHON_VERSION[0],
|
||||
MIN_PYTHON_VERSION[1],
|
||||
), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
home_dot_repo = os.path.expanduser('~/.repoconfig')
|
||||
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
|
||||
extra_args = []
|
||||
init_optparse = optparse.OptionParser(usage="repo init -u url [options]")
|
||||
|
||||
# Logging
|
||||
group = init_optparse.add_option_group('Logging options')
|
||||
group.add_option('-q', '--quiet',
|
||||
dest="quiet", action="store_true", default=False,
|
||||
help="be quiet")
|
||||
def _InitParser():
|
||||
"""Setup the init subcommand parser."""
|
||||
# Logging.
|
||||
group = init_optparse.add_option_group('Logging options')
|
||||
group.add_option('-q', '--quiet',
|
||||
action='store_true', default=False,
|
||||
help='be quiet')
|
||||
|
||||
# Manifest
|
||||
group = init_optparse.add_option_group('Manifest options')
|
||||
group.add_option('-u', '--manifest-url',
|
||||
dest='manifest_url',
|
||||
help='manifest repository location', metavar='URL')
|
||||
group.add_option('-b', '--manifest-branch',
|
||||
dest='manifest_branch',
|
||||
help='manifest branch or revision', metavar='REVISION')
|
||||
group.add_option('-m', '--manifest-name',
|
||||
dest='manifest_name',
|
||||
help='initial manifest file', metavar='NAME.xml')
|
||||
group.add_option('--current-branch',
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current manifest branch from server')
|
||||
group.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='create a replica of the remote repositories '
|
||||
'rather than a client working directory')
|
||||
group.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
group.add_option('--dissociate',
|
||||
dest='dissociate', action='store_true',
|
||||
help='dissociate from reference mirrors after clone')
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
group.add_option('--partial-clone', action='store_true',
|
||||
dest='partial_clone',
|
||||
help='perform partial clone (https://git-scm.com/'
|
||||
'docs/gitrepository-layout#_code_partialclone_code)')
|
||||
group.add_option('--clone-filter', action='store', default='blob:none',
|
||||
dest='clone_filter',
|
||||
help='filter for use with --partial-clone [default: %default]')
|
||||
group.add_option('--archive',
|
||||
dest='archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
'each project. See git archive.')
|
||||
group.add_option('--submodules',
|
||||
dest='submodules', action='store_true',
|
||||
help='sync any submodules associated with the manifest repo')
|
||||
group.add_option('-g', '--groups',
|
||||
dest='groups', default='default',
|
||||
help='restrict manifest projects to ones with specified '
|
||||
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
|
||||
metavar='GROUP')
|
||||
group.add_option('-p', '--platform',
|
||||
dest='platform', default="auto",
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
group.add_option('--no-clone-bundle',
|
||||
dest='no_clone_bundle', action='store_true',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
group.add_option('--no-tags',
|
||||
dest='no_tags', action='store_true',
|
||||
help="don't fetch tags in the manifest")
|
||||
# Manifest.
|
||||
group = init_optparse.add_option_group('Manifest options')
|
||||
group.add_option('-u', '--manifest-url',
|
||||
help='manifest repository location', metavar='URL')
|
||||
group.add_option('-b', '--manifest-branch',
|
||||
help='manifest branch or revision', metavar='REVISION')
|
||||
group.add_option('-m', '--manifest-name',
|
||||
help='initial manifest file', metavar='NAME.xml')
|
||||
group.add_option('--current-branch',
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current manifest branch from server')
|
||||
group.add_option('--mirror', action='store_true',
|
||||
help='create a replica of the remote repositories '
|
||||
'rather than a client working directory')
|
||||
group.add_option('--reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
group.add_option('--dissociate', action='store_true',
|
||||
help='dissociate from reference mirrors after clone')
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
help='create a shallow clone with given depth; '
|
||||
'see git clone')
|
||||
group.add_option('--partial-clone', action='store_true',
|
||||
help='perform partial clone (https://git-scm.com/'
|
||||
'docs/gitrepository-layout#_code_partialclone_code)')
|
||||
group.add_option('--clone-filter', action='store', default='blob:none',
|
||||
help='filter for use with --partial-clone '
|
||||
'[default: %default]')
|
||||
group.add_option('--archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
'each project. See git archive.')
|
||||
group.add_option('--submodules', action='store_true',
|
||||
help='sync any submodules associated with the manifest repo')
|
||||
group.add_option('-g', '--groups', default='default',
|
||||
help='restrict manifest projects to ones with specified '
|
||||
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
|
||||
metavar='GROUP')
|
||||
group.add_option('-p', '--platform', default='auto',
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
group.add_option('--no-clone-bundle', action='store_true',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
group.add_option('--no-tags', action='store_true',
|
||||
help="don't fetch tags in the manifest")
|
||||
|
||||
# Tool.
|
||||
group = init_optparse.add_option_group('repo Version options')
|
||||
group.add_option('--repo-url', metavar='URL',
|
||||
help='repo repository location ($REPO_URL)')
|
||||
group.add_option('--repo-branch', metavar='REVISION',
|
||||
help='repo branch or revision ($REPO_REV)')
|
||||
group.add_option('--no-repo-verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
|
||||
# Tool
|
||||
group = init_optparse.add_option_group('repo Version options')
|
||||
group.add_option('--repo-url',
|
||||
dest='repo_url',
|
||||
help='repo repository location', metavar='URL')
|
||||
group.add_option('--repo-branch',
|
||||
dest='repo_branch',
|
||||
help='repo branch or revision', metavar='REVISION')
|
||||
group.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
|
||||
# Other
|
||||
group = init_optparse.add_option_group('Other options')
|
||||
group.add_option('--config-name',
|
||||
dest='config_name', action="store_true", default=False,
|
||||
help='Always prompt for name/e-mail')
|
||||
# Other.
|
||||
group = init_optparse.add_option_group('Other options')
|
||||
group.add_option('--config-name',
|
||||
action='store_true', default=False,
|
||||
help='Always prompt for name/e-mail')
|
||||
|
||||
|
||||
def _GitcInitOptions(init_optparse_arg):
|
||||
@ -366,15 +390,18 @@ def _Init(args, gitc_init=False):
|
||||
|
||||
_CheckGitVersion()
|
||||
try:
|
||||
if NeedSetupGnuPG():
|
||||
can_verify = SetupGnuPG(opt.quiet)
|
||||
if opt.no_repo_verify:
|
||||
do_verify = False
|
||||
else:
|
||||
can_verify = True
|
||||
if NeedSetupGnuPG():
|
||||
do_verify = SetupGnuPG(opt.quiet)
|
||||
else:
|
||||
do_verify = True
|
||||
|
||||
dst = os.path.abspath(os.path.join(repodir, S_repo))
|
||||
_Clone(url, dst, opt.quiet, not opt.no_clone_bundle)
|
||||
|
||||
if can_verify and not opt.no_repo_verify:
|
||||
if do_verify:
|
||||
rev = _Verify(dst, branch, opt.quiet)
|
||||
else:
|
||||
rev = 'refs/remotes/origin/%s^0' % branch
|
||||
@ -443,7 +470,7 @@ def _CheckGitVersion():
|
||||
raise CloneFailure()
|
||||
|
||||
if ver_act is None:
|
||||
print('error: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
print('fatal: unable to detect git version', file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
@ -452,6 +479,39 @@ def _CheckGitVersion():
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
def SetGitTrace2ParentSid(env=None):
|
||||
"""Set up GIT_TRACE2_PARENT_SID for git tracing."""
|
||||
# We roughly follow the format git itself uses in trace2/tr2_sid.c.
|
||||
# (1) Be unique (2) be valid filename (3) be fixed length.
|
||||
#
|
||||
# Since we always export this variable, we try to avoid more expensive calls.
|
||||
# e.g. We don't attempt hostname lookups or hashing the results.
|
||||
if env is None:
|
||||
env = os.environ
|
||||
|
||||
KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
value = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
|
||||
|
||||
# If it's already set, then append ourselves.
|
||||
if KEY in env:
|
||||
value = env[KEY] + '/' + value
|
||||
|
||||
_setenv(KEY, value, env=env)
|
||||
|
||||
|
||||
def _setenv(key, value, env=None):
|
||||
"""Set |key| in the OS environment |env| to |value|."""
|
||||
if env is None:
|
||||
env = os.environ
|
||||
# Environment handling across systems is messy.
|
||||
try:
|
||||
env[key] = value
|
||||
except UnicodeEncodeError:
|
||||
env[key] = value.encode()
|
||||
|
||||
|
||||
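The trace2 session id is just a timestamp plus pid, chained onto any id a parent process already exported, so nested repo and git invocations end up in one trace tree. An illustrative construction of the value:

```python
# Illustrative GIT_TRACE2_PARENT_SID construction matching the hunk above.
import datetime
import os

KEY = 'GIT_TRACE2_PARENT_SID'
now = datetime.datetime.utcnow()
value = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
if KEY in os.environ:
    value = os.environ[KEY] + '/' + value  # append ourselves to the parent's id
os.environ[KEY] = value
print(value)  # e.g. repo-20200131T120301Z-P00001a2f
```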
def NeedSetupGnuPG():
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
return True
|
||||
@ -488,10 +548,7 @@ def SetupGnuPG(quiet):
|
||||
sys.exit(1)
|
||||
|
||||
env = os.environ.copy()
|
||||
try:
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
except UnicodeEncodeError:
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
_setenv('GNUPGHOME', gpg_dir, env)
|
||||
|
||||
cmd = ['gpg', '--import']
|
||||
try:
|
||||
@ -505,7 +562,7 @@ def SetupGnuPG(quiet):
|
||||
print(file=sys.stderr)
|
||||
return False
|
||||
|
||||
proc.stdin.write(MAINTAINER_KEYS)
|
||||
proc.stdin.write(MAINTAINER_KEYS.encode('utf-8'))
|
||||
proc.stdin.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
@ -513,9 +570,8 @@ def SetupGnuPG(quiet):
|
||||
sys.exit(1)
|
||||
print()
|
||||
|
||||
fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
|
||||
fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
|
||||
fd.close()
|
||||
with open(os.path.join(home_dot_repo, 'keyring-version'), 'w') as fd:
|
||||
fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
|
||||
return True
|
||||
|
||||
|
||||
@ -584,6 +640,7 @@ def _DownloadBundle(url, local, quiet):
|
||||
cwd=local,
|
||||
stdout=subprocess.PIPE)
|
||||
for line in proc.stdout:
|
||||
line = line.decode('utf-8')
|
||||
m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
|
||||
if m:
|
||||
new_url = m.group(1)
|
||||
@ -676,7 +733,7 @@ def _Verify(cwd, branch, quiet):
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
cwd=cwd)
|
||||
cur = proc.stdout.read().strip()
|
||||
cur = proc.stdout.read().strip().decode('utf-8')
|
||||
proc.stdout.close()
|
||||
|
||||
proc.stderr.read()
|
||||
@ -697,10 +754,7 @@ def _Verify(cwd, branch, quiet):
|
||||
print(file=sys.stderr)
|
||||
|
||||
env = os.environ.copy()
|
||||
try:
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
except UnicodeEncodeError:
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
_setenv('GNUPGHOME', gpg_dir, env)
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
@ -708,10 +762,10 @@ def _Verify(cwd, branch, quiet):
|
||||
stderr=subprocess.PIPE,
|
||||
cwd=cwd,
|
||||
env=env)
|
||||
out = proc.stdout.read()
|
||||
out = proc.stdout.read().decode('utf-8')
|
||||
proc.stdout.close()
|
||||
|
||||
err = proc.stderr.read()
|
||||
err = proc.stderr.read().decode('utf-8')
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
@ -765,6 +819,7 @@ def _FindRepo():
|
||||
|
||||
class _Options(object):
|
||||
help = False
|
||||
version = False
|
||||
|
||||
|
||||
def _ParseArguments(args):
|
||||
@ -776,7 +831,8 @@ def _ParseArguments(args):
|
||||
a = args[i]
|
||||
if a == '-h' or a == '--help':
|
||||
opt.help = True
|
||||
|
||||
elif a == '--version':
|
||||
opt.version = True
|
||||
elif not a.startswith('-'):
|
||||
cmd = a
|
||||
arg = args[i + 1:]
|
||||
@ -823,6 +879,16 @@ def _Help(args):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _Version():
|
||||
"""Show version information."""
|
||||
print('<repo not installed>')
|
||||
print('repo launcher version %s' % ('.'.join(str(x) for x in VERSION),))
|
||||
print(' (from %s)' % (__file__,))
|
||||
print('git %s' % (ParseGitVersion().full,))
|
||||
print('Python %s' % sys.version)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def _NotInstalled():
|
||||
print('error: repo is not installed. Use "repo init" to install it here.',
|
||||
file=sys.stderr)
|
||||
@ -861,7 +927,7 @@ def _SetDefaultsTo(gitdir):
|
||||
'HEAD'],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
REPO_REV = proc.stdout.read().strip()
|
||||
REPO_REV = proc.stdout.read().strip().decode('utf-8')
|
||||
proc.stdout.close()
|
||||
|
||||
proc.stderr.read()
|
||||
@ -875,6 +941,9 @@ def _SetDefaultsTo(gitdir):
|
||||
def main(orig_args):
|
||||
cmd, opt, args = _ParseArguments(orig_args)
|
||||
|
||||
# We run this early as we run some git commands ourselves.
|
||||
SetGitTrace2ParentSid()
|
||||
|
||||
repo_main, rel_repo_dir = None, None
|
||||
# Don't use the local repo copy, make sure to switch to the gitc client first.
|
||||
if cmd != 'gitc-init':
|
||||
@ -890,11 +959,14 @@ def main(orig_args):
|
||||
'command from the corresponding client under /gitc/',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
_InitParser()
|
||||
if not repo_main:
|
||||
if opt.help:
|
||||
_Usage()
|
||||
if cmd == 'help':
|
||||
_Help(args)
|
||||
if opt.version or cmd == 'version':
|
||||
_Version()
|
||||
if not cmd:
|
||||
_NotInstalled()
|
||||
if cmd == 'init' or cmd == 'gitc-init':
|
||||
@ -923,15 +995,9 @@ def main(orig_args):
|
||||
'--']
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
try:
|
||||
if platform.system() == "Windows":
|
||||
sys.exit(subprocess.call(me))
|
||||
else:
|
||||
os.execv(sys.executable, me)
|
||||
except OSError as e:
|
||||
print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
print("fatal: %s" % e, file=sys.stderr)
|
||||
sys.exit(148)
|
||||
exec_command(me)
|
||||
print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
sys.exit(148)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
@ -14,23 +14,30 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Logic for tracing repo interactions.
|
||||
|
||||
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Env var to implicitly turn on tracing.
|
||||
REPO_TRACE = 'REPO_TRACE'
|
||||
|
||||
try:
|
||||
_TRACE = os.environ[REPO_TRACE] == '1'
|
||||
except KeyError:
|
||||
_TRACE = False
|
||||
_TRACE = os.environ.get(REPO_TRACE) == '1'
|
||||
|
||||
|
||||
def IsTrace():
|
||||
return _TRACE
|
||||
|
||||
|
||||
def SetTrace():
|
||||
global _TRACE
|
||||
_TRACE = True
|
||||
|
||||
|
||||
def Trace(fmt, *args):
|
||||
if IsTrace():
|
||||
print(fmt % args, file=sys.stderr)
|
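The simplified toggle reads the environment once at import time; `--trace` can still flip it on later. A minimal sketch of how the two interact (names illustrative):

```python
# Sketch of the tracing toggle: env var enables it implicitly, a flag explicitly.
import os
import sys

_TRACE = os.environ.get('REPO_TRACE') == '1'

def set_trace():
    global _TRACE
    _TRACE = True

def trace(fmt, *args):
    if _TRACE:
        print(fmt % args, file=sys.stderr)

trace('fetching %s', 'platform/manifest')  # prints only when tracing is enabled
```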
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/env python
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
@ -27,14 +27,13 @@ import sys
|
||||
def run_pytest(cmd, argv):
|
||||
"""Run the unittests via |cmd|."""
|
||||
try:
|
||||
subprocess.check_call([cmd] + argv)
|
||||
return 0
|
||||
return subprocess.call([cmd] + argv)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
print('%s: unable to run `%s`: %s' % (__file__, cmd, e), file=sys.stderr)
|
||||
print('%s: Try installing pytest: sudo apt-get install python-pytest' %
|
||||
(__file__,), file=sys.stderr)
|
||||
return 1
|
||||
return 127
|
||||
else:
|
||||
raise
|
||||
|
||||
|
63
setup.py
Executable file
@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Python packaging for repo."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import setuptools
|
||||
|
||||
|
||||
TOPDIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
# Rip out the first intro paragraph.
|
||||
with open(os.path.join(TOPDIR, 'README.md')) as fp:
|
||||
lines = fp.read().splitlines()[2:]
|
||||
end = lines.index('')
|
||||
long_description = ' '.join(lines[0:end])
|
||||
|
||||
|
||||
# https://packaging.python.org/tutorials/packaging-projects/
|
||||
setuptools.setup(
|
||||
name='repo',
|
||||
version='1.13.8',
|
||||
maintainer='Various',
|
||||
maintainer_email='repo-discuss@googlegroups.com',
|
||||
description='Repo helps manage many Git repositories',
|
||||
long_description=long_description,
|
||||
long_description_content_type='text/plain',
|
||||
url='https://gerrit.googlesource.com/git-repo/',
|
||||
project_urls={
|
||||
'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo',
|
||||
},
|
||||
# https://pypi.org/classifiers/
|
||||
classifiers=[
|
||||
'Development Status :: 6 - Mature',
|
||||
'Environment :: Console',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Natural Language :: English',
|
||||
'Operating System :: MacOS :: MacOS X',
|
||||
'Operating System :: Microsoft :: Windows :: Windows 10',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Topic :: Software Development :: Version Control :: Git',
|
||||
],
|
||||
# We support Python 2.7 and Python 3.6+.
|
||||
python_requires='>=2.7, ' + ', '.join('!=3.%i.*' % x for x in range(0, 6)),
|
||||
packages=['subcmds'],
|
||||
)
|
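The `python_requires` line above expands to a PEP 440 specifier that allows 2.7 but excludes Python 3.0 through 3.5; this tiny snippet just prints the resulting string for inspection:

```python
# Evaluate the python_requires expression used in setup.py above.
spec = '>=2.7, ' + ', '.join('!=3.%i.*' % x for x in range(0, 6))
print(spec)  # >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
```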
@ -40,7 +40,7 @@ for py in os.listdir(my_dir):
|
||||
cmd = getattr(mod, clsn)()
|
||||
except AttributeError:
|
||||
raise SyntaxError('%s/%s does not define class %s' % (
|
||||
__name__, py, clsn))
|
||||
__name__, py, clsn))
|
||||
|
||||
name = name.replace('_', '-')
|
||||
cmd.NAME = name
|
||||
|
@ -21,6 +21,7 @@ from collections import defaultdict
|
||||
from git_command import git
|
||||
from progress import Progress
|
||||
|
||||
|
||||
class Abandon(Command):
|
||||
common = True
|
||||
helpSummary = "Permanently abandon a development branch"
|
||||
@ -32,24 +33,25 @@ deleting it (and all its history) from your local repository.
|
||||
|
||||
It is equivalent to "git branch -D <branchname>".
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--all',
|
||||
dest='all', action='store_true',
|
||||
help='delete all branches in all projects')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.all and not args:
|
||||
self.Usage()
|
||||
|
||||
if not opt.all:
|
||||
nb = args[0]
|
||||
if not git.check_ref_format('heads/%s' % nb):
|
||||
print("error: '%s' is not a valid name" % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self.OptionParser.error("'%s' is not a valid branch name" % nb)
|
||||
else:
|
||||
args.insert(0,None)
|
||||
nb = "'All local branches'"
|
||||
args.insert(0, "'All local branches'")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0]
|
||||
err = defaultdict(list)
|
||||
success = defaultdict(list)
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
@ -79,10 +81,10 @@ It is equivalent to "git branch -D <branchname>".
|
||||
|
||||
if err:
|
||||
for br in err.keys():
|
||||
err_msg = "error: cannot abandon %s" %br
|
||||
err_msg = "error: cannot abandon %s" % br
|
||||
print(err_msg, file=sys.stderr)
|
||||
for proj in err[br]:
|
||||
print(' '*len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
|
||||
print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print('error: no project has local branch(es) : %s' % nb,
|
||||
@ -95,5 +97,5 @@ It is equivalent to "git branch -D <branchname>".
|
||||
result = "all project"
|
||||
else:
|
||||
result = "%s" % (
|
||||
('\n'+' '*width + '| ').join(p.relpath for p in success[br]))
|
||||
print("%s%s| %s\n" % (br,' '*(width-len(br)), result),file=sys.stderr)
|
||||
('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
|
||||
print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result), file=sys.stderr)
|
||||
|
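Several subcommands in this series gain a `ValidateOptions()` hook so argument checks run before `Execute()` and report misuse through the option parser. A hedged sketch of the shape; the class and parser here are illustrative, not the repo base class:

```python
# Sketch of the ValidateOptions/Execute split introduced in these subcommands.
import optparse

class ExampleCommand(object):
    OptionParser = optparse.OptionParser('example [--all] <branch>')

    def ValidateOptions(self, opt, args):
        # Runs before Execute(); bad usage is reported via the parser.
        if not getattr(opt, 'all', False) and not args:
            self.OptionParser.error('missing branch name')

    def Execute(self, opt, args):
        print('operating on', args[0])
```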
@ -19,13 +19,15 @@ import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
|
||||
|
||||
class BranchColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'branch')
|
||||
self.current = self.printer('current', fg='green')
|
||||
self.local = self.printer('local')
|
||||
self.local = self.printer('local')
|
||||
self.notinproject = self.printer('notinproject', fg='red')
|
||||
|
||||
|
||||
class BranchInfo(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
@ -158,7 +160,7 @@ is shown, then the branch appears in all projects.
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
if not p in have:
|
||||
if p not in have:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
@ -170,11 +172,11 @@ is shown, then the branch appears in all projects.
|
||||
fmt = out.current if i.IsCurrent else out.write
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
fmt = out.write
|
||||
for p in non_cur_paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
@ -19,6 +19,7 @@ import sys
|
||||
from command import Command
|
||||
from progress import Progress
|
||||
|
||||
|
||||
class Checkout(Command):
|
||||
common = True
|
||||
helpSummary = "Checkout a branch for development"
|
||||
@ -34,10 +35,11 @@ The command is equivalent to:
|
||||
repo forall [<project>...] -c git checkout <branchname>
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
|
@ -22,6 +22,7 @@ from git_command import GitCommand
|
||||
|
||||
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
|
||||
|
||||
|
||||
class CherryPick(Command):
|
||||
common = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
@ -37,16 +38,17 @@ change id will be added.
|
||||
def _Options(self, p):
|
||||
pass
|
||||
|
||||
def Execute(self, opt, args):
|
||||
def ValidateOptions(self, opt, args):
|
||||
if len(args) != 1:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
reference = args[0]
|
||||
|
||||
p = GitCommand(None,
|
||||
['rev-parse', '--verify', reference],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
if p.Wait() != 0:
|
||||
print(p.stderr, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
@ -60,8 +62,8 @@ change id will be added.
|
||||
|
||||
p = GitCommand(None,
|
||||
['cherry-pick', sha1],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
status = p.Wait()
|
||||
|
||||
print(p.stdout, file=sys.stdout)
|
||||
@ -73,9 +75,9 @@ change id will be added.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
|
||||
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
|
||||
provide_stdin = True,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
provide_stdin=True,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.stdin.write(new_msg)
|
||||
p.stdin.close()
|
||||
if p.Wait() != 0:
|
||||
@ -96,7 +98,7 @@ change id will be added.
|
||||
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("")+1:])
|
||||
return "\n".join(lines[lines.index("") + 1:])
|
||||
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
from command import PagedCommand
|
||||
|
||||
|
||||
class Diff(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Show changes between commit and working tree"
|
||||
@ -37,5 +38,8 @@ to the Unix 'patch' command.
|
||||
help='Paths are relative to the repository root')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
ret = 0
|
||||
for project in self.GetProjects(args):
|
||||
project.PrintWorkTreeDiff(opt.absolute)
|
||||
if not project.PrintWorkTreeDiff(opt.absolute):
|
||||
ret = 1
|
||||
return ret
|
||||
|
@@ -18,10 +18,12 @@ from color import Coloring
 from command import PagedCommand
 from manifest_xml import XmlManifest

+
 class _Coloring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, "status")

+
 class Diffmanifests(PagedCommand):
   """ A command to see logs in projects represented by manifests

@@ -176,17 +178,18 @@ synced and their revisions won't be found.
       self.printText(log)
       self.out.nl()

-  def Execute(self, opt, args):
+  def ValidateOptions(self, opt, args):
     if not args or len(args) > 2:
-      self.Usage()
+      self.OptionParser.error('missing manifests to diff')

+  def Execute(self, opt, args):
     self.out = _Coloring(self.manifest.globalConfig)
     self.printText = self.out.nofmt_printer('text')
     if opt.color:
-      self.printProject = self.out.nofmt_printer('project', attr = 'bold')
-      self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
-      self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
-      self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
+      self.printProject = self.out.nofmt_printer('project', attr='bold')
+      self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold')
+      self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold')
+      self.printRevision = self.out.nofmt_printer('revision', fg='yellow')
     else:
       self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
5 subcmds/download.py
Executable file → Normal file
@@ -23,6 +23,7 @@ from error import GitError

 CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')

+
 class Download(Command):
   common = True
   helpSummary = "Download and checkout a change"
@@ -93,7 +94,7 @@ If no project is specified try to use current directory as a project.
           continue

         if len(dl.commits) > 1:
-          print('[%s] %d/%d depends on %d unmerged changes:' \
+          print('[%s] %d/%d depends on %d unmerged changes:'
                 % (project.name, change_id, ps_id, len(dl.commits)),
                 file=sys.stderr)
           for c in dl.commits:
@@ -102,7 +103,7 @@ If no project is specified try to use current directory as a project.
         try:
           project._CherryPick(dl.commit)
         except GitError:
-          print('[%s] Could not complete the cherry-pick of %s' \
+          print('[%s] Could not complete the cherry-pick of %s'
                 % (project.name, dl.commit), file=sys.stderr)
           sys.exit(1)

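The CHANGE_RE pattern above accepts a change number with an optional patch-set suffix separated by '/', '.' or '-'. A small standalone check of how it splits the two groups; the sample inputs are made up:

import re

# Same pattern as in the hunk above: change number, optional patch-set.
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')

for arg in ('12345', '12345/6', '12345.6', '12345-6', 'abc'):
  m = CHANGE_RE.match(arg)
  if m:
    # group(2) is None when no patch-set was given.
    print(arg, '->', m.group(1), m.group(2))
  else:
    print(arg, '-> no match')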
@ -28,10 +28,10 @@ from command import Command, MirrorSafeCommand
|
||||
import platform_utils
|
||||
|
||||
_CAN_COLOR = [
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
]
|
||||
|
||||
|
||||
@@ -139,6 +139,9 @@ without iterating through the remaining projects.
     p.add_option('-e', '--abort-on-errors',
                  dest='abort_on_errors', action='store_true',
                  help='Abort if a command exits unsuccessfully')
+    p.add_option('--ignore-missing', action='store_true',
+                 help='Silently skip & do not exit non-zero due missing '
+                      'checkouts')

     g = p.add_option_group('Output')
     g.add_option('-p',
@ -167,20 +170,21 @@ without iterating through the remaining projects.
|
||||
else:
|
||||
lrev = None
|
||||
return {
|
||||
'name': project.name,
|
||||
'relpath': project.relpath,
|
||||
'remote_name': project.remote.name,
|
||||
'lrev': lrev,
|
||||
'rrev': project.revisionExpr,
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
'name': project.name,
|
||||
'relpath': project.relpath,
|
||||
'remote_name': project.remote.name,
|
||||
'lrev': lrev,
|
||||
'rrev': project.revisionExpr,
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
}
|
||||
|
||||
def Execute(self, opt, args):
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.command:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
cmd = [opt.command[0]]
|
||||
|
||||
shell = True
|
||||
@ -191,9 +195,9 @@ without iterating through the remaining projects.
|
||||
cmd.append(cmd[0])
|
||||
cmd.extend(opt.command[1:])
|
||||
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
# If this is a direct git command that can enable colorized
|
||||
# output and the user prefers coloring, add --color into the
|
||||
# command line because we are going to wrap the command into
|
||||
@ -216,7 +220,7 @@ without iterating through the remaining projects.
|
||||
|
||||
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
self.manifest.Override(smart_sync_manifest_path)
|
||||
@ -234,8 +238,8 @@ without iterating through the remaining projects.
|
||||
try:
|
||||
config = self.manifest.manifestProject.config
|
||||
results_it = pool.imap(
|
||||
DoWorkWrapper,
|
||||
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
|
||||
DoWorkWrapper,
|
||||
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
|
||||
pool.close()
|
||||
for r in results_it:
|
||||
rc = rc or r
|
||||
@ -249,7 +253,7 @@ without iterating through the remaining projects.
|
||||
except Exception as e:
|
||||
# Catch any other exceptions raised
|
||||
print('Got an error, terminating the pool: %s: %s' %
|
||||
(type(e).__name__, e),
|
||||
(type(e).__name__, e),
|
||||
file=sys.stderr)
|
||||
pool.terminate()
|
||||
rc = rc or getattr(e, 'errno', 1)
|
||||
@ -264,7 +268,7 @@ without iterating through the remaining projects.
|
||||
project = self._SerializeProject(p)
|
||||
except Exception as e:
|
||||
print('Project list error on project %s: %s: %s' %
|
||||
(p.name, type(e).__name__, e),
|
||||
(p.name, type(e).__name__, e),
|
||||
file=sys.stderr)
|
||||
return
|
||||
except KeyboardInterrupt:
|
||||
@ -273,6 +277,7 @@ without iterating through the remaining projects.
|
||||
return
|
||||
yield [mirror, opt, cmd, shell, cnt, config, project]
|
||||
|
||||
|
||||
class WorkerKeyboardInterrupt(Exception):
|
||||
""" Keyboard interrupt exception for worker processes. """
|
||||
pass
|
||||
@ -281,6 +286,7 @@ class WorkerKeyboardInterrupt(Exception):
|
||||
def InitWorker():
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
|
||||
|
||||
def DoWorkWrapper(args):
|
||||
""" A wrapper around the DoWork() method.
|
||||
|
||||
@ -299,6 +305,7 @@ def DoWorkWrapper(args):
|
||||
|
||||
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
env = os.environ.copy()
|
||||
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
@@ -322,10 +329,14 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
   cwd = project['worktree']

   if not os.path.exists(cwd):
-    if (opt.project_header and opt.verbose) \
-    or not opt.project_header:
+    # Allow the user to silently ignore missing checkouts so they can run on
+    # partial checkouts (good for infra recovery tools).
+    if opt.ignore_missing:
+      return 0
+    if ((opt.project_header and opt.verbose)
+        or not opt.project_header):
       print('skipping %s/' % project['relpath'], file=sys.stderr)
-    return
+    return 1

   if opt.project_header:
     stdin = subprocess.PIPE
@@ -358,7 +369,7 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
       while not s_in.is_done:
         in_ready = s_in.select()
         for s in in_ready:
-          buf = s.read()
+          buf = s.read().decode()
           if not buf:
             s.close()
             s_in.remove(s)
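The DoWork() change above distinguishes three outcomes for a missing checkout: silently skip it when --ignore-missing is set, otherwise report it and return non-zero so the overall forall run fails. A rough standalone sketch of that decision; the option names match the hunk, but the helper itself is hypothetical:

from __future__ import print_function

import os
import sys

def run_in_checkout(worktree, ignore_missing, project_header, verbose):
  """Return a shell-style exit code for one project checkout."""
  if not os.path.exists(worktree):
    if ignore_missing:
      # Asked to tolerate partial checkouts: succeed without output.
      return 0
    if (project_header and verbose) or not project_header:
      print('skipping %s/' % worktree, file=sys.stderr)
    # A missing checkout still counts as a failure for the overall run.
    return 1
  # ... run the actual command in the checkout here ...
  return 0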
@@ -24,6 +24,7 @@ from pyversion import is_python3
 if not is_python3():
   input = raw_input

+
 class GitcDelete(Command, GitcClientCommand):
   common = True
   visible_everywhere = False
@@ -50,7 +50,7 @@ use for this GITC client.
   """

   def _Options(self, p):
-    super(GitcInit, self)._Options(p)
+    super(GitcInit, self)._Options(p, gitc_init=True)
     g = p.add_option_group('GITC options')
     g.add_option('-f', '--manifest-file',
                  dest='manifest_file',
@@ -15,15 +15,21 @@
 # limitations under the License.

 from __future__ import print_function

 import sys

 from color import Coloring
 from command import PagedCommand
-from git_command import git_require, GitCommand
+from error import GitError
+from git_command import GitCommand


 class GrepColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'grep')
     self.project = self.printer('project', attr='bold')
+    self.fail = self.printer('fail', fg='red')


 class Grep(PagedCommand):
   common = True
@@ -152,12 +158,11 @@ contain a line that matches both expressions:
                  action='callback', callback=carry,
                  help='Show only file names not containing matching lines')

-
   def Execute(self, opt, args):
     out = GrepColoring(self.manifest.manifestProject.config)

     cmd_argv = ['grep']
-    if out.is_on and git_require((1, 6, 3)):
+    if out.is_on:
       cmd_argv.append('--color')
     cmd_argv.extend(getattr(opt, 'cmd_argv', []))

@@ -184,15 +189,25 @@ contain a line that matches both expressions:
       cmd_argv.extend(opt.revision)
     cmd_argv.append('--')

+    git_failed = False
     bad_rev = False
     have_match = False

     for project in projects:
-      p = GitCommand(project,
-                     cmd_argv,
-                     bare = False,
-                     capture_stdout = True,
-                     capture_stderr = True)
+      try:
+        p = GitCommand(project,
+                       cmd_argv,
+                       bare=False,
+                       capture_stdout=True,
+                       capture_stderr=True)
+      except GitError as e:
+        git_failed = True
+        out.project('--- project %s ---' % project.relpath)
+        out.nl()
+        out.fail('%s', str(e))
+        out.nl()
+        continue
+
       if p.Wait() != 0:
         # no results
         #
@@ -202,7 +217,7 @@ contain a line that matches both expressions:
       else:
         out.project('--- project %s ---' % project.relpath)
         out.nl()
-        out.write("%s", p.stderr)
+        out.fail('%s', p.stderr.strip())
         out.nl()
       continue
       have_match = True
@@ -231,7 +246,9 @@ contain a line that matches both expressions:
       for line in r:
         print(line)

-    if have_match:
+    if git_failed:
+      sys.exit(1)
+    elif have_match:
       sys.exit(0)
     elif have_rev and bad_rev:
       for r in opt.revision:
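The grep hunks above wrap the GitCommand construction in try/except GitError so a project whose repository is missing locally no longer aborts the whole run; the failure is recorded and reported at the end through a non-zero exit. A condensed sketch of that control flow; projects and run_git_grep are placeholders, not repo APIs, and GitError here is a stand-in for repo's error.GitError:

from __future__ import print_function

import sys

class GitError(Exception):
  """Stand-in for repo's error.GitError."""

def grep_all(projects, run_git_grep):
  git_failed = False
  have_match = False
  for project in projects:
    try:
      matched = run_git_grep(project)   # may raise GitError
    except GitError as e:
      git_failed = True
      print('--- project %s ---' % project, file=sys.stderr)
      print(str(e), file=sys.stderr)
      continue
    have_match = have_match or matched
  # Mirror the exit-code policy of the hunk: git failures win, then matches.
  if git_failed:
    sys.exit(1)
  elif have_match:
    sys.exit(0)
  sys.exit(1)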
@ -23,6 +23,7 @@ from color import Coloring
|
||||
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
|
||||
import gitc_utils
|
||||
|
||||
|
||||
class Help(PagedCommand, MirrorSafeCommand):
|
||||
common = False
|
||||
helpSummary = "Display detailed help on a command"
|
||||
@ -33,11 +34,8 @@ class Help(PagedCommand, MirrorSafeCommand):
|
||||
Displays detailed usage information about a command.
|
||||
"""
|
||||
|
||||
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = list(sorted(self.commands))
|
||||
|
||||
def _PrintCommands(self, commandNames):
|
||||
"""Helper to display |commandNames| summaries."""
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
maxlen = max(maxlen, len(name))
|
||||
@ -50,6 +48,12 @@ Displays detailed usage information about a command.
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print(fmt % (name, summary))
|
||||
|
||||
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = list(sorted(self.commands))
|
||||
self._PrintCommands(commandNames)
|
||||
print("See 'repo help <command>' for more information on a "
|
||||
'specific command.')
|
||||
|
||||
@ -69,26 +73,15 @@ Displays detailed usage information about a command.
|
||||
return False
|
||||
|
||||
commandNames = list(sorted([name
|
||||
for name, command in self.commands.items()
|
||||
if command.common and gitc_supported(command)]))
|
||||
for name, command in self.commands.items()
|
||||
if command.common and gitc_supported(command)]))
|
||||
self._PrintCommands(commandNames)
|
||||
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
maxlen = max(maxlen, len(name))
|
||||
fmt = ' %%-%ds %%s' % maxlen
|
||||
|
||||
for name in commandNames:
|
||||
command = self.commands[name]
|
||||
try:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print(fmt % (name, summary))
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
|
||||
def _PrintCommandHelp(self, cmd):
|
||||
def _PrintCommandHelp(self, cmd, header_prefix=''):
|
||||
class _Out(Coloring):
|
||||
def __init__(self, gc):
|
||||
Coloring.__init__(self, gc, 'help')
|
||||
@ -106,7 +99,7 @@ Displays detailed usage information about a command.
|
||||
|
||||
self.nl()
|
||||
|
||||
self.heading('%s', heading)
|
||||
self.heading('%s%s', header_prefix, heading)
|
||||
self.nl()
|
||||
self.nl()
|
||||
|
||||
@ -124,7 +117,7 @@ Displays detailed usage information about a command.
|
||||
|
||||
m = asciidoc_hdr.match(para)
|
||||
if m:
|
||||
self.heading(m.group(1))
|
||||
self.heading('%s%s', header_prefix, m.group(1))
|
||||
self.nl()
|
||||
self.nl()
|
||||
continue
|
||||
@@ -138,14 +131,25 @@ Displays detailed usage information about a command.
     cmd.OptionParser.print_help()
     out._PrintSection('Description', 'helpDescription')

+  def _PrintAllCommandHelp(self):
+    for name in sorted(self.commands):
+      cmd = self.commands[name]
+      cmd.manifest = self.manifest
+      self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
+
   def _Options(self, p):
     p.add_option('-a', '--all',
                  dest='show_all', action='store_true',
                  help='show the complete list of commands')
+    p.add_option('--help-all',
+                 dest='show_all_help', action='store_true',
+                 help='show the --help of all commands')

   def Execute(self, opt, args):
     if len(args) == 0:
-      if opt.show_all:
+      if opt.show_all_help:
+        self._PrintAllCommandHelp()
+      elif opt.show_all:
         self._PrintAllCommands()
       else:
         self._PrintCommonCommands()
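The new --help-all flag is just another store_true option that takes precedence over -a/--all in the Execute() dispatch shown above. A tiny optparse sketch of the same flag pair, using the option names and dests from the hunk; the parse_args call and printed output are illustrative only:

import optparse

p = optparse.OptionParser(prog='repo help')
p.add_option('-a', '--all', dest='show_all', action='store_true',
             help='show the complete list of commands')
p.add_option('--help-all', dest='show_all_help', action='store_true',
             help='show the --help of all commands')

opts, args = p.parse_args(['--help-all'])
# --help-all wins over -a in the dispatch above.
print(opts.show_all_help, opts.show_all)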
@ -16,13 +16,14 @@
|
||||
|
||||
from command import PagedCommand
|
||||
from color import Coloring
|
||||
from error import NoSuchProjectError
|
||||
from git_refs import R_M
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
|
||||
class Info(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||
@ -42,15 +43,14 @@ class Info(PagedCommand):
|
||||
dest="local", action="store_true",
|
||||
help="Disable all remote operations")
|
||||
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.heading = self.out.printer('heading', attr = 'bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg = 'yellow')
|
||||
self.redtext = self.out.printer('redtext', fg = 'red')
|
||||
self.sha = self.out.printer("sha", fg = 'yellow')
|
||||
self.heading = self.out.printer('heading', attr='bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
|
||||
self.redtext = self.out.printer('redtext', fg='red')
|
||||
self.sha = self.out.printer("sha", fg='yellow')
|
||||
self.text = self.out.nofmt_printer('text')
|
||||
self.dimtext = self.out.printer('dimtext', attr = 'dim')
|
||||
self.dimtext = self.out.printer('dimtext', attr='dim')
|
||||
|
||||
self.opt = opt
|
||||
|
||||
@@ -82,10 +82,8 @@ class Info(PagedCommand):
     self.out.nl()

   def printDiffInfo(self, args):
-    try:
-      projs = self.GetProjects(args)
-    except NoSuchProjectError:
-      return
+    # We let exceptions bubble up to main as they'll be well structured.
+    projs = self.GetProjects(args)

     for p in projs:
       self.heading("Project: ")
@@ -97,13 +95,23 @@ class Info(PagedCommand):
       self.out.nl()

       self.heading("Current revision: ")
+      self.headtext(p.GetRevisionId())
+      self.out.nl()
+
+      currentBranch = p.CurrentBranch
+      if currentBranch:
+        self.heading('Current branch: ')
+        self.headtext(currentBranch)
+        self.out.nl()
+
+      self.heading("Manifest revision: ")
       self.headtext(p.revisionExpr)
       self.out.nl()

       localBranches = list(p.GetBranches().keys())
       self.heading("Local Branches: ")
       self.redtext(str(len(localBranches)))
-      if len(localBranches) > 0:
+      if localBranches:
         self.text(" [")
         self.text(", ".join(localBranches))
         self.text("]")
@ -115,7 +123,7 @@ class Info(PagedCommand):
|
||||
self.printSeparator()
|
||||
|
||||
def findRemoteLocalDiff(self, project):
|
||||
#Fetch all the latest commits
|
||||
# Fetch all the latest commits.
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
@ -188,16 +196,16 @@ class Info(PagedCommand):
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
self.text('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
self.out.nl()
|
||||
|
||||
for commit in commits:
|
||||
split = commit.split()
|
||||
self.text('{0:38}{1} '.format('','-'))
|
||||
self.text('{0:38}{1} '.format('', '-'))
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
@@ -34,9 +34,10 @@ from command import InteractiveCommand, MirrorSafeCommand
 from error import ManifestParseError
 from project import SyncBuffer
 from git_config import GitConfig
-from git_command import git_require, MIN_GIT_VERSION
+from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
 import platform_utils

+
 class Init(InteractiveCommand, MirrorSafeCommand):
   common = True
   helpSummary = "Initialize repo in the current directory"
@ -81,7 +82,7 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
|
||||
to update the working directory files.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
def _Options(self, p, gitc_init=False):
|
||||
# Logging
|
||||
g = p.add_option_group('Logging options')
|
||||
g.add_option('-q', '--quiet',
|
||||
@ -96,7 +97,12 @@ to update the working directory files.
|
||||
g.add_option('-b', '--manifest-branch',
|
||||
dest='manifest_branch',
|
||||
help='manifest branch or revision', metavar='REVISION')
|
||||
g.add_option('--current-branch',
|
||||
cbr_opts = ['--current-branch']
|
||||
# The gitc-init subcommand allocates -c itself, but a lot of init users
|
||||
# want -c, so try to satisfy both as best we can.
|
||||
if not gitc_init:
|
||||
cbr_opts += ['-c']
|
||||
g.add_option(*cbr_opts,
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current manifest branch from server')
|
||||
g.add_option('-m', '--manifest-name',
|
||||
@ -218,7 +224,7 @@ to update the working directory files.
|
||||
platformize = lambda x: 'platform-' + x
|
||||
if opt.platform == 'auto':
|
||||
if (not opt.mirror and
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
groups.append(platformize(platform.system().lower()))
|
||||
elif opt.platform == 'all':
|
||||
groups.extend(map(platformize, all_platforms))
|
||||
@ -275,10 +281,10 @@ to update the working directory files.
|
||||
m.config.SetString('repo.submodules', 'true')
|
||||
|
||||
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
no_tags=opt.no_tags, submodules=opt.submodules,
|
||||
clone_filter=opt.clone_filter):
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
no_tags=opt.no_tags, submodules=opt.submodules,
|
||||
clone_filter=opt.clone_filter):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
|
||||
|
||||
@ -344,7 +350,7 @@ to update the working directory files.
|
||||
|
||||
while True:
|
||||
print()
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
|
||||
print()
|
||||
@@ -436,18 +442,22 @@ to update the working directory files.
       print(' rm -r %s/.repo' % self.manifest.topdir)
       print('and try again.')

-  def Execute(self, opt, args):
-    git_require(MIN_GIT_VERSION, fail=True)
-
+  def ValidateOptions(self, opt, args):
     if opt.reference:
       opt.reference = os.path.expanduser(opt.reference)

     # Check this here, else manifest will be tagged "not new" and init won't be
     # possible anymore without removing the .repo/manifests directory.
     if opt.archive and opt.mirror:
-      print('fatal: --mirror and --archive cannot be used together.',
+      self.OptionParser.error('--mirror and --archive cannot be used together.')
+
+  def Execute(self, opt, args):
+    git_require(MIN_GIT_VERSION_HARD, fail=True)
+    if not git_require(MIN_GIT_VERSION_SOFT):
+      print('repo: warning: git-%s+ will soon be required; please upgrade your '
+            'version of git to maintain support.'
+            % ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),),
            file=sys.stderr)
-      sys.exit(1)

     self._SyncManifest(opt)
     self._LinkManifest(opt.manifest_name)
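The init.py hunk above splits the git version requirement in two: MIN_GIT_VERSION_HARD still aborts, while MIN_GIT_VERSION_SOFT only warns that support will be dropped. A standalone sketch of the same two-tier gate; the version tuples and the helper here are illustrative, not repo's actual thresholds or code:

from __future__ import print_function

import subprocess
import sys

MIN_GIT_VERSION_HARD = (1, 7, 2)   # illustrative values only
MIN_GIT_VERSION_SOFT = (1, 9, 1)

def git_version():
  out = subprocess.check_output(['git', '--version']).decode()
  # 'git version 2.39.2' -> (2, 39, 2)
  return tuple(int(x) for x in out.split()[2].split('.')[:3])

def require_git():
  ver = git_version()
  if ver < MIN_GIT_VERSION_HARD:
    print('fatal: git %s or newer is required' %
          '.'.join(map(str, MIN_GIT_VERSION_HARD)), file=sys.stderr)
    sys.exit(1)
  if ver < MIN_GIT_VERSION_SOFT:
    # Soft floor: warn now, keep working, require it later.
    print('warning: git %s+ will soon be required; please upgrade' %
          '.'.join(map(str, MIN_GIT_VERSION_SOFT)), file=sys.stderr)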
@@ -15,10 +15,10 @@
 # limitations under the License.

 from __future__ import print_function
-import sys

 from command import Command, MirrorSafeCommand

+
 class List(Command, MirrorSafeCommand):
   common = True
   helpSummary = "List projects and their associated directories"
@@ -49,6 +49,10 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
                  dest='path_only', action='store_true',
                  help="Display only the path of the repository")

+  def ValidateOptions(self, opt, args):
+    if opt.fullpath and opt.name_only:
+      self.OptionParser.error('cannot combine -f and -n')
+
   def Execute(self, opt, args):
     """List all projects and the associated directories.

@@ -60,11 +64,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
       opt: The options.
       args: Positional args. Can be a list of projects to list, or empty.
     """
-
-    if opt.fullpath and opt.name_only:
-      print('error: cannot combine -f and -n', file=sys.stderr)
-      sys.exit(1)
-
     if not opt.regex:
       projects = self.GetProjects(args, groups=opt.groups)
     else:
@@ -78,7 +77,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
     lines = []
     for project in projects:
       if opt.name_only and not opt.path_only:
-        lines.append("%s" % ( project.name))
+        lines.append("%s" % (project.name))
       elif opt.path_only and not opt.name_only:
         lines.append("%s" % (_getpath(project)))
       else:
@@ -20,6 +20,7 @@ import sys

 from command import PagedCommand

+
 class Manifest(PagedCommand):
   common = False
   helpSummary = "Manifest inspection utility"
@@ -40,10 +41,9 @@ in a Git repository for use during future 'repo init' invocations.
     helptext = self._helpDescription + '\n'
     r = os.path.dirname(__file__)
     r = os.path.dirname(r)
-    fd = open(os.path.join(r, 'docs', 'manifest-format.md'))
-    for line in fd:
-      helptext += line
-    fd.close()
+    with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd:
+      for line in fd:
+        helptext += line
     return helptext

   def _Options(self, p):
@@ -67,20 +67,15 @@ in a Git repository for use during future 'repo init' invocations.
     else:
       fd = open(opt.output_file, 'w')
     self.manifest.Save(fd,
-                       peg_rev = opt.peg_rev,
-                       peg_rev_upstream = opt.peg_rev_upstream)
+                       peg_rev=opt.peg_rev,
+                       peg_rev_upstream=opt.peg_rev_upstream)
     fd.close()
     if opt.output_file != '-':
       print('Saved manifest to %s' % opt.output_file, file=sys.stderr)

-  def Execute(self, opt, args):
+  def ValidateOptions(self, opt, args):
     if args:
       self.Usage()

-    if opt.output_file is not None:
-      self._Output(opt)
-      return
-
-    print('error: no operation to perform', file=sys.stderr)
-    print('error: see repo help manifest', file=sys.stderr)
-    sys.exit(1)
+  def Execute(self, opt, args):
+    self._Output(opt)
@@ -18,6 +18,7 @@ from __future__ import print_function
 from color import Coloring
 from command import PagedCommand

+
 class Prune(PagedCommand):
   common = True
   helpSummary = "Prune (delete) already merged topics"
@@ -51,11 +52,16 @@ class Prune(PagedCommand):
         out.project('project %s/' % project.relpath)
         out.nl()

-      commits = branch.commits
-      date = branch.date
-      print('%s %-33s (%2d commit%s, %s)' % (
+      print('%s %-33s ' % (
             branch.name == project.CurrentBranch and '*' or ' ',
-            branch.name,
+            branch.name), end='')
+
+      if not branch.base_exists:
+        print('(ignoring: tracking branch is gone: %s)' % (branch.base,))
+      else:
+        commits = branch.commits
+        date = branch.date
+        print('(%2d commit%s, %s)' % (
             len(commits),
             len(commits) != 1 and 's' or ' ',
             date))
@@ -17,9 +17,18 @@
 from __future__ import print_function
 import sys

+from color import Coloring
 from command import Command
 from git_command import GitCommand

+
+class RebaseColoring(Coloring):
+  def __init__(self, config):
+    Coloring.__init__(self, config, 'rebase')
+    self.project = self.printer('project', attr='bold')
+    self.fail = self.printer('fail', fg='red')
+
+
 class Rebase(Command):
   common = True
   helpSummary = "Rebase local branches on upstream branch"
@ -34,9 +43,12 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-i', '--interactive',
|
||||
dest="interactive", action="store_true",
|
||||
help="interactive rebase (single project only)")
|
||||
dest="interactive", action="store_true",
|
||||
help="interactive rebase (single project only)")
|
||||
|
||||
p.add_option('--fail-fast',
|
||||
dest='fail_fast', action='store_true',
|
||||
help='Stop rebasing after first error is hit')
|
||||
p.add_option('-f', '--force-rebase',
|
||||
dest='force_rebase', action='store_true',
|
||||
help='Pass --force-rebase to git rebase')
|
||||
@ -70,16 +82,39 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
file=sys.stderr)
|
||||
if len(args) == 1:
|
||||
print('note: project %s is mapped to more than one path' % (args[0],),
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Setup the common git rebase args that we use for all projects.
|
||||
common_args = ['rebase']
|
||||
if opt.whitespace:
|
||||
common_args.append('--whitespace=%s' % opt.whitespace)
|
||||
if opt.quiet:
|
||||
common_args.append('--quiet')
|
||||
if opt.force_rebase:
|
||||
common_args.append('--force-rebase')
|
||||
if opt.no_ff:
|
||||
common_args.append('--no-ff')
|
||||
if opt.autosquash:
|
||||
common_args.append('--autosquash')
|
||||
if opt.interactive:
|
||||
common_args.append('-i')
|
||||
|
||||
config = self.manifest.manifestProject.config
|
||||
out = RebaseColoring(config)
|
||||
out.redirect(sys.stdout)
|
||||
|
||||
ret = 0
|
||||
for project in all_projects:
|
||||
if ret and opt.fail_fast:
|
||||
break
|
||||
|
||||
cb = project.CurrentBranch
|
||||
if not cb:
|
||||
if one_project:
|
||||
print("error: project %s has a detached HEAD" % project.relpath,
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
return 1
|
||||
# ignore branches with detatched HEADs
|
||||
continue
|
||||
|
||||
@ -88,38 +123,21 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
if one_project:
|
||||
print("error: project %s does not track any remote branches"
|
||||
% project.relpath, file=sys.stderr)
|
||||
return -1
|
||||
return 1
|
||||
# ignore branches without remotes
|
||||
continue
|
||||
|
||||
args = ["rebase"]
|
||||
|
||||
if opt.whitespace:
|
||||
args.append('--whitespace=%s' % opt.whitespace)
|
||||
|
||||
if opt.quiet:
|
||||
args.append('--quiet')
|
||||
|
||||
if opt.force_rebase:
|
||||
args.append('--force-rebase')
|
||||
|
||||
if opt.no_ff:
|
||||
args.append('--no-ff')
|
||||
|
||||
if opt.autosquash:
|
||||
args.append('--autosquash')
|
||||
|
||||
if opt.interactive:
|
||||
args.append("-i")
|
||||
|
||||
args = common_args[:]
|
||||
if opt.onto_manifest:
|
||||
args.append('--onto')
|
||||
args.append(project.revisionExpr)
|
||||
|
||||
args.append(upbranch.LocalMerge)
|
||||
|
||||
print('# %s: rebasing %s -> %s'
|
||||
% (project.relpath, cb, upbranch.LocalMerge), file=sys.stderr)
|
||||
out.project('project %s: rebasing %s -> %s',
|
||||
project.relpath, cb, upbranch.LocalMerge)
|
||||
out.nl()
|
||||
out.flush()
|
||||
|
||||
needs_stash = False
|
||||
if opt.auto_stash:
|
||||
@@ -131,13 +149,21 @@ branch but need to incorporate new upstream changes "underneath" them.
           stash_args = ["stash"]

           if GitCommand(project, stash_args).Wait() != 0:
-            return -1
+            ret += 1
+            continue

       if GitCommand(project, args).Wait() != 0:
-        return -1
+        ret += 1
+        continue

       if needs_stash:
         stash_args.append('pop')
         stash_args.append('--quiet')
         if GitCommand(project, stash_args).Wait() != 0:
-          return -1
+          ret += 1
+
+    if ret:
+      out.fail('%i projects had errors', ret)
+      out.nl()
+
+    return ret
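Two things change in rebase.py above: the git rebase argument list is built once as common_args and copied per project, and failures now increment a counter (honoring --fail-fast) instead of returning -1 on the first error. A compact sketch of that loop structure; run_rebase and the opts dictionary stand in for the GitCommand call and parsed options:

def rebase_all(projects, opts, run_rebase):
  # Build the shared git-rebase arguments a single time.
  common_args = ['rebase']
  if opts.get('quiet'):
    common_args.append('--quiet')
  if opts.get('interactive'):
    common_args.append('-i')

  ret = 0
  for project in projects:
    if ret and opts.get('fail_fast'):
      break                        # stop early only when asked to
    args = common_args[:]          # per-project copy
    args.append(project['upstream'])
    if run_rebase(project, args) != 0:
      ret += 1                     # keep going; report the count at the end
      continue
  return ret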
@@ -22,6 +22,7 @@ from command import Command, MirrorSafeCommand
 from subcmds.sync import _PostRepoUpgrade
 from subcmds.sync import _PostRepoFetch

+
 class Selfupdate(Command, MirrorSafeCommand):
   common = False
   helpSummary = "Update repo to the latest version"
@@ -59,5 +60,5 @@ need to be performed by an end-user.

     rp.bare_git.gc('--auto')
     _PostRepoFetch(rp,
-                   no_repo_verify = opt.no_repo_verify,
-                   verbose = True)
+                   no_repo_verify=opt.no_repo_verify,
+                   verbose=True)
@@ -16,6 +16,7 @@

 from subcmds.sync import Sync

+
 class Smartsync(Sync):
   common = True
   helpSummary = "Update working tree to the latest known good revision"
@@ -21,6 +21,7 @@ from color import Coloring
 from command import InteractiveCommand
 from git_command import GitCommand

+
 class _ProjectList(Coloring):
   def __init__(self, gc):
     Coloring.__init__(self, gc, 'interactive')
@@ -28,6 +29,7 @@ class _ProjectList(Coloring):
     self.header = self.printer('header', attr='bold')
     self.help = self.printer('help', fg='red', attr='bold')

+
 class Stage(InteractiveCommand):
   common = True
   helpSummary = "Stage file(s) for commit"
@@ -105,6 +107,7 @@ The '%prog' command stages files to prepare the next commit.
       continue
     print('Bye.')

+
 def _AddI(project):
   p = GitCommand(project, ['add', '--interactive'], bare=False)
   p.Wait()
@@ -25,6 +25,7 @@ import gitc_utils
 from progress import Progress
 from project import SyncBuffer

+
 class Start(Command):
   common = True
   helpSummary = "Start a new branch for development"
@@ -40,22 +41,27 @@ revision specified in the manifest.
     p.add_option('--all',
                  dest='all', action='store_true',
                  help='begin branch in all projects')
+    p.add_option('-r', '--rev', '--revision', dest='revision',
+                 help='point branch at this revision instead of upstream')
+    p.add_option('--head', dest='revision', action='store_const', const='HEAD',
+                 help='abbreviation for --rev HEAD')

-  def Execute(self, opt, args):
+  def ValidateOptions(self, opt, args):
     if not args:
       self.Usage()

     nb = args[0]
     if not git.check_ref_format('heads/%s' % nb):
-      print("error: '%s' is not a valid name" % nb, file=sys.stderr)
-      sys.exit(1)
+      self.OptionParser.error("'%s' is not a valid name" % nb)

+  def Execute(self, opt, args):
+    nb = args[0]
     err = []
     projects = []
     if not opt.all:
       projects = args[1:]
       if len(projects) < 1:
-        projects = ['.',] # start it in the local project by default
+        projects = ['.'] # start it in the local project by default

     all_projects = self.GetProjects(projects,
                                     missing_ok=bool(self.gitc_manifest))
@@ -107,7 +113,8 @@ revision specified in the manifest.
       else:
         branch_merge = self.manifest.default.revisionExpr

-      if not project.StartBranch(nb, branch_merge=branch_merge):
+      if not project.StartBranch(
+          nb, branch_merge=branch_merge, revision=opt.revision):
         err.append(project)
     pm.end()

@@ -31,6 +31,7 @@ import os
 from color import Coloring
 import platform_utils

+
 class Status(PagedCommand):
   common = True
   helpSummary = "Show the working tree status"
@@ -126,8 +127,8 @@ the following meanings:
         continue
       if item in proj_dirs_parents:
         self._FindOrphans(glob.glob('%s/.*' % item) +
-            glob.glob('%s/*' % item),
-            proj_dirs, proj_dirs_parents, outstring)
+                          glob.glob('%s/*' % item),
+                          proj_dirs, proj_dirs_parents, outstring)
         continue
       outstring.append(''.join([status_header, item, '/']))

@@ -170,8 +171,8 @@ the following meanings:
     class StatusColoring(Coloring):
       def __init__(self, config):
         Coloring.__init__(self, config, 'status')
-        self.project = self.printer('header', attr = 'bold')
-        self.untracked = self.printer('untracked', fg = 'red')
+        self.project = self.printer('header', attr='bold')
+        self.untracked = self.printer('untracked', fg='red')

     orig_path = os.getcwd()
     try:
@@ -179,8 +180,8 @@ the following meanings:

       outstring = []
       self._FindOrphans(glob.glob('.*') +
-          glob.glob('*'),
-          proj_dirs, proj_dirs_parents, outstring)
+                        glob.glob('*'),
+                        proj_dirs, proj_dirs_parents, outstring)

       if outstring:
         output = StatusColoring(self.manifest.globalConfig)
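The sync.py changes that follow thread an err_event and a fail_fast flag through the fetch, checkout and gc helpers: workers record failures on a shared threading.Event, and the dispatch loop stops handing out new work once the event is set and --fail-fast was requested. A minimal sketch of that pattern; do_one is a placeholder for the per-project work, not a repo API:

import threading

def run_all(projects, do_one, jobs=4, fail_fast=False):
  err_event = threading.Event()
  sem = threading.Semaphore(jobs)
  threads = []

  def worker(project):
    try:
      if not do_one(project):
        err_event.set()          # record the failure for the dispatcher
    except Exception:
      err_event.set()
      raise
    finally:
      sem.release()

  for project in projects:
    # Stop queueing new work, but let running threads finish.
    if err_event.isSet() and fail_fast:
      break
    sem.acquire()
    t = threading.Thread(target=worker, args=(project,))
    t.daemon = True
    t.start()
    threads.append(t)

  for t in threads:
    t.join()
  return not err_event.isSet()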
534 subcmds/sync.py
@ -53,6 +53,7 @@ except ImportError:
|
||||
|
||||
try:
|
||||
import resource
|
||||
|
||||
def _rlimit_nofile():
|
||||
return resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
except ImportError:
|
||||
@ -81,13 +82,16 @@ from manifest_xml import GitcManifest
|
||||
|
||||
_ONE_DAY_S = 24 * 60 * 60
|
||||
|
||||
|
||||
class _FetchError(Exception):
|
||||
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
|
||||
pass
|
||||
|
||||
|
||||
class _CheckoutError(Exception):
|
||||
"""Internal error thrown in _CheckoutOne() when we don't want stack trace."""
|
||||
|
||||
|
||||
class Sync(Command, MirrorSafeCommand):
|
||||
jobs = 1
|
||||
common = True
|
||||
@ -132,8 +136,8 @@ from the user's .netrc file.
|
||||
if the manifest server specified in the manifest file already includes
|
||||
credentials.
|
||||
|
||||
The -f/--force-broken option can be used to proceed with syncing
|
||||
other projects if a project sync fails.
|
||||
By default, all projects will be synced. The --fail-fast option can be used
|
||||
to halt syncing as soon as possible when the the first project fails to sync.
|
||||
|
||||
The --force-sync option can be used to overwrite existing git
|
||||
directories if they have previously been linked to a different
|
||||
@@ -200,7 +204,10 @@ later is required to fix a server side protocol bug.

     p.add_option('-f', '--force-broken',
                  dest='force_broken', action='store_true',
-                 help="continue sync even if a project fails to sync")
+                 help='obsolete option (to be deleted in the future)')
+    p.add_option('--fail-fast',
+                 dest='fail_fast', action='store_true',
+                 help='stop syncing after first error is hit')
     p.add_option('--force-sync',
                  dest='force_sync', action='store_true',
                  help="overwrite an existing git directory if it needs to "
@ -214,6 +221,10 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('-l', '--local-only',
|
||||
dest='local_only', action='store_true',
|
||||
help="only update working tree, don't fetch")
|
||||
p.add_option('--no-manifest-update', '--nmu',
|
||||
dest='mp_update', action='store_false', default='true',
|
||||
help='use the existing manifest checkout as-is. '
|
||||
'(do not update to the latest revision)')
|
||||
p.add_option('-n', '--network-only',
|
||||
dest='network_only', action='store_true',
|
||||
help="fetch only, don't update working tree")
|
||||
@ -284,7 +295,7 @@ later is required to fix a server side protocol bug.
|
||||
try:
|
||||
for project in projects:
|
||||
success = self._FetchHelper(opt, project, *args, **kwargs)
|
||||
if not success and not opt.force_broken:
|
||||
if not success and opt.fail_fast:
|
||||
break
|
||||
finally:
|
||||
sem.release()
|
||||
@ -312,9 +323,6 @@ later is required to fix a server side protocol bug.
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
|
||||
if not opt.quiet:
|
||||
print('Fetching project %s' % project.name)
|
||||
|
||||
# Encapsulate everything in a try/except/finally so that:
|
||||
# - We always set err_event in the case of an exception.
|
||||
# - We always make sure we unlock the lock if we locked it.
|
||||
@ -323,14 +331,14 @@ later is required to fix a server side protocol bug.
|
||||
try:
|
||||
try:
|
||||
success = project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
force_sync=opt.force_sync,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
no_tags=opt.no_tags, archive=self.manifest.IsArchive,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
prune=opt.prune,
|
||||
clone_filter=clone_filter)
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
force_sync=opt.force_sync,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
no_tags=opt.no_tags, archive=self.manifest.IsArchive,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
prune=opt.prune,
|
||||
clone_filter=clone_filter)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
@ -343,19 +351,16 @@ later is required to fix a server side protocol bug.
|
||||
print('error: Cannot fetch %s from %s'
|
||||
% (project.name, project.remote.url),
|
||||
file=sys.stderr)
|
||||
if opt.force_broken:
|
||||
print('warn: --force-broken, continuing to sync',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
if opt.fail_fast:
|
||||
raise _FetchError()
|
||||
|
||||
fetched.add(project.gitdir)
|
||||
pm.update()
|
||||
pm.update(msg=project.name)
|
||||
except _FetchError:
|
||||
pass
|
||||
except Exception as e:
|
||||
print('error: Cannot fetch %s (%s: %s)' \
|
||||
% (project.name, type(e).__name__, str(e)), file=sys.stderr)
|
||||
print('error: Cannot fetch %s (%s: %s)'
|
||||
% (project.name, type(e).__name__, str(e)), file=sys.stderr)
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
@ -367,11 +372,10 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
return success
|
||||
|
||||
def _Fetch(self, projects, opt):
|
||||
def _Fetch(self, projects, opt, err_event):
|
||||
fetched = set()
|
||||
lock = _threading.Lock()
|
||||
pm = Progress('Fetching projects', len(projects),
|
||||
print_newline=not(opt.quiet),
|
||||
always_print_percentage=opt.quiet)
|
||||
|
||||
objdir_project_map = dict()
|
||||
@ -380,11 +384,10 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(self.jobs)
|
||||
err_event = _threading.Event()
|
||||
for project_list in objdir_project_map.values():
|
||||
# Check for any errors before running any more tasks.
|
||||
# ...we'll let existing threads finish, though.
|
||||
if err_event.isSet() and not opt.force_broken:
|
||||
if err_event.isSet() and opt.fail_fast:
|
||||
break
|
||||
|
||||
sem.acquire()
|
||||
@ -397,8 +400,8 @@ later is required to fix a server side protocol bug.
|
||||
err_event=err_event,
|
||||
clone_filter=self.manifest.CloneFilter)
|
||||
if self.jobs > 1:
|
||||
t = _threading.Thread(target = self._FetchProjectList,
|
||||
kwargs = kwargs)
|
||||
t = _threading.Thread(target=self._FetchProjectList,
|
||||
kwargs=kwargs)
|
||||
# Ensure that Ctrl-C will not freeze the repo process.
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
@ -409,16 +412,11 @@ later is required to fix a server side protocol bug.
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet() and not opt.force_broken:
|
||||
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
pm.end()
|
||||
self._fetch_times.Save()
|
||||
|
||||
if not self.manifest.IsArchive:
|
||||
self._GCProjects(projects)
|
||||
self._GCProjects(projects, opt, err_event)
|
||||
|
||||
return fetched
|
||||
|
||||
@ -436,13 +434,11 @@ later is required to fix a server side protocol bug.
|
||||
_CheckoutOne docstring for details.
|
||||
"""
|
||||
try:
|
||||
success = self._CheckoutOne(opt, project, *args, **kwargs)
|
||||
if not success:
|
||||
sys.exit(1)
|
||||
return self._CheckoutOne(opt, project, *args, **kwargs)
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
def _CheckoutOne(self, opt, project, lock, pm, err_event):
|
||||
def _CheckoutOne(self, opt, project, lock, pm, err_event, err_results):
|
||||
"""Checkout work tree for one project
|
||||
|
||||
Args:
|
||||
@ -454,6 +450,8 @@ later is required to fix a server side protocol bug.
|
||||
lock held).
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
err_results: A list of strings, paths to git repos where checkout
|
||||
failed.
|
||||
|
||||
Returns:
|
||||
Whether the fetch was successful.
|
||||
@ -461,9 +459,6 @@ later is required to fix a server side protocol bug.
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
|
||||
if not opt.quiet:
|
||||
print('Checking out project %s' % project.name)
|
||||
|
||||
# Encapsulate everything in a try/except/finally so that:
|
||||
# - We always set err_event in the case of an exception.
|
||||
# - We always make sure we unlock the lock if we locked it.
|
||||
@ -474,11 +469,11 @@ later is required to fix a server side protocol bug.
|
||||
try:
|
||||
try:
|
||||
project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
|
||||
success = syncbuf.Finish()
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
# and Progress.update() are not thread safe.
|
||||
lock.acquire()
|
||||
success = syncbuf.Finish()
|
||||
did_lock = True
|
||||
|
||||
if not success:
|
||||
@ -487,7 +482,7 @@ later is required to fix a server side protocol bug.
|
||||
file=sys.stderr)
|
||||
raise _CheckoutError()
|
||||
|
||||
pm.update()
|
||||
pm.update(msg=project.name)
|
||||
except _CheckoutError:
|
||||
pass
|
||||
except Exception as e:
|
||||
@ -498,6 +493,8 @@ later is required to fix a server side protocol bug.
|
||||
raise
|
||||
finally:
|
||||
if did_lock:
|
||||
if not success:
|
||||
err_results.append(project.relpath)
|
||||
lock.release()
|
||||
finish = time.time()
|
||||
self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
|
||||
@ -505,12 +502,16 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
return success
|
||||
|
||||
def _Checkout(self, all_projects, opt):
|
||||
def _Checkout(self, all_projects, opt, err_event, err_results):
|
||||
"""Checkout projects listed in all_projects
|
||||
|
||||
Args:
|
||||
all_projects: List of all projects that should be checked out.
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
err_results: A list of strings, paths to git repos where checkout
|
||||
failed.
|
||||
"""
|
||||
|
||||
# Perform checkouts in multiple threads when we are using partial clone.
|
||||
@ -525,16 +526,15 @@ later is required to fix a server side protocol bug.
|
||||
syncjobs = 1
|
||||
|
||||
lock = _threading.Lock()
|
||||
pm = Progress('Syncing work tree', len(all_projects))
|
||||
pm = Progress('Checking out projects', len(all_projects))
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(syncjobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
for project in all_projects:
|
||||
# Check for any errors before running any more tasks.
|
||||
# ...we'll let existing threads finish, though.
|
||||
if err_event.isSet() and not opt.force_broken:
|
||||
if err_event.isSet() and opt.fail_fast:
|
||||
break
|
||||
|
||||
sem.acquire()
|
||||
@ -544,7 +544,8 @@ later is required to fix a server side protocol bug.
|
||||
project=project,
|
||||
lock=lock,
|
||||
pm=pm,
|
||||
err_event=err_event)
|
||||
err_event=err_event,
|
||||
err_results=err_results)
|
||||
if syncjobs > 1:
|
||||
t = _threading.Thread(target=self._CheckoutWorker,
|
||||
kwargs=kwargs)
|
||||
@ -559,21 +560,27 @@ later is required to fix a server side protocol bug.
|
||||
t.join()
|
||||
|
||||
pm.end()
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to checkout errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _GCProjects(self, projects):
|
||||
def _GCProjects(self, projects, opt, err_event):
|
||||
gc_gitdirs = {}
|
||||
for project in projects:
|
||||
# Make sure pruning never kicks in with shared projects.
|
||||
if len(project.manifest.GetProjectsWithName(project.name)) > 1:
|
||||
print('Shared project %s found, disabling pruning.' % project.name)
|
||||
project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
|
||||
print('%s: Shared project %s found, disabling pruning.' %
|
||||
(project.relpath, project.name))
|
||||
if git_require((2, 7, 0)):
|
||||
project.config.SetString('core.repositoryFormatVersion', '1')
|
||||
project.config.SetString('extensions.preciousObjects', 'true')
|
||||
else:
|
||||
# This isn't perfect, but it's the best we can do with old git.
|
||||
print('%s: WARNING: shared projects are unreliable when using old '
|
||||
'versions of git; please upgrade to git-2.7.0+.'
|
||||
% (project.relpath,),
|
||||
file=sys.stderr)
|
||||
project.config.SetString('gc.pruneExpire', 'never')
|
||||
gc_gitdirs[project.gitdir] = project.bare_git
|
||||
|
||||
has_dash_c = git_require((1, 7, 2))
|
||||
if multiprocessing and has_dash_c:
|
||||
if multiprocessing:
|
||||
cpu_count = multiprocessing.cpu_count()
|
||||
else:
|
||||
cpu_count = 1
|
||||
@ -588,7 +595,6 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(jobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
def GC(bare_git):
|
||||
try:
|
||||
@ -596,14 +602,14 @@ later is required to fix a server side protocol bug.
|
||||
bare_git.gc('--auto', config=config)
|
||||
except GitError:
|
||||
err_event.set()
|
||||
except:
|
||||
except Exception:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
for bare_git in gc_gitdirs.values():
|
||||
if err_event.isSet():
|
||||
if err_event.isSet() and opt.fail_fast:
|
||||
break
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target=GC, args=(bare_git,))
|
||||
@ -614,10 +620,6 @@ later is required to fix a server side protocol bug.
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to gc errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _ReloadManifest(self, manifest_name=None):
|
||||
if manifest_name:
|
||||
# Override calls _Unload already
|
||||
@ -637,7 +639,7 @@ later is required to fix a server side protocol bug.
|
||||
print('Failed to remove %s (%s)' % (os.path.join(path, '.git'), str(e)), file=sys.stderr)
|
||||
print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
|
||||
print(' remove manually, then run sync again', file=sys.stderr)
|
||||
return -1
|
||||
return 1
|
||||
|
||||
# Delete everything under the worktree, except for directories that contain
|
||||
# another git project
|
||||
@ -671,7 +673,7 @@ later is required to fix a server side protocol bug.
|
||||
if failed:
|
||||
print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
|
||||
print(' remove manually, then run sync again', file=sys.stderr)
|
||||
return -1
|
||||
return 1
|
||||
|
||||
# Try deleting parent dirs if they are empty
|
||||
project_dir = path
|
||||
@@ -694,11 +696,8 @@ later is required to fix a server side protocol bug.
     old_project_paths = []

     if os.path.exists(file_path):
-      fd = open(file_path, 'r')
-      try:
+      with open(file_path, 'r') as fd:
         old_project_paths = fd.read().split('\n')
-      finally:
-        fd.close()
       # In reversed order, so subfolders are deleted before parent folder.
       for path in sorted(old_project_paths, reverse=True):
         if not path:
@ -708,16 +707,16 @@ later is required to fix a server side protocol bug.
|
||||
gitdir = os.path.join(self.manifest.topdir, path, '.git')
|
||||
if os.path.exists(gitdir):
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
revisionId = None,
|
||||
groups = None)
|
||||
manifest=self.manifest,
|
||||
name=path,
|
||||
remote=RemoteSpec('origin'),
|
||||
gitdir=gitdir,
|
||||
objdir=gitdir,
|
||||
worktree=os.path.join(self.manifest.topdir, path),
|
||||
relpath=path,
|
||||
revisionExpr='HEAD',
|
||||
revisionId=None,
|
||||
groups=None)
|
||||
|
||||
if project.IsDirty() and opt.force_remove_dirty:
|
||||
print('WARNING: Removing dirty project "%s": uncommitted changes '
|
||||
@ -728,166 +727,112 @@ later is required to fix a server side protocol bug.
|
||||
'are present' % project.relpath, file=sys.stderr)
|
||||
print(' commit changes, then run sync again',
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
return 1
|
||||
elif self._DeleteProject(project.worktree):
|
||||
return -1
|
||||
return 1
|
||||
|
||||
new_project_paths.sort()
|
||||
fd = open(file_path, 'w')
|
||||
try:
|
||||
with open(file_path, 'w') as fd:
|
||||
fd.write('\n'.join(new_project_paths))
|
||||
fd.write('\n')
|
||||
finally:
|
||||
fd.close()
|
||||
return 0
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if opt.jobs:
|
||||
self.jobs = opt.jobs
|
||||
if self.jobs > 1:
|
||||
soft_limit, _ = _rlimit_nofile()
|
||||
self.jobs = min(self.jobs, (soft_limit - 5) // 3)
|
||||
|
||||
if opt.network_only and opt.detach_head:
|
||||
print('error: cannot combine -n and -d', file=sys.stderr)
|
||||
def _SmartSyncSetup(self, opt, smart_sync_manifest_path):
|
||||
if not self.manifest.manifest_server:
|
||||
print('error: cannot smart sync: no manifest server defined in '
|
||||
'manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.network_only and opt.local_only:
|
||||
print('error: cannot combine -n and -l', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_sync:
|
||||
print('error: cannot combine -m and -s', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_tag:
|
||||
print('error: cannot combine -m and -t', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_server_username or opt.manifest_server_password:
|
||||
if not (opt.smart_sync or opt.smart_tag):
|
||||
print('error: -u and -p may only be combined with -s or -t',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if None in [opt.manifest_server_username, opt.manifest_server_password]:
|
||||
print('error: both -u and -p must be given', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name)
|
||||
manifest_server = self.manifest.manifest_server
|
||||
if not opt.quiet:
|
||||
print('Using manifest server %s' % manifest_server)
|
||||
|
||||
manifest_name = opt.manifest_name
|
||||
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
if not self.manifest.manifest_server:
|
||||
print('error: cannot smart sync: no manifest server defined in '
|
||||
'manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
manifest_server = self.manifest.manifest_server
|
||||
if not opt.quiet:
|
||||
print('Using manifest server %s' % manifest_server)
|
||||
|
||||
if not '@' in manifest_server:
|
||||
username = None
|
||||
password = None
|
||||
if opt.manifest_server_username and opt.manifest_server_password:
|
||||
username = opt.manifest_server_username
|
||||
password = opt.manifest_server_password
|
||||
else:
|
||||
try:
|
||||
info = netrc.netrc()
|
||||
except IOError:
|
||||
# .netrc file does not exist or could not be opened
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
parse_result = urllib.parse.urlparse(manifest_server)
|
||||
if parse_result.hostname:
|
||||
auth = info.authenticators(parse_result.hostname)
|
||||
if auth:
|
||||
username, _account, password = auth
|
||||
else:
|
||||
print('No credentials found for %s in .netrc'
|
||||
% parse_result.hostname, file=sys.stderr)
|
||||
except netrc.NetrcParseError as e:
|
||||
print('Error parsing .netrc file: %s' % e, file=sys.stderr)
|
||||
|
||||
if (username and password):
|
||||
manifest_server = manifest_server.replace('://', '://%s:%s@' %
|
||||
(username, password),
|
||||
1)
|
||||
|
||||
transport = PersistentTransport(manifest_server)
|
||||
if manifest_server.startswith('persistent-'):
|
||||
manifest_server = manifest_server[len('persistent-'):]
|
||||
|
||||
try:
|
||||
server = xmlrpc.client.Server(manifest_server, transport=transport)
|
||||
if opt.smart_sync:
|
||||
p = self.manifest.manifestProject
|
||||
b = p.GetBranch(p.CurrentBranch)
|
||||
branch = b.merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
|
||||
env = os.environ.copy()
|
||||
if 'SYNC_TARGET' in env:
|
||||
target = env['SYNC_TARGET']
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
|
||||
target = '%s-%s' % (env['TARGET_PRODUCT'],
|
||||
env['TARGET_BUILD_VARIANT'])
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
else:
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch)
|
||||
else:
|
||||
assert(opt.smart_tag)
|
||||
[success, manifest_str] = server.GetManifest(opt.smart_tag)
|
||||
|
||||
if success:
|
||||
manifest_name = smart_sync_manifest_name
|
||||
try:
|
||||
f = open(smart_sync_manifest_path, 'w')
|
||||
try:
|
||||
f.write(manifest_str)
|
||||
finally:
|
||||
f.close()
|
||||
except IOError as e:
|
||||
print('error: cannot write manifest to %s:\n%s'
|
||||
% (smart_sync_manifest_path, e),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self._ReloadManifest(manifest_name)
|
||||
else:
|
||||
print('error: manifest server RPC call failed: %s' %
|
||||
manifest_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except (socket.error, IOError, xmlrpc.client.Fault) as e:
|
||||
print('error: cannot connect to manifest server %s:\n%s'
|
||||
% (self.manifest.manifest_server, e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except xmlrpc.client.ProtocolError as e:
|
||||
print('error: cannot connect to manifest server %s:\n%d %s'
|
||||
% (self.manifest.manifest_server, e.errcode, e.errmsg),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else: # Not smart sync or smart tag mode
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
if '@' not in manifest_server:
|
||||
username = None
|
||||
password = None
|
||||
if opt.manifest_server_username and opt.manifest_server_password:
|
||||
username = opt.manifest_server_username
|
||||
password = opt.manifest_server_password
|
||||
else:
|
||||
try:
|
||||
platform_utils.remove(smart_sync_manifest_path)
|
||||
except OSError as e:
|
||||
print('error: failed to remove existing smart sync override manifest: %s' %
|
||||
e, file=sys.stderr)
|
||||
info = netrc.netrc()
|
||||
except IOError:
|
||||
# .netrc file does not exist or could not be opened
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
parse_result = urllib.parse.urlparse(manifest_server)
|
||||
if parse_result.hostname:
|
||||
auth = info.authenticators(parse_result.hostname)
|
||||
if auth:
|
||||
username, _account, password = auth
|
||||
else:
|
||||
print('No credentials found for %s in .netrc'
|
||||
% parse_result.hostname, file=sys.stderr)
|
||||
except netrc.NetrcParseError as e:
|
||||
print('Error parsing .netrc file: %s' % e, file=sys.stderr)
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
if (username and password):
|
||||
manifest_server = manifest_server.replace('://', '://%s:%s@' %
|
||||
(username, password),
|
||||
1)
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
mp.PreSync()
|
||||
transport = PersistentTransport(manifest_server)
|
||||
if manifest_server.startswith('persistent-'):
|
||||
manifest_server = manifest_server[len('persistent-'):]
|
||||
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
|
||||
try:
|
||||
server = xmlrpc.client.Server(manifest_server, transport=transport)
|
||||
if opt.smart_sync:
|
||||
p = self.manifest.manifestProject
|
||||
b = p.GetBranch(p.CurrentBranch)
|
||||
branch = b.merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
|
||||
env = os.environ.copy()
|
||||
if 'SYNC_TARGET' in env:
|
||||
target = env['SYNC_TARGET']
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
|
||||
target = '%s-%s' % (env['TARGET_PRODUCT'],
|
||||
env['TARGET_BUILD_VARIANT'])
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
else:
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch)
|
||||
else:
|
||||
assert(opt.smart_tag)
|
||||
[success, manifest_str] = server.GetManifest(opt.smart_tag)
|
||||
|
||||
if success:
|
||||
manifest_name = os.path.basename(smart_sync_manifest_path)
|
||||
try:
|
||||
with open(smart_sync_manifest_path, 'w') as f:
|
||||
f.write(manifest_str)
|
||||
except IOError as e:
|
||||
print('error: cannot write manifest to %s:\n%s'
|
||||
% (smart_sync_manifest_path, e),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self._ReloadManifest(manifest_name)
|
||||
else:
|
||||
print('error: manifest server RPC call failed: %s' %
|
||||
manifest_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except (socket.error, IOError, xmlrpc.client.Fault) as e:
|
||||
print('error: cannot connect to manifest server %s:\n%s'
|
||||
% (self.manifest.manifest_server, e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except xmlrpc.client.ProtocolError as e:
|
||||
print('error: cannot connect to manifest server %s:\n%d %s'
|
||||
% (self.manifest.manifest_server, e.errcode, e.errmsg),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
return manifest_name
|
||||
|
||||
def _UpdateManifestProject(self, opt, mp, manifest_name):
|
||||
"""Fetch & update the local manifest project."""
|
||||
if not opt.local_only:
|
||||
start = time.time()
|
||||
success = mp.Sync_NetworkHalf(quiet=opt.quiet,
|
||||
@ -909,10 +854,68 @@ later is required to fix a server side protocol bug.
|
||||
start, time.time(), clean)
|
||||
if not clean:
|
||||
sys.exit(1)
|
||||
self._ReloadManifest(manifest_name)
|
||||
self._ReloadManifest(opt.manifest_name)
|
||||
if opt.jobs is None:
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.force_broken:
|
||||
print('warning: -f/--force-broken is now the default behavior, and the '
|
||||
'options are deprecated', file=sys.stderr)
|
||||
if opt.network_only and opt.detach_head:
|
||||
self.OptionParser.error('cannot combine -n and -d')
|
||||
if opt.network_only and opt.local_only:
|
||||
self.OptionParser.error('cannot combine -n and -l')
|
||||
if opt.manifest_name and opt.smart_sync:
|
||||
self.OptionParser.error('cannot combine -m and -s')
|
||||
if opt.manifest_name and opt.smart_tag:
|
||||
self.OptionParser.error('cannot combine -m and -t')
|
||||
if opt.manifest_server_username or opt.manifest_server_password:
|
||||
if not (opt.smart_sync or opt.smart_tag):
|
||||
self.OptionParser.error('-u and -p may only be combined with -s or -t')
|
||||
if None in [opt.manifest_server_username, opt.manifest_server_password]:
|
||||
self.OptionParser.error('both -u and -p must be given')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if opt.jobs:
|
||||
self.jobs = opt.jobs
|
||||
if self.jobs > 1:
|
||||
soft_limit, _ = _rlimit_nofile()
|
||||
self.jobs = min(self.jobs, (soft_limit - 5) // 3)
|
||||
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name)
|
||||
|
||||
manifest_name = opt.manifest_name
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, 'smart_sync_override.xml')
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
|
||||
else:
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
try:
|
||||
platform_utils.remove(smart_sync_manifest_path)
|
||||
except OSError as e:
|
||||
print('error: failed to remove existing smart sync override manifest: %s' %
|
||||
e, file=sys.stderr)
|
||||
|
||||
err_event = _threading.Event()
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
mp.PreSync()
|
||||
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
|
||||
|
||||
if not opt.mp_update:
|
||||
print('Skipping update of local manifest project.')
|
||||
else:
|
||||
self._UpdateManifestProject(opt, mp, manifest_name)
|
||||
|
||||
if self.gitc_manifest:
|
||||
gitc_manifest_projects = self.GetProjects(args,
|
||||
missing_ok=True)
|
||||
@ -952,6 +955,10 @@ later is required to fix a server side protocol bug.
|
||||
missing_ok=True,
|
||||
submodules_ok=opt.fetch_submodules)
|
||||
|
||||
err_network_sync = False
|
||||
err_update_projects = False
|
||||
err_checkout = False
|
||||
|
||||
self._fetch_times = _FetchTimes(self.manifest)
|
||||
if not opt.local_only:
|
||||
to_fetch = []
|
||||
@ -961,10 +968,14 @@ later is required to fix a server side protocol bug.
|
||||
to_fetch.extend(all_projects)
|
||||
to_fetch.sort(key=self._fetch_times.Get, reverse=True)
|
||||
|
||||
fetched = self._Fetch(to_fetch, opt)
|
||||
fetched = self._Fetch(to_fetch, opt, err_event)
|
||||
|
||||
_PostRepoFetch(rp, opt.no_repo_verify)
|
||||
if opt.network_only:
|
||||
# bail out now; the rest touches the working tree
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return
|
||||
|
||||
# Iteratively fetch missing and/or nested unregistered submodules
|
||||
@ -986,22 +997,60 @@ later is required to fix a server side protocol bug.
|
||||
if previously_missing_set == missing_set:
|
||||
break
|
||||
previously_missing_set = missing_set
|
||||
fetched.update(self._Fetch(missing, opt))
|
||||
fetched.update(self._Fetch(missing, opt, err_event))
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
err_network_sync = True
|
||||
if opt.fail_fast:
|
||||
print('\nerror: Exited sync due to fetch errors.\n'
|
||||
'Local checkouts *not* updated. Resolve network issues & '
|
||||
'retry.\n'
|
||||
'`repo sync -l` will update some local checkouts.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if self.manifest.IsMirror or self.manifest.IsArchive:
|
||||
# bail out now, we have no working tree
|
||||
return
|
||||
|
||||
if self.UpdateProjectList(opt):
|
||||
sys.exit(1)
|
||||
err_event.set()
|
||||
err_update_projects = True
|
||||
if opt.fail_fast:
|
||||
print('\nerror: Local checkouts *not* updated.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
self._Checkout(all_projects, opt)
|
||||
err_results = []
|
||||
self._Checkout(all_projects, opt, err_event, err_results)
|
||||
if err_event.isSet():
|
||||
err_checkout = True
|
||||
# NB: We don't exit here because this is the last step.
|
||||
|
||||
# If there's a notice that's supposed to print at the end of the sync, print
|
||||
# it now...
|
||||
if self.manifest.notice:
|
||||
print(self.manifest.notice)
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Unable to fully sync the tree.', file=sys.stderr)
|
||||
if err_network_sync:
|
||||
print('error: Downloading network changes failed.', file=sys.stderr)
|
||||
if err_update_projects:
|
||||
print('error: Updating local project lists failed.', file=sys.stderr)
|
||||
if err_checkout:
|
||||
print('error: Checking out local projects failed.', file=sys.stderr)
|
||||
if err_results:
|
||||
print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr)
|
||||
print('Try re-running with "-j1 --fail-fast" to exit at the first error.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print('repo sync has finished successfully.')
|
||||
|
||||
|
||||
def _PostRepoUpgrade(manifest, quiet=False):
|
||||
wrapper = Wrapper()
|
||||
if wrapper.NeedSetupGnuPG():
|
||||
@ -1010,6 +1059,7 @@ def _PostRepoUpgrade(manifest, quiet=False):
|
||||
if project.Exists:
|
||||
project.PostRepoUpgrade()
|
||||
|
||||
|
||||
def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
|
||||
if rp.HasChanges:
|
||||
print('info: A new version of repo is available', file=sys.stderr)
|
||||
@ -1028,6 +1078,7 @@ def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
|
||||
print('repo version %s is current' % rp.work_git.describe(HEAD),
|
||||
file=sys.stderr)
|
||||
|
||||
|
||||
def _VerifyTag(project):
|
||||
gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
|
||||
if not os.path.exists(gpg_dir):
|
||||
@ -1058,9 +1109,9 @@ def _VerifyTag(project):
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
stdout = subprocess.PIPE,
|
||||
stderr = subprocess.PIPE,
|
||||
env = env)
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=env)
|
||||
out = proc.stdout.read()
|
||||
proc.stdout.close()
|
||||
|
||||
@ -1094,16 +1145,13 @@ class _FetchTimes(object):
|
||||
old = self._times.get(name, t)
|
||||
self._seen.add(name)
|
||||
a = self._ALPHA
|
||||
self._times[name] = (a*t) + ((1-a) * old)
|
||||
self._times[name] = (a * t) + ((1 - a) * old)
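The reformatted line above is an exponential moving average: each new fetch duration is blended into the stored per-project estimate, which `_fetch_times.Get` later uses to sort projects so the slowest fetches start first. A minimal sketch of the same update (the 0.5 weight is only an illustrative stand-in for whatever `_ALPHA` actually is):

def ema(previous, sample, alpha=0.5):
  """Blend a new fetch-time sample into the stored estimate."""
  return alpha * sample + (1 - alpha) * previous

# Example: an old estimate of 10s and a fresh 4s fetch give a 7s estimate.
assert ema(10.0, 4.0) == 7.0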
|
||||
|
||||
def _Load(self):
|
||||
if self._times is None:
|
||||
try:
|
||||
f = open(self._path)
|
||||
try:
|
||||
with open(self._path) as f:
|
||||
self._times = json.load(f)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, ValueError):
|
||||
try:
|
||||
platform_utils.remove(self._path)
|
||||
@ -1123,11 +1171,8 @@ class _FetchTimes(object):
|
||||
del self._times[name]
|
||||
|
||||
try:
|
||||
f = open(self._path, 'w')
|
||||
try:
|
||||
with open(self._path, 'w') as f:
|
||||
json.dump(self._times, f, indent=2)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, TypeError):
|
||||
try:
|
||||
platform_utils.remove(self._path)
|
||||
@ -1138,6 +1183,8 @@ class _FetchTimes(object):
|
||||
# and supporting persistent-http[s]. It cannot change hosts from
|
||||
# request to request like the normal transport, the real url
|
||||
# is passed during initialization.
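For context on the class that follows: a Transport subclass is handed to the XML-RPC client exactly the way the smart-sync code above does with `xmlrpc.client.Server(manifest_server, transport=transport)`. A tiny illustrative stand-in (not PersistentTransport itself, and Python 3 only for brevity):

import xmlrpc.client

class LoggingTransport(xmlrpc.client.Transport):
  """Stand-in subclass; the real PersistentTransport adds cookie, proxy, and
  persistent-http[s] handling on top of this hook."""
  def request(self, host, handler, request_body, verbose=False):
    print('POST %s%s' % (host, handler))
    return super().request(host, handler, request_body, verbose)

# No request is made until a method is actually called on the proxy.
server = xmlrpc.client.ServerProxy('https://example.com/RPC2',
                                   transport=LoggingTransport())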
|
||||
|
||||
|
||||
class PersistentTransport(xmlrpc.client.Transport):
|
||||
def __init__(self, orig_host):
|
||||
self.orig_host = orig_host
|
||||
@ -1172,7 +1219,7 @@ class PersistentTransport(xmlrpc.client.Transport):
|
||||
if proxy:
|
||||
proxyhandler = urllib.request.ProxyHandler({
|
||||
"http": proxy,
|
||||
"https": proxy })
|
||||
"https": proxy})
|
||||
|
||||
opener = urllib.request.build_opener(
|
||||
urllib.request.HTTPCookieProcessor(cookiejar),
|
||||
@ -1229,4 +1276,3 @@ class PersistentTransport(xmlrpc.client.Transport):
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
|
@ -33,6 +33,7 @@ else:
|
||||
|
||||
UNUSUAL_COMMIT_THRESHOLD = 5
|
||||
|
||||
|
||||
def _ConfirmManyUploads(multiple_branches=False):
|
||||
if multiple_branches:
|
||||
print('ATTENTION: One or more branches has an unusually high number '
|
||||
@ -44,17 +45,20 @@ def _ConfirmManyUploads(multiple_branches=False):
|
||||
answer = input("If you are sure you intend to do this, type 'yes': ").strip()
|
||||
return answer == "yes"
|
||||
|
||||
|
||||
def _die(fmt, *args):
|
||||
msg = fmt % args
|
||||
print('error: %s' % msg, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _SplitEmails(values):
|
||||
result = []
|
||||
for value in values:
|
||||
result.extend([s.strip() for s in value.split(',')])
|
||||
return result
|
||||
|
||||
|
||||
class Upload(InteractiveCommand):
|
||||
common = True
|
||||
helpSummary = "Upload changes for code review"
|
||||
@ -137,13 +141,13 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
dest='auto_topic', action='store_true',
|
||||
help='Send local branch name to Gerrit Code Review')
|
||||
p.add_option('--re', '--reviewers',
|
||||
type='string', action='append', dest='reviewers',
|
||||
type='string', action='append', dest='reviewers',
|
||||
help='Request reviews from these people.')
|
||||
p.add_option('--cc',
|
||||
type='string', action='append', dest='cc',
|
||||
type='string', action='append', dest='cc',
|
||||
help='Also send email to these email addresses.')
|
||||
p.add_option('--br',
|
||||
type='string', action='store', dest='branch',
|
||||
type='string', action='store', dest='branch',
|
||||
help='Branch to upload.')
|
||||
p.add_option('--cbr', '--current-branch',
|
||||
dest='current_branch', action='store_true',
|
||||
@ -168,6 +172,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
type='string', action='store', dest='dest_branch',
|
||||
metavar='BRANCH',
|
||||
help='Submit for review on this target branch.')
|
||||
p.add_option('--no-cert-checks',
|
||||
dest='validate_certs', action='store_false', default=True,
|
||||
help='Disable verifying ssl certs (unsafe).')
|
||||
|
||||
# Options relating to upload hook. Note that verify and no-verify are NOT
|
||||
# opposites of each other, which is why they store to different locations.
|
||||
@ -185,15 +192,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
# Never run upload hooks, but upload anyway (AKA bypass hooks).
|
||||
# - no-verify=True, verify=True:
|
||||
# Invalid
|
||||
p.add_option('--no-cert-checks',
|
||||
dest='validate_certs', action='store_false', default=True,
|
||||
help='Disable verifying ssl certs (unsafe).')
|
||||
p.add_option('--no-verify',
|
||||
g = p.add_option_group('Upload hooks')
|
||||
g.add_option('--no-verify',
|
||||
dest='bypass_hooks', action='store_true',
|
||||
help='Do not run the upload hook.')
|
||||
p.add_option('--verify',
|
||||
g.add_option('--verify',
|
||||
dest='allow_all_hooks', action='store_true',
|
||||
help='Run the upload hook without prompting.')
|
||||
g.add_option('--ignore-hooks',
|
||||
dest='ignore_hooks', action='store_true',
|
||||
help='Do not abort uploading if upload hooks fail.')
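Together with the comment block earlier in this hunk, the option group above defines three flags that are not simple opposites. A self-contained sketch of how they appear to combine, based on the hook-handling code later in this diff (names here are illustrative, not upload.py's):

def should_upload(bypass_hooks, ignore_hooks, hook_passed):
  """Return True if the upload should proceed.

  --verify (allow_all_hooks) is not modelled here: it only controls whether
  the hook runs without prompting, not whether a failure blocks the upload.
  """
  if bypass_hooks:            # --no-verify: never run the hook, upload anyway
    return True
  if hook_passed:
    return True
  return ignore_hooks         # --ignore-hooks: warn but keep uploading

# Hooks failed, but --ignore-hooks was given, so the upload still goes ahead.
assert should_upload(False, True, False)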
|
||||
|
||||
def _SingleBranch(self, opt, branch, people):
|
||||
project = branch.project
|
||||
@ -214,10 +222,10 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
print('Upload project %s/ to remote branch %s%s:' %
|
||||
(project.relpath, destination, ' (draft)' if opt.draft else ''))
|
||||
print(' branch %s (%2d commit%s, %s):' % (
|
||||
name,
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date))
|
||||
name,
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date))
|
||||
for commit in commit_list:
|
||||
print(' %s' % commit)
|
||||
|
||||
@ -271,11 +279,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
branches[project.name] = b
|
||||
script.append('')
|
||||
|
||||
script = [ x.encode('utf-8')
|
||||
if issubclass(type(x), unicode)
|
||||
else x
|
||||
for x in script ]
|
||||
|
||||
script = Editor.EditString("\n".join(script)).split("\n")
|
||||
|
||||
project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$')
|
||||
@ -327,12 +330,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
|
||||
key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None:
|
||||
if raw_list is not None:
|
||||
people[0].extend([entry.strip() for entry in raw_list.split(',')])
|
||||
|
||||
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None and len(people[0]) > 0:
|
||||
if raw_list is not None and len(people[0]) > 0:
|
||||
people[1].extend([entry.strip() for entry in raw_list.split(',')])
|
||||
|
||||
def _FindGerritChange(self, branch):
|
||||
@ -423,18 +426,18 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
else:
|
||||
fmt = '\n (%s)'
|
||||
print(('[FAILED] %-15s %-15s' + fmt) % (
|
||||
branch.project.relpath + '/', \
|
||||
branch.name, \
|
||||
str(branch.error)),
|
||||
file=sys.stderr)
|
||||
branch.project.relpath + '/',
|
||||
branch.name,
|
||||
str(branch.error)),
|
||||
file=sys.stderr)
|
||||
print()
|
||||
|
||||
for branch in todo:
|
||||
if branch.uploaded:
|
||||
print('[OK ] %-15s %s' % (
|
||||
branch.project.relpath + '/',
|
||||
branch.name),
|
||||
file=sys.stderr)
|
||||
branch.project.relpath + '/',
|
||||
branch.name),
|
||||
file=sys.stderr)
|
||||
|
||||
if have_errors:
|
||||
sys.exit(1)
|
||||
@ -442,14 +445,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
def _GetMergeBranch(self, project):
|
||||
p = GitCommand(project,
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.Wait()
|
||||
local_branch = p.stdout.strip()
|
||||
p = GitCommand(project,
|
||||
['config', '--get', 'branch.%s.merge' % local_branch],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.Wait()
|
||||
merge_branch = p.stdout.strip()
|
||||
return merge_branch
|
||||
@ -493,12 +496,24 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
abort_if_user_denies=True)
|
||||
pending_proj_names = [project.name for (project, available) in pending]
|
||||
pending_worktrees = [project.worktree for (project, available) in pending]
|
||||
passed = True
|
||||
try:
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
|
||||
worktree_list=pending_worktrees)
|
||||
except SystemExit:
|
||||
passed = False
|
||||
if not opt.ignore_hooks:
|
||||
raise
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
return
|
||||
|
||||
if not passed:
|
||||
if opt.ignore_hooks:
|
||||
print('\nWARNING: pre-upload hooks failed, but uploading anyways.',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
return
|
||||
|
||||
if opt.reviewers:
|
||||
reviewers = _SplitEmails(opt.reviewers)
|
||||
|
@ -17,9 +17,10 @@
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command, MirrorSafeCommand
|
||||
from git_command import git
|
||||
from git_command import git, RepoSourceVersion, user_agent
|
||||
from git_refs import HEAD
|
||||
|
||||
|
||||
class Version(Command, MirrorSafeCommand):
|
||||
wrapper_version = None
|
||||
wrapper_path = None
|
||||
@ -34,12 +35,20 @@ class Version(Command, MirrorSafeCommand):
|
||||
rp = self.manifest.repoProject
|
||||
rem = rp.GetRemote(rp.remote.name)
|
||||
|
||||
print('repo version %s' % rp.work_git.describe(HEAD))
|
||||
# These might not be the same. Report them both.
|
||||
src_ver = RepoSourceVersion()
|
||||
rp_ver = rp.bare_git.describe(HEAD)
|
||||
print('repo version %s' % rp_ver)
|
||||
print(' (from %s)' % rem.url)
|
||||
|
||||
if Version.wrapper_path is not None:
|
||||
print('repo launcher version %s' % Version.wrapper_version)
|
||||
print(' (from %s)' % Version.wrapper_path)
|
||||
|
||||
if src_ver != rp_ver:
|
||||
print(' (currently at %s)' % src_ver)
|
||||
|
||||
print('repo User-Agent %s' % user_agent.repo)
|
||||
print('git %s' % git.version_tuple().full)
|
||||
print('git User-Agent %s' % user_agent.git)
|
||||
print('Python %s' % sys.version)
|
||||
|
tests/test_editor.py (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the editor.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import unittest
|
||||
|
||||
from editor import Editor
|
||||
|
||||
|
||||
class EditorTestCase(unittest.TestCase):
|
||||
"""Take care of resetting Editor state across tests."""
|
||||
|
||||
def setUp(self):
|
||||
self.setEditor(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.setEditor(None)
|
||||
|
||||
@staticmethod
|
||||
def setEditor(editor):
|
||||
Editor._editor = editor
|
||||
|
||||
|
||||
class GetEditor(EditorTestCase):
|
||||
"""Check GetEditor behavior."""
|
||||
|
||||
def test_basic(self):
|
||||
"""Basic checking of _GetEditor."""
|
||||
self.setEditor(':')
|
||||
self.assertEqual(':', Editor._GetEditor())
|
||||
|
||||
|
||||
class EditString(EditorTestCase):
|
||||
"""Check EditString behavior."""
|
||||
|
||||
def test_no_editor(self):
|
||||
"""Check behavior when no editor is available."""
|
||||
self.setEditor(':')
|
||||
self.assertEqual('foo', Editor.EditString('foo'))
|
||||
|
||||
def test_cat_editor(self):
|
||||
"""Check behavior when editor is `cat`."""
|
||||
self.setEditor('cat')
|
||||
self.assertEqual('foo', Editor.EditString('foo'))
|
@ -14,6 +14,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the git_command.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import re
|
||||
import unittest
|
||||
|
||||
import git_command
|
||||
@ -30,7 +35,7 @@ class GitCallUnitTest(unittest.TestCase):
|
||||
# We don't dive too deep into the values here to avoid having to update
|
||||
# whenever git versions change. We do check relative to this min version
|
||||
# as this is what `repo` itself requires via MIN_GIT_VERSION.
|
||||
MIN_GIT_VERSION = (1, 7, 2)
|
||||
MIN_GIT_VERSION = (2, 10, 2)
|
||||
self.assertTrue(isinstance(ver.major, int))
|
||||
self.assertTrue(isinstance(ver.minor, int))
|
||||
self.assertTrue(isinstance(ver.micro, int))
|
||||
@ -43,3 +48,31 @@ class GitCallUnitTest(unittest.TestCase):
|
||||
self.assertLess(ver, (9999, 9999, 9999))
|
||||
|
||||
self.assertNotEqual('', ver.full)
|
||||
|
||||
|
||||
class UserAgentUnitTest(unittest.TestCase):
|
||||
"""Tests the UserAgent function."""
|
||||
|
||||
def test_smoke_os(self):
|
||||
"""Make sure UA OS setting returns something useful."""
|
||||
os_name = git_command.user_agent.os
|
||||
# We can't dive too deep because of OS/tool differences, but we can check
|
||||
# the general form.
|
||||
m = re.match(r'^[^ ]+$', os_name)
|
||||
self.assertIsNotNone(m)
|
||||
|
||||
def test_smoke_repo(self):
|
||||
"""Make sure repo UA returns something useful."""
|
||||
ua = git_command.user_agent.repo
|
||||
# We can't dive too deep because of OS/tool differences, but we can check
|
||||
# the general form.
|
||||
m = re.match(r'^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+', ua)
|
||||
self.assertIsNotNone(m)
|
||||
|
||||
def test_smoke_git(self):
|
||||
"""Make sure git UA returns something useful."""
|
||||
ua = git_command.user_agent.git
|
||||
# We can't dive too deep because of OS/tool differences, but we can check
|
||||
# the general form.
|
||||
m = re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua)
|
||||
self.assertIsNotNone(m)
|
||||
|
@ -14,19 +14,26 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the git_config.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import git_config
|
||||
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to test/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
|
||||
|
||||
class GitConfigUnitTest(unittest.TestCase):
|
||||
"""Tests the GitConfig class.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture.
|
||||
"""
|
||||
@ -64,5 +71,6 @@ class GitConfigUnitTest(unittest.TestCase):
|
||||
val = config.GetString('empty')
|
||||
self.assertEqual(val, None)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
tests/test_manifest_xml.py (new file, 85 lines)
@ -0,0 +1,85 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the manifest_xml.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import unittest
|
||||
|
||||
import error
|
||||
import manifest_xml
|
||||
|
||||
|
||||
class ManifestValidateFilePaths(unittest.TestCase):
|
||||
"""Check _ValidateFilePaths helper.
|
||||
|
||||
This doesn't access a real filesystem.
|
||||
"""
|
||||
|
||||
def check_both(self, *args):
|
||||
manifest_xml.XmlManifest._ValidateFilePaths('copyfile', *args)
|
||||
manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)
|
||||
|
||||
def test_normal_path(self):
|
||||
"""Make sure good paths are accepted."""
|
||||
self.check_both('foo', 'bar')
|
||||
self.check_both('foo/bar', 'bar')
|
||||
self.check_both('foo', 'bar/bar')
|
||||
self.check_both('foo/bar', 'bar/bar')
|
||||
|
||||
def test_symlink_targets(self):
|
||||
"""Some extra checks for symlinks."""
|
||||
def check(*args):
|
||||
manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)
|
||||
|
||||
# We allow symlinks to end in a slash since we allow them to point to dirs
|
||||
# in general. Technically the slash isn't necessary.
|
||||
check('foo/', 'bar')
|
||||
# We allow a single '.' to get a reference to the project itself.
|
||||
check('.', 'bar')
|
||||
|
||||
def test_bad_paths(self):
|
||||
"""Make sure bad paths (src & dest) are rejected."""
|
||||
PATHS = (
|
||||
'..',
|
||||
'../',
|
||||
'./',
|
||||
'foo/',
|
||||
'./foo',
|
||||
'../foo',
|
||||
'foo/./bar',
|
||||
'foo/../../bar',
|
||||
'/foo',
|
||||
'./../foo',
|
||||
'.git/foo',
|
||||
# Check case folding.
|
||||
'.GIT/foo',
|
||||
'blah/.git/foo',
|
||||
'.repo/foo',
|
||||
'.repoconfig',
|
||||
# Block ~ due to 8.3 filenames on Windows filesystems.
|
||||
'~',
|
||||
'foo~',
|
||||
'blah/foo~',
|
||||
# Block Unicode characters that get normalized out by filesystems.
|
||||
u'foo\u200Cbar',
|
||||
)
|
||||
for path in PATHS:
|
||||
self.assertRaises(
|
||||
error.ManifestInvalidPathError, self.check_both, path, 'a')
|
||||
self.assertRaises(
|
||||
error.ManifestInvalidPathError, self.check_both, 'a', path)
|
tests/test_project.py (new file, 363 lines)
@ -0,0 +1,363 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the project.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import error
|
||||
import git_config
|
||||
import project
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def TempGitTree():
|
||||
"""Create a new empty git checkout for testing."""
|
||||
# TODO(vapier): Convert this to tempfile.TemporaryDirectory once we drop
|
||||
# Python 2 support entirely.
|
||||
try:
|
||||
tempdir = tempfile.mkdtemp(prefix='repo-tests')
|
||||
subprocess.check_call(['git', 'init'], cwd=tempdir)
|
||||
yield tempdir
|
||||
finally:
|
||||
shutil.rmtree(tempdir)
|
||||
|
||||
|
||||
class RepoHookShebang(unittest.TestCase):
|
||||
"""Check shebang parsing in RepoHook."""
|
||||
|
||||
def test_no_shebang(self):
|
||||
"""Lines w/out shebangs should be rejected."""
|
||||
DATA = (
|
||||
'',
|
||||
'# -*- coding:utf-8 -*-\n',
|
||||
'#\n# foo\n',
|
||||
'# Bad shebang in script\n#!/foo\n'
|
||||
)
|
||||
for data in DATA:
|
||||
self.assertIsNone(project.RepoHook._ExtractInterpFromShebang(data))
|
||||
|
||||
def test_direct_interp(self):
|
||||
"""Lines whose shebang points directly to the interpreter."""
|
||||
DATA = (
|
||||
('#!/foo', '/foo'),
|
||||
('#! /foo', '/foo'),
|
||||
('#!/bin/foo ', '/bin/foo'),
|
||||
('#! /usr/foo ', '/usr/foo'),
|
||||
('#! /usr/foo -args', '/usr/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
|
||||
def test_env_interp(self):
|
||||
"""Lines whose shebang launches through `env`."""
|
||||
DATA = (
|
||||
('#!/usr/bin/env foo', 'foo'),
|
||||
('#!/bin/env foo', 'foo'),
|
||||
('#! /bin/env /bin/foo ', '/bin/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
|
||||
|
||||
class FakeProject(object):
|
||||
"""A fake for Project for basic functionality."""
|
||||
|
||||
def __init__(self, worktree):
|
||||
self.worktree = worktree
|
||||
self.gitdir = os.path.join(worktree, '.git')
|
||||
self.name = 'fakeproject'
|
||||
self.work_git = project.Project._GitGetByExec(
|
||||
self, bare=False, gitdir=self.gitdir)
|
||||
self.bare_git = project.Project._GitGetByExec(
|
||||
self, bare=True, gitdir=self.gitdir)
|
||||
self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
|
||||
|
||||
|
||||
class ReviewableBranchTests(unittest.TestCase):
|
||||
"""Check ReviewableBranch behavior."""
|
||||
|
||||
def test_smoke(self):
|
||||
"""A quick run through everything."""
|
||||
with TempGitTree() as tempdir:
|
||||
fakeproj = FakeProject(tempdir)
|
||||
|
||||
# Generate some commits.
|
||||
with open(os.path.join(tempdir, 'readme'), 'w') as fp:
|
||||
fp.write('txt')
|
||||
fakeproj.work_git.add('readme')
|
||||
fakeproj.work_git.commit('-mAdd file')
|
||||
fakeproj.work_git.checkout('-b', 'work')
|
||||
fakeproj.work_git.rm('-f', 'readme')
|
||||
fakeproj.work_git.commit('-mDel file')
|
||||
|
||||
# Start off with the normal details.
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'master')
|
||||
self.assertEqual('work', rb.name)
|
||||
self.assertEqual(1, len(rb.commits))
|
||||
self.assertIn('Del file', rb.commits[0])
|
||||
d = rb.unabbrev_commits
|
||||
self.assertEqual(1, len(d))
|
||||
short, long = next(iter(d.items()))
|
||||
self.assertTrue(long.startswith(short))
|
||||
self.assertTrue(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
|
||||
# Now delete the tracking branch!
|
||||
fakeproj.work_git.branch('-D', 'master')
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'master')
|
||||
self.assertEqual(0, len(rb.commits))
|
||||
self.assertFalse(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
|
||||
|
||||
class CopyLinkTestCase(unittest.TestCase):
|
||||
"""TestCase for stub repo client checkouts.
|
||||
|
||||
It'll have a layout like:
|
||||
tempdir/ # self.tempdir
|
||||
checkout/ # self.topdir
|
||||
git-project/ # self.worktree
|
||||
|
||||
Attributes:
|
||||
tempdir: A dedicated temporary directory.
|
||||
worktree: The top of the repo client checkout.
|
||||
topdir: The top of a project checkout.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
|
||||
self.topdir = os.path.join(self.tempdir, 'checkout')
|
||||
self.worktree = os.path.join(self.topdir, 'git-project')
|
||||
os.makedirs(self.topdir)
|
||||
os.makedirs(self.worktree)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempdir, ignore_errors=True)
|
||||
|
||||
@staticmethod
|
||||
def touch(path):
|
||||
with open(path, 'w'):
|
||||
pass
|
||||
|
||||
def assertExists(self, path, msg=None):
|
||||
"""Make sure |path| exists."""
|
||||
if os.path.exists(path):
|
||||
return
|
||||
|
||||
if msg is None:
|
||||
msg = ['path is missing: %s' % path]
|
||||
while path != '/':
|
||||
path = os.path.dirname(path)
|
||||
if not path:
|
||||
# If we're given something like "foo", abort once we get to "".
|
||||
break
|
||||
result = os.path.exists(path)
|
||||
msg.append('\tos.path.exists(%s): %s' % (path, result))
|
||||
if result:
|
||||
msg.append('\tcontents: %r' % os.listdir(path))
|
||||
break
|
||||
msg = '\n'.join(msg)
|
||||
|
||||
raise self.failureException(msg)
|
||||
|
||||
|
||||
class CopyFile(CopyLinkTestCase):
|
||||
"""Check _CopyFile handling."""
|
||||
|
||||
def CopyFile(self, src, dest):
|
||||
return project._CopyFile(self.worktree, src, self.topdir, dest)
|
||||
|
||||
def test_basic(self):
|
||||
"""Basic test of copying a file from a project to the toplevel."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('foo.txt', 'foo')
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'foo'))
|
||||
|
||||
def test_src_subdir(self):
|
||||
"""Copy a file from a subdir of a project."""
|
||||
src = os.path.join(self.worktree, 'bar', 'foo.txt')
|
||||
os.makedirs(os.path.dirname(src))
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('bar/foo.txt', 'new.txt')
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'new.txt'))
|
||||
|
||||
def test_dest_subdir(self):
|
||||
"""Copy a file to a subdir of a checkout."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
|
||||
self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))
|
||||
|
||||
def test_update(self):
|
||||
"""Make sure changed files get copied again."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
dest = os.path.join(self.topdir, 'bar')
|
||||
with open(src, 'w') as f:
|
||||
f.write('1st')
|
||||
cf = self.CopyFile('foo.txt', 'bar')
|
||||
cf._Copy()
|
||||
self.assertExists(dest)
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), '1st')
|
||||
|
||||
with open(src, 'w') as f:
|
||||
f.write('2nd!')
|
||||
cf._Copy()
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), '2nd!')
|
||||
|
||||
def test_src_block_symlink(self):
|
||||
"""Do not allow reading from a symlinked path."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
sym = os.path.join(self.worktree, 'sym')
|
||||
self.touch(src)
|
||||
os.symlink('foo.txt', sym)
|
||||
self.assertExists(sym)
|
||||
cf = self.CopyFile('sym', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_symlink_traversal(self):
|
||||
"""Do not allow reading through a symlink dir."""
|
||||
src = os.path.join(self.worktree, 'bar', 'passwd')
|
||||
os.symlink('/etc', os.path.join(self.worktree, 'bar'))
|
||||
self.assertExists(src)
|
||||
cf = self.CopyFile('bar/foo.txt', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_copy_from_dir(self):
|
||||
"""Do not allow copying from a directory."""
|
||||
src = os.path.join(self.worktree, 'dir')
|
||||
os.makedirs(src)
|
||||
cf = self.CopyFile('dir', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_dest_block_symlink(self):
|
||||
"""Do not allow writing to a symlink."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
os.symlink('dest', os.path.join(self.topdir, 'sym'))
|
||||
cf = self.CopyFile('foo.txt', 'sym')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_dest_block_symlink_traversal(self):
|
||||
"""Do not allow writing through a symlink dir."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
os.symlink('/tmp', os.path.join(self.topdir, 'sym'))
|
||||
cf = self.CopyFile('foo.txt', 'sym/foo.txt')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_copy_to_dir(self):
|
||||
"""Do not allow copying to a directory."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
os.makedirs(os.path.join(self.topdir, 'dir'))
|
||||
cf = self.CopyFile('foo.txt', 'dir')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
|
||||
class LinkFile(CopyLinkTestCase):
|
||||
"""Check _LinkFile handling."""
|
||||
|
||||
def LinkFile(self, src, dest):
|
||||
return project._LinkFile(self.worktree, src, self.topdir, dest)
|
||||
|
||||
def test_basic(self):
|
||||
"""Basic test of linking a file from a project into the toplevel."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
lf = self.LinkFile('foo.txt', 'foo')
|
||||
lf._Link()
|
||||
dest = os.path.join(self.topdir, 'foo')
|
||||
self.assertExists(dest)
|
||||
self.assertTrue(os.path.islink(dest))
|
||||
self.assertEqual('git-project/foo.txt', os.readlink(dest))
|
||||
|
||||
def test_src_subdir(self):
|
||||
"""Link to a file in a subdir of a project."""
|
||||
src = os.path.join(self.worktree, 'bar', 'foo.txt')
|
||||
os.makedirs(os.path.dirname(src))
|
||||
self.touch(src)
|
||||
lf = self.LinkFile('bar/foo.txt', 'foo')
|
||||
lf._Link()
|
||||
self.assertExists(os.path.join(self.topdir, 'foo'))
|
||||
|
||||
def test_src_self(self):
|
||||
"""Link to the project itself."""
|
||||
dest = os.path.join(self.topdir, 'foo', 'bar')
|
||||
lf = self.LinkFile('.', 'foo/bar')
|
||||
lf._Link()
|
||||
self.assertExists(dest)
|
||||
self.assertEqual('../git-project', os.readlink(dest))
|
||||
|
||||
def test_dest_subdir(self):
|
||||
"""Link a file to a subdir of a checkout."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
|
||||
self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
|
||||
lf._Link()
|
||||
self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))
|
||||
|
||||
def test_src_block_relative(self):
|
||||
"""Do not allow relative symlinks."""
|
||||
BAD_SOURCES = (
|
||||
'./',
|
||||
'..',
|
||||
'../',
|
||||
'foo/.',
|
||||
'foo/./bar',
|
||||
'foo/..',
|
||||
'foo/../foo',
|
||||
)
|
||||
for src in BAD_SOURCES:
|
||||
lf = self.LinkFile(src, 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, lf._Link)
|
||||
|
||||
def test_update(self):
|
||||
"""Make sure changed targets get updated."""
|
||||
dest = os.path.join(self.topdir, 'sym')
|
||||
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
lf = self.LinkFile('foo.txt', 'sym')
|
||||
lf._Link()
|
||||
self.assertEqual('git-project/foo.txt', os.readlink(dest))
|
||||
|
||||
# Point the symlink somewhere else.
|
||||
os.unlink(dest)
|
||||
os.symlink('/', dest)
|
||||
lf._Link()
|
||||
self.assertEqual('git-project/foo.txt', os.readlink(dest))
|
@ -14,25 +14,58 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the wrapper.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import unittest
|
||||
|
||||
from pyversion import is_python3
|
||||
import wrapper
|
||||
|
||||
|
||||
if is_python3():
|
||||
from unittest import mock
|
||||
from io import StringIO
|
||||
else:
|
||||
import mock
|
||||
from StringIO import StringIO
|
||||
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to tests/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
|
||||
class RepoWrapperUnitTest(unittest.TestCase):
|
||||
"""Tests helper functions in the repo wrapper
|
||||
"""
|
||||
|
||||
class RepoWrapperTestCase(unittest.TestCase):
|
||||
"""TestCase for the wrapper module."""
|
||||
|
||||
def setUp(self):
|
||||
"""Load the wrapper module every time
|
||||
"""
|
||||
"""Load the wrapper module every time."""
|
||||
wrapper._wrapper_module = None
|
||||
self.wrapper = wrapper.Wrapper()
|
||||
|
||||
if not is_python3():
|
||||
self.assertRegex = self.assertRegexpMatches
|
||||
|
||||
|
||||
class RepoWrapperUnitTest(RepoWrapperTestCase):
|
||||
"""Tests helper functions in the repo wrapper
|
||||
"""
|
||||
|
||||
def test_version(self):
|
||||
"""Make sure _Version works."""
|
||||
with self.assertRaises(SystemExit) as e:
|
||||
with mock.patch('sys.stdout', new_callable=StringIO) as stdout:
|
||||
with mock.patch('sys.stderr', new_callable=StringIO) as stderr:
|
||||
self.wrapper._Version()
|
||||
self.assertEqual(0, e.exception.code)
|
||||
self.assertEqual('', stderr.getvalue())
|
||||
self.assertIn('repo launcher version', stdout.getvalue())
|
||||
|
||||
def test_get_gitc_manifest_dir_no_gitc(self):
|
||||
"""
|
||||
Test reading a missing gitc config file
|
||||
@ -72,5 +105,38 @@ class RepoWrapperUnitTest(unittest.TestCase):
|
||||
self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/'), None)
|
||||
self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/'), None)
|
||||
|
||||
|
||||
class SetGitTrace2ParentSid(RepoWrapperTestCase):
|
||||
"""Check SetGitTrace2ParentSid behavior."""
|
||||
|
||||
KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
VALID_FORMAT = re.compile(r'^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$')
|
||||
|
||||
def test_first_set(self):
|
||||
"""Test env var not yet set."""
|
||||
env = {}
|
||||
self.wrapper.SetGitTrace2ParentSid(env)
|
||||
self.assertIn(self.KEY, env)
|
||||
value = env[self.KEY]
|
||||
self.assertRegex(value, self.VALID_FORMAT)
|
||||
|
||||
def test_append(self):
|
||||
"""Test env var is appended."""
|
||||
env = {self.KEY: 'pfx'}
|
||||
self.wrapper.SetGitTrace2ParentSid(env)
|
||||
self.assertIn(self.KEY, env)
|
||||
value = env[self.KEY]
|
||||
self.assertTrue(value.startswith('pfx/'))
|
||||
self.assertRegex(value[4:], self.VALID_FORMAT)
|
||||
|
||||
def test_global_context(self):
|
||||
"""Check os.environ gets updated by default."""
|
||||
os.environ.pop(self.KEY, None)
|
||||
self.wrapper.SetGitTrace2ParentSid()
|
||||
self.assertIn(self.KEY, os.environ)
|
||||
value = os.environ[self.KEY]
|
||||
self.assertRegex(value, self.VALID_FORMAT)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
tox.ini (new file, 27 lines)
@ -0,0 +1,27 @@
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# https://tox.readthedocs.io/

[tox]
envlist = py27, py36, py37, py38

[testenv]
deps = pytest
commands = {toxinidir}/run_tests

[testenv:py27]
deps =
  mock
  pytest
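With this configuration in place, the whole interpreter matrix can presumably be exercised with a bare `tox` from the top of the checkout, or a single environment picked out with e.g. `tox -e py36`; either way tox just invokes the existing `./run_tests` helper listed under `commands` above.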
wrapper.py (12 lines changed)
@ -15,16 +15,24 @@
# limitations under the License.

from __future__ import print_function
import imp
try:
  from importlib.machinery import SourceFileLoader
  _loader = lambda *args: SourceFileLoader(*args).load_module()
except ImportError:
  import imp
  _loader = lambda *args: imp.load_source(*args)
import os


def WrapperPath():
  return os.path.join(os.path.dirname(__file__), 'repo')


_wrapper_module = None


def Wrapper():
  global _wrapper_module
  if not _wrapper_module:
    _wrapper_module = imp.load_source('wrapper', WrapperPath())
    _wrapper_module = _loader('wrapper', WrapperPath())
  return _wrapper_module
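As a usage note, callers load the launcher through this shim rather than importing the `repo` script directly; the pattern below mirrors what tests/test_wrapper.py does in its setUp (illustrative, and it assumes it runs from a git-repo source checkout):

import wrapper

wrapper._wrapper_module = None   # force a fresh load, as the tests do
launcher = wrapper.Wrapper()     # SourceFileLoader on Python 3, imp.load_source on Python 2
print(wrapper.WrapperPath())     # path to the bundled `repo` launcher script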