Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00)
Compare commits
181 Commits
SHA1:
60fc51bb1d, 72325c5f3e, d79a4bc51b, 682f0b6426, e7082ccb54, dbfbcb14c1, d0ca0f6814, 433977e958,
dd37fb2222, af908cb543, 74e8ed4bde, 2fe84e17b9, 1122353683, b6871899be, 8e0fe1920e, d086467012,
2735bfc5ff, 653f8b711b, 9bc283e49b, b4a6f6d798, 3e5b269fc6, cdb344c0e7, e257d56665, 3599cc3975,
cfc8111f5e, 587f162033, 78964472ad, 05097c6222, 915fda130e, ea43176de0, 58ac1678e8, e1111f5710,
7936ce8677, 23c900f105, bb930461ce, d3639c53d5, f725e548db, 4847e05743, bb8ee7f54a, 23d7dafd10,
8b40c00eab, e20da3eeed, 910dfe8497, 21b7fbe14d, b967f5c17a, dc15532bee, eea23b44a9, 5f11eac147,
b0fbc7fb58, 4c418bf423, fc1b18ae9e, d957ec6a83, 9f91c4395a, 4b0eb5a441, d38300c756, dcbfadf814,
edd3d45b35, 71928c19a6, f5dbd2eb07, 0b888912cb, 75264789c0, a269b1cb9d, 7951e14385, 8c268c0e7b,
d9254599f9, 746e7f664e, f241f8c094, a1e24b1f00, e6e27b338b, aa611a2ca2, 949bc34267, f841ca48c1,
c0d1866b35, f81c72ed77, 77b4397a73, 0334b8c673, 7ff80afdf6, 19ec797f81, 979d5bdc3e, 56ce3468b4,
02aa889ecd, 819cc81c57, 84685ba187, 72ebf19e52, e50b6a7c4f, 8a98efee5c, 7a753b8b18, 0258584c72,
c58ec4dba1, e1191b3adb, 8f9bf484d8, 37f28f1b4e, af1e5dea35, 3cceda535d, 31990f0097, 16f2fae16f,
521d01b2e0, 2b1345b8c5, 3995ebd8c1, b57e633433, d21638424c, c102fd5c0d, d6b8bd464c, 6a784ff9a6,
a46bf7dc2a, 19a1f22cd0, 076512aafa, d8fda90eed, 9cc1d70476, c19cc5c508, 6fb0cb5c80, 62285d22c1,
3cda50a41b, afbccdb11e, e8ace26117, daa2cecdc5, 3c5114cd78, 7838e388ac, aa47181e36, 58a8b5c5d9,
22dbfb99e5, 31b9b4b06c, 0b57eed8f0, 72b6dc8891, e19d9e1a65, 8ddff5c74f, 8409410aa2, dc63181fcd,
f700ac79c3, 6f1c626a9b, 77479863da, 16a5c3ac51, 145e35b805, 819827a42d, abdf750061, 0ab95ba6d0,
5a2517f411, 54a4e6007a, 42339d7e52, 03ae99290a, 9090e804ab, eeff3537de, 8f78a83083, e5913ae410,
119085e6b1, 086710465e, ed4f2113d2, 719675bcec, 21c1575ee4, 8f9e02231a, 348e218d5b, 4bbba7d627,
dc1d0e0c7f, 82caef67a1, 3645bd2420, 5f2b045195, 163d42eb43, 07392ed326, 3285e4b436, ae62541005,
83a3227b62, 09dd9bda38, f914edca53, e7c91889a6, 1b117db767, 563f1a6512, b4687ad862, ded477dbb9,
93293ca47f, dbd277ce50, 5a03308c5c, 3ba716f382, 655aedd7f3, cc960971f4, 66098f707a, f7b64e3350,
bd0aae95f5, e6a202f790, 04122b7261, f5525fb310, ee451f035d
16  .flake8
@@ -1,3 +1,15 @@
[flake8]
max-line-length=80
ignore=E111,E114,E402
max-line-length=100
ignore=
    # E111: Indentation is not a multiple of four
    E111,
    # E114: Indentation is not a multiple of four (comment)
    E114,
    # E402: Module level import not at top of file
    E402,
    # E731: do not assign a lambda expression, use a def
    E731,
    # W503: Line break before binary operator
    W503,
    # W504: Line break after binary operator
    W504
34  .github/workflows/test-ci.yml  vendored  Normal file
@@ -0,0 +1,34 @@
# GitHub actions workflow.
# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions

name: Test CI

on:
  push:
    branches: [master, repo-1, stable, maint]
    tags: [v*]

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [2.7, 3.6, 3.7, 3.8]
        exclude:
          - os: windows-latest
            python-version: 2.7
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v1
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install tox tox-gh-actions
    - name: Test with tox
      run: tox
1  .gitignore  vendored
@@ -1,3 +1,4 @@
*.asc
*.egg-info/
*.log
*.pyc
1  .mailmap
@@ -4,6 +4,7 @@ Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com>
Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com>
Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com>
Jia Bi <bijia@xiaomi.com> bijia <bijia@xiaomi.com>
Jiri Tyr <jiri.tyr@gmail.com> Jiri tyr <jiri.tyr@gmail.com>
JoonCheol Park <jooncheol@gmail.com> Jooncheol Park <jooncheol@gmail.com>
Sergii Pylypenko <x.pelya.x@gmail.com> pelya <x.pelya.x@gmail.com>
Shawn Pearce <sop@google.com> Shawn O. Pearce <sop@google.com>
29  README.md
@@ -6,15 +6,29 @@ development workflow. Repo is not meant to replace Git, only to make it
easier to work with Git. The repo command is an executable Python script
that you can put anywhere in your path.

* Homepage: https://gerrit.googlesource.com/git-repo/
* Bug reports: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
* Source: https://gerrit.googlesource.com/git-repo/
* Overview: https://source.android.com/source/developing.html
* Docs: https://source.android.com/source/using-repo.html
* Homepage: <https://gerrit.googlesource.com/git-repo/>
* Mailing list: [repo-discuss on Google Groups][repo-discuss]
* Bug reports: <https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo>
* Source: <https://gerrit.googlesource.com/git-repo/>
* Overview: <https://source.android.com/source/developing.html>
* Docs: <https://source.android.com/source/using-repo.html>
* [repo Manifest Format](./docs/manifest-format.md)
* [repo Hooks](./docs/repo-hooks.md)
* [Submitting patches](./SUBMITTING_PATCHES.md)
* Running Repo in [Microsoft Windows](./docs/windows.md)
* GitHub mirror: <https://github.com/GerritCodeReview/git-repo>
* Postsubmit tests: <https://github.com/GerritCodeReview/git-repo/actions>

## Contact

Please use the [repo-discuss] mailing list or [issue tracker] for questions.

You can [file a new bug report][new-bug] under the "repo" component.

Please do not e-mail individual developers for support.
They do not have the bandwidth for it, and often times questions have already
been asked on [repo-discuss] or bugs posted to the [issue tracker].
So please search those sites first.

## Install

@@ -34,3 +48,8 @@ $ PATH="${HOME}/.bin:${PATH}"
$ curl https://storage.googleapis.com/git-repo-downloads/repo > ~/.bin/repo
$ chmod a+rx ~/.bin/repo
```

[new-bug]: https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue
[issue tracker]: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
[repo-discuss]: https://groups.google.com/forum/#!forum/repo-discuss
SUBMITTING_PATCHES.md
@@ -4,7 +4,7 @@

- Make small logical changes.
- Provide a meaningful commit message.
- Check for coding errors and style nits with pyflakes and flake8
- Check for coding errors and style nits with flake8.
- Make sure all code is under the Apache License, 2.0.
- Publish your changes for review.
- Make corrections if requested.
@@ -38,34 +38,30 @@ If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.

## Check for coding errors and style nits with pyflakes and flake8
## Check for coding errors and style violations with flake8

### Coding errors

Run `pyflakes` on changed modules:

    pyflakes file.py

Ideally there should be no new errors or warnings introduced.

### Style violations

Run `flake8` on changes modules:
Run `flake8` on changed modules:

    flake8 file.py

Note that repo generally follows [Google's python style guide] rather than
[PEP 8], so it's possible that the output of `flake8` will be quite noisy.
It's not mandatory to avoid all warnings, but at least the maximum line
length should be followed.
Note that repo generally follows [Google's Python Style Guide] rather than
[PEP 8], with a couple of notable exceptions:

If there are many occurrences of the same warning that cannot be
avoided without going against the Google style guide, these may be
suppressed in the included `.flake8` file.
* Indentation is at 2 columns rather than 4
* The maximum line length is 100 columns rather than 80

[Google's python style guide]: https://google.github.io/styleguide/pyguide.html
There should be no new errors or warnings introduced.

Warnings that cannot be avoided without going against the Google Style Guide
may be suppressed inline individally using a `# noqa` comment as described
in the [flake8 documentation].

If there are many occurrences of the same warning, these may be suppressed for
the entire project in the included `.flake8` file.

[Google's Python Style Guide]: https://google.github.io/styleguide/pyguide.html
[PEP 8]: https://www.python.org/dev/peps/pep-0008/

[flake8 documentation]: https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html#in-line-ignoring-errors

## Running tests
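For readers unfamiliar with flake8's inline suppressions, here is a minimal illustration of the `# noqa` mechanism the updated text refers to (the helper below is invented for the example, not taken from repo):

```python
# flake8 would normally flag this line with E731 (do not assign a lambda expression);
# the trailing comment suppresses only that one warning, and only on this line.
is_sha1 = lambda s: len(s) == 40  # noqa: E731
```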
1  color.py
@@ -84,6 +84,7 @@ def _Color(fg=None, bg=None, attr=None):
code = ''
return code


DEFAULT = None
11  command.py
@@ -66,7 +66,8 @@ class Command(object):
usage = self.helpUsage.strip().replace('%prog', me)
except AttributeError:
usage = 'repo %s' % self.NAME
self._optparse = optparse.OptionParser(usage=usage)
epilog = 'Run `repo help %s` to view the detailed manual.' % self.NAME
self._optparse = optparse.OptionParser(usage=usage, epilog=epilog)
self._Options(self._optparse)
return self._optparse

@@ -123,9 +124,9 @@ class Command(object):
project = None
if os.path.exists(path):
oldpath = None
while path and \
path != oldpath and \
path != manifest.topdir:
while (path and
path != oldpath and
path != manifest.topdir):
try:
project = self._by_path[path]
break
@@ -236,6 +237,7 @@ class InteractiveCommand(Command):
"""Command which requires user interaction on the tty and
must not run within a pager, even if the user asks to.
"""

def WantPager(self, _opt):
return False

@@ -244,6 +246,7 @@ class PagedCommand(Command):
"""Command which defaults to output in a pager, as its
display tends to be larger than one screen full.
"""

def WantPager(self, _opt):
return True
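The epilog change above is plain optparse behaviour; a self-contained sketch (the subcommand name and option are made up) shows where the text ends up in `--help` output:

```python
import optparse

# optparse prints the epilog after the option listing, which is how every
# `repo <cmd> --help` can now point users at `repo help <cmd>`.
parser = optparse.OptionParser(
    usage='repo sync [<project>...]',
    epilog='Run `repo help sync` to view the detailed manual.')
parser.add_option('-q', '--quiet', action='store_true', help='only print errors')
parser.print_help()
```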
232  docs/internal-fs-layout.md  Normal file
@@ -0,0 +1,232 @@
# Repo internal filesystem layout

A reference to the `.repo/` tree in repo client checkouts.
Hopefully it's complete & up-to-date, but who knows!

*** note
**Warning**:
This is meant for developers of the repo project itself as a quick reference.
**Nothing** in here must be construed as ABI, or that repo itself will never
change its internals in backwards incompatible ways.
***

[TOC]

## .repo/ layout

All content under `.repo/` is managed by `repo` itself with few exceptions.

In general, you should not make manual changes in here.
If a setting was initialized using an option to `repo init`, you should use that
command to change the setting later on.
It is always safe to re-run `repo init` in existing repo client checkouts.
For example, if you want to change the manifest branch, you can simply run
`repo init --manifest-branch=<new name>` and repo will take care of the rest.

* `config`: Per-repo client checkout settings using [git-config] file format.
* `.repo_config.json`: JSON cache of the `config` file for repo to
read/process quickly.

### repo/ state

* `repo/`: A git checkout of the repo project. This is how `repo` re-execs
itself to get the latest released version.

It tracks the git repository at `REPO_URL` using the `REPO_REV` branch.
Those are specified at `repo init` time using the `--repo-url=<REPO_URL>`
and `--repo-rev=<REPO_REV>` options.

Any changes made to this directory will usually be automatically discarded
by repo itself when it checks for updates. If you want to update to the
latest version of repo, use `repo selfupdate` instead. If you want to
change the git URL/branch that this tracks, re-run `repo init` with the new
settings.

* `.repo_fetchtimes.json`: Used by `repo sync` to record stats when syncing
the various projects.

### Manifests

For more documentation on the manifest format, including the local_manifests
support, see the [manifest-format.md] file.

* `manifests/`: A git checkout of the manifest project. Its `.git/` state
points to the `manifest.git` bare checkout (see below). It tracks the git
branch specified at `repo init` time via `--manifest-branch`.

The local branch name is always `default` regardless of the remote tracking
branch. Do not get confused if the remote branch is not `default`, or if
there is a remote `default` that is completely different!

No manual changes should be made in here as it will just confuse repo and
it won't automatically recover causing no new changes to be picked up.

* `manifests.git/`: A bare checkout of the manifest project. It tracks the
git repository specified at `repo init` time via `--manifest-url`.

No manual changes should be made in here as it will just confuse repo.
If you want to switch the tracking settings, re-run `repo init` with the
new settings.

* `manifest.xml`: The manifest that repo uses. It is generated at `repo init`
and uses the `--manifest-name` to determine what manifest file to load next
out of `manifests/`.

Do not try to modify this to load other manifests as it will confuse repo.
If you want to switch manifest files, re-run `repo init` with the new
setting.

Older versions of repo managed this with symlinks.

* `manifest.xml -> manifests/<manifest-name>.xml`: A symlink to the manifest
that the user wishes to sync. It is specified at `repo init` time via
`--manifest-name`.

* `manifests.git/.repo_config.json`: JSON cache of the `manifests.git/config`
file for repo to read/process quickly.

* `local_manifest.xml` (*Deprecated*): User-authored tweaks to the manifest
used to sync. See [local manifests] for more details.
* `local_manifests/`: Directory of user-authored manifest fragments to tweak
the manifest used to sync. See [local manifests] for more details.

### Project objects

* `project.list`: Tracking file used by `repo sync` to determine when projects
are added or removed and need corresponding updates in the checkout.
* `projects/`: Bare checkouts of every project synced by the manifest. The
filesystem layout matches the `<project path=...` setting in the manifest
(i.e. where it's checked out in the repo client source tree). Those
checkouts will symlink their `.git/` state to paths under here.

Some git state is further split out under `project-objects/`.
* `project-objects/`: Git objects that are safe to share across multiple
git checkouts. The filesystem layout matches the `<project name=...`
setting in the manifest (i.e. the path on the remote server) with a `.git`
suffix. This allows for multiple checkouts of the same remote git repo to
share their objects. For example, you could have different branches of
`foo/bar.git` checked out to `foo/bar-master`, `foo/bar-release`, etc...
There will be multiple trees under `projects/` for each one, but only one
under `project-objects/`.

This layout is designed to allow people to sync against different remotes
(e.g. a local mirror & a public review server) while avoiding duplicating
the content. However, this can run into problems if different remotes use
the same path on their respective servers. Best to avoid that.
* `subprojects/`: Like `projects/`, but for git submodules.
* `subproject-objects/`: Like `project-objects/`, but for git submodules.
* `worktrees/`: Bare checkouts of every project synced by the manifest. The
filesystem layout matches the `<project name=...` setting in the manifest
(i.e. the path on the remote server) with a `.git` suffix. This has the
same advantages as the `project-objects/` layout above.

This is used when git worktrees are enabled.

### Global settings

The `.repo/manifests.git/config` file is used to track settings for the entire
repo client checkout.
Most settings use the `[repo]` section to avoid conflicts with git.
User controlled settings are initialized when running `repo init`.

| Setting           | `repo init` Option        | Use/Meaning |
|-------------------|---------------------------|-------------|
| manifest.groups   | `--groups` & `--platform` | The manifest groups to sync |
| repo.archive      | `--archive`               | Use `git archive` for checkouts |
| repo.clonebundle  | `--clone-bundle`          | Whether the initial sync used clone.bundle explicitly |
| repo.clonefilter  | `--clone-filter`          | Filter setting when using [partial git clones] |
| repo.depth        | `--depth`                 | Create shallow checkouts when cloning |
| repo.dissociate   | `--dissociate`            | Dissociate from any reference/mirrors after initial clone |
| repo.mirror       | `--mirror`                | Checkout is a repo mirror |
| repo.partialclone | `--partial-clone`         | Create [partial git clones] |
| repo.reference    | `--reference`             | Reference repo client checkout |
| repo.submodules   | `--submodules`            | Sync git submodules |
| repo.worktree     | `--worktree`              | Use `git worktree` for checkouts |
| user.email        | `--config-name`           | User's e-mail address; Copied into `.git/config` when checking out a new project |
| user.name         | `--config-name`           | User's name; Copied into `.git/config` when checking out a new project |

[partial git clones]: https://git-scm.com/docs/gitrepository-layout#_code_partialclone_code

### Repo hooks settings

For more details on this feature, see the [repo-hooks docs](./repo-hooks.md).
We'll just discuss the internal configuration settings.
These are stored in the registered `<repo-hooks>` project itself, so if the
manifest switches to a different project, the settings will not be copied.

| Setting                              | Use/Meaning |
|--------------------------------------|-------------|
| repo.hooks.\<hook\>.approvedmanifest | User approval for secure manifest sources (e.g. https://) |
| repo.hooks.\<hook\>.approvedhash     | User approval for insecure manifest sources (e.g. http://) |

For example, if our manifest had the following entries, we would store settings
under `.repo/projects/src/repohooks.git/config` (which would be reachable via
`git --git-dir=src/repohooks/.git config`).
```xml
<project path="src/repohooks" name="chromiumos/repohooks" ... />
<repo-hooks in-project="chromiumos/repohooks" ... />
```

If `<hook>` is `pre-upload`, the `.git/config` setting might be:
```ini
[repo "hooks.pre-upload"]
	approvedmanifest = https://chromium.googlesource.com/chromiumos/manifest
```

## Per-project settings

These settings are somewhat meant to be tweaked by the user on a per-project
basis (e.g. `git config` in a checked out source repo).

Where possible, we re-use standard git settings to avoid confusion, and we
refrain from documenting those, so see [git-config] documentation instead.

See `repo help upload` for documentation on `[review]` settings.

The `[remote]` settings are automatically populated/updated from the manifest.

The `[branch]` settings are updated by `repo start` and `git branch`.

| Setting                       | Subcommands   | Use/Meaning |
|-------------------------------|---------------|-------------|
| review.\<url\>.autocopy       | upload        | Automatically add to `--cc=<value>` |
| review.\<url\>.autoreviewer   | upload        | Automatically add to `--reviewers=<value>` |
| review.\<url\>.autoupload     | upload        | Automatically answer "yes" or "no" to all prompts |
| review.\<url\>.uploadhashtags | upload        | Automatically add to `--hashtag=<value>` |
| review.\<url\>.uploadlabels   | upload        | Automatically add to `--label=<value>` |
| review.\<url\>.uploadnotify   | upload        | [Notify setting][upload-notify] to use |
| review.\<url\>.uploadtopic    | upload        | Default [topic] to use |
| review.\<url\>.username       | upload        | Override username with `ssh://` review URIs |
| remote.\<remote\>.fetch       | sync          | Set of refs to fetch |
| remote.\<remote\>.projectname | \<network\>   | The name of the project as it exists in Gerrit review |
| remote.\<remote\>.pushurl     | upload        | The base URI for pushing CLs |
| remote.\<remote\>.review      | upload        | The URI of the Gerrit review server |
| remote.\<remote\>.url         | sync & upload | The URI of the git project to fetch |
| branch.\<branch\>.merge       | sync & upload | The branch to merge & upload & track |
| branch.\<branch\>.remote      | sync & upload | The remote to track |

## ~/ dotconfig layout

Repo will create & maintain a few files in the user's home directory.

* `.repoconfig/`: Repo's per-user directory for all random config files/state.
* `.repoconfig/config`: Per-user settings using [git-config] file format.
* `.repoconfig/keyring-version`: Cache file for checking if the gnupg subdir
has all the same keys as the repo launcher. Used to avoid running gpg
constantly as that can be quite slow.
* `.repoconfig/gnupg/`: GnuPG's internal state directory used when repo needs
to run `gpg`. This provides isolation from the user's normal `~/.gnupg/`.

* `.repoconfig/.repo_config.json`: JSON cache of the `.repoconfig/config`
file for repo to read/process quickly.
* `.repo_.gitconfig.json`: JSON cache of the `.gitconfig` file for repo to
read/process quickly.

[git-config]: https://git-scm.com/docs/git-config
[manifest-format.md]: ./manifest-format.md
[local manifests]: ./manifest-format.md#Local-Manifests
[topic]: https://gerrit-review.googlesource.com/Documentation/intro-user.html#topics
[upload-notify]: https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
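Since the global settings above live in an ordinary [git-config] file, they can be inspected with stock git. A minimal sketch, not part of repo itself (the key names are the ones from the table above):

```python
import subprocess

def read_repo_setting(name, config='.repo/manifests.git/config'):
  """Return a value such as 'manifest.groups' or 'repo.depth', or None if unset."""
  result = subprocess.run(
      ['git', 'config', '--file', config, '--get', name],
      capture_output=True, text=True)
  if result.returncode != 0:
    return None
  return result.stdout.strip()

if __name__ == '__main__':
  for key in ('manifest.groups', 'repo.depth', 'repo.worktree'):
    print(key, '=', read_repo_setting(key))
```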
docs/manifest-format.md
@@ -89,6 +89,7 @@ following DTD:
<!ATTLIST extend-project path CDATA #IMPLIED>
<!ATTLIST extend-project groups CDATA #IMPLIED>
<!ATTLIST extend-project revision CDATA #IMPLIED>
<!ATTLIST extend-project remote CDATA #IMPLIED>

<!ELEMENT remove-project EMPTY>
<!ATTLIST remove-project name CDATA #REQUIRED>
@@ -306,6 +307,9 @@ belongs. Same syntax as the corresponding element of `project`.
Attribute `revision`: If specified, overrides the revision of the original
project. Same syntax as the corresponding element of `project`.

Attribute `remote`: If specified, overrides the remote of the original
project. Same syntax as the corresponding element of `project`.

### Element annotation

Zero or more annotation elements may be specified as children of a
@@ -338,7 +342,7 @@ It's just like copyfile and runs at the same time as copyfile but
instead of copying it creates a symlink.

The symlink is created at "dest" (relative to the top of the tree) and
points to the path specified by "src".
points to the path specified by "src" which is a path in the project.

Parent directories of "dest" will be automatically created if missing.
docs/release-process.md
@@ -49,11 +49,11 @@ control how repo finds updates:

* `--repo-url`: This tells repo where to clone the full repo project itself.
It defaults to the official project (`REPO_URL` in the launcher script).
* `--repo-branch`: This tells repo which branch to use for the full project.
* `--repo-rev`: This tells repo which branch to use for the full project.
It defaults to the `stable` branch (`REPO_REV` in the launcher script).

Whenever `repo sync` is run, repo will check to see if an update is available.
It fetches the latest repo-branch from the repo-url.
It fetches the latest repo-rev from the repo-url.
Then it verifies that the latest commit in the branch has a valid signed tag
using `git tag -v` (which uses gpg).
If the tag is valid, then repo will update its internal checkout to it.
@@ -91,7 +91,7 @@ When you want to create a new release, you'll need to select a good version and
create a signed tag using a key registered in repo itself.
Typically we just tag the latest version of the `master` branch.
The tag could be pushed now, but it won't be used by clients normally (since the
default `repo-branch` setting is `stable`).
default `repo-rev` setting is `stable`).
This would allow some early testing on systems who explicitly select `master`.

### Creating a signed tag
@@ -161,7 +161,91 @@ You can create a short changelog using the command:
$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
```

## Project References

Here's a table showing the relationship of major tools, their EOL dates, and
their status in Ubuntu & Debian.
Those distros tend to be good indicators of how long we need to support things.

Things in bold indicate stuff to take note of, but does not guarantee that we
still support them.
Things in italics are things we used to care about but probably don't anymore.

| Date | EOL | [Git][rel-g] | [Python][rel-p] | [Ubuntu][rel-u] / [Debian][rel-d] | Git | Python |
|:--------:|:------------:|--------------|-----------------|-----------------------------------|-----|--------|
| Oct 2008 | *Oct 2013* | | 2.6.0 | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |
| Dec 2008 | *Feb 2009* | | 3.0.0 |
| Feb 2009 | *Mar 2012* | | | Debian 5 Lenny | 1.5.6.5 | 2.5.2 |
| Jun 2009 | *Jun 2016* | | 3.1.0 | *10.04 Lucid* - 10.10 Maverick / *Squeeze* |
| Feb 2010 | *Oct 2012* | 1.7.0 | | *10.04 Lucid* - *12.04 Precise* - 12.10 Quantal |
| Apr 2010 | *Apr 2015* | | | *10.04 Lucid* | 1.7.0.4 | 2.6.5 3.1.2 |
| Jul 2010 | *Dec 2019* | | **2.7.0** | 11.04 Natty - **<current>** |
| Oct 2010 | | | | 10.10 Maverick | 1.7.1 | 2.6.6 3.1.3 |
| Feb 2011 | *Feb 2016* | | | Debian 6 Squeeze | 1.7.2.5 | 2.6.6 3.1.3 |
| Apr 2011 | | | | 11.04 Natty | 1.7.4 | 2.7.1 3.2.0 |
| Oct 2011 | *Feb 2016* | | 3.2.0 | 11.04 Natty - 12.10 Quantal |
| Oct 2011 | | | | 11.10 Ocelot | 1.7.5.4 | 2.7.2 3.2.2 |
| Apr 2012 | *Apr 2019* | | | *12.04 Precise* | 1.7.9.5 | 2.7.3 3.2.3 |
| Sep 2012 | *Sep 2017* | | 3.3.0 | 13.04 Raring - 13.10 Saucy |
| Oct 2012 | *Dec 2014* | 1.8.0 | | 13.04 Raring - 13.10 Saucy |
| Oct 2012 | | | | 12.10 Quantal | 1.7.10.4 | 2.7.3 3.2.3 |
| Apr 2013 | | | | 13.04 Raring | 1.8.1.2 | 2.7.4 3.3.1 |
| May 2013 | *May 2018* | | | Debian 7 Wheezy | 1.7.10.4 | 2.7.3 3.2.3 |
| Oct 2013 | | | | 13.10 Saucy | 1.8.3.2 | 2.7.5 3.3.2 |
| Feb 2014 | *Dec 2014* | **1.9.0** | | **14.04 Trusty** |
| Mar 2014 | *Mar 2019* | | **3.4.0** | **14.04 Trusty** - 15.10 Wily / **Jessie** |
| Apr 2014 | **Apr 2022** | | | **14.04 Trusty** | 1.9.1 | 2.7.5 3.4.0 |
| May 2014 | *Dec 2014* | 2.0.0 |
| Aug 2014 | *Dec 2014* | **2.1.0** | | 14.10 Utopic - 15.04 Vivid / **Jessie** |
| Oct 2014 | | | | 14.10 Utopic | 2.1.0 | 2.7.8 3.4.2 |
| Nov 2014 | *Sep 2015* | 2.2.0 |
| Feb 2015 | *Sep 2015* | 2.3.0 |
| Apr 2015 | *May 2017* | 2.4.0 |
| Apr 2015 | **Jun 2020** | | | **Debian 8 Jessie** | 2.1.4 | 2.7.9 3.4.2 |
| Apr 2015 | | | | 15.04 Vivid | 2.1.4 | 2.7.9 3.4.3 |
| Jul 2015 | *May 2017* | 2.5.0 | | 15.10 Wily |
| Sep 2015 | *May 2017* | 2.6.0 |
| Sep 2015 | **Sep 2020** | | **3.5.0** | **16.04 Xenial** - 17.04 Zesty / **Stretch** |
| Oct 2015 | | | | 15.10 Wily | 2.5.0 | 2.7.9 3.4.3 |
| Jan 2016 | *Jul 2017* | **2.7.0** | | **16.04 Xenial** |
| Mar 2016 | *Jul 2017* | 2.8.0 |
| Apr 2016 | **Apr 2024** | | | **16.04 Xenial** | 2.7.4 | 2.7.11 3.5.1 |
| Jun 2016 | *Jul 2017* | 2.9.0 | | 16.10 Yakkety |
| Sep 2016 | *Sep 2017* | 2.10.0 |
| Oct 2016 | | | | 16.10 Yakkety | 2.9.3 | 2.7.11 3.5.1 |
| Nov 2016 | *Sep 2017* | **2.11.0** | | 17.04 Zesty / **Stretch** |
| Dec 2016 | **Dec 2021** | | **3.6.0** | 17.10 Artful - **18.04 Bionic** - 18.10 Cosmic |
| Feb 2017 | *Sep 2017* | 2.12.0 |
| Apr 2017 | | | | 17.04 Zesty | 2.11.0 | 2.7.13 3.5.3 |
| May 2017 | *May 2018* | 2.13.0 |
| Jun 2017 | **Jun 2022** | | | **Debian 9 Stretch** | 2.11.0 | 2.7.13 3.5.3 |
| Aug 2017 | *Dec 2019* | 2.14.0 | | 17.10 Artful |
| Oct 2017 | *Dec 2019* | 2.15.0 |
| Oct 2017 | | | | 17.10 Artful | 2.14.1 | 2.7.14 3.6.3 |
| Jan 2018 | *Dec 2019* | 2.16.0 |
| Apr 2018 | *Dec 2019* | 2.17.0 | | **18.04 Bionic** |
| Apr 2018 | **Apr 2028** | | | **18.04 Bionic** | 2.17.0 | 2.7.15 3.6.5 |
| Jun 2018 | *Dec 2019* | 2.18.0 |
| Jun 2018 | **Jun 2023** | | 3.7.0 | 19.04 Disco - **20.04 Focal** / **Buster** |
| Sep 2018 | *Dec 2019* | 2.19.0 | | 18.10 Cosmic |
| Oct 2018 | | | | 18.10 Cosmic | 2.19.1 | 2.7.15 3.6.6 |
| Dec 2018 | *Dec 2019* | **2.20.0** | | 19.04 Disco / **Buster** |
| Feb 2019 | *Dec 2019* | 2.21.0 |
| Apr 2019 | | | | 19.04 Disco | 2.20.1 | 2.7.16 3.7.3 |
| Jun 2019 | | 2.22.0 |
| Jul 2019 | **Jul 2024** | | | **Debian 10 Buster** | 2.20.1 | 2.7.16 3.7.3 |
| Aug 2019 | | 2.23.0 |
| Oct 2019 | **Oct 2024** | | 3.8.0 |
| Oct 2019 | | | | 19.10 Eoan | 2.20.1 | 2.7.17 3.7.5 |
| Nov 2019 | | 2.24.0 |
| Jan 2020 | | 2.25.0 | | **20.04 Focal** |
| Apr 2020 | **Apr 2030** | | | **20.04 Focal** | 2.25.0 | 2.7.17 3.7.5 |

[rel-d]: https://en.wikipedia.org/wiki/Debian_version_history
[rel-g]: https://en.wikipedia.org/wiki/Git#Releases
[rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
[rel-u]: https://en.wikipedia.org/wiki/Ubuntu_version_history#Table_of_versions
[example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion
[repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss
[go/repo-release]: https://goto.google.com/repo-release
docs/windows.md
@@ -19,7 +19,33 @@ also due to most developers not using Windows.

We will never add code specific to older versions of Windows.
It might work, but it most likely won't, so please don't bother asking.

## Symlinks
## Git worktrees

*** note
**Warning**: Repo's support for Git worktrees is new & experimental.
Please report any bugs and be sure to maintain backups!
***

The Repo 2.4 release introduced support for [Git worktrees][git-worktree].
You don't have to worry about or understand this particular feature, so don't
worry if this section of the Git manual is particularly impenetrable.

The salient point is that Git worktrees allow Repo to create repo client
checkouts that do not require symlinks at all under Windows.
This means users no longer need Administrator access to sync code.

Simply use `--worktree` when running `repo init` to opt in.

This does not effect specific Git repositories that use symlinks themselves.

[git-worktree]: https://git-scm.com/docs/git-worktree

## Symlinks by default

*** note
**NB**: This section applies to the default Repo behavior which does not use
Git worktrees (see the previous section for more info).
***

Repo will use symlinks heavily internally.
On *NIX platforms, this isn't an issue, but Windows makes it a bit difficult.
@@ -62,9 +88,8 @@ This also helps `tar` unpack symlinks, so that's nice.

## Python

You should make sure to be running Python 3.6 or newer under Windows.
Python 2 might work, but due to already limited platform testing, you should
only run newer Python versions.
Python 3.6 or newer is required.
Python 2 is known to be broken when running under Windows.
See our [Python Support](./python-support.md) document for more details.

You can grab the latest Windows installer here:<br>
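As a concrete sketch of opting in, a new checkout on Windows could be initialized like this (the manifest URL is only an example; the flags are the ones the section above describes):

```python
import subprocess

# `--worktree` makes repo use `git worktree` instead of symlinks, so no
# Administrator rights are needed for the sync.
subprocess.run(['repo', 'init', '--worktree',
                '-u', 'https://android.googlesource.com/platform/manifest'], check=True)
subprocess.run(['repo', 'sync'], check=True)
```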
editor.py
@@ -24,6 +24,7 @@ import tempfile
from error import EditorError
import platform_utils


class Editor(object):
"""Manages the user's preferred text editor."""

@@ -57,7 +58,7 @@ class Editor(object):

if os.getenv('TERM') == 'dumb':
print(
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb. Please configure at
least one of these before using this command.""", file=sys.stderr)
sys.exit(1)
@@ -104,10 +105,10 @@ least one of these before using this command.""", file=sys.stderr)
rc = subprocess.Popen(args, shell=shell).wait()
except OSError as e:
raise EditorError('editor failed, %s: %s %s'
% (str(e), editor, path))
% (str(e), editor, path))
if rc != 0:
raise EditorError('editor failed with exit status %d: %s %s'
% (rc, editor, path))
% (rc, editor, path))

with open(path, mode='rb') as fd2:
return fd2.read().decode('utf-8')
23  error.py
@@ -14,17 +14,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.


class ManifestParseError(Exception):
"""Failed to parse the manifest file.
"""


class ManifestInvalidRevisionError(Exception):
"""The revision value in a project is incorrect.
"""


class ManifestInvalidPathError(Exception):
"""A path used in <copyfile> or <linkfile> is incorrect.
"""


class NoManifestException(Exception):
"""The required manifest does not exist.
"""

def __init__(self, path, reason):
super(NoManifestException, self).__init__()
self.path = path
@@ -33,9 +42,11 @@ class NoManifestException(Exception):
def __str__(self):
return self.reason


class EditorError(Exception):
"""Unspecified error from the user's text editor.
"""

def __init__(self, reason):
super(EditorError, self).__init__()
self.reason = reason
@@ -43,9 +54,11 @@ class EditorError(Exception):
def __str__(self):
return self.reason


class GitError(Exception):
"""Unspecified internal error from git.
"""

def __init__(self, command):
super(GitError, self).__init__()
self.command = command
@@ -53,9 +66,11 @@ class GitError(Exception):
def __str__(self):
return self.command


class UploadError(Exception):
"""A bundle upload to Gerrit did not succeed.
"""

def __init__(self, reason):
super(UploadError, self).__init__()
self.reason = reason
@@ -63,9 +78,11 @@ class UploadError(Exception):
def __str__(self):
return self.reason


class DownloadError(Exception):
"""Cannot download a repository.
"""

def __init__(self, reason):
super(DownloadError, self).__init__()
self.reason = reason
@@ -73,9 +90,11 @@ class DownloadError(Exception):
def __str__(self):
return self.reason


class NoSuchProjectError(Exception):
"""A specified project does not exist in the work tree.
"""

def __init__(self, name=None):
super(NoSuchProjectError, self).__init__()
self.name = name
@@ -89,6 +108,7 @@ class NoSuchProjectError(Exception):
class InvalidProjectGroupsError(Exception):
"""A specified project is not suitable for the specified groups
"""

def __init__(self, name=None):
super(InvalidProjectGroupsError, self).__init__()
self.name = name
@@ -98,15 +118,18 @@ class InvalidProjectGroupsError(Exception):
return 'in current directory'
return self.name


class RepoChangedException(Exception):
"""Thrown if 'repo sync' results in repo updating its internal
repo or manifest repositories. In this special case we must
use exec to re-execute repo with the new code and manifest.
"""

def __init__(self, extra_args=None):
super(RepoChangedException, self).__init__()
self.extra_args = extra_args or []


class HookError(Exception):
"""Thrown if a 'repo-hook' could not be run.
event_log.py
@@ -23,6 +23,7 @@ TASK_COMMAND = 'command'
TASK_SYNC_NETWORK = 'sync-network'
TASK_SYNC_LOCAL = 'sync-local'


class EventLog(object):
"""Event log that records events that occurred during a repo invocation.

@@ -138,7 +139,7 @@ class EventLog(object):
Returns:
A dictionary of the event added to the log.
"""
event['status'] = self.GetStatusString(success)
event['status'] = self.GetStatusString(success)
event['finish_time'] = finish
return event

@@ -165,6 +166,7 @@ class EventLog(object):
# An integer id that is unique across this invocation of the program.
_EVENT_ID = multiprocessing.Value('i', 1)


def _NextEventId():
"""Helper function for grabbing the next unique id.
122  git_command.py
@@ -16,6 +16,7 @@

from __future__ import print_function
import os
import re
import sys
import subprocess
import tempfile
@@ -28,7 +29,17 @@ from repo_trace import REPO_TRACE, IsTrace, Trace
from wrapper import Wrapper

GIT = 'git'
MIN_GIT_VERSION = (1, 5, 4)
# NB: These do not need to be kept in sync with the repo launcher script.
# These may be much newer as it allows the repo launcher to roll between
# different repo releases while source versions might require a newer git.
#
# The soft version is when we start warning users that the version is old and
# we'll be dropping support for it. We'll refuse to work with versions older
# than the hard version.
#
# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
MIN_GIT_VERSION_SOFT = (1, 9, 1)
MIN_GIT_VERSION_HARD = (1, 7, 2)
GIT_DIR = 'GIT_DIR'

LAST_GITDIR = None
@@ -37,6 +48,36 @@ LAST_CWD = None
_ssh_proxy_path = None
_ssh_sock_path = None
_ssh_clients = []
_ssh_version = None


def _run_ssh_version():
"""run ssh -V to display the version number"""
return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()


def _parse_ssh_version(ver_str=None):
"""parse a ssh version string into a tuple"""
if ver_str is None:
ver_str = _run_ssh_version()
m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
if m:
return tuple(int(x) for x in m.group(1).split('.'))
else:
return ()


def ssh_version():
"""return ssh version as a tuple"""
global _ssh_version
if _ssh_version is None:
try:
_ssh_version = _parse_ssh_version()
except subprocess.CalledProcessError:
print('fatal: unable to detect ssh version', file=sys.stderr)
sys.exit(1)
return _ssh_version


def ssh_sock(create=True):
global _ssh_sock_path
@@ -46,28 +87,36 @@ def ssh_sock(create=True):
tmp_dir = '/tmp'
if not os.path.exists(tmp_dir):
tmp_dir = tempfile.gettempdir()
if ssh_version() < (6, 7):
tokens = '%r@%h:%p'
else:
tokens = '%C'  # hash of %l%h%p%r
_ssh_sock_path = os.path.join(
tempfile.mkdtemp('', 'ssh-', tmp_dir),
'master-%r@%h:%p')
tempfile.mkdtemp('', 'ssh-', tmp_dir),
'master-' + tokens)
return _ssh_sock_path


def _ssh_proxy():
global _ssh_proxy_path
if _ssh_proxy_path is None:
_ssh_proxy_path = os.path.join(
os.path.dirname(__file__),
'git_ssh')
os.path.dirname(__file__),
'git_ssh')
return _ssh_proxy_path


def _add_ssh_client(p):
_ssh_clients.append(p)


def _remove_ssh_client(p):
try:
_ssh_clients.remove(p)
except ValueError:
pass


def terminate_ssh_clients():
global _ssh_clients
for p in _ssh_clients:
@@ -78,8 +127,10 @@ def terminate_ssh_clients():
pass
_ssh_clients = []


_git_version = None


class _GitCall(object):
def version_tuple(self):
global _git_version
@@ -91,12 +142,15 @@ class _GitCall(object):
return _git_version

def __getattr__(self, name):
name = name.replace('_','-')
name = name.replace('_', '-')

def fun(*cmdv):
command = [name]
command.extend(cmdv)
return GitCommand(None, command).Wait() == 0
return fun


git = _GitCall()

@@ -177,8 +231,10 @@ class UserAgent(object):

return self._git_ua


user_agent = UserAgent()


def git_require(min_version, fail=False, msg=''):
git_version = git.version_tuple()
if min_version <= git_version:
@@ -191,42 +247,41 @@ def git_require(min_version, fail=False, msg=''):
sys.exit(1)
return False

def _setenv(env, name, value):
env[name] = value.encode()

class GitCommand(object):
def __init__(self,
project,
cmdv,
bare = False,
provide_stdin = False,
capture_stdout = False,
capture_stderr = False,
disable_editor = False,
ssh_proxy = False,
cwd = None,
gitdir = None):
bare=False,
provide_stdin=False,
capture_stdout=False,
capture_stderr=False,
merge_output=False,
disable_editor=False,
ssh_proxy=False,
cwd=None,
gitdir=None):
env = self._GetBasicEnv()

# If we are not capturing std* then need to print it.
self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}

if disable_editor:
_setenv(env, 'GIT_EDITOR', ':')
env['GIT_EDITOR'] = ':'
if ssh_proxy:
_setenv(env, 'REPO_SSH_SOCK', ssh_sock())
_setenv(env, 'GIT_SSH', _ssh_proxy())
_setenv(env, 'GIT_SSH_VARIANT', 'ssh')
env['REPO_SSH_SOCK'] = ssh_sock()
env['GIT_SSH'] = _ssh_proxy()
env['GIT_SSH_VARIANT'] = 'ssh'
if 'http_proxy' in env and 'darwin' == sys.platform:
s = "'http.proxy=%s'" % (env['http_proxy'],)
p = env.get('GIT_CONFIG_PARAMETERS')
if p is not None:
s = p + ' ' + s
_setenv(env, 'GIT_CONFIG_PARAMETERS', s)
env['GIT_CONFIG_PARAMETERS'] = s
if 'GIT_ALLOW_PROTOCOL' not in env:
_setenv(env, 'GIT_ALLOW_PROTOCOL',
'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
_setenv(env, 'GIT_HTTP_USER_AGENT', user_agent.git)
env['GIT_ALLOW_PROTOCOL'] = (
'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
env['GIT_HTTP_USER_AGENT'] = user_agent.git

if project:
if not cwd:
@@ -237,7 +292,7 @@ class GitCommand(object):
command = [GIT]
if bare:
if gitdir:
_setenv(env, GIT_DIR, gitdir)
env[GIT_DIR] = gitdir
cwd = None
command.append(cmdv[0])
# Need to use the --progress flag for fetch/clone so output will be
@@ -253,7 +308,7 @@ class GitCommand(object):
stdin = None

stdout = subprocess.PIPE
stderr = subprocess.PIPE
stderr = subprocess.STDOUT if merge_output else subprocess.PIPE

if IsTrace():
global LAST_CWD
@@ -281,15 +336,17 @@ class GitCommand(object):
dbg += ' 1>|'
if stderr == subprocess.PIPE:
dbg += ' 2>|'
elif stderr == subprocess.STDOUT:
dbg += ' 2>&1'
Trace('%s', dbg)

try:
p = subprocess.Popen(command,
cwd = cwd,
env = env,
stdin = stdin,
stdout = stdout,
stderr = stderr)
cwd=cwd,
env=env,
stdin=stdin,
stdout=stdout,
stderr=stderr)
except Exception as e:
raise GitError('%s: %s' % (command[1], e))

@@ -328,7 +385,8 @@ class GitCommand(object):
p = self.process
s_in = platform_utils.FileDescriptorStreams.create()
s_in.add(p.stdout, sys.stdout, 'stdout')
s_in.add(p.stderr, sys.stderr, 'stderr')
if p.stderr is not None:
s_in.add(p.stderr, sys.stderr, 'stderr')
self.stdout = ''
self.stderr = ''
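For context on the `ssh_version()` check above (OpenSSH 6.7 introduced the `%C` ControlPath token), a standalone sketch of the same parsing idea, with made-up sample strings:

```python
import re

def parse_openssh_version(ver_str):
  """Turn `ssh -V` output such as 'OpenSSH_7.6p1 ...' into a comparable tuple."""
  m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
  return tuple(int(x) for x in m.group(1).split('.')) if m else ()

print(parse_openssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n  7 Dec 2017\n'))
# -> (7, 6): new enough, so ssh_sock() can use the '%C' token
print(parse_openssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n') < (6, 7))
# -> True: fall back to the old 'master-%r@%h:%p' socket name
```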
130  git_config.py
@@ -21,6 +21,7 @@ import errno
import json
import os
import re
import signal
import ssl
import subprocess
import sys
@@ -41,7 +42,6 @@ else:
urllib.request = urllib2
urllib.error = urllib2

from signal import SIGTERM
from error import GitError, UploadError
import platform_utils
from repo_trace import Trace
@@ -59,39 +59,47 @@ ID_RE = re.compile(r'^[0-9a-f]{40}$')

REVIEW_CACHE = dict()


def IsChange(rev):
return rev.startswith(R_CHANGES)


def IsId(rev):
return ID_RE.match(rev)


def IsTag(rev):
return rev.startswith(R_TAGS)


def IsImmutable(rev):
return IsChange(rev) or IsId(rev) or IsTag(rev)


def _key(name):
parts = name.split('.')
if len(parts) < 2:
return name.lower()
parts[ 0] = parts[ 0].lower()
parts[0] = parts[0].lower()
parts[-1] = parts[-1].lower()
return '.'.join(parts)


class GitConfig(object):
_ForUser = None

_USER_CONFIG = '~/.gitconfig'

@classmethod
def ForUser(cls):
if cls._ForUser is None:
cls._ForUser = cls(configfile = os.path.expanduser('~/.gitconfig'))
cls._ForUser = cls(configfile=os.path.expanduser(cls._USER_CONFIG))
return cls._ForUser

@classmethod
def ForRepository(cls, gitdir, defaults=None):
return cls(configfile = os.path.join(gitdir, 'config'),
defaults = defaults)
return cls(configfile=os.path.join(gitdir, 'config'),
defaults=defaults)

def __init__(self, configfile, defaults=None, jsonFile=None):
self.file = configfile
@@ -104,18 +112,55 @@ class GitConfig(object):
self._json = jsonFile
if self._json is None:
self._json = os.path.join(
os.path.dirname(self.file),
'.repo_' + os.path.basename(self.file) + '.json')
os.path.dirname(self.file),
'.repo_' + os.path.basename(self.file) + '.json')

def Has(self, name, include_defaults = True):
def Has(self, name, include_defaults=True):
"""Return true if this configuration file has the key.
"""
if _key(name) in self._cache:
return True
if include_defaults and self.defaults:
return self.defaults.Has(name, include_defaults = True)
return self.defaults.Has(name, include_defaults=True)
return False

def GetInt(self, name):
"""Returns an integer from the configuration file.

This follows the git config syntax.

Args:
name: The key to lookup.

Returns:
None if the value was not defined, or is not a boolean.
Otherwise, the number itself.
"""
v = self.GetString(name)
if v is None:
return None
v = v.strip()

mult = 1
if v.endswith('k'):
v = v[:-1]
mult = 1024
elif v.endswith('m'):
v = v[:-1]
mult = 1024 * 1024
elif v.endswith('g'):
v = v[:-1]
mult = 1024 * 1024 * 1024

base = 10
if v.startswith('0x'):
base = 16

try:
return int(v, base=base) * mult
except ValueError:
return None

def GetBoolean(self, name):
"""Returns a boolean from the configuration file.
None : The value was not defined, or is not a boolean.
@@ -142,7 +187,7 @@ class GitConfig(object):
v = self._cache[_key(name)]
except KeyError:
if self.defaults:
return self.defaults.GetString(name, all_keys = all_keys)
return self.defaults.GetString(name, all_keys=all_keys)
v = []

if not all_keys:
@@ -153,7 +198,7 @@ class GitConfig(object):
r = []
r.extend(v)
if self.defaults:
r.extend(self.defaults.GetString(name, all_keys = True))
r.extend(self.defaults.GetString(name, all_keys=True))
return r

def SetString(self, name, value):
@@ -217,7 +262,7 @@ class GitConfig(object):
"""
return self._sections.get(section, set())

def HasSection(self, section, subsection = ''):
def HasSection(self, section, subsection=''):
"""Does at least one key in section.subsection exist?
"""
try:
@@ -268,8 +313,7 @@ class GitConfig(object):

def _ReadJson(self):
try:
if os.path.getmtime(self._json) \
<= os.path.getmtime(self.file):
if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
platform_utils.remove(self._json)
return None
except OSError:
@@ -318,19 +362,25 @@ class GitConfig(object):
return c

def _do(self, *args):
command = ['config', '--file', self.file]
command = ['config', '--file', self.file, '--includes']
command.extend(args)

p = GitCommand(None,
command,
capture_stdout = True,
capture_stderr = True)
capture_stdout=True,
capture_stderr=True)
if p.Wait() == 0:
return p.stdout
else:
GitError('git config %s: %s' % (str(args), p.stderr))


class RepoConfig(GitConfig):
"""User settings for repo itself."""

_USER_CONFIG = '~/.repoconfig/config'


class RefSpec(object):
"""A Git refspec line, split into its components:

@@ -392,6 +442,7 @@ _master_keys = set()
_ssh_master = True
_master_keys_lock = None


def init_ssh():
"""Should be called once at the start of repo to init ssh master handling.

@@ -401,6 +452,7 @@ def init_ssh():
assert _master_keys_lock is None, "Should only call init_ssh once"
_master_keys_lock = _threading.Lock()


def _open_ssh(host, port=None):
global _ssh_master

@@ -421,17 +473,17 @@ def _open_ssh(host, port=None):
if key in _master_keys:
return True

if not _ssh_master \
or 'GIT_SSH' in os.environ \
or sys.platform in ('win32', 'cygwin'):
if (not _ssh_master
or 'GIT_SSH' in os.environ
or sys.platform in ('win32', 'cygwin')):
# failed earlier, or cygwin ssh can't do this
#
return False

# We will make two calls to ssh; this is the common part of both calls.
command_base = ['ssh',
'-o','ControlPath %s' % ssh_sock(),
host]
'-o', 'ControlPath %s' % ssh_sock(),
host]
if port is not None:
command_base[1:1] = ['-p', str(port)]

@@ -439,13 +491,13 @@ def _open_ssh(host, port=None):
# ...but before actually starting a master, we'll double-check. This can
# be important because we can't tell that that 'git@myhost.com' is the same
# as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
check_command = command_base + ['-O','check']
check_command = command_base + ['-O', 'check']
try:
Trace(': %s', ' '.join(check_command))
check_process = subprocess.Popen(check_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
check_process.communicate() # read output, but ignore it...
check_process.communicate()  # read output, but ignore it...
isnt_running = check_process.wait()

if not isnt_running:
@@ -458,16 +510,14 @@ def _open_ssh(host, port=None):
# to the log there.
pass

command = command_base[:1] + \
['-M', '-N'] + \
command_base[1:]
command = command_base[:1] + ['-M', '-N'] + command_base[1:]
try:
Trace(': %s', ' '.join(command))
p = subprocess.Popen(command)
except Exception as e:
_ssh_master = False
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
% (host,port, str(e)), file=sys.stderr)
% (host, port, str(e)), file=sys.stderr)
return False

time.sleep(1)
@@ -481,6 +531,7 @@ def _open_ssh(host, port=None):
finally:
_master_keys_lock.release()


def close_ssh():
global _master_keys_lock

@@ -488,7 +539,7 @@ def close_ssh():

for p in _master_processes:
try:
os.kill(p.pid, SIGTERM)
os.kill(p.pid, signal.SIGTERM)
p.wait()
except OSError:
pass
@@ -505,15 +556,18 @@ def close_ssh():
# We're done with the lock, so we can delete it.
_master_keys_lock = None


URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')


def GetSchemeFromUrl(url):
m = URI_ALL.match(url)
if m:
return m.group(1)
return None


@contextlib.contextmanager
def GetUrlCookieFile(url, quiet):
if url.startswith('persistent-'):
@@ -528,7 +582,7 @@ def GetUrlCookieFile(url, quiet):
cookiefile = None
proxy = None
for line in p.stdout:
line = line.strip()
line = line.strip().decode('utf-8')
if line.startswith(cookieprefix):
cookiefile = os.path.expanduser(line[len(cookieprefix):])
if line.startswith(proxyprefix):
@@ -540,7 +594,7 @@ def GetUrlCookieFile(url, quiet):
finally:
p.stdin.close()
if p.wait():
err_msg = p.stderr.read()
err_msg = p.stderr.read().decode('utf-8')
if ' -print_config' in err_msg:
pass  # Persistent proxy doesn't support -print_config.
elif not quiet:
@@ -554,6 +608,7 @@ def GetUrlCookieFile(url, quiet):
cookiefile = os.path.expanduser(cookiefile)
yield cookiefile, None


def _preconnect(url):
m = URI_ALL.match(url)
if m:
@@ -574,9 +629,11 @@ def _preconnect(url):

return False


class Remote(object):
"""Configuration options related to a remote.
"""

def __init__(self, config, name):
self._config = config
self.name = name
@@ -585,7 +642,7 @@ class Remote(object):
self.review = self._Get('review')
self.projectname = self._Get('projectname')
self.fetch = list(map(RefSpec.FromString,
self._Get('fetch', all_keys=True)))
self._Get('fetch', all_keys=True)))
self._review_url = None

def _InsteadOf(self):
@@ -599,8 +656,8 @@ class Remote(object):
insteadOfList = globCfg.GetString(key, all_keys=True)

for insteadOf in insteadOfList:
if self.url.startswith(insteadOf) \
and len(insteadOf) > len(longest):
if (self.url.startswith(insteadOf)
and len(insteadOf) > len(longest)):
longest = insteadOf
longestUrl = url

@@ -731,12 +788,13 @@ class Remote(object):

def _Get(self, key, all_keys=False):
key = 'remote.%s.%s' % (self.name, key)
return self._config.GetString(key, all_keys = all_keys)
return self._config.GetString(key, all_keys=all_keys)


class Branch(object):
"""Configuration options related to a single branch.
"""

def __init__(self, config, name):
self._config = config
self.name = name
@@ -780,4 +838,4 @@ class Branch(object):

def _Get(self, key, all_keys=False):
key = 'branch.%s.%s' % (self.name, key)
return self._config.GetString(key, all_keys = all_keys)
return self._config.GetString(key, all_keys=all_keys)
git_refs.py: 12 lines changed
@@ -18,12 +18,14 @@ import os
from repo_trace import Trace
import platform_utils

HEAD = 'HEAD'
HEAD = 'HEAD'
R_CHANGES = 'refs/changes/'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
R_PUB = 'refs/published/'
R_M = 'refs/remotes/m/'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
R_PUB = 'refs/published/'
R_WORKTREE = 'refs/worktree/'
R_WORKTREE_M = R_WORKTREE + 'm/'
R_M = 'refs/remotes/m/'


class GitRefs(object):

@ -29,12 +29,15 @@ from error import ManifestParseError
|
||||
|
||||
NUM_BATCH_RETRIEVE_REVISIONID = 32
|
||||
|
||||
|
||||
def get_gitc_manifest_dir():
|
||||
return wrapper.Wrapper().get_gitc_manifest_dir()
|
||||
|
||||
|
||||
def parse_clientdir(gitc_fs_path):
|
||||
return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
|
||||
|
||||
|
||||
def _set_project_revisions(projects):
|
||||
"""Sets the revisionExpr for a list of projects.
|
||||
|
||||
@ -52,7 +55,7 @@ def _set_project_revisions(projects):
|
||||
project.remote.url,
|
||||
project.revisionExpr],
|
||||
capture_stdout=True, cwd='/tmp'))
|
||||
for project in projects if not git_config.IsId(project.revisionExpr)]
|
||||
for project in projects if not git_config.IsId(project.revisionExpr)]
|
||||
for proj, gitcmd in project_gitcmds:
|
||||
if gitcmd.Wait():
|
||||
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
|
||||
@ -63,6 +66,7 @@ def _set_project_revisions(projects):
|
||||
(proj.remote.url, proj.revisionExpr))
|
||||
proj.revisionExpr = revisionExpr
|
||||
|
||||
|
||||
def _manifest_groups(manifest):
|
||||
"""Returns the manifest group string that should be synced
|
||||
|
||||
@ -77,6 +81,7 @@ def _manifest_groups(manifest):
|
||||
groups = 'default,platform-' + platform.system().lower()
|
||||
return groups
|
||||
|
||||
|
||||
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
"""Generate a manifest for shafsd to use for this GITC client.
|
||||
|
||||
@ -104,11 +109,11 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
if not proj.upstream and not git_config.IsId(proj.revisionExpr):
|
||||
proj.upstream = proj.revisionExpr
|
||||
|
||||
if not path in gitc_manifest.paths:
|
||||
if path not in gitc_manifest.paths:
|
||||
# Any new projects need their first revision, even if we weren't asked
|
||||
# for them.
|
||||
projects.append(proj)
|
||||
elif not path in paths:
|
||||
elif path not in paths:
|
||||
# And copy revisions from the previous manifest if we're not updating
|
||||
# them now.
|
||||
gitc_proj = gitc_manifest.paths[path]
|
||||
@ -121,7 +126,7 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
index = 0
|
||||
while index < len(projects):
|
||||
_set_project_revisions(
|
||||
projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
|
||||
projects[index:(index + NUM_BATCH_RETRIEVE_REVISIONID)])
|
||||
index += NUM_BATCH_RETRIEVE_REVISIONID
|
||||
|
||||
if gitc_manifest is not None:
|
||||
@ -140,6 +145,7 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
# Save the manifest.
|
||||
save_manifest(manifest)
|
||||
|
||||
|
||||
def save_manifest(manifest, client_dir=None):
|
||||
"""Save the manifest file in the client_dir.
|
||||
|
||||
|
hooks/commit-msg: 202 lines changed
@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 2.14.6
|
||||
# From Gerrit Code Review 3.1.3
|
||||
#
|
||||
# Part of Gerrit Code Review (https://www.gerritcodereview.com/)
|
||||
#
|
||||
@ -16,176 +16,48 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
unset GREP_OPTIONS
|
||||
# avoid [[ which is not POSIX sh.
|
||||
if test "$#" != 1 ; then
|
||||
echo "$0 requires an argument."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CHANGE_ID_AFTER="Bug|Depends-On|Issue|Test|Feature|Fixes|Fixed"
|
||||
MSG="$1"
|
||||
if test ! -f "$1" ; then
|
||||
echo "file does not exist: $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for, and add if missing, a unique Change-Id
|
||||
#
|
||||
add_ChangeId() {
|
||||
clean_message=`sed -e '
|
||||
/^diff --git .*/{
|
||||
s///
|
||||
q
|
||||
}
|
||||
/^Signed-off-by:/d
|
||||
/^#/d
|
||||
' "$MSG" | git stripspace`
|
||||
if test -z "$clean_message"
|
||||
then
|
||||
return
|
||||
fi
|
||||
# Do not create a change id if requested
|
||||
if test "false" = "`git config --bool --get gerrit.createChangeId`" ; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Do not add Change-Id to temp commits
|
||||
if echo "$clean_message" | head -1 | grep -q '^\(fixup\|squash\)!'
|
||||
then
|
||||
return
|
||||
fi
|
||||
# $RANDOM will be undefined if not using bash, so don't use set -u
|
||||
random=$( (whoami ; hostname ; date; cat $1 ; echo $RANDOM) | git hash-object --stdin)
|
||||
dest="$1.tmp.${random}"
|
||||
|
||||
if test "false" = "`git config --bool --get gerrit.createChangeId`"
|
||||
then
|
||||
return
|
||||
fi
|
||||
trap 'rm -f "${dest}"' EXIT
|
||||
|
||||
# Does Change-Id: already exist? if so, exit (no change).
|
||||
if grep -i '^Change-Id:' "$MSG" >/dev/null
|
||||
then
|
||||
return
|
||||
fi
|
||||
if ! git stripspace --strip-comments < "$1" > "${dest}" ; then
|
||||
echo "cannot strip comments from $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
id=`_gen_ChangeId`
|
||||
T="$MSG.tmp.$$"
|
||||
AWK=awk
|
||||
if [ -x /usr/xpg4/bin/awk ]; then
|
||||
# Solaris AWK is just too broken
|
||||
AWK=/usr/xpg4/bin/awk
|
||||
fi
|
||||
if test ! -s "${dest}" ; then
|
||||
echo "file is empty: $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get core.commentChar from git config or use default symbol
|
||||
commentChar=`git config --get core.commentChar`
|
||||
commentChar=${commentChar:-#}
|
||||
# Avoid the --in-place option which only appeared in Git 2.8
|
||||
# Avoid the --if-exists option which only appeared in Git 2.15
|
||||
if ! git -c trailer.ifexists=doNothing interpret-trailers \
|
||||
--trailer "Change-Id: I${random}" < "$1" > "${dest}" ; then
|
||||
echo "cannot insert change-id line in $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# How this works:
|
||||
# - parse the commit message as (textLine+ blankLine*)*
|
||||
# - assume textLine+ to be a footer until proven otherwise
|
||||
# - exception: the first block is not footer (as it is the title)
|
||||
# - read textLine+ into a variable
|
||||
# - then count blankLines
|
||||
# - once the next textLine appears, print textLine+ blankLine* as these
|
||||
# aren't footer
|
||||
# - in END, the last textLine+ block is available for footer parsing
|
||||
$AWK '
|
||||
BEGIN {
|
||||
# while we start with the assumption that textLine+
|
||||
# is a footer, the first block is not.
|
||||
isFooter = 0
|
||||
footerComment = 0
|
||||
blankLines = 0
|
||||
}
|
||||
|
||||
# Skip lines starting with commentChar without any spaces before it.
|
||||
/^'"$commentChar"'/ { next }
|
||||
|
||||
# Skip the line starting with the diff command and everything after it,
|
||||
# up to the end of the file, assuming it is only patch data.
|
||||
# If more than one line before the diff was empty, strip all but one.
|
||||
/^diff --git / {
|
||||
blankLines = 0
|
||||
while (getline) { }
|
||||
next
|
||||
}
|
||||
|
||||
# Count blank lines outside footer comments
|
||||
/^$/ && (footerComment == 0) {
|
||||
blankLines++
|
||||
next
|
||||
}
|
||||
|
||||
# Catch footer comment
|
||||
/^\[[a-zA-Z0-9-]+:/ && (isFooter == 1) {
|
||||
footerComment = 1
|
||||
}
|
||||
|
||||
/]$/ && (footerComment == 1) {
|
||||
footerComment = 2
|
||||
}
|
||||
|
||||
# We have a non-blank line after blank lines. Handle this.
|
||||
(blankLines > 0) {
|
||||
print lines
|
||||
for (i = 0; i < blankLines; i++) {
|
||||
print ""
|
||||
}
|
||||
|
||||
lines = ""
|
||||
blankLines = 0
|
||||
isFooter = 1
|
||||
footerComment = 0
|
||||
}
|
||||
|
||||
# Detect that the current block is not the footer
|
||||
(footerComment == 0) && (!/^\[?[a-zA-Z0-9-]+:/ || /^[a-zA-Z0-9-]+:\/\//) {
|
||||
isFooter = 0
|
||||
}
|
||||
|
||||
{
|
||||
# We need this information about the current last comment line
|
||||
if (footerComment == 2) {
|
||||
footerComment = 0
|
||||
}
|
||||
if (lines != "") {
|
||||
lines = lines "\n";
|
||||
}
|
||||
lines = lines $0
|
||||
}
|
||||
|
||||
# Footer handling:
|
||||
# If the last block is considered a footer, splice in the Change-Id at the
|
||||
# right place.
|
||||
# Look for the right place to inject Change-Id by considering
|
||||
# CHANGE_ID_AFTER. Keys listed in it (case insensitive) come first,
|
||||
# then Change-Id, then everything else (eg. Signed-off-by:).
|
||||
#
|
||||
# Otherwise just print the last block, a new line and the Change-Id as a
|
||||
# block of its own.
|
||||
END {
|
||||
unprinted = 1
|
||||
if (isFooter == 0) {
|
||||
print lines "\n"
|
||||
lines = ""
|
||||
}
|
||||
changeIdAfter = "^(" tolower("'"$CHANGE_ID_AFTER"'") "):"
|
||||
numlines = split(lines, footer, "\n")
|
||||
for (line = 1; line <= numlines; line++) {
|
||||
if (unprinted && match(tolower(footer[line]), changeIdAfter) != 1) {
|
||||
unprinted = 0
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
print footer[line]
|
||||
}
|
||||
if (unprinted) {
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
}' "$MSG" > "$T" && mv "$T" "$MSG" || rm -f "$T"
|
||||
}
|
||||
_gen_ChangeIdInput() {
|
||||
echo "tree `git write-tree`"
|
||||
if parent=`git rev-parse "HEAD^0" 2>/dev/null`
|
||||
then
|
||||
echo "parent $parent"
|
||||
fi
|
||||
echo "author `git var GIT_AUTHOR_IDENT`"
|
||||
echo "committer `git var GIT_COMMITTER_IDENT`"
|
||||
echo
|
||||
printf '%s' "$clean_message"
|
||||
}
|
||||
_gen_ChangeId() {
|
||||
_gen_ChangeIdInput |
|
||||
git hash-object -t commit --stdin
|
||||
}
|
||||
|
||||
|
||||
add_ChangeId
|
||||
if ! mv "${dest}" "$1" ; then
|
||||
echo "cannot mv ${dest} to $1"
|
||||
exit 1
|
||||
fi
|
||||
|
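The rewritten hook above (imported from Gerrit 3.1.3) drops the old awk footer parser and lets git interpret-trailers place the Change-Id. A rough Python equivalent of that trailer step, assuming git is on PATH; the function name and the change_id argument are illustrative, not part of the hook itself:

import subprocess

def add_change_id(msg_file, change_id):
  with open(msg_file, 'rb') as f:
    original = f.read()
  # trailer.ifexists=doNothing leaves an existing Change-Id untouched,
  # mirroring the hook's 'git -c trailer.ifexists=doNothing interpret-trailers'.
  result = subprocess.run(
      ['git', '-c', 'trailer.ifexists=doNothing', 'interpret-trailers',
       '--trailer', 'Change-Id: I%s' % change_id],
      input=original, capture_output=True)
  if result.returncode == 0:
    with open(msg_file, 'wb') as f:
      f.write(result.stdout)
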
main.py: 157 lines changed
@ -26,6 +26,7 @@ import getpass
|
||||
import netrc
|
||||
import optparse
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
import textwrap
|
||||
import time
|
||||
@ -47,8 +48,8 @@ except ImportError:
|
||||
from color import SetDefaultColoring
|
||||
import event_log
|
||||
from repo_trace import SetTrace
|
||||
from git_command import git, GitCommand, user_agent
|
||||
from git_config import init_ssh, close_ssh
|
||||
from git_command import user_agent
|
||||
from git_config import init_ssh, close_ssh, RepoConfig
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from command import GitcAvailableCommand, GitcClientCommand
|
||||
@ -69,7 +70,35 @@ from wrapper import WrapperPath, Wrapper
|
||||
from subcmds import all_commands
|
||||
|
||||
if not is_python3():
|
||||
input = raw_input
|
||||
input = raw_input # noqa: F821
|
||||
|
||||
# NB: These do not need to be kept in sync with the repo launcher script.
|
||||
# These may be much newer as it allows the repo launcher to roll between
|
||||
# different repo releases while source versions might require a newer python.
|
||||
#
|
||||
# The soft version is when we start warning users that the version is old and
|
||||
# we'll be dropping support for it. We'll refuse to work with versions older
|
||||
# than the hard version.
|
||||
#
|
||||
# python-3.6 is in Ubuntu Bionic.
|
||||
MIN_PYTHON_VERSION_SOFT = (3, 6)
|
||||
MIN_PYTHON_VERSION_HARD = (3, 4)
|
||||
|
||||
if sys.version_info.major < 3:
|
||||
print('repo: warning: Python 2 is no longer supported; '
|
||||
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
|
||||
file=sys.stderr)
|
||||
else:
|
||||
if sys.version_info < MIN_PYTHON_VERSION_HARD:
|
||||
print('repo: error: Python 3 version is too old; '
|
||||
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif sys.version_info < MIN_PYTHON_VERSION_SOFT:
|
||||
print('repo: warning: your Python 3 version is no longer supported; '
|
||||
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
|
||||
file=sys.stderr)
|
||||
|
||||
|
||||
global_options = optparse.OptionParser(
|
||||
usage='repo [-p|--paginate|--no-pager] COMMAND [ARGS]',
|
||||
@ -80,7 +109,7 @@ global_options.add_option('-p', '--paginate',
|
||||
dest='pager', action='store_true',
|
||||
help='display command output in the pager')
|
||||
global_options.add_option('--no-pager',
|
||||
dest='no_pager', action='store_true',
|
||||
dest='pager', action='store_false',
|
||||
help='disable the pager')
|
||||
global_options.add_option('--color',
|
||||
choices=('auto', 'always', 'never'), default=None,
|
||||
@ -101,12 +130,11 @@ global_options.add_option('--event-log',
|
||||
dest='event_log', action='store',
|
||||
help='filename of event log to append timeline to')
|
||||
|
||||
|
||||
class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
self.repodir = repodir
|
||||
self.commands = all_commands
|
||||
# add 'branch' as an alias for 'branches'
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
||||
def _ParseArgs(self, argv):
|
||||
"""Parse the main `repo` command line options."""
|
||||
@ -126,6 +154,9 @@ class _Repo(object):
|
||||
argv = []
|
||||
gopts, _gargs = global_options.parse_args(glob)
|
||||
|
||||
name, alias_args = self._ExpandAlias(name)
|
||||
argv = alias_args + argv
|
||||
|
||||
if gopts.help:
|
||||
global_options.print_help()
|
||||
commands = ' '.join(sorted(self.commands))
|
||||
@ -136,6 +167,27 @@ class _Repo(object):
|
||||
|
||||
return (name, gopts, argv)
|
||||
|
||||
def _ExpandAlias(self, name):
|
||||
"""Look up user registered aliases."""
|
||||
# We don't resolve aliases for existing subcommands. This matches git.
|
||||
if name in self.commands:
|
||||
return name, []
|
||||
|
||||
key = 'alias.%s' % (name,)
|
||||
alias = RepoConfig.ForRepository(self.repodir).GetString(key)
|
||||
if alias is None:
|
||||
alias = RepoConfig.ForUser().GetString(key)
|
||||
if alias is None:
|
||||
return name, []
|
||||
|
||||
args = alias.strip().split(' ', 1)
|
||||
name = args[0]
|
||||
if len(args) == 2:
|
||||
args = shlex.split(args[1])
|
||||
else:
|
||||
args = []
|
||||
return name, args
|
||||
|
||||
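_ExpandAlias above resolves alias.<name> first from the checkout's repo config and then from the user's ~/.repoconfig/config, and only for names that are not built-in subcommands. A small sketch of the expansion rule with a made-up 'st' alias; partition() here stands in for the split(' ', 1) used above:

import shlex

def expand_alias(name, config_value):
  # First word becomes the command name, the rest is shell-split into args.
  head, _, rest = config_value.strip().partition(' ')
  return head, shlex.split(rest) if rest else []

# A hypothetical alias registered with:
#   git config --file ~/.repoconfig/config alias.st 'status -j 8'
# expands so that 'repo st' runs the status command with ['-j', '8'].
print(expand_alias('st', 'status -j 8'))  # ('status', ['-j', '8'])
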
def _Run(self, name, gopts, argv):
|
||||
"""Execute the requested subcommand."""
|
||||
result = 0
|
||||
@ -152,7 +204,7 @@ class _Repo(object):
|
||||
SetDefaultColoring(gopts.color)
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
cmd = self.commands[name]()
|
||||
except KeyError:
|
||||
print("repo: '%s' is not a repo command. See 'repo help'." % name,
|
||||
file=sys.stderr)
|
||||
@ -188,12 +240,12 @@ class _Repo(object):
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
except NoManifestException as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
file=sys.stderr)
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
if gopts.pager:
|
||||
use_pager = True
|
||||
@ -211,9 +263,9 @@ class _Repo(object):
|
||||
cmd.ValidateOptions(copts, cargs)
|
||||
result = cmd.Execute(copts, cargs)
|
||||
except (DownloadError, ManifestInvalidRevisionError,
|
||||
NoManifestException) as e:
|
||||
NoManifestException) as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
file=sys.stderr)
|
||||
if isinstance(e, NoManifestException):
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
@ -228,7 +280,8 @@ class _Repo(object):
|
||||
if e.name:
|
||||
print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
|
||||
else:
|
||||
print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
|
||||
print('error: project group must be enabled for the project in the current directory',
|
||||
file=sys.stderr)
|
||||
result = 1
|
||||
except SystemExit as e:
|
||||
if e.code:
|
||||
@ -255,42 +308,66 @@ class _Repo(object):
|
||||
return result
|
||||
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
def _CheckWrapperVersion(ver_str, repo_path):
|
||||
"""Verify the repo launcher is new enough for this checkout.
|
||||
|
||||
Args:
|
||||
ver_str: The version string passed from the repo launcher when it ran us.
|
||||
repo_path: The path to the repo launcher that loaded us.
|
||||
"""
|
||||
# Refuse to work with really old wrapper versions. We don't test these,
|
||||
# so might as well require a somewhat recent sane version.
|
||||
# v1.15 of the repo launcher was released in ~Mar 2012.
|
||||
MIN_REPO_VERSION = (1, 15)
|
||||
min_str = '.'.join(str(x) for x in MIN_REPO_VERSION)
|
||||
|
||||
if not repo_path:
|
||||
repo_path = '~/bin/repo'
|
||||
|
||||
if not ver:
|
||||
if not ver_str:
|
||||
print('no --wrapper-version argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Pull out the version of the repo launcher we know about to compare.
|
||||
exp = Wrapper().VERSION
|
||||
ver = tuple(map(int, ver.split('.')))
|
||||
if len(ver) == 1:
|
||||
ver = (0, ver[0])
|
||||
ver = tuple(map(int, ver_str.split('.')))
|
||||
|
||||
exp_str = '.'.join(map(str, exp))
|
||||
if exp[0] > ver[0] or ver < (0, 4):
|
||||
if ver < MIN_REPO_VERSION:
|
||||
print("""
|
||||
!!! A new repo command (%5s) is available. !!!
|
||||
!!! You must upgrade before you can continue: !!!
|
||||
repo: error:
|
||||
!!! Your version of repo %s is too old.
|
||||
!!! We need at least version %s.
|
||||
!!! A new version of repo (%s) is available.
|
||||
!!! You must upgrade before you can continue:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (ver_str, min_str, exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if exp > ver:
|
||||
print("""
|
||||
... A new repo command (%5s) is available.
|
||||
print('\n... A new version of repo (%s) is available.' % (exp_str,),
|
||||
file=sys.stderr)
|
||||
if os.access(repo_path, os.W_OK):
|
||||
print("""\
|
||||
... You should upgrade soon:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (WrapperPath(), repo_path), file=sys.stderr)
|
||||
else:
|
||||
print("""\
|
||||
... New version is available at: %s
|
||||
... The launcher is run from: %s
|
||||
!!! The launcher is not writable. Please talk to your sysadmin or distro
|
||||
!!! to get an update installed.
|
||||
""" % (WrapperPath(), repo_path), file=sys.stderr)
|
||||
|
||||
|
||||
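_CheckWrapperVersion now compares plain integer tuples against MIN_REPO_VERSION = (1, 15) instead of the old two-field special case. A tiny sketch of that comparison; the helper name is illustrative:

MIN_PYTHON_STYLE_REPO_VERSION = (1, 15)  # MIN_REPO_VERSION from the diff above

def parse_version(ver_str):
  # '1.14' -> (1, 14); tuples of ints compare element by element.
  return tuple(map(int, ver_str.split('.')))

assert parse_version('1.14') < MIN_PYTHON_STYLE_REPO_VERSION   # too old, hard error
assert parse_version('2.4') >= MIN_PYTHON_STYLE_REPO_VERSION   # new enough
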
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
print('no --repo-dir argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _PruneOptions(argv, opt):
|
||||
i = 0
|
||||
while i < len(argv):
|
||||
@ -306,6 +383,7 @@ def _PruneOptions(argv, opt):
|
||||
continue
|
||||
i += 1
|
||||
|
||||
|
||||
class _UserAgentHandler(urllib.request.BaseHandler):
|
||||
def http_request(self, req):
|
||||
req.add_header('User-Agent', user_agent.repo)
|
||||
@ -315,6 +393,7 @@ class _UserAgentHandler(urllib.request.BaseHandler):
|
||||
req.add_header('User-Agent', user_agent.repo)
|
||||
return req
|
||||
|
||||
|
||||
def _AddPasswordFromUserInput(handler, msg, req):
|
||||
# If repo could not find auth info from netrc, try to get it from user input
|
||||
url = req.get_full_url()
|
||||
@ -328,22 +407,24 @@ def _AddPasswordFromUserInput(handler, msg, req):
|
||||
return
|
||||
handler.passwd.add_password(None, url, user, password)
|
||||
|
||||
|
||||
class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPBasicAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||
self, authreq, host, req, headers)
|
||||
except:
|
||||
self, authreq, host, req, headers)
|
||||
except Exception:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
@ -351,22 +432,24 @@ class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
|
||||
class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPDigestAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||
self, auth_header, host, req, headers)
|
||||
except:
|
||||
self, auth_header, host, req, headers)
|
||||
except Exception:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
@ -374,6 +457,7 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
|
||||
class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
def __init__(self):
|
||||
self.retried = 0
|
||||
@ -392,7 +476,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
|
||||
if self.retried > 3:
|
||||
raise urllib.request.HTTPError(req.get_full_url(), 401,
|
||||
"Negotiate auth failed", headers, None)
|
||||
"Negotiate auth failed", headers, None)
|
||||
else:
|
||||
self.retried += 1
|
||||
|
||||
@ -408,7 +492,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
return response
|
||||
except kerberos.GSSError:
|
||||
return None
|
||||
except:
|
||||
except Exception:
|
||||
self.reset_retry_count()
|
||||
raise
|
||||
finally:
|
||||
@ -454,6 +538,7 @@ class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
kerberos.authGSSClientClean(self.context)
|
||||
self.context = None
|
||||
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
@ -462,7 +547,7 @@ def init_http():
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||
except netrc.NetrcParseError:
|
||||
pass
|
||||
@ -481,6 +566,7 @@ def init_http():
|
||||
handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
|
||||
urllib.request.install_opener(urllib.request.build_opener(*handlers))
|
||||
|
||||
|
||||
def _Main(argv):
|
||||
result = 0
|
||||
|
||||
@ -528,7 +614,7 @@ def _Main(argv):
|
||||
argv = list(sys.argv)
|
||||
argv.extend(rce.extra_args)
|
||||
try:
|
||||
os.execv(__file__, argv)
|
||||
os.execv(sys.executable, [__file__] + argv)
|
||||
except OSError as e:
|
||||
print('fatal: cannot restart repo after upgrade', file=sys.stderr)
|
||||
print('fatal: %s' % e, file=sys.stderr)
|
||||
@ -537,5 +623,6 @@ def _Main(argv):
|
||||
TerminatePager()
|
||||
sys.exit(result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
_Main(sys.argv[1:])
|
||||
|
manifest_xml.py: 402 lines changed
@ -35,7 +35,8 @@ from git_config import GitConfig
|
||||
from git_refs import R_HEADS, HEAD
|
||||
import platform_utils
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError, ManifestInvalidRevisionError
|
||||
from error import (ManifestParseError, ManifestInvalidPathError,
|
||||
ManifestInvalidRevisionError)
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
@ -55,6 +56,61 @@ urllib.parse.uses_netloc.extend([
|
||||
'sso',
|
||||
'rpc'])
|
||||
|
||||

def XmlBool(node, attr, default=None):
  """Determine boolean value of |node|'s |attr|.

  Invalid values will issue a non-fatal warning.

  Args:
    node: XML node whose attributes we access.
    attr: The attribute to access.
    default: If the attribute is not set (value is empty), then use this.

  Returns:
    True if the attribute is a valid string representing true.
    False if the attribute is a valid string representing false.
    |default| otherwise.
  """
  value = node.getAttribute(attr)
  s = value.lower()
  if s == '':
    return default
  elif s in {'yes', 'true', '1'}:
    return True
  elif s in {'no', 'false', '0'}:
    return False
  else:
    print('warning: manifest: %s="%s": ignoring invalid XML boolean' %
          (attr, value), file=sys.stderr)
    return default


def XmlInt(node, attr, default=None):
  """Determine integer value of |node|'s |attr|.

  Args:
    node: XML node whose attributes we access.
    attr: The attribute to access.
    default: If the attribute is not set (value is empty), then use this.

  Returns:
    The number if the attribute is a valid number.

  Raises:
    ManifestParseError: The number is invalid.
  """
  value = node.getAttribute(attr)
  if not value:
    return default

  try:
    return int(value)
  except ValueError:
    raise ManifestParseError('manifest: invalid %s="%s" integer' %
                             (attr, value))

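A hedged usage sketch for the XmlBool/XmlInt helpers above; it assumes repo's source tree is importable as manifest_xml and uses a throwaway minidom element rather than a real manifest:

import xml.dom.minidom
from manifest_xml import XmlBool, XmlInt

node = xml.dom.minidom.parseString(
    '<default sync-c="true" sync-j="4" />').documentElement
print(XmlBool(node, 'sync-c', False))  # True
print(XmlBool(node, 'sync-s', False))  # False: attribute unset, default used
print(XmlInt(node, 'sync-j', 1))       # 4
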
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
@ -73,6 +129,7 @@ class _Default(object):
|
||||
def __ne__(self, other):
|
||||
return self.__dict__ != other.__dict__
|
||||
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
@ -126,6 +183,7 @@ class _XmlRemote(object):
|
||||
orig_name=self.name,
|
||||
fetchUrl=self.fetchUrl)
|
||||
|
||||
|
||||
class XmlManifest(object):
|
||||
"""manages the repo configuration file"""
|
||||
|
||||
@ -139,12 +197,20 @@ class XmlManifest(object):
|
||||
self._load_local_manifests = True
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
worktree = os.path.join(repodir, 'repo'))
|
||||
gitdir=os.path.join(repodir, 'repo/.git'),
|
||||
worktree=os.path.join(repodir, 'repo'))
|
||||
|
||||
self.manifestProject = MetaProject(self, 'manifests',
|
||||
gitdir = os.path.join(repodir, 'manifests.git'),
|
||||
worktree = os.path.join(repodir, 'manifests'))
|
||||
mp = MetaProject(self, 'manifests',
|
||||
gitdir=os.path.join(repodir, 'manifests.git'),
|
||||
worktree=os.path.join(repodir, 'manifests'))
|
||||
self.manifestProject = mp
|
||||
|
||||
# This is a bit hacky, but we're in a chicken & egg situation: all the
|
||||
# normal repo settings live in the manifestProject which we just setup
|
||||
# above, so we couldn't easily query before that. We assume Project()
|
||||
# init doesn't care if this changes afterwards.
|
||||
if os.path.exists(mp.gitdir) and mp.config.GetBoolean('repo.worktree'):
|
||||
mp.use_git_worktrees = True
|
||||
|
||||
self._Unload()
|
||||
|
||||
@ -179,12 +245,27 @@ class XmlManifest(object):
|
||||
"""
|
||||
self.Override(name)
|
||||
|
||||
try:
|
||||
if os.path.lexists(self.manifestFile):
|
||||
platform_utils.remove(self.manifestFile)
|
||||
platform_utils.symlink(os.path.join('manifests', name), self.manifestFile)
|
||||
except OSError as e:
|
||||
raise ManifestParseError('cannot link manifest %s: %s' % (name, str(e)))
|
||||
# Old versions of repo would generate symlinks we need to clean up.
|
||||
if os.path.lexists(self.manifestFile):
|
||||
platform_utils.remove(self.manifestFile)
|
||||
# This file is interpreted as if it existed inside the manifest repo.
|
||||
# That allows us to use <include> with the relative file name.
|
||||
with open(self.manifestFile, 'w') as fp:
|
||||
fp.write("""<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
DO NOT EDIT THIS FILE! It is generated by repo and changes will be discarded.
|
||||
If you want to use a different manifest, use `repo init -m <file>` instead.
|
||||
|
||||
If you want to customize your checkout by overriding manifest settings, use
|
||||
the local_manifests/ directory instead.
|
||||
|
||||
For more information on repo manifests, check out:
|
||||
https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
-->
|
||||
<manifest>
|
||||
<include name="%s" />
|
||||
</manifest>
|
||||
""" % (name,))
|
||||
|
||||
def _RemoteToXml(self, r, doc, root):
|
||||
e = doc.createElement('remote')
|
||||
@ -203,7 +284,7 @@ class XmlManifest(object):
|
||||
def _ParseGroups(self, groups):
|
||||
return [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True, groups=None):
|
||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True, peg_rev_dest_branch=True, groups=None):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
"""
|
||||
mp = self.manifestProject
|
||||
@ -223,7 +304,7 @@ class XmlManifest(object):
|
||||
if self.notice:
|
||||
notice_element = root.appendChild(doc.createElement('notice'))
|
||||
notice_lines = self.notice.splitlines()
|
||||
indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
|
||||
indented_notice = ('\n'.join(" " * 4 + line for line in notice_lines))[4:]
|
||||
notice_element.appendChild(doc.createTextNode(indented_notice))
|
||||
|
||||
d = self.default
|
||||
@ -308,6 +389,13 @@ class XmlManifest(object):
|
||||
# Only save the origin if the origin is not a sha1, and the default
|
||||
# isn't our value
|
||||
e.setAttribute('upstream', p.revisionExpr)
|
||||
|
||||
if peg_rev_dest_branch:
|
||||
if p.dest_branch:
|
||||
e.setAttribute('dest-branch', p.dest_branch)
|
||||
elif value != p.revisionExpr:
|
||||
e.setAttribute('dest-branch', p.revisionExpr)
|
||||
|
||||
else:
|
||||
revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
|
||||
if not revision or revision != p.revisionExpr:
|
||||
@ -413,6 +501,14 @@ class XmlManifest(object):
|
||||
self._Load()
|
||||
return self._manifest_server
|
||||
|
||||
@property
|
||||
def CloneBundle(self):
|
||||
clone_bundle = self.manifestProject.config.GetBoolean('repo.clonebundle')
|
||||
if clone_bundle is None:
|
||||
return False if self.manifestProject.config.GetBoolean('repo.partialclone') else True
|
||||
else:
|
||||
return clone_bundle
|
||||
|
||||
@property
|
||||
def CloneFilter(self):
|
||||
if self.manifestProject.config.GetBoolean('repo.partialclone'):
|
||||
@ -423,6 +519,10 @@ class XmlManifest(object):
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
@property
|
||||
def UseGitWorktrees(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.worktree')
|
||||
|
||||
@property
|
||||
def IsArchive(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.archive')
|
||||
@ -461,12 +561,12 @@ class XmlManifest(object):
|
||||
self.localManifestWarning = True
|
||||
print('warning: %s is deprecated; put local manifests '
|
||||
'in `%s` instead' % (LOCAL_MANIFEST_NAME,
|
||||
os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
file=sys.stderr)
|
||||
nodes.append(self._ParseManifestXml(local, self.repodir))
|
||||
|
||||
local_dir = os.path.abspath(os.path.join(self.repodir,
|
||||
LOCAL_MANIFESTS_DIR_NAME))
|
||||
LOCAL_MANIFESTS_DIR_NAME))
|
||||
try:
|
||||
for local_file in sorted(platform_utils.listdir(local_dir)):
|
||||
if local_file.endswith('.xml'):
|
||||
@ -511,7 +611,7 @@ class XmlManifest(object):
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError("include %s doesn't exist or isn't a file"
|
||||
% (name,))
|
||||
% (name,))
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
@ -598,6 +698,9 @@ class XmlManifest(object):
|
||||
if groups:
|
||||
groups = self._ParseGroups(groups)
|
||||
revision = node.getAttribute('revision')
|
||||
remote = node.getAttribute('remote')
|
||||
if remote:
|
||||
remote = self._get_remote(node)
|
||||
|
||||
for p in self._projects[name]:
|
||||
if path and p.relpath != path:
|
||||
@ -606,6 +709,8 @@ class XmlManifest(object):
|
||||
p.groups.extend(groups)
|
||||
if revision:
|
||||
p.revisionExpr = revision
|
||||
if remote:
|
||||
p.remote = remote.ToRemoteSpec(name)
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
@ -649,7 +754,6 @@ class XmlManifest(object):
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
self._repo_hooks_project = None
|
||||
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
@ -676,15 +780,15 @@ class XmlManifest(object):
|
||||
if name not in self._projects:
|
||||
m.PreSync()
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = name or None,
|
||||
revisionExpr = m.revisionExpr,
|
||||
revisionId = None)
|
||||
project = Project(manifest=self,
|
||||
name=name,
|
||||
remote=remote.ToRemoteSpec(name),
|
||||
gitdir=gitdir,
|
||||
objdir=gitdir,
|
||||
worktree=None,
|
||||
relpath=name or None,
|
||||
revisionExpr=m.revisionExpr,
|
||||
revisionId=None)
|
||||
self._projects[project.name] = [project]
|
||||
self._paths[project.relpath] = project
|
||||
|
||||
@ -722,29 +826,14 @@ class XmlManifest(object):
|
||||
d.destBranchExpr = node.getAttribute('dest-branch') or None
|
||||
d.upstreamExpr = node.getAttribute('upstream') or None
|
||||
|
||||
sync_j = node.getAttribute('sync-j')
|
||||
if sync_j == '' or sync_j is None:
|
||||
d.sync_j = 1
|
||||
else:
|
||||
d.sync_j = int(sync_j)
|
||||
d.sync_j = XmlInt(node, 'sync-j', 1)
|
||||
if d.sync_j <= 0:
|
||||
raise ManifestParseError('%s: sync-j must be greater than 0, not "%s"' %
|
||||
(self.manifestFile, d.sync_j))
|
||||
|
||||
sync_c = node.getAttribute('sync-c')
|
||||
if not sync_c:
|
||||
d.sync_c = False
|
||||
else:
|
||||
d.sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_s = node.getAttribute('sync-s')
|
||||
if not sync_s:
|
||||
d.sync_s = False
|
||||
else:
|
||||
d.sync_s = sync_s.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_tags = node.getAttribute('sync-tags')
|
||||
if not sync_tags:
|
||||
d.sync_tags = True
|
||||
else:
|
||||
d.sync_tags = sync_tags.lower() in ("yes", "true", "1")
|
||||
d.sync_c = XmlBool(node, 'sync-c', False)
|
||||
d.sync_s = XmlBool(node, 'sync-s', False)
|
||||
d.sync_tags = XmlBool(node, 'sync-tags', True)
|
||||
return d
|
||||
|
||||
def _ParseNotice(self, node):
|
||||
@ -792,7 +881,7 @@ class XmlManifest(object):
|
||||
def _UnjoinName(self, parent_name, name):
|
||||
return os.path.relpath(name, parent_name)
|
||||
|
||||
def _ParseProject(self, node, parent = None, **extra_proj_attrs):
|
||||
def _ParseProject(self, node, parent=None, **extra_proj_attrs):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
@ -805,55 +894,31 @@ class XmlManifest(object):
|
||||
remote = self._default.remote
|
||||
if remote is None:
|
||||
raise ManifestParseError("no remote for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
revisionExpr = node.getAttribute('revision') or remote.revision
|
||||
if not revisionExpr:
|
||||
revisionExpr = self._default.revisionExpr
|
||||
if not revisionExpr:
|
||||
raise ManifestParseError("no revision for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
path = node.getAttribute('path')
|
||||
if not path:
|
||||
path = name
|
||||
if path.startswith('/'):
|
||||
raise ManifestParseError("project %s path cannot be absolute in %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
|
||||
rebase = node.getAttribute('rebase')
|
||||
if not rebase:
|
||||
rebase = True
|
||||
else:
|
||||
rebase = rebase.lower() in ("yes", "true", "1")
|
||||
rebase = XmlBool(node, 'rebase', True)
|
||||
sync_c = XmlBool(node, 'sync-c', False)
|
||||
sync_s = XmlBool(node, 'sync-s', self._default.sync_s)
|
||||
sync_tags = XmlBool(node, 'sync-tags', self._default.sync_tags)
|
||||
|
||||
sync_c = node.getAttribute('sync-c')
|
||||
if not sync_c:
|
||||
sync_c = False
|
||||
else:
|
||||
sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_s = node.getAttribute('sync-s')
|
||||
if not sync_s:
|
||||
sync_s = self._default.sync_s
|
||||
else:
|
||||
sync_s = sync_s.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_tags = node.getAttribute('sync-tags')
|
||||
if not sync_tags:
|
||||
sync_tags = self._default.sync_tags
|
||||
else:
|
||||
sync_tags = sync_tags.lower() in ("yes", "true", "1")
|
||||
|
||||
clone_depth = node.getAttribute('clone-depth')
|
||||
if clone_depth:
|
||||
try:
|
||||
clone_depth = int(clone_depth)
|
||||
if clone_depth <= 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ManifestParseError('invalid clone-depth %s in %s' %
|
||||
(clone_depth, self.manifestFile))
|
||||
clone_depth = XmlInt(node, 'clone-depth')
|
||||
if clone_depth is not None and clone_depth <= 0:
|
||||
raise ManifestParseError('%s: clone-depth must be greater than 0, not "%s"' %
|
||||
(self.manifestFile, clone_depth))
|
||||
|
||||
dest_branch = node.getAttribute('dest-branch') or self._default.destBranchExpr
|
||||
|
||||
@ -865,8 +930,10 @@ class XmlManifest(object):
|
||||
groups = self._ParseGroups(groups)
|
||||
|
||||
if parent is None:
|
||||
relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
|
||||
relpath, worktree, gitdir, objdir, use_git_worktrees = \
|
||||
self.GetProjectPaths(name, path)
|
||||
else:
|
||||
use_git_worktrees = False
|
||||
relpath, worktree, gitdir, objdir = \
|
||||
self.GetSubprojectPaths(parent, name, path)
|
||||
|
||||
@ -874,27 +941,28 @@ class XmlManifest(object):
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
|
||||
if self.IsMirror and node.hasAttribute('force-path'):
|
||||
if node.getAttribute('force-path').lower() in ("yes", "true", "1"):
|
||||
if XmlBool(node, 'force-path', False):
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = objdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
revisionId = None,
|
||||
rebase = rebase,
|
||||
groups = groups,
|
||||
sync_c = sync_c,
|
||||
sync_s = sync_s,
|
||||
sync_tags = sync_tags,
|
||||
clone_depth = clone_depth,
|
||||
upstream = upstream,
|
||||
parent = parent,
|
||||
dest_branch = dest_branch,
|
||||
project = Project(manifest=self,
|
||||
name=name,
|
||||
remote=remote.ToRemoteSpec(name),
|
||||
gitdir=gitdir,
|
||||
objdir=objdir,
|
||||
worktree=worktree,
|
||||
relpath=relpath,
|
||||
revisionExpr=revisionExpr,
|
||||
revisionId=None,
|
||||
rebase=rebase,
|
||||
groups=groups,
|
||||
sync_c=sync_c,
|
||||
sync_s=sync_s,
|
||||
sync_tags=sync_tags,
|
||||
clone_depth=clone_depth,
|
||||
upstream=upstream,
|
||||
parent=parent,
|
||||
dest_branch=dest_branch,
|
||||
use_git_worktrees=use_git_worktrees,
|
||||
**extra_proj_attrs)
|
||||
|
||||
for n in node.childNodes:
|
||||
@ -905,11 +973,12 @@ class XmlManifest(object):
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
project.subprojects.append(self._ParseProject(n, parent = project))
|
||||
project.subprojects.append(self._ParseProject(n, parent=project))
|
||||
|
||||
return project
|
||||
|
||||
def GetProjectPaths(self, name, path):
|
||||
use_git_worktrees = False
|
||||
relpath = path
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
@ -918,8 +987,15 @@ class XmlManifest(object):
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects', '%s.git' % path)
|
||||
objdir = os.path.join(self.repodir, 'project-objects', '%s.git' % name)
|
||||
return relpath, worktree, gitdir, objdir
|
||||
# We allow people to mix git worktrees & non-git worktrees for now.
|
||||
# This allows for in situ migration of repo clients.
|
||||
if os.path.exists(gitdir) or not self.UseGitWorktrees:
|
||||
objdir = os.path.join(self.repodir, 'project-objects', '%s.git' % name)
|
||||
else:
|
||||
use_git_worktrees = True
|
||||
gitdir = os.path.join(self.repodir, 'worktrees', '%s.git' % name)
|
||||
objdir = gitdir
|
||||
return relpath, worktree, gitdir, objdir, use_git_worktrees
|
||||
|
||||
def GetProjectsWithName(self, name):
|
||||
return self._projects.get(name, [])
|
||||
@ -943,21 +1019,112 @@ class XmlManifest(object):
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
@staticmethod
|
||||
def _CheckLocalPath(path, symlink=False):
|
||||
"""Verify |path| is reasonable for use in <copyfile> & <linkfile>."""
|
||||
if '~' in path:
|
||||
return '~ not allowed (due to 8.3 filenames on Windows filesystems)'
|
||||
|
||||
# Some filesystems (like Apple's HFS+) try to normalize Unicode codepoints
|
||||
# which means there are alternative names for ".git". Reject paths with
|
||||
# these in it as there shouldn't be any reasonable need for them here.
|
||||
# The set of codepoints here was cribbed from jgit's implementation:
|
||||
# https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
|
||||
BAD_CODEPOINTS = {
|
||||
u'\u200C', # ZERO WIDTH NON-JOINER
|
||||
u'\u200D', # ZERO WIDTH JOINER
|
||||
u'\u200E', # LEFT-TO-RIGHT MARK
|
||||
u'\u200F', # RIGHT-TO-LEFT MARK
|
||||
u'\u202A', # LEFT-TO-RIGHT EMBEDDING
|
||||
u'\u202B', # RIGHT-TO-LEFT EMBEDDING
|
||||
u'\u202C', # POP DIRECTIONAL FORMATTING
|
||||
u'\u202D', # LEFT-TO-RIGHT OVERRIDE
|
||||
u'\u202E', # RIGHT-TO-LEFT OVERRIDE
|
||||
u'\u206A', # INHIBIT SYMMETRIC SWAPPING
|
||||
u'\u206B', # ACTIVATE SYMMETRIC SWAPPING
|
||||
u'\u206C', # INHIBIT ARABIC FORM SHAPING
|
||||
u'\u206D', # ACTIVATE ARABIC FORM SHAPING
|
||||
u'\u206E', # NATIONAL DIGIT SHAPES
|
||||
u'\u206F', # NOMINAL DIGIT SHAPES
|
||||
u'\uFEFF', # ZERO WIDTH NO-BREAK SPACE
|
||||
}
|
||||
if BAD_CODEPOINTS & set(path):
|
||||
# This message is more expansive than reality, but should be fine.
|
||||
return 'Unicode combining characters not allowed'
|
||||
|
||||
# Assume paths might be used on case-insensitive filesystems.
|
||||
path = path.lower()
|
||||
|
||||
# Split up the path by its components. We can't use os.path.sep exclusively
|
||||
# as some platforms (like Windows) will convert / to \ and that bypasses all
|
||||
# our constructed logic here. Especially since manifest authors only use
|
||||
# / in their paths.
|
||||
resep = re.compile(r'[/%s]' % re.escape(os.path.sep))
|
||||
parts = resep.split(path)
|
||||
|
||||
# Some people use src="." to create stable links to projects. Lets allow
|
||||
# that but reject all other uses of "." to keep things simple.
|
||||
if parts != ['.']:
|
||||
for part in set(parts):
|
||||
if part in {'.', '..', '.git'} or part.startswith('.repo'):
|
||||
return 'bad component: %s' % (part,)
|
||||
|
||||
if not symlink and resep.match(path[-1]):
|
||||
return 'dirs not allowed'
|
||||
|
||||
# NB: The two abspath checks here are to handle platforms with multiple
|
||||
# filesystem path styles (e.g. Windows).
|
||||
norm = os.path.normpath(path)
|
||||
if (norm == '..' or
|
||||
(len(norm) >= 3 and norm.startswith('..') and resep.match(norm[0])) or
|
||||
os.path.isabs(norm) or
|
||||
norm.startswith('/')):
|
||||
return 'path cannot be outside'
|
||||
|
||||
@classmethod
|
||||
def _ValidateFilePaths(cls, element, src, dest):
|
||||
"""Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
|
||||
|
||||
We verify the path independent of any filesystem state as we won't have a
|
||||
checkout available to compare to. i.e. This is for parsing validation
|
||||
purposes only.
|
||||
|
||||
We'll do full/live sanity checking before we do the actual filesystem
|
||||
modifications in _CopyFile/_LinkFile/etc...
|
||||
"""
|
||||
# |dest| is the file we write to or symlink we create.
|
||||
# It is relative to the top of the repo client checkout.
|
||||
msg = cls._CheckLocalPath(dest)
|
||||
if msg:
|
||||
raise ManifestInvalidPathError(
|
||||
'<%s> invalid "dest": %s: %s' % (element, dest, msg))
|
||||
|
||||
# |src| is the file we read from or path we point to for symlinks.
|
||||
# It is relative to the top of the git project checkout.
|
||||
msg = cls._CheckLocalPath(src, symlink=element == 'linkfile')
|
||||
if msg:
|
||||
raise ManifestInvalidPathError(
|
||||
'<%s> invalid "src": %s: %s' % (element, src, msg))
|
||||
|
||||
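To illustrate what the _CheckLocalPath/_ValidateFilePaths additions above reject for <copyfile>/<linkfile> destinations, a small sketch; it assumes repo's source tree is on sys.path and the sample paths are made up:

from manifest_xml import XmlManifest

for path in ['Android.bp',            # plain relative file: accepted (None)
             'docs/',                 # trailing separator: 'dirs not allowed'
             '../outside',            # parent reference: 'bad component: ..'
             '.git/hooks/pre-push']:  # touches .git: 'bad component: .git'
  print(path, '->', XmlManifest._CheckLocalPath(path))
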
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
# dest is relative to the top of the tree.
|
||||
# We only validate paths if we actually plan to process them.
|
||||
self._ValidateFilePaths('copyfile', src, dest)
|
||||
project.AddCopyFile(src, dest, self.topdir)
|
||||
|
||||
def _ParseLinkFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
|
||||
# dest is relative to the top of the tree.
|
||||
# We only validate paths if we actually plan to process them.
|
||||
self._ValidateFilePaths('linkfile', src, dest)
|
||||
project.AddLinkFile(src, dest, self.topdir)
|
||||
|
||||
def _ParseAnnotation(self, project, node):
|
||||
name = self._reqatt(node, 'name')
|
||||
@ -968,7 +1135,7 @@ class XmlManifest(object):
|
||||
keep = "true"
|
||||
if keep != "true" and keep != "false":
|
||||
raise ManifestParseError('optional "keep" attribute must be '
|
||||
'"true" or "false"')
|
||||
'"true" or "false"')
|
||||
project.AddAnnotation(name, value, keep)
|
||||
|
||||
def _get_remote(self, node):
|
||||
@ -979,7 +1146,7 @@ class XmlManifest(object):
|
||||
v = self._remotes.get(name)
|
||||
if not v:
|
||||
raise ManifestParseError("remote %s not defined in %s" %
|
||||
(name, self.manifestFile))
|
||||
(name, self.manifestFile))
|
||||
return v
|
||||
|
||||
def _reqatt(self, node, attname):
|
||||
@ -989,7 +1156,7 @@ class XmlManifest(object):
|
||||
v = node.getAttribute(attname)
|
||||
if not v:
|
||||
raise ManifestParseError("no %s in <%s> within %s" %
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
return v
|
||||
|
||||
def projectsDiff(self, manifest):
|
||||
@ -1007,7 +1174,7 @@ class XmlManifest(object):
|
||||
diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
|
||||
|
||||
for proj in fromKeys:
|
||||
if not proj in toKeys:
|
||||
if proj not in toKeys:
|
||||
diff['removed'].append(fromProjects[proj])
|
||||
else:
|
||||
fromProj = fromProjects[proj]
|
||||
@ -1039,7 +1206,7 @@ class GitcManifest(XmlManifest):
|
||||
gitc_client_name)
|
||||
self.manifestFile = os.path.join(self.gitc_client_dir, '.manifest')
|
||||
|
||||
def _ParseProject(self, node, parent = None):
|
||||
def _ParseProject(self, node, parent=None):
|
||||
"""Override _ParseProject and add support for GITC specific attributes."""
|
||||
return super(GitcManifest, self)._ParseProject(
|
||||
node, parent=parent, old_revision=node.getAttribute('old-revision'))
|
||||
@ -1048,4 +1215,3 @@ class GitcManifest(XmlManifest):
|
||||
"""Output GITC Specific Project attributes"""
|
||||
if p.old_revision:
|
||||
e.setAttribute('old-revision', str(p.old_revision))
|
||||
|
||||
|
pager.py: 13 lines changed (mode changed: executable file → normal file)
@ -27,6 +27,7 @@ pager_process = None
|
||||
old_stdout = None
|
||||
old_stderr = None
|
||||
|
||||
|
||||
def RunPager(globalConfig):
|
||||
if not os.isatty(0) or not os.isatty(1):
|
||||
return
|
||||
@ -35,33 +36,37 @@ def RunPager(globalConfig):
|
||||
return
|
||||
|
||||
if platform_utils.isWindows():
|
||||
_PipePager(pager);
|
||||
_PipePager(pager)
|
||||
else:
|
||||
_ForkPager(pager)
|
||||
|
||||
|
||||
def TerminatePager():
|
||||
global pager_process, old_stdout, old_stderr
|
||||
if pager_process:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
pager_process.stdin.close()
|
||||
pager_process.wait();
|
||||
pager_process.wait()
|
||||
pager_process = None
|
||||
# Restore initial stdout/err in case there is more output in this process
|
||||
# after shutting down the pager process
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
|
||||
|
||||
def _PipePager(pager):
|
||||
global pager_process, old_stdout, old_stderr
|
||||
assert pager_process is None, "Only one active pager process at a time"
|
||||
# Create pager process, piping stdout/err into its stdin
|
||||
pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr)
|
||||
pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout,
|
||||
stderr=sys.stderr)
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
sys.stdout = pager_process.stdin
|
||||
sys.stderr = pager_process.stdin
|
||||
|
||||
|
||||
def _ForkPager(pager):
|
||||
global active
|
||||
# This process turns into the pager; a child it forks will
|
||||
@ -88,6 +93,7 @@ def _ForkPager(pager):
|
||||
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
|
||||
sys.exit(255)
|
||||
|
||||
|
||||
def _SelectPager(globalConfig):
|
||||
try:
|
||||
return os.environ['GIT_PAGER']
|
||||
@ -105,6 +111,7 @@ def _SelectPager(globalConfig):
|
||||
|
||||
return 'less'
|
||||
|
||||
|
||||
def _BecomePager(pager):
|
||||
# Delaying execution of the pager until we have output
|
||||
# ready works around a long-standing bug in popularly
|
||||
|
@@ -90,8 +90,14 @@ class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
  """ Implementation of FileDescriptorStreams for platforms that support
  non blocking I/O.
  """
  def __init__(self):
    super(_FileDescriptorStreamsNonBlocking, self).__init__()
    self._poll = select.poll()
    self._fd_to_stream = {}

  class Stream(object):
    """ Encapsulates a file descriptor """

    def __init__(self, fd, dest, std_name):
      self.fd = fd
      self.dest = dest
@@ -113,11 +119,18 @@ class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
      self.fd.close()

  def _create_stream(self, fd, dest, std_name):
    return self.Stream(fd, dest, std_name)
    stream = self.Stream(fd, dest, std_name)
    self._fd_to_stream[stream.fileno()] = stream
    self._poll.register(stream, select.POLLIN)
    return stream

  def remove(self, stream):
    self._poll.unregister(stream)
    del self._fd_to_stream[stream.fileno()]
    super(_FileDescriptorStreamsNonBlocking, self).remove(stream)

  def select(self):
    ready_streams, _, _ = select.select(self.streams, [], [])
    return ready_streams
    return [self._fd_to_stream[fd] for fd, _ in self._poll.poll()]

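The diff above swaps select.select() for a registered select.poll() object, so readiness checks no longer rescan every stream on each call. A standalone sketch of that pattern on a pipe, for POSIX platforms that provide select.poll (Windows does not, which is why the threaded fallback below exists):

import os
import select

r, w = os.pipe()
poller = select.poll()
poller.register(r, select.POLLIN)   # same flag _create_stream registers with
os.write(w, b'hello\n')

for fd, _event in poller.poll():    # (fd, eventmask) pairs for ready fds
  print(os.read(fd, 1024))          # b'hello\n'
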
class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
@ -125,6 +138,7 @@ class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
non blocking I/O. This implementation requires creating threads issuing
|
||||
blocking read operations on file descriptors.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(_FileDescriptorStreamsThreads, self).__init__()
|
||||
# The queue is shared accross all threads so we can simulate the
|
||||
@ -144,12 +158,14 @@ class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
|
||||
class QueueItem(object):
|
||||
""" Item put in the shared queue """
|
||||
|
||||
def __init__(self, stream, data):
|
||||
self.stream = stream
|
||||
self.data = data
|
||||
|
||||
class Stream(object):
|
||||
""" Encapsulates a file descriptor """
|
||||
|
||||
def __init__(self, fd, dest, std_name, queue):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
@ -175,7 +191,7 @@ class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||
for line in iter(self.fd.readline, b''):
|
||||
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, line))
|
||||
self.fd.close()
|
||||
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, None))
|
||||
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, b''))
|
||||
|
||||
|
||||
def symlink(source, link_name):
|
||||
|
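The _create_stream/select hunks above switch from a per-call select.select() to a persistent
select.poll() registration keyed by file descriptor. A tiny POSIX-only sketch of that pattern;
the pipes and labels here are made up for illustration and are not repo's stream objects:

import os
import select

r1, w1 = os.pipe()
r2, w2 = os.pipe()

poller = select.poll()
fd_to_name = {r1: 'stream-1', r2: 'stream-2'}   # plays the role of _fd_to_stream
for fd in fd_to_name:
    poller.register(fd, select.POLLIN)

os.write(w1, b'hello from stream-1\n')
# poll() blocks until at least one registered fd is readable and returns
# (fd, event) pairs, which are mapped back to the owning object.
for fd, _event in poller.poll():
    print(fd_to_name[fd], os.read(fd, 1024))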
@ -19,13 +19,18 @@ import errno
from pyversion import is_python3
from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
from ctypes import c_buffer
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE, POINTER, c_ubyte
from ctypes.wintypes import WCHAR, USHORT, LPVOID, Structure, Union, ULONG
from ctypes.wintypes import byref
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG
if is_python3():
from ctypes import c_ubyte, Structure, Union, byref
from ctypes.wintypes import LPDWORD
else:
# For legacy Python2 different imports are needed.
from ctypes.wintypes import POINTER, c_ubyte, Structure, Union, byref
LPDWORD = POINTER(DWORD)

kernel32 = WinDLL('kernel32', use_last_error=True)

LPDWORD = POINTER(DWORD)
UCHAR = c_ubyte

# Win32 error codes
@ -147,7 +152,8 @@ def create_dirsymlink(source, link_name):


def _create_symlink(source, link_name, dwFlags):
if not CreateSymbolicLinkW(link_name, source, dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
if not CreateSymbolicLinkW(link_name, source,
dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
# See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0
# "the unprivileged create flag is unsupported below Windows 10 (1703, v10.0.14972).
# retry without it."
@ -213,8 +219,8 @@ def _preserve_encoding(source, target):
if is_python3():
return target

if isinstance(source, unicode):
return unicode(target)
if isinstance(source, unicode):  # noqa: F821
return unicode(target)  # noqa: F821
return str(target)
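The _create_symlink hunk above only rewraps a long call, but the comment it touches documents a
real fallback: SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE is rejected before Windows 10 1703,
so the call is retried without it. A Windows-only sketch of that retry, standalone and
illustrative rather than repo's helper:

import ctypes
from ctypes import WinDLL, get_last_error
from ctypes.wintypes import BOOLEAN, DWORD, LPCWSTR

kernel32 = WinDLL('kernel32', use_last_error=True)
CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
CreateSymbolicLinkW.restype = BOOLEAN
CreateSymbolicLinkW.argtypes = (LPCWSTR, LPCWSTR, DWORD)

SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02
ERROR_INVALID_PARAMETER = 87

def make_symlink(target, link_name, flags=0):
    if CreateSymbolicLinkW(link_name, target,
                           flags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
        return
    # Older builds reject the unprivileged flag outright; retry without it.
    if get_last_error() == ERROR_INVALID_PARAMETER:
        if CreateSymbolicLinkW(link_name, target, flags):
            return
    raise ctypes.WinError(get_last_error())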
37  progress.py
@ -26,6 +26,7 @@ _NOT_TTY = not os.isatty(2)
# column 0.
CSI_ERASE_LINE = '\x1b[2K'


class Progress(object):
def __init__(self, title, total=0, units='', print_newline=False,
always_print_percentage=False):
@ -53,9 +54,9 @@ class Progress(object):

if self._total <= 0:
sys.stderr.write('%s\r%s: %d,' % (
CSI_ERASE_LINE,
self._title,
self._done))
CSI_ERASE_LINE,
self._title,
self._done))
sys.stderr.flush()
else:
p = (100 * self._done) / self._total
@ -63,13 +64,13 @@ class Progress(object):
if self._lastp != p or self._always_print_percentage:
self._lastp = p
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s)%s%s%s' % (
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units,
' ' if msg else '', msg,
"\n" if self._print_newline else ""))
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units,
' ' if msg else '', msg,
"\n" if self._print_newline else ""))
sys.stderr.flush()

def end(self):
@ -78,16 +79,16 @@ class Progress(object):

if self._total <= 0:
sys.stderr.write('%s\r%s: %d, done.\n' % (
CSI_ERASE_LINE,
self._title,
self._done))
CSI_ERASE_LINE,
self._title,
self._done))
sys.stderr.flush()
else:
p = (100 * self._done) / self._total
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s), done.\n' % (
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units))
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units))
sys.stderr.flush()
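The progress.py hunks above only re-indent the long sys.stderr.write continuation lines, but the
mechanism they reformat is worth spelling out: erase the current line with CSI "2K", return the
cursor with "\r", and rewrite the counter in place. A self-contained sketch; the title, loop, and
timing are arbitrary:

import sys
import time

CSI_ERASE_LINE = '\x1b[2K'

def show(done, total, title='Syncing work tree'):
    percent = (100 * done) // total
    # Erase the line, jump back to column 0, and redraw the counter.
    sys.stderr.write('%s\r%s: %3d%% (%d/%d)' %
                     (CSI_ERASE_LINE, title, percent, done, total))
    sys.stderr.flush()

for i in range(1, 11):
    show(i, 10)
    time.sleep(0.1)
sys.stderr.write(', done.\n')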
|
700  project.py  (Executable file → Normal file)
File diff suppressed because it is too large
@ -16,5 +16,6 @@

import sys


def is_python3():
return sys.version_info[0] == 3

2  release/README.md  (Normal file)
@ -0,0 +1,2 @@
These are helper tools for managing official releases.
See the [release process](../docs/release-process.md) document for more details.
|
114
release/sign-launcher.py
Executable file
114
release/sign-launcher.py
Executable file
@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper tool for signing repo launcher scripts correctly.
|
||||
|
||||
This is intended to be run only by the official Repo release managers.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import util
|
||||
|
||||
|
||||
def sign(opts):
|
||||
"""Sign the launcher!"""
|
||||
output = ''
|
||||
for key in opts.keys:
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses the
|
||||
# default signing key. i.e. It will see that KEYID_RSA is a subkey of
|
||||
# another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = ['gpg', '--homedir', opts.gpgdir, '-u', f'{key}!', '--batch', '--yes',
|
||||
'--armor', '--detach-sign', '--output', '-', opts.launcher]
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
output += ret.stdout
|
||||
|
||||
# Save the combined signatures into one file.
|
||||
with open(f'{opts.launcher}.asc', 'w', encoding='utf-8') as fp:
|
||||
fp.write(output)
|
||||
|
||||
|
||||
def check(opts):
|
||||
"""Check the signature."""
|
||||
util.run(opts, ['gpg', '--verify', f'{opts.launcher}.asc'])
|
||||
|
||||
|
||||
def postmsg(opts):
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
print(f"""
|
||||
Repo launcher bucket:
|
||||
gs://git-repo-downloads/
|
||||
|
||||
To upload this launcher directly:
|
||||
gsutil cp -a public-read {opts.launcher} {opts.launcher}.asc gs://git-repo-downloads/
|
||||
|
||||
NB: You probably want to upload it with a specific version first, e.g.:
|
||||
gsutil cp -a public-read {opts.launcher} gs://git-repo-downloads/repo-3.0
|
||||
gsutil cp -a public-read {opts.launcher}.asc gs://git-repo-downloads/repo-3.0.asc
|
||||
""")
|
||||
|
||||
|
||||
def get_parser():
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
dest='dryrun', action='store_true',
|
||||
help='show everything that would be done')
|
||||
parser.add_argument('--gpgdir',
|
||||
default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
|
||||
help='path to dedicated gpg dir with release keys '
|
||||
'(default: ~/.gnupg/repo/)')
|
||||
parser.add_argument('--keyid', dest='keys', default=[], action='append',
|
||||
help='alternative signing keys to use')
|
||||
parser.add_argument('launcher',
|
||||
default=os.path.join(util.TOPDIR, 'repo'), nargs='?',
|
||||
help='the launcher script to sign')
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
|
||||
if not os.path.exists(opts.launcher):
|
||||
parser.error(f'launcher does not exist: {opts.launcher}')
|
||||
|
||||
opts.launcher = os.path.relpath(opts.launcher)
|
||||
print(f'Signing "{opts.launcher}" launcher script and saving to '
|
||||
f'"{opts.launcher}.asc"')
|
||||
|
||||
if opts.keys:
|
||||
print(f'Using custom keys to sign: {" ".join(opts.keys)}')
|
||||
else:
|
||||
print('Using official Repo release keys to sign')
|
||||
opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
|
||||
util.import_release_key(opts)
|
||||
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
144
release/sign-tag.py
Executable file
144
release/sign-tag.py
Executable file
@ -0,0 +1,144 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper tool for signing repo release tags correctly.
|
||||
|
||||
This is intended to be run only by the official Repo release managers, but it
|
||||
could be run by people maintaining their own fork of the project.
|
||||
|
||||
NB: Avoid new releases on off-hours. If something goes wrong, staff/oncall need
|
||||
to be active in order to respond quickly & effectively. Recommend sticking to:
|
||||
* Mon - Thu, 9:00 - 14:00 PT (i.e. MTV time)
|
||||
* Avoid US holidays (and large international ones if possible)
|
||||
* Follow the normal Google production freeze schedule
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import util
|
||||
|
||||
|
||||
# We currently sign with the old DSA key as it's been around the longest.
|
||||
# We should transition to RSA by Jun 2020, and ECC by Jun 2021.
|
||||
KEYID = util.KEYID_DSA
|
||||
|
||||
# Regular expression to validate tag names.
|
||||
RE_VALID_TAG = r'^v([0-9]+[.])+[0-9]+$'
|
||||
|
||||
|
||||
def sign(opts):
|
||||
"""Tag the commit & sign it!"""
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses the
|
||||
# default signing key. i.e. It will see that KEYID_RSA is a subkey of
|
||||
# another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = ['git', 'tag', '-s', opts.tag, '-u', f'{opts.key}!',
|
||||
'-m', f'repo {opts.tag}', opts.commit]
|
||||
|
||||
key = 'GNUPGHOME'
|
||||
print('+', f'export {key}="{opts.gpgdir}"')
|
||||
oldvalue = os.getenv(key)
|
||||
os.putenv(key, opts.gpgdir)
|
||||
util.run(opts, cmd)
|
||||
if oldvalue is None:
|
||||
os.unsetenv(key)
|
||||
else:
|
||||
os.putenv(key, oldvalue)
|
||||
|
||||
|
||||
def check(opts):
|
||||
"""Check the signature."""
|
||||
util.run(opts, ['git', 'tag', '--verify', opts.tag])
|
||||
|
||||
|
||||
def postmsg(opts):
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
cmd = ['git', 'rev-parse', 'remotes/origin/stable']
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
current_release = ret.stdout.strip()
|
||||
|
||||
cmd = ['git', 'log', '--format=%h (%aN) %s', '--no-merges',
|
||||
f'remotes/origin/stable..{opts.tag}']
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
shortlog = ret.stdout.strip()
|
||||
|
||||
print(f"""
|
||||
Here's the short log since the last release.
|
||||
{shortlog}
|
||||
|
||||
To push release to the public:
|
||||
git push origin {opts.commit}:stable {opts.tag} -n
|
||||
NB: People will start upgrading to this version immediately.
|
||||
|
||||
To roll back a release:
|
||||
git push origin --force {current_release}:stable -n
|
||||
""")
|
||||
|
||||
|
||||
def get_parser():
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter)
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
dest='dryrun', action='store_true',
|
||||
help='show everything that would be done')
|
||||
parser.add_argument('--gpgdir',
|
||||
default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
|
||||
help='path to dedicated gpg dir with release keys '
|
||||
'(default: ~/.gnupg/repo/)')
|
||||
parser.add_argument('-f', '--force', action='store_true',
|
||||
help='force signing of any tag')
|
||||
parser.add_argument('--keyid', dest='key',
|
||||
help='alternative signing key to use')
|
||||
parser.add_argument('tag',
|
||||
help='the tag to create (e.g. "v2.0")')
|
||||
parser.add_argument('commit', default='HEAD', nargs='?',
|
||||
help='the commit to tag')
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
|
||||
|
||||
if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
|
||||
parser.error(f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
|
||||
'use --force to sign anyways')
|
||||
|
||||
if opts.key:
|
||||
print(f'Using custom key to sign: {opts.key}')
|
||||
else:
|
||||
print('Using official Repo release key to sign')
|
||||
opts.key = KEYID
|
||||
util.import_release_key(opts)
|
||||
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
73
release/util.py
Normal file
73
release/util.py
Normal file
@ -0,0 +1,73 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Random utility code for release tools."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
|
||||
|
||||
|
||||
TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
HOMEDIR = os.path.expanduser('~')
|
||||
|
||||
|
||||
# These are the release keys we sign with.
|
||||
KEYID_DSA = '8BB9AD793E8E6153AF0F9A4416530D5E920F5C65'
|
||||
KEYID_RSA = 'A34A13BE8E76BFF46A0C022DA2E75A824AAB9624'
|
||||
KEYID_ECC = 'E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39'
|
||||
|
||||
|
||||
def cmdstr(cmd):
|
||||
"""Get a nicely quoted shell command."""
|
||||
ret = []
|
||||
for arg in cmd:
|
||||
if not re.match(r'^[a-zA-Z0-9/_.=-]+$', arg):
|
||||
arg = f'"{arg}"'
|
||||
ret.append(arg)
|
||||
return ' '.join(ret)
|
||||
|
||||
|
||||
def run(opts, cmd, check=True, **kwargs):
|
||||
"""Helper around subprocess.run to include logging."""
|
||||
print('+', cmdstr(cmd))
|
||||
if opts.dryrun:
|
||||
cmd = ['true', '--'] + cmd
|
||||
try:
|
||||
return subprocess.run(cmd, check=check, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f'aborting: {e}', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def import_release_key(opts):
|
||||
"""Import the public key of the official release repo signing key."""
|
||||
# Extract the key from our repo launcher.
|
||||
launcher = getattr(opts, 'launcher', os.path.join(TOPDIR, 'repo'))
|
||||
print(f'Importing keys from "{launcher}" launcher script')
|
||||
with open(launcher, encoding='utf-8') as fp:
|
||||
data = fp.read()
|
||||
|
||||
keys = re.findall(
|
||||
r'\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*'
|
||||
r'\n-----END PGP PUBLIC KEY BLOCK-----\n', data, flags=re.M)
|
||||
run(opts, ['gpg', '--import'], input='\n'.join(keys).encode('utf-8'))
|
||||
|
||||
print('Marking keys as fully trusted')
|
||||
run(opts, ['gpg', '--import-ownertrust'],
|
||||
input=f'{KEYID_DSA}:6:\n'.encode('utf-8'))
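One detail of the run() helper above deserves a note: in dry-run mode the command is still logged,
but it is prefixed with `true --` so nothing actually executes. A hedged POSIX-only sketch of that
trick outside release/util.py; the Opts object and the sample git command are improvised:

import subprocess

class Opts:
    dryrun = True

def run(opts, cmd, check=True, **kwargs):
    print('+', ' '.join(cmd))
    if opts.dryrun:
        # /usr/bin/true ignores its arguments and exits 0, so the full command
        # line shows up in the log without any side effects.
        cmd = ['true', '--'] + cmd
    return subprocess.run(cmd, check=check, **kwargs)

run(Opts(), ['git', 'tag', '-s', 'v2.0'])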
|
@ -28,13 +28,16 @@ REPO_TRACE = 'REPO_TRACE'
|
||||
|
||||
_TRACE = os.environ.get(REPO_TRACE) == '1'
|
||||
|
||||
|
||||
def IsTrace():
|
||||
return _TRACE
|
||||
|
||||
|
||||
def SetTrace():
|
||||
global _TRACE
|
||||
_TRACE = True
|
||||
|
||||
|
||||
def Trace(fmt, *args):
|
||||
if IsTrace():
|
||||
print(fmt % args, file=sys.stderr)
|
||||
|
10
run_tests
10
run_tests
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/env python
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
@ -42,9 +42,11 @@ def main(argv):
|
||||
"""The main entry."""
|
||||
# Add the repo tree to PYTHONPATH as the tests expect to be able to import
|
||||
# modules directly.
|
||||
topdir = os.path.dirname(os.path.realpath(__file__))
|
||||
pythonpath = os.environ.get('PYTHONPATH', '')
|
||||
os.environ['PYTHONPATH'] = '%s:%s' % (topdir, pythonpath)
|
||||
pythonpath = os.path.dirname(os.path.realpath(__file__))
|
||||
oldpythonpath = os.environ.get('PYTHONPATH', None)
|
||||
if oldpythonpath is not None:
|
||||
pythonpath += os.pathsep + oldpythonpath
|
||||
os.environ['PYTHONPATH'] = pythonpath
|
||||
|
||||
return run_pytest('pytest', argv)
|
||||
|
||||
|
2
setup.py
2
setup.py
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/env python
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
import os
|
||||
|
||||
# A mapping of the subcommand name to the class that implements it.
|
||||
all_commands = {}
|
||||
|
||||
my_dir = os.path.dirname(__file__)
|
||||
@ -37,14 +38,14 @@ for py in os.listdir(my_dir):
|
||||
['%s' % name])
|
||||
mod = getattr(mod, name)
|
||||
try:
|
||||
cmd = getattr(mod, clsn)()
|
||||
cmd = getattr(mod, clsn)
|
||||
except AttributeError:
|
||||
raise SyntaxError('%s/%s does not define class %s' % (
|
||||
__name__, py, clsn))
|
||||
__name__, py, clsn))
|
||||
|
||||
name = name.replace('_', '-')
|
||||
cmd.NAME = name
|
||||
all_commands[name] = cmd
|
||||
|
||||
if 'help' in all_commands:
|
||||
all_commands['help'].commands = all_commands
|
||||
# Add 'branch' as an alias for 'branches'.
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
@ -15,12 +15,15 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command
|
||||
|
||||
from collections import defaultdict
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from git_command import git
|
||||
from progress import Progress
|
||||
|
||||
|
||||
class Abandon(Command):
|
||||
common = True
|
||||
helpSummary = "Permanently abandon a development branch"
|
||||
@ -32,7 +35,11 @@ deleting it (and all its history) from your local repository.
|
||||
|
||||
It is equivalent to "git branch -D <branchname>".
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-q', '--quiet',
|
||||
action='store_true', default=False,
|
||||
help='be quiet')
|
||||
p.add_option('--all',
|
||||
dest='all', action='store_true',
|
||||
help='delete all branches in all projects')
|
||||
@ -79,21 +86,24 @@ It is equivalent to "git branch -D <branchname>".
|
||||
|
||||
if err:
|
||||
for br in err.keys():
|
||||
err_msg = "error: cannot abandon %s" %br
|
||||
err_msg = "error: cannot abandon %s" % br
|
||||
print(err_msg, file=sys.stderr)
|
||||
for proj in err[br]:
|
||||
print(' '*len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
|
||||
print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print('error: no project has local branch(es) : %s' % nb,
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
print('Abandoned branches:', file=sys.stderr)
|
||||
# Everything below here is displaying status.
|
||||
if opt.quiet:
|
||||
return
|
||||
print('Abandoned branches:')
|
||||
for br in success.keys():
|
||||
if len(all_projects) > 1 and len(all_projects) == len(success[br]):
|
||||
result = "all project"
|
||||
else:
|
||||
result = "%s" % (
|
||||
('\n'+' '*width + '| ').join(p.relpath for p in success[br]))
|
||||
print("%s%s| %s\n" % (br,' '*(width-len(br)), result),file=sys.stderr)
|
||||
('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
|
||||
print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result))
|
||||
|
@ -19,13 +19,15 @@ import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
|
||||
|
||||
class BranchColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'branch')
|
||||
self.current = self.printer('current', fg='green')
|
||||
self.local = self.printer('local')
|
||||
self.local = self.printer('local')
|
||||
self.notinproject = self.printer('notinproject', fg='red')
|
||||
|
||||
|
||||
class BranchInfo(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
@ -158,7 +160,7 @@ is shown, then the branch appears in all projects.
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
if not p in have:
|
||||
if p not in have:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
@ -170,11 +172,11 @@ is shown, then the branch appears in all projects.
|
||||
fmt = out.current if i.IsCurrent else out.write
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
fmt = out.write
|
||||
for p in non_cur_paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
@ -19,6 +19,7 @@ import sys
|
||||
from command import Command
|
||||
from progress import Progress
|
||||
|
||||
|
||||
class Checkout(Command):
|
||||
common = True
|
||||
helpSummary = "Checkout a branch for development"
|
||||
|
@ -22,6 +22,7 @@ from git_command import GitCommand
|
||||
|
||||
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
|
||||
|
||||
|
||||
class CherryPick(Command):
|
||||
common = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
@ -46,8 +47,8 @@ change id will be added.
|
||||
|
||||
p = GitCommand(None,
|
||||
['rev-parse', '--verify', reference],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
if p.Wait() != 0:
|
||||
print(p.stderr, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
@ -61,8 +62,8 @@ change id will be added.
|
||||
|
||||
p = GitCommand(None,
|
||||
['cherry-pick', sha1],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
status = p.Wait()
|
||||
|
||||
print(p.stdout, file=sys.stdout)
|
||||
@ -74,9 +75,9 @@ change id will be added.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
|
||||
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
|
||||
provide_stdin = True,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
provide_stdin=True,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.stdin.write(new_msg)
|
||||
p.stdin.close()
|
||||
if p.Wait() != 0:
|
||||
@ -97,7 +98,7 @@ change id will be added.
|
||||
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("")+1:])
|
||||
return "\n".join(lines[lines.index("") + 1:])
|
||||
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
from command import PagedCommand
|
||||
|
||||
|
||||
class Diff(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Show changes between commit and working tree"
|
||||
@ -28,10 +29,6 @@ to the Unix 'patch' command.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
def cmd(option, opt_str, value, parser):
|
||||
setattr(parser.values, option.dest, list(parser.rargs))
|
||||
while parser.rargs:
|
||||
del parser.rargs[0]
|
||||
p.add_option('-u', '--absolute',
|
||||
dest='absolute', action='store_true',
|
||||
help='Paths are relative to the repository root')
|
||||
|
@ -18,10 +18,12 @@ from color import Coloring
|
||||
from command import PagedCommand
|
||||
from manifest_xml import XmlManifest
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
|
||||
class Diffmanifests(PagedCommand):
|
||||
""" A command to see logs in projects represented by manifests
|
||||
|
||||
@ -77,7 +79,7 @@ synced and their revisions won't be found.
|
||||
metavar='<FORMAT>',
|
||||
help='print the log using a custom git pretty format string')
|
||||
|
||||
def _printRawDiff(self, diff):
|
||||
def _printRawDiff(self, diff, pretty_format=None):
|
||||
for project in diff['added']:
|
||||
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
@ -90,7 +92,7 @@ synced and their revisions won't be found.
|
||||
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=True, color=False)
|
||||
self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
|
||||
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
@ -184,10 +186,10 @@ synced and their revisions won't be found.
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.printText = self.out.nofmt_printer('text')
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer('project', attr = 'bold')
|
||||
self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
|
||||
self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
|
||||
self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
|
||||
self.printProject = self.out.nofmt_printer('project', attr='bold')
|
||||
self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold')
|
||||
self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold')
|
||||
self.printRevision = self.out.nofmt_printer('revision', fg='yellow')
|
||||
else:
|
||||
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||
|
||||
@ -201,6 +203,6 @@ synced and their revisions won't be found.
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(diff)
|
||||
self._printRawDiff(diff, pretty_format=opt.pretty_format)
|
||||
else:
|
||||
self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
|
||||
|
54
subcmds/download.py
Executable file → Normal file
54
subcmds/download.py
Executable file → Normal file
@ -23,6 +23,7 @@ from error import GitError
|
||||
|
||||
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
|
||||
|
||||
|
||||
class Download(Command):
|
||||
common = True
|
||||
helpSummary = "Download and checkout a change"
|
||||
@ -36,9 +37,13 @@ If no project is specified try to use current directory as a project.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-b', '--branch',
|
||||
help='create a new branch first')
|
||||
p.add_option('-c', '--cherry-pick',
|
||||
dest='cherrypick', action='store_true',
|
||||
help="cherry-pick instead of checkout")
|
||||
p.add_option('-x', '--record-origin', action='store_true',
|
||||
help='pass -x when cherry-picking')
|
||||
p.add_option('-r', '--revert',
|
||||
dest='revert', action='store_true',
|
||||
help="revert instead of checkout")
|
||||
@ -77,6 +82,14 @@ If no project is specified try to use current directory as a project.
|
||||
project = self.GetProjects([a])[0]
|
||||
return to_get
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.record_origin:
|
||||
if not opt.cherrypick:
|
||||
self.OptionParser.error('-x only makes sense with --cherry-pick')
|
||||
|
||||
if opt.ffonly:
|
||||
self.OptionParser.error('-x and --ff are mutually exclusive options')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
for project, change_id, ps_id in self._ParseChangeIds(args):
|
||||
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||
@ -93,22 +106,41 @@ If no project is specified try to use current directory as a project.
|
||||
continue
|
||||
|
||||
if len(dl.commits) > 1:
|
||||
print('[%s] %d/%d depends on %d unmerged changes:' \
|
||||
print('[%s] %d/%d depends on %d unmerged changes:'
|
||||
% (project.name, change_id, ps_id, len(dl.commits)),
|
||||
file=sys.stderr)
|
||||
for c in dl.commits:
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if opt.cherrypick:
|
||||
try:
|
||||
project._CherryPick(dl.commit)
|
||||
except GitError:
|
||||
print('[%s] Could not complete the cherry-pick of %s' \
|
||||
% (project.name, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.cherrypick:
|
||||
mode = 'cherry-pick'
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
mode = 'revert'
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
mode = 'fast-forward merge'
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
mode = 'checkout'
|
||||
|
||||
# We'll combine the branch+checkout operation, but all the rest need a
|
||||
# dedicated branch start.
|
||||
if opt.branch and mode != 'checkout':
|
||||
project.StartBranch(opt.branch)
|
||||
|
||||
try:
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit, ffonly=opt.ffonly,
|
||||
record_origin=opt.record_origin)
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
else:
|
||||
if opt.branch:
|
||||
project.StartBranch(opt.branch, revision=dl.commit)
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
|
||||
except GitError:
|
||||
print('[%s] Could not complete the %s of %s'
|
||||
% (project.name, mode, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -28,10 +28,10 @@ from command import Command, MirrorSafeCommand
|
||||
import platform_utils
|
||||
|
||||
_CAN_COLOR = [
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
]
|
||||
|
||||
|
||||
@ -127,7 +127,8 @@ without iterating through the remaining projects.
|
||||
help="Execute the command only on projects matching regex or wildcard expression")
|
||||
p.add_option('-i', '--inverse-regex',
|
||||
dest='inverse_regex', action='store_true',
|
||||
help="Execute the command only on projects not matching regex or wildcard expression")
|
||||
help="Execute the command only on projects not matching regex or "
|
||||
"wildcard expression")
|
||||
p.add_option('-g', '--groups',
|
||||
dest='groups',
|
||||
help="Execute the command only on projects matching the specified groups")
|
||||
@ -170,14 +171,16 @@ without iterating through the remaining projects.
|
||||
else:
|
||||
lrev = None
|
||||
return {
|
||||
'name': project.name,
|
||||
'relpath': project.relpath,
|
||||
'remote_name': project.remote.name,
|
||||
'lrev': lrev,
|
||||
'rrev': project.revisionExpr,
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
'name': project.name,
|
||||
'relpath': project.relpath,
|
||||
'remote_name': project.remote.name,
|
||||
'lrev': lrev,
|
||||
'rrev': project.revisionExpr,
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
'upstream': project.upstream,
|
||||
'dest_branch': project.dest_branch,
|
||||
}
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
@ -195,9 +198,9 @@ without iterating through the remaining projects.
|
||||
cmd.append(cmd[0])
|
||||
cmd.extend(opt.command[1:])
|
||||
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
# If this is a direct git command that can enable colorized
|
||||
# output and the user prefers coloring, add --color into the
|
||||
# command line because we are going to wrap the command into
|
||||
@ -220,7 +223,7 @@ without iterating through the remaining projects.
|
||||
|
||||
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
self.manifest.Override(smart_sync_manifest_path)
|
||||
@ -238,8 +241,8 @@ without iterating through the remaining projects.
|
||||
try:
|
||||
config = self.manifest.manifestProject.config
|
||||
results_it = pool.imap(
|
||||
DoWorkWrapper,
|
||||
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
|
||||
DoWorkWrapper,
|
||||
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
|
||||
pool.close()
|
||||
for r in results_it:
|
||||
rc = rc or r
|
||||
@ -253,7 +256,7 @@ without iterating through the remaining projects.
|
||||
except Exception as e:
|
||||
# Catch any other exceptions raised
|
||||
print('Got an error, terminating the pool: %s: %s' %
|
||||
(type(e).__name__, e),
|
||||
(type(e).__name__, e),
|
||||
file=sys.stderr)
|
||||
pool.terminate()
|
||||
rc = rc or getattr(e, 'errno', 1)
|
||||
@ -268,7 +271,7 @@ without iterating through the remaining projects.
|
||||
project = self._SerializeProject(p)
|
||||
except Exception as e:
|
||||
print('Project list error on project %s: %s: %s' %
|
||||
(p.name, type(e).__name__, e),
|
||||
(p.name, type(e).__name__, e),
|
||||
file=sys.stderr)
|
||||
return
|
||||
except KeyboardInterrupt:
|
||||
@ -277,6 +280,7 @@ without iterating through the remaining projects.
|
||||
return
|
||||
yield [mirror, opt, cmd, shell, cnt, config, project]
|
||||
|
||||
|
||||
class WorkerKeyboardInterrupt(Exception):
|
||||
""" Keyboard interrupt exception for worker processes. """
|
||||
pass
|
||||
@ -285,6 +289,7 @@ class WorkerKeyboardInterrupt(Exception):
|
||||
def InitWorker():
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
|
||||
|
||||
def DoWorkWrapper(args):
|
||||
""" A wrapper around the DoWork() method.
|
||||
|
||||
@ -303,11 +308,10 @@ def DoWorkWrapper(args):
|
||||
|
||||
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
env = os.environ.copy()
|
||||
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
if hasattr(val, 'encode'):
|
||||
val = val.encode()
|
||||
env[name] = val
|
||||
|
||||
setenv('REPO_PROJECT', project['name'])
|
||||
@ -315,6 +319,8 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
setenv('REPO_REMOTE', project['remote_name'])
|
||||
setenv('REPO_LREV', project['lrev'])
|
||||
setenv('REPO_RREV', project['rrev'])
|
||||
setenv('REPO_UPSTREAM', project['upstream'])
|
||||
setenv('REPO_DEST_BRANCH', project['dest_branch'])
|
||||
setenv('REPO_I', str(cnt + 1))
|
||||
for name in project['annotations']:
|
||||
setenv("REPO__%s" % (name), project['annotations'][name])
|
||||
@ -331,7 +337,7 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
if opt.ignore_missing:
|
||||
return 0
|
||||
if ((opt.project_header and opt.verbose)
|
||||
or not opt.project_header):
|
||||
or not opt.project_header):
|
||||
print('skipping %s/' % project['relpath'], file=sys.stderr)
|
||||
return 1
|
||||
|
||||
@ -366,10 +372,10 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
while not s_in.is_done:
|
||||
in_ready = s_in.select()
|
||||
for s in in_ready:
|
||||
buf = s.read()
|
||||
buf = s.read().decode()
|
||||
if not buf:
|
||||
s.close()
|
||||
s_in.remove(s)
|
||||
s.close()
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
|
@ -22,7 +22,8 @@ import platform_utils
|
||||
|
||||
from pyversion import is_python3
|
||||
if not is_python3():
|
||||
input = raw_input
|
||||
input = raw_input # noqa: F821
|
||||
|
||||
|
||||
class GitcDelete(Command, GitcClientCommand):
|
||||
common = True
|
||||
|
@ -50,7 +50,7 @@ use for this GITC client.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
super(GitcInit, self)._Options(p)
|
||||
super(GitcInit, self)._Options(p, gitc_init=True)
|
||||
g = p.add_option_group('GITC options')
|
||||
g.add_option('-f', '--manifest-file',
|
||||
dest='manifest_file',
|
||||
@ -62,7 +62,8 @@ use for this GITC client.
|
||||
def Execute(self, opt, args):
|
||||
gitc_client = gitc_utils.parse_clientdir(os.getcwd())
|
||||
if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client):
|
||||
print('fatal: Please update your repo command. See go/gitc for instructions.', file=sys.stderr)
|
||||
print('fatal: Please update your repo command. See go/gitc for instructions.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
|
||||
gitc_client)
|
||||
|
@ -21,7 +21,8 @@ import sys
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
from error import GitError
|
||||
from git_command import git_require, GitCommand
|
||||
from git_command import GitCommand
|
||||
|
||||
|
||||
class GrepColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
@ -29,6 +30,7 @@ class GrepColoring(Coloring):
|
||||
self.project = self.printer('project', attr='bold')
|
||||
self.fail = self.printer('fail', fg='red')
|
||||
|
||||
|
||||
class Grep(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Print lines matching a pattern"
|
||||
@ -156,12 +158,11 @@ contain a line that matches both expressions:
|
||||
action='callback', callback=carry,
|
||||
help='Show only file names not containing matching lines')
|
||||
|
||||
|
||||
def Execute(self, opt, args):
|
||||
out = GrepColoring(self.manifest.manifestProject.config)
|
||||
|
||||
cmd_argv = ['grep']
|
||||
if out.is_on and git_require((1, 6, 3)):
|
||||
if out.is_on:
|
||||
cmd_argv.append('--color')
|
||||
cmd_argv.extend(getattr(opt, 'cmd_argv', []))
|
||||
|
||||
|
@ -19,10 +19,12 @@ import re
|
||||
import sys
|
||||
from formatter import AbstractFormatter, DumbWriter
|
||||
|
||||
from subcmds import all_commands
|
||||
from color import Coloring
|
||||
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
|
||||
import gitc_utils
|
||||
|
||||
|
||||
class Help(PagedCommand, MirrorSafeCommand):
|
||||
common = False
|
||||
helpSummary = "Display detailed help on a command"
|
||||
@ -41,7 +43,7 @@ Displays detailed usage information about a command.
|
||||
fmt = ' %%-%ds %%s' % maxlen
|
||||
|
||||
for name in commandNames:
|
||||
command = self.commands[name]
|
||||
command = all_commands[name]()
|
||||
try:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
@ -51,7 +53,7 @@ Displays detailed usage information about a command.
|
||||
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = list(sorted(self.commands))
|
||||
commandNames = list(sorted(all_commands))
|
||||
self._PrintCommands(commandNames)
|
||||
print("See 'repo help <command>' for more information on a "
|
||||
'specific command.')
|
||||
@ -72,13 +74,13 @@ Displays detailed usage information about a command.
|
||||
return False
|
||||
|
||||
commandNames = list(sorted([name
|
||||
for name, command in self.commands.items()
|
||||
if command.common and gitc_supported(command)]))
|
||||
for name, command in all_commands.items()
|
||||
if command.common and gitc_supported(command)]))
|
||||
self._PrintCommands(commandNames)
|
||||
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
|
||||
def _PrintCommandHelp(self, cmd, header_prefix=''):
|
||||
class _Out(Coloring):
|
||||
@ -131,8 +133,8 @@ Displays detailed usage information about a command.
|
||||
out._PrintSection('Description', 'helpDescription')
|
||||
|
||||
def _PrintAllCommandHelp(self):
|
||||
for name in sorted(self.commands):
|
||||
cmd = self.commands[name]
|
||||
for name in sorted(all_commands):
|
||||
cmd = all_commands[name]()
|
||||
cmd.manifest = self.manifest
|
||||
self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
|
||||
|
||||
@ -157,7 +159,7 @@ Displays detailed usage information about a command.
|
||||
name = args[0]
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
cmd = all_commands[name]()
|
||||
except KeyError:
|
||||
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -16,12 +16,14 @@
|
||||
|
||||
from command import PagedCommand
|
||||
from color import Coloring
|
||||
from git_refs import R_M
|
||||
from git_refs import R_M, R_HEADS
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
|
||||
class Info(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||
@ -41,15 +43,14 @@ class Info(PagedCommand):
|
||||
dest="local", action="store_true",
|
||||
help="Disable all remote operations")
|
||||
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.heading = self.out.printer('heading', attr = 'bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg = 'yellow')
|
||||
self.redtext = self.out.printer('redtext', fg = 'red')
|
||||
self.sha = self.out.printer("sha", fg = 'yellow')
|
||||
self.heading = self.out.printer('heading', attr='bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
|
||||
self.redtext = self.out.printer('redtext', fg='red')
|
||||
self.sha = self.out.printer("sha", fg='yellow')
|
||||
self.text = self.out.nofmt_printer('text')
|
||||
self.dimtext = self.out.printer('dimtext', attr = 'dim')
|
||||
self.dimtext = self.out.printer('dimtext', attr='dim')
|
||||
|
||||
self.opt = opt
|
||||
|
||||
@ -122,11 +123,14 @@ class Info(PagedCommand):
|
||||
self.printSeparator()
|
||||
|
||||
def findRemoteLocalDiff(self, project):
|
||||
#Fetch all the latest commits
|
||||
# Fetch all the latest commits.
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
logTarget = R_M + self.manifest.manifestProject.config.GetBranch("default").merge
|
||||
branch = self.manifest.manifestProject.config.GetBranch('default').merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
logTarget = R_M + branch
|
||||
|
||||
bareTmp = project.bare_git._bare
|
||||
project.bare_git._bare = False
|
||||
@ -195,16 +199,16 @@ class Info(PagedCommand):
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
self.text('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
self.out.nl()
|
||||
|
||||
for commit in commits:
|
||||
split = commit.split()
|
||||
self.text('{0:38}{1} '.format('','-'))
|
||||
self.text('{0:38}{1} '.format('', '-'))
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
145
subcmds/init.py
145
subcmds/init.py
@ -15,6 +15,8 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
@ -34,8 +36,10 @@ from command import InteractiveCommand, MirrorSafeCommand
|
||||
from error import ManifestParseError
|
||||
from project import SyncBuffer
|
||||
from git_config import GitConfig
|
||||
from git_command import git_require, MIN_GIT_VERSION
|
||||
from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
|
||||
import platform_utils
|
||||
from wrapper import Wrapper
|
||||
|
||||
|
||||
class Init(InteractiveCommand, MirrorSafeCommand):
|
||||
common = True
|
||||
@ -81,12 +85,15 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
|
||||
to update the working directory files.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
def _Options(self, p, gitc_init=False):
|
||||
# Logging
|
||||
g = p.add_option_group('Logging options')
|
||||
g.add_option('-v', '--verbose',
|
||||
dest='output_mode', action='store_true',
|
||||
help='show all output')
|
||||
g.add_option('-q', '--quiet',
|
||||
dest="quiet", action="store_true", default=False,
|
||||
help="be quiet")
|
||||
dest='output_mode', action='store_false',
|
||||
help='only show errors')
|
||||
|
||||
# Manifest
|
||||
g = p.add_option_group('Manifest options')
|
||||
@ -96,7 +103,12 @@ to update the working directory files.
|
||||
g.add_option('-b', '--manifest-branch',
|
||||
dest='manifest_branch',
|
||||
help='manifest branch or revision', metavar='REVISION')
|
||||
g.add_option('-c', '--current-branch',
|
||||
cbr_opts = ['--current-branch']
|
||||
# The gitc-init subcommand allocates -c itself, but a lot of init users
|
||||
# want -c, so try to satisfy both as best we can.
|
||||
if not gitc_init:
|
||||
cbr_opts += ['-c']
|
||||
g.add_option(*cbr_opts,
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current manifest branch from server')
|
||||
g.add_option('-m', '--manifest-name',
|
||||
@ -122,6 +134,10 @@ to update the working directory files.
|
||||
g.add_option('--clone-filter', action='store', default='blob:none',
|
||||
dest='clone_filter',
|
||||
help='filter for use with --partial-clone [default: %default]')
|
||||
# TODO(vapier): Expose option with real help text once this has been in the
|
||||
# wild for a while w/out significant bug reports. Goal is by ~Sep 2020.
|
||||
g.add_option('--worktree', action='store_true',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
g.add_option('--archive',
|
||||
dest='archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
@ -139,11 +155,13 @@ to update the working directory files.
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
g.add_option('--clone-bundle', action='store_true',
|
||||
help='force use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
|
||||
g.add_option('--no-clone-bundle',
|
||||
dest='no_clone_bundle', action='store_true',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
dest='clone_bundle', action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
|
||||
g.add_option('--no-tags',
|
||||
dest='no_tags', action='store_true',
|
||||
dest='tags', default=True, action='store_false',
|
||||
help="don't fetch tags in the manifest")
|
||||
|
||||
# Tool
|
||||
@ -151,11 +169,12 @@ to update the working directory files.
|
||||
g.add_option('--repo-url',
|
||||
dest='repo_url',
|
||||
help='repo repository location', metavar='URL')
|
||||
g.add_option('--repo-branch',
|
||||
dest='repo_branch',
|
||||
help='repo branch or revision', metavar='REVISION')
|
||||
g.add_option('--repo-rev', metavar='REV',
|
||||
help='repo branch or revision')
|
||||
g.add_option('--repo-branch', dest='repo_rev',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
dest='repo_verify', default=True, action='store_false',
|
||||
help='do not verify repo source code')
|
||||
|
||||
# Other
|
||||
@ -178,7 +197,8 @@ to update the working directory files.
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print('Get %s' % GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),
|
||||
print('Downloading manifest from %s' %
|
||||
(GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),),
|
||||
file=sys.stderr)
|
||||
|
||||
# The manifest project object doesn't keep track of the path on the
|
||||
@ -218,7 +238,7 @@ to update the working directory files.
|
||||
platformize = lambda x: 'platform-' + x
|
||||
if opt.platform == 'auto':
|
||||
if (not opt.mirror and
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
groups.append(platformize(platform.system().lower()))
|
||||
elif opt.platform == 'all':
|
||||
groups.extend(map(platformize, all_platforms))
|
||||
@ -240,6 +260,20 @@ to update the working directory files.
|
||||
if opt.dissociate:
|
||||
m.config.SetString('repo.dissociate', 'true')
|
||||
|
||||
if opt.worktree:
|
||||
if opt.mirror:
|
||||
print('fatal: --mirror and --worktree are incompatible',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.submodules:
|
||||
print('fatal: --submodules and --worktree are incompatible',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
m.config.SetString('repo.worktree', 'true')
|
||||
if is_new:
|
||||
m.use_git_worktrees = True
|
||||
print('warning: --worktree is experimental!', file=sys.stderr)
|
||||
|
||||
if opt.archive:
|
||||
if is_new:
|
||||
m.config.SetString('repo.archive', 'true')
|
||||
@ -271,14 +305,19 @@ to update the working directory files.
|
||||
else:
|
||||
opt.clone_filter = None
|
||||
|
||||
if opt.clone_bundle is None:
|
||||
opt.clone_bundle = False if opt.partial_clone else True
|
||||
else:
|
||||
m.config.SetString('repo.clonebundle', 'true' if opt.clone_bundle else 'false')
|
||||
|
||||
if opt.submodules:
|
||||
m.config.SetString('repo.submodules', 'true')
|
||||
|
||||
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
no_tags=opt.no_tags, submodules=opt.submodules,
|
||||
clone_filter=opt.clone_filter):
|
||||
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet, verbose=opt.verbose,
|
||||
clone_bundle=opt.clone_bundle,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
tags=opt.tags, submodules=opt.submodules,
|
||||
clone_filter=opt.clone_filter):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
|
||||
|
||||
@ -321,7 +360,7 @@ to update the working directory files.
|
||||
return value
|
||||
return a
|
||||
|
||||
def _ShouldConfigureUser(self):
|
||||
def _ShouldConfigureUser(self, opt):
|
||||
gc = self.manifest.globalConfig
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
@ -333,21 +372,24 @@ to update the working directory files.
|
||||
mp.config.SetString('user.name', gc.GetString('user.name'))
|
||||
mp.config.SetString('user.email', gc.GetString('user.email'))
|
||||
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||
mp.config.GetString('user.email')))
|
||||
print('If you want to change this, please re-run \'repo init\' with --config-name')
|
||||
if not opt.quiet:
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||
mp.config.GetString('user.email')))
|
||||
print("If you want to change this, please re-run 'repo init' with --config-name")
|
||||
return False
|
||||
|
||||
def _ConfigureUser(self):
|
||||
def _ConfigureUser(self, opt):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
while True:
|
||||
print()
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
if not opt.quiet:
|
||||
print()
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
|
||||
print()
|
||||
if not opt.quiet:
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (name, email))
|
||||
print('is this correct [y/N]? ', end='')
|
||||
# TODO: When we require Python 3, use flush=True w/print above.
|
||||
@@ -419,15 +461,16 @@ to update the working directory files.
# We store the depth in the main manifest project.
self.manifest.manifestProject.config.SetString('repo.depth', depth)

def _DisplayResult(self):
def _DisplayResult(self, opt):
if self.manifest.IsMirror:
init_type = 'mirror '
else:
init_type = ''

print()
print('repo %shas been initialized in %s'
% (init_type, self.manifest.topdir))
if not opt.quiet:
print()
print('repo %shas been initialized in %s' %
(init_type, self.manifest.topdir))

current_dir = os.getcwd()
if current_dir != self.manifest.topdir:

@@ -446,14 +489,44 @@ to update the working directory files.
self.OptionParser.error('--mirror and --archive cannot be used together.')

def Execute(self, opt, args):
git_require(MIN_GIT_VERSION, fail=True)
git_require(MIN_GIT_VERSION_HARD, fail=True)
if not git_require(MIN_GIT_VERSION_SOFT):
print('repo: warning: git-%s+ will soon be required; please upgrade your '
'version of git to maintain support.'
% ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),),
file=sys.stderr)

opt.quiet = opt.output_mode is False
opt.verbose = opt.output_mode is True

rp = self.manifest.repoProject

# Handle new --repo-url requests.
if opt.repo_url:
remote = rp.GetRemote('origin')
remote.url = opt.repo_url
remote.Save()

# Handle new --repo-rev requests.
if opt.repo_rev:
wrapper = Wrapper()
remote_ref, rev = wrapper.check_repo_rev(
rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet)
branch = rp.GetBranch('default')
branch.merge = remote_ref
rp.work_git.update_ref('refs/heads/default', rev)
branch.Save()

if opt.worktree:
# Older versions of git supported worktree, but had dangerous gc bugs.
git_require((2, 15, 0), fail=True, msg='git gc worktree corruption')

self._SyncManifest(opt)
self._LinkManifest(opt.manifest_name)

if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
if opt.config_name or self._ShouldConfigureUser():
self._ConfigureUser()
if opt.config_name or self._ShouldConfigureUser(opt):
self._ConfigureUser(opt)
self._ConfigureColor()

self._DisplayResult()
self._DisplayResult(opt)
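The two lines above, opt.quiet = opt.output_mode is False and opt.verbose = opt.output_mode is True, rely on a tri-state option: -v and -q both write into the same output_mode destination, which defaults to None. A minimal, standalone sketch of that idiom follows; the option names mirror the -v/-q definitions added to sync in this same series, but the parser here is illustrative only, not the real _Options().

import optparse

parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', dest='output_mode', action='store_true',
                  help='show all output')
parser.add_option('-q', '--quiet', dest='output_mode', action='store_false',
                  help='only show errors')
opts, _ = parser.parse_args(['-q'])

# output_mode is None (default), True (-v), or False (-q), so both flags can
# be derived from one value without the two options fighting over defaults.
quiet = opts.output_mode is False
verbose = opts.output_mode is True
assert quiet and not verbose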
@@ -15,10 +15,10 @@
# limitations under the License.

from __future__ import print_function
import sys

from command import Command, MirrorSafeCommand


class List(Command, MirrorSafeCommand):
common = True
helpSummary = "List projects and their associated directories"

@@ -77,7 +77,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
lines = []
for project in projects:
if opt.name_only and not opt.path_only:
lines.append("%s" % ( project.name))
lines.append("%s" % (project.name))
elif opt.path_only and not opt.name_only:
lines.append("%s" % (_getpath(project)))
else:
@@ -20,11 +20,12 @@ import sys

from command import PagedCommand


class Manifest(PagedCommand):
common = False
helpSummary = "Manifest inspection utility"
helpUsage = """
%prog [-o {-|NAME.xml} [-r]]
%prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r]
"""
_helpDescription = """

@@ -33,6 +34,12 @@ The manifest and (if present) local_manifest.xml are combined
together to produce a single manifest file. This file can be stored
in a Git repository for use during future 'repo init' invocations.

The -r option can be used to generate a manifest file with project
revisions set to the current commit hash. These are known as
"revision locked manifests", as they don't follow a particular branch.
In this case, the 'upstream' attribute is set to the ref we were on
when the manifest was generated. The 'dest-branch' attribute is set
to indicate the remote ref to push changes to via 'repo upload'.
"""
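To make the description above concrete, here is a small illustrative sketch of what -r does to a single project element: the revision gets pegged to the checked-out commit, and upstream/dest-branch record where it came from and where uploads go. The project name, SHA, and branch values are made up; only the attribute names follow the text above.

import xml.dom.minidom

snapshot = xml.dom.minidom.parseString(
    '<manifest>'
    '<project name="platform/build" path="build" revision="master"/>'
    '</manifest>')
proj = snapshot.getElementsByTagName('project')[0]

# Peg the revision to the commit currently checked out, and record the ref we
# were on plus the remote ref that 'repo upload' should target.
proj.setAttribute('revision', '1b117db767ab0a0f0a8f0a0d79a4bc51bdbfcb14')  # hypothetical SHA
proj.setAttribute('upstream', 'master')
proj.setAttribute('dest-branch', 'master')
print(proj.toxml())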
@property
@@ -49,11 +56,18 @@ in a Git repository for use during future 'repo init' invocations.
p.add_option('-r', '--revision-as-HEAD',
dest='peg_rev', action='store_true',
help='Save revisions as current HEAD')
p.add_option('-m', '--manifest-name',
help='temporary manifest to use for this sync', metavar='NAME.xml')
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
default=True, action='store_false',
help='If in -r mode, do not write the upstream field. '
'Only of use if the branch names for a sha1 manifest are '
'sensitive.')
p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
default=True, action='store_false',
help='If in -r mode, do not write the dest-branch field. '
'Only of use if the branch names for a sha1 manifest are '
'sensitive.')
p.add_option('-o', '--output-file',
dest='output_file',
default='-',
@@ -61,13 +75,18 @@ in a Git repository for use during future 'repo init' invocations.
metavar='-|NAME.xml')

def _Output(self, opt):
# If alternate manifest is specified, override the manifest file that we're using.
if opt.manifest_name:
self.manifest.Override(opt.manifest_name, False)

if opt.output_file == '-':
fd = sys.stdout
else:
fd = open(opt.output_file, 'w')
self.manifest.Save(fd,
peg_rev = opt.peg_rev,
peg_rev_upstream = opt.peg_rev_upstream)
peg_rev=opt.peg_rev,
peg_rev_upstream=opt.peg_rev_upstream,
peg_rev_dest_branch=opt.peg_rev_dest_branch)
fd.close()
if opt.output_file != '-':
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
@@ -18,6 +18,7 @@ from __future__ import print_function
from color import Coloring
from command import PagedCommand


class Prune(PagedCommand):
common = True
helpSummary = "Prune (delete) already merged topics"
@@ -43,8 +43,8 @@ branch but need to incorporate new upstream changes "underneath" them.

def _Options(self, p):
p.add_option('-i', '--interactive',
dest="interactive", action="store_true",
help="interactive rebase (single project only)")
dest="interactive", action="store_true",
help="interactive rebase (single project only)")

p.add_option('--fail-fast',
dest='fail_fast', action='store_true',
@@ -53,7 +53,7 @@ branch but need to incorporate new upstream changes "underneath" them.
dest='force_rebase', action='store_true',
help='Pass --force-rebase to git rebase')
p.add_option('--no-ff',
dest='no_ff', action='store_true',
dest='ff', default=True, action='store_false',
help='Pass --no-ff to git rebase')
p.add_option('-q', '--quiet',
dest='quiet', action='store_true',
@@ -82,7 +82,7 @@ branch but need to incorporate new upstream changes "underneath" them.
file=sys.stderr)
if len(args) == 1:
print('note: project %s is mapped to more than one path' % (args[0],),
file=sys.stderr)
file=sys.stderr)
return 1

# Setup the common git rebase args that we use for all projects.
@@ -93,7 +93,7 @@ branch but need to incorporate new upstream changes "underneath" them.
common_args.append('--quiet')
if opt.force_rebase:
common_args.append('--force-rebase')
if opt.no_ff:
if not opt.ff:
common_args.append('--no-ff')
if opt.autosquash:
common_args.append('--autosquash')
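The --no-ff hunk above switches to an inverted-boolean option: the user-facing flag stays negative, but the stored value is the positive name. The same pattern is applied to --no-tags, --no-repo-verify, and --no-clone-bundle elsewhere in this series. A minimal sketch of the idiom, separate from the real rebase command, follows.

import optparse

parser = optparse.OptionParser()
parser.add_option('--no-ff', dest='ff', default=True, action='store_false',
                  help='Pass --no-ff to git rebase')

opts, _ = parser.parse_args([])
assert opts.ff           # default: fast-forward is allowed
opts, _ = parser.parse_args(['--no-ff'])
assert not opts.ff       # the negative flag flips the positive name to False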
@@ -22,6 +22,7 @@ from command import Command, MirrorSafeCommand
from subcmds.sync import _PostRepoUpgrade
from subcmds.sync import _PostRepoFetch


class Selfupdate(Command, MirrorSafeCommand):
common = False
helpSummary = "Update repo to the latest version"
@@ -39,7 +40,7 @@ need to be performed by an end-user.
def _Options(self, p):
g = p.add_option_group('repo Version options')
g.add_option('--no-repo-verify',
dest='no_repo_verify', action='store_true',
dest='repo_verify', default=True, action='store_false',
help='do not verify repo source code')
g.add_option('--repo-upgraded',
dest='repo_upgraded', action='store_true',
@@ -59,5 +60,5 @@ need to be performed by an end-user.

rp.bare_git.gc('--auto')
_PostRepoFetch(rp,
no_repo_verify = opt.no_repo_verify,
verbose = True)
repo_verify=opt.repo_verify,
verbose=True)
@@ -16,6 +16,7 @@

from subcmds.sync import Sync


class Smartsync(Sync):
common = True
helpSummary = "Update working tree to the latest known good revision"
@@ -21,6 +21,7 @@ from color import Coloring
from command import InteractiveCommand
from git_command import GitCommand


class _ProjectList(Coloring):
def __init__(self, gc):
Coloring.__init__(self, gc, 'interactive')
@@ -28,6 +29,7 @@ class _ProjectList(Coloring):
self.header = self.printer('header', attr='bold')
self.help = self.printer('help', fg='red', attr='bold')


class Stage(InteractiveCommand):
common = True
helpSummary = "Stage file(s) for commit"
@@ -105,6 +107,7 @@ The '%prog' command stages files to prepare the next commit.
continue
print('Bye.')


def _AddI(project):
p = GitCommand(project, ['add', '--interactive'], bare=False)
p.Wait()
@@ -25,6 +25,7 @@ import gitc_utils
from progress import Progress
from project import SyncBuffer


class Start(Command):
common = True
helpSummary = "Start a new branch for development"
@@ -60,7 +61,7 @@ revision specified in the manifest.
if not opt.all:
projects = args[1:]
if len(projects) < 1:
projects = ['.',] # start it in the local project by default
projects = ['.'] # start it in the local project by default

all_projects = self.GetProjects(projects,
missing_ok=bool(self.gitc_manifest))
@@ -113,7 +114,7 @@ revision specified in the manifest.
branch_merge = self.manifest.default.revisionExpr

if not project.StartBranch(
nb, branch_merge=branch_merge, revision=opt.revision):
nb, branch_merge=branch_merge, revision=opt.revision):
err.append(project)
pm.end()
@@ -16,6 +16,10 @@

from __future__ import print_function

import glob
import itertools
import os

from command import PagedCommand

try:
@@ -23,14 +27,10 @@ try:
except ImportError:
import dummy_threading as _threading

import glob

import itertools
import os

from color import Coloring
import platform_utils


class Status(PagedCommand):
common = True
helpSummary = "Show the working tree status"
@@ -126,8 +126,8 @@ the following meanings:
continue
if item in proj_dirs_parents:
self._FindOrphans(glob.glob('%s/.*' % item) +
glob.glob('%s/*' % item),
proj_dirs, proj_dirs_parents, outstring)
glob.glob('%s/*' % item),
proj_dirs, proj_dirs_parents, outstring)
continue
outstring.append(''.join([status_header, item, '/']))

@@ -170,8 +170,8 @@ the following meanings:
class StatusColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'status')
self.project = self.printer('header', attr = 'bold')
self.untracked = self.printer('untracked', fg = 'red')
self.project = self.printer('header', attr='bold')
self.untracked = self.printer('untracked', fg='red')

orig_path = os.getcwd()
try:
@@ -179,8 +179,8 @@ the following meanings:

outstring = []
self._FindOrphans(glob.glob('.*') +
glob.glob('*'),
proj_dirs, proj_dirs_parents, outstring)
glob.glob('*'),
proj_dirs, proj_dirs_parents, outstring)

if outstring:
output = StatusColoring(self.manifest.globalConfig)
327 subcmds/sync.py
@ -15,6 +15,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
import netrc
|
||||
from optparse import SUPPRESS_HELP
|
||||
@ -53,6 +54,7 @@ except ImportError:
|
||||
|
||||
try:
|
||||
import resource
|
||||
|
||||
def _rlimit_nofile():
|
||||
return resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
except ImportError:
|
||||
@ -81,13 +83,16 @@ from manifest_xml import GitcManifest
|
||||
|
||||
_ONE_DAY_S = 24 * 60 * 60
|
||||
|
||||
|
||||
class _FetchError(Exception):
|
||||
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
|
||||
pass
|
||||
|
||||
|
||||
class _CheckoutError(Exception):
|
||||
"""Internal error thrown in _CheckoutOne() when we don't want stack trace."""
|
||||
|
||||
|
||||
class Sync(Command, MirrorSafeCommand):
|
||||
jobs = 1
|
||||
common = True
|
||||
@ -217,6 +222,10 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('-l', '--local-only',
|
||||
dest='local_only', action='store_true',
|
||||
help="only update working tree, don't fetch")
|
||||
p.add_option('--no-manifest-update', '--nmu',
|
||||
dest='mp_update', action='store_false', default='true',
|
||||
help='use the existing manifest checkout as-is. '
|
||||
'(do not update to the latest revision)')
|
||||
p.add_option('-n', '--network-only',
|
||||
dest='network_only', action='store_true',
|
||||
help="fetch only, don't update working tree")
|
||||
@ -226,17 +235,21 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('-c', '--current-branch',
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current branch from server')
|
||||
p.add_option('-v', '--verbose',
|
||||
dest='output_mode', action='store_true',
|
||||
help='show all sync output')
|
||||
p.add_option('-q', '--quiet',
|
||||
dest='quiet', action='store_true',
|
||||
help='be more quiet')
|
||||
dest='output_mode', action='store_false',
|
||||
help='only show errors')
|
||||
p.add_option('-j', '--jobs',
|
||||
dest='jobs', action='store', type='int',
|
||||
help="projects to fetch simultaneously (default %d)" % self.jobs)
|
||||
p.add_option('-m', '--manifest-name',
|
||||
dest='manifest_name',
|
||||
help='temporary manifest to use for this sync', metavar='NAME.xml')
|
||||
p.add_option('--no-clone-bundle',
|
||||
dest='no_clone_bundle', action='store_true',
|
||||
p.add_option('--clone-bundle', action='store_true',
|
||||
help='enable use of /clone.bundle on HTTP/HTTPS')
|
||||
p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
p.add_option('-u', '--manifest-server-username', action='store',
|
||||
dest='manifest_server_username',
|
||||
@ -248,11 +261,14 @@ later is required to fix a server side protocol bug.
|
||||
dest='fetch_submodules', action='store_true',
|
||||
help='fetch submodules from server')
|
||||
p.add_option('--no-tags',
|
||||
dest='no_tags', action='store_true',
|
||||
dest='tags', default=True, action='store_false',
|
||||
help="don't fetch tags")
|
||||
p.add_option('--optimized-fetch',
|
||||
dest='optimized_fetch', action='store_true',
|
||||
help='only fetch projects fixed to sha1 if revision does not exist locally')
|
||||
p.add_option('--retry-fetches',
|
||||
default=0, action='store', type='int',
|
||||
help='number of times to retry fetches on transient errors')
|
||||
p.add_option('--prune', dest='prune', action='store_true',
|
||||
help='delete refs that no longer exist on the remote')
|
||||
if show_smart:
|
||||
@ -265,7 +281,7 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
g = p.add_option_group('repo Version options')
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
dest='repo_verify', default=True, action='store_false',
|
||||
help='do not verify repo source code')
|
||||
g.add_option('--repo-upgraded',
|
||||
dest='repo_upgraded', action='store_true',
|
||||
@ -323,14 +339,16 @@ later is required to fix a server side protocol bug.
|
||||
try:
|
||||
try:
|
||||
success = project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
force_sync=opt.force_sync,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
no_tags=opt.no_tags, archive=self.manifest.IsArchive,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
prune=opt.prune,
|
||||
clone_filter=clone_filter)
|
||||
quiet=opt.quiet,
|
||||
verbose=opt.verbose,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
force_sync=opt.force_sync,
|
||||
clone_bundle=opt.clone_bundle,
|
||||
tags=opt.tags, archive=self.manifest.IsArchive,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
retry_fetches=opt.retry_fetches,
|
||||
prune=opt.prune,
|
||||
clone_filter=clone_filter)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
@ -351,8 +369,8 @@ later is required to fix a server side protocol bug.
|
||||
except _FetchError:
|
||||
pass
|
||||
except Exception as e:
|
||||
print('error: Cannot fetch %s (%s: %s)' \
|
||||
% (project.name, type(e).__name__, str(e)), file=sys.stderr)
|
||||
print('error: Cannot fetch %s (%s: %s)'
|
||||
% (project.name, type(e).__name__, str(e)), file=sys.stderr)
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
@ -364,7 +382,7 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
return success
|
||||
|
||||
def _Fetch(self, projects, opt):
|
||||
def _Fetch(self, projects, opt, err_event):
|
||||
fetched = set()
|
||||
lock = _threading.Lock()
|
||||
pm = Progress('Fetching projects', len(projects),
|
||||
@ -376,7 +394,6 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(self.jobs)
|
||||
err_event = _threading.Event()
|
||||
for project_list in objdir_project_map.values():
|
||||
# Check for any errors before running any more tasks.
|
||||
# ...we'll let existing threads finish, though.
|
||||
@ -393,8 +410,8 @@ later is required to fix a server side protocol bug.
|
||||
err_event=err_event,
|
||||
clone_filter=self.manifest.CloneFilter)
|
||||
if self.jobs > 1:
|
||||
t = _threading.Thread(target = self._FetchProjectList,
|
||||
kwargs = kwargs)
|
||||
t = _threading.Thread(target=self._FetchProjectList,
|
||||
kwargs=kwargs)
|
||||
# Ensure that Ctrl-C will not freeze the repo process.
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
@ -405,16 +422,11 @@ later is required to fix a server side protocol bug.
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet() and opt.fail_fast:
|
||||
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
pm.end()
|
||||
self._fetch_times.Save()
|
||||
|
||||
if not self.manifest.IsArchive:
|
||||
self._GCProjects(projects)
|
||||
self._GCProjects(projects, opt, err_event)
|
||||
|
||||
return fetched
|
||||
|
||||
@@ -500,12 +512,16 @@ later is required to fix a server side protocol bug.

return success

def _Checkout(self, all_projects, opt):
def _Checkout(self, all_projects, opt, err_event, err_results):
"""Checkout projects listed in all_projects

Args:
all_projects: List of all projects that should be checked out.
opt: Program options returned from optparse. See _Options().
err_event: We'll set this event in the case of an error (after printing
out info about the error).
err_results: A list of strings, paths to git repos where checkout
failed.
"""

# Perform checkouts in multiple threads when we are using partial clone.
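The new signature above hands the caller a shared Event plus a list so that all checkout failures can be reported at the end instead of aborting on the first one. A small, self-contained sketch of that convention follows; the function and project names are made up, only the Event/list usage mirrors the diff.

import threading

def checkout_all(projects, err_event, err_results):
    for path, ok in projects:
        if not ok:
            err_event.set()            # signal that at least one checkout failed
            err_results.append(path)   # remember which repo failed

err_event = threading.Event()
err_results = []
checkout_all([('good/project', True), ('bad/project', False)],
             err_event, err_results)
assert err_event.is_set()
assert err_results == ['bad/project']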
@ -524,8 +540,6 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(syncjobs)
|
||||
err_event = _threading.Event()
|
||||
err_results = []
|
||||
|
||||
for project in all_projects:
|
||||
# Check for any errors before running any more tasks.
|
||||
@ -556,24 +570,27 @@ later is required to fix a server side protocol bug.
|
||||
t.join()
|
||||
|
||||
pm.end()
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to checkout errors', file=sys.stderr)
|
||||
if err_results:
|
||||
print('Failing repos:\n%s' % '\n'.join(err_results),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _GCProjects(self, projects):
|
||||
def _GCProjects(self, projects, opt, err_event):
|
||||
gc_gitdirs = {}
|
||||
for project in projects:
|
||||
if len(project.manifest.GetProjectsWithName(project.name)) > 1:
|
||||
print('Shared project %s found, disabling pruning.' % project.name)
|
||||
project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
|
||||
# Make sure pruning never kicks in with shared projects.
|
||||
if (not project.use_git_worktrees and
|
||||
len(project.manifest.GetProjectsWithName(project.name)) > 1):
|
||||
print('%s: Shared project %s found, disabling pruning.' %
|
||||
(project.relpath, project.name))
|
||||
if git_require((2, 7, 0)):
|
||||
project.EnableRepositoryExtension('preciousObjects')
|
||||
else:
|
||||
# This isn't perfect, but it's the best we can do with old git.
|
||||
print('%s: WARNING: shared projects are unreliable when using old '
|
||||
'versions of git; please upgrade to git-2.7.0+.'
|
||||
% (project.relpath,),
|
||||
file=sys.stderr)
|
||||
project.config.SetString('gc.pruneExpire', 'never')
|
||||
gc_gitdirs[project.gitdir] = project.bare_git
|
||||
|
||||
has_dash_c = git_require((1, 7, 2))
|
||||
if multiprocessing and has_dash_c:
|
||||
if multiprocessing:
|
||||
cpu_count = multiprocessing.cpu_count()
|
||||
else:
|
||||
cpu_count = 1
|
||||
@ -588,7 +605,6 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(jobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
def GC(bare_git):
|
||||
try:
|
||||
@ -596,14 +612,14 @@ later is required to fix a server side protocol bug.
|
||||
bare_git.gc('--auto', config=config)
|
||||
except GitError:
|
||||
err_event.set()
|
||||
except:
|
||||
except Exception:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
for bare_git in gc_gitdirs.values():
|
||||
if err_event.isSet():
|
||||
if err_event.isSet() and opt.fail_fast:
|
||||
break
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target=GC, args=(bare_git,))
|
||||
@ -614,10 +630,6 @@ later is required to fix a server side protocol bug.
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to gc errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _ReloadManifest(self, manifest_name=None):
|
||||
if manifest_name:
|
||||
# Override calls _Unload already
|
||||
@ -625,65 +637,6 @@ later is required to fix a server side protocol bug.
|
||||
else:
|
||||
self.manifest._Unload()
|
||||
|
||||
def _DeleteProject(self, path):
|
||||
print('Deleting obsolete path %s' % path, file=sys.stderr)
|
||||
|
||||
# Delete the .git directory first, so we're less likely to have a partially
|
||||
# working git repository around. There shouldn't be any git projects here,
|
||||
# so rmtree works.
|
||||
try:
|
||||
platform_utils.rmtree(os.path.join(path, '.git'))
|
||||
except OSError as e:
|
||||
print('Failed to remove %s (%s)' % (os.path.join(path, '.git'), str(e)), file=sys.stderr)
|
||||
print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
|
||||
print(' remove manually, then run sync again', file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Delete everything under the worktree, except for directories that contain
|
||||
# another git project
|
||||
dirs_to_remove = []
|
||||
failed = False
|
||||
for root, dirs, files in platform_utils.walk(path):
|
||||
for f in files:
|
||||
try:
|
||||
platform_utils.remove(os.path.join(root, f))
|
||||
except OSError as e:
|
||||
print('Failed to remove %s (%s)' % (os.path.join(root, f), str(e)), file=sys.stderr)
|
||||
failed = True
|
||||
dirs[:] = [d for d in dirs
|
||||
if not os.path.lexists(os.path.join(root, d, '.git'))]
|
||||
dirs_to_remove += [os.path.join(root, d) for d in dirs
|
||||
if os.path.join(root, d) not in dirs_to_remove]
|
||||
for d in reversed(dirs_to_remove):
|
||||
if platform_utils.islink(d):
|
||||
try:
|
||||
platform_utils.remove(d)
|
||||
except OSError as e:
|
||||
print('Failed to remove %s (%s)' % (os.path.join(root, d), str(e)), file=sys.stderr)
|
||||
failed = True
|
||||
elif len(platform_utils.listdir(d)) == 0:
|
||||
try:
|
||||
platform_utils.rmdir(d)
|
||||
except OSError as e:
|
||||
print('Failed to remove %s (%s)' % (os.path.join(root, d), str(e)), file=sys.stderr)
|
||||
failed = True
|
||||
continue
|
||||
if failed:
|
||||
print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
|
||||
print(' remove manually, then run sync again', file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Try deleting parent dirs if they are empty
|
||||
project_dir = path
|
||||
while project_dir != self.manifest.topdir:
|
||||
if len(platform_utils.listdir(project_dir)) == 0:
|
||||
platform_utils.rmdir(project_dir)
|
||||
else:
|
||||
break
|
||||
project_dir = os.path.dirname(project_dir)
|
||||
|
||||
return 0
|
||||
|
||||
def UpdateProjectList(self, opt):
|
||||
new_project_paths = []
|
||||
for project in self.GetProjects(None, missing_ok=True):
|
||||
@ -705,28 +658,20 @@ later is required to fix a server side protocol bug.
|
||||
gitdir = os.path.join(self.manifest.topdir, path, '.git')
|
||||
if os.path.exists(gitdir):
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
revisionId = None,
|
||||
groups = None)
|
||||
|
||||
if project.IsDirty() and opt.force_remove_dirty:
|
||||
print('WARNING: Removing dirty project "%s": uncommitted changes '
|
||||
'erased' % project.relpath, file=sys.stderr)
|
||||
self._DeleteProject(project.worktree)
|
||||
elif project.IsDirty():
|
||||
print('error: Cannot remove project "%s": uncommitted changes '
|
||||
'are present' % project.relpath, file=sys.stderr)
|
||||
print(' commit changes, then run sync again',
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
elif self._DeleteProject(project.worktree):
|
||||
manifest=self.manifest,
|
||||
name=path,
|
||||
remote=RemoteSpec('origin'),
|
||||
gitdir=gitdir,
|
||||
objdir=gitdir,
|
||||
use_git_worktrees=os.path.isfile(gitdir),
|
||||
worktree=os.path.join(self.manifest.topdir, path),
|
||||
relpath=path,
|
||||
revisionExpr='HEAD',
|
||||
revisionId=None,
|
||||
groups=None)
|
||||
if not project.DeleteWorktree(
|
||||
quiet=opt.quiet,
|
||||
force=opt.force_remove_dirty):
|
||||
return 1
|
||||
|
||||
new_project_paths.sort()
|
||||
@ -745,7 +690,7 @@ later is required to fix a server side protocol bug.
|
||||
if not opt.quiet:
|
||||
print('Using manifest server %s' % manifest_server)
|
||||
|
||||
if not '@' in manifest_server:
|
||||
if '@' not in manifest_server:
|
||||
username = None
|
||||
password = None
|
||||
if opt.manifest_server_username and opt.manifest_server_password:
|
||||
@ -788,13 +733,13 @@ later is required to fix a server side protocol bug.
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
|
||||
env = os.environ.copy()
|
||||
if 'SYNC_TARGET' in env:
|
||||
target = env['SYNC_TARGET']
|
||||
if 'SYNC_TARGET' in os.environ:
|
||||
target = os.environ['SYNC_TARGET']
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
|
||||
target = '%s-%s' % (env['TARGET_PRODUCT'],
|
||||
env['TARGET_BUILD_VARIANT'])
|
||||
elif ('TARGET_PRODUCT' in os.environ and
|
||||
'TARGET_BUILD_VARIANT' in os.environ):
|
||||
target = '%s-%s' % (os.environ['TARGET_PRODUCT'],
|
||||
os.environ['TARGET_BUILD_VARIANT'])
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
else:
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch)
|
||||
@ -833,10 +778,11 @@ later is required to fix a server side protocol bug.
|
||||
"""Fetch & update the local manifest project."""
|
||||
if not opt.local_only:
|
||||
start = time.time()
|
||||
success = mp.Sync_NetworkHalf(quiet=opt.quiet,
|
||||
success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
no_tags=opt.no_tags,
|
||||
tags=opt.tags,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
retry_fetches=opt.retry_fetches,
|
||||
submodules=self.manifest.HasSubmodules,
|
||||
clone_filter=self.manifest.CloneFilter)
|
||||
finish = time.time()
|
||||
@ -881,12 +827,18 @@ later is required to fix a server side protocol bug.
|
||||
soft_limit, _ = _rlimit_nofile()
|
||||
self.jobs = min(self.jobs, (soft_limit - 5) // 3)
|
||||
|
||||
opt.quiet = opt.output_mode is False
|
||||
opt.verbose = opt.output_mode is True
|
||||
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name)
|
||||
|
||||
manifest_name = opt.manifest_name
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, 'smart_sync_override.xml')
|
||||
self.manifest.manifestProject.worktree, 'smart_sync_override.xml')
|
||||
|
||||
if opt.clone_bundle is None:
|
||||
opt.clone_bundle = self.manifest.CloneBundle
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
|
||||
@ -898,8 +850,17 @@ later is required to fix a server side protocol bug.
|
||||
print('error: failed to remove existing smart sync override manifest: %s' %
|
||||
e, file=sys.stderr)
|
||||
|
||||
err_event = _threading.Event()
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
cb = rp.CurrentBranch
|
||||
if cb:
|
||||
base = rp.GetBranch(cb).merge
|
||||
if not base or not base.startswith('refs/heads/'):
|
||||
print('warning: repo is not tracking a remote branch, so it will not '
|
||||
'receive updates; run `repo init --repo-rev=stable` to fix.',
|
||||
file=sys.stderr)
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
mp.PreSync()
|
||||
@ -907,7 +868,10 @@ later is required to fix a server side protocol bug.
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
|
||||
|
||||
self._UpdateManifestProject(opt, mp, manifest_name)
|
||||
if not opt.mp_update:
|
||||
print('Skipping update of local manifest project.')
|
||||
else:
|
||||
self._UpdateManifestProject(opt, mp, manifest_name)
|
||||
|
||||
if self.gitc_manifest:
|
||||
gitc_manifest_projects = self.GetProjects(args,
|
||||
@ -948,6 +912,10 @@ later is required to fix a server side protocol bug.
|
||||
missing_ok=True,
|
||||
submodules_ok=opt.fetch_submodules)
|
||||
|
||||
err_network_sync = False
|
||||
err_update_projects = False
|
||||
err_checkout = False
|
||||
|
||||
self._fetch_times = _FetchTimes(self.manifest)
|
||||
if not opt.local_only:
|
||||
to_fetch = []
|
||||
@ -957,10 +925,14 @@ later is required to fix a server side protocol bug.
|
||||
to_fetch.extend(all_projects)
|
||||
to_fetch.sort(key=self._fetch_times.Get, reverse=True)
|
||||
|
||||
fetched = self._Fetch(to_fetch, opt)
|
||||
_PostRepoFetch(rp, opt.no_repo_verify)
|
||||
fetched = self._Fetch(to_fetch, opt, err_event)
|
||||
|
||||
_PostRepoFetch(rp, opt.repo_verify)
|
||||
if opt.network_only:
|
||||
# bail out now; the rest touches the working tree
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return
|
||||
|
||||
# Iteratively fetch missing and/or nested unregistered submodules
|
||||
@ -982,22 +954,60 @@ later is required to fix a server side protocol bug.
|
||||
if previously_missing_set == missing_set:
|
||||
break
|
||||
previously_missing_set = missing_set
|
||||
fetched.update(self._Fetch(missing, opt))
|
||||
fetched.update(self._Fetch(missing, opt, err_event))
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
err_network_sync = True
|
||||
if opt.fail_fast:
|
||||
print('\nerror: Exited sync due to fetch errors.\n'
|
||||
'Local checkouts *not* updated. Resolve network issues & '
|
||||
'retry.\n'
|
||||
'`repo sync -l` will update some local checkouts.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if self.manifest.IsMirror or self.manifest.IsArchive:
|
||||
# bail out now, we have no working tree
|
||||
return
|
||||
|
||||
if self.UpdateProjectList(opt):
|
||||
sys.exit(1)
|
||||
err_event.set()
|
||||
err_update_projects = True
|
||||
if opt.fail_fast:
|
||||
print('\nerror: Local checkouts *not* updated.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
self._Checkout(all_projects, opt)
|
||||
err_results = []
|
||||
self._Checkout(all_projects, opt, err_event, err_results)
|
||||
if err_event.isSet():
|
||||
err_checkout = True
|
||||
# NB: We don't exit here because this is the last step.
|
||||
|
||||
# If there's a notice that's supposed to print at the end of the sync, print
|
||||
# it now...
|
||||
if self.manifest.notice:
|
||||
print(self.manifest.notice)
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Unable to fully sync the tree.', file=sys.stderr)
|
||||
if err_network_sync:
|
||||
print('error: Downloading network changes failed.', file=sys.stderr)
|
||||
if err_update_projects:
|
||||
print('error: Updating local project lists failed.', file=sys.stderr)
|
||||
if err_checkout:
|
||||
print('error: Checking out local projects failed.', file=sys.stderr)
|
||||
if err_results:
|
||||
print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr)
|
||||
print('Try re-running with "-j1 --fail-fast" to exit at the first error.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print('repo sync has finished successfully.')
|
||||
|
||||
|
||||
def _PostRepoUpgrade(manifest, quiet=False):
|
||||
wrapper = Wrapper()
|
||||
if wrapper.NeedSetupGnuPG():
|
||||
@ -1006,11 +1016,12 @@ def _PostRepoUpgrade(manifest, quiet=False):
|
||||
if project.Exists:
|
||||
project.PostRepoUpgrade()
|
||||
|
||||
def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
|
||||
|
||||
def _PostRepoFetch(rp, repo_verify=True, verbose=False):
|
||||
if rp.HasChanges:
|
||||
print('info: A new version of repo is available', file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
if no_repo_verify or _VerifyTag(rp):
|
||||
if not repo_verify or _VerifyTag(rp):
|
||||
syncbuf = SyncBuffer(rp.config)
|
||||
rp.Sync_LocalHalf(syncbuf)
|
||||
if not syncbuf.Finish():
|
||||
@ -1024,6 +1035,7 @@ def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
|
||||
print('repo version %s is current' % rp.work_git.describe(HEAD),
|
||||
file=sys.stderr)
|
||||
|
||||
|
||||
def _VerifyTag(project):
|
||||
gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
|
||||
if not os.path.exists(gpg_dir):
|
||||
@ -1049,14 +1061,14 @@ def _VerifyTag(project):
|
||||
return False
|
||||
|
||||
env = os.environ.copy()
|
||||
env['GIT_DIR'] = project.gitdir.encode()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
env['GIT_DIR'] = project.gitdir
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
stdout = subprocess.PIPE,
|
||||
stderr = subprocess.PIPE,
|
||||
env = env)
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=env)
|
||||
out = proc.stdout.read()
|
||||
proc.stdout.close()
|
||||
|
||||
@@ -1090,7 +1102,7 @@ class _FetchTimes(object):
old = self._times.get(name, t)
self._seen.add(name)
a = self._ALPHA
self._times[name] = (a*t) + ((1-a) * old)
self._times[name] = (a * t) + ((1 - a) * old)
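The reformatted line above is an exponentially weighted moving average of per-project fetch times: each new measurement pulls the stored estimate toward itself by a factor of _ALPHA. The value of _ALPHA is not shown in this hunk, so the 0.5 below is purely an assumption used to illustrate the arithmetic.

ALPHA = 0.5   # assumed smoothing factor, not the real _ALPHA
old = 30.0    # previous smoothed fetch time, in seconds
t = 10.0      # duration of the fetch that just finished
new = (ALPHA * t) + ((1 - ALPHA) * old)
assert new == 20.0  # recent fetches pull the estimate toward their duration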
def _Load(self):
|
||||
if self._times is None:
|
||||
@ -1128,6 +1140,8 @@ class _FetchTimes(object):
|
||||
# and supporting persistent-http[s]. It cannot change hosts from
|
||||
# request to request like the normal transport, the real url
|
||||
# is passed during initialization.
|
||||
|
||||
|
||||
class PersistentTransport(xmlrpc.client.Transport):
|
||||
def __init__(self, orig_host):
|
||||
self.orig_host = orig_host
|
||||
@ -1138,7 +1152,7 @@ class PersistentTransport(xmlrpc.client.Transport):
|
||||
# Since we're only using them for HTTP, copy the file temporarily,
|
||||
# stripping those prefixes away.
|
||||
if cookiefile:
|
||||
tmpcookiefile = tempfile.NamedTemporaryFile()
|
||||
tmpcookiefile = tempfile.NamedTemporaryFile(mode='w')
|
||||
tmpcookiefile.write("# HTTP Cookie File")
|
||||
try:
|
||||
with open(cookiefile) as f:
|
||||
@ -1162,7 +1176,7 @@ class PersistentTransport(xmlrpc.client.Transport):
|
||||
if proxy:
|
||||
proxyhandler = urllib.request.ProxyHandler({
|
||||
"http": proxy,
|
||||
"https": proxy })
|
||||
"https": proxy})
|
||||
|
||||
opener = urllib.request.build_opener(
|
||||
urllib.request.HTTPCookieProcessor(cookiejar),
|
||||
@ -1219,4 +1233,3 @@ class PersistentTransport(xmlrpc.client.Transport):
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
|
@ -23,16 +23,18 @@ from command import InteractiveCommand
|
||||
from editor import Editor
|
||||
from error import HookError, UploadError
|
||||
from git_command import GitCommand
|
||||
from git_refs import R_HEADS
|
||||
from project import RepoHook
|
||||
|
||||
from pyversion import is_python3
|
||||
if not is_python3():
|
||||
input = raw_input
|
||||
input = raw_input # noqa: F821
|
||||
else:
|
||||
unicode = str
|
||||
|
||||
UNUSUAL_COMMIT_THRESHOLD = 5
|
||||
|
||||
|
||||
def _ConfirmManyUploads(multiple_branches=False):
|
||||
if multiple_branches:
|
||||
print('ATTENTION: One or more branches has an unusually high number '
|
||||
@ -44,17 +46,20 @@ def _ConfirmManyUploads(multiple_branches=False):
|
||||
answer = input("If you are sure you intend to do this, type 'yes': ").strip()
|
||||
return answer == "yes"
|
||||
|
||||
|
||||
def _die(fmt, *args):
|
||||
msg = fmt % args
|
||||
print('error: %s' % msg, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _SplitEmails(values):
|
||||
result = []
|
||||
for value in values:
|
||||
result.extend([s.strip() for s in value.split(',')])
|
||||
return result
|
||||
|
||||
|
||||
class Upload(InteractiveCommand):
|
||||
common = True
|
||||
helpSummary = "Upload changes for code review"
|
||||
@@ -126,6 +131,23 @@ is set to "true" then repo will assume you always want the equivalent
of the -t option to the repo command. If unset or set to "false" then
repo will make use of only the command line option.

review.URL.uploadhashtags:

To add hashtags whenever uploading a commit, you can set a per-project
or global Git option to do so. The value of review.URL.uploadhashtags
will be used as comma delimited hashtags like the --hashtag option.

review.URL.uploadlabels:

To add labels whenever uploading a commit, you can set a per-project
or global Git option to do so. The value of review.URL.uploadlabels
will be used as comma delimited labels like the --label option.

review.URL.uploadnotify:

Control e-mail notifications when uploading.
https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
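As a sketch of how the comma-delimited values described above are consumed, the snippet below reuses the _ExpandCommaList() helper that this same diff adds further down; the config value itself is a made-up example of what review.URL.uploadhashtags might hold.

def _ExpandCommaList(value):
    """Split |value| up into comma delimited entries."""
    if not value:
        return
    for ret in value.split(','):
        ret = ret.strip()
        if ret:
            yield ret

# e.g. git config review.example.com.uploadhashtags "android, codec"
config_value = 'android, codec'
assert list(_ExpandCommaList(config_value)) == ['android', 'codec']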
# References
|
||||
|
||||
Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
@ -136,21 +158,27 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
p.add_option('-t',
|
||||
dest='auto_topic', action='store_true',
|
||||
help='Send local branch name to Gerrit Code Review')
|
||||
p.add_option('--hashtag', '--ht',
|
||||
dest='hashtags', action='append', default=[],
|
||||
help='Add hashtags (comma delimited) to the review.')
|
||||
p.add_option('--hashtag-branch', '--htb',
|
||||
action='store_true',
|
||||
help='Add local branch name as a hashtag.')
|
||||
p.add_option('-l', '--label',
|
||||
dest='labels', action='append', default=[],
|
||||
help='Add a label when uploading.')
|
||||
p.add_option('--re', '--reviewers',
|
||||
type='string', action='append', dest='reviewers',
|
||||
type='string', action='append', dest='reviewers',
|
||||
help='Request reviews from these people.')
|
||||
p.add_option('--cc',
|
||||
type='string', action='append', dest='cc',
|
||||
type='string', action='append', dest='cc',
|
||||
help='Also send email to these email addresses.')
|
||||
p.add_option('--br',
|
||||
type='string', action='store', dest='branch',
|
||||
type='string', action='store', dest='branch',
|
||||
help='Branch to upload.')
|
||||
p.add_option('--cbr', '--current-branch',
|
||||
dest='current_branch', action='store_true',
|
||||
help='Upload current git branch.')
|
||||
p.add_option('-d', '--draft',
|
||||
action='store_true', dest='draft', default=False,
|
||||
help='If specified, upload as a draft.')
|
||||
p.add_option('--ne', '--no-emails',
|
||||
action='store_false', dest='notify', default=True,
|
||||
help='If specified, do not send emails on upload.')
|
||||
@ -168,6 +196,15 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
type='string', action='store', dest='dest_branch',
|
||||
metavar='BRANCH',
|
||||
help='Submit for review on this target branch.')
|
||||
p.add_option('-n', '--dry-run',
|
||||
dest='dryrun', default=False, action='store_true',
|
||||
help='Do everything except actually upload the CL.')
|
||||
p.add_option('-y', '--yes',
|
||||
default=False, action='store_true',
|
||||
help='Answer yes to all safe prompts.')
|
||||
p.add_option('--no-cert-checks',
|
||||
dest='validate_certs', action='store_false', default=True,
|
||||
help='Disable verifying ssl certs (unsafe).')
|
||||
|
||||
# Options relating to upload hook. Note that verify and no-verify are NOT
|
||||
# opposites of each other, which is why they store to different locations.
|
||||
@ -185,15 +222,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
# Never run upload hooks, but upload anyway (AKA bypass hooks).
|
||||
# - no-verify=True, verify=True:
|
||||
# Invalid
|
||||
p.add_option('--no-cert-checks',
|
||||
dest='validate_certs', action='store_false', default=True,
|
||||
help='Disable verifying ssl certs (unsafe).')
|
||||
p.add_option('--no-verify',
|
||||
g = p.add_option_group('Upload hooks')
|
||||
g.add_option('--no-verify',
|
||||
dest='bypass_hooks', action='store_true',
|
||||
help='Do not run the upload hook.')
|
||||
p.add_option('--verify',
|
||||
g.add_option('--verify',
|
||||
dest='allow_all_hooks', action='store_true',
|
||||
help='Run the upload hook without prompting.')
|
||||
g.add_option('--ignore-hooks',
|
||||
dest='ignore_hooks', action='store_true',
|
||||
help='Do not abort uploading if upload hooks fail.')
|
||||
|
||||
def _SingleBranch(self, opt, branch, people):
|
||||
project = branch.project
|
||||
@ -212,20 +250,24 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
|
||||
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
|
||||
print('Upload project %s/ to remote branch %s%s:' %
|
||||
(project.relpath, destination, ' (draft)' if opt.draft else ''))
|
||||
(project.relpath, destination, ' (private)' if opt.private else ''))
|
||||
print(' branch %s (%2d commit%s, %s):' % (
|
||||
name,
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date))
|
||||
name,
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date))
|
||||
for commit in commit_list:
|
||||
print(' %s' % commit)
|
||||
|
||||
print('to %s (y/N)? ' % remote.review, end='')
|
||||
# TODO: When we require Python 3, use flush=True w/print above.
|
||||
sys.stdout.flush()
|
||||
answer = sys.stdin.readline().strip().lower()
|
||||
answer = answer in ('y', 'yes', '1', 'true', 't')
|
||||
if opt.yes:
|
||||
print('<--yes>')
|
||||
answer = True
|
||||
else:
|
||||
answer = sys.stdin.readline().strip().lower()
|
||||
answer = answer in ('y', 'yes', '1', 'true', 't')
|
||||
|
||||
if answer:
|
||||
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
|
||||
@ -322,12 +364,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
|
||||
key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None:
|
||||
if raw_list is not None:
|
||||
people[0].extend([entry.strip() for entry in raw_list.split(',')])
|
||||
|
||||
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None and len(people[0]) > 0:
|
||||
if raw_list is not None and len(people[0]) > 0:
|
||||
people[1].extend([entry.strip() for entry in raw_list.split(',')])
|
||||
|
||||
def _FindGerritChange(self, branch):
|
||||
@ -364,7 +406,11 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
print('Continue uploading? (y/N) ', end='')
|
||||
# TODO: When we require Python 3, use flush=True w/print above.
|
||||
sys.stdout.flush()
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if opt.yes:
|
||||
print('<--yes>')
|
||||
a = 'yes'
|
||||
else:
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a not in ('y', 'yes', 't', 'true', 'on'):
|
||||
print("skipping upload", file=sys.stderr)
|
||||
branch.uploaded = False
|
||||
@ -376,12 +422,51 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||
|
||||
def _ExpandCommaList(value):
|
||||
"""Split |value| up into comma delimited entries."""
|
||||
if not value:
|
||||
return
|
||||
for ret in value.split(','):
|
||||
ret = ret.strip()
|
||||
if ret:
|
||||
yield ret
|
||||
|
||||
# Check if hashtags should be included.
|
||||
key = 'review.%s.uploadhashtags' % branch.project.remote.review
|
||||
hashtags = set(_ExpandCommaList(branch.project.config.GetString(key)))
|
||||
for tag in opt.hashtags:
|
||||
hashtags.update(_ExpandCommaList(tag))
|
||||
if opt.hashtag_branch:
|
||||
hashtags.add(branch.name)
|
||||
|
||||
# Check if labels should be included.
|
||||
key = 'review.%s.uploadlabels' % branch.project.remote.review
|
||||
labels = set(_ExpandCommaList(branch.project.config.GetString(key)))
|
||||
for label in opt.labels:
|
||||
labels.update(_ExpandCommaList(label))
|
||||
# Basic sanity check on label syntax.
|
||||
for label in labels:
|
||||
if not re.match(r'^.+[+-][0-9]+$', label):
|
||||
print('repo: error: invalid label syntax "%s": labels use forms '
|
||||
'like CodeReview+1 or Verified-1' % (label,), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Handle e-mail notifications.
|
||||
if opt.notify is False:
|
||||
notify = 'NONE'
|
||||
else:
|
||||
key = 'review.%s.uploadnotify' % branch.project.remote.review
|
||||
notify = branch.project.config.GetString(key)
|
||||
|
||||
destination = opt.dest_branch or branch.project.dest_branch
|
||||
|
||||
# Make sure our local branch is not setup to track a different remote branch
|
||||
merge_branch = self._GetMergeBranch(branch.project)
|
||||
if destination:
|
||||
full_dest = 'refs/heads/%s' % destination
|
||||
full_dest = destination
|
||||
if not full_dest.startswith(R_HEADS):
|
||||
full_dest = R_HEADS + full_dest
|
||||
|
||||
if not opt.dest_branch and merge_branch and merge_branch != full_dest:
|
||||
print('merge branch %s does not match destination branch %s'
|
||||
% (merge_branch, full_dest))
|
||||
@ -392,10 +477,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
continue
|
||||
|
||||
branch.UploadForReview(people,
|
||||
dryrun=opt.dryrun,
|
||||
auto_topic=opt.auto_topic,
|
||||
draft=opt.draft,
|
||||
hashtags=hashtags,
|
||||
labels=labels,
|
||||
private=opt.private,
|
||||
notify=None if opt.notify else 'NONE',
|
||||
notify=notify,
|
||||
wip=opt.wip,
|
||||
dest_branch=destination,
|
||||
validate_certs=opt.validate_certs,
|
||||
@ -418,18 +505,18 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
else:
|
||||
fmt = '\n (%s)'
|
||||
print(('[FAILED] %-15s %-15s' + fmt) % (
|
||||
branch.project.relpath + '/', \
|
||||
branch.name, \
|
||||
str(branch.error)),
|
||||
file=sys.stderr)
|
||||
branch.project.relpath + '/',
|
||||
branch.name,
|
||||
str(branch.error)),
|
||||
file=sys.stderr)
|
||||
print()
|
||||
|
||||
for branch in todo:
|
||||
if branch.uploaded:
|
||||
print('[OK ] %-15s %s' % (
|
||||
branch.project.relpath + '/',
|
||||
branch.name),
|
||||
file=sys.stderr)
|
||||
branch.project.relpath + '/',
|
||||
branch.name),
|
||||
file=sys.stderr)
|
||||
|
||||
if have_errors:
|
||||
sys.exit(1)
|
||||
@ -437,14 +524,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
def _GetMergeBranch(self, project):
|
||||
p = GitCommand(project,
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.Wait()
|
||||
local_branch = p.stdout.strip()
|
||||
p = GitCommand(project,
|
||||
['config', '--get', 'branch.%s.merge' % local_branch],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
p.Wait()
|
||||
merge_branch = p.stdout.strip()
|
||||
return merge_branch
|
||||
@ -478,8 +565,12 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
pending.append((project, avail))
|
||||
|
||||
if not pending:
|
||||
print("no branches ready for upload", file=sys.stderr)
|
||||
return
|
||||
if branch is None:
|
||||
print('repo: error: no branches ready for upload', file=sys.stderr)
|
||||
else:
|
||||
print('repo: error: no branches named "%s" ready for upload' %
|
||||
(branch,), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not opt.bypass_hooks:
|
||||
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
|
||||
@ -488,12 +579,24 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
abort_if_user_denies=True)
|
||||
pending_proj_names = [project.name for (project, available) in pending]
|
||||
pending_worktrees = [project.worktree for (project, available) in pending]
|
||||
passed = True
|
||||
try:
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
|
||||
worktree_list=pending_worktrees)
|
||||
except SystemExit:
|
||||
passed = False
|
||||
if not opt.ignore_hooks:
|
||||
raise
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
return
|
||||
|
||||
if not passed:
|
||||
if opt.ignore_hooks:
|
||||
print('\nWARNING: pre-upload hooks failed, but uploading anyways.',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
return
|
||||
|
||||
if opt.reviewers:
|
||||
reviewers = _SplitEmails(opt.reviewers)
|
||||
|
@ -15,11 +15,15 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import platform
|
||||
import sys
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
from git_command import git, RepoSourceVersion, user_agent
|
||||
from git_refs import HEAD
|
||||
|
||||
|
||||
class Version(Command, MirrorSafeCommand):
|
||||
wrapper_version = None
|
||||
wrapper_path = None
|
||||
@ -39,10 +43,11 @@ class Version(Command, MirrorSafeCommand):
|
||||
rp_ver = rp.bare_git.describe(HEAD)
|
||||
print('repo version %s' % rp_ver)
|
||||
print(' (from %s)' % rem.url)
|
||||
print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD))
|
||||
|
||||
if Version.wrapper_path is not None:
|
||||
print('repo launcher version %s' % Version.wrapper_version)
|
||||
print(' (from %s)' % Version.wrapper_path)
|
||||
if self.wrapper_path is not None:
|
||||
print('repo launcher version %s' % self.wrapper_version)
|
||||
print(' (from %s)' % self.wrapper_path)
|
||||
|
||||
if src_ver != rp_ver:
|
||||
print(' (currently at %s)' % src_ver)
|
||||
@ -51,3 +56,11 @@ class Version(Command, MirrorSafeCommand):
|
||||
print('git %s' % git.version_tuple().full)
|
||||
print('git User-Agent %s' % user_agent.git)
|
||||
print('Python %s' % sys.version)
|
||||
uname = platform.uname()
|
||||
if sys.version_info.major < 3:
|
||||
# Python 3 returns a named tuple, but Python 2 is simpler.
|
||||
print(uname)
|
||||
else:
|
||||
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
|
||||
print('CPU %s (%s)' %
|
||||
(uname.machine, uname.processor if uname.processor else 'unknown'))
|
||||
|
10 tests/fixtures/test.gitconfig (vendored)
@ -1,3 +1,13 @@
|
||||
[section]
|
||||
empty
|
||||
nonempty = true
|
||||
boolinvalid = oops
|
||||
booltrue = true
|
||||
boolfalse = false
|
||||
intinvalid = oops
|
||||
inthex = 0x10
|
||||
inthexk = 0x10k
|
||||
int = 10
|
||||
intk = 10k
|
||||
intm = 10m
|
||||
intg = 10g
|
||||
|
@ -21,7 +21,40 @@ from __future__ import print_function
|
||||
import re
|
||||
import unittest
|
||||
|
||||
try:
|
||||
from unittest import mock
|
||||
except ImportError:
|
||||
import mock
|
||||
|
||||
import git_command
|
||||
import wrapper
|
||||
|
||||
|
||||
class SSHUnitTest(unittest.TestCase):
|
||||
"""Tests the ssh functions."""
|
||||
|
||||
def test_ssh_version(self):
|
||||
"""Check ssh_version() handling."""
|
||||
ver = git_command._parse_ssh_version('Unknown\n')
|
||||
self.assertEqual(ver, ())
|
||||
ver = git_command._parse_ssh_version('OpenSSH_1.0\n')
|
||||
self.assertEqual(ver, (1, 0))
|
||||
ver = git_command._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
|
||||
self.assertEqual(ver, (6, 6, 1))
|
||||
ver = git_command._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
|
||||
self.assertEqual(ver, (7, 6))
|
||||
|
||||
def test_ssh_sock(self):
|
||||
"""Check ssh_sock() function."""
|
||||
with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
|
||||
# old ssh version uses port
|
||||
with mock.patch('git_command.ssh_version', return_value=(6, 6)):
|
||||
self.assertTrue(git_command.ssh_sock().endswith('%p'))
|
||||
git_command._ssh_sock_path = None
|
||||
# new ssh version uses hash
|
||||
with mock.patch('git_command.ssh_version', return_value=(6, 7)):
|
||||
self.assertTrue(git_command.ssh_sock().endswith('%C'))
|
||||
git_command._ssh_sock_path = None
|
||||
|
||||
|
||||
class GitCallUnitTest(unittest.TestCase):
|
||||
@ -35,7 +68,7 @@ class GitCallUnitTest(unittest.TestCase):
|
||||
# We don't dive too deep into the values here to avoid having to update
|
||||
# whenever git versions change. We do check relative to this min version
|
||||
# as this is what `repo` itself requires via MIN_GIT_VERSION.
|
||||
MIN_GIT_VERSION = (1, 7, 2)
|
||||
MIN_GIT_VERSION = (2, 10, 2)
|
||||
self.assertTrue(isinstance(ver.major, int))
|
||||
self.assertTrue(isinstance(ver.minor, int))
|
||||
self.assertTrue(isinstance(ver.micro, int))
|
||||
@ -76,3 +109,45 @@ class UserAgentUnitTest(unittest.TestCase):
|
||||
# the general form.
|
||||
m = re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua)
|
||||
self.assertIsNotNone(m)
|
||||
|
||||
|
||||
class GitRequireTests(unittest.TestCase):
|
||||
"""Test the git_require helper."""
|
||||
|
||||
def setUp(self):
|
||||
ver = wrapper.GitVersion(1, 2, 3, 4)
|
||||
mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
|
||||
|
||||
def tearDown(self):
|
||||
mock.patch.stopall()
|
||||
|
||||
def test_older_nonfatal(self):
|
||||
"""Test non-fatal require calls with old versions."""
|
||||
self.assertFalse(git_command.git_require((2,)))
|
||||
self.assertFalse(git_command.git_require((1, 3)))
|
||||
self.assertFalse(git_command.git_require((1, 2, 4)))
|
||||
self.assertFalse(git_command.git_require((1, 2, 3, 5)))
|
||||
|
||||
def test_newer_nonfatal(self):
|
||||
"""Test non-fatal require calls with newer versions."""
|
||||
self.assertTrue(git_command.git_require((0,)))
|
||||
self.assertTrue(git_command.git_require((1, 0)))
|
||||
self.assertTrue(git_command.git_require((1, 2, 0)))
|
||||
self.assertTrue(git_command.git_require((1, 2, 3, 0)))
|
||||
|
||||
def test_equal_nonfatal(self):
|
||||
"""Test require calls with equal values."""
|
||||
self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
|
||||
self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))
|
||||
|
||||
def test_older_fatal(self):
|
||||
"""Test fatal require calls with old versions."""
|
||||
with self.assertRaises(SystemExit) as e:
|
||||
git_command.git_require((2,), fail=True)
|
||||
self.assertNotEqual(0, e.code)
|
||||
|
||||
def test_older_fatal_msg(self):
|
||||
"""Test fatal require calls with old versions and message."""
|
||||
with self.assertRaises(SystemExit) as e:
|
||||
git_command.git_require((2,), fail=True, msg='so sad')
|
||||
self.assertNotEqual(0, e.code)
|
||||
|
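The expectations in the tests above are consistent with ordinary Python tuple comparison between the installed version and the requested minimum prefix. The check below is an assumption-based sketch of that behaviour, not the real git_require() implementation.

def version_ok(installed, needed):
    # Tuples compare element by element, so a shorter 'needed' acts as a prefix.
    return installed >= needed

installed = (1, 2, 3, 4)  # matches wrapper.GitVersion(1, 2, 3, 4) in the tests
assert not version_ok(installed, (2,))       # too old for git 2.x
assert version_ok(installed, (1, 2, 0))      # newer than 1.2.0
assert version_ok(installed, (1, 2, 3, 4))   # equal versions pass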
@ -23,14 +23,17 @@ import unittest
|
||||
|
||||
import git_config
|
||||
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to test/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
|
||||
|
||||
class GitConfigUnitTest(unittest.TestCase):
|
||||
"""Tests the GitConfig class.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture.
|
||||
"""
|
||||
@@ -68,5 +71,43 @@ class GitConfigUnitTest(unittest.TestCase):
    val = config.GetString('empty')
    self.assertEqual(val, None)

  def test_GetBoolean_undefined(self):
    """Test GetBoolean on key that doesn't exist."""
    self.assertIsNone(self.config.GetBoolean('section.missing'))

  def test_GetBoolean_invalid(self):
    """Test GetBoolean on invalid boolean value."""
    self.assertIsNone(self.config.GetBoolean('section.boolinvalid'))

  def test_GetBoolean_true(self):
    """Test GetBoolean on valid true boolean."""
    self.assertTrue(self.config.GetBoolean('section.booltrue'))

  def test_GetBoolean_false(self):
    """Test GetBoolean on valid false boolean."""
    self.assertFalse(self.config.GetBoolean('section.boolfalse'))

  def test_GetInt_undefined(self):
    """Test GetInt on key that doesn't exist."""
    self.assertIsNone(self.config.GetInt('section.missing'))

  def test_GetInt_invalid(self):
    """Test GetInt on invalid integer value."""
    self.assertIsNone(self.config.GetInt('section.intinvalid'))

  def test_GetInt_valid(self):
    """Test GetInt on valid integers."""
    TESTS = (
        ('inthex', 16),
        ('inthexk', 16384),
        ('int', 10),
        ('intk', 10240),
        ('intm', 10485760),
        ('intg', 10737418240),
    )
    for key, value in TESTS:
      self.assertEqual(value, self.config.GetInt('section.%s' % (key,)))

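The GetInt cases above exercise git-style integer config values: an optional 0x hex prefix plus 1024-based k/m/g multiplier suffixes, with None returned for unparseable input. A small sketch of such a parser operating on the raw string (a hypothetical helper, not the GitConfig code itself):

def parse_git_int(value):
  """Parse values like '16k', '0x10', or '10g' the way the tests above expect."""
  if value is None:
    return None
  value = value.strip().lower()
  mult = 1
  if value.endswith('k'):
    value, mult = value[:-1], 1024
  elif value.endswith('m'):
    value, mult = value[:-1], 1024 * 1024
  elif value.endswith('g'):
    value, mult = value[:-1], 1024 * 1024 * 1024
  base = 16 if value.startswith('0x') else 10
  try:
    return int(value, base) * mult
  except ValueError:
    return None  # mirrors GetInt returning None for 'intinvalid'
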
if __name__ == '__main__':
  unittest.main()


tests/test_manifest_xml.py (new file, 148 lines)
@@ -0,0 +1,148 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the manifest_xml.py module."""

from __future__ import print_function

import os
import unittest
import xml.dom.minidom

import error
import manifest_xml

class ManifestValidateFilePaths(unittest.TestCase):
  """Check _ValidateFilePaths helper.

  This doesn't access a real filesystem.
  """

  def check_both(self, *args):
    manifest_xml.XmlManifest._ValidateFilePaths('copyfile', *args)
    manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)

  def test_normal_path(self):
    """Make sure good paths are accepted."""
    self.check_both('foo', 'bar')
    self.check_both('foo/bar', 'bar')
    self.check_both('foo', 'bar/bar')
    self.check_both('foo/bar', 'bar/bar')

  def test_symlink_targets(self):
    """Some extra checks for symlinks."""
    def check(*args):
      manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)

    # We allow symlinks to end in a slash since we allow them to point to dirs
    # in general. Technically the slash isn't necessary.
    check('foo/', 'bar')
    # We allow a single '.' to get a reference to the project itself.
    check('.', 'bar')

  def test_bad_paths(self):
    """Make sure bad paths (src & dest) are rejected."""
    PATHS = (
        '..',
        '../',
        './',
        'foo/',
        './foo',
        '../foo',
        'foo/./bar',
        'foo/../../bar',
        '/foo',
        './../foo',
        '.git/foo',
        # Check case folding.
        '.GIT/foo',
        'blah/.git/foo',
        '.repo/foo',
        '.repoconfig',
        # Block ~ due to 8.3 filenames on Windows filesystems.
        '~',
        'foo~',
        'blah/foo~',
        # Block Unicode characters that get normalized out by filesystems.
        u'foo\u200Cbar',
    )
    # Make sure platforms that use path separators (e.g. Windows) are also
    # rejected properly.
    if os.path.sep != '/':
      PATHS += tuple(x.replace('/', os.path.sep) for x in PATHS)

    for path in PATHS:
      self.assertRaises(
          error.ManifestInvalidPathError, self.check_both, path, 'a')
      self.assertRaises(
          error.ManifestInvalidPathError, self.check_both, 'a', path)

class ValueTests(unittest.TestCase):
  """Check utility parsing code."""

  def _get_node(self, text):
    return xml.dom.minidom.parseString(text).firstChild

  def test_bool_default(self):
    """Check XmlBool default handling."""
    node = self._get_node('<node/>')
    self.assertIsNone(manifest_xml.XmlBool(node, 'a'))
    self.assertIsNone(manifest_xml.XmlBool(node, 'a', None))
    self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))

    node = self._get_node('<node a=""/>')
    self.assertIsNone(manifest_xml.XmlBool(node, 'a'))

  def test_bool_invalid(self):
    """Check XmlBool invalid handling."""
    node = self._get_node('<node a="moo"/>')
    self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))

  def test_bool_true(self):
    """Check XmlBool true values."""
    for value in ('yes', 'true', '1'):
      node = self._get_node('<node a="%s"/>' % (value,))
      self.assertTrue(manifest_xml.XmlBool(node, 'a'))

  def test_bool_false(self):
    """Check XmlBool false values."""
    for value in ('no', 'false', '0'):
      node = self._get_node('<node a="%s"/>' % (value,))
      self.assertFalse(manifest_xml.XmlBool(node, 'a'))

  def test_int_default(self):
    """Check XmlInt default handling."""
    node = self._get_node('<node/>')
    self.assertIsNone(manifest_xml.XmlInt(node, 'a'))
    self.assertIsNone(manifest_xml.XmlInt(node, 'a', None))
    self.assertEqual(123, manifest_xml.XmlInt(node, 'a', 123))

    node = self._get_node('<node a=""/>')
    self.assertIsNone(manifest_xml.XmlInt(node, 'a'))

  def test_int_good(self):
    """Check XmlInt numeric handling."""
    for value in (-1, 0, 1, 50000):
      node = self._get_node('<node a="%s"/>' % (value,))
      self.assertEqual(value, manifest_xml.XmlInt(node, 'a'))

  def test_int_invalid(self):
    """Check XmlInt invalid handling."""
    with self.assertRaises(error.ManifestParseError):
      node = self._get_node('<node a="xx"/>')
      manifest_xml.XmlInt(node, 'a')

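XmlBool and XmlInt, as exercised above, read an attribute off a minidom node and coerce it: the caller's default comes back when the attribute is absent or empty, unparseable booleans fall back to the default, and unparseable integers raise a parse error. A condensed sketch under those assumptions (illustrative only; the real manifest_xml helpers raise error.ManifestParseError where this sketch raises ValueError):

def xml_bool(node, attr, default=None):
  """Return the boolean value of |attr|, or |default| if unset or unparseable."""
  value = node.getAttribute(attr)
  if not value:
    return default
  value = value.lower()
  if value in ('yes', 'true', '1'):
    return True
  if value in ('no', 'false', '0'):
    return False
  return default

def xml_int(node, attr, default=None):
  """Return the integer value of |attr|, or |default| if unset; raise on junk."""
  value = node.getAttribute(attr)
  if not value:
    return default
  try:
    return int(value)
  except ValueError:
    # Stand-in for the manifest parse error the tests above expect.
    raise ValueError('manifest: invalid %s="%s" integer' % (attr, value))
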
@@ -25,7 +25,9 @@ import subprocess
import tempfile
import unittest

+import error
import git_config
+import platform_utils
import project


@@ -39,7 +41,7 @@ def TempGitTree():
    subprocess.check_call(['git', 'init'], cwd=tempdir)
    yield tempdir
  finally:
-    shutil.rmtree(tempdir)
+    platform_utils.rmtree(tempdir)


class RepoHookShebang(unittest.TestCase):

@@ -134,3 +136,232 @@ class ReviewableBranchTests(unittest.TestCase):
    self.assertFalse(rb.base_exists)
    # Hard to assert anything useful about this.
    self.assertTrue(rb.date)

class CopyLinkTestCase(unittest.TestCase):
  """TestCase for stub repo client checkouts.

  It'll have a layout like:
    tempdir/          # self.tempdir
      checkout/       # self.topdir
        git-project/  # self.worktree

  Attributes:
    tempdir: A dedicated temporary directory.
    topdir: The top of the repo client checkout.
    worktree: The top of a project checkout.
  """

  def setUp(self):
    self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
    self.topdir = os.path.join(self.tempdir, 'checkout')
    self.worktree = os.path.join(self.topdir, 'git-project')
    os.makedirs(self.topdir)
    os.makedirs(self.worktree)

  def tearDown(self):
    shutil.rmtree(self.tempdir, ignore_errors=True)

  @staticmethod
  def touch(path):
    with open(path, 'w'):
      pass

  def assertExists(self, path, msg=None):
    """Make sure |path| exists."""
    if os.path.exists(path):
      return

    if msg is None:
      msg = ['path is missing: %s' % path]
      while path != '/':
        path = os.path.dirname(path)
        if not path:
          # If we're given something like "foo", abort once we get to "".
          break
        result = os.path.exists(path)
        msg.append('\tos.path.exists(%s): %s' % (path, result))
        if result:
          msg.append('\tcontents: %r' % os.listdir(path))
          break
      msg = '\n'.join(msg)

    raise self.failureException(msg)

class CopyFile(CopyLinkTestCase):
  """Check _CopyFile handling."""

  def CopyFile(self, src, dest):
    return project._CopyFile(self.worktree, src, self.topdir, dest)

  def test_basic(self):
    """Basic test of copying a file from a project to the toplevel."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    cf = self.CopyFile('foo.txt', 'foo')
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'foo'))

  def test_src_subdir(self):
    """Copy a file from a subdir of a project."""
    src = os.path.join(self.worktree, 'bar', 'foo.txt')
    os.makedirs(os.path.dirname(src))
    self.touch(src)
    cf = self.CopyFile('bar/foo.txt', 'new.txt')
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'new.txt'))

  def test_dest_subdir(self):
    """Copy a file to a subdir of a checkout."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
    self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))

  def test_update(self):
    """Make sure changed files get copied again."""
    src = os.path.join(self.worktree, 'foo.txt')
    dest = os.path.join(self.topdir, 'bar')
    with open(src, 'w') as f:
      f.write('1st')
    cf = self.CopyFile('foo.txt', 'bar')
    cf._Copy()
    self.assertExists(dest)
    with open(dest) as f:
      self.assertEqual(f.read(), '1st')

    with open(src, 'w') as f:
      f.write('2nd!')
    cf._Copy()
    with open(dest) as f:
      self.assertEqual(f.read(), '2nd!')

  def test_src_block_symlink(self):
    """Do not allow reading from a symlinked path."""
    src = os.path.join(self.worktree, 'foo.txt')
    sym = os.path.join(self.worktree, 'sym')
    self.touch(src)
    platform_utils.symlink('foo.txt', sym)
    self.assertExists(sym)
    cf = self.CopyFile('sym', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_src_block_symlink_traversal(self):
    """Do not allow reading through a symlink dir."""
    realfile = os.path.join(self.tempdir, 'file.txt')
    self.touch(realfile)
    src = os.path.join(self.worktree, 'bar', 'file.txt')
    platform_utils.symlink(self.tempdir, os.path.join(self.worktree, 'bar'))
    self.assertExists(src)
    cf = self.CopyFile('bar/file.txt', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_src_block_copy_from_dir(self):
    """Do not allow copying from a directory."""
    src = os.path.join(self.worktree, 'dir')
    os.makedirs(src)
    cf = self.CopyFile('dir', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_dest_block_symlink(self):
    """Do not allow writing to a symlink."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    platform_utils.symlink('dest', os.path.join(self.topdir, 'sym'))
    cf = self.CopyFile('foo.txt', 'sym')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_dest_block_symlink_traversal(self):
    """Do not allow writing through a symlink dir."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    platform_utils.symlink(tempfile.gettempdir(),
                           os.path.join(self.topdir, 'sym'))
    cf = self.CopyFile('foo.txt', 'sym/foo.txt')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_src_block_copy_to_dir(self):
    """Do not allow copying to a directory."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    os.makedirs(os.path.join(self.topdir, 'dir'))
    cf = self.CopyFile('foo.txt', 'dir')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

class LinkFile(CopyLinkTestCase):
  """Check _LinkFile handling."""

  def LinkFile(self, src, dest):
    return project._LinkFile(self.worktree, src, self.topdir, dest)

  def test_basic(self):
    """Basic test of linking a file from a project into the toplevel."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'foo')
    lf._Link()
    dest = os.path.join(self.topdir, 'foo')
    self.assertExists(dest)
    self.assertTrue(os.path.islink(dest))
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))

  def test_src_subdir(self):
    """Link to a file in a subdir of a project."""
    src = os.path.join(self.worktree, 'bar', 'foo.txt')
    os.makedirs(os.path.dirname(src))
    self.touch(src)
    lf = self.LinkFile('bar/foo.txt', 'foo')
    lf._Link()
    self.assertExists(os.path.join(self.topdir, 'foo'))

  def test_src_self(self):
    """Link to the project itself."""
    dest = os.path.join(self.topdir, 'foo', 'bar')
    lf = self.LinkFile('.', 'foo/bar')
    lf._Link()
    self.assertExists(dest)
    self.assertEqual(os.path.join('..', 'git-project'), os.readlink(dest))

  def test_dest_subdir(self):
    """Link a file to a subdir of a checkout."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
    self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
    lf._Link()
    self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))

  def test_src_block_relative(self):
    """Do not allow relative symlinks."""
    BAD_SOURCES = (
        './',
        '..',
        '../',
        'foo/.',
        'foo/./bar',
        'foo/..',
        'foo/../foo',
    )
    for src in BAD_SOURCES:
      lf = self.LinkFile(src, 'foo')
      self.assertRaises(error.ManifestInvalidPathError, lf._Link)

  def test_update(self):
    """Make sure changed targets get updated."""
    dest = os.path.join(self.topdir, 'sym')

    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'sym')
    lf._Link()
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))

    # Point the symlink somewhere else.
    os.unlink(dest)
    platform_utils.symlink(self.tempdir, dest)
    lf._Link()
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))

@@ -18,25 +18,84 @@

from __future__ import print_function

import contextlib
import os
import re
import shutil
import tempfile
import unittest

import platform_utils
from pyversion import is_python3
import wrapper


if is_python3():
  from unittest import mock
  from io import StringIO
else:
  import mock
  from StringIO import StringIO


@contextlib.contextmanager
def TemporaryDirectory():
  """Create a new empty git checkout for testing."""
  # TODO(vapier): Convert this to tempfile.TemporaryDirectory once we drop
  # Python 2 support entirely.
  try:
    tempdir = tempfile.mkdtemp(prefix='repo-tests')
    yield tempdir
  finally:
    platform_utils.rmtree(tempdir)


def fixture(*paths):
  """Return a path relative to tests/fixtures.
  """
  return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)

-class RepoWrapperUnitTest(unittest.TestCase):
-  """Tests helper functions in the repo wrapper
-  """
+class RepoWrapperTestCase(unittest.TestCase):
+  """TestCase for the wrapper module."""

  def setUp(self):
-    """Load the wrapper module every time
-    """
+    """Load the wrapper module every time."""
    wrapper._wrapper_module = None
    self.wrapper = wrapper.Wrapper()
+
+    if not is_python3():
+      self.assertRegex = self.assertRegexpMatches
+
+
+class RepoWrapperUnitTest(RepoWrapperTestCase):
+  """Tests helper functions in the repo wrapper
+  """

  def test_version(self):
    """Make sure _Version works."""
    with self.assertRaises(SystemExit) as e:
      with mock.patch('sys.stdout', new_callable=StringIO) as stdout:
        with mock.patch('sys.stderr', new_callable=StringIO) as stderr:
          self.wrapper._Version()
    self.assertEqual(0, e.exception.code)
    self.assertEqual('', stderr.getvalue())
    self.assertIn('repo launcher version', stdout.getvalue())

  def test_init_parser(self):
    """Make sure 'init' GetParser works."""
    parser = self.wrapper.GetParser(gitc_init=False)
    opts, args = parser.parse_args([])
    self.assertEqual([], args)
    self.assertIsNone(opts.manifest_url)

  def test_gitc_init_parser(self):
    """Make sure 'gitc-init' GetParser works."""
    parser = self.wrapper.GetParser(gitc_init=True)
    opts, args = parser.parse_args([])
    self.assertEqual([], args)
    self.assertIsNone(opts.manifest_file)

  def test_get_gitc_manifest_dir_no_gitc(self):
    """
    Test reading a missing gitc config file

@@ -72,9 +131,355 @@ class RepoWrapperUnitTest(unittest.TestCase):
    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/extra'), 'test')
    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test'), 'test')
    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/'), 'test')
-    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/extra'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/extra'),
+                     'test')
    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/'), None)
    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/'), None)

class SetGitTrace2ParentSid(RepoWrapperTestCase):
  """Check SetGitTrace2ParentSid behavior."""

  KEY = 'GIT_TRACE2_PARENT_SID'
  VALID_FORMAT = re.compile(r'^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$')

  def test_first_set(self):
    """Test env var not yet set."""
    env = {}
    self.wrapper.SetGitTrace2ParentSid(env)
    self.assertIn(self.KEY, env)
    value = env[self.KEY]
    self.assertRegex(value, self.VALID_FORMAT)

  def test_append(self):
    """Test env var is appended."""
    env = {self.KEY: 'pfx'}
    self.wrapper.SetGitTrace2ParentSid(env)
    self.assertIn(self.KEY, env)
    value = env[self.KEY]
    self.assertTrue(value.startswith('pfx/'))
    self.assertRegex(value[4:], self.VALID_FORMAT)

  def test_global_context(self):
    """Check os.environ gets updated by default."""
    os.environ.pop(self.KEY, None)
    self.wrapper.SetGitTrace2ParentSid()
    self.assertIn(self.KEY, os.environ)
    value = os.environ[self.KEY]
    self.assertRegex(value, self.VALID_FORMAT)

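The VALID_FORMAT pattern above pins down what SetGitTrace2ParentSid produces: a repo-<UTC timestamp>-P<8 hex digits> session id, chained onto any existing value with a '/' separator. A sketch of that behavior under those assumptions (hypothetical helper, not the wrapper's exact code):

import datetime
import os

KEY = 'GIT_TRACE2_PARENT_SID'

def set_trace2_parent_sid(env=None):
  # Default to mutating os.environ, mirroring the test_global_context case.
  if env is None:
    env = os.environ
  now = datetime.datetime.utcnow()
  value = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'),
                             os.getpid() & 0xffffffff)
  if env.get(KEY):
    value = env[KEY] + '/' + value  # append to an existing session id
  env[KEY] = value
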
class RunCommand(RepoWrapperTestCase):
  """Check run_command behavior."""

  def test_capture(self):
    """Check capture_output handling."""
    ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
    self.assertEqual(ret.stdout, 'hi\n')

  def test_check(self):
    """Check check handling."""
    self.wrapper.run_command(['true'], check=False)
    self.wrapper.run_command(['true'], check=True)
    self.wrapper.run_command(['false'], check=False)
    with self.assertRaises(self.wrapper.RunError):
      self.wrapper.run_command(['false'], check=True)

class RunGit(RepoWrapperTestCase):
  """Check run_git behavior."""

  def test_capture(self):
    """Check capture_output handling."""
    ret = self.wrapper.run_git('--version')
    self.assertIn('git', ret.stdout)

  def test_check(self):
    """Check check handling."""
    with self.assertRaises(self.wrapper.CloneFailure):
      self.wrapper.run_git('--version-asdfasdf')
    self.wrapper.run_git('--version-asdfasdf', check=False)

class ParseGitVersion(RepoWrapperTestCase):
  """Check ParseGitVersion behavior."""

  def test_autoload(self):
    """Check we can load the version from the live git."""
    ret = self.wrapper.ParseGitVersion()
    self.assertIsNotNone(ret)

  def test_bad_ver(self):
    """Check handling of bad git versions."""
    ret = self.wrapper.ParseGitVersion(ver_str='asdf')
    self.assertIsNone(ret)

  def test_normal_ver(self):
    """Check handling of normal git versions."""
    ret = self.wrapper.ParseGitVersion(ver_str='git version 2.25.1')
    self.assertEqual(2, ret.major)
    self.assertEqual(25, ret.minor)
    self.assertEqual(1, ret.micro)
    self.assertEqual('2.25.1', ret.full)

  def test_extended_ver(self):
    """Check handling of extended distro git versions."""
    ret = self.wrapper.ParseGitVersion(
        ver_str='git version 1.30.50.696.g5e7596f4ac-goog')
    self.assertEqual(1, ret.major)
    self.assertEqual(30, ret.minor)
    self.assertEqual(50, ret.micro)
    self.assertEqual('1.30.50.696.g5e7596f4ac-goog', ret.full)

class CheckGitVersion(RepoWrapperTestCase):
  """Check _CheckGitVersion behavior."""

  def test_unknown(self):
    """Unknown versions should abort."""
    with mock.patch.object(self.wrapper, 'ParseGitVersion', return_value=None):
      with self.assertRaises(self.wrapper.CloneFailure):
        self.wrapper._CheckGitVersion()

  def test_old(self):
    """Old versions should abort."""
    with mock.patch.object(
        self.wrapper, 'ParseGitVersion',
        return_value=self.wrapper.GitVersion(1, 0, 0, '1.0.0')):
      with self.assertRaises(self.wrapper.CloneFailure):
        self.wrapper._CheckGitVersion()

  def test_new(self):
    """Newer versions should run fine."""
    with mock.patch.object(
        self.wrapper, 'ParseGitVersion',
        return_value=self.wrapper.GitVersion(100, 0, 0, '100.0.0')):
      self.wrapper._CheckGitVersion()

class NeedSetupGnuPG(RepoWrapperTestCase):
  """Check NeedSetupGnuPG behavior."""

  def test_missing_dir(self):
    """The ~/.repoconfig tree doesn't exist yet."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = os.path.join(tempdir, 'foo')
      self.assertTrue(self.wrapper.NeedSetupGnuPG())

  def test_missing_keyring(self):
    """The keyring-version file doesn't exist yet."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      self.assertTrue(self.wrapper.NeedSetupGnuPG())

  def test_empty_keyring(self):
    """The keyring-version file exists, but is empty."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      with open(os.path.join(tempdir, 'keyring-version'), 'w'):
        pass
      self.assertTrue(self.wrapper.NeedSetupGnuPG())

  def test_old_keyring(self):
    """The keyring-version file exists, but it's old."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
        fp.write('1.0\n')
      self.assertTrue(self.wrapper.NeedSetupGnuPG())

  def test_new_keyring(self):
    """The keyring-version file exists, and is up-to-date."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
        fp.write('1000.0\n')
      self.assertFalse(self.wrapper.NeedSetupGnuPG())

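All four cases above come down to one question: does home_dot_repo/keyring-version exist and carry a version at least as new as the launcher's KEYRING_VERSION? A compact sketch of that check (the helper name and the KEYRING_VERSION constant here are assumptions for illustration, not the launcher code):

import os

KEYRING_VERSION = (1, 2)  # assumed constant for the sketch

def need_setup_gnupg(home_dot_repo):
  """Return True if the GnuPG keyring under |home_dot_repo| must be (re)initialized."""
  kv = os.path.join(home_dot_repo, 'keyring-version')
  if not os.path.exists(kv):
    return True  # missing directory or missing file
  with open(kv) as fp:
    data = fp.read().strip()
  if not data:
    return True  # empty file
  installed = tuple(int(x) for x in data.split('.'))
  return installed < KEYRING_VERSION  # an old keyring needs a refresh
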
class SetupGnuPG(RepoWrapperTestCase):
  """Check SetupGnuPG behavior."""

  def test_full(self):
    """Make sure it works completely."""
    with TemporaryDirectory() as tempdir:
      self.wrapper.home_dot_repo = tempdir
      self.wrapper.gpg_dir = os.path.join(self.wrapper.home_dot_repo, 'gnupg')
      self.assertTrue(self.wrapper.SetupGnuPG(True))
      with open(os.path.join(tempdir, 'keyring-version'), 'r') as fp:
        data = fp.read()
      self.assertEqual('.'.join(str(x) for x in self.wrapper.KEYRING_VERSION),
                       data.strip())

class VerifyRev(RepoWrapperTestCase):
  """Check verify_rev behavior."""

  def test_verify_passes(self):
    """Check when we have a valid signed tag."""
    desc_result = self.wrapper.RunResult(0, 'v1.0\n', '')
    gpg_result = self.wrapper.RunResult(0, '', '')
    with mock.patch.object(self.wrapper, 'run_git',
                           side_effect=(desc_result, gpg_result)):
      ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
      self.assertEqual('v1.0^0', ret)

  def test_unsigned_commit(self):
    """Check we fall back to signed tag when we have an unsigned commit."""
    desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
    gpg_result = self.wrapper.RunResult(0, '', '')
    with mock.patch.object(self.wrapper, 'run_git',
                           side_effect=(desc_result, gpg_result)):
      ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
      self.assertEqual('v1.0^0', ret)

  def test_verify_fails(self):
    """Check the error propagates when tag signature verification fails."""
    desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
    gpg_result = Exception
    with mock.patch.object(self.wrapper, 'run_git',
                           side_effect=(desc_result, gpg_result)):
      with self.assertRaises(Exception):
        self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)

class GitCheckoutTestCase(RepoWrapperTestCase):
  """Tests that use a real/small git checkout."""

  GIT_DIR = None
  REV_LIST = None

  @classmethod
  def setUpClass(cls):
    # Create a repo to operate on, but do it once per-class.
    cls.GIT_DIR = tempfile.mkdtemp(prefix='repo-rev-tests')
    run_git = wrapper.Wrapper().run_git

    remote = os.path.join(cls.GIT_DIR, 'remote')
    os.mkdir(remote)
    run_git('init', cwd=remote)
    run_git('commit', '--allow-empty', '-minit', cwd=remote)
    run_git('branch', 'stable', cwd=remote)
    run_git('tag', 'v1.0', cwd=remote)
    run_git('commit', '--allow-empty', '-m2nd commit', cwd=remote)
    cls.REV_LIST = run_git('rev-list', 'HEAD', cwd=remote).stdout.splitlines()

    run_git('init', cwd=cls.GIT_DIR)
    run_git('fetch', remote, '+refs/heads/*:refs/remotes/origin/*', cwd=cls.GIT_DIR)

  @classmethod
  def tearDownClass(cls):
    if not cls.GIT_DIR:
      return

    shutil.rmtree(cls.GIT_DIR)

class ResolveRepoRev(GitCheckoutTestCase):
  """Check resolve_repo_rev behavior."""

  def test_explicit_branch(self):
    """Check refs/heads/branch argument."""
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/stable')
    self.assertEqual('refs/heads/stable', rrev)
    self.assertEqual(self.REV_LIST[1], lrev)

    with self.assertRaises(wrapper.CloneFailure):
      self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')

  def test_explicit_tag(self):
    """Check refs/tags/tag argument."""
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/v1.0')
    self.assertEqual('refs/tags/v1.0', rrev)
    self.assertEqual(self.REV_LIST[1], lrev)

    with self.assertRaises(wrapper.CloneFailure):
      self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')

  def test_branch_name(self):
    """Check branch argument."""
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'stable')
    self.assertEqual('refs/heads/stable', rrev)
    self.assertEqual(self.REV_LIST[1], lrev)

    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'master')
    self.assertEqual('refs/heads/master', rrev)
    self.assertEqual(self.REV_LIST[0], lrev)

  def test_tag_name(self):
    """Check tag argument."""
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'v1.0')
    self.assertEqual('refs/tags/v1.0', rrev)
    self.assertEqual(self.REV_LIST[1], lrev)

  def test_full_commit(self):
    """Check specific commit argument."""
    commit = self.REV_LIST[0]
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
    self.assertEqual(commit, rrev)
    self.assertEqual(commit, lrev)

  def test_partial_commit(self):
    """Check specific (partial) commit argument."""
    commit = self.REV_LIST[0][0:20]
    rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
    self.assertEqual(self.REV_LIST[0], rrev)
    self.assertEqual(self.REV_LIST[0], lrev)

  def test_unknown(self):
    """Check unknown ref/commit argument."""
    with self.assertRaises(wrapper.CloneFailure):
      self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')

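resolve_repo_rev, as these tests show, accepts a fully qualified ref, a bare branch or tag name, or a full or abbreviated commit, and returns the ref to fetch plus the resolved commit. A rough sketch of that fallback order, assuming a run_git() helper whose result exposes .returncode and .stdout (illustrative only; the wrapper raises CloneFailure where this sketch raises ValueError):

def resolve_rev(run_git, gitdir, committish):
  # Fully qualified refs are tried as-is; bare names are tried as a branch
  # first and then as a tag, matching test_branch_name/test_tag_name above.
  if committish.startswith('refs/'):
    candidates = [committish]
  else:
    candidates = ['refs/heads/%s' % committish, 'refs/tags/%s' % committish]
  for ref in candidates:
    ret = run_git('rev-parse', '--verify', '%s^{commit}' % ref,
                  cwd=gitdir, check=False)
    if ret.returncode == 0:
      return ref, ret.stdout.strip()
  # Otherwise treat it as a (possibly abbreviated) commit object name.
  ret = run_git('rev-parse', '--verify', '%s^{commit}' % committish,
                cwd=gitdir, check=False)
  if ret.returncode == 0:
    commit = ret.stdout.strip()
    return commit, commit
  raise ValueError('unknown revision: %s' % committish)
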
class CheckRepoVerify(RepoWrapperTestCase):
  """Check check_repo_verify behavior."""

  def test_no_verify(self):
    """Always fail with --no-repo-verify."""
    self.assertFalse(self.wrapper.check_repo_verify(False))

  def test_gpg_initialized(self):
    """Should pass if gpg is setup already."""
    with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=False):
      self.assertTrue(self.wrapper.check_repo_verify(True))

  def test_need_gpg_setup(self):
    """Should pass/fail based on gpg setup."""
    with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=True):
      with mock.patch.object(self.wrapper, 'SetupGnuPG') as m:
        m.return_value = True
        self.assertTrue(self.wrapper.check_repo_verify(True))

        m.return_value = False
        self.assertFalse(self.wrapper.check_repo_verify(True))

class CheckRepoRev(GitCheckoutTestCase):
  """Check check_repo_rev behavior."""

  def test_verify_works(self):
    """Should pass when verification passes."""
    with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
      with mock.patch.object(self.wrapper, 'verify_rev', return_value='12345'):
        rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')
    self.assertEqual('refs/heads/stable', rrev)
    self.assertEqual('12345', lrev)

  def test_verify_fails(self):
    """Should fail when verification fails."""
    with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
      with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
        with self.assertRaises(Exception):
          self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')

  def test_verify_ignore(self):
    """Should pass when verification is disabled."""
    with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
      rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
    self.assertEqual('refs/heads/stable', rrev)
    self.assertEqual(self.REV_LIST[1], lrev)


if __name__ == '__main__':
  unittest.main()

tox.ini (18 lines changed)
@@ -17,6 +17,22 @@
[tox]
envlist = py27, py36, py37, py38

[gh-actions]
python =
    2.7: py27
    3.6: py36
    3.7: py37
    3.8: py38

[testenv]
deps = pytest
-commands = {toxinidir}/run_tests
+commands = {envpython} run_tests
setenv =
    GIT_AUTHOR_NAME = Repo test author
    GIT_COMMITTER_NAME = Repo test committer
    EMAIL = repo@gerrit.nodomain

[testenv:py27]
deps =
    mock
    pytest

@@ -27,7 +27,10 @@ import os
def WrapperPath():
  return os.path.join(os.path.dirname(__file__), 'repo')


_wrapper_module = None


def Wrapper():
  global _wrapper_module
  if not _wrapper_module: