Mirror of https://gerrit.googlesource.com/git-repo, synced 2025-06-26 20:17:52 +00:00.

Compare commits (92 commits)
SHA1: 1379a9b185, 128f34e874, 30bc354e25, ce9b6c43b2, 47692019b3, 1469c28ec3,
8add62325d, 974774761c, dc60e54d36, 0a849b660f, 5e2f32fe13, 51e39d536d,
6342d56914, 9dfd69f773, 08eb63cea4, 352c93b680, 7f7acfe9fd, 169b0218b3,
44bc9643ed, d7f8683daf, 8c1e9cbef1, a488af5ea5, e283b95cf2, dc5c4d1d11,
23411d3f9c, 160748f828, 6e89c965f4, 1f20776dbb, 16c1328fec, 6248e0fd1d,
50a81de2bc, 0501b29e7a, 4e1fc1013c, 4b325813fc, 0578ebf61a, 65f51ad29b,
80944b538d, 89f3ae5ae6, ac29ac397f, cebf227026, 7ae210a15b, 60fc51bb1d,
72325c5f3e, d79a4bc51b, 682f0b6426, e7082ccb54, dbfbcb14c1, d0ca0f6814,
433977e958, dd37fb2222, af908cb543, 74e8ed4bde, 2fe84e17b9, 1122353683,
b6871899be, 8e0fe1920e, d086467012, 2735bfc5ff, 653f8b711b, 9bc283e49b,
b4a6f6d798, 3e5b269fc6, cdb344c0e7, e257d56665, 3599cc3975, cfc8111f5e,
587f162033, 78964472ad, 05097c6222, 915fda130e, ea43176de0, 58ac1678e8,
e1111f5710, 7936ce8677, 23c900f105, bb930461ce, d3639c53d5, f725e548db,
4847e05743, bb8ee7f54a, 23d7dafd10, 8b40c00eab, e20da3eeed, 910dfe8497,
21b7fbe14d, b967f5c17a, dc15532bee, eea23b44a9, 5f11eac147, b0fbc7fb58,
4c418bf423, fc1b18ae9e
.github/workflows/test-ci.yml (vendored): 7 changes

@@ -5,7 +5,7 @@ name: Test CI
 on:
   push:
-    branches: [master, repo-1, stable, maint]
+    branches: [main, repo-1, stable, maint]
     tags: [v*]

 jobs:
@@ -14,10 +14,7 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [2.7, 3.6, 3.7, 3.8]
-        exclude:
-          - os: windows-latest
-            python-version: 2.7
+        python-version: [3.6, 3.7, 3.8]
     runs-on: ${{ matrix.os }}

     steps:
.gitignore (vendored): 1 change

@@ -7,6 +7,7 @@ __pycache__
 .repopickle_*
 /repoc
 /.tox
+/.venv

 # PyCharm related
 /.idea/
README.md: 17 changes

@@ -7,6 +7,7 @@ easier to work with Git. The repo command is an executable Python script
 that you can put anywhere in your path.

 * Homepage: <https://gerrit.googlesource.com/git-repo/>
+* Mailing list: [repo-discuss on Google Groups][repo-discuss]
 * Bug reports: <https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo>
 * Source: <https://gerrit.googlesource.com/git-repo/>
 * Overview: <https://source.android.com/source/developing.html>
@@ -18,6 +19,17 @@ that you can put anywhere in your path.
 * GitHub mirror: <https://github.com/GerritCodeReview/git-repo>
 * Postsubmit tests: <https://github.com/GerritCodeReview/git-repo/actions>

+## Contact
+
+Please use the [repo-discuss] mailing list or [issue tracker] for questions.
+
+You can [file a new bug report][new-bug] under the "repo" component.
+
+Please do not e-mail individual developers for support.
+They do not have the bandwidth for it, and often times questions have already
+been asked on [repo-discuss] or bugs posted to the [issue tracker].
+So please search those sites first.
+
 ## Install

 Many distros include repo, so you might be able to install from there.
@@ -36,3 +48,8 @@ $ PATH="${HOME}/.bin:${PATH}"
 $ curl https://storage.googleapis.com/git-repo-downloads/repo > ~/.bin/repo
 $ chmod a+rx ~/.bin/repo
 ```
+
+
+[new-bug]: https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue
+[issue tracker]: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
+[repo-discuss]: https://groups.google.com/forum/#!forum/repo-discuss
@@ -10,7 +10,7 @@
 - Make corrections if requested.
 - Verify your changes on gerrit so they can be submitted.

-  `git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`
+  `git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/main`


 # Long Version
@@ -150,7 +150,7 @@ Push your patches over HTTPS to the review server, possibly through
 a remembered remote to make this easier in the future:

     git config remote.review.url https://gerrit-review.googlesource.com/git-repo
-    git config remote.review.push HEAD:refs/for/master
+    git config remote.review.push HEAD:refs/for/main

     git push review

@@ -34,7 +34,7 @@ For example, if you want to change the manifest branch, you can simply run

 It tracks the git repository at `REPO_URL` using the `REPO_REV` branch.
 Those are specified at `repo init` time using the `--repo-url=<REPO_URL>`
-and `--repo-branch=<REPO_REV>` options.
+and `--repo-rev=<REPO_REV>` options.

 Any changes made to this directory will usually be automatically discarded
 by repo itself when it checks for updates. If you want to update to the
@@ -106,7 +106,7 @@ support, see the [manifest-format.md] file.
 setting in the manifest (i.e. the path on the remote server) with a `.git`
 suffix. This allows for multiple checkouts of the same remote git repo to
 share their objects. For example, you could have different branches of
-`foo/bar.git` checked out to `foo/bar-master`, `foo/bar-release`, etc...
+`foo/bar.git` checked out to `foo/bar-main`, `foo/bar-release`, etc...
 There will be multiple trees under `projects/` for each one, but only one
 under `project-objects/`.
@@ -134,6 +134,7 @@ User controlled settings are initialized when running `repo init`.
 |-------------------|---------------------------|-------------|
 | manifest.groups | `--groups` & `--platform` | The manifest groups to sync |
 | repo.archive | `--archive` | Use `git archive` for checkouts |
 | repo.clonebundle | `--clone-bundle` | Whether the initial sync used clone.bundle explicitly |
 | repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
 | repo.depth | `--depth` | Create shallow checkouts when cloning |
 | repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
@@ -193,7 +194,9 @@ The `[branch]` settings are updated by `repo start` and `git branch`.
 | review.\<url\>.autocopy | upload | Automatically add to `--cc=<value>` |
 | review.\<url\>.autoreviewer | upload | Automatically add to `--reviewers=<value>` |
 | review.\<url\>.autoupload | upload | Automatically answer "yes" or "no" to all prompts |
-| review.\<url\>.uploadhashtags | upload | Automatically add to `--hashtags=<value>` |
+| review.\<url\>.uploadhashtags | upload | Automatically add to `--hashtag=<value>` |
 | review.\<url\>.uploadlabels | upload | Automatically add to `--label=<value>` |
 | review.\<url\>.uploadnotify | upload | [Notify setting][upload-notify] to use |
 | review.\<url\>.uploadtopic | upload | Default [topic] to use |
 | review.\<url\>.username | upload | Override username with `ssh://` review URIs |
 | remote.\<remote\>.fetch | sync | Set of refs to fetch |
@@ -226,3 +229,4 @@ Repo will create & maintain a few files in the user's home directory.

 [manifest-format.md]: ./manifest-format.md
 [local manifests]: ./manifest-format.md#Local-Manifests
 [topic]: https://gerrit-review.googlesource.com/Documentation/intro-user.html#topics
 [upload-notify]: https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
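The settings in the table above end up as ordinary git-config keys on the manifest project. A minimal sketch of inspecting them from inside a client checkout, assuming the conventional `.repo/manifests.git/config` location; that path and the helper name are assumptions of this sketch, not part of the change above:

```python
import subprocess

# Assumed location of the manifest project's config inside a repo checkout.
CONFIG = '.repo/manifests.git/config'

def get_setting(name):
  """Return the value of a `repo init` setting such as 'repo.depth', or None."""
  result = subprocess.run(
      ['git', 'config', '--file', CONFIG, '--get', name],
      stdout=subprocess.PIPE)
  return result.stdout.decode().strip() or None

for key in ('manifest.groups', 'repo.depth', 'repo.dissociate'):
  print(key, '=', get_setting(key))
```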
@@ -99,7 +99,8 @@ following DTD:
 <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>

 <!ELEMENT include EMPTY>
 <!ATTLIST include name CDATA #REQUIRED>
+<!ATTLIST include groups CDATA #IMPLIED>
 ]>
 ```

@@ -110,6 +111,10 @@ A description of the elements and their attributes follows.

 The root element of the file.

+### Element notice
+
+Arbitrary text that is displayed to users whenever `repo sync` finishes.
+The content is simply passed through as it exists in the manifest.
+
 ### Element remote

@@ -142,8 +147,8 @@ Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`. This attribute is optional;
 if not specified then `repo upload` will not function.

-Attribute `revision`: Name of a Git branch (e.g. `master` or
-`refs/heads/master`). Remotes with their own revision will override
+Attribute `revision`: Name of a Git branch (e.g. `main` or
+`refs/heads/main`). Remotes with their own revision will override
 the default revision.

 ### Element default
@@ -156,11 +161,11 @@ Attribute `remote`: Name of a previously defined remote element.
 Project elements lacking a remote attribute of their own will use
 this remote.

-Attribute `revision`: Name of a Git branch (e.g. `master` or
-`refs/heads/master`). Project elements lacking their own
+Attribute `revision`: Name of a Git branch (e.g. `main` or
+`refs/heads/main`). Project elements lacking their own
 revision attribute will use this revision.

-Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
+Attribute `dest-branch`: Name of a Git branch (e.g. `main`).
 Project elements not setting their own `dest-branch` will inherit
 this value. If this value is not set, projects will use `revision`
 by default instead.
@@ -247,13 +252,13 @@ If not supplied the remote given by the default element is used.

 Attribute `revision`: Name of the Git branch the manifest wants
 to track for this project. Names can be relative to refs/heads
-(e.g. just "master") or absolute (e.g. "refs/heads/master").
+(e.g. just "main") or absolute (e.g. "refs/heads/main").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested. If not supplied the revision given by
 the remote element is used if applicable, else the default
 element is used.

-Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
+Attribute `dest-branch`: Name of a Git branch (e.g. `main`).
 When using `repo upload`, changes will be submitted for code
 review on this branch. If unspecified both here and in the
 default element, `revision` is used instead.
@@ -262,7 +267,7 @@ Attribute `groups`: List of groups to which this project belongs,
 whitespace or comma separated. All projects belong to the group
 "all", and each project automatically belongs to a group of
 its name:`name` and path:`path`. E.g. for
-<project name="monkeys" path="barrel-of"/>, that project
+`<project name="monkeys" path="barrel-of"/>`, that project
 definition is implicitly in the following manifest groups:
 default, name:monkeys, and path:barrel-of. If you place a project in the
 group "notdefault", it will not be automatically downloaded by repo.
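A small illustration of the grouping rule just described (`all`, `name:<name>`, `path:<path>`), using only the standard library rather than repo's own manifest code; the manifest contents are made up for the example:

```python
import xml.dom.minidom

# A tiny, hypothetical manifest used only for illustration.
MANIFEST = """\
<manifest>
  <remote name="origin" fetch="https://example.com"/>
  <default remote="origin" revision="main"/>
  <project name="monkeys" path="barrel-of"/>
</manifest>
"""

def implicit_groups(project):
  """Groups a <project> belongs to under the rules quoted above."""
  name = project.getAttribute('name')
  path = project.getAttribute('path') or name
  # Every project is in "all", plus name:<name> and path:<path>.
  return ['all', 'name:' + name, 'path:' + path]

doc = xml.dom.minidom.parseString(MANIFEST)
for proj in doc.getElementsByTagName('project'):
  print(implicit_groups(proj))
# ['all', 'name:monkeys', 'path:barrel-of']
```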
@@ -359,6 +364,19 @@ This element is mostly useful in a local manifest file, where
 the user can remove a project, and possibly replace it with their
 own definition.

+### Element repo-hooks
+
+NB: See the [practical documentation](./repo-hooks.md) for using repo hooks.
+
+Only one repo-hooks element may be specified at a time.
+Attempting to redefine it will fail to parse.
+
+Attribute `in-project`: The project where the hooks are defined. The value
+must match the `name` attribute (**not** the `path` attribute) of a previously
+defined `project` element.
+
+Attribute `enabled-list`: List of hooks to use, whitespace or comma separated.
+
 ### Element include

 This element provides the capability of including another manifest
@@ -368,6 +386,10 @@ target manifest to include - it must be a usable manifest on its own.
 Attribute `name`: the manifest to include, specified relative to
 the manifest repository's root.

+Attribute `groups`: List of additional groups to which all projects
+in the included manifest belong. This appends and recurses, meaning
+all projects in sub-manifests carry all parent include groups.
+Same syntax as the corresponding element of `project`.
+
 ## Local Manifests

@@ -396,10 +418,4 @@ these extra projects.
 Manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml` will
 be loaded in alphabetical order.

-Additional remotes and projects may also be added through a local
-manifest, stored in `$TOP_DIR/.repo/local_manifest.xml`. This method
-is deprecated in favor of using multiple manifest files as mentioned
-above.
-
-If `$TOP_DIR/.repo/local_manifest.xml` exists, it will be loaded before
-any manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
+The legacy `$TOP_DIR/.repo/local_manifest.xml` path is no longer supported.
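As a follow-up to the local-manifests rules above, a hedged sketch that drops an extra manifest into `$TOP_DIR/.repo/local_manifests/`; the checkout path, remote, and project names are placeholders:

```python
import os

# Hypothetical values; substitute your own checkout path, remote, and project.
TOP_DIR = os.path.expanduser('~/android')
LOCAL_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8"?>
<manifest>
  <remote name="mirror" fetch="https://example.com/mirror"/>
  <project name="tools/extra" path="tools/extra" remote="mirror" revision="main"/>
</manifest>
"""

local_dir = os.path.join(TOP_DIR, '.repo', 'local_manifests')
os.makedirs(local_dir, exist_ok=True)
# Files here are loaded in alphabetical order, so a numeric prefix keeps the
# ordering obvious when several local manifests exist.
with open(os.path.join(local_dir, '10-extra.xml'), 'w') as f:
  f.write(LOCAL_MANIFEST)
```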
@@ -18,13 +18,13 @@ Bugfixes may be added on a best-effort basis or from the community, but largely
 no new features will be added, nor is support guaranteed.

 Users can select this during `repo init` time via the [repo launcher].
-Otherwise the default branches (e.g. stable & master) will be used which will
+Otherwise the default branches (e.g. stable & main) will be used which will
 require Python 3.

 This means the [repo launcher] needs to support both Python 2 & Python 3, but
 since it doesn't import any other repo code, this shouldn't be too problematic.

-The master branch will require Python 3.6 at a minimum.
+The main branch will require Python 3.6 at a minimum.
 If the system has an older version of Python 3, then users will have to select
 the legacy Python 2 branch instead.
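main.py later in this change enforces exactly this split between a soft and a hard minimum Python version; a condensed sketch of that kind of gate (the messages here are illustrative, not the launcher's actual text):

```python
import sys

MIN_PYTHON_VERSION_SOFT = (3, 6)   # preferred minimum, per the doc above
MIN_PYTHON_VERSION_HARD = (3, 5)   # absolute minimum before bailing out

if sys.version_info.major < 3:
  print('error: Python 2 is no longer supported; please upgrade to '
        'Python %d.%d+.' % MIN_PYTHON_VERSION_SOFT, file=sys.stderr)
  sys.exit(1)
elif sys.version_info[:2] < MIN_PYTHON_VERSION_HARD:
  print('error: this Python 3 is too old; please upgrade to '
        'Python %d.%d+.' % MIN_PYTHON_VERSION_SOFT, file=sys.stderr)
  sys.exit(1)
elif sys.version_info[:2] < MIN_PYTHON_VERSION_SOFT:
  print('warning: Python %d.%d+ is recommended.' % MIN_PYTHON_VERSION_SOFT,
        file=sys.stderr)
```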
@@ -5,6 +5,37 @@ related topics and flows.

 [TOC]

+## Schedule
+
+There is no specific schedule for when releases are made.
+Usually it's more along the lines of "enough minor changes have been merged",
+or "there's a known issue the maintainers know should get fixed".
+If you find a fix has been merged for an issue important to you, but hasn't been
+released after a week or so, feel free to [contact] us to request a new release.
+
+### Release Freezes {#freeze}
+
+We try to observe a regular schedule for when **not** to release.
+If something goes wrong, staff need to be active in order to respond quickly &
+effectively.
+We also don't want to disrupt non-Google organizations if possible.
+
+We generally follow the rules:
+
+* Release during Mon - Thu, 9:00 - 14:00 [US PT]
+* Avoid holidays
+  * All regular [US holidays]
+  * Large international ones if possible
+  * All the various [New Years]
+    * Jan 1 in Gregorian calendar is the most obvious
+    * Check for large Lunar New Years too
+* Follow the normal [Google production freeze schedule]
+
+[US holidays]: https://en.wikipedia.org/wiki/Federal_holidays_in_the_United_States
+[US PT]: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+[New Years]: https://en.wikipedia.org/wiki/New_Year
+[Google production freeze schedule]: http://goto.google.com/prod-freeze
+
 ## Launcher script

 The main repo script serves as a standalone program and is often referred to as
@@ -49,11 +80,11 @@ control how repo finds updates:

 * `--repo-url`: This tells repo where to clone the full repo project itself.
   It defaults to the official project (`REPO_URL` in the launcher script).
-* `--repo-branch`: This tells repo which branch to use for the full project.
+* `--repo-rev`: This tells repo which branch to use for the full project.
   It defaults to the `stable` branch (`REPO_REV` in the launcher script).

 Whenever `repo sync` is run, repo will check to see if an update is available.
-It fetches the latest repo-branch from the repo-url.
+It fetches the latest repo-rev from the repo-url.
 Then it verifies that the latest commit in the branch has a valid signed tag
 using `git tag -v` (which uses gpg).
 If the tag is valid, then repo will update its internal checkout to it.
@@ -66,7 +97,7 @@ If that tag cannot be verified, it gives up and forces the user to resolve.

 ## Branch management

-All development happens on the `master` branch and should generally be stable.
+All development happens on the `main` branch and should generally be stable.

 Since the repo launcher defaults to tracking the `stable` branch, it is not
 normally updated until a new release is available.
@@ -81,7 +112,7 @@ For example, when `stable` moves from `v1.10.x` to `v1.11.x`, then the `maint`
 branch will be updated from `v1.9.x` to `v1.10.x`.

 We don't have parallel release branches/series.
-Typically all tags are made against the `master` branch and then pushed to the
+Typically all tags are made against the `main` branch and then pushed to the
 `stable` branch to make it available to the rest of the world.
 Since repo doesn't typically see a lot of changes, this tends to be OK.

@@ -89,10 +120,10 @@ Since repo doesn't typically see a lot of changes, this tends to be OK.

 When you want to create a new release, you'll need to select a good version and
 create a signed tag using a key registered in repo itself.
-Typically we just tag the latest version of the `master` branch.
+Typically we just tag the latest version of the `main` branch.
 The tag could be pushed now, but it won't be used by clients normally (since the
-default `repo-branch` setting is `stable`).
-This would allow some early testing on systems who explicitly select `master`.
+default `repo-rev` setting is `stable`).
+This would allow some early testing on systems who explicitly select `main`.

 ### Creating a signed tag
@@ -113,7 +144,7 @@ $ export GNUPGHOME=~/.gnupg/repo/
 $ gpg -K

 # Pick whatever branch or commit you want to tag.
-$ r=master
+$ r=main

 # Pick the new version.
 $ t=1.12.10
@@ -242,6 +273,7 @@ Things in italics are things we used to care about but probably don't anymore.
 | Apr 2020 | **Apr 2030** | | | **20.04 Focal** | 2.25.0 | 2.7.17 3.7.5 |


+[contact]: ../README.md#contact
 [rel-d]: https://en.wikipedia.org/wiki/Debian_version_history
 [rel-g]: https://en.wikipedia.org/wiki/Git#Releases
 [rel-p]: https://en.wikipedia.org/wiki/History_of_Python#Table_of_versions
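The update flow described above boils down to `git tag -v` succeeding for the fetched revision. A rough sketch of that check with plain subprocess calls; the repository path and tag name are placeholders, and this is not the launcher's actual implementation:

```python
import subprocess

def verify_signed_tag(repo_dir, tag):
  """Return True if |tag| in |repo_dir| carries a valid GPG signature.

  Mirrors the `git tag -v` step described above; assumes the signing key
  is already present in the local gpg keyring.
  """
  result = subprocess.run(
      ['git', 'tag', '-v', tag],
      cwd=repo_dir,
      stdout=subprocess.PIPE,
      stderr=subprocess.STDOUT,
  )
  return result.returncode == 0

# Placeholder path and tag, for illustration only.
if verify_signed_tag('/path/to/.repo/repo', 'v2.8'):
  print('tag verified; safe to update the internal checkout')
```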
@@ -27,7 +27,7 @@ repohooks project is updated and a hook is triggered.
 For the full syntax, see the [repo manifest format](./manifest-format.md).

 Here's a short example from
-[Android](https://android.googlesource.com/platform/manifest/+/master/default.xml).
+[Android](https://android.googlesource.com/platform/manifest/+/HEAD/default.xml).
 The `<project>` line checks out the repohooks git repo to the local
 `tools/repohooks/` path. The `<repo-hooks>` line says to look in the project
 with the name `platform/tools/repohooks` for hooks to run during the
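hooks.py later in this change documents that a pre-upload hook is a Python file whose `main()` takes the hook arguments plus `**kwargs`. A minimal, hypothetical `pre-upload.py` along those lines:

```python
# pre-upload.py - a minimal, hypothetical repo hook used only for illustration.
import sys


def main(project_list, **kwargs):
  """Called by repo before an upload; raise to block the upload."""
  # Accepting **kwargs lets repo extend the hook API without breaking us.
  if not project_list:
    raise Exception('pre-upload: no projects to check')
  for name in project_list:
    print('pre-upload: checking %s' % name)


if __name__ == '__main__':
  # Quick manual test outside of repo.
  main(sys.argv[1:])
```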
@@ -16,6 +16,7 @@
 from __future__ import print_function
 import os
+import re
 import sys
 import subprocess
 import tempfile
@@ -47,6 +48,35 @@ LAST_CWD = None
 _ssh_proxy_path = None
 _ssh_sock_path = None
 _ssh_clients = []
+_ssh_version = None
+
+
+def _run_ssh_version():
+  """run ssh -V to display the version number"""
+  return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()
+
+
+def _parse_ssh_version(ver_str=None):
+  """parse a ssh version string into a tuple"""
+  if ver_str is None:
+    ver_str = _run_ssh_version()
+  m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
+  if m:
+    return tuple(int(x) for x in m.group(1).split('.'))
+  else:
+    return ()
+
+
+def ssh_version():
+  """return ssh version as a tuple"""
+  global _ssh_version
+  if _ssh_version is None:
+    try:
+      _ssh_version = _parse_ssh_version()
+    except subprocess.CalledProcessError:
+      print('fatal: unable to detect ssh version', file=sys.stderr)
+      sys.exit(1)
+  return _ssh_version
+
+
 def ssh_sock(create=True):
@@ -57,9 +87,13 @@ def ssh_sock(create=True):
     tmp_dir = '/tmp'
     if not os.path.exists(tmp_dir):
       tmp_dir = tempfile.gettempdir()
+    if ssh_version() < (6, 7):
+      tokens = '%r@%h:%p'
+    else:
+      tokens = '%C'  # hash of %l%h%p%r
     _ssh_sock_path = os.path.join(
         tempfile.mkdtemp('', 'ssh-', tmp_dir),
-        'master-%r@%h:%p')
+        'master-' + tokens)
   return _ssh_sock_path
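The helpers above pick the ssh ControlPath token from the parsed OpenSSH version: `%r@%h:%p` before 6.7, `%C` afterwards. A self-contained check of that logic, feeding version banners directly instead of shelling out to `ssh -V` (the banner strings are examples):

```python
import re

def parse_ssh_version(ver_str):
  """Same regex as above: 'OpenSSH_7.6p1 ...' -> (7, 6)."""
  m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
  return tuple(int(x) for x in m.group(1).split('.')) if m else ()

for banner in ('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f\n',
               'OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n\n'):
  version = parse_ssh_version(banner)
  token = '%r@%h:%p' if version < (6, 7) else '%C'
  print(version, '->', token)
# (6, 6, 1) -> %r@%h:%p
# (7, 6) -> %C
```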
@@ -362,7 +362,7 @@ class GitConfig(object):
     return c

   def _do(self, *args):
-    command = ['config', '--file', self.file]
+    command = ['config', '--file', self.file, '--includes']
     command.extend(args)

     p = GitCommand(None,
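Adding `--includes` makes `git config --file` follow `include.path` directives in the target file. A quick way to see the difference; the sketch creates its own throwaway config files:

```python
import os
import subprocess
import tempfile

tmp = tempfile.mkdtemp()
base = os.path.join(tmp, 'base.config')
extra = os.path.join(tmp, 'extra.config')

with open(extra, 'w') as f:
  f.write('[color]\n\tui = auto\n')
with open(base, 'w') as f:
  f.write('[include]\n\tpath = %s\n' % extra)

def get(*args):
  return subprocess.run(['git', 'config', '--file', base] + list(args),
                        stdout=subprocess.PIPE).stdout.decode().strip()

print(get('--get', 'color.ui'))                # empty: include not followed
print(get('--includes', '--get', 'color.ui'))  # auto: include.path is honored
```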
@@ -23,6 +23,8 @@ R_CHANGES = 'refs/changes/'
 R_HEADS = 'refs/heads/'
 R_TAGS = 'refs/tags/'
 R_PUB = 'refs/published/'
+R_WORKTREE = 'refs/worktree/'
+R_WORKTREE_M = R_WORKTREE + 'm/'
 R_M = 'refs/remotes/m/'
git_trace2_event_log.py: new file, 197 lines

@@ -0,0 +1,197 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Provide event logging in the git trace2 EVENT format.
|
||||
|
||||
The git trace2 EVENT format is defined at:
|
||||
https://www.kernel.org/pub/software/scm/git/docs/technical/api-trace2.html#_event_format
|
||||
https://git-scm.com/docs/api-trace2#_the_event_format_target
|
||||
|
||||
Usage:
|
||||
|
||||
git_trace_log = EventLog()
|
||||
git_trace_log.StartEvent()
|
||||
...
|
||||
git_trace_log.ExitEvent()
|
||||
git_trace_log.Write()
|
||||
"""
|
||||
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
|
||||
from git_command import GitCommand, RepoSourceVersion
|
||||
|
||||
|
||||
class EventLog(object):
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
|
||||
Events are written to the log as consecutive JSON entries, one per line.
|
||||
Entries follow the git trace2 EVENT format.
|
||||
|
||||
Each entry contains the following common keys:
|
||||
- event: The event name
|
||||
- sid: session-id - Unique string to allow process instance to be identified.
|
||||
- thread: The thread name.
|
||||
- time: The UTC time of the event.
|
||||
|
||||
Valid 'event' names and event specific fields are documented here:
|
||||
https://git-scm.com/docs/api-trace2#_event_format
|
||||
"""
|
||||
|
||||
def __init__(self, env=None):
|
||||
"""Initializes the event log."""
|
||||
self._log = []
|
||||
# Try to get session-id (sid) from environment (setup in repo launcher).
|
||||
KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
if env is None:
|
||||
env = os.environ
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
|
||||
# Save both our sid component and the complete sid.
|
||||
# We use our sid component (self._sid) as the unique filename prefix and
|
||||
# the full sid (self._full_sid) in the log itself.
|
||||
self._sid = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
|
||||
parent_sid = env.get(KEY)
|
||||
# Append our sid component to the parent sid (if it exists).
|
||||
if parent_sid is not None:
|
||||
self._full_sid = parent_sid + '/' + self._sid
|
||||
else:
|
||||
self._full_sid = self._sid
|
||||
|
||||
# Set/update the environment variable.
|
||||
# Environment handling across systems is messy.
|
||||
try:
|
||||
env[KEY] = self._full_sid
|
||||
except UnicodeEncodeError:
|
||||
env[KEY] = self._full_sid.encode()
|
||||
|
||||
# Add a version event to front of the log.
|
||||
self._AddVersionEvent()
|
||||
|
||||
@property
|
||||
def full_sid(self):
|
||||
return self._full_sid
|
||||
|
||||
def _AddVersionEvent(self):
|
||||
"""Adds a 'version' event at the beginning of current log."""
|
||||
version_event = self._CreateEventDict('version')
|
||||
version_event['evt'] = 2
|
||||
version_event['exe'] = RepoSourceVersion()
|
||||
self._log.insert(0, version_event)
|
||||
|
||||
def _CreateEventDict(self, event_name):
|
||||
"""Returns a dictionary with the common keys/values for git trace2 events.
|
||||
|
||||
Args:
|
||||
event_name: The event name.
|
||||
|
||||
Returns:
|
||||
Dictionary with the common event fields populated.
|
||||
"""
|
||||
return {
|
||||
'event': event_name,
|
||||
'sid': self._full_sid,
|
||||
'thread': threading.currentThread().getName(),
|
||||
'time': datetime.datetime.utcnow().isoformat() + 'Z',
|
||||
}
|
||||
|
||||
def StartEvent(self):
|
||||
"""Append a 'start' event to the current log."""
|
||||
start_event = self._CreateEventDict('start')
|
||||
start_event['argv'] = sys.argv
|
||||
self._log.append(start_event)
|
||||
|
||||
def ExitEvent(self, result):
|
||||
"""Append an 'exit' event to the current log.
|
||||
|
||||
Args:
|
||||
result: Exit code of the event
|
||||
"""
|
||||
exit_event = self._CreateEventDict('exit')
|
||||
|
||||
# Consider 'None' success (consistent with event_log result handling).
|
||||
if result is None:
|
||||
result = 0
|
||||
exit_event['code'] = result
|
||||
self._log.append(exit_event)
|
||||
|
||||
def Write(self, path=None):
|
||||
"""Writes the log out to a file.
|
||||
|
||||
Log is only written if 'path' or 'git config --get trace2.eventtarget'
|
||||
provide a valid path to write logs to.
|
||||
|
||||
Logging filename format follows the git trace2 style of being a unique
|
||||
(exclusive writable) file.
|
||||
|
||||
Args:
|
||||
path: Path to where logs should be written.
|
||||
|
||||
Returns:
|
||||
log_path: Path to the log file if log is written, otherwise None
|
||||
"""
|
||||
log_path = None
|
||||
# If no logging path is specified, get the path from 'trace2.eventtarget'.
|
||||
if path is None:
|
||||
cmd = ['config', '--get', 'trace2.eventtarget']
|
||||
# TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
|
||||
# system git config variables.
|
||||
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
|
||||
bare=True)
|
||||
retval = p.Wait()
|
||||
if retval == 0:
|
||||
# Strip trailing carriage-return in path.
|
||||
path = p.stdout.rstrip('\n')
|
||||
elif retval != 1:
|
||||
# `git config --get` is documented to produce an exit status of `1` if
|
||||
# the requested variable is not present in the configuration. Report any
|
||||
# other return value as an error.
|
||||
print("repo: error: 'git config --get' call failed with return code: %r, stderr: %r" % (
|
||||
retval, p.stderr), file=sys.stderr)
|
||||
|
||||
if isinstance(path, str):
|
||||
# Get absolute path.
|
||||
path = os.path.abspath(os.path.expanduser(path))
|
||||
else:
|
||||
raise TypeError('path: str required but got %s.' % type(path))
|
||||
|
||||
# Git trace2 requires a directory to write log to.
|
||||
|
||||
# TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
|
||||
if not os.path.isdir(path):
|
||||
return None
|
||||
# Use NamedTemporaryFile to generate a unique filename as required by git trace2.
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(mode='x', prefix=self._sid, dir=path,
|
||||
delete=False) as f:
|
||||
# TODO(https://crbug.com/gerrit/13706): Support writing events as they
|
||||
# occur.
|
||||
for e in self._log:
|
||||
# Dump in compact encoding mode.
|
||||
# See 'Compact encoding' in Python docs:
|
||||
# https://docs.python.org/3/library/json.html#module-json
|
||||
json.dump(e, f, indent=None, separators=(',', ':'))
|
||||
f.write('\n')
|
||||
log_path = f.name
|
||||
except FileExistsError as err:
|
||||
print('repo: warning: git trace2 logging failed: %r' % err,
|
||||
file=sys.stderr)
|
||||
return None
|
||||
return log_path
|
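Write() above dumps each event with `indent=None, separators=(',', ':')` so every entry lands on a single compact line, as the trace2 EVENT format expects. A tiny standalone illustration (the sid value is made up):

```python
import datetime
import json
import threading

# A hypothetical event in the shape _CreateEventDict() builds above.
event = {
    'event': 'start',
    'sid': 'repo-20200617T000000Z-P000034d2',
    'thread': threading.current_thread().name,
    'time': datetime.datetime.utcnow().isoformat() + 'Z',
    'argv': ['repo', 'sync'],
}

# Compact encoding: no indent, no spaces after separators -> one line per event.
line = json.dumps(event, indent=None, separators=(',', ':'))
print(line)
# {"event":"start","sid":"repo-20200617T000000Z-P000034d2",...}
```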
@@ -45,7 +45,8 @@ def _set_project_revisions(projects):
   should not be overly large. Recommend calling this function multiple times
   with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.

-  @param projects: List of project objects to set the revionExpr for.
+  Args:
+    projects: List of project objects to set the revionExpr for.
   """
   # Retrieve the commit id for each project based off of it's current
   # revisionExpr and it is not already a commit id.
@@ -73,7 +74,8 @@ def _manifest_groups(manifest):
   This is the same logic used by Command.GetProjects(), which is used during
   repo sync

-  @param manifest: The XmlManifest object
+  Args:
+    manifest: The XmlManifest object
   """
   mp = manifest.manifestProject
   groups = mp.config.GetString('manifest.groups')
@@ -85,9 +87,10 @@ def _manifest_groups(manifest):
 def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
   """Generate a manifest for shafsd to use for this GITC client.

-  @param gitc_manifest: Current gitc manifest, or None if there isn't one yet.
-  @param manifest: A GitcManifest object loaded with the current repo manifest.
-  @param paths: List of project paths we want to update.
+  Args:
+    gitc_manifest: Current gitc manifest, or None if there isn't one yet.
+    manifest: A GitcManifest object loaded with the current repo manifest.
+    paths: List of project paths we want to update.
   """

   print('Generating GITC Manifest by fetching revision SHAs for each '
@@ -149,12 +152,15 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
 def save_manifest(manifest, client_dir=None):
   """Save the manifest file in the client_dir.

-  @param client_dir: Client directory to save the manifest in.
-  @param manifest: Manifest object to save.
+  Args:
+    manifest: Manifest object to save.
+    client_dir: Client directory to save the manifest in.
   """
   if not client_dir:
-    client_dir = manifest.gitc_client_dir
-  with open(os.path.join(client_dir, '.manifest'), 'w') as f:
+    manifest_file = manifest.manifestFile
+  else:
+    manifest_file = os.path.join(client_dir, '.manifest')
+  with open(manifest_file, 'w') as f:
     manifest.Save(f, groups=_manifest_groups(manifest))
   # TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
   # Give the GITC filesystem time to register the manifest changes.
hooks.py: new file, 520 lines

@@ -0,0 +1,520 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from error import HookError
|
||||
from git_refs import HEAD
|
||||
|
||||
from pyversion import is_python3
|
||||
if is_python3():
|
||||
import urllib.parse
|
||||
else:
|
||||
import imp
|
||||
import urlparse
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.parse = urlparse
|
||||
input = raw_input # noqa: F821
|
||||
|
||||
|
||||
class RepoHook(object):
|
||||
"""A RepoHook contains information about a script to run as a hook.
|
||||
|
||||
Hooks are used to run a python script before running an upload (for instance,
|
||||
to run presubmit checks). Eventually, we may have hooks for other actions.
|
||||
|
||||
This shouldn't be confused with files in the 'repo/hooks' directory. Those
|
||||
files are copied into each '.git/hooks' folder for each project. Repo-level
|
||||
hooks are associated instead with repo actions.
|
||||
|
||||
Hooks are always python. When a hook is run, we will load the hook into the
|
||||
interpreter and execute its main() function.
|
||||
|
||||
Combinations of hook option flags:
|
||||
- no-verify=False, verify=False (DEFAULT):
|
||||
If stdout is a tty, can prompt about running hooks if needed.
|
||||
If user denies running hooks, the action is cancelled. If stdout is
|
||||
not a tty and we would need to prompt about hooks, action is
|
||||
cancelled.
|
||||
- no-verify=False, verify=True:
|
||||
Always run hooks with no prompt.
|
||||
- no-verify=True, verify=False:
|
||||
Never run hooks, but run action anyway (AKA bypass hooks).
|
||||
- no-verify=True, verify=True:
|
||||
Invalid
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
hook_type,
|
||||
hooks_project,
|
||||
repo_topdir,
|
||||
manifest_url,
|
||||
bypass_hooks=False,
|
||||
allow_all_hooks=False,
|
||||
ignore_hooks=False,
|
||||
abort_if_user_denies=False):
|
||||
"""RepoHook constructor.
|
||||
|
||||
Params:
|
||||
hook_type: A string representing the type of hook. This is also used
|
||||
to figure out the name of the file containing the hook. For
|
||||
example: 'pre-upload'.
|
||||
hooks_project: The project containing the repo hooks.
|
||||
If you have a manifest, this is manifest.repo_hooks_project.
|
||||
OK if this is None, which will make the hook a no-op.
|
||||
repo_topdir: The top directory of the repo client checkout.
|
||||
This is the one containing the .repo directory. Scripts will
|
||||
run with CWD as this directory.
|
||||
If you have a manifest, this is manifest.topdir.
|
||||
manifest_url: The URL to the manifest git repo.
|
||||
bypass_hooks: If True, then 'Do not run the hook'.
|
||||
allow_all_hooks: If True, then 'Run the hook without prompting'.
|
||||
ignore_hooks: If True, then 'Do not abort action if hooks fail'.
|
||||
abort_if_user_denies: If True, we'll abort running the hook if the user
|
||||
doesn't allow us to run the hook.
|
||||
"""
|
||||
self._hook_type = hook_type
|
||||
self._hooks_project = hooks_project
|
||||
self._repo_topdir = repo_topdir
|
||||
self._manifest_url = manifest_url
|
||||
self._bypass_hooks = bypass_hooks
|
||||
self._allow_all_hooks = allow_all_hooks
|
||||
self._ignore_hooks = ignore_hooks
|
||||
self._abort_if_user_denies = abort_if_user_denies
|
||||
|
||||
# Store the full path to the script for convenience.
|
||||
if self._hooks_project:
|
||||
self._script_fullpath = os.path.join(self._hooks_project.worktree,
|
||||
self._hook_type + '.py')
|
||||
else:
|
||||
self._script_fullpath = None
|
||||
|
||||
def _GetHash(self):
|
||||
"""Return a hash of the contents of the hooks directory.
|
||||
|
||||
We'll just use git to do this. This hash has the property that if anything
|
||||
changes in the directory we will return a different hash.
|
||||
|
||||
SECURITY CONSIDERATION:
|
||||
This hash only represents the contents of files in the hook directory, not
|
||||
any other files imported or called by hooks. Changes to imported files
|
||||
can change the script behavior without affecting the hash.
|
||||
|
||||
Returns:
|
||||
A string representing the hash. This will always be ASCII so that it can
|
||||
be printed to the user easily.
|
||||
"""
|
||||
assert self._hooks_project, "Must have hooks to calculate their hash."
|
||||
|
||||
# We will use the work_git object rather than just calling GetRevisionId().
|
||||
# That gives us a hash of the latest checked in version of the files that
|
||||
# the user will actually be executing. Specifically, GetRevisionId()
|
||||
# doesn't appear to change even if a user checks out a different version
|
||||
# of the hooks repo (via git checkout) nor if a user commits their own revs.
|
||||
#
|
||||
# NOTE: Local (non-committed) changes will not be factored into this hash.
|
||||
# I think this is OK, since we're really only worried about warning the user
|
||||
# about upstream changes.
|
||||
return self._hooks_project.work_git.rev_parse(HEAD)
|
||||
|
||||
def _GetMustVerb(self):
|
||||
"""Return 'must' if the hook is required; 'should' if not."""
|
||||
if self._abort_if_user_denies:
|
||||
return 'must'
|
||||
else:
|
||||
return 'should'
|
||||
|
||||
def _CheckForHookApproval(self):
|
||||
"""Check to see whether this hook has been approved.
|
||||
|
||||
We'll accept approval of manifest URLs if they're using secure transports.
|
||||
This way the user can say they trust the manifest hoster. For insecure
|
||||
hosts, we fall back to checking the hash of the hooks repo.
|
||||
|
||||
Note that we ask permission for each individual hook even though we use
|
||||
the hash of all hooks when detecting changes. We'd like the user to be
|
||||
able to approve / deny each hook individually. We only use the hash of all
|
||||
hooks because there is no other easy way to detect changes to local imports.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the consturctor.
|
||||
"""
|
||||
if self._ManifestUrlHasSecureScheme():
|
||||
return self._CheckForHookApprovalManifest()
|
||||
else:
|
||||
return self._CheckForHookApprovalHash()
|
||||
|
||||
def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
|
||||
changed_prompt):
|
||||
"""Check for approval for a particular attribute and hook.
|
||||
|
||||
Args:
|
||||
subkey: The git config key under [repo.hooks.<hook_type>] to store the
|
||||
last approved string.
|
||||
new_val: The new value to compare against the last approved one.
|
||||
main_prompt: Message to display to the user to ask for approval.
|
||||
changed_prompt: Message explaining why we're re-asking for approval.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the constructor.
|
||||
"""
|
||||
hooks_config = self._hooks_project.config
|
||||
git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
|
||||
|
||||
# Get the last value that the user approved for this hook; may be None.
|
||||
old_val = hooks_config.GetString(git_approval_key)
|
||||
|
||||
if old_val is not None:
|
||||
# User previously approved hook and asked not to be prompted again.
|
||||
if new_val == old_val:
|
||||
# Approval matched. We're done.
|
||||
return True
|
||||
else:
|
||||
# Give the user a reason why we're prompting, since they last told
|
||||
# us to "never ask again".
|
||||
prompt = 'WARNING: %s\n\n' % (changed_prompt,)
|
||||
else:
|
||||
prompt = ''
|
||||
|
||||
# Prompt the user if we're on a tty; if not, we'll assume "no".
|
||||
if sys.stdout.isatty():
|
||||
prompt += main_prompt + ' (yes/always/NO)? '
|
||||
response = input(prompt).lower()
|
||||
print()
|
||||
|
||||
# User is doing a one-time approval.
|
||||
if response in ('y', 'yes'):
|
||||
return True
|
||||
elif response == 'always':
|
||||
hooks_config.SetString(git_approval_key, new_val)
|
||||
return True
|
||||
|
||||
# For anything else, we'll assume no approval.
|
||||
if self._abort_if_user_denies:
|
||||
raise HookError('You must allow the %s hook or use --no-verify.' %
|
||||
self._hook_type)
|
||||
|
||||
return False
|
||||
|
||||
def _ManifestUrlHasSecureScheme(self):
|
||||
"""Check if the URI for the manifest is a secure transport."""
|
||||
secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
|
||||
parse_results = urllib.parse.urlparse(self._manifest_url)
|
||||
return parse_results.scheme in secure_schemes
|
||||
|
||||
def _CheckForHookApprovalManifest(self):
|
||||
"""Check whether the user has approved this manifest host.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
return self._CheckForHookApprovalHelper(
|
||||
'approvedmanifest',
|
||||
self._manifest_url,
|
||||
'Run hook scripts from %s' % (self._manifest_url,),
|
||||
'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
|
||||
|
||||
def _CheckForHookApprovalHash(self):
|
||||
"""Check whether the user has approved the hooks repo.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
prompt = ('Repo %s run the script:\n'
|
||||
' %s\n'
|
||||
'\n'
|
||||
'Do you want to allow this script to run')
|
||||
return self._CheckForHookApprovalHelper(
|
||||
'approvedhash',
|
||||
self._GetHash(),
|
||||
prompt % (self._GetMustVerb(), self._script_fullpath),
|
||||
'Scripts have changed since %s was allowed.' % (self._hook_type,))
|
||||
|
||||
@staticmethod
|
||||
def _ExtractInterpFromShebang(data):
|
||||
"""Extract the interpreter used in the shebang.
|
||||
|
||||
Try to locate the interpreter the script is using (ignoring `env`).
|
||||
|
||||
Args:
|
||||
data: The file content of the script.
|
||||
|
||||
Returns:
|
||||
The basename of the main script interpreter, or None if a shebang is not
|
||||
used or could not be parsed out.
|
||||
"""
|
||||
firstline = data.splitlines()[:1]
|
||||
if not firstline:
|
||||
return None
|
||||
|
||||
# The format here can be tricky.
|
||||
shebang = firstline[0].strip()
|
||||
m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang)
|
||||
if not m:
|
||||
return None
|
||||
|
||||
# If using `env`, find the target program.
|
||||
interp = m.group(1)
|
||||
if os.path.basename(interp) == 'env':
|
||||
interp = m.group(2)
|
||||
|
||||
return interp
|
||||
|
||||
def _ExecuteHookViaReexec(self, interp, context, **kwargs):
|
||||
"""Execute the hook script through |interp|.
|
||||
|
||||
Note: Support for this feature should be dropped ~Jun 2021.
|
||||
|
||||
Args:
|
||||
interp: The Python program to run.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# This logic needs to be kept in sync with _ExecuteHookViaImport below.
|
||||
script = """
|
||||
import json, os, sys
|
||||
path = '''%(path)s'''
|
||||
kwargs = json.loads('''%(kwargs)s''')
|
||||
context = json.loads('''%(context)s''')
|
||||
sys.path.insert(0, os.path.dirname(path))
|
||||
data = open(path).read()
|
||||
exec(compile(data, path, 'exec'), context)
|
||||
context['main'](**kwargs)
|
||||
""" % {
|
||||
'path': self._script_fullpath,
|
||||
'kwargs': json.dumps(kwargs),
|
||||
'context': json.dumps(context),
|
||||
}
|
||||
|
||||
# We pass the script via stdin to avoid OS argv limits. It also makes
|
||||
# unhandled exception tracebacks less verbose/confusing for users.
|
||||
cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())']
|
||||
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
|
||||
proc.communicate(input=script.encode('utf-8'))
|
||||
if proc.returncode:
|
||||
raise HookError('Failed to run %s hook.' % (self._hook_type,))
|
||||
|
||||
def _ExecuteHookViaImport(self, data, context, **kwargs):
|
||||
"""Execute the hook code in |data| directly.
|
||||
|
||||
Args:
|
||||
data: The code of the hook to execute.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# Exec, storing global context in the context dict. We catch exceptions
|
||||
# and convert to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
exec(compile(data, self._script_fullpath, 'exec'), context)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to import %s hook; see traceback above.' %
|
||||
(traceback.format_exc(), self._hook_type))
|
||||
|
||||
# Running the script should have defined a main() function.
|
||||
if 'main' not in context:
|
||||
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
|
||||
|
||||
# Call the main function in the hook. If the hook should cause the
|
||||
# build to fail, it will raise an Exception. We'll catch that convert
|
||||
# to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
context['main'](**kwargs)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to run main() for %s hook; see traceback '
|
||||
'above.' % (traceback.format_exc(), self._hook_type))
|
||||
|
||||
def _ExecuteHook(self, **kwargs):
|
||||
"""Actually execute the given hook.
|
||||
|
||||
This will run the hook's 'main' function in our python interpreter.
|
||||
|
||||
Args:
|
||||
kwargs: Keyword arguments to pass to the hook. These are often specific
|
||||
to the hook type. For instance, pre-upload hooks will contain
|
||||
a project_list.
|
||||
"""
|
||||
# Keep sys.path and CWD stashed away so that we can always restore them
|
||||
# upon function exit.
|
||||
orig_path = os.getcwd()
|
||||
orig_syspath = sys.path
|
||||
|
||||
try:
|
||||
# Always run hooks with CWD as topdir.
|
||||
os.chdir(self._repo_topdir)
|
||||
|
||||
# Put the hook dir as the first item of sys.path so hooks can do
|
||||
# relative imports. We want to replace the repo dir as [0] so
|
||||
# hooks can't import repo files.
|
||||
sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
|
||||
|
||||
# Initial global context for the hook to run within.
|
||||
context = {'__file__': self._script_fullpath}
|
||||
|
||||
# Add 'hook_should_take_kwargs' to the arguments to be passed to main.
|
||||
# We don't actually want hooks to define their main with this argument--
|
||||
# it's there to remind them that their hook should always take **kwargs.
|
||||
# For instance, a pre-upload hook should be defined like:
|
||||
# def main(project_list, **kwargs):
|
||||
#
|
||||
# This allows us to later expand the API without breaking old hooks.
|
||||
kwargs = kwargs.copy()
|
||||
kwargs['hook_should_take_kwargs'] = True
|
||||
|
||||
# See what version of python the hook has been written against.
|
||||
data = open(self._script_fullpath).read()
|
||||
interp = self._ExtractInterpFromShebang(data)
|
||||
reexec = False
|
||||
if interp:
|
||||
prog = os.path.basename(interp)
|
||||
if prog.startswith('python2') and sys.version_info.major != 2:
|
||||
reexec = True
|
||||
elif prog.startswith('python3') and sys.version_info.major == 2:
|
||||
reexec = True
|
||||
|
||||
# Attempt to execute the hooks through the requested version of Python.
|
||||
if reexec:
|
||||
try:
|
||||
self._ExecuteHookViaReexec(interp, context, **kwargs)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
# We couldn't find the interpreter, so fallback to importing.
|
||||
reexec = False
|
||||
else:
|
||||
raise
|
||||
|
||||
# Run the hook by importing directly.
|
||||
if not reexec:
|
||||
self._ExecuteHookViaImport(data, context, **kwargs)
|
||||
finally:
|
||||
# Restore sys.path and CWD.
|
||||
sys.path = orig_syspath
|
||||
os.chdir(orig_path)
|
||||
|
||||
def _CheckHook(self):
|
||||
# Bail with a nice error if we can't find the hook.
|
||||
if not os.path.isfile(self._script_fullpath):
|
||||
raise HookError('Couldn\'t find repo hook: %s' % self._script_fullpath)
|
||||
|
||||
def Run(self, **kwargs):
|
||||
"""Run the hook.
|
||||
|
||||
If the hook doesn't exist (because there is no hooks project or because
|
||||
this particular hook is not enabled), this is a no-op.
|
||||
|
||||
Args:
|
||||
user_allows_all_hooks: If True, we will never prompt about running the
|
||||
hook--we'll just assume it's OK to run it.
|
||||
kwargs: Keyword arguments to pass to the hook. These are often specific
|
||||
to the hook type. For instance, pre-upload hooks will contain
|
||||
a project_list.
|
||||
|
||||
Returns:
|
||||
True: On success or ignore hooks by user-request
|
||||
False: The hook failed. The caller should respond with aborting the action.
|
||||
Some examples in which False is returned:
|
||||
* Finding the hook failed while it was enabled, or
|
||||
* the user declined to run a required hook (from _CheckForHookApproval)
|
||||
In all these cases the user did not pass the proper arguments to
|
||||
ignore the result through the option combinations as listed in
|
||||
AddHookOptionGroup().
|
||||
"""
|
||||
# Do not do anything in case bypass_hooks is set, or
|
||||
# no-op if there is no hooks project or if hook is disabled.
|
||||
if (self._bypass_hooks or
|
||||
not self._hooks_project or
|
||||
self._hook_type not in self._hooks_project.enabled_repo_hooks):
|
||||
return True
|
||||
|
||||
passed = True
|
||||
try:
|
||||
self._CheckHook()
|
||||
|
||||
# Make sure the user is OK with running the hook.
|
||||
if self._allow_all_hooks or self._CheckForHookApproval():
|
||||
# Run the hook with the same version of python we're using.
|
||||
self._ExecuteHook(**kwargs)
|
||||
except SystemExit as e:
|
||||
passed = False
|
||||
print('ERROR: %s hooks exited with exit code: %s' % (self._hook_type, str(e)),
|
||||
file=sys.stderr)
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print('ERROR: %s' % str(e), file=sys.stderr)
|
||||
|
||||
if not passed and self._ignore_hooks:
|
||||
print('\nWARNING: %s hooks failed, but continuing anyways.' % self._hook_type,
|
||||
file=sys.stderr)
|
||||
passed = True
|
||||
|
||||
return passed
|
||||
|
||||
@classmethod
|
||||
def FromSubcmd(cls, manifest, opt, *args, **kwargs):
|
||||
"""Method to construct the repo hook class
|
||||
|
||||
Args:
|
||||
manifest: The current active manifest for this command from which we
|
||||
extract a couple of fields.
|
||||
opt: Contains the commandline options for the action of this hook.
|
||||
It should contain the options added by AddHookOptionGroup() in which
|
||||
we are interested in RepoHook execution.
|
||||
"""
|
||||
for key in ('bypass_hooks', 'allow_all_hooks', 'ignore_hooks'):
|
||||
kwargs.setdefault(key, getattr(opt, key))
|
||||
kwargs.update({
|
||||
'hooks_project': manifest.repo_hooks_project,
|
||||
'repo_topdir': manifest.topdir,
|
||||
'manifest_url': manifest.manifestProject.GetRemote('origin').url,
|
||||
})
|
||||
return cls(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def AddOptionGroup(parser, name):
|
||||
"""Help options relating to the various hooks."""
|
||||
|
||||
# Note that verify and no-verify are NOT opposites of each other, which
|
||||
# is why they store to different locations. We are using them to match
|
||||
# 'git commit' syntax.
|
||||
group = parser.add_option_group(name + ' hooks')
|
||||
group.add_option('--no-verify',
|
||||
dest='bypass_hooks', action='store_true',
|
||||
help='Do not run the %s hook.' % name)
|
||||
group.add_option('--verify',
|
||||
dest='allow_all_hooks', action='store_true',
|
||||
help='Run the %s hook without prompting.' % name)
|
||||
group.add_option('--ignore-hooks',
|
||||
action='store_true',
|
||||
help='Do not abort if %s hooks fail.' % name)
|
main.py: 51 changes

@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
@ -50,6 +50,7 @@ import event_log
|
||||
from repo_trace import SetTrace
|
||||
from git_command import user_agent
|
||||
from git_config import init_ssh, close_ssh, RepoConfig
|
||||
from git_trace2_event_log import EventLog
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from command import GitcAvailableCommand, GitcClientCommand
|
||||
@ -63,7 +64,7 @@ from error import NoManifestException
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
import gitc_utils
|
||||
from manifest_xml import GitcManifest, XmlManifest
|
||||
from manifest_xml import GitcClient, RepoClient
|
||||
from pager import RunPager, TerminatePager
|
||||
from wrapper import WrapperPath, Wrapper
|
||||
|
||||
@ -82,12 +83,13 @@ if not is_python3():
|
||||
#
|
||||
# python-3.6 is in Ubuntu Bionic.
|
||||
MIN_PYTHON_VERSION_SOFT = (3, 6)
|
||||
MIN_PYTHON_VERSION_HARD = (3, 4)
|
||||
MIN_PYTHON_VERSION_HARD = (3, 5)
|
||||
|
||||
if sys.version_info.major < 3:
|
||||
print('repo: warning: Python 2 is no longer supported; '
|
||||
print('repo: error: Python 2 is no longer supported; '
|
||||
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
if sys.version_info < MIN_PYTHON_VERSION_HARD:
|
||||
print('repo: error: Python 3 version is too old; '
|
||||
@ -129,14 +131,14 @@ global_options.add_option('--version',
|
||||
global_options.add_option('--event-log',
|
||||
dest='event_log', action='store',
|
||||
help='filename of event log to append timeline to')
|
||||
global_options.add_option('--git-trace2-event-log', action='store',
|
||||
help='directory to write git trace2 event log to')
|
||||
|
||||
|
||||
class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
self.repodir = repodir
|
||||
self.commands = all_commands
|
||||
# add 'branch' as an alias for 'branches'
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
||||
def _ParseArgs(self, argv):
|
||||
"""Parse the main `repo` command line options."""
|
||||
@@ -206,21 +208,23 @@ class _Repo(object):
     SetDefaultColoring(gopts.color)

     try:
-      cmd = self.commands[name]
+      cmd = self.commands[name]()
     except KeyError:
       print("repo: '%s' is not a repo command.  See 'repo help'." % name,
             file=sys.stderr)
       return 1

+    git_trace2_event_log = EventLog()
     cmd.repodir = self.repodir
-    cmd.manifest = XmlManifest(cmd.repodir)
+    cmd.client = RepoClient(cmd.repodir)
+    cmd.manifest = cmd.client.manifest
     cmd.gitc_manifest = None
     gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
     if gitc_client_name:
-      cmd.gitc_manifest = GitcManifest(cmd.repodir, gitc_client_name)
-      cmd.manifest.isGitcClient = True
+      cmd.gitc_manifest = GitcClient(cmd.repodir, gitc_client_name)
+      cmd.client.isGitcClient = True

-    Editor.globalConfig = cmd.manifest.globalConfig
+    Editor.globalConfig = cmd.client.globalConfig

     if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
       print("fatal: '%s' requires a working directory" % name,
@@ -248,7 +252,7 @@ class _Repo(object):
       return 1

     if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
-      config = cmd.manifest.globalConfig
+      config = cmd.client.globalConfig
       if gopts.pager:
         use_pager = True
       else:
@@ -261,6 +265,8 @@ class _Repo(object):
     start = time.time()
     cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
     cmd.event_log.SetParent(cmd_event)
+    git_trace2_event_log.StartEvent()
+
     try:
       cmd.ValidateOptions(copts, cargs)
       result = cmd.Execute(copts, cargs)
@@ -303,10 +309,13 @@ class _Repo(object):

       cmd.event_log.FinishEvent(cmd_event, finish,
                                 result is None or result == 0)
+      git_trace2_event_log.ExitEvent(result)
+
       if gopts.event_log:
         cmd.event_log.Write(os.path.abspath(
             os.path.expanduser(gopts.event_log)))

+      git_trace2_event_log.Write(gopts.git_trace2_event_log)
     return result

@@ -348,12 +357,20 @@ repo: error:
     sys.exit(1)

   if exp > ver:
-    print("""
-... A new version of repo (%s) is available.
+    print('\n... A new version of repo (%s) is available.' % (exp_str,),
+          file=sys.stderr)
+    if os.access(repo_path, os.W_OK):
+      print("""\
 ... You should upgrade soon:

     cp %s %s

-""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
+""" % (WrapperPath(), repo_path), file=sys.stderr)
+    else:
+      print("""\
+... New version is available at: %s
+... The launcher is run from: %s
+!!! The launcher is not writable.  Please talk to your sysadmin or distro
+!!! to get an update installed.
+""" % (WrapperPath(), repo_path), file=sys.stderr)


 def _CheckRepoDir(repo_dir):
@@ -608,7 +625,7 @@ def _Main(argv):
       argv = list(sys.argv)
       argv.extend(rce.extra_args)
       try:
-        os.execv(__file__, argv)
+        os.execv(sys.executable, [__file__] + argv)
       except OSError as e:
         print('fatal: cannot restart repo after upgrade', file=sys.stderr)
         print('fatal: %s' % e, file=sys.stderr)
321  manifest_xml.py

@@ -31,7 +31,7 @@ else:
   urllib.parse = urlparse

 import gitc_utils
-from git_config import GitConfig
+from git_config import GitConfig, IsId
 from git_refs import R_HEADS, HEAD
 import platform_utils
 from project import RemoteSpec, Project, MetaProject
@@ -57,6 +57,60 @@ urllib.parse.uses_netloc.extend([
     'rpc'])


+def XmlBool(node, attr, default=None):
+  """Determine boolean value of |node|'s |attr|.
+
+  Invalid values will issue a non-fatal warning.
+
+  Args:
+    node: XML node whose attributes we access.
+    attr: The attribute to access.
+    default: If the attribute is not set (value is empty), then use this.
+
+  Returns:
+    True if the attribute is a valid string representing true.
+    False if the attribute is a valid string representing false.
+    |default| otherwise.
+  """
+  value = node.getAttribute(attr)
+  s = value.lower()
+  if s == '':
+    return default
+  elif s in {'yes', 'true', '1'}:
+    return True
+  elif s in {'no', 'false', '0'}:
+    return False
+  else:
+    print('warning: manifest: %s="%s": ignoring invalid XML boolean' %
+          (attr, value), file=sys.stderr)
+    return default
+
+
+def XmlInt(node, attr, default=None):
+  """Determine integer value of |node|'s |attr|.
+
+  Args:
+    node: XML node whose attributes we access.
+    attr: The attribute to access.
+    default: If the attribute is not set (value is empty), then use this.
+
+  Returns:
+    The number if the attribute is a valid number.
+
+  Raises:
+    ManifestParseError: The number is invalid.
+  """
+  value = node.getAttribute(attr)
+  if not value:
+    return default
+
+  try:
+    return int(value)
+  except ValueError:
+    raise ManifestParseError('manifest: invalid %s="%s" integer' %
+                             (attr, value))
+
+
 class _Default(object):
   """Project defaults within the manifest."""
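The two helpers above centralize attribute parsing for the manifest. The self-contained sketch below shows the intended behaviour on a toy node; `xml_bool` and `xml_int` here are local stand-ins that mirror XmlBool/XmlInt rather than imports from repo.

```python
import sys
import xml.dom.minidom

def xml_bool(node, attr, default=None):
    # Mirrors the XmlBool semantics above: tri-state, warning on junk values.
    s = node.getAttribute(attr).lower()
    if s == '':
        return default
    if s in {'yes', 'true', '1'}:
        return True
    if s in {'no', 'false', '0'}:
        return False
    print('warning: %s=%r: ignoring invalid XML boolean' % (attr, s),
          file=sys.stderr)
    return default

def xml_int(node, attr, default=None):
    # Mirrors XmlInt: missing/empty -> default, non-numeric -> error.
    value = node.getAttribute(attr)
    if not value:
        return default
    return int(value)  # the real helper wraps this in ManifestParseError

doc = xml.dom.minidom.parseString(
    '<manifest><default sync-c="yes" sync-j="4" sync-tags="maybe"/></manifest>')
node = doc.getElementsByTagName('default')[0]
print(xml_bool(node, 'sync-c', False))    # True
print(xml_bool(node, 'sync-tags', True))  # True (invalid value, falls back)
print(xml_int(node, 'sync-j', 1))         # 4
print(xml_int(node, 'clone-depth'))       # None (attribute not set)
```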
@@ -133,13 +187,24 @@ class _XmlRemote(object):
 class XmlManifest(object):
   """manages the repo configuration file"""

-  def __init__(self, repodir):
+  def __init__(self, repodir, manifest_file, local_manifests=None):
+    """Initialize.
+
+    Args:
+      repodir: Path to the .repo/ dir for holding all internal checkout state.
+          It must be in the top directory of the repo client checkout.
+      manifest_file: Full path to the manifest file to parse.  This will usually
+          be |repodir|/|MANIFEST_FILE_NAME|.
+      local_manifests: Full path to the directory of local override manifests.
+          This will usually be |repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
+    """
+    # TODO(vapier): Move this out of this class.
+    self.globalConfig = GitConfig.ForUser()
+
     self.repodir = os.path.abspath(repodir)
     self.topdir = os.path.dirname(self.repodir)
-    self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
-    self.globalConfig = GitConfig.ForUser()
-    self.localManifestWarning = False
-    self.isGitcClient = False
+    self.manifestFile = manifest_file
+    self.local_manifests = local_manifests
     self._load_local_manifests = True

     self.repoProject = MetaProject(self, 'repo',
@@ -227,18 +292,21 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
       if r.revision is not None:
         e.setAttribute('revision', r.revision)

-  def _ParseGroups(self, groups):
-    return [x for x in re.split(r'[,\s]+', groups) if x]
+  def _ParseList(self, field):
+    """Parse fields that contain flattened lists.
+
+    These are whitespace & comma separated.  Empty elements will be discarded.
+    """
+    return [x for x in re.split(r'[,\s]+', field) if x]

-  def Save(self, fd, peg_rev=False, peg_rev_upstream=True, groups=None):
-    """Write the current manifest out to the given file descriptor.
-    """
+  def ToXml(self, peg_rev=False, peg_rev_upstream=True, peg_rev_dest_branch=True, groups=None):
+    """Return the current manifest XML."""
     mp = self.manifestProject

     if groups is None:
       groups = mp.config.GetString('manifest.groups')
     if groups:
-      groups = self._ParseGroups(groups)
+      groups = self._ParseList(groups)

     doc = xml.dom.minidom.Document()
     root = doc.createElement('manifest')
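`_ParseList` treats groups and similar manifest fields as whitespace- and comma-separated lists. A tiny standalone illustration of that splitting rule:

```python
import re

def parse_list(field):
    # Same rule as _ParseList above: split on commas and/or whitespace,
    # discarding empty elements.
    return [x for x in re.split(r'[,\s]+', field) if x]

print(parse_list('default,notdefault  platform-linux'))
# ['default', 'notdefault', 'platform-linux']
print(parse_list('   '))  # []
```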
@@ -335,6 +403,13 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
           # Only save the origin if the origin is not a sha1, and the default
           # isn't our value
           e.setAttribute('upstream', p.revisionExpr)
+
+        if peg_rev_dest_branch:
+          if p.dest_branch:
+            e.setAttribute('dest-branch', p.dest_branch)
+          elif value != p.revisionExpr:
+            e.setAttribute('dest-branch', p.revisionExpr)
+
       else:
         revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
         if not revision or revision != p.revisionExpr:
@@ -399,6 +474,56 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
                      ' '.join(self._repo_hooks_project.enabled_repo_hooks))
       root.appendChild(e)

+    return doc
+
+  def ToDict(self, **kwargs):
+    """Return the current manifest as a dictionary."""
+    # Elements that may only appear once.
+    SINGLE_ELEMENTS = {
+        'notice',
+        'default',
+        'manifest-server',
+        'repo-hooks',
+    }
+    # Elements that may be repeated.
+    MULTI_ELEMENTS = {
+        'remote',
+        'remove-project',
+        'project',
+        'extend-project',
+        'include',
+        # These are children of 'project' nodes.
+        'annotation',
+        'project',
+        'copyfile',
+        'linkfile',
+    }
+
+    doc = self.ToXml(**kwargs)
+    ret = {}
+
+    def append_children(ret, node):
+      for child in node.childNodes:
+        if child.nodeType == xml.dom.Node.ELEMENT_NODE:
+          attrs = child.attributes
+          element = dict((attrs.item(i).localName, attrs.item(i).value)
+                         for i in range(attrs.length))
+          if child.nodeName in SINGLE_ELEMENTS:
+            ret[child.nodeName] = element
+          elif child.nodeName in MULTI_ELEMENTS:
+            ret.setdefault(child.nodeName, []).append(element)
+          else:
+            raise ManifestParseError('Unhandled element "%s"' % (child.nodeName,))
+
+          append_children(element, child)
+
+    append_children(ret, doc.firstChild)
+
+    return ret
+
+  def Save(self, fd, **kwargs):
+    """Write the current manifest out to the given file descriptor."""
+    doc = self.ToXml(**kwargs)
     doc.writexml(fd, '', '  ', '\n', 'UTF-8')

   def _output_manifest_project_extras(self, p, e):
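`ToDict` flattens the generated XML into plain dicts and lists keyed by element name. The hedged, self-contained sketch below walks a toy manifest the same way; unlike the method above it silently skips element names it does not recognize instead of raising.

```python
import xml.dom.minidom

SINGLE = {'default', 'manifest-server', 'notice', 'repo-hooks'}
MULTI = {'remote', 'project', 'include', 'extend-project', 'remove-project',
         'annotation', 'copyfile', 'linkfile'}

def to_dict(node):
    # Single-use elements map to a dict of their attributes; repeatable ones
    # collect into a list, mirroring the shape produced by ToDict above.
    out = {}
    for child in node.childNodes:
        if child.nodeType != xml.dom.Node.ELEMENT_NODE:
            continue
        attrs = child.attributes
        element = {attrs.item(i).localName: attrs.item(i).value
                   for i in range(attrs.length)}
        if child.nodeName in SINGLE:
            out[child.nodeName] = element
        elif child.nodeName in MULTI:
            out.setdefault(child.nodeName, []).append(element)
        element.update(to_dict(child))   # recurse for nested children
    return out

doc = xml.dom.minidom.parseString(
    '<manifest>'
    '  <remote name="origin" fetch=".."/>'
    '  <default remote="origin" revision="main" sync-j="4"/>'
    '  <project name="tools/repo" path="repo"/>'
    '</manifest>')
print(to_dict(doc.firstChild))
```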
@@ -440,6 +565,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
       self._Load()
     return self._manifest_server

+  @property
+  def CloneBundle(self):
+    clone_bundle = self.manifestProject.config.GetBoolean('repo.clonebundle')
+    if clone_bundle is None:
+      return False if self.manifestProject.config.GetBoolean('repo.partialclone') else True
+    else:
+      return clone_bundle
+
   @property
   def CloneFilter(self):
     if self.manifestProject.config.GetBoolean('repo.partialclone'):
@@ -485,23 +618,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
       nodes.append(self._ParseManifestXml(self.manifestFile,
                                           self.manifestProject.worktree))

-      if self._load_local_manifests:
-        local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
-        if os.path.exists(local):
-          if not self.localManifestWarning:
-            self.localManifestWarning = True
-            print('warning: %s is deprecated; put local manifests '
-                  'in `%s` instead' % (LOCAL_MANIFEST_NAME,
-                  os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
-                  file=sys.stderr)
-          nodes.append(self._ParseManifestXml(local, self.repodir))
-
-        local_dir = os.path.abspath(os.path.join(self.repodir,
-                                                 LOCAL_MANIFESTS_DIR_NAME))
+      if self._load_local_manifests and self.local_manifests:
         try:
-          for local_file in sorted(platform_utils.listdir(local_dir)):
+          for local_file in sorted(platform_utils.listdir(self.local_manifests)):
             if local_file.endswith('.xml'):
-              local = os.path.join(local_dir, local_file)
+              local = os.path.join(self.local_manifests, local_file)
               nodes.append(self._ParseManifestXml(local, self.repodir))
         except OSError:
           pass
@@ -520,7 +641,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     self._loaded = True

-  def _ParseManifestXml(self, path, include_root):
+  def _ParseManifestXml(self, path, include_root, parent_groups=''):
     try:
       root = xml.dom.minidom.parse(path)
     except (OSError, xml.parsers.expat.ExpatError) as e:
@@ -539,12 +660,17 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     for node in manifest.childNodes:
       if node.nodeName == 'include':
         name = self._reqatt(node, 'name')
+        include_groups = ''
+        if parent_groups:
+          include_groups = parent_groups
+        if node.hasAttribute('groups'):
+          include_groups = node.getAttribute('groups') + ',' + include_groups
         fp = os.path.join(include_root, name)
         if not os.path.isfile(fp):
           raise ManifestParseError("include %s doesn't exist or isn't a file"
                                    % (name,))
         try:
-          nodes.extend(self._ParseManifestXml(fp, include_root))
+          nodes.extend(self._ParseManifestXml(fp, include_root, include_groups))
         # should isolate this to the exact exception, but that's
         # tricky.  actual parsing implementation may vary.
         except (KeyboardInterrupt, RuntimeError, SystemExit):
@@ -553,6 +679,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
           raise ManifestParseError(
               "failed parsing included manifest %s: %s" % (name, e))
       else:
+        if parent_groups and node.nodeName == 'project':
+          nodeGroups = parent_groups
+          if node.hasAttribute('groups'):
+            nodeGroups = node.getAttribute('groups') + ',' + nodeGroups
+          node.setAttribute('groups', nodeGroups)
         nodes.append(node)
     return nodes

@@ -627,7 +758,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     path = node.getAttribute('path')
     groups = node.getAttribute('groups')
     if groups:
-      groups = self._ParseGroups(groups)
+      groups = self._ParseList(groups)
     revision = node.getAttribute('revision')
     remote = node.getAttribute('remote')
     if remote:
@@ -640,12 +771,16 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
         p.groups.extend(groups)
       if revision:
         p.revisionExpr = revision
+        if IsId(revision):
+          p.revisionId = revision
+        else:
+          p.revisionId = None
       if remote:
         p.remote = remote.ToRemoteSpec(name)
     if node.nodeName == 'repo-hooks':
       # Get the name of the project and the (space-separated) list of enabled.
       repo_hooks_project = self._reqatt(node, 'in-project')
-      enabled_repo_hooks = self._reqatt(node, 'enabled-list').split()
+      enabled_repo_hooks = self._ParseList(self._reqatt(node, 'enabled-list'))

       # Only one project can be the hooks project
       if self._repo_hooks_project is not None:
@@ -757,29 +892,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     d.destBranchExpr = node.getAttribute('dest-branch') or None
     d.upstreamExpr = node.getAttribute('upstream') or None

-    sync_j = node.getAttribute('sync-j')
-    if sync_j == '' or sync_j is None:
-      d.sync_j = 1
-    else:
-      d.sync_j = int(sync_j)
+    d.sync_j = XmlInt(node, 'sync-j', 1)
+    if d.sync_j <= 0:
+      raise ManifestParseError('%s: sync-j must be greater than 0, not "%s"' %
+                               (self.manifestFile, d.sync_j))

-    sync_c = node.getAttribute('sync-c')
-    if not sync_c:
-      d.sync_c = False
-    else:
-      d.sync_c = sync_c.lower() in ("yes", "true", "1")
-
-    sync_s = node.getAttribute('sync-s')
-    if not sync_s:
-      d.sync_s = False
-    else:
-      d.sync_s = sync_s.lower() in ("yes", "true", "1")
-
-    sync_tags = node.getAttribute('sync-tags')
-    if not sync_tags:
-      d.sync_tags = True
-    else:
-      d.sync_tags = sync_tags.lower() in ("yes", "true", "1")
+    d.sync_c = XmlBool(node, 'sync-c', False)
+    d.sync_s = XmlBool(node, 'sync-s', False)
+    d.sync_tags = XmlBool(node, 'sync-tags', True)
     return d

   def _ParseNotice(self, node):
@@ -856,39 +976,15 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
       raise ManifestParseError("project %s path cannot be absolute in %s" %
                                (name, self.manifestFile))

-    rebase = node.getAttribute('rebase')
-    if not rebase:
-      rebase = True
-    else:
-      rebase = rebase.lower() in ("yes", "true", "1")
-
-    sync_c = node.getAttribute('sync-c')
-    if not sync_c:
-      sync_c = False
-    else:
-      sync_c = sync_c.lower() in ("yes", "true", "1")
-
-    sync_s = node.getAttribute('sync-s')
-    if not sync_s:
-      sync_s = self._default.sync_s
-    else:
-      sync_s = sync_s.lower() in ("yes", "true", "1")
-
-    sync_tags = node.getAttribute('sync-tags')
-    if not sync_tags:
-      sync_tags = self._default.sync_tags
-    else:
-      sync_tags = sync_tags.lower() in ("yes", "true", "1")
-
-    clone_depth = node.getAttribute('clone-depth')
-    if clone_depth:
-      try:
-        clone_depth = int(clone_depth)
-        if clone_depth <= 0:
-          raise ValueError()
-      except ValueError:
-        raise ManifestParseError('invalid clone-depth %s in %s' %
-                                 (clone_depth, self.manifestFile))
+    rebase = XmlBool(node, 'rebase', True)
+    sync_c = XmlBool(node, 'sync-c', False)
+    sync_s = XmlBool(node, 'sync-s', self._default.sync_s)
+    sync_tags = XmlBool(node, 'sync-tags', self._default.sync_tags)
+
+    clone_depth = XmlInt(node, 'clone-depth')
+    if clone_depth is not None and clone_depth <= 0:
+      raise ManifestParseError('%s: clone-depth must be greater than 0, not "%s"' %
+                               (self.manifestFile, clone_depth))

     dest_branch = node.getAttribute('dest-branch') or self._default.destBranchExpr

@@ -897,7 +993,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     groups = ''
     if node.hasAttribute('groups'):
       groups = node.getAttribute('groups')
-    groups = self._ParseGroups(groups)
+    groups = self._ParseList(groups)

     if parent is None:
       relpath, worktree, gitdir, objdir, use_git_worktrees = \
@@ -911,7 +1007,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     groups.extend(set(default_groups).difference(groups))

     if self.IsMirror and node.hasAttribute('force-path'):
-      if node.getAttribute('force-path').lower() in ("yes", "true", "1"):
+      if XmlBool(node, 'force-path', False):
         gitdir = os.path.join(self.topdir, '%s.git' % path)

     project = Project(manifest=self,
@@ -948,6 +1044,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     return project

   def GetProjectPaths(self, name, path):
+    # The manifest entries might have trailing slashes.  Normalize them to avoid
+    # unexpected filesystem behavior since we do string concatenation below.
+    path = path.rstrip('/')
+    name = name.rstrip('/')
     use_git_worktrees = False
     relpath = path
     if self.IsMirror:
@@ -980,6 +1080,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
     return os.path.relpath(relpath, parent_relpath)

   def GetSubprojectPaths(self, parent, name, path):
+    # The manifest entries might have trailing slashes.  Normalize them to avoid
+    # unexpected filesystem behavior since we do string concatenation below.
+    path = path.rstrip('/')
+    name = name.rstrip('/')
     relpath = self._JoinRelpath(parent.relpath, path)
     gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
     objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
@@ -1166,15 +1270,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md


 class GitcManifest(XmlManifest):
-
-  def __init__(self, repodir, gitc_client_name):
-    """Initialize the GitcManifest object."""
-    super(GitcManifest, self).__init__(repodir)
-    self.isGitcClient = True
-    self.gitc_client_name = gitc_client_name
-    self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
-                                        gitc_client_name)
-    self.manifestFile = os.path.join(self.gitc_client_dir, '.manifest')
+  """Parser for GitC (git-in-the-cloud) manifests."""

   def _ParseProject(self, node, parent=None):
     """Override _ParseProject and add support for GITC specific attributes."""
@@ -1185,3 +1281,38 @@ class GitcManifest(XmlManifest):
     """Output GITC Specific Project attributes"""
     if p.old_revision:
       e.setAttribute('old-revision', str(p.old_revision))
+
+
+class RepoClient(XmlManifest):
+  """Manages a repo client checkout."""
+
+  def __init__(self, repodir, manifest_file=None):
+    self.isGitcClient = False
+
+    if os.path.exists(os.path.join(repodir, LOCAL_MANIFEST_NAME)):
+      print('error: %s is not supported; put local manifests in `%s` instead' %
+            (LOCAL_MANIFEST_NAME, os.path.join(repodir, LOCAL_MANIFESTS_DIR_NAME)),
+            file=sys.stderr)
+      sys.exit(1)
+
+    if manifest_file is None:
+      manifest_file = os.path.join(repodir, MANIFEST_FILE_NAME)
+    local_manifests = os.path.abspath(os.path.join(repodir, LOCAL_MANIFESTS_DIR_NAME))
+    super(RepoClient, self).__init__(repodir, manifest_file, local_manifests)
+
+    # TODO: Completely separate manifest logic out of the client.
+    self.manifest = self
+
+
+class GitcClient(RepoClient, GitcManifest):
+  """Manages a GitC client checkout."""
+
+  def __init__(self, repodir, gitc_client_name):
+    """Initialize the GitcManifest object."""
+    self.gitc_client_name = gitc_client_name
+    self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
+                                        gitc_client_name)
+
+    super(GitcManifest, self).__init__(
+        repodir, os.path.join(self.gitc_client_dir, '.manifest'))
+    self.isGitcClient = True
platform_utils.py

@@ -90,6 +90,11 @@ class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
   """ Implementation of FileDescriptorStreams for platforms that support
   non blocking I/O.
   """
+  def __init__(self):
+    super(_FileDescriptorStreamsNonBlocking, self).__init__()
+    self._poll = select.poll()
+    self._fd_to_stream = {}
+
   class Stream(object):
     """ Encapsulates a file descriptor """

@@ -114,11 +119,18 @@ class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
       self.fd.close()

   def _create_stream(self, fd, dest, std_name):
-    return self.Stream(fd, dest, std_name)
+    stream = self.Stream(fd, dest, std_name)
+    self._fd_to_stream[stream.fileno()] = stream
+    self._poll.register(stream, select.POLLIN)
+    return stream
+
+  def remove(self, stream):
+    self._poll.unregister(stream)
+    del self._fd_to_stream[stream.fileno()]
+    super(_FileDescriptorStreamsNonBlocking, self).remove(stream)

   def select(self):
-    ready_streams, _, _ = select.select(self.streams, [], [])
-    return ready_streams
+    return [self._fd_to_stream[fd] for fd, _ in self._poll.poll()]


 class _FileDescriptorStreamsThreads(FileDescriptorStreams):
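The change above swaps a per-call `select.select()` over every stream for a persistent `select.poll()` registration; poll is POSIX-only, which is why a thread-based fallback class also exists. A minimal runnable sketch of that readiness loop using two pipes:

```python
import os
import select

# Register file descriptors once, then ask poll() which ones are readable
# instead of handing the full list to select() on every call.
r1, w1 = os.pipe()
r2, w2 = os.pipe()

poller = select.poll()
for fd in (r1, r2):
    poller.register(fd, select.POLLIN)

os.write(w1, b'hello from pipe 1\n')
os.close(w1)

for fd, _event in poller.poll(1000):   # wait up to 1 second
    print(fd, os.read(fd, 4096))       # only r1 is reported as ready
```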
601  project.py

@@ -18,7 +18,6 @@ from __future__ import print_function
 import errno
 import filecmp
 import glob
-import json
 import os
 import random
 import re
@@ -29,20 +28,19 @@ import sys
 import tarfile
 import tempfile
 import time
-import traceback

 from color import Coloring
 from git_command import GitCommand, git_require
 from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \
     ID_RE
-from error import GitError, HookError, UploadError, DownloadError
+from error import GitError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError, ManifestInvalidPathError
 from error import NoManifestException
 import platform_utils
 import progress
 from repo_trace import IsTrace, Trace

-from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
+from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M

 from pyversion import is_python3
 if is_python3():
@@ -55,10 +53,17 @@ else:
   input = raw_input  # noqa: F821


+# Maximum sleep time allowed during retries.
+MAXIMUM_RETRY_SLEEP_SEC = 3600.0
+# +-10% random jitter is added to each Fetches retry sleep duration.
+RETRY_JITTER_PERCENT = 0.1
+
+
 def _lwrite(path, content):
   lock = '%s.lock' % path

-  with open(lock, 'w') as fd:
+  # Maintain Unix line endings on all OS's to match git behavior.
+  with open(lock, 'w', newline='\n') as fd:
     fd.write(content)

   try:
@@ -201,7 +206,7 @@ class ReviewableBranch(object):
                       dryrun=False,
                       auto_topic=False,
                       hashtags=(),
-                      draft=False,
+                      labels=(),
                       private=False,
                       notify=None,
                       wip=False,
@@ -213,7 +218,7 @@ class ReviewableBranch(object):
                       dryrun=dryrun,
                       auto_topic=auto_topic,
                       hashtags=hashtags,
-                      draft=draft,
+                      labels=labels,
                       private=private,
                       notify=notify,
                       wip=wip,
@@ -399,8 +404,8 @@ class _LinkFile(object):
     else:
       src = _SafeExpandPath(self.git_worktree, self.src)

-    if os.path.exists(src):
-      # Entity exists so just a simple one to one link operation.
+    if not glob.has_magic(src):
+      # Entity does not contain a wild card so just a simple one to one link operation.
       dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
       # dest & src are absolute paths at this point.  Make sure the target of
       # the symlink is relative in the context of the repo client checkout.
@@ -408,7 +413,7 @@ class _LinkFile(object):
       self.__linkIt(relpath, dest)
     else:
       dest = _SafeExpandPath(self.topdir, self.dest)
-      # Entity doesn't exist assume there is a wild card
+      # Entity contains a wild card.
       if os.path.exists(dest) and not platform_utils.isdir(dest):
         _error('Link error: src with wildcard, %s must be a directory', dest)
       else:
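`glob.has_magic()` is the stdlib helper the new check relies on: it looks for `*`, `?` or `[...]` in the string without touching the filesystem, so a literal path that does not exist yet still takes the one-to-one branch. A one-liner demo:

```python
import glob

for src in ('docs/manifest-format.md', 'docs/*.md', 'build/output-[0-9]'):
    print(src, '->', 'wildcard' if glob.has_magic(src) else 'literal')
```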
@@ -445,406 +450,6 @@ class RemoteSpec(object):
     self.orig_name = orig_name
     self.fetchUrl = fetchUrl


-class RepoHook(object):
-
-  """A RepoHook contains information about a script to run as a hook.
-
-  Hooks are used to run a python script before running an upload (for instance,
-  to run presubmit checks).  Eventually, we may have hooks for other actions.
-
-  This shouldn't be confused with files in the 'repo/hooks' directory.  Those
-  files are copied into each '.git/hooks' folder for each project.  Repo-level
-  hooks are associated instead with repo actions.
-
-  Hooks are always python.  When a hook is run, we will load the hook into the
-  interpreter and execute its main() function.
-  """
-  [... the remaining ~380 removed lines are the rest of the RepoHook class:
-   __init__, _GetHash, _GetMustVerb, _CheckForHookApproval,
-   _CheckForHookApprovalHelper, _ManifestUrlHasSecureScheme,
-   _CheckForHookApprovalManifest, _CheckForHookApprovalHash,
-   _ExtractInterpFromShebang, _ExecuteHookViaReexec, _ExecuteHookViaImport,
-   _ExecuteHook and Run.  That is the per-hook approval bookkeeping stored in
-   git config (the repo.hooks.<type>.approvedmanifest / approvedhash keys and
-   the secure-scheme check on the manifest URL), the shebang-based interpreter
-   selection with a re-exec path via stdin, and the in-process import/exec
-   fallback that calls the hook's main() ...]

 class Project(object):
   # These objects can be shared between several working trees.
   shareable_files = ['description', 'info']
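Among the removed methods, `_ExtractInterpFromShebang` picked the interpreter out of a hook script's first line, looking through `env` to the real program. A standalone sketch of that parsing (same regex idea; the function name here is illustrative):

```python
import os
import re

def extract_interp(data):
    # Read the first line, match '#! <prog> [arg]', and when <prog> is env
    # return the argument instead.
    firstline = data.splitlines()[:1]
    if not firstline:
        return None
    m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', firstline[0].strip())
    if not m:
        return None
    interp = m.group(1)
    if os.path.basename(interp) == 'env':
        interp = m.group(2)
    return interp

print(extract_interp('#!/usr/bin/env python3\nprint("hi")\n'))  # python3
print(extract_interp('#!/bin/bash\necho hi\n'))                  # /bin/bash
print(extract_interp('print("no shebang")\n'))                   # None
```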
@@ -875,6 +480,7 @@ class Project(object):
                is_derived=False,
                dest_branch=None,
                optimized_fetch=False,
+               retry_fetches=0,
                old_revision=None):
     """Init a Project object.

@@ -901,9 +507,11 @@ class Project(object):
       dest_branch: The branch to which to push changes for review by default.
       optimized_fetch: If True, when a project is set to a sha1 revision, only
                        fetch from the remote if the sha1 is not present locally.
+      retry_fetches: Retry remote fetches n times upon receiving transient error
+                     with exponential backoff and jitter.
       old_revision: saved git commit id for open GITC projects.
     """
-    self.manifest = manifest
+    self.client = self.manifest = manifest
     self.name = name
     self.remote = remote
     self.gitdir = gitdir.replace('\\', '/')
@@ -936,6 +544,7 @@ class Project(object):
     self.use_git_worktrees = use_git_worktrees
     self.is_derived = is_derived
     self.optimized_fetch = optimized_fetch
+    self.retry_fetches = max(0, retry_fetches)
     self.subprojects = []

     self.snapshots = {}
@@ -943,7 +552,7 @@ class Project(object):
     self.linkfiles = []
     self.annotations = []
     self.config = GitConfig.ForRepository(gitdir=self.gitdir,
-                                          defaults=self.manifest.globalConfig)
+                                          defaults=self.client.globalConfig)

     if self.worktree:
       self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
@@ -1346,7 +955,7 @@ class Project(object):
                       dryrun=False,
                       auto_topic=False,
                       hashtags=(),
-                      draft=False,
+                      labels=(),
                       private=False,
                       notify=None,
                       wip=False,
@@ -1396,16 +1005,12 @@ class Project(object):
     if dest_branch.startswith(R_HEADS):
       dest_branch = dest_branch[len(R_HEADS):]

-    upload_type = 'for'
-    if draft:
-      upload_type = 'drafts'
-
-    ref_spec = '%s:refs/%s/%s' % (R_HEADS + branch.name, upload_type,
-                                  dest_branch)
+    ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
     opts = []
     if auto_topic:
       opts += ['topic=' + branch.name]
     opts += ['t=%s' % p for p in hashtags]
+    opts += ['l=%s' % p for p in labels]

     opts += ['r=%s' % p for p in people[0]]
     opts += ['cc=%s' % p for p in people[1]]
@@ -1422,10 +1027,11 @@ class Project(object):
     if GitCommand(self, cmd, bare=True).Wait() != 0:
       raise UploadError('Upload failed')

-    msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
-    self.bare_git.UpdateRef(R_PUB + branch.name,
-                            R_HEADS + branch.name,
-                            message=msg)
+    if not dryrun:
+      msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
+      self.bare_git.UpdateRef(R_PUB + branch.name,
+                              R_HEADS + branch.name,
+                              message=msg)

   # Sync  ##
   def _ExtractArchive(self, tarpath, path=None):
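The upload hunks above now always push to `refs/for/<branch>` and pass review metadata as Gerrit push options appended after a `%`. A hedged sketch of how such a refspec string is assembled; the function name and defaults here are illustrative, not repo's API.

```python
def gerrit_push_refspec(local_branch, dest_branch, topic=None,
                        hashtags=(), labels=(), reviewers=(), cc=()):
    # Build "refs/heads/<local>:refs/for/<dest>%opt,opt,..." in the same
    # spirit as the upload code above.
    opts = []
    if topic:
        opts.append('topic=' + topic)
    opts += ['t=%s' % t for t in hashtags]
    opts += ['l=%s' % l for l in labels]
    opts += ['r=%s' % r for r in reviewers]
    opts += ['cc=%s' % c for c in cc]
    ref = 'refs/heads/%s:refs/for/%s' % (local_branch, dest_branch)
    if opts:
        ref += '%' + ','.join(opts)
    return ref

print(gerrit_push_refspec('my-fix', 'main', topic='my-fix',
                          labels=['Verified+1'], reviewers=['dev@example.com']))
# refs/heads/my-fix:refs/for/main%topic=my-fix,l=Verified+1,r=dev@example.com
```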
@@ -1453,6 +1059,7 @@ class Project(object):
                         tags=True,
                         archive=False,
                         optimized_fetch=False,
+                        retry_fetches=0,
                         prune=False,
                         submodules=False,
                         clone_filter=None):
@@ -1536,7 +1143,7 @@ class Project(object):
                             current_branch_only=current_branch_only,
                             tags=tags, prune=prune, depth=depth,
                             submodules=submodules, force_sync=force_sync,
-                            clone_filter=clone_filter):
+                            clone_filter=clone_filter, retry_fetches=retry_fetches):
       return False

     mp = self.manifest.manifestProject
@@ -1563,7 +1170,7 @@ class Project(object):
     self._InitHooks()

   def _CopyAndLinkFiles(self):
-    if self.manifest.isGitcClient:
+    if self.client.isGitcClient:
       return
     for copyfile in self.copyfiles:
       copyfile._Copy()
@@ -2304,6 +1911,27 @@ class Project(object):
       # Enable the extension!
       self.config.SetString('extensions.%s' % (key,), value)

+  def ResolveRemoteHead(self, name=None):
+    """Find out what the default branch (HEAD) points to.
+
+    Normally this points to refs/heads/master, but projects are moving to main.
+    Support whatever the server uses rather than hardcoding "master" ourselves.
+    """
+    if name is None:
+      name = self.remote.name
+
+    # The output will look like (NB: tabs are separators):
+    # ref: refs/heads/master	HEAD
+    # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44	HEAD
+    output = self.bare_git.ls_remote('-q', '--symref', '--exit-code', name, 'HEAD')
+
+    for line in output.splitlines():
+      lhs, rhs = line.split('\t', 1)
+      if rhs == 'HEAD' and lhs.startswith('ref:'):
+        return lhs[4:].strip()
+
+    return None
+
   def _CheckForImmutableRevision(self):
     try:
       # if revision (sha or tag) is not present then following function
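`ResolveRemoteHead` parses `git ls-remote --symref <remote> HEAD`, whose first line carries the symbolic ref as a tab-separated `ref: ...<TAB>HEAD` record. A standalone sketch of that parsing against sample output (no git invocation, so it runs anywhere):

```python
# Sample text mirroring git's tab-separated ls-remote --symref output.
sample = ('ref: refs/heads/main\tHEAD\n'
          '5f6803b100bb3cd0f534e96e88c91373e8ed1c44\tHEAD\n')

def resolve_remote_head(output):
    for line in output.splitlines():
        lhs, rhs = line.split('\t', 1)
        if rhs == 'HEAD' and lhs.startswith('ref:'):
            return lhs[4:].strip()
    return None

print(resolve_remote_head(sample))   # refs/heads/main
```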
@@ -2338,8 +1966,10 @@ class Project(object):
                    depth=None,
                    submodules=False,
                    force_sync=False,
-                   clone_filter=None):
-
+                   clone_filter=None,
+                   retry_fetches=2,
+                   retry_sleep_initial_sec=4.0,
+                   retry_exp_factor=2.0):
     is_sha1 = False
     tag_name = None
     # The depth should not be used when fetching to a mirror because
@@ -2444,8 +2074,10 @@ class Project(object):
     if os.path.exists(os.path.join(self.gitdir, 'shallow')):
       cmd.append('--depth=2147483647')

-    if quiet:
+    if not verbose:
       cmd.append('--quiet')
+    if not quiet and sys.stdout.isatty():
+      cmd.append('--progress')
     if not self.worktree:
       cmd.append('--update-head-ok')
     cmd.append(name)
@@ -2499,18 +2131,37 @@ class Project(object):

     cmd.extend(spec)

-    ok = False
-    for _i in range(2):
+    # At least one retry minimum due to git remote prune.
+    retry_fetches = max(retry_fetches, 2)
+    retry_cur_sleep = retry_sleep_initial_sec
+    ok = prune_tried = False
+    for try_n in range(retry_fetches):
       gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy,
-                          merge_output=True, capture_stdout=not verbose)
+                          merge_output=True, capture_stdout=quiet)
       ret = gitcmd.Wait()
       if ret == 0:
         ok = True
         break
-      # If needed, run the 'git remote prune' the first time through the loop
-      elif (not _i and
-            "error:" in gitcmd.stderr and
-            "git remote prune" in gitcmd.stderr):
+
+      # Retry later due to HTTP 429 Too Many Requests.
+      elif ('error:' in gitcmd.stderr and
+            'HTTP 429' in gitcmd.stderr):
+        if not quiet:
+          print('429 received, sleeping: %s sec' % retry_cur_sleep,
+                file=sys.stderr)
+        time.sleep(retry_cur_sleep)
+        retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
+                              MAXIMUM_RETRY_SLEEP_SEC)
+        retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
+                                               RETRY_JITTER_PERCENT))
+        continue
+
+      # If this is not last attempt, try 'git remote prune'.
+      elif (try_n < retry_fetches - 1 and
+            'error:' in gitcmd.stderr and
+            'git remote prune' in gitcmd.stderr and
+            not prune_tried):
+        prune_tried = True
         prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
                               ssh_proxy=ssh_proxy)
         ret = prunecmd.Wait()
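The retry loop above sleeps with exponential growth, a 3600 second cap, and roughly plus or minus 10% jitter before retrying on HTTP 429. A small self-contained sketch of that backoff schedule; the constants mirror the ones defined earlier in this diff, and the function name is illustrative.

```python
import random

MAX_SLEEP_SEC = 3600.0   # cap, mirroring MAXIMUM_RETRY_SLEEP_SEC above
JITTER = 0.1             # +/-10%, mirroring RETRY_JITTER_PERCENT above

def backoff_schedule(attempts, initial=4.0, factor=2.0):
    """Yield the sleep before each retry: exponential growth, capped,
    with jitter so clients do not retry in lock-step."""
    sleep = initial
    for _ in range(attempts):
        yield sleep
        sleep = min(factor * sleep, MAX_SLEEP_SEC)
        sleep *= 1 - random.uniform(-JITTER, JITTER)

for i, s in enumerate(backoff_schedule(5), 1):
    print('retry %d: sleep ~%.1fs' % (i, s))
```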
@@ -2582,8 +2233,10 @@ class Project(object):
       return False

     cmd = ['fetch']
-    if quiet:
+    if not verbose:
       cmd.append('--quiet')
+    if not quiet and sys.stdout.isatty():
+      cmd.append('--progress')
     if not self.worktree:
       cmd.append('--update-head-ok')
     cmd.append(bundle_dst)
@@ -2643,9 +2296,10 @@ class Project(object):
       # 22: HTTP page not retrieved.  The requested url was not found or
       # returned another error with the HTTP error code being 400 or above.
       # This return code only appears if -f, --fail is used.
-      if not quiet:
-        print("Server does not provide clone.bundle; ignoring.",
-              file=sys.stderr)
+      if verbose:
+        print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name)
+        if output:
+          print('Curl output:\n%s' % output)
       return False
     elif curlret and not verbose and output:
       print('%s' % output, file=sys.stderr)
@@ -2682,8 +2336,12 @@ class Project(object):
     if self._allrefs:
       raise GitError('%s checkout %s ' % (self.name, rev))

-  def _CherryPick(self, rev):
+  def _CherryPick(self, rev, ffonly=False, record_origin=False):
     cmd = ['cherry-pick']
+    if ffonly:
+      cmd.append('--ff')
+    if record_origin:
+      cmd.append('-x')
     cmd.append(rev)
     cmd.append('--')
     if GitCommand(self, cmd).Wait() != 0:
@@ -2745,10 +2403,19 @@ class Project(object):
         os.makedirs(self.objdir)
       self.bare_objdir.init()

-      # Enable per-worktree config file support if possible.  This is more a
-      # nice-to-have feature for users rather than a hard requirement.
-      if self.use_git_worktrees and git_require((2, 19, 0)):
-        self.EnableRepositoryExtension('worktreeConfig')
+      if self.use_git_worktrees:
+        # Set up the m/ space to point to the worktree-specific ref space.
+        # We'll update the worktree-specific ref space on each checkout.
+        if self.manifest.branch:
+          self.bare_git.symbolic_ref(
+              '-m', 'redirecting to worktree scope',
+              R_M + self.manifest.branch,
+              R_WORKTREE_M + self.manifest.branch)
+
+        # Enable per-worktree config file support if possible.  This is more a
+        # nice-to-have feature for users rather than a hard requirement.
+        if git_require((2, 20, 0)):
+          self.EnableRepositoryExtension('worktreeConfig')

       # If we have a separate directory to hold refs, initialize it as well.
       if self.objdir != self.gitdir:
@@ -2883,25 +2550,42 @@ class Project(object):

   def _InitMRef(self):
     if self.manifest.branch:
-      self._InitAnyMRef(R_M + self.manifest.branch)
+      if self.use_git_worktrees:
+        # We can't update this ref with git worktrees until it exists.
+        # We'll wait until the initial checkout to set it.
+        if not os.path.exists(self.worktree):
+          return
+
+        base = R_WORKTREE_M
+        active_git = self.work_git
+
+        self._InitAnyMRef(HEAD, self.bare_git, detach=True)
+      else:
+        base = R_M
+        active_git = self.bare_git
+
+      self._InitAnyMRef(base + self.manifest.branch, active_git)

   def _InitMirrorHead(self):
-    self._InitAnyMRef(HEAD)
+    self._InitAnyMRef(HEAD, self.bare_git)

-  def _InitAnyMRef(self, ref):
+  def _InitAnyMRef(self, ref, active_git, detach=False):
     cur = self.bare_ref.symref(ref)

     if self.revisionId:
       if cur != '' or self.bare_ref.get(ref) != self.revisionId:
         msg = 'manifest set to %s' % self.revisionId
         dst = self.revisionId + '^0'
-        self.bare_git.UpdateRef(ref, dst, message=msg, detach=True)
+        active_git.UpdateRef(ref, dst, message=msg, detach=True)
     else:
       remote = self.GetRemote(self.remote.name)
       dst = remote.ToLocal(self.revisionExpr)
       if cur != dst:
         msg = 'manifest set to %s' % self.revisionExpr
-        self.bare_git.symbolic_ref('-m', msg, ref, dst)
+        if detach:
+          active_git.UpdateRef(ref, dst, message=msg, detach=True)
+        else:
+          active_git.symbolic_ref('-m', msg, ref, dst)

   def _CheckDirReference(self, srcdir, destdir, share_refs):
     # Git worktrees don't use symlinks to share at all.
@@ -3024,14 +2708,18 @@ class Project(object):
     # Some platforms (e.g. Windows) won't let us update dotgit in situ because
     # of file permissions.  Delete it and recreate it from scratch to avoid.
     platform_utils.remove(dotgit)
-    # Use relative path from checkout->worktree.
-    with open(dotgit, 'w') as fp:
+    # Use relative path from checkout->worktree & maintain Unix line endings
+    # on all OS's to match git behavior.
+    with open(dotgit, 'w', newline='\n') as fp:
       print('gitdir:', os.path.relpath(git_worktree_path, self.worktree),
             file=fp)
-    # Use relative path from worktree->checkout.
-    with open(os.path.join(git_worktree_path, 'gitdir'), 'w') as fp:
+    # Use relative path from worktree->checkout & maintain Unix line endings
+    # on all OS's to match git behavior.
+    with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp:
       print(os.path.relpath(dotgit, git_worktree_path), file=fp)

     self._InitMRef()

   def _InitWorkTree(self, force_sync=False, submodules=False):
     realdotgit = os.path.join(self.worktree, '.git')
     tmpdotgit = realdotgit + '.tmp'
@@ -3148,6 +2836,13 @@ class Project(object):
       self._bare = bare
       self._gitdir = gitdir

+    # __getstate__ and __setstate__ are required for pickling because __getattr__ exists.
+    def __getstate__(self):
+      return (self._project, self._bare, self._gitdir)
+
+    def __setstate__(self, state):
+      self._project, self._bare, self._gitdir = state
+
     def LsOthers(self):
       p = GitCommand(self._project,
                      ['ls-files',
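`__getstate__`/`__setstate__` are needed here because the surrounding class defines `__getattr__`: during unpickling or copying, attribute probes on a half-built instance would otherwise recurse before its state exists. A minimal self-contained reproduction of the pattern (class and attribute names are made up):

```python
import copy

class Proxy(object):
    def __init__(self, target):
        self._target = target

    def __getattr__(self, name):
        # Only called for attributes not found normally; without the dunder
        # methods below, deepcopy would probe a blank instance, hit this, and
        # recurse forever looking for _target.
        return getattr(self._target, name)

    def __getstate__(self):
        return self._target

    def __setstate__(self, state):
        self._target = state

p = Proxy([1, 2, 3])
q = copy.deepcopy(p)        # works because state is restored explicitly
print(q.count(2), q._target)
```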
@@ -15,7 +15,10 @@

 """Helper tool for signing repo release tags correctly.

-This is intended to be run only by the official Repo release managers.
+This is intended to be run only by the official Repo release managers, but it
+could be run by people maintaining their own fork of the project.
+
+NB: Check docs/release-process.md for production freeze information.
 """

 import argparse
@@ -86,7 +89,9 @@ To roll back a release:

 def get_parser():
   """Get a CLI parser."""
-  parser = argparse.ArgumentParser(description=__doc__)
+  parser = argparse.ArgumentParser(
+      description=__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
   parser.add_argument('-n', '--dry-run',
                       dest='dryrun', action='store_true',
                       help='show everything that would be done')
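`RawDescriptionHelpFormatter` keeps the module docstring's hand-made layout instead of letting argparse reflow it into one paragraph. A small illustration; the DOC text and option are made up.

```python
import argparse

DOC = """Example tool.

Steps:
  1. build
  2. sign
  3. upload
"""

parser = argparse.ArgumentParser(
    description=DOC,
    formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-n', '--dry-run', action='store_true',
                    help='show everything that would be done')
parser.print_help()   # the Steps list keeps its line breaks
```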
254
repo
254
repo
@@ -32,6 +32,13 @@ import subprocess
import sys


# These should never be newer than the main.py version since this needs to be a
# bit more flexible with older systems. See that file for more details on the
# versions we select.
MIN_PYTHON_VERSION_SOFT = (3, 6)
MIN_PYTHON_VERSION_HARD = (3, 5)


# Keep basic logic in sync with repo_trace.py.
class Trace(object):
"""Trace helper logic."""

@@ -70,8 +77,6 @@ def check_python_version():
def reexec(prog):
exec_command([prog] + sys.argv)

MIN_PYTHON_VERSION = (3, 6)

ver = sys.version_info
major = ver.major
minor = ver.minor

@@ -80,19 +85,26 @@ def check_python_version():
if (major, minor) < (2, 7):
print('repo: error: Your Python version is too old. '
'Please use Python {}.{} or newer instead.'.format(
*MIN_PYTHON_VERSION), file=sys.stderr)
*MIN_PYTHON_VERSION_SOFT), file=sys.stderr)
sys.exit(1)

# Try to re-exec the version specific Python 3 if needed.
if (major, minor) < MIN_PYTHON_VERSION:
if (major, minor) < MIN_PYTHON_VERSION_SOFT:
# Python makes releases ~once a year, so try our min version +10 to help
# bridge the gap. This is the fallback anyways so perf isn't critical.
min_major, min_minor = MIN_PYTHON_VERSION
min_major, min_minor = MIN_PYTHON_VERSION_SOFT
for inc in range(0, 10):
reexec('python{}.{}'.format(min_major, min_minor + inc))

# Try the generic Python 3 wrapper, but only if it's new enough. We don't
# want to go from (still supported) Python 2.7 to (unsupported) Python 3.5.
# Fallback to older versions if possible.
for inc in range(MIN_PYTHON_VERSION_SOFT[1] - MIN_PYTHON_VERSION_HARD[1], 0, -1):
# Don't downgrade, and don't reexec ourselves (which would infinite loop).
if (min_major, min_minor - inc) <= (major, minor):
break
reexec('python{}.{}'.format(min_major, min_minor - inc))

# Try the generic Python 3 wrapper, but only if it's new enough. If it
# isn't, we want to just give up below and make the user resolve things.
try:
proc = subprocess.Popen(
['python3', '-c', 'import sys; '

@@ -103,18 +115,20 @@ def check_python_version():
except (OSError, subprocess.CalledProcessError):
python3_ver = None

# The python3 version looks like it's new enough, so give it a try.
if python3_ver and python3_ver >= MIN_PYTHON_VERSION:
# If the python3 version looks like it's new enough, give it a try.
if (python3_ver and python3_ver >= MIN_PYTHON_VERSION_HARD
and python3_ver != (major, minor)):
reexec('python3')

# We're still here, so diagnose things for the user.
if major < 3:
print('repo: warning: Python 2 is no longer supported; '
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION),
print('repo: error: Python 2 is no longer supported; '
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_HARD),
file=sys.stderr)
else:
sys.exit(1)
elif (major, minor) < MIN_PYTHON_VERSION_HARD:
print('repo: error: Python 3 version is too old; '
'Please use Python {}.{} or newer.'.format(*MIN_PYTHON_VERSION),
'Please use Python {}.{} or newer.'.format(*MIN_PYTHON_VERSION_HARD),
file=sys.stderr)
sys.exit(1)
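To make the two fallback loops above easier to follow, here is a standalone sketch (not the launcher itself) that simply enumerates the interpreter names they would try, assuming the same soft (3, 6) and hard (3, 5) minimums:

```python
# Standalone sketch of the interpreter-candidate order implied by the hunk
# above: walk upward from the soft minimum to bridge future Python releases,
# then walk back down toward the hard minimum without downgrading past the
# interpreter we are already running under.
MIN_SOFT = (3, 6)   # assumed soft minimum, mirrors MIN_PYTHON_VERSION_SOFT
MIN_HARD = (3, 5)   # assumed hard minimum, mirrors MIN_PYTHON_VERSION_HARD

def candidate_interpreters(current):
    """Yield pythonX.Y names to try when |current| is older than MIN_SOFT."""
    major, minor = MIN_SOFT
    # Upward: python3.6, python3.7, ... python3.15.
    for inc in range(0, 10):
        yield 'python{}.{}'.format(major, minor + inc)
    # Downward toward the hard floor, skipping anything that would be a
    # downgrade (or a re-exec of ourselves).
    for inc in range(MIN_SOFT[1] - MIN_HARD[1], 0, -1):
        if (major, minor - inc) <= current:
            break
        yield 'python{}.{}'.format(major, minor - inc)

print(list(candidate_interpreters((2, 7))))
```

For a Python 2.7 host this yields python3.6 through python3.15 and finally python3.5, matching the "min version +10, then fall back toward the hard floor" strategy the comments describe.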
@@ -133,7 +147,7 @@ if not REPO_REV:
REPO_REV = 'stable'

# increment this whenever we make important changes to this script
VERSION = (2, 4)
VERSION = (2, 8)

# increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (2, 3)
@ -251,8 +265,6 @@ else:
|
||||
home_dot_repo = os.path.expanduser('~/.repoconfig')
|
||||
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
|
||||
extra_args = []
|
||||
|
||||
|
||||
def GetParser(gitc_init=False):
|
||||
"""Setup the CLI parser."""
|
||||
@ -319,9 +331,11 @@ def GetParser(gitc_init=False):
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
group.add_option('--clone-bundle', action='store_true',
|
||||
help='enable use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
|
||||
group.add_option('--no-clone-bundle',
|
||||
dest='clone_bundle', default=True, action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
dest='clone_bundle', action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
|
||||
group.add_option('--no-tags',
|
||||
dest='tags', default=True, action='store_false',
|
||||
help="don't fetch tags in the manifest")
|
||||
@ -330,8 +344,10 @@ def GetParser(gitc_init=False):
|
||||
group = parser.add_option_group('repo Version options')
|
||||
group.add_option('--repo-url', metavar='URL',
|
||||
help='repo repository location ($REPO_URL)')
|
||||
group.add_option('--repo-branch', metavar='REVISION',
|
||||
group.add_option('--repo-rev', metavar='REV',
|
||||
help='repo branch or revision ($REPO_REV)')
|
||||
group.add_option('--repo-branch', dest='repo_rev',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
group.add_option('--no-repo-verify',
|
||||
dest='repo_verify', default=True, action='store_false',
|
||||
help='do not verify repo source code')
|
||||
@ -437,9 +453,11 @@ def get_gitc_manifest_dir():
|
||||
def gitc_parse_clientdir(gitc_fs_path):
|
||||
"""Parse a path in the GITC FS and return its client name.
|
||||
|
||||
@param gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
|
||||
Args:
|
||||
gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
|
||||
|
||||
@returns: The GITC client name
|
||||
Returns:
|
||||
The GITC client name.
|
||||
"""
|
||||
if gitc_fs_path == GITC_FS_ROOT_DIR:
|
||||
return None
|
||||
@ -463,6 +481,34 @@ class CloneFailure(Exception):
|
||||
"""
|
||||
|
||||
|
||||
def check_repo_verify(repo_verify, quiet=False):
|
||||
"""Check the --repo-verify state."""
|
||||
if not repo_verify:
|
||||
print('repo: warning: verification of repo code has been disabled;\n'
|
||||
'repo will not be able to verify the integrity of itself.\n',
|
||||
file=sys.stderr)
|
||||
return False
|
||||
|
||||
if NeedSetupGnuPG():
|
||||
return SetupGnuPG(quiet)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def check_repo_rev(dst, rev, repo_verify=True, quiet=False):
|
||||
"""Check that |rev| is valid."""
|
||||
do_verify = check_repo_verify(repo_verify, quiet=quiet)
|
||||
remote_ref, local_rev = resolve_repo_rev(dst, rev)
|
||||
if not quiet and not remote_ref.startswith('refs/heads/'):
|
||||
print('warning: repo is not tracking a remote branch, so it will not '
|
||||
'receive updates', file=sys.stderr)
|
||||
if do_verify:
|
||||
rev = verify_rev(dst, remote_ref, local_rev, quiet)
|
||||
else:
|
||||
rev = local_rev
|
||||
return (remote_ref, rev)
|
||||
|
||||
|
||||
def _Init(args, gitc_init=False):
|
||||
"""Installs repo by cloning it over the network.
|
||||
"""
|
||||
@ -474,21 +520,11 @@ def _Init(args, gitc_init=False):
|
||||
opt.quiet = opt.output_mode is False
|
||||
opt.verbose = opt.output_mode is True
|
||||
|
||||
url = opt.repo_url
|
||||
if not url:
|
||||
url = REPO_URL
|
||||
extra_args.append('--repo-url=%s' % url)
|
||||
if opt.clone_bundle is None:
|
||||
opt.clone_bundle = False if opt.partial_clone else True
|
||||
|
||||
branch = opt.repo_branch
|
||||
if not branch:
|
||||
branch = REPO_REV
|
||||
extra_args.append('--repo-branch=%s' % branch)
|
||||
|
||||
if branch.startswith('refs/heads/'):
|
||||
branch = branch[len('refs/heads/'):]
|
||||
if branch.startswith('refs/'):
|
||||
print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
url = opt.repo_url or REPO_URL
|
||||
rev = opt.repo_rev or REPO_REV
|
||||
|
||||
try:
|
||||
if gitc_init:
|
||||
@ -523,25 +559,13 @@ def _Init(args, gitc_init=False):
|
||||
|
||||
_CheckGitVersion()
|
||||
try:
|
||||
if not opt.repo_verify:
|
||||
do_verify = False
|
||||
else:
|
||||
if NeedSetupGnuPG():
|
||||
do_verify = SetupGnuPG(opt.quiet)
|
||||
else:
|
||||
do_verify = True
|
||||
|
||||
if not opt.quiet:
|
||||
print('Downloading Repo source from', url)
|
||||
dst = os.path.abspath(os.path.join(repodir, S_repo))
|
||||
_Clone(url, dst, opt.clone_bundle, opt.quiet, opt.verbose)
|
||||
|
||||
if do_verify:
|
||||
rev = _Verify(dst, branch, opt.quiet)
|
||||
else:
|
||||
rev = 'refs/remotes/origin/%s^0' % branch
|
||||
|
||||
_Checkout(dst, branch, rev, opt.quiet)
|
||||
remote_ref, rev = check_repo_rev(dst, rev, opt.repo_verify, quiet=opt.quiet)
|
||||
_Checkout(dst, remote_ref, rev, opt.quiet)
|
||||
|
||||
if not os.path.isfile(os.path.join(dst, 'repo')):
|
||||
print("warning: '%s' does not look like a git-repo repository, is "
|
||||
@ -606,7 +630,8 @@ def _CheckGitVersion():
|
||||
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
need = '.'.join(map(str, MIN_GIT_VERSION))
|
||||
print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
print('fatal: git %s or later required; found %s' % (need, ver_act.full),
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
@ -755,15 +780,17 @@ def _InitHttp():
|
||||
|
||||
def _Fetch(url, cwd, src, quiet, verbose):
|
||||
cmd = ['fetch']
|
||||
if quiet:
|
||||
if not verbose:
|
||||
cmd.append('--quiet')
|
||||
err = None
|
||||
if not quiet and sys.stdout.isatty():
|
||||
cmd.append('--progress')
|
||||
elif not verbose:
|
||||
err = subprocess.PIPE
|
||||
else:
|
||||
err = None
|
||||
cmd.append(src)
|
||||
cmd.append('+refs/heads/*:refs/remotes/origin/*')
|
||||
cmd.append('+refs/tags/*:refs/tags/*')
|
||||
run_git(*cmd, stderr=err, cwd=cwd)
|
||||
run_git(*cmd, stderr=err, capture_output=False, cwd=cwd)
|
||||
|
||||
|
||||
def _DownloadBundle(url, cwd, quiet, verbose):
|
||||
@@ -846,23 +873,83 @@ def _Clone(url, cwd, clone_bundle, quiet, verbose):
_Fetch(url, cwd, 'origin', quiet, verbose)


def _Verify(cwd, branch, quiet):
"""Verify the branch has been signed by a tag.
def resolve_repo_rev(cwd, committish):
"""Figure out what REPO_REV represents.

We support:
* refs/heads/xxx: Branch.
* refs/tags/xxx: Tag.
* xxx: Branch or tag or commit.

Args:
cwd: The git checkout to run in.
committish: The REPO_REV argument to resolve.

Returns:
A tuple of (remote ref, commit) as makes sense for the committish.
For branches, this will look like ('refs/heads/stable', <revision>).
For tags, this will look like ('refs/tags/v1.0', <revision>).
For commits, this will be (<revision>, <revision>).
"""
try:
ret = run_git('describe', 'origin/%s' % branch, cwd=cwd)
cur = ret.stdout.strip()
except CloneFailure:
print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
raise
def resolve(committish):
ret = run_git('rev-parse', '--verify', '%s^{commit}' % (committish,),
cwd=cwd, check=False)
return None if ret.returncode else ret.stdout.strip()

# An explicit branch.
if committish.startswith('refs/heads/'):
remote_ref = committish
committish = committish[len('refs/heads/'):]
rev = resolve('refs/remotes/origin/%s' % committish)
if rev is None:
print('repo: error: unknown branch "%s"' % (committish,),
file=sys.stderr)
raise CloneFailure()
return (remote_ref, rev)

# An explicit tag.
if committish.startswith('refs/tags/'):
remote_ref = committish
committish = committish[len('refs/tags/'):]
rev = resolve(remote_ref)
if rev is None:
print('repo: error: unknown tag "%s"' % (committish,),
file=sys.stderr)
raise CloneFailure()
return (remote_ref, rev)

# See if it's a short branch name.
rev = resolve('refs/remotes/origin/%s' % committish)
if rev:
return ('refs/heads/%s' % (committish,), rev)

# See if it's a tag.
rev = resolve('refs/tags/%s' % committish)
if rev:
return ('refs/tags/%s' % (committish,), rev)

# See if it's a commit.
rev = resolve(committish)
if rev and rev.lower().startswith(committish.lower()):
return (rev, rev)

# Give up!
print('repo: error: unable to resolve "%s"' % (committish,), file=sys.stderr)
raise CloneFailure()
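The docstring above fixes the shape of the return value; the following self-contained toy (a dict standing in for `git rev-parse` lookups, with made-up refs and hashes) shows the branch -> tag -> commit fallback order and the (remote_ref, rev) tuples it produces:

```python
# Toy illustration of the resolution order described above; no git involved.
refs = {
    'refs/remotes/origin/stable': '1111111111111111111111111111111111111111',
    'refs/remotes/origin/main':   '2222222222222222222222222222222222222222',
    'refs/tags/v2.8':             '3333333333333333333333333333333333333333',
}

def classify(committish):
    """Mimic the branch -> tag -> commit fallback, minus error handling."""
    if committish.startswith('refs/heads/'):
        short = committish[len('refs/heads/'):]
        return (committish, refs['refs/remotes/origin/%s' % short])
    if committish.startswith('refs/tags/'):
        return (committish, refs[committish])
    if 'refs/remotes/origin/%s' % committish in refs:   # short branch name
        return ('refs/heads/%s' % committish,
                refs['refs/remotes/origin/%s' % committish])
    if 'refs/tags/%s' % committish in refs:             # short tag name
        return ('refs/tags/%s' % committish, refs['refs/tags/%s' % committish])
    return (committish, committish)                     # assume a raw commit

for rev in ('stable', 'v2.8', 'refs/heads/main',
            '4444444444444444444444444444444444444444'):
    print(rev, '->', classify(rev))
```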
def verify_rev(cwd, remote_ref, rev, quiet):
"""Verify the commit has been signed by a tag."""
ret = run_git('describe', rev, cwd=cwd)
cur = ret.stdout.strip()

m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
if m:
cur = m.group(1)
if not quiet:
print(file=sys.stderr)
print("info: Ignoring branch '%s'; using tagged release '%s'"
% (branch, cur), file=sys.stderr)
print("warning: '%s' is not signed; falling back to signed release '%s'"
% (remote_ref, cur), file=sys.stderr)
print(file=sys.stderr)

env = os.environ.copy()

@@ -871,13 +958,13 @@ def _Verify(cwd, branch, quiet):
return '%s^0' % cur
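The regex in verify_rev() trims `git describe` output back to the nearest tag. A quick illustration with made-up values:

```python
import re

# `git describe` output for a commit N commits past a tag looks like
# "<tag>-<N>-g<abbrev>"; the match trims it back to the tag itself.
pattern = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$')

for described in ('v2.8', 'v2.8-12-g1a2b3c4'):
    m = pattern.match(described)
    print(described, '->', m.group(1) if m else described)
# v2.8            -> v2.8  (already exactly on a tag)
# v2.8-12-g1a2b3c4 -> v2.8 (12 commits past v2.8, fall back to the tag)
```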
|
||||
|
||||
|
||||
def _Checkout(cwd, branch, rev, quiet):
|
||||
def _Checkout(cwd, remote_ref, rev, quiet):
|
||||
"""Checkout an upstream branch into the repository and track it.
|
||||
"""
|
||||
run_git('update-ref', 'refs/heads/default', rev, cwd=cwd)
|
||||
|
||||
_SetConfig(cwd, 'branch.default.remote', 'origin')
|
||||
_SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch)
|
||||
_SetConfig(cwd, 'branch.default.merge', remote_ref)
|
||||
|
||||
run_git('symbolic-ref', 'HEAD', 'refs/heads/default', cwd=cwd)
|
||||
|
||||
@ -895,9 +982,7 @@ def _FindRepo():
|
||||
repo = None
|
||||
|
||||
olddir = None
|
||||
while curdir != '/' \
|
||||
and curdir != olddir \
|
||||
and not repo:
|
||||
while curdir != olddir and not repo:
|
||||
repo = os.path.join(curdir, repodir, REPO_MAIN)
|
||||
if not os.path.isfile(repo):
|
||||
repo = None
|
||||
@ -993,6 +1078,14 @@ def _Version():
|
||||
print(' (from %s)' % (__file__,))
|
||||
print('git %s' % (ParseGitVersion().full,))
|
||||
print('Python %s' % sys.version)
|
||||
uname = platform.uname()
|
||||
if sys.version_info.major < 3:
|
||||
# Python 3 returns a named tuple, but Python 2 is simpler.
|
||||
print(uname)
|
||||
else:
|
||||
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
|
||||
print('CPU %s (%s)' %
|
||||
(uname.machine, uname.processor if uname.processor else 'unknown'))
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
@ -1028,12 +1121,18 @@ def _SetDefaultsTo(gitdir):
|
||||
global REPO_REV
|
||||
|
||||
REPO_URL = gitdir
|
||||
try:
|
||||
ret = run_git('--git-dir=%s' % gitdir, 'symbolic-ref', 'HEAD')
|
||||
REPO_REV = ret.stdout.strip()
|
||||
except CloneFailure:
|
||||
print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
ret = run_git('--git-dir=%s' % gitdir, 'symbolic-ref', 'HEAD', check=False)
|
||||
if ret.returncode:
|
||||
# If we're not tracking a branch (bisect/etc...), then fall back to commit.
|
||||
print('repo: warning: %s has no current branch; using HEAD' % gitdir,
|
||||
file=sys.stderr)
|
||||
try:
|
||||
ret = run_git('rev-parse', 'HEAD', cwd=gitdir)
|
||||
except CloneFailure:
|
||||
print('fatal: %s has invalid HEAD' % gitdir, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
REPO_REV = ret.stdout.strip()
|
||||
|
||||
|
||||
def main(orig_args):
|
||||
@ -1089,6 +1188,10 @@ def main(orig_args):
|
||||
if my_main:
|
||||
repo_main = my_main
|
||||
|
||||
if not repo_main:
|
||||
print("fatal: unable to find repo entry point", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
ver_str = '.'.join(map(str, VERSION))
|
||||
me = [sys.executable, repo_main,
|
||||
'--repo-dir=%s' % rel_repo_dir,
|
||||
@ -1096,7 +1199,6 @@ def main(orig_args):
|
||||
'--wrapper-path=%s' % wrapper_path,
|
||||
'--']
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
exec_command(me)
|
||||
print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
sys.exit(148)
|
||||
|
33
run_tests
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
@ -20,22 +20,26 @@ from __future__ import print_function
|
||||
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def run_pytest(cmd, argv):
|
||||
"""Run the unittests via |cmd|."""
|
||||
try:
|
||||
return subprocess.call([cmd] + argv)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
print('%s: unable to run `%s`: %s' % (__file__, cmd, e), file=sys.stderr)
|
||||
print('%s: Try installing pytest: sudo apt-get install python-pytest' %
|
||||
(__file__,), file=sys.stderr)
|
||||
return 127
|
||||
else:
|
||||
raise
|
||||
def find_pytest():
|
||||
"""Try to locate a good version of pytest."""
|
||||
# Use the Python 3 version if available.
|
||||
ret = shutil.which('pytest-3')
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
# Hopefully this is a Python 3 version.
|
||||
ret = shutil.which('pytest')
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
print(f'{__file__}: unable to find pytest.', file=sys.stderr)
|
||||
print(f'{__file__}: Try installing: sudo apt-get install python-pytest',
|
||||
file=sys.stderr)
|
||||
|
||||
|
||||
def main(argv):
|
||||
@ -48,7 +52,8 @@ def main(argv):
|
||||
pythonpath += os.pathsep + oldpythonpath
|
||||
os.environ['PYTHONPATH'] = pythonpath
|
||||
|
||||
return run_pytest('pytest', argv)
|
||||
pytest = find_pytest()
|
||||
return subprocess.run([pytest] + argv, check=True)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
7
setup.py
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
#
|
||||
@ -55,9 +55,10 @@ setuptools.setup(
|
||||
'Operating System :: MacOS :: MacOS X',
|
||||
'Operating System :: Microsoft :: Windows :: Windows 10',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3 :: Only',
|
||||
'Topic :: Software Development :: Version Control :: Git',
|
||||
],
|
||||
# We support Python 2.7 and Python 3.6+.
|
||||
python_requires='>=2.7, ' + ', '.join('!=3.%i.*' % x for x in range(0, 6)),
|
||||
python_requires='>=3.6',
|
||||
packages=['subcmds'],
|
||||
)
|
||||
|
@@ -16,6 +16,7 @@

import os

# A mapping of the subcommand name to the class that implements it.
all_commands = {}

my_dir = os.path.dirname(__file__)

@@ -37,7 +38,7 @@ for py in os.listdir(my_dir):
['%s' % name])
mod = getattr(mod, name)
try:
cmd = getattr(mod, clsn)()
cmd = getattr(mod, clsn)
except AttributeError:
raise SyntaxError('%s/%s does not define class %s' % (
__name__, py, clsn))

@@ -46,5 +47,5 @@ for py in os.listdir(my_dir):
cmd.NAME = name
all_commands[name] = cmd

if 'help' in all_commands:
all_commands['help'].commands = all_commands
# Add 'branch' as an alias for 'branches'.
all_commands['branch'] = all_commands['branches']
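One consequence of the change above is that all_commands now maps names to command classes rather than instances, and the 'branch' alias is simply a second key pointing at the same class. A small hypothetical sketch (not repo's real dispatcher) of how callers would then instantiate on demand:

```python
# Hypothetical sketch: the registry stores classes, aliases are extra keys,
# and only the command actually being run gets instantiated.
class Branches(object):
    NAME = 'branches'

    def Execute(self, args):
        print('listing branches for', args)

all_commands = {'branches': Branches}
all_commands['branch'] = all_commands['branches']  # alias, same class object

def dispatch(name, args):
    cmd = all_commands[name]()  # instantiate only the requested command
    return cmd.Execute(args)

dispatch('branch', ['--all'])
```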
@ -15,10 +15,20 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import itertools
|
||||
import multiprocessing
|
||||
import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
|
||||
# Number of projects to submit to a single worker process at a time.
|
||||
# This number represents a tradeoff between the overhead of IPC and finer
|
||||
# grained opportunity for parallelism. This particular value was chosen by
|
||||
# iterating through powers of two until the overall performance no longer
|
||||
# improved. The performance of this batch size is not a function of the
|
||||
# number of cores on the system.
|
||||
WORKER_BATCH_SIZE = 32
|
||||
|
||||
|
||||
class BranchColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
@ -97,20 +107,32 @@ is shown, then the branch appears in all projects.
|
||||
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
"""Add flags to CLI parser for this subcommand."""
|
||||
default_jobs = min(multiprocessing.cpu_count(), 8)
|
||||
p.add_option(
|
||||
'-j',
|
||||
'--jobs',
|
||||
type=int,
|
||||
default=default_jobs,
|
||||
help='Number of worker processes to spawn '
|
||||
'(default: %s)' % default_jobs)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all_branches = {}
|
||||
project_cnt = len(projects)
|
||||
with multiprocessing.Pool(processes=opt.jobs) as pool:
|
||||
project_branches = pool.imap_unordered(
|
||||
expand_project_to_branches, projects, chunksize=WORKER_BATCH_SIZE)
|
||||
|
||||
for project in projects:
|
||||
for name, b in project.GetBranches().items():
|
||||
b.project = project
|
||||
for name, b in itertools.chain.from_iterable(project_branches):
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
|
||||
names = list(sorted(all_branches))
|
||||
names = sorted(all_branches)
|
||||
|
||||
if not names:
|
||||
print(' (no branches)', file=sys.stderr)
|
||||
@ -180,3 +202,19 @@ is shown, then the branch appears in all projects.
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|

def expand_project_to_branches(project):
"""Expands a project into a list of branch names & associated information.

Args:
project: project.Project

Returns:
List[Tuple[str, git_config.Branch]]
"""
branches = []
for name, b in project.GetBranches().items():
b.project = project
branches.append((name, b))
return branches
||||
|
@ -16,7 +16,7 @@
|
||||
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
from manifest_xml import XmlManifest
|
||||
from manifest_xml import RepoClient
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
@ -79,7 +79,7 @@ synced and their revisions won't be found.
|
||||
metavar='<FORMAT>',
|
||||
help='print the log using a custom git pretty format string')
|
||||
|
||||
def _printRawDiff(self, diff):
|
||||
def _printRawDiff(self, diff, pretty_format=None):
|
||||
for project in diff['added']:
|
||||
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
@ -92,7 +92,7 @@ synced and their revisions won't be found.
|
||||
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=True, color=False)
|
||||
self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
|
||||
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
@ -183,7 +183,7 @@ synced and their revisions won't be found.
|
||||
self.OptionParser.error('missing manifests to diff')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.printText = self.out.nofmt_printer('text')
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer('project', attr='bold')
|
||||
@ -193,16 +193,16 @@ synced and their revisions won't be found.
|
||||
else:
|
||||
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||
|
||||
manifest1 = XmlManifest(self.manifest.repodir)
|
||||
manifest1 = RepoClient(self.manifest.repodir)
|
||||
manifest1.Override(args[0], load_local_manifests=False)
|
||||
if len(args) == 1:
|
||||
manifest2 = self.manifest
|
||||
else:
|
||||
manifest2 = XmlManifest(self.manifest.repodir)
|
||||
manifest2 = RepoClient(self.manifest.repodir)
|
||||
manifest2.Override(args[1], load_local_manifests=False)
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(diff)
|
||||
self._printRawDiff(diff, pretty_format=opt.pretty_format)
|
||||
else:
|
||||
self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
|
||||
|
@ -37,9 +37,13 @@ If no project is specified try to use current directory as a project.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-b', '--branch',
|
||||
help='create a new branch first')
|
||||
p.add_option('-c', '--cherry-pick',
|
||||
dest='cherrypick', action='store_true',
|
||||
help="cherry-pick instead of checkout")
|
||||
p.add_option('-x', '--record-origin', action='store_true',
|
||||
help='pass -x when cherry-picking')
|
||||
p.add_option('-r', '--revert',
|
||||
dest='revert', action='store_true',
|
||||
help="revert instead of checkout")
|
||||
@ -78,6 +82,14 @@ If no project is specified try to use current directory as a project.
|
||||
project = self.GetProjects([a])[0]
|
||||
return to_get
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.record_origin:
|
||||
if not opt.cherrypick:
|
||||
self.OptionParser.error('-x only makes sense with --cherry-pick')
|
||||
|
||||
if opt.ffonly:
|
||||
self.OptionParser.error('-x and --ff are mutually exclusive options')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
for project, change_id, ps_id in self._ParseChangeIds(args):
|
||||
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||
@ -99,17 +111,36 @@ If no project is specified try to use current directory as a project.
|
||||
file=sys.stderr)
|
||||
for c in dl.commits:
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if opt.cherrypick:
|
||||
try:
|
||||
project._CherryPick(dl.commit)
|
||||
except GitError:
|
||||
print('[%s] Could not complete the cherry-pick of %s'
|
||||
% (project.name, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.cherrypick:
|
||||
mode = 'cherry-pick'
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
mode = 'revert'
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
mode = 'fast-forward merge'
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
mode = 'checkout'
|
||||
|
||||
# We'll combine the branch+checkout operation, but all the rest need a
|
||||
# dedicated branch start.
|
||||
if opt.branch and mode != 'checkout':
|
||||
project.StartBranch(opt.branch)
|
||||
|
||||
try:
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit, ffonly=opt.ffonly,
|
||||
record_origin=opt.record_origin)
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
else:
|
||||
if opt.branch:
|
||||
project.StartBranch(opt.branch, revision=dl.commit)
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
|
||||
except GitError:
|
||||
print('[%s] Could not complete the %s of %s'
|
||||
% (project.name, mode, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -179,6 +179,8 @@ without iterating through the remaining projects.
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
'upstream': project.upstream,
|
||||
'dest_branch': project.dest_branch,
|
||||
}
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
@ -317,6 +319,8 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
setenv('REPO_REMOTE', project['remote_name'])
|
||||
setenv('REPO_LREV', project['lrev'])
|
||||
setenv('REPO_RREV', project['rrev'])
|
||||
setenv('REPO_UPSTREAM', project['upstream'])
|
||||
setenv('REPO_DEST_BRANCH', project['dest_branch'])
|
||||
setenv('REPO_I', str(cnt + 1))
|
||||
for name in project['annotations']:
|
||||
setenv("REPO__%s" % (name), project['annotations'][name])
|
||||
@ -370,8 +374,8 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
for s in in_ready:
|
||||
buf = s.read().decode()
|
||||
if not buf:
|
||||
s.close()
|
||||
s_in.remove(s)
|
||||
s.close()
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
|
@ -19,6 +19,7 @@ import re
|
||||
import sys
|
||||
from formatter import AbstractFormatter, DumbWriter
|
||||
|
||||
from subcmds import all_commands
|
||||
from color import Coloring
|
||||
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
|
||||
import gitc_utils
|
||||
@ -42,7 +43,7 @@ Displays detailed usage information about a command.
|
||||
fmt = ' %%-%ds %%s' % maxlen
|
||||
|
||||
for name in commandNames:
|
||||
command = self.commands[name]
|
||||
command = all_commands[name]()
|
||||
try:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
@ -52,7 +53,7 @@ Displays detailed usage information about a command.
|
||||
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = list(sorted(self.commands))
|
||||
commandNames = list(sorted(all_commands))
|
||||
self._PrintCommands(commandNames)
|
||||
print("See 'repo help <command>' for more information on a "
|
||||
'specific command.')
|
||||
@ -64,7 +65,7 @@ Displays detailed usage information about a command.
|
||||
def gitc_supported(cmd):
|
||||
if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
|
||||
return True
|
||||
if self.manifest.isGitcClient:
|
||||
if self.client.isGitcClient:
|
||||
return True
|
||||
if isinstance(cmd, GitcClientCommand):
|
||||
return False
|
||||
@ -73,7 +74,7 @@ Displays detailed usage information about a command.
|
||||
return False
|
||||
|
||||
commandNames = list(sorted([name
|
||||
for name, command in self.commands.items()
|
||||
for name, command in all_commands.items()
|
||||
if command.common and gitc_supported(command)]))
|
||||
self._PrintCommands(commandNames)
|
||||
|
||||
@ -126,14 +127,14 @@ Displays detailed usage information about a command.
|
||||
self.wrap.end_paragraph(1)
|
||||
self.wrap.end_paragraph(0)
|
||||
|
||||
out = _Out(self.manifest.globalConfig)
|
||||
out = _Out(self.client.globalConfig)
|
||||
out._PrintSection('Summary', 'helpSummary')
|
||||
cmd.OptionParser.print_help()
|
||||
out._PrintSection('Description', 'helpDescription')
|
||||
|
||||
def _PrintAllCommandHelp(self):
|
||||
for name in sorted(self.commands):
|
||||
cmd = self.commands[name]
|
||||
for name in sorted(all_commands):
|
||||
cmd = all_commands[name]()
|
||||
cmd.manifest = self.manifest
|
||||
self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
|
||||
|
||||
@ -158,7 +159,7 @@ Displays detailed usage information about a command.
|
||||
name = args[0]
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
cmd = all_commands[name]()
|
||||
except KeyError:
|
||||
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -16,7 +16,7 @@
|
||||
|
||||
from command import PagedCommand
|
||||
from color import Coloring
|
||||
from git_refs import R_M
|
||||
from git_refs import R_M, R_HEADS
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
@ -44,7 +44,7 @@ class Info(PagedCommand):
|
||||
help="Disable all remote operations")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.heading = self.out.printer('heading', attr='bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
|
||||
self.redtext = self.out.printer('redtext', fg='red')
|
||||
@ -127,7 +127,10 @@ class Info(PagedCommand):
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
logTarget = R_M + self.manifest.manifestProject.config.GetBranch("default").merge
|
||||
branch = self.manifest.manifestProject.config.GetBranch('default').merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
logTarget = R_M + branch
|
||||
|
||||
bareTmp = project.bare_git._bare
|
||||
project.bare_git._bare = False
|
||||
|
@ -38,6 +38,7 @@ from project import SyncBuffer
|
||||
from git_config import GitConfig
|
||||
from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
|
||||
import platform_utils
|
||||
from wrapper import Wrapper
|
||||
|
||||
|
||||
class Init(InteractiveCommand, MirrorSafeCommand):
|
||||
@ -53,7 +54,8 @@ from the server and is installed in the .repo/ directory in the
|
||||
current working directory.
|
||||
|
||||
The optional -b argument can be used to select the manifest branch
|
||||
to checkout and use. If no branch is specified, master is assumed.
|
||||
to checkout and use. If no branch is specified, the remote's default
|
||||
branch is used.
|
||||
|
||||
The optional -m argument can be used to specify an alternate manifest
|
||||
to be used. If no manifest is specified, the manifest default.xml
|
||||
@ -154,9 +156,11 @@ to update the working directory files.
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
g.add_option('--clone-bundle', action='store_true',
|
||||
help='force use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
|
||||
g.add_option('--no-clone-bundle',
|
||||
dest='clone_bundle', default=True, action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
dest='clone_bundle', action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
|
||||
g.add_option('--no-tags',
|
||||
dest='tags', default=True, action='store_false',
|
||||
help="don't fetch tags in the manifest")
|
||||
@ -166,9 +170,10 @@ to update the working directory files.
|
||||
g.add_option('--repo-url',
|
||||
dest='repo_url',
|
||||
help='repo repository location', metavar='URL')
|
||||
g.add_option('--repo-branch',
|
||||
dest='repo_branch',
|
||||
help='repo branch or revision', metavar='REVISION')
|
||||
g.add_option('--repo-rev', metavar='REV',
|
||||
help='repo branch or revision')
|
||||
g.add_option('--repo-branch', dest='repo_rev',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='repo_verify', default=True, action='store_false',
|
||||
help='do not verify repo source code')
|
||||
@ -211,24 +216,27 @@ to update the working directory files.
|
||||
|
||||
m._InitGitDir(mirror_git=mirrored_manifest_git)
|
||||
|
||||
if opt.manifest_branch:
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
m.revisionExpr = 'refs/heads/master'
|
||||
else:
|
||||
if opt.manifest_branch:
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
m.PreSync()
|
||||
|
||||
self._ConfigureDepth(opt)
|
||||
|
||||
# Set the remote URL before the remote branch as we might need it below.
|
||||
if opt.manifest_url:
|
||||
r = m.GetRemote(m.remote.name)
|
||||
r.url = opt.manifest_url
|
||||
r.ResetFetch()
|
||||
r.Save()
|
||||
|
||||
if opt.manifest_branch:
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
if is_new:
|
||||
default_branch = m.ResolveRemoteHead()
|
||||
if default_branch is None:
|
||||
# If the remote doesn't have HEAD configured, default to master.
|
||||
default_branch = 'refs/heads/master'
|
||||
m.revisionExpr = default_branch
|
||||
else:
|
||||
m.PreSync()
|
||||
|
||||
groups = re.split(r'[,\s]+', opt.groups)
|
||||
all_platforms = ['linux', 'darwin', 'windows']
|
||||
platformize = lambda x: 'platform-' + x
|
||||
@@ -301,6 +309,11 @@ to update the working directory files.
else:
opt.clone_filter = None

if opt.clone_bundle is None:
opt.clone_bundle = False if opt.partial_clone else True
else:
m.config.SetString('repo.clonebundle', 'true' if opt.clone_bundle else 'false')
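Both the launcher and `repo init` now leave the shared clone_bundle option defaulting to None so that "user said nothing" can later resolve to "on unless --partial-clone". A minimal optparse sketch of that tri-state pattern (the option names mirror the ones above; the rest is illustrative):

```python
import optparse

# Minimal sketch of the tri-state default: a shared dest left at None lets
# the code distinguish an unset option from an explicit user choice.
parser = optparse.OptionParser()
parser.add_option('--clone-bundle', dest='clone_bundle', action='store_true',
                  help='enable use of /clone.bundle (default if not --partial-clone)')
parser.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
                  help='disable use of /clone.bundle (default if --partial-clone)')
parser.add_option('--partial-clone', action='store_true', default=False)

for argv in ([], ['--partial-clone'], ['--partial-clone', '--clone-bundle']):
    opts, _ = parser.parse_args(argv)
    if opts.clone_bundle is None:
        opts.clone_bundle = not opts.partial_clone   # the fallback shown above
    print(argv, '->', opts.clone_bundle)
```

The same resolution runs in the wrapper's _Init and in the sync subcommand, which is why neither option carries a hard-coded default any more.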
|
||||
|
||||
if opt.submodules:
|
||||
m.config.SetString('repo.submodules', 'true')
|
||||
|
||||
@ -352,7 +365,7 @@ to update the working directory files.
|
||||
return a
|
||||
|
||||
def _ShouldConfigureUser(self, opt):
|
||||
gc = self.manifest.globalConfig
|
||||
gc = self.client.globalConfig
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
# If we don't have local settings, get from global.
|
||||
@ -401,7 +414,7 @@ to update the working directory files.
|
||||
return False
|
||||
|
||||
def _ConfigureColor(self):
|
||||
gc = self.manifest.globalConfig
|
||||
gc = self.client.globalConfig
|
||||
if self._HasColorSet(gc):
|
||||
return
|
||||
|
||||
@ -479,6 +492,9 @@ to update the working directory files.
|
||||
if opt.archive and opt.mirror:
|
||||
self.OptionParser.error('--mirror and --archive cannot be used together.')
|
||||
|
||||
if args:
|
||||
self.OptionParser.error('init takes no arguments')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
git_require(MIN_GIT_VERSION_HARD, fail=True)
|
||||
if not git_require(MIN_GIT_VERSION_SOFT):
|
||||
@ -490,6 +506,24 @@ to update the working directory files.
|
||||
opt.quiet = opt.output_mode is False
|
||||
opt.verbose = opt.output_mode is True
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
|
||||
# Handle new --repo-url requests.
|
||||
if opt.repo_url:
|
||||
remote = rp.GetRemote('origin')
|
||||
remote.url = opt.repo_url
|
||||
remote.Save()
|
||||
|
||||
# Handle new --repo-rev requests.
|
||||
if opt.repo_rev:
|
||||
wrapper = Wrapper()
|
||||
remote_ref, rev = wrapper.check_repo_rev(
|
||||
rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet)
|
||||
branch = rp.GetBranch('default')
|
||||
branch.merge = remote_ref
|
||||
rp.work_git.reset('--hard', rev)
|
||||
branch.Save()
|
||||
|
||||
if opt.worktree:
|
||||
# Older versions of git supported worktree, but had dangerous gc bugs.
|
||||
git_require((2, 15, 0), fail=True, msg='git gc worktree corruption')
|
||||
|
@ -15,6 +15,8 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
@ -30,10 +32,16 @@ class Manifest(PagedCommand):
|
||||
_helpDescription = """
|
||||
|
||||
With the -o option, exports the current manifest for inspection.
|
||||
The manifest and (if present) local_manifest.xml are combined
|
||||
The manifest and (if present) local_manifests/ are combined
|
||||
together to produce a single manifest file. This file can be stored
|
||||
in a Git repository for use during future 'repo init' invocations.
|
||||
|
||||
The -r option can be used to generate a manifest file with project
|
||||
revisions set to the current commit hash. These are known as
|
||||
"revision locked manifests", as they don't follow a particular branch.
|
||||
In this case, the 'upstream' attribute is set to the ref we were on
|
||||
when the manifest was generated. The 'dest-branch' attribute is set
|
||||
to indicate the remote ref to push changes to via 'repo upload'.
|
||||
"""
|
||||
|
||||
@property
|
||||
@ -57,6 +65,15 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
help='If in -r mode, do not write the upstream field. '
|
||||
'Only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive.')
|
||||
p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
|
||||
default=True, action='store_false',
|
||||
help='If in -r mode, do not write the dest-branch field. '
|
||||
'Only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive.')
|
||||
p.add_option('--json', default=False, action='store_true',
|
||||
help='Output manifest in JSON format (experimental).')
|
||||
p.add_option('--pretty', default=False, action='store_true',
|
||||
help='Format output for humans to read.')
|
||||
p.add_option('-o', '--output-file',
|
||||
dest='output_file',
|
||||
default='-',
|
||||
@ -72,9 +89,26 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
fd = sys.stdout
|
||||
else:
|
||||
fd = open(opt.output_file, 'w')
|
||||
self.manifest.Save(fd,
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream)
|
||||
if opt.json:
|
||||
print('warning: --json is experimental!', file=sys.stderr)
|
||||
doc = self.manifest.ToDict(peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch)
|
||||
|
||||
json_settings = {
|
||||
# JSON style guide says Uunicode characters are fully allowed.
|
||||
'ensure_ascii': False,
|
||||
# We use 2 space indent to match JSON style guide.
|
||||
'indent': 2 if opt.pretty else None,
|
||||
'separators': (',', ': ') if opt.pretty else (',', ':'),
|
||||
'sort_keys': True,
|
||||
}
|
||||
fd.write(json.dumps(doc, **json_settings))
|
||||
else:
|
||||
self.manifest.Save(fd,
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch)
|
||||
fd.close()
|
||||
if opt.output_file != '-':
|
||||
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
|
||||
|
@ -16,17 +16,13 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import glob
|
||||
import itertools
|
||||
import multiprocessing
|
||||
import os
|
||||
|
||||
from command import PagedCommand
|
||||
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
from color import Coloring
|
||||
import platform_utils
|
||||
|
||||
@ -95,25 +91,20 @@ the following meanings:
|
||||
p.add_option('-q', '--quiet', action='store_true',
|
||||
help="only print the name of modified projects")
|
||||
|
||||
def _StatusHelper(self, project, clean_counter, sem, quiet):
|
||||
def _StatusHelper(self, quiet, project):
|
||||
"""Obtains the status for a specific project.
|
||||
|
||||
Obtains the status for a project, redirecting the output to
|
||||
the specified object. It will release the semaphore
|
||||
when done.
|
||||
the specified object.
|
||||
|
||||
Args:
|
||||
quiet: Where to output the status.
|
||||
project: Project to get status of.
|
||||
clean_counter: Counter for clean projects.
|
||||
sem: Semaphore, will call release() when complete.
|
||||
output: Where to output the status.
|
||||
|
||||
Returns:
|
||||
The status of the project.
|
||||
"""
|
||||
try:
|
||||
state = project.PrintWorkTreeStatus(quiet=quiet)
|
||||
if state == 'CLEAN':
|
||||
next(clean_counter)
|
||||
finally:
|
||||
sem.release()
|
||||
return project.PrintWorkTreeStatus(quiet=quiet)
|
||||
|
||||
def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
|
||||
"""find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
|
||||
@ -133,27 +124,18 @@ the following meanings:
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(args)
|
||||
counter = itertools.count()
|
||||
counter = 0
|
||||
|
||||
if opt.jobs == 1:
|
||||
for project in all_projects:
|
||||
state = project.PrintWorkTreeStatus(quiet=opt.quiet)
|
||||
if state == 'CLEAN':
|
||||
next(counter)
|
||||
counter += 1
|
||||
else:
|
||||
sem = _threading.Semaphore(opt.jobs)
|
||||
threads = []
|
||||
for project in all_projects:
|
||||
sem.acquire()
|
||||
|
||||
t = _threading.Thread(target=self._StatusHelper,
|
||||
args=(project, counter, sem, opt.quiet))
|
||||
threads.append(t)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
for t in threads:
|
||||
t.join()
|
||||
if not opt.quiet and len(all_projects) == next(counter):
|
||||
with multiprocessing.Pool(opt.jobs) as pool:
|
||||
states = pool.map(functools.partial(self._StatusHelper, opt.quiet), all_projects)
|
||||
counter += states.count('CLEAN')
|
||||
if not opt.quiet and len(all_projects) == counter:
|
||||
print('nothing to commit (working directory clean)')
|
||||
|
||||
if opt.orphans:
|
||||
@ -183,7 +165,7 @@ the following meanings:
|
||||
proj_dirs, proj_dirs_parents, outstring)
|
||||
|
||||
if outstring:
|
||||
output = StatusColoring(self.manifest.globalConfig)
|
||||
output = StatusColoring(self.client.globalConfig)
|
||||
output.project('Objects not within a project (orphans)')
|
||||
output.nl()
|
||||
for entry in outstring:
|
||||
|
@ -138,11 +138,11 @@ if the manifest server specified in the manifest file already includes
|
||||
credentials.
|
||||
|
||||
By default, all projects will be synced. The --fail-fast option can be used
|
||||
to halt syncing as soon as possible when the the first project fails to sync.
|
||||
to halt syncing as soon as possible when the first project fails to sync.
|
||||
|
||||
The --force-sync option can be used to overwrite existing git
|
||||
directories if they have previously been linked to a different
|
||||
object direcotry. WARNING: This may cause data to be lost since
|
||||
object directory. WARNING: This may cause data to be lost since
|
||||
refs may be removed when overwriting.
|
||||
|
||||
The --force-remove-dirty option can be used to remove previously used
|
||||
@ -247,8 +247,9 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('-m', '--manifest-name',
|
||||
dest='manifest_name',
|
||||
help='temporary manifest to use for this sync', metavar='NAME.xml')
|
||||
p.add_option('--no-clone-bundle',
|
||||
dest='clone_bundle', default=True, action='store_false',
|
||||
p.add_option('--clone-bundle', action='store_true',
|
||||
help='enable use of /clone.bundle on HTTP/HTTPS')
|
||||
p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
p.add_option('-u', '--manifest-server-username', action='store',
|
||||
dest='manifest_server_username',
|
||||
@ -265,6 +266,9 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('--optimized-fetch',
|
||||
dest='optimized_fetch', action='store_true',
|
||||
help='only fetch projects fixed to sha1 if revision does not exist locally')
|
||||
p.add_option('--retry-fetches',
|
||||
default=0, action='store', type='int',
|
||||
help='number of times to retry fetches on transient errors')
|
||||
p.add_option('--prune', dest='prune', action='store_true',
|
||||
help='delete refs that no longer exist on the remote')
|
||||
if show_smart:
|
||||
@ -342,6 +346,7 @@ later is required to fix a server side protocol bug.
|
||||
clone_bundle=opt.clone_bundle,
|
||||
tags=opt.tags, archive=self.manifest.IsArchive,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
retry_fetches=opt.retry_fetches,
|
||||
prune=opt.prune,
|
||||
clone_filter=clone_filter)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
@ -729,12 +734,12 @@ later is required to fix a server side protocol bug.
|
||||
branch = branch[len(R_HEADS):]
|
||||
|
||||
if 'SYNC_TARGET' in os.environ:
|
||||
target = os.environ('SYNC_TARGET')
|
||||
target = os.environ['SYNC_TARGET']
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
elif ('TARGET_PRODUCT' in os.environ and
|
||||
'TARGET_BUILD_VARIANT' in os.environ):
|
||||
target = '%s-%s' % (os.environ('TARGET_PRODUCT'),
|
||||
os.environ('TARGET_BUILD_VARIANT'))
|
||||
target = '%s-%s' % (os.environ['TARGET_PRODUCT'],
|
||||
os.environ['TARGET_BUILD_VARIANT'])
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
else:
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch)
|
||||
@ -775,8 +780,10 @@ later is required to fix a server side protocol bug.
|
||||
start = time.time()
|
||||
success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
force_sync=opt.force_sync,
|
||||
tags=opt.tags,
|
||||
optimized_fetch=opt.optimized_fetch,
|
||||
retry_fetches=opt.retry_fetches,
|
||||
submodules=self.manifest.HasSubmodules,
|
||||
clone_filter=self.manifest.CloneFilter)
|
||||
finish = time.time()
|
||||
@ -831,6 +838,9 @@ later is required to fix a server side protocol bug.
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, 'smart_sync_override.xml')
|
||||
|
||||
if opt.clone_bundle is None:
|
||||
opt.clone_bundle = self.manifest.CloneBundle
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
|
||||
else:
|
||||
@ -845,6 +855,13 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
cb = rp.CurrentBranch
|
||||
if cb:
|
||||
base = rp.GetBranch(cb).merge
|
||||
if not base or not base.startswith('refs/heads/'):
|
||||
print('warning: repo is not tracking a remote branch, so it will not '
|
||||
'receive updates; run `repo init --repo-rev=stable` to fix.',
|
||||
file=sys.stderr)
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
mp.PreSync()
|
||||
|
@ -21,9 +21,10 @@ import sys
|
||||
|
||||
from command import InteractiveCommand
|
||||
from editor import Editor
|
||||
from error import HookError, UploadError
|
||||
from error import UploadError
|
||||
from git_command import GitCommand
|
||||
from project import RepoHook
|
||||
from git_refs import R_HEADS
|
||||
from hooks import RepoHook
|
||||
|
||||
from pyversion import is_python3
|
||||
if not is_python3():
|
||||
@ -134,7 +135,18 @@ review.URL.uploadhashtags:
|
||||
|
||||
To add hashtags whenever uploading a commit, you can set a per-project
|
||||
or global Git option to do so. The value of review.URL.uploadhashtags
|
||||
will be used as comma delimited hashtags like the --hashtags option.
|
||||
will be used as comma delimited hashtags like the --hashtag option.
|
||||
|
||||
review.URL.uploadlabels:
|
||||
|
||||
To add labels whenever uploading a commit, you can set a per-project
|
||||
or global Git option to do so. The value of review.URL.uploadlabels
|
||||
will be used as comma delimited labels like the --label option.
|
||||
|
||||
review.URL.uploadnotify:
|
||||
|
||||
Control e-mail notifications when uploading.
|
||||
https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
|
||||
|
||||
# References
|
||||
|
||||
@ -152,6 +164,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
p.add_option('--hashtag-branch', '--htb',
|
||||
action='store_true',
|
||||
help='Add local branch name as a hashtag.')
|
||||
p.add_option('-l', '--label',
|
||||
dest='labels', action='append', default=[],
|
||||
help='Add a label when uploading.')
|
||||
p.add_option('--re', '--reviewers',
|
||||
type='string', action='append', dest='reviewers',
|
||||
help='Request reviews from these people.')
|
||||
@ -164,9 +179,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
p.add_option('--cbr', '--current-branch',
|
||||
dest='current_branch', action='store_true',
|
||||
help='Upload current git branch.')
|
||||
p.add_option('-d', '--draft',
|
||||
action='store_true', dest='draft', default=False,
|
||||
help='If specified, upload as a draft.')
|
||||
p.add_option('--ne', '--no-emails',
|
||||
action='store_false', dest='notify', default=True,
|
||||
help='If specified, do not send emails on upload.')
|
||||
@ -193,33 +205,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
p.add_option('--no-cert-checks',
|
||||
dest='validate_certs', action='store_false', default=True,
|
||||
help='Disable verifying ssl certs (unsafe).')
|
||||
|
||||
# Options relating to upload hook. Note that verify and no-verify are NOT
|
||||
# opposites of each other, which is why they store to different locations.
|
||||
# We are using them to match 'git commit' syntax.
|
||||
#
|
||||
# Combinations:
|
||||
# - no-verify=False, verify=False (DEFAULT):
|
||||
# If stdout is a tty, can prompt about running upload hooks if needed.
|
||||
# If user denies running hooks, the upload is cancelled. If stdout is
|
||||
# not a tty and we would need to prompt about upload hooks, upload is
|
||||
# cancelled.
|
||||
# - no-verify=False, verify=True:
|
||||
# Always run upload hooks with no prompt.
|
||||
# - no-verify=True, verify=False:
|
||||
# Never run upload hooks, but upload anyway (AKA bypass hooks).
|
||||
# - no-verify=True, verify=True:
|
||||
# Invalid
|
||||
g = p.add_option_group('Upload hooks')
|
||||
g.add_option('--no-verify',
|
||||
dest='bypass_hooks', action='store_true',
|
||||
help='Do not run the upload hook.')
|
||||
g.add_option('--verify',
|
||||
dest='allow_all_hooks', action='store_true',
|
||||
help='Run the upload hook without prompting.')
|
||||
g.add_option('--ignore-hooks',
|
||||
dest='ignore_hooks', action='store_true',
|
||||
help='Do not abort uploading if upload hooks fail.')
|
||||
RepoHook.AddOptionGroup(p, 'pre-upload')
|
||||
|
||||
def _SingleBranch(self, opt, branch, people):
|
||||
project = branch.project
|
||||
@ -238,7 +224,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
|
||||
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
|
||||
print('Upload project %s/ to remote branch %s%s:' %
|
||||
(project.relpath, destination, ' (draft)' if opt.draft else ''))
|
||||
(project.relpath, destination, ' (private)' if opt.private else ''))
|
||||
print(' branch %s (%2d commit%s, %s):' % (
|
||||
name,
|
||||
len(commit_list),
|
||||
@ -410,28 +396,51 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||
|
||||
# Check if hashtags should be included.
|
||||
def _ExpandHashtag(value):
|
||||
"""Split |value| up into comma delimited tags."""
|
||||
def _ExpandCommaList(value):
|
||||
"""Split |value| up into comma delimited entries."""
|
||||
if not value:
|
||||
return
|
||||
for tag in value.split(','):
|
||||
tag = tag.strip()
|
||||
if tag:
|
||||
yield tag
|
||||
for ret in value.split(','):
|
||||
ret = ret.strip()
|
||||
if ret:
|
||||
yield ret
|
||||
|
||||
# Check if hashtags should be included.
|
||||
key = 'review.%s.uploadhashtags' % branch.project.remote.review
|
||||
hashtags = set(_ExpandHashtag(branch.project.config.GetString(key)))
|
||||
hashtags = set(_ExpandCommaList(branch.project.config.GetString(key)))
|
||||
for tag in opt.hashtags:
|
||||
hashtags.update(_ExpandHashtag(tag))
|
||||
hashtags.update(_ExpandCommaList(tag))
|
||||
if opt.hashtag_branch:
|
||||
hashtags.add(branch.name)

# Check if labels should be included.
key = 'review.%s.uploadlabels' % branch.project.remote.review
labels = set(_ExpandCommaList(branch.project.config.GetString(key)))
for label in opt.labels:
labels.update(_ExpandCommaList(label))
# Basic sanity check on label syntax.
for label in labels:
if not re.match(r'^.+[+-][0-9]+$', label):
print('repo: error: invalid label syntax "%s": labels use forms '
'like CodeReview+1 or Verified-1' % (label,), file=sys.stderr)
sys.exit(1)
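The sanity check above expects Gerrit-style labels of the form name, then a + or -, then a numeric score. A quick check of the regex against made-up values:

```python
import re

# Same pattern as the check above; sample labels are made up.
LABEL_RE = re.compile(r'^.+[+-][0-9]+$')

for label in ('Code-Review+2', 'Verified-1', 'Verified', 'CodeReview+one'):
    print(label, '->', 'ok' if LABEL_RE.match(label) else 'invalid')
# Code-Review+2  -> ok
# Verified-1     -> ok
# Verified       -> invalid (no score)
# CodeReview+one -> invalid (score must be numeric)
```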
|
||||
|
||||
# Handle e-mail notifications.
|
||||
if opt.notify is False:
|
||||
notify = 'NONE'
|
||||
else:
|
||||
key = 'review.%s.uploadnotify' % branch.project.remote.review
|
||||
notify = branch.project.config.GetString(key)
|
||||
|
||||
destination = opt.dest_branch or branch.project.dest_branch
|
||||
|
||||
# Make sure our local branch is not setup to track a different remote branch
|
||||
merge_branch = self._GetMergeBranch(branch.project)
|
||||
if destination:
|
||||
full_dest = 'refs/heads/%s' % destination
|
||||
full_dest = destination
|
||||
if not full_dest.startswith(R_HEADS):
|
||||
full_dest = R_HEADS + full_dest
|
||||
|
||||
if not opt.dest_branch and merge_branch and merge_branch != full_dest:
|
||||
print('merge branch %s does not match destination branch %s'
|
||||
% (merge_branch, full_dest))
|
||||
@ -445,9 +454,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
dryrun=opt.dryrun,
|
||||
auto_topic=opt.auto_topic,
|
||||
hashtags=hashtags,
|
||||
draft=opt.draft,
|
||||
labels=labels,
|
||||
private=opt.private,
|
||||
notify=None if opt.notify else 'NONE',
|
||||
notify=notify,
|
||||
wip=opt.wip,
|
||||
dest_branch=destination,
|
||||
validate_certs=opt.validate_certs,
|
||||
@ -519,10 +528,10 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
avail = [up_branch]
|
||||
else:
|
||||
avail = None
|
||||
print('ERROR: Current branch (%s) not uploadable. '
|
||||
'You may be able to type '
|
||||
'"git branch --set-upstream-to m/master" to fix '
|
||||
'your branch.' % str(cbr),
|
||||
print('repo: error: Unable to upload branch "%s". '
|
||||
'You might be able to fix the branch by running:\n'
|
||||
' git branch --set-upstream-to m/%s' %
|
||||
(str(cbr), self.manifest.branch),
|
||||
file=sys.stderr)
|
||||
else:
|
||||
avail = project.GetUploadableBranches(branch)
|
||||
@@ -537,31 +546,15 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
(branch,), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not opt.bypass_hooks:
|
||||
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
|
||||
self.manifest.topdir,
|
||||
self.manifest.manifestProject.GetRemote('origin').url,
|
||||
abort_if_user_denies=True)
|
||||
pending_proj_names = [project.name for (project, available) in pending]
|
||||
pending_worktrees = [project.worktree for (project, available) in pending]
|
||||
passed = True
|
||||
try:
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
|
||||
worktree_list=pending_worktrees)
|
||||
except SystemExit:
|
||||
passed = False
|
||||
if not opt.ignore_hooks:
|
||||
raise
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
|
||||
if not passed:
|
||||
if opt.ignore_hooks:
|
||||
print('\nWARNING: pre-upload hooks failed, but uploading anyways.',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
return
|
||||
pending_proj_names = [project.name for (project, available) in pending]
|
||||
pending_worktrees = [project.worktree for (project, available) in pending]
|
||||
hook = RepoHook.FromSubcmd(
|
||||
hook_type='pre-upload', manifest=self.manifest,
|
||||
opt=opt, abort_if_user_denies=True)
|
||||
if not hook.Run(
|
||||
project_list=pending_proj_names,
|
||||
worktree_list=pending_worktrees):
|
||||
return 1
|
||||
|
||||
if opt.reviewers:
|
||||
reviewers = _SplitEmails(opt.reviewers)
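The hunk above folds the old hashtag-only helper into a general _ExpandCommaList() and reuses it for Gerrit labels, then validates each label against the form the server accepts. A minimal standalone sketch of that behavior (the names below are illustrative, not repo's internals):

import re

def expand_comma_list(value):
  """Yield the stripped, non-empty entries of a comma-delimited string."""
  if not value:
    return
  for entry in value.split(','):
    entry = entry.strip()
    if entry:
      yield entry

hashtags = set(expand_comma_list('bugfix, , v2.x'))          # {'bugfix', 'v2.x'}
labels = set(expand_comma_list('Code-Review+2, Verified+1'))
for label in labels:
  # Labels must look like <Name>+<N> or <Name>-<N>, e.g. Verified-1.
  assert re.match(r'^.+[+-][0-9]+$', label), 'invalid label: %s' % label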
|
||||
|
@@ -15,7 +15,10 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import platform
|
||||
import sys
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
from git_command import git, RepoSourceVersion, user_agent
|
||||
from git_refs import HEAD
|
||||
@@ -40,10 +43,11 @@ class Version(Command, MirrorSafeCommand):
|
||||
rp_ver = rp.bare_git.describe(HEAD)
|
||||
print('repo version %s' % rp_ver)
|
||||
print(' (from %s)' % rem.url)
|
||||
print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD))
|
||||
|
||||
if Version.wrapper_path is not None:
|
||||
print('repo launcher version %s' % Version.wrapper_version)
|
||||
print(' (from %s)' % Version.wrapper_path)
|
||||
if self.wrapper_path is not None:
|
||||
print('repo launcher version %s' % self.wrapper_version)
|
||||
print(' (from %s)' % self.wrapper_path)
|
||||
|
||||
if src_ver != rp_ver:
|
||||
print(' (currently at %s)' % src_ver)
|
||||
@@ -52,3 +56,11 @@ class Version(Command, MirrorSafeCommand):
|
||||
print('git %s' % git.version_tuple().full)
|
||||
print('git User-Agent %s' % user_agent.git)
|
||||
print('Python %s' % sys.version)
|
||||
uname = platform.uname()
|
||||
if sys.version_info.major < 3:
|
||||
# Python 3 returns a named tuple, but Python 2 is simpler.
|
||||
print(uname)
|
||||
else:
|
||||
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
|
||||
print('CPU %s (%s)' %
|
||||
(uname.machine, uname.processor if uname.processor else 'unknown'))
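On Python 3, platform.uname() returns a named tuple, which is what the OS/CPU branch above relies on (the Python 2 branch just prints the raw tuple). A small illustration of the Python 3 formatting outside of repo:

import platform

uname = platform.uname()
# Prints something like: OS Linux 5.4.0-91-generic (#102-Ubuntu SMP ...)
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
# uname.processor can be empty on some platforms, hence the fallback.
print('CPU %s (%s)' % (uname.machine, uname.processor if uname.processor else 'unknown'))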
|
||||
|
@@ -30,6 +30,33 @@ import git_command
|
||||
import wrapper
|
||||
|
||||
|
||||
class SSHUnitTest(unittest.TestCase):
|
||||
"""Tests the ssh functions."""
|
||||
|
||||
def test_ssh_version(self):
|
||||
"""Check ssh_version() handling."""
|
||||
ver = git_command._parse_ssh_version('Unknown\n')
|
||||
self.assertEqual(ver, ())
|
||||
ver = git_command._parse_ssh_version('OpenSSH_1.0\n')
|
||||
self.assertEqual(ver, (1, 0))
|
||||
ver = git_command._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
|
||||
self.assertEqual(ver, (6, 6, 1))
|
||||
ver = git_command._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
|
||||
self.assertEqual(ver, (7, 6))
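The cases above pin down what git_command._parse_ssh_version() must return for typical `ssh -V` banners. A minimal parser with that behavior might look like this (an assumed sketch, not the module's actual code):

import re

def parse_ssh_version(ver_str):
  """Map 'OpenSSH_6.6.1p1 ...' to (6, 6, 1); unrecognized banners to ()."""
  m = re.match(r'^OpenSSH_([0-9.]+)', ver_str)
  if not m:
    return ()
  return tuple(int(x) for x in m.group(1).split('.') if x)

assert parse_ssh_version('Unknown\n') == ()
assert parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3\n') == (7, 6)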
|
||||
|
||||
def test_ssh_sock(self):
|
||||
"""Check ssh_sock() function."""
|
||||
with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
|
||||
# old ssh version uses port
|
||||
with mock.patch('git_command.ssh_version', return_value=(6, 6)):
|
||||
self.assertTrue(git_command.ssh_sock().endswith('%p'))
|
||||
git_command._ssh_sock_path = None
|
||||
# new ssh version uses hash
|
||||
with mock.patch('git_command.ssh_version', return_value=(6, 7)):
|
||||
self.assertTrue(git_command.ssh_sock().endswith('%C'))
|
||||
git_command._ssh_sock_path = None
|
||||
|
||||
|
||||
class GitCallUnitTest(unittest.TestCase):
|
||||
"""Tests the _GitCall class (via git_command.git)."""
|
||||
|
||||
|
169
tests/test_git_trace2_event_log.py
Normal file
@@ -0,0 +1,169 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the git_trace2_event_log.py module."""
|
||||
|
||||
import json
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import git_trace2_event_log
|
||||
|
||||
|
||||
class EventLogTestCase(unittest.TestCase):
|
||||
"""TestCase for the EventLog module."""
|
||||
|
||||
PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
PARENT_SID_VALUE = 'parent_sid'
|
||||
SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
|
||||
FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
||||
|
||||
def setUp(self):
|
||||
"""Load the event_log module every time."""
|
||||
self._event_log_module = None
|
||||
# By default we initialize with the expected case where
|
||||
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
|
||||
env = {
|
||||
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
|
||||
}
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env=env)
|
||||
self._log_data = None
|
||||
|
||||
def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
|
||||
"""Helper function to verify common event log keys."""
|
||||
self.assertIn('event', log_entry)
|
||||
self.assertIn('sid', log_entry)
|
||||
self.assertIn('thread', log_entry)
|
||||
self.assertIn('time', log_entry)
|
||||
|
||||
# Do basic data format validation.
|
||||
self.assertEqual(expected_event_name, log_entry['event'])
|
||||
if full_sid:
|
||||
self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
|
||||
else:
|
||||
self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
|
||||
self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
|
||||
|
||||
def readLog(self, log_path):
|
||||
"""Helper function to read log data into a list."""
|
||||
log_data = []
|
||||
with open(log_path, mode='rb') as f:
|
||||
for line in f:
|
||||
log_data.append(json.loads(line))
|
||||
return log_data
|
||||
|
||||
def test_initial_state_with_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
|
||||
self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
|
||||
|
||||
def test_initial_state_no_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
|
||||
# Setup an empty environment dict (no parent sid).
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env={})
|
||||
self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
|
||||
|
||||
def test_version_event(self):
|
||||
"""Test 'version' event data is valid.
|
||||
|
||||
Verify that the 'version' event is written even when no other
|
||||
events are added.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
"""
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
# A log with no added events should only have the version entry.
|
||||
self.assertEqual(len(self._log_data), 1)
|
||||
version_event = self._log_data[0]
|
||||
self.verifyCommonKeys(version_event, expected_event_name='version')
|
||||
# Check for 'version' event specific fields.
|
||||
self.assertIn('evt', version_event)
|
||||
self.assertIn('exe', version_event)
|
||||
|
||||
def test_start_event(self):
|
||||
"""Test and validate 'start' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<start event>
|
||||
"""
|
||||
self._event_log_module.StartEvent()
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
start_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(start_event, expected_event_name='start')
|
||||
# Check for 'start' event specific fields.
|
||||
self.assertIn('argv', start_event)
|
||||
self.assertTrue(isinstance(start_event['argv'], list))
|
||||
|
||||
def test_exit_event_result_none(self):
|
||||
"""Test 'exit' event data is valid when result is None.
|
||||
|
||||
We expect None result to be converted to 0 in the exit event data.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(None)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(exit_event, expected_event_name='exit')
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn('code', exit_event)
|
||||
# 'None' result should convert to 0 (successful) return code.
|
||||
self.assertEqual(exit_event['code'], 0)
|
||||
|
||||
def test_exit_event_result_integer(self):
|
||||
"""Test 'exit' event data is valid when result is an integer.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(2)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(exit_event, expected_event_name='exit')
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn('code', exit_event)
|
||||
self.assertEqual(exit_event['code'], 2)
|
||||
|
||||
# TODO(https://crbug.com/gerrit/13706): Add additional test coverage for
|
||||
# Write() where:
|
||||
# - path=None (using git config call)
|
||||
# - path=<Non-String type> (raises TypeError)
|
||||
# - path=<Non-Directory> (should return None)
|
||||
# - tempfile.NamedTemporaryFile errors with FileExistsError (should return None)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
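The tests above treat the trace2 event log as newline-delimited JSON: one event object per line, each carrying the common sid/thread/time keys plus event-specific fields. A hand-written sample in that shape (field values here are illustrative only):

import json

sample_log = (
    '{"event": "version", "sid": "parent_sid/repo-20200101T000000Z-x",'
    ' "thread": "main", "time": "2020-01-01T00:00:00.000000Z",'
    ' "evt": "2", "exe": "2.8"}\n'
    '{"event": "exit", "sid": "parent_sid/repo-20200101T000000Z-x",'
    ' "thread": "main", "time": "2020-01-01T00:00:01.000000Z", "code": 0}\n'
)
events = [json.loads(line) for line in sample_log.splitlines()]
assert events[0]['event'] == 'version'
assert events[1]['code'] == 0  # a None result is recorded as exit code 0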
|
60
tests/test_hooks.py
Normal file
@@ -0,0 +1,60 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the hooks.py module."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import hooks
|
||||
import unittest
|
||||
|
||||
class RepoHookShebang(unittest.TestCase):
|
||||
"""Check shebang parsing in RepoHook."""
|
||||
|
||||
def test_no_shebang(self):
|
||||
"""Lines w/out shebangs should be rejected."""
|
||||
DATA = (
|
||||
'',
|
||||
'# -*- coding:utf-8 -*-\n',
|
||||
'#\n# foo\n',
|
||||
'# Bad shebang in script\n#!/foo\n'
|
||||
)
|
||||
for data in DATA:
|
||||
self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
|
||||
|
||||
def test_direct_interp(self):
|
||||
"""Lines whose shebang points directly to the interpreter."""
|
||||
DATA = (
|
||||
('#!/foo', '/foo'),
|
||||
('#! /foo', '/foo'),
|
||||
('#!/bin/foo ', '/bin/foo'),
|
||||
('#! /usr/foo ', '/usr/foo'),
|
||||
('#! /usr/foo -args', '/usr/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
|
||||
def test_env_interp(self):
|
||||
"""Lines whose shebang launches through `env`."""
|
||||
DATA = (
|
||||
('#!/usr/bin/env foo', 'foo'),
|
||||
('#!/bin/env foo', 'foo'),
|
||||
('#! /bin/env /bin/foo ', '/bin/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
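Taken together, the three test groups above fully describe the contract of RepoHook._ExtractInterpFromShebang(). One way to satisfy it (an assumed sketch, not the hooks.py implementation):

def extract_interp_from_shebang(data):
  """Return the interpreter named by |data|'s first-line shebang, else None."""
  firstline = data.splitlines()[0] if data else ''
  if not firstline.startswith('#!'):
    return None
  parts = firstline[2:].strip().split()
  if not parts:
    return None
  # '#!/usr/bin/env foo' really means "run foo from PATH".
  if parts[0].endswith('/env') and len(parts) > 1:
    return parts[1]
  return parts[0]

assert extract_interp_from_shebang('#! /usr/foo -args') == '/usr/foo'
assert extract_interp_from_shebang('#!/usr/bin/env foo') == 'foo'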
|
@@ -19,7 +19,10 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import xml.dom.minidom
|
||||
|
||||
import error
|
||||
import manifest_xml
|
||||
@@ -89,3 +92,202 @@ class ManifestValidateFilePaths(unittest.TestCase):
|
||||
error.ManifestInvalidPathError, self.check_both, path, 'a')
|
||||
self.assertRaises(
|
||||
error.ManifestInvalidPathError, self.check_both, 'a', path)
|
||||
|
||||
|
||||
class ValueTests(unittest.TestCase):
|
||||
"""Check utility parsing code."""
|
||||
|
||||
def _get_node(self, text):
|
||||
return xml.dom.minidom.parseString(text).firstChild
|
||||
|
||||
def test_bool_default(self):
|
||||
"""Check XmlBool default handling."""
|
||||
node = self._get_node('<node/>')
|
||||
self.assertIsNone(manifest_xml.XmlBool(node, 'a'))
|
||||
self.assertIsNone(manifest_xml.XmlBool(node, 'a', None))
|
||||
self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))
|
||||
|
||||
node = self._get_node('<node a=""/>')
|
||||
self.assertIsNone(manifest_xml.XmlBool(node, 'a'))
|
||||
|
||||
def test_bool_invalid(self):
|
||||
"""Check XmlBool invalid handling."""
|
||||
node = self._get_node('<node a="moo"/>')
|
||||
self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123))
|
||||
|
||||
def test_bool_true(self):
|
||||
"""Check XmlBool true values."""
|
||||
for value in ('yes', 'true', '1'):
|
||||
node = self._get_node('<node a="%s"/>' % (value,))
|
||||
self.assertTrue(manifest_xml.XmlBool(node, 'a'))
|
||||
|
||||
def test_bool_false(self):
|
||||
"""Check XmlBool false values."""
|
||||
for value in ('no', 'false', '0'):
|
||||
node = self._get_node('<node a="%s"/>' % (value,))
|
||||
self.assertFalse(manifest_xml.XmlBool(node, 'a'))
|
||||
|
||||
def test_int_default(self):
|
||||
"""Check XmlInt default handling."""
|
||||
node = self._get_node('<node/>')
|
||||
self.assertIsNone(manifest_xml.XmlInt(node, 'a'))
|
||||
self.assertIsNone(manifest_xml.XmlInt(node, 'a', None))
|
||||
self.assertEqual(123, manifest_xml.XmlInt(node, 'a', 123))
|
||||
|
||||
node = self._get_node('<node a=""/>')
|
||||
self.assertIsNone(manifest_xml.XmlInt(node, 'a'))
|
||||
|
||||
def test_int_good(self):
|
||||
"""Check XmlInt numeric handling."""
|
||||
for value in (-1, 0, 1, 50000):
|
||||
node = self._get_node('<node a="%s"/>' % (value,))
|
||||
self.assertEqual(value, manifest_xml.XmlInt(node, 'a'))
|
||||
|
||||
def test_int_invalid(self):
|
||||
"""Check XmlInt invalid handling."""
|
||||
with self.assertRaises(error.ManifestParseError):
|
||||
node = self._get_node('<node a="xx"/>')
|
||||
manifest_xml.XmlInt(node, 'a')
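ValueTests spells out the semantics of the new manifest attribute helpers: a missing or empty attribute yields the caller's default, 'yes/true/1' and 'no/false/0' parse as booleans, unknown boolean text falls back to the default, and a malformed integer raises a parse error. A condensed sketch of helpers with those semantics (assumed, not manifest_xml.py itself):

import xml.dom.minidom

def xml_bool(node, attr, default=None):
  value = node.getAttribute(attr).lower()
  if not value:
    return default
  if value in ('yes', 'true', '1'):
    return True
  if value in ('no', 'false', '0'):
    return False
  return default

def xml_int(node, attr, default=None):
  value = node.getAttribute(attr)
  if not value:
    return default
  return int(value)  # the real helper raises ManifestParseError on bad input

node = xml.dom.minidom.parseString('<node a="yes" b="50"/>').firstChild
assert xml_bool(node, 'a') is True
assert xml_int(node, 'b') == 50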
|
||||
|
||||
|
||||
class XmlManifestTests(unittest.TestCase):
|
||||
"""Check manifest processing."""
|
||||
|
||||
def setUp(self):
|
||||
self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
|
||||
self.repodir = os.path.join(self.tempdir, '.repo')
|
||||
self.manifest_dir = os.path.join(self.repodir, 'manifests')
|
||||
self.manifest_file = os.path.join(
|
||||
self.repodir, manifest_xml.MANIFEST_FILE_NAME)
|
||||
self.local_manifest_dir = os.path.join(
|
||||
self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
|
||||
os.mkdir(self.repodir)
|
||||
os.mkdir(self.manifest_dir)
|
||||
|
||||
# The manifest parsing really wants a git repo currently.
|
||||
gitdir = os.path.join(self.repodir, 'manifests.git')
|
||||
os.mkdir(gitdir)
|
||||
with open(os.path.join(gitdir, 'config'), 'w') as fp:
|
||||
fp.write("""[remote "origin"]
|
||||
url = https://localhost:0/manifest
|
||||
""")
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempdir, ignore_errors=True)
|
||||
|
||||
def getXmlManifest(self, data):
|
||||
"""Helper to initialize a manifest for testing."""
|
||||
with open(self.manifest_file, 'w') as fp:
|
||||
fp.write(data)
|
||||
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
|
||||
|
||||
def test_empty(self):
|
||||
"""Parse an 'empty' manifest file."""
|
||||
manifest = self.getXmlManifest(
|
||||
'<?xml version="1.0" encoding="UTF-8"?>'
|
||||
'<manifest></manifest>')
|
||||
self.assertEqual(manifest.remotes, {})
|
||||
self.assertEqual(manifest.projects, [])
|
||||
|
||||
def test_link(self):
|
||||
"""Verify Link handling with new names."""
|
||||
manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
|
||||
with open(os.path.join(self.manifest_dir, 'foo.xml'), 'w') as fp:
|
||||
fp.write('<manifest></manifest>')
|
||||
manifest.Link('foo.xml')
|
||||
with open(self.manifest_file) as fp:
|
||||
self.assertIn('<include name="foo.xml" />', fp.read())
|
||||
|
||||
def test_toxml_empty(self):
|
||||
"""Verify the ToXml() helper."""
|
||||
manifest = self.getXmlManifest(
|
||||
'<?xml version="1.0" encoding="UTF-8"?>'
|
||||
'<manifest></manifest>')
|
||||
self.assertEqual(manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>')
|
||||
|
||||
def test_todict_empty(self):
|
||||
"""Verify the ToDict() helper."""
|
||||
manifest = self.getXmlManifest(
|
||||
'<?xml version="1.0" encoding="UTF-8"?>'
|
||||
'<manifest></manifest>')
|
||||
self.assertEqual(manifest.ToDict(), {})
|
||||
|
||||
def test_repo_hooks(self):
|
||||
"""Check repo-hooks settings."""
|
||||
manifest = self.getXmlManifest("""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="http://localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<project name="repohooks" path="src/repohooks"/>
|
||||
<repo-hooks in-project="repohooks" enabled-list="a, b"/>
|
||||
</manifest>
|
||||
""")
|
||||
self.assertEqual(manifest.repo_hooks_project.name, 'repohooks')
|
||||
self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b'])
|
||||
|
||||
def test_project_group(self):
|
||||
"""Check project group settings."""
|
||||
manifest = self.getXmlManifest("""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="http://localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<project name="test-name" path="test-path"/>
|
||||
<project name="extras" path="path" groups="g1,g2,g1"/>
|
||||
</manifest>
|
||||
""")
|
||||
self.assertEqual(len(manifest.projects), 2)
|
||||
# Ordering isn't guaranteed.
|
||||
result = {
|
||||
manifest.projects[0].name: manifest.projects[0].groups,
|
||||
manifest.projects[1].name: manifest.projects[1].groups,
|
||||
}
|
||||
project = manifest.projects[0]
|
||||
self.assertCountEqual(
|
||||
result['test-name'],
|
||||
['name:test-name', 'all', 'path:test-path'])
|
||||
self.assertCountEqual(
|
||||
result['extras'],
|
||||
['g1', 'g2', 'g1', 'name:extras', 'all', 'path:path'])
|
||||
|
||||
def test_include_levels(self):
|
||||
root_m = os.path.join(self.manifest_dir, 'root.xml')
|
||||
with open(root_m, 'w') as fp:
|
||||
fp.write("""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="http://localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<include name="level1.xml" groups="level1-group" />
|
||||
<project name="root-name1" path="root-path1" />
|
||||
<project name="root-name2" path="root-path2" groups="r2g1,r2g2" />
|
||||
</manifest>
|
||||
""")
|
||||
with open(os.path.join(self.manifest_dir, 'level1.xml'), 'w') as fp:
|
||||
fp.write("""
|
||||
<manifest>
|
||||
<include name="level2.xml" groups="level2-group" />
|
||||
<project name="level1-name1" path="level1-path1" />
|
||||
</manifest>
|
||||
""")
|
||||
with open(os.path.join(self.manifest_dir, 'level2.xml'), 'w') as fp:
|
||||
fp.write("""
|
||||
<manifest>
|
||||
<project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" />
|
||||
</manifest>
|
||||
""")
|
||||
include_m = manifest_xml.XmlManifest(self.repodir, root_m)
|
||||
for proj in include_m.projects:
|
||||
if proj.name == 'root-name1':
|
||||
# Check include group not set on root level proj.
|
||||
self.assertNotIn('level1-group', proj.groups)
|
||||
if proj.name == 'root-name2':
|
||||
# Check root proj group not removed.
|
||||
self.assertIn('r2g1', proj.groups)
|
||||
if proj.name == 'level1-name1':
|
||||
# Check level1 proj has inherited group level 1.
|
||||
self.assertIn('level1-group', proj.groups)
|
||||
if proj.name == 'level2-name1':
|
||||
# Check level2 proj has inherited group levels 1 and 2.
|
||||
self.assertIn('level1-group', proj.groups)
|
||||
self.assertIn('level2-group', proj.groups)
|
||||
# Check level2 proj group not removed.
|
||||
self.assertIn('l2g1', proj.groups)
|
||||
|
@@ -26,6 +26,7 @@ import tempfile
|
||||
import unittest
|
||||
|
||||
import error
|
||||
import git_command
|
||||
import git_config
|
||||
import platform_utils
|
||||
import project
|
||||
@@ -38,51 +39,24 @@ def TempGitTree():
|
||||
# Python 2 support entirely.
|
||||
try:
|
||||
tempdir = tempfile.mkdtemp(prefix='repo-tests')
|
||||
subprocess.check_call(['git', 'init'], cwd=tempdir)
|
||||
|
||||
# Tests need to assume that main is the default branch at init,
|
||||
# which git does not support via config until 2.28.
|
||||
cmd = ['git', 'init']
|
||||
if git_command.git_require((2, 28, 0)):
|
||||
cmd += ['--initial-branch=main']
|
||||
else:
|
||||
# Use template dir for init.
|
||||
templatedir = tempfile.mkdtemp(prefix='.test-template')
|
||||
with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
|
||||
fp.write('ref: refs/heads/main\n')
|
||||
cmd += ['--template=', templatedir]
|
||||
subprocess.check_call(cmd, cwd=tempdir)
|
||||
yield tempdir
|
||||
finally:
|
||||
platform_utils.rmtree(tempdir)
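The version check above exists because `git init --initial-branch` only arrived in git 2.28; older clients get a throwaway template directory whose HEAD already points at refs/heads/main. The same idea in isolation (a sketch; the helper below is hypothetical and not part of the test suite):

import os
import subprocess
import tempfile

import git_command

def init_repo_with_main_branch(tempdir):
  cmd = ['git', 'init']
  if git_command.git_require((2, 28, 0)):
    cmd += ['--initial-branch=main']
  else:
    # Older git: seed HEAD via a template directory instead.
    templatedir = tempfile.mkdtemp(prefix='.test-template')
    with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
      fp.write('ref: refs/heads/main\n')
    # Pass the template as a single '--template=<dir>' argument.
    cmd += ['--template=' + templatedir]
  subprocess.check_call(cmd, cwd=tempdir)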
|
||||
|
||||
|
||||
class RepoHookShebang(unittest.TestCase):
|
||||
"""Check shebang parsing in RepoHook."""
|
||||
|
||||
def test_no_shebang(self):
|
||||
"""Lines w/out shebangs should be rejected."""
|
||||
DATA = (
|
||||
'',
|
||||
'# -*- coding:utf-8 -*-\n',
|
||||
'#\n# foo\n',
|
||||
'# Bad shebang in script\n#!/foo\n'
|
||||
)
|
||||
for data in DATA:
|
||||
self.assertIsNone(project.RepoHook._ExtractInterpFromShebang(data))
|
||||
|
||||
def test_direct_interp(self):
|
||||
"""Lines whose shebang points directly to the interpreter."""
|
||||
DATA = (
|
||||
('#!/foo', '/foo'),
|
||||
('#! /foo', '/foo'),
|
||||
('#!/bin/foo ', '/bin/foo'),
|
||||
('#! /usr/foo ', '/usr/foo'),
|
||||
('#! /usr/foo -args', '/usr/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
|
||||
def test_env_interp(self):
|
||||
"""Lines whose shebang launches through `env`."""
|
||||
DATA = (
|
||||
('#!/usr/bin/env foo', 'foo'),
|
||||
('#!/bin/env foo', 'foo'),
|
||||
('#! /bin/env /bin/foo ', '/bin/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
|
||||
|
||||
class FakeProject(object):
|
||||
"""A fake for Project for basic functionality."""
|
||||
|
||||
@@ -116,7 +90,7 @@ class ReviewableBranchTests(unittest.TestCase):
|
||||
|
||||
# Start off with the normal details.
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'master')
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'main')
|
||||
self.assertEqual('work', rb.name)
|
||||
self.assertEqual(1, len(rb.commits))
|
||||
self.assertIn('Del file', rb.commits[0])
|
||||
@@ -129,9 +103,9 @@ class ReviewableBranchTests(unittest.TestCase):
|
||||
self.assertTrue(rb.date)
|
||||
|
||||
# Now delete the tracking branch!
|
||||
fakeproj.work_git.branch('-D', 'master')
|
||||
fakeproj.work_git.branch('-D', 'main')
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'master')
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'main')
|
||||
self.assertEqual(0, len(rb.commits))
|
||||
self.assertFalse(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
|
43
tests/test_subcmds.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the subcmds module (mostly __init__.py than subcommands)."""
|
||||
|
||||
import unittest
|
||||
|
||||
import subcmds
|
||||
|
||||
|
||||
class AllCommands(unittest.TestCase):
|
||||
"""Check registered all_commands."""
|
||||
|
||||
def test_required_basic(self):
|
||||
"""Basic checking of registered commands."""
|
||||
# NB: We don't test all subcommands as we want to avoid "change detection"
|
||||
# tests, so we just look for the most common/important ones here that are
|
||||
# unlikely to ever change.
|
||||
for cmd in {'cherry-pick', 'help', 'init', 'start', 'sync', 'upload'}:
|
||||
self.assertIn(cmd, subcmds.all_commands)
|
||||
|
||||
def test_naming(self):
|
||||
"""Verify we don't add things that we shouldn't."""
|
||||
for cmd in subcmds.all_commands:
|
||||
# Reject filename suffixes like "help.py".
|
||||
self.assertNotIn('.', cmd)
|
||||
|
||||
# Make sure all '_' were converted to '-'.
|
||||
self.assertNotIn('_', cmd)
|
||||
|
||||
# Reject internal python paths like "__init__".
|
||||
self.assertFalse(cmd.startswith('__'))
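The naming test encodes the convention that command names are derived from module filenames: drop the .py suffix, convert underscores to dashes, and never expose dunder modules. A tiny sketch of that mapping (hypothetical helper, not subcmds/__init__.py itself):

def command_name(module_filename):
  """'cherry_pick.py' -> 'cherry-pick'."""
  name = module_filename
  if name.endswith('.py'):
    name = name[:-len('.py')]
  return name.replace('_', '-')

assert command_name('cherry_pick.py') == 'cherry-pick'
assert command_name('help.py') == 'help'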
|
49
tests/test_subcmds_init.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Unittests for the subcmds/init.py module."""
|
||||
|
||||
import unittest
|
||||
|
||||
from subcmds import init
|
||||
|
||||
|
||||
class InitCommand(unittest.TestCase):
|
||||
"""Check registered all_commands."""
|
||||
|
||||
def setUp(self):
|
||||
self.cmd = init.Init()
|
||||
|
||||
def test_cli_parser_good(self):
|
||||
"""Check valid command line options."""
|
||||
ARGV = (
|
||||
[],
|
||||
)
|
||||
for argv in ARGV:
|
||||
opts, args = self.cmd.OptionParser.parse_args(argv)
|
||||
self.cmd.ValidateOptions(opts, args)
|
||||
|
||||
def test_cli_parser_bad(self):
|
||||
"""Check invalid command line options."""
|
||||
ARGV = (
|
||||
# Too many arguments.
|
||||
['asdf'],
|
||||
|
||||
# Conflicting options.
|
||||
['--mirror', '--archive'],
|
||||
)
|
||||
for argv in ARGV:
|
||||
opts, args = self.cmd.OptionParser.parse_args(argv)
|
||||
with self.assertRaises(SystemExit):
|
||||
self.cmd.ValidateOptions(opts, args)
|
@@ -18,10 +18,16 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import git_command
|
||||
import main
|
||||
import platform_utils
|
||||
from pyversion import is_python3
|
||||
import wrapper
|
||||
|
||||
@@ -34,6 +40,18 @@ else:
|
||||
from StringIO import StringIO
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def TemporaryDirectory():
|
||||
"""Create a new empty git checkout for testing."""
|
||||
# TODO(vapier): Convert this to tempfile.TemporaryDirectory once we drop
|
||||
# Python 2 support entirely.
|
||||
try:
|
||||
tempdir = tempfile.mkdtemp(prefix='repo-tests')
|
||||
yield tempdir
|
||||
finally:
|
||||
platform_utils.rmtree(tempdir)
|
||||
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to tests/fixtures.
|
||||
"""
|
||||
@@ -66,6 +84,16 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
|
||||
self.assertEqual('', stderr.getvalue())
|
||||
self.assertIn('repo launcher version', stdout.getvalue())
|
||||
|
||||
def test_python_constraints(self):
|
||||
"""The launcher should never require newer than main.py."""
|
||||
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
|
||||
wrapper.MIN_PYTHON_VERSION_HARD)
|
||||
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
|
||||
wrapper.MIN_PYTHON_VERSION_SOFT)
|
||||
# Make sure the versions are themselves in sync.
|
||||
self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
|
||||
wrapper.MIN_PYTHON_VERSION_HARD)
|
||||
|
||||
def test_init_parser(self):
|
||||
"""Make sure 'init' GetParser works."""
|
||||
parser = self.wrapper.GetParser(gitc_init=False)
|
||||
@@ -153,5 +181,329 @@ class SetGitTrace2ParentSid(RepoWrapperTestCase):
|
||||
self.assertRegex(value, self.VALID_FORMAT)
|
||||
|
||||
|
||||
class RunCommand(RepoWrapperTestCase):
|
||||
"""Check run_command behavior."""
|
||||
|
||||
def test_capture(self):
|
||||
"""Check capture_output handling."""
|
||||
ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
|
||||
self.assertEqual(ret.stdout, 'hi\n')
|
||||
|
||||
def test_check(self):
|
||||
"""Check check handling."""
|
||||
self.wrapper.run_command(['true'], check=False)
|
||||
self.wrapper.run_command(['true'], check=True)
|
||||
self.wrapper.run_command(['false'], check=False)
|
||||
with self.assertRaises(self.wrapper.RunError):
|
||||
self.wrapper.run_command(['false'], check=True)
|
||||
|
||||
|
||||
class RunGit(RepoWrapperTestCase):
|
||||
"""Check run_git behavior."""
|
||||
|
||||
def test_capture(self):
|
||||
"""Check capture_output handling."""
|
||||
ret = self.wrapper.run_git('--version')
|
||||
self.assertIn('git', ret.stdout)
|
||||
|
||||
def test_check(self):
|
||||
"""Check check handling."""
|
||||
with self.assertRaises(self.wrapper.CloneFailure):
|
||||
self.wrapper.run_git('--version-asdfasdf')
|
||||
self.wrapper.run_git('--version-asdfasdf', check=False)
|
||||
|
||||
|
||||
class ParseGitVersion(RepoWrapperTestCase):
|
||||
"""Check ParseGitVersion behavior."""
|
||||
|
||||
def test_autoload(self):
|
||||
"""Check we can load the version from the live git."""
|
||||
ret = self.wrapper.ParseGitVersion()
|
||||
self.assertIsNotNone(ret)
|
||||
|
||||
def test_bad_ver(self):
|
||||
"""Check handling of bad git versions."""
|
||||
ret = self.wrapper.ParseGitVersion(ver_str='asdf')
|
||||
self.assertIsNone(ret)
|
||||
|
||||
def test_normal_ver(self):
|
||||
"""Check handling of normal git versions."""
|
||||
ret = self.wrapper.ParseGitVersion(ver_str='git version 2.25.1')
|
||||
self.assertEqual(2, ret.major)
|
||||
self.assertEqual(25, ret.minor)
|
||||
self.assertEqual(1, ret.micro)
|
||||
self.assertEqual('2.25.1', ret.full)
|
||||
|
||||
def test_extended_ver(self):
|
||||
"""Check handling of extended distro git versions."""
|
||||
ret = self.wrapper.ParseGitVersion(
|
||||
ver_str='git version 1.30.50.696.g5e7596f4ac-goog')
|
||||
self.assertEqual(1, ret.major)
|
||||
self.assertEqual(30, ret.minor)
|
||||
self.assertEqual(50, ret.micro)
|
||||
self.assertEqual('1.30.50.696.g5e7596f4ac-goog', ret.full)
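These cases define ParseGitVersion()'s contract: accept anything of the form 'git version X.Y.Z[extra]', expose the first three components as integers, keep the full version string, and return None for unparseable input. A compact parser with that behavior (assumed sketch, not wrapper.py):

import collections
import re

GitVersion = collections.namedtuple('GitVersion', 'major minor micro full')

def parse_git_version(ver_str):
  m = re.match(r'^git version ((\d+)\.(\d+)\.(\d+).*)$', ver_str.strip())
  if not m:
    return None
  return GitVersion(int(m.group(2)), int(m.group(3)), int(m.group(4)), m.group(1))

assert parse_git_version('asdf') is None
assert parse_git_version('git version 2.25.1').minor == 25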
|
||||
|
||||
|
||||
class CheckGitVersion(RepoWrapperTestCase):
|
||||
"""Check _CheckGitVersion behavior."""
|
||||
|
||||
def test_unknown(self):
|
||||
"""Unknown versions should abort."""
|
||||
with mock.patch.object(self.wrapper, 'ParseGitVersion', return_value=None):
|
||||
with self.assertRaises(self.wrapper.CloneFailure):
|
||||
self.wrapper._CheckGitVersion()
|
||||
|
||||
def test_old(self):
|
||||
"""Old versions should abort."""
|
||||
with mock.patch.object(
|
||||
self.wrapper, 'ParseGitVersion',
|
||||
return_value=self.wrapper.GitVersion(1, 0, 0, '1.0.0')):
|
||||
with self.assertRaises(self.wrapper.CloneFailure):
|
||||
self.wrapper._CheckGitVersion()
|
||||
|
||||
def test_new(self):
|
||||
"""Newer versions should run fine."""
|
||||
with mock.patch.object(
|
||||
self.wrapper, 'ParseGitVersion',
|
||||
return_value=self.wrapper.GitVersion(100, 0, 0, '100.0.0')):
|
||||
self.wrapper._CheckGitVersion()
|
||||
|
||||
|
||||
class NeedSetupGnuPG(RepoWrapperTestCase):
|
||||
"""Check NeedSetupGnuPG behavior."""
|
||||
|
||||
def test_missing_dir(self):
|
||||
"""The ~/.repoconfig tree doesn't exist yet."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = os.path.join(tempdir, 'foo')
|
||||
self.assertTrue(self.wrapper.NeedSetupGnuPG())
|
||||
|
||||
def test_missing_keyring(self):
|
||||
"""The keyring-version file doesn't exist yet."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = tempdir
|
||||
self.assertTrue(self.wrapper.NeedSetupGnuPG())
|
||||
|
||||
def test_empty_keyring(self):
|
||||
"""The keyring-version file exists, but is empty."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = tempdir
|
||||
with open(os.path.join(tempdir, 'keyring-version'), 'w'):
|
||||
pass
|
||||
self.assertTrue(self.wrapper.NeedSetupGnuPG())
|
||||
|
||||
def test_old_keyring(self):
|
||||
"""The keyring-version file exists, but it's old."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = tempdir
|
||||
with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
|
||||
fp.write('1.0\n')
|
||||
self.assertTrue(self.wrapper.NeedSetupGnuPG())
|
||||
|
||||
def test_new_keyring(self):
|
||||
"""The keyring-version file exists, and is up-to-date."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = tempdir
|
||||
with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp:
|
||||
fp.write('1000.0\n')
|
||||
self.assertFalse(self.wrapper.NeedSetupGnuPG())
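Read together, the NeedSetupGnuPG cases say setup is needed unless the keyring-version file under home_dot_repo exists, is non-empty, and records at least the launcher's KEYRING_VERSION. A minimal check along those lines (assumed sketch; the KEYRING_VERSION value is illustrative):

import os

KEYRING_VERSION = (1, 2)  # illustrative only

def need_setup_gnupg(home_dot_repo):
  kv_path = os.path.join(home_dot_repo, 'keyring-version')
  if not os.path.exists(kv_path):
    return True
  with open(kv_path) as fp:
    data = fp.read().strip()
  if not data:
    return True
  installed = tuple(int(x) for x in data.split('.'))
  return installed < KEYRING_VERSION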
|
||||
|
||||
|
||||
class SetupGnuPG(RepoWrapperTestCase):
|
||||
"""Check SetupGnuPG behavior."""
|
||||
|
||||
def test_full(self):
|
||||
"""Make sure it works completely."""
|
||||
with TemporaryDirectory() as tempdir:
|
||||
self.wrapper.home_dot_repo = tempdir
|
||||
self.wrapper.gpg_dir = os.path.join(self.wrapper.home_dot_repo, 'gnupg')
|
||||
self.assertTrue(self.wrapper.SetupGnuPG(True))
|
||||
with open(os.path.join(tempdir, 'keyring-version'), 'r') as fp:
|
||||
data = fp.read()
|
||||
self.assertEqual('.'.join(str(x) for x in self.wrapper.KEYRING_VERSION),
|
||||
data.strip())
|
||||
|
||||
|
||||
class VerifyRev(RepoWrapperTestCase):
|
||||
"""Check verify_rev behavior."""
|
||||
|
||||
def test_verify_passes(self):
|
||||
"""Check when we have a valid signed tag."""
|
||||
desc_result = self.wrapper.RunResult(0, 'v1.0\n', '')
|
||||
gpg_result = self.wrapper.RunResult(0, '', '')
|
||||
with mock.patch.object(self.wrapper, 'run_git',
|
||||
side_effect=(desc_result, gpg_result)):
|
||||
ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
|
||||
self.assertEqual('v1.0^0', ret)
|
||||
|
||||
def test_unsigned_commit(self):
|
||||
"""Check we fall back to signed tag when we have an unsigned commit."""
|
||||
desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
|
||||
gpg_result = self.wrapper.RunResult(0, '', '')
|
||||
with mock.patch.object(self.wrapper, 'run_git',
|
||||
side_effect=(desc_result, gpg_result)):
|
||||
ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
|
||||
self.assertEqual('v1.0^0', ret)
|
||||
|
||||
def test_verify_fails(self):
|
||||
"""Check we fall back to signed tag when we have an unsigned commit."""
|
||||
desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '')
|
||||
gpg_result = Exception
|
||||
with mock.patch.object(self.wrapper, 'run_git',
|
||||
side_effect=(desc_result, gpg_result)):
|
||||
with self.assertRaises(Exception):
|
||||
self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True)
|
||||
|
||||
|
||||
class GitCheckoutTestCase(RepoWrapperTestCase):
|
||||
"""Tests that use a real/small git checkout."""
|
||||
|
||||
GIT_DIR = None
|
||||
REV_LIST = None
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
# Create a repo to operate on, but do it once per-class.
|
||||
cls.GIT_DIR = tempfile.mkdtemp(prefix='repo-rev-tests')
|
||||
run_git = wrapper.Wrapper().run_git
|
||||
|
||||
remote = os.path.join(cls.GIT_DIR, 'remote')
|
||||
os.mkdir(remote)
|
||||
|
||||
# Tests need to assume that main is the default branch at init,
|
||||
# which git does not support via config until 2.28.
|
||||
if git_command.git_require((2, 28, 0)):
|
||||
initstr = '--initial-branch=main'
|
||||
else:
|
||||
# Use template dir for init.
|
||||
templatedir = tempfile.mkdtemp(prefix='.test-template')
|
||||
with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
|
||||
fp.write('ref: refs/heads/main\n')
|
||||
initstr = '--template=' + templatedir
|
||||
|
||||
run_git('init', initstr, cwd=remote)
|
||||
run_git('commit', '--allow-empty', '-minit', cwd=remote)
|
||||
run_git('branch', 'stable', cwd=remote)
|
||||
run_git('tag', 'v1.0', cwd=remote)
|
||||
run_git('commit', '--allow-empty', '-m2nd commit', cwd=remote)
|
||||
cls.REV_LIST = run_git('rev-list', 'HEAD', cwd=remote).stdout.splitlines()
|
||||
|
||||
run_git('init', cwd=cls.GIT_DIR)
|
||||
run_git('fetch', remote, '+refs/heads/*:refs/remotes/origin/*', cwd=cls.GIT_DIR)
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
if not cls.GIT_DIR:
|
||||
return
|
||||
|
||||
shutil.rmtree(cls.GIT_DIR)
|
||||
|
||||
|
||||
class ResolveRepoRev(GitCheckoutTestCase):
|
||||
"""Check resolve_repo_rev behavior."""
|
||||
|
||||
def test_explicit_branch(self):
|
||||
"""Check refs/heads/branch argument."""
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/stable')
|
||||
self.assertEqual('refs/heads/stable', rrev)
|
||||
self.assertEqual(self.REV_LIST[1], lrev)
|
||||
|
||||
with self.assertRaises(wrapper.CloneFailure):
|
||||
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')
|
||||
|
||||
def test_explicit_tag(self):
|
||||
"""Check refs/tags/tag argument."""
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/v1.0')
|
||||
self.assertEqual('refs/tags/v1.0', rrev)
|
||||
self.assertEqual(self.REV_LIST[1], lrev)
|
||||
|
||||
with self.assertRaises(wrapper.CloneFailure):
|
||||
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')
|
||||
|
||||
def test_branch_name(self):
|
||||
"""Check branch argument."""
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'stable')
|
||||
self.assertEqual('refs/heads/stable', rrev)
|
||||
self.assertEqual(self.REV_LIST[1], lrev)
|
||||
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'main')
|
||||
self.assertEqual('refs/heads/main', rrev)
|
||||
self.assertEqual(self.REV_LIST[0], lrev)
|
||||
|
||||
def test_tag_name(self):
|
||||
"""Check tag argument."""
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'v1.0')
|
||||
self.assertEqual('refs/tags/v1.0', rrev)
|
||||
self.assertEqual(self.REV_LIST[1], lrev)
|
||||
|
||||
def test_full_commit(self):
|
||||
"""Check specific commit argument."""
|
||||
commit = self.REV_LIST[0]
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
|
||||
self.assertEqual(commit, rrev)
|
||||
self.assertEqual(commit, lrev)
|
||||
|
||||
def test_partial_commit(self):
|
||||
"""Check specific (partial) commit argument."""
|
||||
commit = self.REV_LIST[0][0:20]
|
||||
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
|
||||
self.assertEqual(self.REV_LIST[0], rrev)
|
||||
self.assertEqual(self.REV_LIST[0], lrev)
|
||||
|
||||
def test_unknown(self):
|
||||
"""Check unknown ref/commit argument."""
|
||||
with self.assertRaises(wrapper.CloneFailure):
|
||||
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')
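The ResolveRepoRev cases pin down the resolution order: an explicit refs/... name is used as-is, a bare name is tried as a branch and then as a tag, and anything else must resolve as a (possibly abbreviated) commit, with a failure raised otherwise. A rough standalone equivalent using git plumbing (a sketch under those assumptions, not wrapper.py's code):

import subprocess

def resolve_repo_rev(cwd, committish):
  """Return (remote-side ref or commit, local sha) for |committish|."""
  candidates = [committish]
  if not committish.startswith('refs/'):
    candidates = ['refs/heads/%s' % committish,
                  'refs/tags/%s' % committish,
                  committish]
  for rrev in candidates:
    # Branches were fetched into refs/remotes/origin/*, so probe there.
    probe = rrev.replace('refs/heads/', 'refs/remotes/origin/')
    result = subprocess.run(
        ['git', 'rev-parse', '--verify', '--quiet', '%s^{commit}' % probe],
        cwd=cwd, stdout=subprocess.PIPE, universal_newlines=True)
    if result.returncode == 0:
      # (The real helper additionally expands abbreviated commits to the full sha.)
      return rrev, result.stdout.strip()
  raise ValueError('unknown rev: %s' % committish)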
|
||||
|
||||
|
||||
class CheckRepoVerify(RepoWrapperTestCase):
|
||||
"""Check check_repo_verify behavior."""
|
||||
|
||||
def test_no_verify(self):
|
||||
"""Always fail with --no-repo-verify."""
|
||||
self.assertFalse(self.wrapper.check_repo_verify(False))
|
||||
|
||||
def test_gpg_initialized(self):
|
||||
"""Should pass if gpg is setup already."""
|
||||
with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=False):
|
||||
self.assertTrue(self.wrapper.check_repo_verify(True))
|
||||
|
||||
def test_need_gpg_setup(self):
|
||||
"""Should pass/fail based on gpg setup."""
|
||||
with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=True):
|
||||
with mock.patch.object(self.wrapper, 'SetupGnuPG') as m:
|
||||
m.return_value = True
|
||||
self.assertTrue(self.wrapper.check_repo_verify(True))
|
||||
|
||||
m.return_value = False
|
||||
self.assertFalse(self.wrapper.check_repo_verify(True))
|
||||
|
||||
|
||||
class CheckRepoRev(GitCheckoutTestCase):
|
||||
"""Check check_repo_rev behavior."""
|
||||
|
||||
def test_verify_works(self):
|
||||
"""Should pass when verification passes."""
|
||||
with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
|
||||
with mock.patch.object(self.wrapper, 'verify_rev', return_value='12345'):
|
||||
rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')
|
||||
self.assertEqual('refs/heads/stable', rrev)
|
||||
self.assertEqual('12345', lrev)
|
||||
|
||||
def test_verify_fails(self):
|
||||
"""Should fail when verification fails."""
|
||||
with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True):
|
||||
with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
|
||||
with self.assertRaises(Exception):
|
||||
self.wrapper.check_repo_rev(self.GIT_DIR, 'stable')
|
||||
|
||||
def test_verify_ignore(self):
|
||||
"""Should pass when verification is disabled."""
|
||||
with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception):
|
||||
rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
|
||||
self.assertEqual('refs/heads/stable', rrev)
|
||||
self.assertEqual(self.REV_LIST[1], lrev)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
8
tox.ini
@@ -15,11 +15,10 @@
|
||||
# https://tox.readthedocs.io/
|
||||
|
||||
[tox]
|
||||
envlist = py27, py36, py37, py38
|
||||
envlist = py36, py37, py38
|
||||
|
||||
[gh-actions]
|
||||
python =
|
||||
2.7: py27
|
||||
3.6: py36
|
||||
3.7: py37
|
||||
3.8: py38
|
||||
@@ -31,8 +30,3 @@ setenv =
|
||||
GIT_AUTHOR_NAME = Repo test author
|
||||
GIT_COMMITTER_NAME = Repo test committer
|
||||
EMAIL = repo@gerrit.nodomain
|
||||
|
||||
[testenv:py27]
|
||||
deps =
|
||||
mock
|
||||
pytest
|
||||
|