mirror of https://gerrit.googlesource.com/git-repo
synced 2025-07-02 20:17:19 +00:00

Compare commits (61 commits):

7fa149b47a
a56e0e17e2
3ed84466f4
48067714ec
69427da8c9
dccf38e34f
7f44d366d0
2aa5d32d70
016a25447f
7eab0eedf2
7e3b65beb7
c3d61ec252
78e82ec78e
37ae75f27d
7438aef1ca
e641281d14
035f22abec
e0728a5ecd
d98f393524
0324e43242
8d25584f69
0e4f1e7fba
e815286492
0ab6b11688
a621254b26
f159ce0f9e
802cd0c601
100a214315
8051cdb629
43549d8d08
55b7125d6a
d793553804
ea5239ddd9
1b8714937c
50a2c0e368
35af2f8daf
e287fa760b
3593a10643
003684b6e5
0297f8312c
7b3afcab7a
eda6b1ead7
4364a79088
a98a5ebc6d
f8d342beac
6d2e8c8237
a24185ee6c
d686365449
d3cadf1856
fa90f7a36f
bee4efb874
f8af33c9f0
ed25be569e
afd767103e
b240d28bc0
47020ba249
5ed8c63942
24c6314fca
7efab539f0
a3ff64cae5
776138a938
.github/workflows/flake8-postsubmit.yml (vendored, new file, +23 lines)

@@ -0,0 +1,23 @@
+# GitHub actions workflow.
+# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
+# https://github.com/marketplace/actions/python-flake8
+
+name: Flake8
+
+on:
+  push:
+    branches: [main]
+
+jobs:
+  lint:
+    name: Python Lint
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v3
+    - uses: actions/setup-python@v4
+      with:
+        python-version: "3.9"
+    - name: Run flake8
+      uses: julianwachholz/flake8-action@v2
+      with:
+        checkName: "Python Lint"
.github/workflows/test-ci.yml (vendored, 8 lines changed)

@@ -14,18 +14,18 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
     runs-on: ${{ matrix.os }}

     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        pip install tox tox-gh-actions
+        python -m pip install tox tox-gh-actions
     - name: Test with tox
       run: tox
@@ -8,7 +8,7 @@ that you can put anywhere in your path.

 * Homepage: <https://gerrit.googlesource.com/git-repo/>
 * Mailing list: [repo-discuss on Google Groups][repo-discuss]
-* Bug reports: <https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo>
+* Bug reports: <https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo>
 * Source: <https://gerrit.googlesource.com/git-repo/>
 * Overview: <https://source.android.com/source/developing.html>
 * Docs: <https://source.android.com/source/using-repo.html>

@@ -51,5 +51,5 @@ $ chmod a+rx ~/.bin/repo

 [new-bug]: https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue
-[issue tracker]: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
+[issue tracker]: https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo
 [repo-discuss]: https://groups.google.com/forum/#!forum/repo-discuss

@@ -320,7 +320,8 @@ class Command(object):

     for arg in args:
       # We have to filter by manifest groups in case the requested project is
      # checked out multiple times or differently based on them.
-      projects = [project for project in manifest.GetProjectsWithName(
-          arg, all_manifests=all_manifests)
-                  if project.MatchesGroups(groups)]
+      projects = [project
+                  for project in manifest.GetProjectsWithName(
+                      arg, all_manifests=all_manifests)
+                  if project.MatchesGroups(groups)]

@@ -222,27 +222,30 @@ The `[remote]` settings are automatically populated/updated from the manifest.

 The `[branch]` settings are updated by `repo start` and `git branch`.

 | Setting | Subcommands | Use/Meaning |
-|-------------------------------|---------------|-------------|
+|---------------------------------------|---------------|-------------|
 | review.\<url\>.autocopy | upload | Automatically add to `--cc=<value>` |
 | review.\<url\>.autoreviewer | upload | Automatically add to `--reviewers=<value>` |
 | review.\<url\>.autoupload | upload | Automatically answer "yes" or "no" to all prompts |
 | review.\<url\>.uploadhashtags | upload | Automatically add to `--hashtag=<value>` |
 | review.\<url\>.uploadlabels | upload | Automatically add to `--label=<value>` |
 | review.\<url\>.uploadnotify | upload | [Notify setting][upload-notify] to use |
 | review.\<url\>.uploadtopic | upload | Default [topic] to use |
+| review.\<url\>.uploadwarningthreshold | upload | Warn when attempting to upload more than this many CLs |
 | review.\<url\>.username | upload | Override username with `ssh://` review URIs |
 | remote.\<remote\>.fetch | sync | Set of refs to fetch |
 | remote.\<remote\>.projectname | \<network\> | The name of the project as it exists in Gerrit review |
 | remote.\<remote\>.pushurl | upload | The base URI for pushing CLs |
 | remote.\<remote\>.review | upload | The URI of the Gerrit review server |
 | remote.\<remote\>.url | sync & upload | The URI of the git project to fetch |
 | branch.\<branch\>.merge | sync & upload | The branch to merge & upload & track |
 | branch.\<branch\>.remote | sync & upload | The remote to track |

 ## ~/ dotconfig layout

-Repo will create & maintain a few files in the user's home directory.
+Repo will create & maintain a few files under the `.repoconfig/` directory.
+This is placed in the user's home directory by default but can be changed by
+setting `REPO_CONFIG_DIR`.

 * `.repoconfig/`: Repo's per-user directory for all random config files/state.
 * `.repoconfig/config`: Per-user settings using [git-config] file format.
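
The new `REPO_CONFIG_DIR` override described above can be made concrete with a
minimal sketch of the path lookup; it mirrors the `RepoConfig._getUserConfig()`
helper that appears later in this listing, and the wrapper function name below
is only illustrative, not upstream code:

    import os

    def repo_user_config_path():
      # Default to the home directory unless REPO_CONFIG_DIR points elsewhere.
      repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
      return os.path.join(repo_config_dir, '.repoconfig/config')

    # e.g. REPO_CONFIG_DIR=/tmp/cfg  ->  /tmp/cfg/.repoconfig/config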
@@ -143,23 +143,14 @@ internal processes for accessing the restricted keys.

 ***

 ```sh
-# Set the gpg key directory.
-$ export GNUPGHOME=~/.gnupg/repo/
-
-# Verify the listed key is “Repo Maintainer”.
-$ gpg -K
-
-# Pick whatever branch or commit you want to tag.
-$ r=main
-
 # Pick the new version.
-$ t=1.12.10
+$ t=v2.30

-# Create the signed tag.
-$ git tag -s v$t -u "Repo Maintainer <repo@android.kernel.org>" -m "repo $t" $r
+# Create a new signed tag with the current HEAD.
+$ ./release/sign-tag.py $t

 # Verify the signed tag.
-$ git show v$t
+$ git show $t
 ```

 ### Push the new release

@@ -168,11 +159,11 @@ Once you're ready to make the release available to everyone, push it to the
 `stable` branch.

 Make sure you never push the tag itself to the stable branch!
-Only push the commit -- notice the use of `$t` and `$r` below.
+Only push the commit -- note the use of `^0` below.

 ```sh
-$ git push https://gerrit-review.googlesource.com/git-repo v$t
-$ git push https://gerrit-review.googlesource.com/git-repo $r:stable
+$ git push https://gerrit-review.googlesource.com/git-repo $t
+$ git push https://gerrit-review.googlesource.com/git-repo $t^0:stable
 ```

 If something goes horribly wrong, you can force push the previous version to the

@@ -195,7 +186,9 @@ You can create a short changelog using the command:

 ```sh
 # If you haven't pushed to the stable branch yet, you can use origin/stable.
 # If you have pushed, change origin/stable to the previous release tag.
-$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
+# This assumes "main" is the current tagged release. If it's newer, change it
+# to the current release tag too.
+$ git log --format="%h (%aN) %s" --no-merges origin/stable..main
 ```

 ## Project References

git_command.py (132 lines changed)

@@ -16,6 +16,7 @@ import functools
 import os
 import sys
 import subprocess
+from typing import Any, Optional

 from error import GitError
 from git_refs import HEAD

@@ -157,6 +158,52 @@ def git_require(min_version, fail=False, msg=''):
   return False


+def _build_env(
+    _kwargs_only=(),
+    bare: Optional[bool] = False,
+    disable_editor: Optional[bool] = False,
+    ssh_proxy: Optional[Any] = None,
+    gitdir: Optional[str] = None,
+    objdir: Optional[str] = None
+):
+  """Constucts an env dict for command execution."""
+
+  assert _kwargs_only == (), '_build_env only accepts keyword arguments.'
+
+  env = GitCommand._GetBasicEnv()
+
+  if disable_editor:
+    env['GIT_EDITOR'] = ':'
+  if ssh_proxy:
+    env['REPO_SSH_SOCK'] = ssh_proxy.sock()
+    env['GIT_SSH'] = ssh_proxy.proxy
+    env['GIT_SSH_VARIANT'] = 'ssh'
+  if 'http_proxy' in env and 'darwin' == sys.platform:
+    s = "'http.proxy=%s'" % (env['http_proxy'],)
+    p = env.get('GIT_CONFIG_PARAMETERS')
+    if p is not None:
+      s = p + ' ' + s
+    env['GIT_CONFIG_PARAMETERS'] = s
+  if 'GIT_ALLOW_PROTOCOL' not in env:
+    env['GIT_ALLOW_PROTOCOL'] = (
+        'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
+  env['GIT_HTTP_USER_AGENT'] = user_agent.git
+
+  if objdir:
+    # Set to the place we want to save the objects.
+    env['GIT_OBJECT_DIRECTORY'] = objdir
+
+    alt_objects = os.path.join(gitdir, 'objects') if gitdir else None
+    if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(objdir):
+      # Allow git to search the original place in case of local or unique refs
+      # that git will attempt to resolve even if we aren't fetching them.
+      env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_objects
+  if bare and gitdir is not None:
+    env[GIT_DIR] = gitdir
+
+  return env
+
+
 class GitCommand(object):
   """Wrapper around a single git invocation."""

@@ -173,30 +220,13 @@ class GitCommand(object):
                cwd=None,
                gitdir=None,
                objdir=None):
-    env = self._GetBasicEnv()
-
-    if disable_editor:
-      env['GIT_EDITOR'] = ':'
-    if ssh_proxy:
-      env['REPO_SSH_SOCK'] = ssh_proxy.sock()
-      env['GIT_SSH'] = ssh_proxy.proxy
-      env['GIT_SSH_VARIANT'] = 'ssh'
-    if 'http_proxy' in env and 'darwin' == sys.platform:
-      s = "'http.proxy=%s'" % (env['http_proxy'],)
-      p = env.get('GIT_CONFIG_PARAMETERS')
-      if p is not None:
-        s = p + ' ' + s
-      env['GIT_CONFIG_PARAMETERS'] = s
-    if 'GIT_ALLOW_PROTOCOL' not in env:
-      env['GIT_ALLOW_PROTOCOL'] = (
-          'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
-    env['GIT_HTTP_USER_AGENT'] = user_agent.git
-
     if project:
       if not cwd:
         cwd = project.worktree
       if not gitdir:
         gitdir = project.gitdir

     # Git on Windows wants its paths only using / for reliability.
     if platform_utils.isWindows():
       if objdir:

@@ -204,18 +234,16 @@ class GitCommand(object):
       if gitdir:
         gitdir = gitdir.replace('\\', '/')

-    if objdir:
-      # Set to the place we want to save the objects.
-      env['GIT_OBJECT_DIRECTORY'] = objdir
-      if gitdir:
-        # Allow git to search the original place in case of local or unique refs
-        # that git will attempt to resolve even if we aren't fetching them.
-        env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = gitdir + '/objects'
+    env = _build_env(
+        disable_editor=disable_editor,
+        ssh_proxy=ssh_proxy,
+        objdir=objdir,
+        gitdir=gitdir,
+        bare=bare,
+    )

     command = [GIT]
     if bare:
-      if gitdir:
-        env[GIT_DIR] = gitdir
       cwd = None
     command.append(cmdv[0])
     # Need to use the --progress flag for fetch/clone so output will be

@@ -230,12 +258,11 @@ class GitCommand(object):
     stderr = (subprocess.STDOUT if merge_output else
               (subprocess.PIPE if capture_stderr else None))

+    dbg = ''
     if IsTrace():
       global LAST_CWD
       global LAST_GITDIR

-      dbg = ''
-
       if cwd and LAST_CWD != cwd:
         if LAST_GITDIR or LAST_CWD:
           dbg += '\n'

@@ -251,7 +278,8 @@ class GitCommand(object):
       if 'GIT_OBJECT_DIRECTORY' in env:
         dbg += ': export GIT_OBJECT_DIRECTORY=%s\n' % env['GIT_OBJECT_DIRECTORY']
       if 'GIT_ALTERNATE_OBJECT_DIRECTORIES' in env:
-        dbg += ': export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n' % env['GIT_ALTERNATE_OBJECT_DIRECTORIES']
+        dbg += ': export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n' % (
+            env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])

       dbg += ': '
       dbg += ' '.join(command)

@@ -263,31 +291,31 @@ class GitCommand(object):
         dbg += ' 2>|'
       elif stderr == subprocess.STDOUT:
         dbg += ' 2>&1'
-      Trace('%s', dbg)

-    try:
-      p = subprocess.Popen(command,
-                           cwd=cwd,
-                           env=env,
-                           encoding='utf-8',
-                           errors='backslashreplace',
-                           stdin=stdin,
-                           stdout=stdout,
-                           stderr=stderr)
-    except Exception as e:
-      raise GitError('%s: %s' % (command[1], e))
-
-    if ssh_proxy:
-      ssh_proxy.add_client(p)
-
-    self.process = p
-
-    try:
-      self.stdout, self.stderr = p.communicate(input=input)
-    finally:
-      if ssh_proxy:
-        ssh_proxy.remove_client(p)
-    self.rc = p.wait()
+    with Trace('git command %s %s with debug: %s', LAST_GITDIR, command, dbg):
+      try:
+        p = subprocess.Popen(command,
+                             cwd=cwd,
+                             env=env,
+                             encoding='utf-8',
+                             errors='backslashreplace',
+                             stdin=stdin,
+                             stdout=stdout,
+                             stderr=stderr)
+      except Exception as e:
+        raise GitError('%s: %s' % (command[1], e))
+
+      if ssh_proxy:
+        ssh_proxy.add_client(p)
+
+      self.process = p
+
+      try:
+        self.stdout, self.stderr = p.communicate(input=input)
+      finally:
+        if ssh_proxy:
+          ssh_proxy.remove_client(p)
+      self.rc = p.wait()

   @staticmethod
   def _GetBasicEnv():
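
The refactor above concentrates all environment construction in the keyword-only
`_build_env()` helper, so the behaviour can be exercised in isolation. A rough
illustration of what it produces follows; the paths are made up, and it assumes
the module-level `user_agent` and `GIT_DIR` values that `_build_env()` references:

    # Illustrative only: these paths do not refer to a real checkout.
    env = _build_env(
        objdir='/src/.repo/project-objects/foo.git/objects',
        gitdir='/src/.repo/projects/foo.git',
    )
    # env['GIT_OBJECT_DIRECTORY'] is the objdir passed in.
    # env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] is '<gitdir>/objects', and is set
    # only when that path differs from objdir, so git can still resolve refs
    # that live in the original object store.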
@@ -22,6 +22,7 @@ import re
 import ssl
 import subprocess
 import sys
+from typing import Union
 import urllib.error
 import urllib.request

@@ -68,8 +69,6 @@ def _key(name):
 class GitConfig(object):
   _ForUser = None

-  _USER_CONFIG = '~/.gitconfig'
-
   _ForSystem = None
   _SYSTEM_CONFIG = '/etc/gitconfig'

@@ -82,9 +81,13 @@ class GitConfig(object):
   @classmethod
   def ForUser(cls):
     if cls._ForUser is None:
-      cls._ForUser = cls(configfile=os.path.expanduser(cls._USER_CONFIG))
+      cls._ForUser = cls(configfile=cls._getUserConfig())
     return cls._ForUser

+  @staticmethod
+  def _getUserConfig():
+    return os.path.expanduser('~/.gitconfig')
+
   @classmethod
   def ForRepository(cls, gitdir, defaults=None):
     return cls(configfile=os.path.join(gitdir, 'config'),

@@ -117,7 +120,7 @@ class GitConfig(object):
       return self.defaults.Has(name, include_defaults=True)
     return False

-  def GetInt(self, name):
+  def GetInt(self, name: str) -> Union[int, None]:
     """Returns an integer from the configuration file.

     This follows the git config syntax.

@@ -126,7 +129,7 @@ class GitConfig(object):
       name: The key to lookup.

     Returns:
-      None if the value was not defined, or is not a boolean.
+      None if the value was not defined, or is not an int.
       Otherwise, the number itself.
     """
     v = self.GetString(name)

@@ -152,6 +155,9 @@ class GitConfig(object):
     try:
       return int(v, base=base) * mult
     except ValueError:
+      print(
+          f"warning: expected {name} to represent an integer, got {v} instead",
+          file=sys.stderr)
       return None

   def DumpConfigDict(self):

@@ -169,7 +175,7 @@ class GitConfig(object):
       config_dict[key] = self.GetString(key)
     return config_dict

-  def GetBoolean(self, name):
+  def GetBoolean(self, name: str) -> Union[str, None]:
     """Returns a boolean from the configuration file.

     None : The value was not defined, or is not a boolean.
     True : The value was set to true or yes.

@@ -183,6 +189,8 @@ class GitConfig(object):
       return True
     if v in ('false', 'no'):
       return False
+    print(f"warning: expected {name} to represent a boolean, got {v} instead",
+          file=sys.stderr)
     return None

   def SetBoolean(self, name, value):

@@ -191,7 +199,7 @@ class GitConfig(object):
     value = 'true' if value else 'false'
     self.SetString(name, value)

-  def GetString(self, name, all_keys=False):
+  def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
     """Get the first value for a key, or None if it is not defined.

     This configuration file is used first, if the key is not

@@ -219,8 +227,8 @@ class GitConfig(object):
     """Set the value(s) for a key.
     Only this configuration file is modified.

-    The supplied value should be either a string,
-    or a list of strings (to store multiple values).
+    The supplied value should be either a string, or a list of strings (to
+    store multiple values), or None (to delete the key).
     """
     key = _key(name)

@@ -349,9 +357,9 @@ class GitConfig(object):
     except OSError:
       return None
     try:
-      Trace(': parsing %s', self.file)
-      with open(self._json) as fd:
-        return json.load(fd)
+      with Trace(': parsing %s', self.file):
+        with open(self._json) as fd:
+          return json.load(fd)
     except (IOError, ValueError):
       platform_utils.remove(self._json, missing_ok=True)
       return None

@@ -409,7 +417,10 @@ class GitConfig(object):
 class RepoConfig(GitConfig):
   """User settings for repo itself."""

-  _USER_CONFIG = '~/.repoconfig/config'
+  @staticmethod
+  def _getUserConfig():
+    repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
+    return os.path.join(repo_config_dir, '.repoconfig/config')


 class RefSpec(object):
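
One practical effect of the `GetInt`/`GetBoolean` changes above is that a
malformed value now produces a warning on stderr instead of failing silently.
An illustrative session (the key and the value are invented for the example):

    config = GitConfig.ForUser()
    # Suppose ~/.gitconfig contains:  [repo]  depth = banana
    value = config.GetInt('repo.depth')
    # stderr: warning: expected repo.depth to represent an integer, got banana instead
    # value is still None, as before; only the diagnostic is new.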
git_refs.py (51 lines changed)

@@ -67,38 +67,37 @@ class GitRefs(object):
     self._LoadAll()

   def _NeedUpdate(self):
-    Trace(': scan refs %s', self._gitdir)
-
-    for name, mtime in self._mtime.items():
-      try:
-        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
-          return True
-      except OSError:
-        return True
-    return False
+    with Trace(': scan refs %s', self._gitdir):
+      for name, mtime in self._mtime.items():
+        try:
+          if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
+            return True
+        except OSError:
+          return True
+      return False

   def _LoadAll(self):
-    Trace(': load refs %s', self._gitdir)
-
-    self._phyref = {}
-    self._symref = {}
-    self._mtime = {}
-
-    self._ReadPackedRefs()
-    self._ReadLoose('refs/')
-    self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
-
-    scan = self._symref
-    attempts = 0
-    while scan and attempts < 5:
-      scan_next = {}
-      for name, dest in scan.items():
-        if dest in self._phyref:
-          self._phyref[name] = self._phyref[dest]
-        else:
-          scan_next[name] = dest
-      scan = scan_next
-      attempts += 1
+    with Trace(': load refs %s', self._gitdir):
+      self._phyref = {}
+      self._symref = {}
+      self._mtime = {}
+
+      self._ReadPackedRefs()
+      self._ReadLoose('refs/')
+      self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
+
+      scan = self._symref
+      attempts = 0
+      while scan and attempts < 5:
+        scan_next = {}
+        for name, dest in scan.items():
+          if dest in self._phyref:
+            self._phyref[name] = self._phyref[dest]
+          else:
+            scan_next[name] = dest
+        scan = scan_next
+        attempts += 1

   def _ReadPackedRefs(self):
     path = os.path.join(self._gitdir, 'packed-refs')
@@ -31,7 +31,7 @@ from typing import NamedTuple

 from git_command import git_require, GitCommand
 from git_config import RepoConfig
-from git_refs import R_HEADS
+from git_refs import GitRefs

 _SUPERPROJECT_GIT_NAME = 'superproject.git'
 _SUPERPROJECT_MANIFEST_NAME = 'superproject_override.xml'

@@ -125,23 +125,24 @@ class Superproject(object):
     """Returns the manifest path if the path exists or None."""
     return self._manifest_path if os.path.exists(self._manifest_path) else None

-  def _LogMessage(self, message):
+  def _LogMessage(self, fmt, *inputs):
     """Logs message to stderr and _git_event_log."""
+    message = f'{self._LogMessagePrefix()} {fmt.format(*inputs)}'
     if self._print_messages:
       print(message, file=sys.stderr)
-    self._git_event_log.ErrorEvent(message, f'{message}')
+    self._git_event_log.ErrorEvent(message, fmt)

   def _LogMessagePrefix(self):
     """Returns the prefix string to be logged in each log message"""
     return f'repo superproject branch: {self._branch} url: {self._remote_url}'

-  def _LogError(self, message):
+  def _LogError(self, fmt, *inputs):
     """Logs error message to stderr and _git_event_log."""
-    self._LogMessage(f'{self._LogMessagePrefix()} error: {message}')
+    self._LogMessage(f'error: {fmt}', *inputs)

-  def _LogWarning(self, message):
+  def _LogWarning(self, fmt, *inputs):
     """Logs warning message to stderr and _git_event_log."""
-    self._LogMessage(f'{self._LogMessagePrefix()} warning: {message}')
+    self._LogMessage(f'warning: {fmt}', *inputs)

   def _Init(self):
     """Sets up a local Git repository to get a copy of a superproject.

@@ -162,8 +163,8 @@ class Superproject(object):
                    capture_stderr=True)
     retval = p.Wait()
     if retval:
-      self._LogWarning(f'git init call failed, command: git {cmd}, '
-                       f'return code: {retval}, stderr: {p.stderr}')
+      self._LogWarning('git init call failed, command: git {}, '
+                       'return code: {}, stderr: {}', cmd, retval, p.stderr)
       return False
     return True

@@ -174,13 +175,23 @@ class Superproject(object):
       True if fetch is successful, or False.
     """
     if not os.path.exists(self._work_git):
-      self._LogWarning(f'git fetch missing directory: {self._work_git}')
+      self._LogWarning('git fetch missing directory: {}', self._work_git)
       return False
     if not git_require((2, 28, 0)):
       self._LogWarning('superproject requires a git version 2.28 or later')
       return False
     cmd = ['fetch', self._remote_url, '--depth', '1', '--force', '--no-tags',
            '--filter', 'blob:none']
+
+    # Check if there is a local ref that we can pass to --negotiation-tip.
+    # If this is the first fetch, it does not exist yet.
+    # We use --negotiation-tip to speed up the fetch. Superproject branches do
+    # not share commits. So this lets git know it only needs to send commits
+    # reachable from the specified local refs.
+    rev_commit = GitRefs(self._work_git).get(f'refs/heads/{self.revision}')
+    if rev_commit:
+      cmd.extend(['--negotiation-tip', rev_commit])
+
     if self._branch:
       cmd += [self._branch + ':' + self._branch]
     p = GitCommand(None,

@@ -190,8 +201,8 @@ class Superproject(object):
                    capture_stderr=True)
     retval = p.Wait()
     if retval:
-      self._LogWarning(f'git fetch call failed, command: git {cmd}, '
-                       f'return code: {retval}, stderr: {p.stderr}')
+      self._LogWarning('git fetch call failed, command: git {}, '
+                       'return code: {}, stderr: {}', cmd, retval, p.stderr)
       return False
     return True

@@ -204,7 +215,7 @@ class Superproject(object):
       data: data returned from 'git ls-tree ...' instead of None.
     """
     if not os.path.exists(self._work_git):
-      self._LogWarning(f'git ls-tree missing directory: {self._work_git}')
+      self._LogWarning('git ls-tree missing directory: {}', self._work_git)
       return None
     data = None
     branch = 'HEAD' if not self._branch else self._branch

@@ -219,8 +230,8 @@ class Superproject(object):
     if retval == 0:
       data = p.stdout
     else:
-      self._LogWarning(f'git ls-tree call failed, command: git {cmd}, '
-                       f'return code: {retval}, stderr: {p.stderr}')
+      self._LogWarning('git ls-tree call failed, command: git {}, '
+                       'return code: {}, stderr: {}', cmd, retval, p.stderr)
     return data

   def Sync(self, git_event_log):

@@ -234,16 +245,16 @@ class Superproject(object):
     """
     self._git_event_log = git_event_log
     if not self._manifest.superproject:
-      self._LogWarning(f'superproject tag is not defined in manifest: '
-                       f'{self._manifest.manifestFile}')
+      self._LogWarning('superproject tag is not defined in manifest: {}',
+                       self._manifest.manifestFile)
       return SyncResult(False, False)

     _PrintBetaNotice()

     should_exit = True
     if not self._remote_url:
-      self._LogWarning(f'superproject URL is not defined in manifest: '
-                       f'{self._manifest.manifestFile}')
+      self._LogWarning('superproject URL is not defined in manifest: {}',
+                       self._manifest.manifestFile)
       return SyncResult(False, should_exit)

     if not self._Init():

@@ -266,8 +277,8 @@ class Superproject(object):

     data = self._LsTree()
     if not data:
-      self._LogWarning(f'git ls-tree failed to return data for manifest: '
-                       f'{self._manifest.manifestFile}')
+      self._LogWarning('git ls-tree failed to return data for manifest: {}',
+                       self._manifest.manifestFile)
       return CommitIdsResult(None, True)

     # Parse lines like the following to select lines starting with '160000' and

@@ -293,7 +304,7 @@ class Superproject(object):
       manifest_path: Path name of the file into which manifest is written instead of None.
     """
     if not os.path.exists(self._superproject_path):
-      self._LogWarning(f'missing superproject directory: {self._superproject_path}')
+      self._LogWarning('missing superproject directory: {}', self._superproject_path)
       return None
     manifest_str = self._manifest.ToXml(groups=self._manifest.GetGroupsStr(),
                                         omit_local=True).toxml()

@@ -302,7 +313,8 @@ class Superproject(object):
       with open(manifest_path, 'w', encoding='utf-8') as fp:
         fp.write(manifest_str)
     except IOError as e:
-      self._LogError(f'cannot write manifest to : {manifest_path} {e}')
+      self._LogError('cannot write manifest to : {} {}',
+                     manifest_path, e)
       return None
     return manifest_path

@@ -354,8 +366,9 @@ class Superproject(object):
     # If superproject doesn't have a commit id for a project, then report an
     # error event and continue as if do not use superproject is specified.
     if projects_missing_commit_ids:
-      self._LogWarning(f'please file a bug using {self._manifest.contactinfo.bugurl} '
-                       f'to report missing commit_ids for: {projects_missing_commit_ids}')
+      self._LogWarning('please file a bug using {} to report missing '
+                       'commit_ids for: {}', self._manifest.contactinfo.bugurl,
+                       projects_missing_commit_ids)
       return UpdateProjectsResult(None, False)

     for project in projects:
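
The logging helpers above now take a format string plus arguments instead of a
pre-built f-string. The fully formatted message still goes to stderr with the
superproject prefix, while the un-interpolated format string is what gets passed
to `ErrorEvent`; the apparent goal is to keep the logged event text stable
across different argument values, though that motivation is an inference, not
something stated in the diff. A sketch of the call shape, with placeholder
values:

    # Values here are placeholders, not real output.
    self._LogWarning('git fetch call failed, command: git {}, '
                     'return code: {}, stderr: {}', cmd, retval, p.stderr)
    # stderr gets:  repo superproject branch: <branch> url: <url> warning: git fetch call failed, ...
    # the event log records the raw format string via ErrorEvent(message, fmt)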
@@ -110,7 +110,7 @@ class EventLog(object):
     return {
       'event': event_name,
      'sid': self._full_sid,
-      'thread': threading.currentThread().getName(),
+      'thread': threading.current_thread().name,
       'time': datetime.datetime.utcnow().isoformat() + 'Z',
     }

@@ -14,7 +14,6 @@

 import os
 import multiprocessing
-import platform
 import re
 import sys
 import time

main.py (40 lines changed)

@@ -37,7 +37,7 @@ except ImportError:

 from color import SetDefaultColoring
 import event_log
-from repo_trace import SetTrace
+from repo_trace import SetTrace, Trace, SetTraceToStderr
 from git_command import user_agent
 from git_config import RepoConfig
 from git_trace2_event_log import EventLog

@@ -109,6 +109,9 @@ global_options.add_option('--color',
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution (REPO_TRACE=1)')
+global_options.add_option('--trace-to-stderr',
+                          dest='trace_to_stderr', action='store_true',
+                          help='trace outputs go to stderr in addition to .repo/TRACE_FILE')
 global_options.add_option('--trace-python',
                           dest='trace_python', action='store_true',
                           help='trace python command execution')

@@ -198,9 +201,6 @@ class _Repo(object):
     """Execute the requested subcommand."""
     result = 0

-    if gopts.trace:
-      SetTrace()
-
     # Handle options that terminate quickly first.
     if gopts.help or gopts.help_all:
       self._PrintHelp(short=False, all_commands=gopts.help_all)

@@ -216,6 +216,21 @@ class _Repo(object):
       self._PrintHelp(short=True)
       return 1

+    run = lambda: self._RunLong(name, gopts, argv) or 0
+    with Trace('starting new command: %s', ', '.join([name] + argv),
+               first_trace=True):
+      if gopts.trace_python:
+        import trace
+        tracer = trace.Trace(count=False, trace=True, timing=True,
+                             ignoredirs=set(sys.path[1:]))
+        result = tracer.runfunc(run)
+      else:
+        result = run()
+    return result
+
+  def _RunLong(self, name, gopts, argv):
+    """Execute the (longer running) requested subcommand."""
+    result = 0
     SetDefaultColoring(gopts.color)

     git_trace2_event_log = EventLog()

@@ -652,17 +667,18 @@ def _Main(argv):
   Version.wrapper_path = opt.wrapper_path

   repo = _Repo(opt.repodir)

   try:
     init_http()
     name, gopts, argv = repo._ParseArgs(argv)
-    run = lambda: repo._Run(name, gopts, argv) or 0
-    if gopts.trace_python:
-      import trace
-      tracer = trace.Trace(count=False, trace=True, timing=True,
-                           ignoredirs=set(sys.path[1:]))
-      result = tracer.runfunc(run)
-    else:
-      result = run()
+
+    if gopts.trace:
+      SetTrace()
+
+    if gopts.trace_to_stderr:
+      SetTraceToStderr()
+
+    result = repo._Run(name, gopts, argv) or 0
   except KeyboardInterrupt:
     print('aborted by user', file=sys.stderr)
     result = 1
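
Several files in this comparison switch from calling `Trace(...)` as a one-shot
function to using it as a context manager (`with Trace(...):`), so the work done
for a command is grouped under a single trace span that can also be written to
`.repo/TRACE_FILE`. The real implementation lives in repo_trace.py, which is not
part of this listing; the following is only a hypothetical sketch of such a
wrapper:

    import contextlib
    import sys

    @contextlib.contextmanager
    def Trace(fmt, *args, first_trace=False):
      """Hypothetical stand-in for repo_trace.Trace (not the upstream code)."""
      # first_trace is accepted only to mirror the call site in main.py above;
      # a real implementation might use it to start a fresh trace file.
      message = fmt % args if args else fmt
      print(f'trace: {message}', file=sys.stderr)
      yield

    # Usage mirroring main.py:
    # with Trace('starting new command: %s', ', '.join([name] + argv), first_trace=True):
    #   result = run()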
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "August 2022" "repo smartsync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
 .SH NAME
 repo \- repo smartsync - manual page for repo smartsync
 .SH SYNOPSIS

@@ -105,6 +105,13 @@ delete refs that no longer exist on the remote
 .TP
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
+.TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
 .SS Logging options:
 .TP
 \fB\-v\fR, \fB\-\-verbose\fR

@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "August 2022" "repo sync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
 .SH NAME
 repo \- repo sync - manual page for repo sync
 .SH SYNOPSIS

@@ -106,6 +106,13 @@ delete refs that no longer exist on the remote
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
 .TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
+.TP
 \fB\-s\fR, \fB\-\-smart\-sync\fR
 smart sync using manifest from the latest known good
 build

@@ -200,6 +207,9 @@ to a sha1 revision if the sha1 revision does not already exist locally.
 The \fB\-\-prune\fR option can be used to remove any refs that no longer exist on the
 remote.
 .PP
+The \fB\-\-auto\-gc\fR option can be used to trigger garbage collection on all projects.
+By default, repo does not run garbage collection.
+.PP
 SSH Connections
 .PP
 If at least one project remote URL uses an SSH connection (ssh://, git+ssh://,

@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "July 2022" "repo" "Repo Manual"
+.TH REPO "1" "November 2022" "repo" "Repo Manual"
 .SH NAME
 repo \- repository management tool built on top of git
 .SH SYNOPSIS

@@ -25,6 +25,10 @@ control color usage: auto, always, never
 \fB\-\-trace\fR
 trace git command execution (REPO_TRACE=1)
 .TP
+\fB\-\-trace\-to\-stderr\fR
+trace outputs go to stderr in addition to
+\&.repo/TRACE_FILE
+.TP
 \fB\-\-trace\-python\fR
 trace python command execution
 .TP

@@ -22,7 +22,7 @@ import xml.dom.minidom
 import urllib.parse

 import gitc_utils
-from git_config import GitConfig, IsId
+from git_config import GitConfig
 from git_refs import R_HEADS, HEAD
 from git_superproject import Superproject
 import platform_utils

@@ -15,7 +15,7 @@
 import os
 import sys
 from time import time
-from repo_trace import IsTrace
+from repo_trace import IsTraceToStderr

 _NOT_TTY = not os.isatty(2)

@@ -80,7 +80,7 @@ class Progress(object):
   def update(self, inc=1, msg=''):
     self._done += inc

-    if _NOT_TTY or IsTrace():
+    if _NOT_TTY or IsTraceToStderr():
       return

     if not self._show:

@@ -113,7 +113,7 @@ class Progress(object):
     sys.stderr.flush()

   def end(self):
-    if _NOT_TTY or IsTrace() or not self._show:
+    if _NOT_TTY or IsTraceToStderr() or not self._show:
       return

     duration = duration_str(time() - self._start)
project.py (48 lines changed)

@@ -41,7 +41,7 @@ from error import ManifestInvalidRevisionError, ManifestInvalidPathError
 from error import NoManifestException, ManifestParseError
 import platform_utils
 import progress
-from repo_trace import IsTrace, Trace
+from repo_trace import Trace

 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M

@@ -54,15 +54,17 @@ class SyncNetworkHalfResult(NamedTuple):
   # commit already present.
   remote_fetched: bool


 # Maximum sleep time allowed during retries.
 MAXIMUM_RETRY_SLEEP_SEC = 3600.0
 # +-10% random jitter is added to each Fetches retry sleep duration.
 RETRY_JITTER_PERCENT = 0.1

-# Whether to use alternates.
+# Whether to use alternates. Switching back and forth is *NOT* supported.
 # TODO(vapier): Remove knob once behavior is verified.
 _ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1'


 def _lwrite(path, content):
   lock = '%s.lock' % path

@@ -2416,16 +2418,16 @@ class Project(object):
       srcUrl = 'http' + srcUrl[len('persistent-http'):]
     cmd += [srcUrl]

-    if IsTrace():
-      Trace('%s', ' '.join(cmd))
-    if verbose:
-      print('%s: Downloading bundle: %s' % (self.name, srcUrl))
-    stdout = None if verbose else subprocess.PIPE
-    stderr = None if verbose else subprocess.STDOUT
-    try:
-      proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
-    except OSError:
-      return False
+    proc = None
+    with Trace('Fetching bundle: %s', ' '.join(cmd)):
+      if verbose:
+        print('%s: Downloading bundle: %s' % (self.name, srcUrl))
+      stdout = None if verbose else subprocess.PIPE
+      stderr = None if verbose else subprocess.STDOUT
+      try:
+        proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
+      except OSError:
+        return False

     (output, _) = proc.communicate()
     curlret = proc.returncode

@@ -3415,6 +3417,7 @@ class RepoProject(MetaProject):
     except OSError:
       return 0

+
 class ManifestProject(MetaProject):
   """The MetaProject for manifests."""

@@ -3505,7 +3508,7 @@ class ManifestProject(MetaProject):
   @property
   def partial_clone_exclude(self):
     """Partial clone exclude string"""
-    return self.config.GetBoolean('repo.partialcloneexclude')
+    return self.config.GetString('repo.partialcloneexclude')

   @property
   def manifest_platform(self):

@@ -3845,11 +3848,12 @@
     self.config.SetBoolean('repo.superproject', use_superproject)

     if not standalone_manifest:
-      if not self.Sync_NetworkHalf(
+      success = self.Sync_NetworkHalf(
           is_new=is_new, quiet=not verbose, verbose=verbose,
           clone_bundle=clone_bundle, current_branch_only=current_branch_only,
           tags=tags, submodules=submodules, clone_filter=clone_filter,
-          partial_clone_exclude=self.manifest.PartialCloneExclude).success:
+          partial_clone_exclude=self.manifest.PartialCloneExclude).success
+      if not success:
         r = self.GetRemote()
         print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)

@@ -3928,12 +3932,14 @@
     if git_superproject.UseSuperproject(use_superproject, self.manifest):
       sync_result = self.manifest.superproject.Sync(git_event_log)
       if not sync_result.success:
-        print('warning: git update of superproject for '
-              f'{self.manifest.path_prefix} failed, repo sync will not use '
-              'superproject to fetch source; while this error is not fatal, '
-              'and you can continue to run repo sync, please run repo init '
-              'with the --no-use-superproject option to stop seeing this '
-              'warning', file=sys.stderr)
+        submanifest = ''
+        if self.manifest.path_prefix:
+          submanifest = f'for {self.manifest.path_prefix} '
+        print(f'warning: git update of superproject {submanifest}failed, repo '
+              'sync will not use superproject to fetch source; while this '
+              'error is not fatal, and you can continue to run repo sync, '
+              'please run repo init with the --no-use-superproject option to '
+              'stop seeing this warning', file=sys.stderr)
         if sync_result.fatal and use_superproject is not None:
           return False
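
The `partial_clone_exclude` change above is a type fix: the setting holds a
string (apparently a comma-separated list of project names, per the property's
docstring), so reading it with `GetBoolean` could never return the actual value.
A small illustration with placeholder project names:

    # With 'repo.partialcloneexclude = platform/external/foo,platform/external/bar'
    # in the manifest project's config:
    #
    #   self.config.GetBoolean('repo.partialcloneexclude')  ->  None (plus a warning now)
    #   self.config.GetString('repo.partialcloneexclude')   ->  'platform/external/foo,platform/external/bar'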
@@ -18,93 +18,8 @@
 This is intended to be run before every official Repo release.
 """

-from pathlib import Path
-from functools import partial
-import argparse
-import multiprocessing
-import os
-import re
-import shutil
-import subprocess
 import sys
-import tempfile
-
-TOPDIR = Path(__file__).resolve().parent.parent
-MANDIR = TOPDIR.joinpath('man')
-
-# Load repo local modules.
-sys.path.insert(0, str(TOPDIR))
-from git_command import RepoSourceVersion
-import subcmds
-
-
-def worker(cmd, **kwargs):
-  subprocess.run(cmd, **kwargs)
-
-
-def main(argv):
-  parser = argparse.ArgumentParser(description=__doc__)
-  opts = parser.parse_args(argv)
-
-  if not shutil.which('help2man'):
-    sys.exit('Please install help2man to continue.')
-
-  # Let repo know we're generating man pages so it can avoid some dynamic
-  # behavior (like probing active number of CPUs). We use a weird name &
-  # value to make it less likely for users to set this var themselves.
-  os.environ['_REPO_GENERATE_MANPAGES_'] = ' indeed! '
-
-  # "repo branch" is an alias for "repo branches".
-  del subcmds.all_commands['branch']
-  (MANDIR / 'repo-branch.1').write_text('.so man1/repo-branches.1')
-
-  version = RepoSourceVersion()
-  cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}',
-              '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}',
-              '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo',
-              '-h', f'help {cmd}'] for cmd in subcmds.all_commands]
-  cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git',
-                  '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}',
-                  '-o', MANDIR.joinpath('repo.1.tmp'), './repo',
-                  '-h', '--help-all'])
-
-  with tempfile.TemporaryDirectory() as tempdir:
-    tempdir = Path(tempdir)
-    repo_dir = tempdir / '.repo'
-    repo_dir.mkdir()
-    (repo_dir / 'repo').symlink_to(TOPDIR)
-
-    # Create a repo wrapper using the active Python executable. We can't pass
-    # this directly to help2man as it's too simple, so insert it via shebang.
-    data = (TOPDIR / 'repo').read_text(encoding='utf-8')
-    tempbin = tempdir / 'repo'
-    tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8')
-    tempbin.chmod(0o755)
-
-    # Run all cmd in parallel, and wait for them to finish.
-    with multiprocessing.Pool() as pool:
-      pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
-
-  regex = (
-      (r'(It was generated by help2man) [0-9.]+', '\g<1>.'),
-      (r'^\.IP\n(.*:)\n', '.SS \g<1>\n'),
-      (r'^\.PP\nDescription', '.SH DETAILS'),
-  )
-  for tmp_path in MANDIR.glob('*.1.tmp'):
-    path = tmp_path.parent / tmp_path.stem
-    old_data = path.read_text() if path.exists() else ''
-
-    data = tmp_path.read_text()
-    tmp_path.unlink()
-
-    for pattern, replacement in regex:
-      data = re.sub(pattern, replacement, data, flags=re.M)
-
-    # If the only thing that changed was the date, don't refresh. This avoids
-    # a lot of noise when only one file actually updates.
-    old_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', old_data, flags=re.M)
-    new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', data, flags=re.M)
-    if old_data != new_data:
-      path.write_text(data)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+
+import update_manpages
+
+sys.exit(update_manpages.main(sys.argv[1:]))

119  release/update_manpages.py  Normal file
@@ -0,0 +1,119 @@
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Helper tool for generating manual page for all repo commands.

Most code lives in this module so it can be unittested.
"""

from pathlib import Path
from functools import partial
import argparse
import multiprocessing
import os
import re
import shutil
import subprocess
import sys
import tempfile

TOPDIR = Path(__file__).resolve().parent.parent
MANDIR = TOPDIR.joinpath('man')

# Load repo local modules.
sys.path.insert(0, str(TOPDIR))
from git_command import RepoSourceVersion
import subcmds


def worker(cmd, **kwargs):
  subprocess.run(cmd, **kwargs)


def main(argv):
  parser = argparse.ArgumentParser(description=__doc__)
  opts = parser.parse_args(argv)

  if not shutil.which('help2man'):
    sys.exit('Please install help2man to continue.')

  # Let repo know we're generating man pages so it can avoid some dynamic
  # behavior (like probing active number of CPUs).  We use a weird name &
  # value to make it less likely for users to set this var themselves.
  os.environ['_REPO_GENERATE_MANPAGES_'] = ' indeed! '

  # "repo branch" is an alias for "repo branches".
  del subcmds.all_commands['branch']
  (MANDIR / 'repo-branch.1').write_text('.so man1/repo-branches.1')

  version = RepoSourceVersion()
  cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}',
              '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}',
              '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo',
              '-h', f'help {cmd}'] for cmd in subcmds.all_commands]
  cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git',
                  '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}',
                  '-o', MANDIR.joinpath('repo.1.tmp'), './repo',
                  '-h', '--help-all'])

  with tempfile.TemporaryDirectory() as tempdir:
    tempdir = Path(tempdir)
    repo_dir = tempdir / '.repo'
    repo_dir.mkdir()
    (repo_dir / 'repo').symlink_to(TOPDIR)

    # Create a repo wrapper using the active Python executable.  We can't pass
    # this directly to help2man as it's too simple, so insert it via shebang.
    data = (TOPDIR / 'repo').read_text(encoding='utf-8')
    tempbin = tempdir / 'repo'
    tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8')
    tempbin.chmod(0o755)

    # Run all cmd in parallel, and wait for them to finish.
    with multiprocessing.Pool() as pool:
      pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)

  for tmp_path in MANDIR.glob('*.1.tmp'):
    path = tmp_path.parent / tmp_path.stem
    old_data = path.read_text() if path.exists() else ''

    data = tmp_path.read_text()
    tmp_path.unlink()

    data = replace_regex(data)

    # If the only thing that changed was the date, don't refresh.  This avoids
    # a lot of noise when only one file actually updates.
    old_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', old_data, flags=re.M)
    new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', data, flags=re.M)
    if old_data != new_data:
      path.write_text(data)


def replace_regex(data):
  """Replace semantically null regexes in the data.

  Args:
    data: manpage text.

  Returns:
    Updated manpage text.
  """
  regex = (
      (r'(It was generated by help2man) [0-9.]+', r'\g<1>.'),
      (r'^\033\[[0-9;]*m([^\033]*)\033\[m', r'\g<1>'),
      (r'^\.IP\n(.*:)\n', r'.SS \g<1>\n'),
      (r'^\.PP\nDescription', r'.SH DETAILS'),
  )
  for pattern, replacement in regex:
    data = re.sub(pattern, replacement, data, flags=re.M)
  return data
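Since the new module is explicitly meant to be unit-testable, here is a minimal, hedged sketch of exercising its `replace_regex` helper. The sample text is invented, and the snippet assumes it runs from a git-repo checkout so the module's repo-local imports (`git_command`, `subcmds`) resolve.

```python
import update_manpages  # the new release/update_manpages.py module

# Hypothetical help2man output fragment: the regex table above rewrites the
# '.PP\nDescription' heading into a '.SH DETAILS' section header.
sample = '.PP\nDescription\nrepo is a tool built on top of git.\n'
print(update_manpages.replace_regex(sample))
# Expected to begin with '.SH DETAILS' rather than '.PP\nDescription'.
```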
8  repo
@@ -149,7 +149,7 @@ if not REPO_REV:
 BUG_URL = 'https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue'
 
 # increment this whenever we make important changes to this script
-VERSION = (2, 29)
+VERSION = (2, 32)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
@@ -265,7 +265,8 @@ else:
     urllib.error = urllib2
 
 
-home_dot_repo = os.path.expanduser('~/.repoconfig')
+repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
+home_dot_repo = os.path.join(repo_config_dir, '.repoconfig')
 gpg_dir = os.path.join(home_dot_repo, 'gnupg')
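A quick illustration of the lookup added above. The environment variable name and the fallback come straight from the hunk; the example path is made up.

```python
import os

# Mirrors the resolution in the `repo` launcher above: REPO_CONFIG_DIR
# overrides the default of the user's home directory, and '.repoconfig' is
# appended to whichever base directory wins.
repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
home_dot_repo = os.path.join(repo_config_dir, '.repoconfig')

# e.g. with REPO_CONFIG_DIR=/srv/build/cfg (hypothetical), this resolves to
# /srv/build/cfg/.repoconfig; with the variable unset it stays ~/.repoconfig.
print(home_dot_repo)
```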
@@ -447,8 +448,7 @@ def run_command(cmd, **kwargs):
     except UnicodeError:
       print('repo: warning: Invalid UTF-8 output:\ncmd: %r\n%r' % (cmd, output),
             file=sys.stderr)
-      # TODO(vapier): Once we require Python 3, use 'backslashreplace'.
-      return output.decode('utf-8', 'replace')
+      return output.decode('utf-8', 'backslashreplace')
 
   # Run & package the results.
   proc = subprocess.Popen(cmd, **kwargs)
134  repo_trace.py
@@ -15,26 +15,152 @@
"""Logic for tracing repo interactions.
|
"""Logic for tracing repo interactions.
|
||||||
|
|
||||||
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
|
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
|
||||||
|
|
||||||
|
Temporary: Tracing is always on. Set `REPO_TRACE=0` to turn off.
|
||||||
|
To also include trace outputs in stderr do `repo --trace_to_stderr ...`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
|
import tempfile
|
||||||
|
from contextlib import ContextDecorator
|
||||||
|
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
# Env var to implicitly turn on tracing.
|
# Env var to implicitly turn on tracing.
|
||||||
REPO_TRACE = 'REPO_TRACE'
|
REPO_TRACE = 'REPO_TRACE'
|
||||||
|
|
||||||
_TRACE = os.environ.get(REPO_TRACE) == '1'
|
# Temporarily set tracing to always on unless user expicitly sets to 0.
|
||||||
|
_TRACE = os.environ.get(REPO_TRACE) != '0'
|
||||||
|
_TRACE_TO_STDERR = False
|
||||||
|
_TRACE_FILE = None
|
||||||
|
_TRACE_FILE_NAME = 'TRACE_FILE'
|
||||||
|
_MAX_SIZE = 70 # in MiB
|
||||||
|
_NEW_COMMAND_SEP = '+++++++++++++++NEW COMMAND+++++++++++++++++++'
|
||||||
|
|
||||||
|
|
||||||
|
def IsTraceToStderr():
|
||||||
|
"""Whether traces are written to stderr."""
|
||||||
|
return _TRACE_TO_STDERR
|
||||||
|
|
||||||
|
|
||||||
def IsTrace():
|
def IsTrace():
|
||||||
|
"""Whether tracing is enabled."""
|
||||||
return _TRACE
|
return _TRACE
|
||||||
|
|
||||||
|
|
||||||
|
def SetTraceToStderr():
|
||||||
|
"""Enables tracing logging to stderr."""
|
||||||
|
global _TRACE_TO_STDERR
|
||||||
|
_TRACE_TO_STDERR = True
|
||||||
|
|
||||||
|
|
||||||
def SetTrace():
|
def SetTrace():
|
||||||
|
"""Enables tracing."""
|
||||||
global _TRACE
|
global _TRACE
|
||||||
_TRACE = True
|
_TRACE = True
|
||||||
|
|
||||||
|
|
||||||
def Trace(fmt, *args):
|
def _SetTraceFile(quiet):
|
||||||
if IsTrace():
|
"""Sets the trace file location."""
|
||||||
print(fmt % args, file=sys.stderr)
|
global _TRACE_FILE
|
||||||
|
_TRACE_FILE = _GetTraceFile(quiet)
|
||||||
|
|
||||||
|
|
||||||
|
class Trace(ContextDecorator):
|
||||||
|
"""Used to capture and save git traces."""
|
||||||
|
|
||||||
|
def _time(self):
|
||||||
|
"""Generate nanoseconds of time in a py3.6 safe way"""
|
||||||
|
return int(time.time() * 1e+9)
|
||||||
|
|
||||||
|
def __init__(self, fmt, *args, first_trace=False, quiet=True):
|
||||||
|
"""Initialize the object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
fmt: The format string for the trace.
|
||||||
|
*args: Arguments to pass to formatting.
|
||||||
|
first_trace: Whether this is the first trace of a `repo` invocation.
|
||||||
|
quiet: Whether to suppress notification of trace file location.
|
||||||
|
"""
|
||||||
|
if not IsTrace():
|
||||||
|
return
|
||||||
|
self._trace_msg = fmt % args
|
||||||
|
|
||||||
|
if not _TRACE_FILE:
|
||||||
|
_SetTraceFile(quiet)
|
||||||
|
|
||||||
|
if first_trace:
|
||||||
|
_ClearOldTraces()
|
||||||
|
self._trace_msg = f'{_NEW_COMMAND_SEP} {self._trace_msg}'
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
if not IsTrace():
|
||||||
|
return self
|
||||||
|
|
||||||
|
print_msg = f'PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n'
|
||||||
|
|
||||||
|
with open(_TRACE_FILE, 'a') as f:
|
||||||
|
print(print_msg, file=f)
|
||||||
|
|
||||||
|
if _TRACE_TO_STDERR:
|
||||||
|
print(print_msg, file=sys.stderr)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, *exc):
|
||||||
|
if not IsTrace():
|
||||||
|
return False
|
||||||
|
|
||||||
|
print_msg = f'PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n'
|
||||||
|
|
||||||
|
with open(_TRACE_FILE, 'a') as f:
|
||||||
|
print(print_msg, file=f)
|
||||||
|
|
||||||
|
if _TRACE_TO_STDERR:
|
||||||
|
print(print_msg, file=sys.stderr)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _GetTraceFile(quiet):
|
||||||
|
"""Get the trace file or create one."""
|
||||||
|
# TODO: refactor to pass repodir to Trace.
|
||||||
|
repo_dir = os.path.dirname(os.path.dirname(__file__))
|
||||||
|
trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
|
||||||
|
if not quiet:
|
||||||
|
print(f'Trace outputs in {trace_file}', file=sys.stderr)
|
||||||
|
return trace_file
|
||||||
|
|
||||||
|
|
||||||
|
def _ClearOldTraces():
|
||||||
|
"""Clear the oldest commands if trace file is too big."""
|
||||||
|
try:
|
||||||
|
with open(_TRACE_FILE, 'r', errors='ignore') as f:
|
||||||
|
if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE:
|
||||||
|
return
|
||||||
|
trace_lines = f.readlines()
|
||||||
|
except FileNotFoundError:
|
||||||
|
return
|
||||||
|
|
||||||
|
while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE:
|
||||||
|
for i, line in enumerate(trace_lines):
|
||||||
|
if 'END:' in line and _NEW_COMMAND_SEP in line:
|
||||||
|
trace_lines = trace_lines[i + 1:]
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# The last chunk is bigger than _MAX_SIZE, so just throw everything away.
|
||||||
|
trace_lines = []
|
||||||
|
|
||||||
|
while trace_lines and trace_lines[-1] == '\n':
|
||||||
|
trace_lines = trace_lines[:-1]
|
||||||
|
# Write to a temporary file with a unique name in the same filesystem
|
||||||
|
# before replacing the original trace file.
|
||||||
|
temp_dir, temp_prefix = os.path.split(_TRACE_FILE)
|
||||||
|
with tempfile.NamedTemporaryFile('w',
|
||||||
|
dir=temp_dir,
|
||||||
|
prefix=temp_prefix,
|
||||||
|
delete=False) as f:
|
||||||
|
f.writelines(trace_lines)
|
||||||
|
platform_utils.rename(f.name, _TRACE_FILE)
|
||||||
|
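For orientation, a minimal sketch of how the rewritten `Trace` context decorator above is meant to be used. The command strings are made-up examples; the API shape (format string plus args, `first_trace`, with-statement or decorator usage) comes from the diff itself, and the sketch assumes `repo_trace` is importable from the repo tree.

```python
import repo_trace

# Entering/leaving the block appends START/END records (with PID and a
# nanosecond timestamp) to the trace file, and mirrors them to stderr when
# --trace_to_stderr was requested.
with repo_trace.Trace('git fetch origin (example command)', first_trace=True):
    pass  # run the traced work here


# Because ContextDecorator is used, the same object also works as a decorator.
@repo_trace.Trace('checking out %s', 'example-project')
def checkout():
    pass
```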
43  run_tests
@@ -15,47 +15,8 @@
"""Wrapper to run pytest with the right settings."""
|
"""Wrapper to run pytest with the right settings."""
|
||||||
|
|
||||||
import errno
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
|
import pytest
|
||||||
|
|
||||||
def find_pytest():
|
|
||||||
"""Try to locate a good version of pytest."""
|
|
||||||
# If we're in a virtualenv, assume that it's provided the right pytest.
|
|
||||||
if 'VIRTUAL_ENV' in os.environ:
|
|
||||||
return 'pytest'
|
|
||||||
|
|
||||||
# Use the Python 3 version if available.
|
|
||||||
ret = shutil.which('pytest-3')
|
|
||||||
if ret:
|
|
||||||
return ret
|
|
||||||
|
|
||||||
# Hopefully this is a Python 3 version.
|
|
||||||
ret = shutil.which('pytest')
|
|
||||||
if ret:
|
|
||||||
return ret
|
|
||||||
|
|
||||||
print('%s: unable to find pytest.' % (__file__,), file=sys.stderr)
|
|
||||||
print('%s: Try installing: sudo apt-get install python-pytest' % (__file__,),
|
|
||||||
file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
def main(argv):
|
|
||||||
"""The main entry."""
|
|
||||||
# Add the repo tree to PYTHONPATH as the tests expect to be able to import
|
|
||||||
# modules directly.
|
|
||||||
pythonpath = os.path.dirname(os.path.realpath(__file__))
|
|
||||||
oldpythonpath = os.environ.get('PYTHONPATH', None)
|
|
||||||
if oldpythonpath is not None:
|
|
||||||
pythonpath += os.pathsep + oldpythonpath
|
|
||||||
os.environ['PYTHONPATH'] = pythonpath
|
|
||||||
|
|
||||||
pytest = find_pytest()
|
|
||||||
return subprocess.run([pytest] + argv, check=False).returncode
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main(sys.argv[1:]))
|
sys.exit(pytest.main(sys.argv[1:]))
|
||||||
|
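A short note on the simplified wrapper above: any extra command-line arguments are now handed straight to `pytest.main`. A hedged sketch of the equivalent call (the test path is illustrative and assumed to exist in the repo tree):

```python
# Roughly what ./run_tests now does when invoked as:
#   ./run_tests tests/test_git_command.py -v
import sys
import pytest

sys.exit(pytest.main(['tests/test_git_command.py', '-v']))
```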
61  run_tests.vpython3  Normal file
@@ -0,0 +1,61 @@
# This is a vpython "spec" file.
#
# Read more about `vpython` and how to modify this file here:
#   https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md
# List of available wheels:
#   https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md

python_version: "3.8"

wheel: <
  name: "infra/python/wheels/pytest-py3"
  version: "version:6.2.2"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/py-py2_py3"
  version: "version:1.10.0"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/iniconfig-py3"
  version: "version:1.1.1"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/packaging-py2_py3"
  version: "version:16.8"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/pluggy-py3"
  version: "version:0.13.1"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/toml-py3"
  version: "version:0.10.1"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/pyparsing-py3"
  version: "version:3.0.7"
>

# Required by pytest==6.2.2
wheel: <
  name: "infra/python/wheels/attrs-py2_py3"
  version: "version:21.4.0"
>

# Required by packaging==16.8
wheel: <
  name: "infra/python/wheels/six-py2_py3"
  version: "version:1.16.0"
>
2  setup.py
@@ -40,7 +40,7 @@ setuptools.setup(
     long_description_content_type='text/plain',
     url='https://gerrit.googlesource.com/git-repo/',
     project_urls={
-        'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo',
+        'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo',
     },
     # https://pypi.org/classifiers/
     classifiers=[
37  ssh.py
@@ -182,28 +182,29 @@ class ProxyManager:
     # be important because we can't tell that that 'git@myhost.com' is the same
     # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
     check_command = command_base + ['-O', 'check']
-    try:
-      Trace(': %s', ' '.join(check_command))
-      check_process = subprocess.Popen(check_command,
-                                       stdout=subprocess.PIPE,
-                                       stderr=subprocess.PIPE)
-      check_process.communicate()  # read output, but ignore it...
-      isnt_running = check_process.wait()
-
-      if not isnt_running:
-        # Our double-check found that the master _was_ infact running.  Add to
-        # the list of keys.
-        self._master_keys[key] = True
-        return True
-    except Exception:
-      # Ignore excpetions.  We we will fall back to the normal command and print
-      # to the log there.
-      pass
+    with Trace('Call to ssh (check call): %s', ' '.join(check_command)):
+      try:
+        check_process = subprocess.Popen(check_command,
+                                         stdout=subprocess.PIPE,
+                                         stderr=subprocess.PIPE)
+        check_process.communicate()  # read output, but ignore it...
+        isnt_running = check_process.wait()
+
+        if not isnt_running:
+          # Our double-check found that the master _was_ infact running.  Add to
+          # the list of keys.
+          self._master_keys[key] = True
+          return True
+      except Exception:
+        # Ignore excpetions.  We we will fall back to the normal command and
+        # print to the log there.
+        pass
 
     command = command_base[:1] + ['-M', '-N'] + command_base[1:]
+    p = None
     try:
-      Trace(': %s', ' '.join(command))
-      p = subprocess.Popen(command)
+      with Trace('Call to ssh: %s', ' '.join(command)):
+        p = subprocess.Popen(command)
     except Exception as e:
       self._master_broken.value = True
       print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
@@ -155,11 +155,11 @@ is shown, then the branch appears in all projects.
       if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
         in_type = 'in'
         for b in i.projects:
-          relpath = b.project.relpath
+          relpath = _RelPath(b.project)
           if not i.IsSplitCurrent or b.current:
-            paths.append(_RelPath(b.project))
+            paths.append(relpath)
           else:
-            non_cur_paths.append(_RelPath(b.project))
+            non_cur_paths.append(relpath)
       else:
         fmt = out.notinproject
         in_type = 'not in'
@@ -77,33 +77,35 @@ synced and their revisions won't be found.
       metavar='<FORMAT>',
       help='print the log using a custom git pretty format string')
 
-  def _printRawDiff(self, diff, pretty_format=None):
+  def _printRawDiff(self, diff, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     for project in diff['added']:
-      self.printText("A %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("A %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()
 
     for project in diff['removed']:
-      self.printText("R %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("R %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()
 
     for project, otherProject in diff['changed']:
-      self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()
       self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
 
     for project, otherProject in diff['unreachable']:
-      self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()
 
-  def _printDiff(self, diff, color=True, pretty_format=None):
+  def _printDiff(self, diff, color=True, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     if diff['added']:
       self.out.nl()
       self.printText('added projects : \n')
       self.out.nl()
       for project in diff['added']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -113,7 +115,7 @@ synced and their revisions won't be found.
       self.printText('removed projects : \n')
       self.out.nl()
       for project in diff['removed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -123,7 +125,7 @@ synced and their revisions won't be found.
       self.printText('missing projects : \n')
       self.out.nl()
       for project in diff['missing']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -133,7 +135,7 @@ synced and their revisions won't be found.
       self.printText('changed projects : \n')
       self.out.nl()
       for project, otherProject in diff['changed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' changed from ')
         self.printRevision(project.revisionExpr)
         self.printText(' to ')
@@ -148,7 +150,7 @@ synced and their revisions won't be found.
       self.printText('projects with unreachable revisions : \n')
       self.out.nl()
       for project, otherProject in diff['unreachable']:
-        self.printProject('\t%s ' % (project.relpath))
+        self.printProject('\t%s ' % (_RelPath(project)))
         self.printRevision(project.revisionExpr)
         self.printText(' or ')
         self.printRevision(otherProject.revisionExpr)
@@ -214,6 +216,8 @@ synced and their revisions won't be found.
 
     diff = manifest1.projectsDiff(manifest2)
     if opt.raw:
-      self._printRawDiff(diff, pretty_format=opt.pretty_format)
+      self._printRawDiff(diff, pretty_format=opt.pretty_format,
+                         local=opt.this_manifest_only)
     else:
-      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
+      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format,
+                      local=opt.this_manifest_only)
@@ -68,7 +68,8 @@ use for this GITC client.
       sys.exit(1)
     manifest_file = opt.manifest_file
 
-    manifest = GitcManifest(self.repodir, gitc_client)
+    manifest = GitcManifest(self.repodir, os.path.join(self.client_dir,
+                                                       '.manifest'))
     manifest.Override(manifest_file)
     gitc_utils.generate_gitc_manifest(None, manifest)
     print('Please run `cd %s` to view your GITC client.' %
@@ -13,16 +13,10 @@
 # limitations under the License.
 
 import os
-import platform
-import re
 import sys
-import urllib.parse
 
 from color import Coloring
 from command import InteractiveCommand, MirrorSafeCommand
-from error import ManifestParseError
-from project import SyncBuffer
-from git_config import GitConfig
 from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
 from wrapper import Wrapper
 
@@ -140,9 +134,7 @@ to update the working directory files.
       sys.exit(1)
 
   def _Prompt(self, prompt, value):
-    print('%-10s [%s]: ' % (prompt, value), end='')
-    # TODO: When we require Python 3, use flush=True w/print above.
-    sys.stdout.flush()
+    print('%-10s [%s]: ' % (prompt, value), end='', flush=True)
     a = sys.stdin.readline().strip()
     if a == '':
       return value
@@ -179,9 +171,7 @@ to update the working directory files.
       if not opt.quiet:
         print()
         print('Your identity is: %s <%s>' % (name, email))
-        print('is this correct [y/N]? ', end='')
-        # TODO: When we require Python 3, use flush=True w/print above.
-        sys.stdout.flush()
+        print('is this correct [y/N]? ', end='', flush=True)
       a = sys.stdin.readline().strip().lower()
       if a in ('yes', 'y', 't', 'true'):
         break
@@ -223,23 +213,19 @@ to update the working directory files.
       out.printer(fg='black', attr=c)(' %-6s ', c)
     out.nl()
 
-    print('Enable color display in this user account (y/N)? ', end='')
-    # TODO: When we require Python 3, use flush=True w/print above.
-    sys.stdout.flush()
+    print('Enable color display in this user account (y/N)? ', end='', flush=True)
     a = sys.stdin.readline().strip().lower()
     if a in ('y', 'yes', 't', 'true', 'on'):
       gc.SetString('color.ui', 'auto')
 
-  def _DisplayResult(self, opt):
+  def _DisplayResult(self):
     if self.manifest.IsMirror:
       init_type = 'mirror '
     else:
       init_type = ''
 
-    if not opt.quiet:
-      print()
-      print('repo %shas been initialized in %s' %
-            (init_type, self.manifest.topdir))
+    print()
+    print('repo %shas been initialized in %s' % (init_type, self.manifest.topdir))
 
     current_dir = os.getcwd()
     if current_dir != self.manifest.topdir:
@@ -329,4 +315,5 @@ to update the working directory files.
     self._ConfigureUser(opt)
     self._ConfigureColor()
 
-    self._DisplayResult(opt)
+    if not opt.quiet:
+      self._DisplayResult()
@@ -15,7 +15,6 @@
 import json
 import os
 import sys
-import optparse
 
 from command import PagedCommand
 
263  subcmds/sync.py
@@ -21,7 +21,6 @@ import multiprocessing
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import shutil
 import socket
 import sys
 import tempfile
@@ -56,20 +55,21 @@ import gitc_utils
 from project import Project
 from project import RemoteSpec
 from command import Command, DEFAULT_LOCAL_JOBS, MirrorSafeCommand, WORKER_BATCH_SIZE
-from error import RepoChangedException, GitError, ManifestParseError
+from error import RepoChangedException, GitError
 import platform_utils
 from project import SyncBuffer
 from progress import Progress
-from repo_trace import IsTrace, Trace
+from repo_trace import Trace
 import ssh
 from wrapper import Wrapper
 from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
-# Env var to implicitly turn off object backups.
-REPO_BACKUP_OBJECTS = 'REPO_BACKUP_OBJECTS'
-
-_BACKUP_OBJECTS = os.environ.get(REPO_BACKUP_OBJECTS) != '0'
+
+# Env var to implicitly turn auto-gc back on. This was added to allow a user to
+# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
+_REPO_AUTO_GC = 'REPO_AUTO_GC'
+_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'
 
 
 class _FetchOneResult(NamedTuple):
@@ -200,6 +200,9 @@ exist locally.
 The --prune option can be used to remove any refs that no longer
 exist on the remote.
 
+The --auto-gc option can be used to trigger garbage collection on all
+projects. By default, repo does not run garbage collection.
+
 # SSH Connections
 
 If at least one project remote URL uses an SSH connection (ssh://,
@@ -309,6 +312,10 @@ later is required to fix a server side protocol bug.
                  help='delete refs that no longer exist on the remote (default)')
     p.add_option('--no-prune', dest='prune', action='store_false',
                  help='do not delete refs that no longer exist on the remote')
+    p.add_option('--auto-gc', action='store_true', default=None,
+                 help='run garbage collection on all synced projects')
+    p.add_option('--no-auto-gc', dest='auto_gc', action='store_false',
+                 help='do not run garbage collection on any projects (default)')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -463,6 +470,7 @@ later is required to fix a server side protocol bug.
     """
     start = time.time()
     success = False
+    remote_fetched = False
     buf = io.StringIO()
     try:
       sync_result = project.Sync_NetworkHalf(
@@ -480,6 +488,7 @@ later is required to fix a server side protocol bug.
           clone_filter=project.manifest.CloneFilter,
          partial_clone_exclude=project.manifest.PartialCloneExclude)
       success = sync_result.success
+      remote_fetched = sync_result.remote_fetched
 
       output = buf.getvalue()
       if (opt.verbose or not success) and output:
@@ -489,6 +498,8 @@ later is required to fix a server side protocol bug.
         print('error: Cannot fetch %s from %s'
               % (project.name, project.remote.url),
               file=sys.stderr)
+    except KeyboardInterrupt:
+      print(f'Keyboard interrupt while processing {project.name}')
     except GitError as e:
       print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr)
     except Exception as e:
@@ -497,8 +508,7 @@ later is required to fix a server side protocol bug.
       raise
 
     finish = time.time()
-    return _FetchOneResult(success, project, start, finish,
-                           sync_result.remote_fetched)
+    return _FetchOneResult(success, project, start, finish, remote_fetched)
 
   @classmethod
   def _FetchInitChild(cls, ssh_proxy):
@@ -536,7 +546,7 @@ later is required to fix a server side protocol bug.
           ret = False
         else:
           fetched.add(project.gitdir)
-        pm.update(msg=project.name)
+        pm.update(msg=f'Last synced: {project.name}')
         if not ret and opt.fail_fast:
           break
       return ret
@@ -713,7 +723,7 @@ later is required to fix a server side protocol bug.
         # ...we'll let existing jobs finish, though.
         if not success:
           ret = False
-          err_results.append(project.relpath)
+          err_results.append(project.RelPath(local=opt.this_manifest_only))
           if opt.fail_fast:
             if pool:
               pool.close()
@@ -728,63 +738,99 @@ later is required to fix a server side protocol bug.
         callback=_ProcessResults,
         output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
 
-  def _backup_cruft(self, bare_git):
-    """Save a copy of any cruft from `git gc`."""
-    # Find any cruft packs in the current gitdir, and save them.
-    # b/221065125 (repo sync complains that objects are missing). This does
-    # not prevent that state, but makes it so that the missing objects are
-    # available.
-    objdir = bare_git._project.objdir
-    pack_dir = os.path.join(objdir, 'pack')
-    bak_dir = os.path.join(objdir, '.repo', 'pack.bak')
-    if not _BACKUP_OBJECTS or not platform_utils.isdir(pack_dir):
-      return
-    saved = []
-    files = set(platform_utils.listdir(pack_dir))
-    to_backup = []
-    for f in files:
-      base, ext = os.path.splitext(f)
-      if base + '.mtimes' in files:
-        to_backup.append(f)
-    if to_backup:
-      os.makedirs(bak_dir, exist_ok=True)
-    for fname in to_backup:
-      bak_fname = os.path.join(bak_dir, fname)
-      if not os.path.exists(bak_fname):
-        saved.append(fname)
-        # Use a tmp file so that we are sure of a complete copy.
-        shutil.copy(os.path.join(pack_dir, fname), bak_fname + '.tmp')
-        shutil.move(bak_fname + '.tmp', bak_fname)
-    if saved:
-      Trace('%s saved %s', bare_git._project.name, ' '.join(saved))
-
-  def _GCProjects(self, projects, opt, err_event):
-    pm = Progress('Garbage collecting', len(projects), delay=False, quiet=opt.quiet)
-    pm.update(inc=0, msg='prescan')
-
-    tidy_dirs = {}
-    for project in projects:
-      # Make sure pruning never kicks in with shared projects that do not use
-      # alternates to avoid corruption.
-      if (not project.use_git_worktrees and
-              len(project.manifest.GetProjectsWithName(project.name, all_manifests=True)) > 1):
-        if project.UseAlternates:
-          # Undo logic set by previous versions of repo.
-          project.config.SetString('extensions.preciousObjects', None)
-          project.config.SetString('gc.pruneExpire', None)
-        else:
-          if not opt.quiet:
-            print('\r%s: Shared project %s found, disabling pruning.' %
-                  (project.relpath, project.name))
-          if git_require((2, 7, 0)):
-            project.EnableRepositoryExtension('preciousObjects')
-          else:
-            # This isn't perfect, but it's the best we can do with old git.
-            print('\r%s: WARNING: shared projects are unreliable when using old '
-                  'versions of git; please upgrade to git-2.7.0+.'
-                  % (project.relpath,),
-                  file=sys.stderr)
-            project.config.SetString('gc.pruneExpire', 'never')
+  @staticmethod
+  def _GetPreciousObjectsState(project: Project, opt):
+    """Get the preciousObjects state for the project.
+
+    Args:
+      project (Project): the project to examine, and possibly correct.
+      opt (optparse.Values): options given to sync.
+
+    Returns:
+      Expected state of extensions.preciousObjects:
+        False: Should be disabled. (not present)
+        True: Should be enabled.
+    """
+    if project.use_git_worktrees:
+      return False
+    projects = project.manifest.GetProjectsWithName(project.name,
+                                                    all_manifests=True)
+    if len(projects) == 1:
+      return False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+    if len(projects) > 1:
+      # Objects are potentially shared with another project.
+      # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
+      # - When False, shared projects share (via symlink)
+      #   .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
+      #   directory. All objects are precious, since there is no project with a
+      #   complete set of refs.
+      # - When True, shared projects share (via info/alternates)
+      #   .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
+      #   which is written only on the first clone of the project, and is not
+      #   written subsequently. (When Sync_NetworkHalf sees that it exists, it
+      #   makes sure that the alternates file points there, and uses a
+      #   project-local .git/objects directory for all syncs going forward.
+      # We do not support switching between the options. The environment
+      # variable is present for testing and migration only.
+      return not project.UseAlternates
+
+    return False
+
+  def _SetPreciousObjectsState(self, project: Project, opt):
+    """Correct the preciousObjects state for the project.
+
+    Args:
+      project: the project to examine, and possibly correct.
+      opt: options given to sync.
+    """
+    expected = self._GetPreciousObjectsState(project, opt)
+    actual = project.config.GetBoolean('extensions.preciousObjects') or False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+
+    if expected != actual:
+      # If this is unexpected, log it and repair.
+      Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
+      if expected:
+        if not opt.quiet:
+          print('\r%s: Shared project %s found, disabling pruning.' %
+                (relpath, project.name))
+        if git_require((2, 7, 0)):
+          project.EnableRepositoryExtension('preciousObjects')
+        else:
+          # This isn't perfect, but it's the best we can do with old git.
+          print('\r%s: WARNING: shared projects are unreliable when using '
+                'old versions of git; please upgrade to git-2.7.0+.'
+                % (relpath,),
+                file=sys.stderr)
+          project.config.SetString('gc.pruneExpire', 'never')
+      else:
+        if not opt.quiet:
+          print(f'\r{relpath}: not shared, disabling pruning.')
+        project.config.SetString('extensions.preciousObjects', None)
+        project.config.SetString('gc.pruneExpire', None)
+
+  def _GCProjects(self, projects, opt, err_event):
+    """Perform garbage collection.
+
+    If We are skipping garbage collection (opt.auto_gc not set), we still want
+    to potentially mark objects precious, so that `git gc` does not discard
+    shared objects.
+    """
+    if not opt.auto_gc:
+      # Just repair preciousObjects state, and return.
+      for project in projects:
+        self._SetPreciousObjectsState(project, opt)
+      return
+
+    pm = Progress('Garbage collecting', len(projects), delay=False,
+                  quiet=opt.quiet)
+    pm.update(inc=0, msg='prescan')
+
+    tidy_dirs = {}
+    for project in projects:
+      self._SetPreciousObjectsState(project, opt)
+
       project.config.SetString('gc.autoDetach', 'false')
       # Only call git gc once per objdir, but call pack-refs for the remainder.
       if project.objdir not in tidy_dirs:
@@ -800,22 +846,14 @@ later is required to fix a server side protocol bug.
 
     jobs = opt.jobs
 
-    gc_args = ['--auto']
-    backup_cruft = False
-    if git_require((2, 37, 0)):
-      gc_args.append('--cruft')
-      backup_cruft = True
-    pack_refs_args = ()
     if jobs < 2:
       for (run_gc, bare_git) in tidy_dirs.values():
         pm.update(msg=bare_git._project.name)
 
         if run_gc:
-          bare_git.gc(*gc_args)
+          bare_git.gc('--auto')
         else:
-          bare_git.pack_refs(*pack_refs_args)
-        if backup_cruft:
-          self._backup_cruft(bare_git)
+          bare_git.pack_refs()
       pm.end()
       return
@@ -830,17 +868,15 @@ later is required to fix a server side protocol bug.
       try:
         try:
           if run_gc:
-            bare_git.gc(*gc_args, config=config)
+            bare_git.gc('--auto', config=config)
           else:
-            bare_git.pack_refs(*pack_refs_args, config=config)
+            bare_git.pack_refs(config=config)
         except GitError:
           err_event.set()
         except Exception:
           err_event.set()
           raise
         finally:
-          if backup_cruft:
-            self._backup_cruft(bare_git)
           pm.finish(bare_git._project.name)
           sem.release()
@@ -1150,6 +1186,51 @@ later is required to fix a server side protocol bug.
     if opt.prune is None:
       opt.prune = True
 
+    if opt.auto_gc is None and _AUTO_GC:
+      print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
+            f'{_REPO_AUTO_GC} is deprecated and will be removed in a future',
+            'release. Use `--auto-gc` instead.', file=sys.stderr)
+      opt.auto_gc = True
+
+  def _ValidateOptionsWithManifest(self, opt, mp):
+    """Like ValidateOptions, but after we've updated the manifest.
+
+    Needed to handle sync-xxx option defaults in the manifest.
+
+    Args:
+      opt: The options to process.
+      mp: The manifest project to pull defaults from.
+    """
+    if not opt.jobs:
+      # If the user hasn't made a choice, use the manifest value.
+      opt.jobs = mp.manifest.default.sync_j
+    if opt.jobs:
+      # If --jobs has a non-default value, propagate it as the default for
+      # --jobs-xxx flags too.
+      if not opt.jobs_network:
+        opt.jobs_network = opt.jobs
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = opt.jobs
+    else:
+      # Neither user nor manifest have made a choice, so setup defaults.
+      if not opt.jobs_network:
+        opt.jobs_network = 1
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
+      opt.jobs = os.cpu_count()
+
+    # Try to stay under user rlimit settings.
+    #
+    # Since each worker requires at 3 file descriptors to run `git fetch`, use
+    # that to scale down the number of jobs. Unfortunately there isn't an easy
+    # way to determine this reliably as systems change, but it was last measured
+    # by hand in 2011.
+    soft_limit, _ = _rlimit_nofile()
+    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
+    opt.jobs = min(opt.jobs, jobs_soft_limit)
+    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
+    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+
   def Execute(self, opt, args):
     manifest = self.outer_manifest
     if not opt.outer_manifest:
@@ -1200,35 +1281,9 @@ later is required to fix a server side protocol bug.
     else:
       print('Skipping update of local manifest project.')
 
-    # Now that the manifests are up-to-date, setup the jobs value.
-    if opt.jobs is None:
-      # User has not made a choice, so use the manifest settings.
-      opt.jobs = mp.default.sync_j
-    if opt.jobs is not None:
-      # Neither user nor manifest have made a choice.
-      if opt.jobs_network is None:
-        opt.jobs_network = opt.jobs
-      if opt.jobs_checkout is None:
-        opt.jobs_checkout = opt.jobs
-    # Setup defaults if jobs==0.
-    if not opt.jobs:
-      if not opt.jobs_network:
-        opt.jobs_network = 1
-      if not opt.jobs_checkout:
-        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
-      opt.jobs = os.cpu_count()
-
-    # Try to stay under user rlimit settings.
-    #
-    # Since each worker requires at 3 file descriptors to run `git fetch`, use
-    # that to scale down the number of jobs. Unfortunately there isn't an easy
-    # way to determine this reliably as systems change, but it was last measured
-    # by hand in 2011.
-    soft_limit, _ = _rlimit_nofile()
-    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
-    opt.jobs = min(opt.jobs, jobs_soft_limit)
-    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
-    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+    # Now that the manifests are up-to-date, setup options whose defaults might
+    # be in the manifest.
+    self._ValidateOptionsWithManifest(opt, mp)
 
     superproject_logging_data = {}
     self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
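As a quick aside on the rlimit clamp that `_ValidateOptionsWithManifest` introduces above, here is a small, self-contained sketch of the arithmetic. It assumes `_rlimit_nofile()` is a thin wrapper over `resource.getrlimit`, and the requested job count is a made-up example.

```python
import resource

# Mirror of the clamp in subcmds/sync.py above: each `git fetch` worker is
# assumed to need roughly 3 file descriptors, and 5 are reserved for repo
# itself, so the job count is scaled down to fit the soft RLIMIT_NOFILE.
soft_limit, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
jobs_soft_limit = max(1, (soft_limit - 5) // 3)

# e.g. with a soft limit of 1024 (a common default), jobs_soft_limit is 339,
# so a larger requested --jobs value gets clamped down to that.
requested_jobs = 512  # hypothetical user request
effective_jobs = min(requested_jobs, jobs_soft_limit)
print(effective_jobs)
```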
@@ -17,6 +17,7 @@ import functools
|||||||
import optparse
|
import optparse
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
from typing import List
|
||||||
|
|
||||||
from command import DEFAULT_LOCAL_JOBS, InteractiveCommand
|
from command import DEFAULT_LOCAL_JOBS, InteractiveCommand
|
||||||
from editor import Editor
|
from editor import Editor
|
||||||
@ -24,21 +25,54 @@ from error import UploadError
|
|||||||
from git_command import GitCommand
|
from git_command import GitCommand
|
||||||
from git_refs import R_HEADS
|
from git_refs import R_HEADS
|
||||||
from hooks import RepoHook
|
from hooks import RepoHook
|
||||||
|
from project import ReviewableBranch
|
||||||
|
|
||||||
|
|
||||||
UNUSUAL_COMMIT_THRESHOLD = 5
|
_DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5
|
||||||
|
|
||||||
|
|
||||||
def _ConfirmManyUploads(multiple_branches=False):
|
def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool:
|
||||||
if multiple_branches:
|
"""Perform basic safety checks on the given set of branches.
|
||||||
print('ATTENTION: One or more branches has an unusually high number '
|
|
||||||
'of commits.')
|
Ensures that each branch does not have a "large" number of commits
|
||||||
else:
|
and, if so, prompts the user to confirm they want to proceed with
|
||||||
print('ATTENTION: You are uploading an unusually high number of commits.')
|
the upload.
|
||||||
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
|
|
||||||
'branches?)')
|
Returns true if all branches pass the safety check or the user
|
||||||
answer = input("If you are sure you intend to do this, type 'yes': ").strip()
|
confirmed. Returns false if the upload should be aborted.
|
||||||
return answer == "yes"
|
"""
|
||||||
|
|
||||||
|
# Determine if any branch has a suspicious number of commits.
|
||||||
|
many_commits = False
|
||||||
|
for branch in branches:
|
||||||
|
# Get the user's unusual threshold for the branch.
|
||||||
|
#
|
||||||
|
# Each branch may be configured to have a different threshold.
|
||||||
|
remote = branch.project.GetBranch(branch.name).remote
|
||||||
|
key = f'review.{remote.review}.uploadwarningthreshold'
|
||||||
|
threshold = branch.project.config.GetInt(key)
|
||||||
|
if threshold is None:
|
||||||
|
threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
|
||||||
|
|
||||||
|
# If the branch has more commits than the threshold, show a warning.
|
||||||
|
if len(branch.commits) > threshold:
|
||||||
|
many_commits = True
|
||||||
|
break
|
||||||
|
|
||||||
|
# If any branch has many commits, prompt the user.
|
||||||
|
if many_commits:
|
||||||
|
if len(branches) > 1:
|
||||||
|
print('ATTENTION: One or more branches has an unusually high number '
|
||||||
|
'of commits.')
|
||||||
|
else:
|
||||||
|
print('ATTENTION: You are uploading an unusually high number of commits.')
|
||||||
|
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
|
||||||
|
'branches?)')
|
||||||
|
answer = input(
|
||||||
|
"If you are sure you intend to do this, type 'yes': ").strip()
|
||||||
|
return answer == 'yes'
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
def _die(fmt, *args):
|
def _die(fmt, *args):
|
||||||
@@ -149,6 +183,13 @@ review.URL.uploadnotify:
 Control e-mail notifications when uploading.
 https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
 
+review.URL.uploadwarningthreshold:
+
+Repo will warn you if you are attempting to upload a large number
+of commits in one or more branches. By default, the threshold
+is five commits. This option allows you to override the warning
+threshold to a different value.
+
 # References
 
 Gerrit Code Review: https://www.gerritcodereview.com/
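Editor's note: the new threshold is looked up per review host through the project's git config and falls back to the built-in default of five commits, as the `_VerifyPendingCommits` helper above shows. A minimal standalone sketch of that fallback logic, assuming a `GetInt`-style lookup that returns `None` when the key is unset (function and argument names here are illustrative, not repo code):

# Hypothetical sketch of the threshold lookup performed by _VerifyPendingCommits.
_DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5

def commit_warning_threshold(config_getint, review_url):
  # config_getint mimics GitConfig.GetInt: it returns None when the key is unset.
  key = f'review.{review_url}.uploadwarningthreshold'
  value = config_getint(key)
  return value if value is not None else _DEFAULT_UNUSUAL_COMMIT_THRESHOLD

# With no explicit setting, the default of 5 applies.
print(commit_warning_threshold({}.get, 'https://gerrit-review.googlesource.com/'))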
@@ -252,25 +293,22 @@ Gerrit Code Review: https://www.gerritcodereview.com/
       for commit in commit_list:
         print(' %s' % commit)
 
-      print('to %s (y/N)? ' % remote.review, end='')
-      # TODO: When we require Python 3, use flush=True w/print above.
-      sys.stdout.flush()
+      print('to %s (y/N)? ' % remote.review, end='', flush=True)
       if opt.yes:
         print('<--yes>')
         answer = True
       else:
         answer = sys.stdin.readline().strip().lower()
         answer = answer in ('y', 'yes', '1', 'true', 't')
 
-    if not opt.yes and answer:
-      if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
-        answer = _ConfirmManyUploads()
+      if not answer:
+        _die("upload aborted by user")
 
-    if answer:
-      self._UploadAndReport(opt, [branch], people)
-    else:
-      _die("upload aborted by user")
+    # Perform some basic safety checks prior to uploading.
+    if not opt.yes and not _VerifyPendingCommits([branch]):
+      _die("upload aborted by user")
+
+    self._UploadAndReport(opt, [branch], people)
 
   def _MultipleBranches(self, opt, pending, people):
     projects = {}
     branches = {}
@@ -278,8 +316,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
     script = []
     script.append('# Uncomment the branches to upload:')
     for project, avail in pending:
+      project_path = project.RelPath(local=opt.this_manifest_only)
       script.append('#')
-      script.append('# project %s/:' % project.RelPath(local=opt.this_manifest_only))
+      script.append(f'# project {project_path}/:')
 
       b = {}
       for branch in avail:
@@ -302,8 +341,8 @@ Gerrit Code Review: https://www.gerritcodereview.com/
           script.append('# %s' % commit)
         b[name] = branch
 
-      projects[project.RelPath(local=opt.this_manifest_only)] = project
-      branches[project.name] = b
+      projects[project_path] = project
+      branches[project_path] = b
     script.append('')
 
     script = Editor.EditString("\n".join(script)).split("\n")
@@ -328,22 +367,17 @@ Gerrit Code Review: https://www.gerritcodereview.com/
         name = m.group(1)
         if not project:
           _die('project for branch %s not in script', name)
-        branch = branches[project.name].get(name)
+        project_path = project.RelPath(local=opt.this_manifest_only)
+        branch = branches[project_path].get(name)
         if not branch:
-          _die('branch %s not in %s', name, project.RelPath(local=opt.this_manifest_only))
+          _die('branch %s not in %s', name, project_path)
         todo.append(branch)
     if not todo:
       _die("nothing uncommented for upload")
 
-    if not opt.yes:
-      many_commits = False
-      for branch in todo:
-        if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
-          many_commits = True
-          break
-      if many_commits:
-        if not _ConfirmManyUploads(multiple_branches=True):
-          _die("upload aborted by user")
+    # Perform some basic safety checks prior to uploading.
+    if not opt.yes and not _VerifyPendingCommits(todo):
+      _die("upload aborted by user")
 
     self._UploadAndReport(opt, todo, people)
 
@@ -401,9 +435,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
       print('Uncommitted changes in %s (did you forget to amend?):'
             % branch.project.name)
       print('\n'.join(changes))
-      print('Continue uploading? (y/N) ', end='')
-      # TODO: When we require Python 3, use flush=True w/print above.
-      sys.stdout.flush()
+      print('Continue uploading? (y/N) ', end='', flush=True)
       if opt.yes:
         print('<--yes>')
         a = 'yes'
@@ -452,19 +484,24 @@ Gerrit Code Review: https://www.gerritcodereview.com/
 
       destination = opt.dest_branch or branch.project.dest_branch
 
-      # Make sure our local branch is not setup to track a different remote branch
-      merge_branch = self._GetMergeBranch(branch.project)
-      if destination:
+      if branch.project.dest_branch and not opt.dest_branch:
+        merge_branch = self._GetMergeBranch(
+            branch.project, local_branch=branch.name)
+
         full_dest = destination
         if not full_dest.startswith(R_HEADS):
           full_dest = R_HEADS + full_dest
 
-        if not opt.dest_branch and merge_branch and merge_branch != full_dest:
-          print('merge branch %s does not match destination branch %s'
-                % (merge_branch, full_dest))
+        # If the merge branch of the local branch is different from the
+        # project's revision AND destination, this might not be intentional.
+        if (merge_branch and merge_branch != branch.project.revisionExpr
+                and merge_branch != full_dest):
+          print(f'For local branch {branch.name}: merge branch '
+                f'{merge_branch} does not match destination branch '
+                f'{destination}')
           print('skipping upload.')
-          print('Please use `--destination %s` if this is intentional'
-                % destination)
+          print(f'Please use `--destination {destination}` if this is intentional')
           branch.uploaded = False
           continue
 
@@ -514,13 +551,14 @@ Gerrit Code Review: https://www.gerritcodereview.com/
     if have_errors:
       sys.exit(1)
 
-  def _GetMergeBranch(self, project):
-    p = GitCommand(project,
-                   ['rev-parse', '--abbrev-ref', 'HEAD'],
-                   capture_stdout=True,
-                   capture_stderr=True)
-    p.Wait()
-    local_branch = p.stdout.strip()
+  def _GetMergeBranch(self, project, local_branch=None):
+    if local_branch is None:
+      p = GitCommand(project,
+                     ['rev-parse', '--abbrev-ref', 'HEAD'],
+                     capture_stdout=True,
+                     capture_stderr=True)
+      p.Wait()
+      local_branch = p.stdout.strip()
     p = GitCommand(project,
                    ['config', '--get', 'branch.%s.merge' % local_branch],
                    capture_stdout=True,
@@ -583,9 +621,8 @@ Gerrit Code Review: https://www.gerritcodereview.com/
     hook = RepoHook.FromSubcmd(
         hook_type='pre-upload', manifest=manifest,
         opt=opt, abort_if_user_denies=True)
-    if not hook.Run(
-        project_list=pending_proj_names,
-        worktree_list=pending_worktrees):
+    if not hook.Run(project_list=pending_proj_names,
+                    worktree_list=pending_worktrees):
       ret = 1
     if ret:
       return ret
25 tests/conftest.py Normal file
@@ -0,0 +1,25 @@
+# Copyright 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common fixtures for pytests."""
+
+import pytest
+
+import repo_trace
+
+
+@pytest.fixture(autouse=True)
+def disable_repo_trace(tmp_path):
+  """Set an environment marker to relax certain strict checks for test code."""
+  repo_trace._TRACE_FILE = str(tmp_path / 'TRACE_FILE_from_test')
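Editor's note: the fixture above is marked `autouse`, so pytest runs it for every test in scope without any test requesting it by name. A minimal, self-contained illustration of that behavior (hypothetical test names, not part of the repo test suite):

import pytest

@pytest.fixture(autouse=True)
def marker(tmp_path):
  # Runs before every test in scope, even though no test asks for it by name.
  (tmp_path / 'marker.txt').write_text('set')

def test_marker_exists(tmp_path):
  # tmp_path is the same per-test directory the autouse fixture wrote into.
  assert (tmp_path / 'marker.txt').read_text() == 'set'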
@@ -15,6 +15,7 @@
 """Unittests for the git_command.py module."""
 
 import re
+import os
 import unittest
 
 try:
@@ -26,6 +27,38 @@ import git_command
 import wrapper
 
 
+class GitCommandTest(unittest.TestCase):
+  """Tests the GitCommand class (via git_command.git)."""
+
+  def setUp(self):
+
+    def realpath_mock(val):
+      return val
+
+    mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start()
+
+  def tearDown(self):
+    mock.patch.stopall()
+
+  def test_alternative_setting_when_matching(self):
+    r = git_command._build_env(
+        objdir = os.path.join('zap', 'objects'),
+        gitdir = 'zap'
+    )
+
+    self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
+
+  def test_alternative_setting_when_different(self):
+    r = git_command._build_env(
+        objdir = os.path.join('wow', 'objects'),
+        gitdir = 'zap'
+    )
+
+    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
+
+
 class GitCallUnitTest(unittest.TestCase):
   """Tests the _GitCall class (via git_command.git)."""
 
@@ -84,7 +117,8 @@ class GitRequireTests(unittest.TestCase):
   """Test the git_require helper."""
 
   def setUp(self):
-    ver = wrapper.GitVersion(1, 2, 3, 4)
+    self.wrapper = wrapper.Wrapper()
+    ver = self.wrapper.GitVersion(1, 2, 3, 4)
     mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
 
   def tearDown(self):
@@ -186,7 +186,3 @@ class GitConfigReadWriteTests(unittest.TestCase):
     for key, value in TESTS:
       self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
     self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])
-
-
-if __name__ == '__main__':
-  unittest.main()
@@ -163,6 +163,7 @@ class SuperprojectTestCase(unittest.TestCase):
     sync_result = self._superproject.Sync(self.git_event_log)
     self.assertFalse(sync_result.success)
     self.assertTrue(sync_result.fatal)
+    self.verifyErrorEvent()
 
   def test_superproject_get_superproject_mock_init(self):
     """Test with _Init failing."""
@@ -365,6 +366,40 @@ class SuperprojectTestCase(unittest.TestCase):
         '<superproject name="superproject"/>'
         '</manifest>')
 
-
-if __name__ == '__main__':
-  unittest.main()
+  def test_Fetch(self):
+    manifest = self.getXmlManifest("""
+<manifest>
+  <remote name="default-remote" fetch="http://localhost" />
+  <default remote="default-remote" revision="refs/heads/main" />
+  <superproject name="superproject"/>
+  " /></manifest>
+""")
+    self.maxDiff = None
+    self._superproject = git_superproject.Superproject(
+        manifest, name='superproject',
+        remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
+        revision='refs/heads/main')
+    os.mkdir(self._superproject._superproject_path)
+    os.mkdir(self._superproject._work_git)
+    with mock.patch.object(self._superproject, '_Init', return_value=True):
+      with mock.patch('git_superproject.GitCommand', autospec=True) as mock_git_command:
+        with mock.patch('git_superproject.GitRefs.get', autospec=True) as mock_git_refs:
+          instance = mock_git_command.return_value
+          instance.Wait.return_value = 0
+          mock_git_refs.side_effect = ['', '1234']
+
+          self.assertTrue(self._superproject._Fetch())
+          self.assertEqual(mock_git_command.call_args.args,(None, [
+              'fetch', 'http://localhost/superproject', '--depth', '1',
+              '--force', '--no-tags', '--filter', 'blob:none',
+              'refs/heads/main:refs/heads/main'
+          ]))
+
+          # If branch for revision exists, set as --negotiation-tip.
+          self.assertTrue(self._superproject._Fetch())
+          self.assertEqual(mock_git_command.call_args.args,(None, [
+              'fetch', 'http://localhost/superproject', '--depth', '1',
+              '--force', '--no-tags', '--filter', 'blob:none',
+              '--negotiation-tip', '1234',
+              'refs/heads/main:refs/heads/main'
+          ]))
@@ -369,7 +369,7 @@ class EventLogTestCase(unittest.TestCase):
     server_thread.start()
 
     with server_ready:
-      server_ready.wait()
+      server_ready.wait(timeout=120)
 
     self._event_log_module.StartEvent()
     path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
@@ -385,7 +385,3 @@ class EventLogTestCase(unittest.TestCase):
     # Check for 'start' event specific fields.
     self.assertIn('argv', start_event)
     self.assertIsInstance(start_event['argv'], list)
-
-
-if __name__ == '__main__':
-  unittest.main()
@@ -115,7 +115,7 @@ class ManifestParseTestCase(unittest.TestCase):
 
   def getXmlManifest(self, data):
     """Helper to initialize a manifest for testing."""
-    with open(self.manifest_file, 'w') as fp:
+    with open(self.manifest_file, 'w', encoding="utf-8") as fp:
       fp.write(data)
     return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
 
@@ -262,10 +262,10 @@ class XmlManifestTests(ManifestParseTestCase):
         '<project name="r" groups="keep"/>'
         '</manifest>')
     self.assertEqual(
-        manifest.ToXml(omit_local=True).toxml(),
+        sort_attributes(manifest.ToXml(omit_local=True).toxml()),
         '<?xml version="1.0" ?><manifest>'
-        '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
-        '<project name="q"/><project name="r" groups="keep"/></manifest>')
+        '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+        '<project name="q"/><project groups="keep" name="r"/></manifest>')
 
   def test_toxml_with_local(self):
     """Does include local_manifests projects when omit_local=False."""
@@ -277,11 +277,11 @@ class XmlManifestTests(ManifestParseTestCase):
         '<project name="r" groups="keep"/>'
         '</manifest>')
     self.assertEqual(
-        manifest.ToXml(omit_local=False).toxml(),
+        sort_attributes(manifest.ToXml(omit_local=False).toxml()),
         '<?xml version="1.0" ?><manifest>'
-        '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
-        '<project name="p" groups="local::me"/>'
-        '<project name="q"/><project name="r" groups="keep"/></manifest>')
+        '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
+        '<project groups="local::me" name="p"/>'
+        '<project name="q"/><project groups="keep" name="r"/></manifest>')
 
   def test_repo_hooks(self):
     """Check repo-hooks settings."""
@@ -426,7 +426,7 @@ class IncludeElementTests(ManifestParseTestCase):
     def parse(name):
      name = self.encodeXmlAttr(name)
      # Setup target of the include.
-      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w') as fp:
+      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp:
        fp.write(f'<manifest><include name="{name}"/></manifest>')
 
      manifest = self.getXmlManifest("""
@@ -517,22 +517,22 @@ class ProjectElementTests(ManifestParseTestCase):
     """)
 
     manifest = parse('a/path/', 'foo')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo/')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo//////')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
   def test_toplevel_path(self):
     """Check handling of path=. specially."""
@@ -549,8 +549,8 @@ class ProjectElementTests(ManifestParseTestCase):
 
     for path in ('.', './', './/', './//'):
       manifest = parse('server/path', path)
-      self.assertEqual(manifest.projects[0].gitdir,
-                       os.path.join(self.tempdir, '.repo/projects/..git'))
+      self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                       os.path.join(self.tempdir, '.repo', 'projects', '..git'))
 
   def test_bad_path_name_checks(self):
     """Check handling of bad path & name attributes."""
@@ -576,7 +576,7 @@ class ProjectElementTests(ManifestParseTestCase):
       parse('', 'ok')
 
     for path in INVALID_FS_PATHS:
-      if not path or path.endswith('/'):
+      if not path or path.endswith('/') or path.endswith(os.path.sep):
        continue
 
      with self.assertRaises(error.ManifestInvalidPathError):
@@ -22,6 +22,7 @@ import tempfile
 import unittest
 
 import error
+import manifest_xml
 import git_command
 import git_config
 import platform_utils
@@ -105,7 +106,7 @@ class ReviewableBranchTests(unittest.TestCase):
 class CopyLinkTestCase(unittest.TestCase):
   """TestCase for stub repo client checkouts.
 
-  It'll have a layout like:
+  It'll have a layout like this:
     tempdir/        # self.tempdir
       checkout/     # self.topdir
         git-project/  # self.worktree
@@ -376,7 +377,7 @@ class MigrateWorkTreeTests(unittest.TestCase):
 
       # Make sure the dir was transformed into a symlink.
      self.assertTrue(dotgit.is_symlink())
-      self.assertEqual(os.readlink(dotgit), '../../.repo/projects/src/test.git')
+      self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))
 
      # Make sure files were moved over.
      gitdir = tempdir / '.repo/projects/src/test.git'
@@ -403,3 +404,79 @@ class MigrateWorkTreeTests(unittest.TestCase):
         self.assertTrue((dotgit / name).is_file())
       for name in self._SYMLINKS:
         self.assertTrue((dotgit / name).is_symlink())
+
+
+class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
+  """Ensure properties are fetched properly."""
+
+  def setUpManifest(self, tempdir):
+    repodir = os.path.join(tempdir, '.repo')
+    manifest_dir = os.path.join(repodir, 'manifests')
+    manifest_file = os.path.join(
+        repodir, manifest_xml.MANIFEST_FILE_NAME)
+    local_manifest_dir = os.path.join(
+        repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
+    os.mkdir(repodir)
+    os.mkdir(manifest_dir)
+    manifest = manifest_xml.XmlManifest(repodir, manifest_file)
+
+    return project.ManifestProject(
+        manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)
+
+  def test_manifest_config_properties(self):
+    """Test we are fetching the manifest config properties correctly."""
+
+    with TempGitTree() as tempdir:
+      fakeproj = self.setUpManifest(tempdir)
+
+      # Set property using the expected Set method, then ensure
+      # the porperty functions are using the correct Get methods.
+      fakeproj.config.SetString(
+          'manifest.standalone', 'https://chicken/manifest.git')
+      self.assertEqual(
+          fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')
+
+      fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
+      self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')
+
+      fakeproj.config.SetString('repo.reference', 'mirror/ref')
+      self.assertEqual(fakeproj.reference, 'mirror/ref')
+
+      fakeproj.config.SetBoolean('repo.dissociate', False)
+      self.assertFalse(fakeproj.dissociate)
+
+      fakeproj.config.SetBoolean('repo.archive', False)
+      self.assertFalse(fakeproj.archive)
+
+      fakeproj.config.SetBoolean('repo.mirror', False)
+      self.assertFalse(fakeproj.mirror)
+
+      fakeproj.config.SetBoolean('repo.worktree', False)
+      self.assertFalse(fakeproj.use_worktree)
+
+      fakeproj.config.SetBoolean('repo.clonebundle', False)
+      self.assertFalse(fakeproj.clone_bundle)
+
+      fakeproj.config.SetBoolean('repo.submodules', False)
+      self.assertFalse(fakeproj.submodules)
+
+      fakeproj.config.SetBoolean('repo.git-lfs', False)
+      self.assertFalse(fakeproj.git_lfs)
+
+      fakeproj.config.SetBoolean('repo.superproject', False)
+      self.assertFalse(fakeproj.use_superproject)
+
+      fakeproj.config.SetBoolean('repo.partialclone', False)
+      self.assertFalse(fakeproj.partial_clone)
+
+      fakeproj.config.SetString('repo.depth', '48')
+      self.assertEqual(fakeproj.depth, '48')
+
+      fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
+      self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')
+
+      fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
+      self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')
+
+      fakeproj.config.SetString('manifest.platform', 'auto')
+      self.assertEqual(fakeproj.manifest_platform, 'auto')
56 tests/test_repo_trace.py Normal file
@@ -0,0 +1,56 @@
+# Copyright 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for the repo_trace.py module."""
+
+import os
+import unittest
+from unittest import mock
+
+import repo_trace
+
+
+class TraceTests(unittest.TestCase):
+  """Check Trace behavior."""
+
+  def testTrace_MaxSizeEnforced(self):
+    content = 'git chicken'
+
+    with repo_trace.Trace(content, first_trace=True):
+      pass
+    first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)
+
+    with repo_trace.Trace(content):
+      pass
+    self.assertGreater(
+        os.path.getsize(repo_trace._TRACE_FILE), first_trace_size)
+
+    # Check we clear everything is the last chunk is larger than _MAX_SIZE.
+    with mock.patch('repo_trace._MAX_SIZE', 0):
+      with repo_trace.Trace(content, first_trace=True):
+        pass
+      self.assertEqual(first_trace_size,
+                       os.path.getsize(repo_trace._TRACE_FILE))
+
+    # Check we only clear the chunks we need to.
+    repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
+    with repo_trace.Trace(content, first_trace=True):
+      pass
+    self.assertEqual(first_trace_size * 2,
+                     os.path.getsize(repo_trace._TRACE_FILE))
+
+    with repo_trace.Trace(content, first_trace=True):
+      pass
+    self.assertEqual(first_trace_size * 2,
+                     os.path.getsize(repo_trace._TRACE_FILE))
@@ -11,27 +11,26 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 """Unittests for the subcmds/sync.py module."""
 
+import os
+import unittest
 from unittest import mock
 
 import pytest
 
+import command
 from subcmds import sync
 
 
-@pytest.mark.parametrize(
-    'use_superproject, cli_args, result',
-    [
+@pytest.mark.parametrize('use_superproject, cli_args, result', [
     (True, ['--current-branch'], True),
     (True, ['--no-current-branch'], True),
     (True, [], True),
     (False, ['--current-branch'], True),
     (False, ['--no-current-branch'], False),
     (False, [], None),
-    ]
-)
+])
 def test_get_current_branch_only(use_superproject, cli_args, result):
   """Test Sync._GetCurrentBranchOnly logic.
@@ -41,5 +40,94 @@ def test_get_current_branch_only(use_superproject, cli_args, result):
   cmd = sync.Sync()
   opts, _ = cmd.OptionParser.parse_args(cli_args)
 
-  with mock.patch('git_superproject.UseSuperproject', return_value=use_superproject):
+  with mock.patch('git_superproject.UseSuperproject',
+                  return_value=use_superproject):
     assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
+
+
+# Used to patch os.cpu_count() for reliable results.
+OS_CPU_COUNT = 24
+
+@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
+    # No user or manifest settings.
+    ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
+    # No user settings, so manifest settings control.
+    ([], 3, 3, 3, 3),
+    # User settings, but no manifest.
+    (['--jobs=4'], None, 4, 4, 4),
+    (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
+    (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
+    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
+    (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
+    (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
+    (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
+    # User settings with manifest settings.
+    (['--jobs=4'], 3, 4, 4, 4),
+    (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
+    (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
+    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
+    (['--jobs-network=5'], 3, 3, 5, 3),
+    (['--jobs-checkout=6'], 3, 3, 3, 6),
+    (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
+    # Settings that exceed rlimits get capped.
+    (['--jobs=1000000'], None, 83, 83, 83),
+    ([], 1000000, 83, 83, 83),
+])
+def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
+  """Tests --jobs option behavior."""
+  mp = mock.MagicMock()
+  mp.manifest.default.sync_j = jobs_manifest
+
+  cmd = sync.Sync()
+  opts, args = cmd.OptionParser.parse_args(argv)
+  cmd.ValidateOptions(opts, args)
+
+  with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
+    with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
+      cmd._ValidateOptionsWithManifest(opts, mp)
+      assert opts.jobs == jobs
+      assert opts.jobs_network == jobs_net
+      assert opts.jobs_checkout == jobs_check
+
+
+class GetPreciousObjectsState(unittest.TestCase):
+  """Tests for _GetPreciousObjectsState."""
+
+  def setUp(self):
+    """Common setup."""
+    self.cmd = sync.Sync()
+    self.project = p = mock.MagicMock(use_git_worktrees=False,
+                                      UseAlternates=False)
+    p.manifest.GetProjectsWithName.return_value = [p]
+
+    self.opt = mock.Mock(spec_set=['this_manifest_only'])
+    self.opt.this_manifest_only = False
+
+  def test_worktrees(self):
+    """False for worktrees."""
+    self.project.use_git_worktrees = True
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_not_shared(self):
+    """Singleton project."""
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_shared(self):
+    """Shared project."""
+    self.project.manifest.GetProjectsWithName.return_value = [
+        self.project, self.project
+    ]
+    self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_shared_with_alternates(self):
+    """Shared project, with alternates."""
+    self.project.manifest.GetProjectsWithName.return_value = [
+        self.project, self.project
+    ]
+    self.project.UseAlternates = True
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
+
+  def test_not_found(self):
+    """Project not found in manifest."""
+    self.project.manifest.GetProjectsWithName.return_value = []
+    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
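Editor's note on the capped value in the parametrized cases above: with the file-descriptor soft limit mocked to 256, the expected cap of 83 is consistent with deriving it as (soft_limit - 5) // 3. That formula is an inference from the test data, not quoted from sync.py:

# Assumed relationship between the mocked rlimit of 256 and the capped jobs value of 83.
soft_limit = 256
assert max(1, (soft_limit - 5) // 3) == 83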
28 tests/test_update_manpages.py Normal file
@@ -0,0 +1,28 @@
+# Copyright 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for the update_manpages module."""
+
+import unittest
+
+from release import update_manpages
+
+
+class UpdateManpagesTest(unittest.TestCase):
+  """Tests the update-manpages code."""
+
+  def test_replace_regex(self):
+    """Check that replace_regex works."""
+    data = '\n\033[1mSummary\033[m\n'
+    self.assertEqual(update_manpages.replace_regex(data),'\nSummary\n')
@@ -38,7 +38,7 @@ class RepoWrapperTestCase(unittest.TestCase):
 
   def setUp(self):
     """Load the wrapper module every time."""
-    wrapper._wrapper_module = None
+    wrapper.Wrapper.cache_clear()
     self.wrapper = wrapper.Wrapper()
 
 
@@ -59,12 +59,12 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
   def test_python_constraints(self):
     """The launcher should never require newer than main.py."""
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_SOFT)
+                            self.wrapper.MIN_PYTHON_VERSION_SOFT)
     # Make sure the versions are themselves in sync.
-    self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+    self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT,
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
 
   def test_init_parser(self):
     """Make sure 'init' GetParser works."""
@@ -159,7 +159,9 @@ class RunCommand(RepoWrapperTestCase):
   def test_capture(self):
     """Check capture_output handling."""
     ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
-    self.assertEqual(ret.stdout, 'hi\n')
+    # echo command appends OS specific linesep, but on Windows + Git Bash
+    # we get UNIX ending, so we allow both.
+    self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n'])
 
   def test_check(self):
     """Check check handling."""
@@ -456,7 +458,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')
 
   def test_explicit_tag(self):
@@ -465,7 +467,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
     self.assertEqual('refs/tags/v1.0', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')
 
   def test_branch_name(self):
@@ -500,7 +502,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
 
   def test_unknown(self):
     """Check unknown ref/commit argument."""
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')
 
 
@@ -551,7 +553,3 @@ class CheckRepoRev(GitCheckoutTestCase):
     rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
-
-
-if __name__ == '__main__':
-  unittest.main()

12 tox.ini
@@ -15,7 +15,7 @@
 # https://tox.readthedocs.io/
 
 [tox]
-envlist = py36, py37, py38, py39
+envlist = py36, py37, py38, py39, py310
 
 [gh-actions]
 python =
@@ -23,11 +23,17 @@ python =
     3.7: py37
     3.8: py38
     3.9: py39
+    3.10: py310
 
 [testenv]
-deps = pytest
-commands = {envpython} run_tests
+deps =
+    pytest
+    pytest-timeout
+commands = {envpython} run_tests {posargs}
 setenv =
     GIT_AUTHOR_NAME = Repo test author
     GIT_COMMITTER_NAME = Repo test committer
     EMAIL = repo@gerrit.nodomain
+
+[pytest]
+timeout = 300

23 wrapper.py
@@ -12,12 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-try:
-  from importlib.machinery import SourceFileLoader
-  _loader = lambda *args: SourceFileLoader(*args).load_module()
-except ImportError:
-  import imp
-  _loader = lambda *args: imp.load_source(*args)
+import functools
+import importlib.machinery
+import importlib.util
 import os
 
 
@@ -25,11 +22,11 @@ def WrapperPath():
   return os.path.join(os.path.dirname(__file__), 'repo')
 
 
-_wrapper_module = None
-
-
+@functools.lru_cache(maxsize=None)
 def Wrapper():
-  global _wrapper_module
-  if not _wrapper_module:
-    _wrapper_module = _loader('wrapper', WrapperPath())
-  return _wrapper_module
+  modname = 'wrapper'
+  loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
+  spec = importlib.util.spec_from_loader(modname, loader)
+  module = importlib.util.module_from_spec(spec)
+  loader.exec_module(module)
+  return module
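Editor's note on the new `functools.lru_cache` decorator: because `Wrapper()` now memoizes its result, the test change above (`wrapper.Wrapper.cache_clear()`) is what forces a fresh module load for each test. A minimal, self-contained illustration of that behavior, not repo code:

import functools

@functools.lru_cache(maxsize=None)
def load():
  # Stands in for the wrapper-module loader; any expensive constructor works.
  return object()

a = load()
load.cache_clear()  # without this, load() keeps returning the same object
b = load()
assert a is not b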