Compare commits


1 commit

a99f19f40e docs: add deprecated branch banner
The master branch is dead.  Add banners to all the docs in case people
try referring to these and don't realize they're on the wrong branch.

Change-Id: I3488d0d96df25fafd7285848fe9f519b4205519c
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/400918
Tested-by: Mike Frysinger <vapier@google.com>
Reviewed-by: Josip Sokcevic <sokcevic@google.com>
2024-01-04 17:47:22 +00:00
81 changed files with 2295 additions and 4742 deletions

View File

@ -5,7 +5,7 @@ name: Test CI
on:
push:
branches: [main, repo-1, stable, maint]
branches: [master, repo-1, stable, maint]
tags: [v*]
jobs:
@ -14,7 +14,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.5, 3.6, 3.7, 3.8, 3.9]
python-version: [3.6, 3.7, 3.8]
runs-on: ${{ matrix.os }}
steps:

.gitignore
View File

@ -7,7 +7,6 @@ __pycache__
.repopickle_*
/repoc
/.tox
/.venv
# PyCharm related
/.idea/

View File

@ -1,5 +1,8 @@
# repo
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/README.md
Repo is a tool built on top of Git. Repo helps manage many Git repositories,
does the uploads to revision control systems, and automates parts of the
development workflow. Repo is not meant to replace Git, only to make it

View File

@ -1,3 +1,6 @@
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/SUBMITTING_PATCHES.md
[TOC]
# Short Version
@ -10,7 +13,7 @@
- Make corrections if requested.
- Verify your changes on gerrit so they can be submitted.
`git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/main`
`git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`
# Long Version
@ -150,7 +153,7 @@ Push your patches over HTTPS to the review server, possibly through
a remembered remote to make this easier in the future:
git config remote.review.url https://gerrit-review.googlesource.com/git-repo
git config remote.review.push HEAD:refs/for/main
git config remote.review.push HEAD:refs/for/master
git push review

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
import os
import optparse
import platform
@ -22,21 +23,6 @@ import sys
from event_log import EventLog
from error import NoSuchProjectError
from error import InvalidProjectGroupsError
import progress
# Number of projects to submit to a single worker process at a time.
# This number represents a tradeoff between the overhead of IPC and finer
# grained opportunity for parallelism. This particular value was chosen by
# iterating through powers of two until the overall performance no longer
# improved. The performance of this batch size is not a function of the
# number of cores on the system.
WORKER_BATCH_SIZE = 32
# How many jobs to run in parallel by default? This assumes the jobs are
# largely I/O bound and do not hit the network.
DEFAULT_LOCAL_JOBS = min(os.cpu_count(), 8)
class Command(object):
@ -48,10 +34,6 @@ class Command(object):
manifest = None
_optparse = None
# Whether this command supports running in parallel. If greater than 0,
# it is the number of parallel jobs to default to.
PARALLEL_JOBS = None
def WantPager(self, _opt):
return False
@ -86,33 +68,12 @@ class Command(object):
usage = 'repo %s' % self.NAME
epilog = 'Run `repo help %s` to view the detailed manual.' % self.NAME
self._optparse = optparse.OptionParser(usage=usage, epilog=epilog)
self._CommonOptions(self._optparse)
self._Options(self._optparse)
return self._optparse
def _CommonOptions(self, p, opt_v=True):
"""Initialize the option parser with common options.
These will show up for *all* subcommands, so use sparingly.
NB: Keep in sync with repo:InitParser().
"""
g = p.add_option_group('Logging options')
opts = ['-v'] if opt_v else []
g.add_option(*opts, '--verbose',
dest='output_mode', action='store_true',
help='show all output')
g.add_option('-q', '--quiet',
dest='output_mode', action='store_false',
help='only show errors')
if self.PARALLEL_JOBS is not None:
p.add_option(
'-j', '--jobs',
type=int, default=self.PARALLEL_JOBS,
help='number of jobs to run in parallel (default: %s)' % self.PARALLEL_JOBS)
def _Options(self, p):
"""Initialize the option parser with subcommand-specific options."""
"""Initialize the option parser.
"""
def _RegisteredEnvironmentOptions(self):
"""Get options that can be set from environment variables.
@ -138,11 +99,6 @@ class Command(object):
self.OptionParser.print_usage()
sys.exit(1)
def CommonValidateOptions(self, opt, args):
"""Validate common options."""
opt.quiet = opt.output_mode is False
opt.verbose = opt.output_mode is True
def ValidateOptions(self, opt, args):
"""Validate the user options & arguments before executing.
@ -158,44 +114,6 @@ class Command(object):
"""
raise NotImplementedError
@staticmethod
def ExecuteInParallel(jobs, func, inputs, callback, output=None, ordered=False):
"""Helper for managing parallel execution boiler plate.
For subcommands that can easily split their work up.
Args:
jobs: How many parallel processes to use.
func: The function to apply to each of the |inputs|. Usually a
functools.partial for wrapping additional arguments. It will be run
in a separate process, so it must be picklable; nested functions
won't work. Methods on the subcommand Command class should work.
inputs: The list of items to process. Must be a list.
callback: The function to pass the results to for processing. It will be
executed in the main thread and process the results of |func| as they
become available. Thus it may be a local nested function. Its return
value is passed back directly. It takes three arguments:
- The processing pool (or None with one job).
- The |output| argument.
- An iterator for the results.
output: An output manager. May be progress.Progress or color.Coloring.
ordered: Whether the jobs should be processed in order.
Returns:
The |callback| function's results are returned.
"""
try:
# NB: Multiprocessing is heavy, so don't spin it up for one job.
if len(inputs) == 1 or jobs == 1:
return callback(None, output, (func(x) for x in inputs))
else:
with multiprocessing.Pool(jobs) as pool:
submit = pool.imap if ordered else pool.imap_unordered
return callback(pool, output, submit(func, inputs, chunksize=WORKER_BATCH_SIZE))
finally:
if isinstance(output, progress.Progress):
output.end()
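
To make the removed helper concrete, a subcommand would drive `ExecuteInParallel` roughly like this (a sketch only; `_ProcessOne`, `_Report`, and the sample inputs are hypothetical, and the snippet assumes this module's `Command` class is in scope):

```python
import functools

def _ProcessOne(path, verbose=False):
  """Worker: runs in a separate process, so it must be picklable."""
  return (path, len(path))

def _Report(pool, output, results):
  """Callback: runs in the main thread as results arrive."""
  return {path: size for path, size in results}

sizes = Command.ExecuteInParallel(
    jobs=4,
    func=functools.partial(_ProcessOne, verbose=True),
    inputs=['foo/bar', 'baz'],
    callback=_Report)
```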
def _ResetPathToProjectMap(self, projects):
self._by_path = dict((p.worktree, p) for p in projects)
@ -239,7 +157,9 @@ class Command(object):
mp = manifest.manifestProject
if not groups:
groups = manifest.GetGroupsStr()
groups = mp.config.GetString('manifest.groups')
if not groups:
groups = 'default,platform-' + platform.system().lower()
groups = [x for x in re.split(r'[,\s]+', groups) if x]
if not args:

View File

@ -1,121 +0,0 @@
# Copyright 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Programmable bash completion. https://github.com/scop/bash-completion
# Complete the list of repo subcommands.
__complete_repo_list_commands() {
local repo=${COMP_WORDS[0]}
(
# Handle completions if running outside of a checkout.
if ! "${repo}" help --all 2>/dev/null; then
repo help 2>/dev/null
fi
) | sed -n '/^ /{s/ \([^ ]\+\) .\+/\1/;p}'
}
# Complete list of all branches available in all projects in the repo client
# checkout.
__complete_repo_list_branches() {
local repo=${COMP_WORDS[0]}
"${repo}" branches 2>/dev/null | \
sed -n '/|/{s/[ *][Pp ] *\([^ ]\+\) .*/\1/;p}'
}
# Complete list of all projects available in the repo client checkout.
__complete_repo_list_projects() {
local repo=${COMP_WORDS[0]}
"${repo}" list -n 2>/dev/null
}
# Complete the repo <command> argument.
__complete_repo_command() {
if [[ ${COMP_CWORD} -ne 1 ]]; then
return 1
fi
local command=${COMP_WORDS[1]}
COMPREPLY=($(compgen -W "$(__complete_repo_list_commands)" -- "${command}"))
return 0
}
# Complete repo subcommands that take <branch> <projects>.
__complete_repo_command_branch_projects() {
local current=$1
if [[ ${COMP_CWORD} -eq 2 ]]; then
COMPREPLY=($(compgen -W "$(__complete_repo_list_branches)" -- "${current}"))
else
COMPREPLY=($(compgen -W "$(__complete_repo_list_projects)" -- "${current}"))
fi
}
# Complete repo subcommands that take only <projects>.
__complete_repo_command_projects() {
local current=$1
COMPREPLY=($(compgen -W "$(__complete_repo_list_projects)" -- "${current}"))
}
# Complete the repo subcommand arguments.
__complete_repo_arg() {
if [[ ${COMP_CWORD} -le 1 ]]; then
return 1
fi
local command=${COMP_WORDS[1]}
local current=${COMP_WORDS[COMP_CWORD]}
case ${command} in
abandon|checkout)
__complete_repo_command_branch_projects "${current}"
return 0
;;
branch|branches|diff|info|list|overview|prune|rebase|smartsync|stage|status|\
sync|upload)
__complete_repo_command_projects "${current}"
return 0
;;
help)
if [[ ${COMP_CWORD} -eq 2 ]]; then
COMPREPLY=(
$(compgen -W "$(__complete_repo_list_commands)" -- "${current}")
)
fi
return 0
;;
start)
if [[ ${COMP_CWORD} -gt 2 ]]; then
COMPREPLY=(
$(compgen -W "$(__complete_repo_list_projects)" -- "${current}")
)
fi
return 0
;;
*)
return 1
;;
esac
}
# Complete the repo arguments.
__complete_repo() {
COMPREPLY=()
__complete_repo_command && return 0
__complete_repo_arg && return 0
return 0
}
complete -F __complete_repo repo

View File

@ -1,5 +1,8 @@
# Repo internal filesystem layout
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/internal-fs-layout.md
A reference to the `.repo/` tree in repo client checkouts.
Hopefully it's complete & up-to-date, but who knows!
@ -93,25 +96,6 @@ support, see the [manifest-format.md] file.
### Project objects
*** note
**Warning**: Please do not use repo's approach to projects/ & project-objects/
layouts as a model for other tools to implement similar approaches.
It has a number of known downsides like:
* [Symlinks do not work well under Windows](./windows.md).
* Git sometimes replaces symlinks under .git/ with real files (under unknown
circumstances), and then the internal state gets out of sync, and data loss
may ensue.
* When sharing project-objects between multiple project checkouts, Git might
automatically run `gc` or `prune` which may lead to data loss or corruption
(since those operate on leaf projects and miss refs in other leaves). See
https://gerrit-review.googlesource.com/c/git-repo/+/254392 for more details.
Instead, you should use standard Git workflows like [git worktree] or
[gitsubmodules] with [superprojects].
***
* `copy-link-files.json`: Tracking file used by `repo sync` to determine when
copyfile or linkfile are added or removed and need corresponding updates.
* `project.list`: Tracking file used by `repo sync` to determine when projects
are added or removed and need corresponding updates in the checkout.
* `projects/`: Bare checkouts of every project synced by the manifest. The
@ -125,7 +109,7 @@ Instead, you should use standard Git workflows like [git worktree] or
setting in the manifest (i.e. the path on the remote server) with a `.git`
suffix. This allows for multiple checkouts of the same remote git repo to
share their objects. For example, you could have different branches of
`foo/bar.git` checked out to `foo/bar-main`, `foo/bar-release`, etc...
`foo/bar.git` checked out to `foo/bar-master`, `foo/bar-release`, etc...
There will be multiple trees under `projects/` for each one, but only one
under `project-objects/`.
@ -140,7 +124,7 @@ Instead, you should use standard Git workflows like [git worktree] or
(i.e. the path on the remote server) with a `.git` suffix. This has the
same advantages as the `project-objects/` layout above.
This is used when [git worktree]'s are enabled.
This is used when git worktrees are enabled.
### Global settings
@ -149,26 +133,23 @@ repo client checkout.
Most settings use the `[repo]` section to avoid conflicts with git.
User controlled settings are initialized when running `repo init`.
| Setting | `repo init` Option | Use/Meaning |
|------------------- |---------------------------|-------------|
| manifest.groups | `--groups` & `--platform` | The manifest groups to sync |
| repo.archive | `--archive` | Use `git archive` for checkouts |
| repo.clonebundle | `--clone-bundle` | Whether the initial sync used clone.bundle explicitly |
| repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
| repo.depth | `--depth` | Create shallow checkouts when cloning |
| repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
| repo.mirror | `--mirror` | Checkout is a repo mirror |
| repo.partialclone | `--partial-clone` | Create [partial git clones] |
| repo.partialcloneexclude | `--partial-clone-exclude` | Comma-delimited list of project names (not paths) to exclude while using [partial git clones] |
| repo.reference | `--reference` | Reference repo client checkout |
| repo.submodules | `--submodules` | Sync git submodules |
| repo.superproject | `--use-superproject` | Sync [superproject] |
| repo.worktree | `--worktree` | Use [git worktree] for checkouts |
| user.email | `--config-name` | User's e-mail address; Copied into `.git/config` when checking out a new project |
| user.name | `--config-name` | User's name; Copied into `.git/config` when checking out a new project |
| Setting | `repo init` Option | Use/Meaning |
|-------------------|---------------------------|-------------|
| manifest.groups | `--groups` & `--platform` | The manifest groups to sync |
| repo.archive | `--archive` | Use `git archive` for checkouts |
| repo.clonebundle | `--clone-bundle` | Whether the initial sync used clone.bundle explicitly |
| repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
| repo.depth | `--depth` | Create shallow checkouts when cloning |
| repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
| repo.mirror | `--mirror` | Checkout is a repo mirror |
| repo.partialclone | `--partial-clone` | Create [partial git clones] |
| repo.reference | `--reference` | Reference repo client checkout |
| repo.submodules | `--submodules` | Sync git submodules |
| repo.worktree | `--worktree` | Use `git worktree` for checkouts |
| user.email | `--config-name` | User's e-mail address; Copied into `.git/config` when checking out a new project |
| user.name | `--config-name` | User's name; Copied into `.git/config` when checking out a new project |
[partial git clones]: https://git-scm.com/docs/gitrepository-layout#_code_partialclone_code
[superproject]: https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
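
These keys are plain git-config entries in the manifests project, so they can be inspected without repo itself. A minimal sketch, assuming the conventional `.repo/manifests.git/config` location inside a client checkout:

```python
import os
import subprocess

def get_repo_setting(client_root, key):
  """Read one setting, e.g. 'manifest.groups' or 'repo.depth'; None if unset."""
  config = os.path.join(client_root, '.repo', 'manifests.git', 'config')
  result = subprocess.run(['git', 'config', '--file', config, key],
                          stdout=subprocess.PIPE, universal_newlines=True)
  return result.stdout.strip() or None

print(get_repo_setting('.', 'manifest.groups'))
```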
### Repo hooks settings
@ -248,10 +229,7 @@ Repo will create & maintain a few files in the user's home directory.
[git-config]: https://git-scm.com/docs/git-config
[git worktree]: https://git-scm.com/docs/git-worktree
[gitsubmodules]: https://git-scm.com/docs/gitsubmodules
[manifest-format.md]: ./manifest-format.md
[local manifests]: ./manifest-format.md#Local-Manifests
[superprojects]: https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
[topic]: https://gerrit-review.googlesource.com/Documentation/intro-user.html#topics
[upload-notify]: https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify

View File

@ -1,5 +1,8 @@
# repo Manifest Format
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
A repo manifest describes the structure of a repo client; that is
the directories that are visible and where they should be obtained
from with git.
@ -21,7 +24,6 @@ following DTD:
```xml
<!DOCTYPE manifest [
<!ELEMENT manifest (notice?,
remote*,
default?,
@ -30,8 +32,6 @@ following DTD:
project*,
extend-project*,
repo-hooks?,
superproject?,
contactinfo?,
include*)>
<!ELEMENT notice (#PCDATA)>
@ -101,25 +101,11 @@ following DTD:
<!ATTLIST repo-hooks in-project CDATA #REQUIRED>
<!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
<!ELEMENT superproject EMPTY>
<!ATTLIST superproject name CDATA #REQUIRED>
<!ATTLIST superproject remote IDREF #IMPLIED>
<!ELEMENT contactinfo EMPTY>
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
<!ELEMENT include EMPTY>
<!ATTLIST include name CDATA #REQUIRED>
<!ATTLIST include groups CDATA #IMPLIED>
<!ATTLIST include name CDATA #REQUIRED>
]>
```
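
For orientation, here is a minimal manifest that conforms to the DTD above, with a quick well-formedness check via the standard library (the remote URLs and project name are placeholders):

```python
import xml.dom.minidom as minidom

MANIFEST = """\
<manifest>
  <remote name="origin" fetch="https://git.example.com/" review="https://review.example.com/"/>
  <default remote="origin" revision="master"/>
  <project name="tools/example" path="tools/example"/>
</manifest>
"""

doc = minidom.parseString(MANIFEST)
print(doc.documentElement.tagName)  # manifest
```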
For compatibility purposes across repo releases, all unknown elements are
silently ignored. However, repo reserves all possible names for itself for
future use. If you want to use custom elements, the `x-*` namespace is
reserved for that purpose, and repo guarantees to never allocate any
corresponding names.
A description of the elements and their attributes follows.
@ -127,10 +113,6 @@ A description of the elements and their attributes follows.
The root element of the file.
### Element notice
Arbitrary text that is displayed to users whenever `repo sync` finishes.
The content is simply passed through as it exists in the manifest.
### Element remote
@ -163,8 +145,8 @@ Attribute `review`: Hostname of the Gerrit server where reviews
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.
Attribute `revision`: Name of a Git branch (e.g. `main` or
`refs/heads/main`). Remotes with their own revision will override
Attribute `revision`: Name of a Git branch (e.g. `master` or
`refs/heads/master`). Remotes with their own revision will override
the default revision.
### Element default
@ -177,11 +159,11 @@ Attribute `remote`: Name of a previously defined remote element.
Project elements lacking a remote attribute of their own will use
this remote.
Attribute `revision`: Name of a Git branch (e.g. `main` or
`refs/heads/main`). Project elements lacking their own
Attribute `revision`: Name of a Git branch (e.g. `master` or
`refs/heads/master`). Project elements lacking their own
revision attribute will use this revision.
Attribute `dest-branch`: Name of a Git branch (e.g. `main`).
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
Project elements not setting their own `dest-branch` will inherit
this value. If this value is not set, projects will use `revision`
by default instead.
@ -257,37 +239,24 @@ name will be prefixed by the parent's.
The project name must match the name Gerrit knows, if Gerrit is
being used for code reviews.
"name" must not be empty, and may not be an absolute path or use "." or ".."
path components. It is always interpreted relative to the remote's fetch
settings, so if a different base path is needed, declare a different remote
with the new settings needed.
These restrictions are not enforced for [Local Manifests].
Attribute `path`: An optional path relative to the top directory
of the repo client where the Git working directory for this project
should be placed. If not supplied the project "name" is used.
should be placed. If not supplied the project name is used.
If the project has a parent element, its path will be prefixed
by the parent's.
"path" may not be an absolute path or use "." or ".." path components.
These restrictions are not enforced for [Local Manifests].
If you want to place files into the root of the checkout (e.g. a README or
Makefile or another build script), use the [copyfile] or [linkfile] elements
instead.
Attribute `remote`: Name of a previously defined remote element.
If not supplied the remote given by the default element is used.
Attribute `revision`: Name of the Git branch the manifest wants
to track for this project. Names can be relative to refs/heads
(e.g. just "main") or absolute (e.g. "refs/heads/main").
(e.g. just "master") or absolute (e.g. "refs/heads/master").
Tags and/or explicit SHA-1s should work in theory, but have not
been extensively tested. If not supplied the revision given by
the remote element is used if applicable, else the default
element is used.
Attribute `dest-branch`: Name of a Git branch (e.g. `main`).
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
When using `repo upload`, changes will be submitted for code
review on this branch. If unspecified both here and in the
default element, `revision` is used instead.
@ -296,7 +265,7 @@ Attribute `groups`: List of groups to which this project belongs,
whitespace or comma separated. All projects belong to the group
"all", and each project automatically belongs to a group of
its name:`name` and path:`path`. E.g. for
`<project name="monkeys" path="barrel-of"/>`, that project
<project name="monkeys" path="barrel-of"/>, that project
definition is implicitly in the following manifest groups:
default, name:monkeys, and path:barrel-of. If you place a project in the
group "notdefault", it will not be automatically downloaded by repo.
@ -393,54 +362,6 @@ This element is mostly useful in a local manifest file, where
the user can remove a project, and possibly replace it with their
own definition.
### Element repo-hooks
NB: See the [practical documentation](./repo-hooks.md) for using repo hooks.
Only one repo-hooks element may be specified at a time.
Attempting to redefine it will fail to parse.
Attribute `in-project`: The project where the hooks are defined. The value
must match the `name` attribute (**not** the `path` attribute) of a previously
defined `project` element.
Attribute `enabled-list`: List of hooks to use, whitespace or comma separated.
### Element superproject
***
*Note*: This is currently a WIP.
***
NB: See the [git superprojects documentation](
https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects) for background
information.
This element is used to specify the URL of the superproject. It has "name" and
"remote" as attributes. Only "name" is required while the other has a
reasonable default. At most one superproject may be specified.
Attempting to redefine it will fail to parse.
Attribute `name`: A unique name for the superproject. This attribute has the
same meaning as project's name attribute. See the
[element project](#element-project) for more information.
Attribute `remote`: Name of a previously defined remote element.
If not supplied the remote given by the default element is used.
### Element contactinfo
***
*Note*: This is currently a WIP.
***
This element is used to let manifest authors self-register contact info.
It has "bugurl" as a required atrribute. This element can be repeated,
and any later entries will clobber earlier ones. This would allow manifest
authors who extend manifests to specify their own contact info.
Attribute `bugurl`: The URL to file a bug against the manifest owner.
### Element include
This element provides the capability of including another manifest
@ -450,15 +371,8 @@ target manifest to include - it must be a usable manifest on its own.
Attribute `name`: the manifest to include, specified relative to
the manifest repository's root.
"name" may not be an absolute path or use "." or ".." path components.
These restrictions are not enforced for [Local Manifests].
Attribute `groups`: List of additional groups to which all projects
in the included manifest belong. This appends and recurses, meaning
all projects in sub-manifests carry all parent include groups.
Same syntax as the corresponding element of `project`.
## Local Manifests {#local-manifests}
## Local Manifests
Additional remotes and projects may be added through local manifest
files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
@ -486,8 +400,3 @@ Manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml` will
be loaded in alphabetical order.
The legacy `$TOP_DIR/.repo/local_manifest.xml` path is no longer supported.
[copyfile]: #Element-copyfile
[linkfile]: #Element-linkfile
[Local Manifests]: #local-manifests

View File

@ -1,5 +1,8 @@
# Supported Python Versions
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/python-support.md
With Python 2.7 officially going EOL on [01 Jan 2020](https://pythonclock.org/),
we need a support plan for the repo project itself.
Inevitably, there will be a long tail of users who still want to use Python 2 on
@ -18,13 +21,13 @@ Bugfixes may be added on a best-effort basis or from the community, but largely
no new features will be added, nor is support guaranteed.
Users can select this during `repo init` time via the [repo launcher].
Otherwise the default branches (e.g. stable & main) will be used which will
Otherwise the default branches (e.g. stable & master) will be used which will
require Python 3.
This means the [repo launcher] needs to support both Python 2 & Python 3, but
since it doesn't import any other repo code, this shouldn't be too problematic.
The main branch will require Python 3.6 at a minimum.
The master branch will require Python 3.6 at a minimum.
If the system has an older version of Python 3, then users will have to select
the legacy Python 2 branch instead.
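
In practice this boils down to a version gate early in the launcher, along the lines of the following (a simplified sketch, not the launcher's actual code):

```python
import sys

MIN_PYTHON_VERSION = (3, 6)

if sys.version_info[:2] < MIN_PYTHON_VERSION:
  print('repo: error: Python %d.%d or newer is required (found %s)' %
        (MIN_PYTHON_VERSION + (sys.version.split()[0],)), file=sys.stderr)
  sys.exit(1)
```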

View File

@ -1,5 +1,8 @@
# repo release process
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/release-process.md
This is the process for creating a new release of repo, as well as all the
related topics and flows.
@ -83,8 +86,7 @@ control how repo finds updates:
* `--repo-rev`: This tells repo which branch to use for the full project.
It defaults to the `stable` branch (`REPO_REV` in the launcher script).
Whenever `repo sync` is run, repo will, once every 24 hours, see if an update
is available.
Whenever `repo sync` is run, repo will check to see if an update is available.
It fetches the latest repo-rev from the repo-url.
Then it verifies that the latest commit in the branch has a valid signed tag
using `git tag -v` (which uses gpg).
@ -96,14 +98,9 @@ If that tag is valid, then repo will warn and use that commit instead.
If that tag cannot be verified, it gives up and forces the user to resolve.
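
The verification step amounts to something like the following (a sketch using plain subprocess calls; the launcher's real logic handles more edge cases):

```python
import subprocess

def latest_signed_tag_ok(rev='stable'):
  """Find the nearest tag on |rev| and verify its signature via `git tag -v`."""
  tag = subprocess.run(['git', 'describe', '--abbrev=0', rev],
                       stdout=subprocess.PIPE, universal_newlines=True,
                       check=True).stdout.strip()
  result = subprocess.run(['git', 'tag', '-v', tag],
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  return tag, result.returncode == 0
```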
### Force an update
The `repo selfupdate` command can be used to force an immediate update.
It is not subject to the 24 hour limitation.
## Branch management
All development happens on the `main` branch and should generally be stable.
All development happens on the `master` branch and should generally be stable.
Since the repo launcher defaults to tracking the `stable` branch, it is not
normally updated until a new release is available.
@ -118,7 +115,7 @@ For example, when `stable` moves from `v1.10.x` to `v1.11.x`, then the `maint`
branch will be updated from `v1.9.x` to `v1.10.x`.
We don't have parallel release branches/series.
Typically all tags are made against the `main` branch and then pushed to the
Typically all tags are made against the `master` branch and then pushed to the
`stable` branch to make it available to the rest of the world.
Since repo doesn't typically see a lot of changes, this tends to be OK.
@ -126,10 +123,10 @@ Since repo doesn't typically see a lot of changes, this tends to be OK.
When you want to create a new release, you'll need to select a good version and
create a signed tag using a key registered in repo itself.
Typically we just tag the latest version of the `main` branch.
Typically we just tag the latest version of the `master` branch.
The tag could be pushed now, but it won't be used by clients normally (since the
default `repo-rev` setting is `stable`).
This would allow some early testing on systems that explicitly select `main`.
This would allow some early testing on systems that explicitly select `master`.
### Creating a signed tag
@ -150,7 +147,7 @@ $ export GNUPGHOME=~/.gnupg/repo/
$ gpg -K
# Pick whatever branch or commit you want to tag.
$ r=main
$ r=master
# Pick the new version.
$ t=1.12.10

View File

@ -1,5 +1,8 @@
# repo hooks
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/repo-hooks.md
[TOC]
Repo provides a mechanism to hook specific stages of the runtime with custom
@ -27,7 +30,7 @@ repohooks project is updated and a hook is triggered.
For the full syntax, see the [repo manifest format](./manifest-format.md).
Here's a short example from
[Android](https://android.googlesource.com/platform/manifest/+/HEAD/default.xml).
[Android](https://android.googlesource.com/platform/manifest/+/master/default.xml).
The `<project>` line checks out the repohooks git repo to the local
`tools/repohooks/` path. The `<repo-hooks>` line says to look in the project
with the name `platform/tools/repohooks` for hooks to run during the

View File

@ -1,5 +1,8 @@
# Microsoft Windows Details
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/docs/windows.md
Repo is primarily developed on Linux with a lot of users on macOS.
Windows is, unfortunately, not a common platform.
There is support in repo for Windows, but there might be some rough edges.

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import sys

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -18,12 +20,12 @@ class ManifestParseError(Exception):
"""
class ManifestInvalidRevisionError(ManifestParseError):
class ManifestInvalidRevisionError(Exception):
"""The revision value in a project is incorrect.
"""
class ManifestInvalidPathError(ManifestParseError):
class ManifestInvalidPathError(Exception):
"""A path used in <copyfile> or <linkfile> is incorrect.
"""
@ -33,7 +35,7 @@ class NoManifestException(Exception):
"""
def __init__(self, path, reason):
super().__init__(path, reason)
super(NoManifestException, self).__init__()
self.path = path
self.reason = reason
@ -46,7 +48,7 @@ class EditorError(Exception):
"""
def __init__(self, reason):
super().__init__(reason)
super(EditorError, self).__init__()
self.reason = reason
def __str__(self):
@ -58,7 +60,7 @@ class GitError(Exception):
"""
def __init__(self, command):
super().__init__(command)
super(GitError, self).__init__()
self.command = command
def __str__(self):
@ -70,7 +72,7 @@ class UploadError(Exception):
"""
def __init__(self, reason):
super().__init__(reason)
super(UploadError, self).__init__()
self.reason = reason
def __str__(self):
@ -82,7 +84,7 @@ class DownloadError(Exception):
"""
def __init__(self, reason):
super().__init__(reason)
super(DownloadError, self).__init__()
self.reason = reason
def __str__(self):
@ -94,7 +96,7 @@ class NoSuchProjectError(Exception):
"""
def __init__(self, name=None):
super().__init__(name)
super(NoSuchProjectError, self).__init__()
self.name = name
def __str__(self):
@ -108,7 +110,7 @@ class InvalidProjectGroupsError(Exception):
"""
def __init__(self, name=None):
super().__init__(name)
super(InvalidProjectGroupsError, self).__init__()
self.name = name
def __str__(self):
@ -124,7 +126,7 @@ class RepoChangedException(Exception):
"""
def __init__(self, extra_args=None):
super().__init__(extra_args)
super(RepoChangedException, self).__init__()
self.extra_args = extra_args or []

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import multiprocessing

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,10 +14,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from __future__ import print_function
import os
import re
import sys
import subprocess
import tempfile
from signal import SIGTERM
from error import GitError
from git_refs import HEAD
@ -40,15 +45,101 @@ GIT_DIR = 'GIT_DIR'
LAST_GITDIR = None
LAST_CWD = None
_ssh_proxy_path = None
_ssh_sock_path = None
_ssh_clients = []
_ssh_version = None
def _run_ssh_version():
"""run ssh -V to display the version number"""
return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()
def _parse_ssh_version(ver_str=None):
"""parse a ssh version string into a tuple"""
if ver_str is None:
ver_str = _run_ssh_version()
m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
if m:
return tuple(int(x) for x in m.group(1).split('.'))
else:
return ()
def ssh_version():
"""return ssh version as a tuple"""
global _ssh_version
if _ssh_version is None:
try:
_ssh_version = _parse_ssh_version()
except subprocess.CalledProcessError:
print('fatal: unable to detect ssh version', file=sys.stderr)
sys.exit(1)
return _ssh_version
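
For example, `_parse_ssh_version()` turns typical `ssh -V` banners into comparable tuples (the banner strings below are illustrative):

```python
print(_parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f\n'))
# (6, 6, 1)
print(_parse_ssh_version('OpenSSH_8.2p1 Ubuntu-4ubuntu0.5, OpenSSL 1.1.1f\n'))
# (8, 2)
print(_parse_ssh_version('Sun_SSH_1.1.4, SSH protocols 1.5/2.0\n'))
# () -- unrecognized, so ssh_sock() falls back to the pre-6.7 token format
```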
def ssh_sock(create=True):
global _ssh_sock_path
if _ssh_sock_path is None:
if not create:
return None
tmp_dir = '/tmp'
if not os.path.exists(tmp_dir):
tmp_dir = tempfile.gettempdir()
if ssh_version() < (6, 7):
tokens = '%r@%h:%p'
else:
tokens = '%C' # hash of %l%h%p%r
_ssh_sock_path = os.path.join(
tempfile.mkdtemp('', 'ssh-', tmp_dir),
'master-' + tokens)
return _ssh_sock_path
def _ssh_proxy():
global _ssh_proxy_path
if _ssh_proxy_path is None:
_ssh_proxy_path = os.path.join(
os.path.dirname(__file__),
'git_ssh')
return _ssh_proxy_path
def _add_ssh_client(p):
_ssh_clients.append(p)
def _remove_ssh_client(p):
try:
_ssh_clients.remove(p)
except ValueError:
pass
def terminate_ssh_clients():
global _ssh_clients
for p in _ssh_clients:
try:
os.kill(p.pid, SIGTERM)
p.wait()
except OSError:
pass
_ssh_clients = []
_git_version = None
class _GitCall(object):
@functools.lru_cache(maxsize=None)
def version_tuple(self):
ret = Wrapper().ParseGitVersion()
if ret is None:
print('fatal: unable to detect git version', file=sys.stderr)
sys.exit(1)
return ret
global _git_version
if _git_version is None:
_git_version = Wrapper().ParseGitVersion()
if _git_version is None:
print('fatal: unable to detect git version', file=sys.stderr)
sys.exit(1)
return _git_version
def __getattr__(self, name):
name = name.replace('_', '-')
@ -74,10 +165,11 @@ def RepoSourceVersion():
proj = os.path.dirname(os.path.abspath(__file__))
env[GIT_DIR] = os.path.join(proj, '.git')
result = subprocess.run([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
encoding='utf-8', env=env, check=False)
if result.returncode == 0:
ver = result.stdout.strip()
p = subprocess.Popen([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
env=env)
if p.wait() == 0:
ver = p.stdout.read().strip().decode('utf-8')
if ver.startswith('v'):
ver = ver[1:]
else:
@ -161,21 +253,24 @@ class GitCommand(object):
project,
cmdv,
bare=False,
input=None,
provide_stdin=False,
capture_stdout=False,
capture_stderr=False,
merge_output=False,
disable_editor=False,
ssh_proxy=None,
ssh_proxy=False,
cwd=None,
gitdir=None):
env = self._GetBasicEnv()
# If we are not capturing std* then we need to print it.
self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
if disable_editor:
env['GIT_EDITOR'] = ':'
if ssh_proxy:
env['REPO_SSH_SOCK'] = ssh_proxy.sock()
env['GIT_SSH'] = ssh_proxy.proxy
env['REPO_SSH_SOCK'] = ssh_sock()
env['GIT_SSH'] = _ssh_proxy()
env['GIT_SSH_VARIANT'] = 'ssh'
if 'http_proxy' in env and 'darwin' == sys.platform:
s = "'http.proxy=%s'" % (env['http_proxy'],)
@ -197,9 +292,6 @@ class GitCommand(object):
command = [GIT]
if bare:
if gitdir:
# Git on Windows wants its paths only using / for reliability.
if platform_utils.isWindows():
gitdir = gitdir.replace('\\', '/')
env[GIT_DIR] = gitdir
cwd = None
command.append(cmdv[0])
@ -210,10 +302,13 @@ class GitCommand(object):
command.append('--progress')
command.extend(cmdv[1:])
stdin = subprocess.PIPE if input else None
stdout = subprocess.PIPE if capture_stdout else None
stderr = (subprocess.STDOUT if merge_output else
(subprocess.PIPE if capture_stderr else None))
if provide_stdin:
stdin = subprocess.PIPE
else:
stdin = None
stdout = subprocess.PIPE
stderr = subprocess.STDOUT if merge_output else subprocess.PIPE
if IsTrace():
global LAST_CWD
@ -249,8 +344,6 @@ class GitCommand(object):
p = subprocess.Popen(command,
cwd=cwd,
env=env,
encoding='utf-8',
errors='backslashreplace',
stdin=stdin,
stdout=stdout,
stderr=stderr)
@ -258,21 +351,10 @@ class GitCommand(object):
raise GitError('%s: %s' % (command[1], e))
if ssh_proxy:
ssh_proxy.add_client(p)
_add_ssh_client(p)
self.process = p
if input:
if isinstance(input, str):
input = input.encode('utf-8')
p.stdin.write(input)
p.stdin.close()
try:
self.stdout, self.stderr = p.communicate()
finally:
if ssh_proxy:
ssh_proxy.remove_client(p)
self.rc = p.wait()
self.stdin = p.stdin
@staticmethod
def _GetBasicEnv():
@ -292,4 +374,36 @@ class GitCommand(object):
return env
def Wait(self):
return self.rc
try:
p = self.process
rc = self._CaptureOutput()
finally:
_remove_ssh_client(p)
return rc
def _CaptureOutput(self):
p = self.process
s_in = platform_utils.FileDescriptorStreams.create()
s_in.add(p.stdout, sys.stdout, 'stdout')
if p.stderr is not None:
s_in.add(p.stderr, sys.stderr, 'stderr')
self.stdout = ''
self.stderr = ''
while not s_in.is_done:
in_ready = s_in.select()
for s in in_ready:
buf = s.read()
if not buf:
s_in.remove(s)
continue
if not hasattr(buf, 'encode'):
buf = buf.decode()
if s.std_name == 'stdout':
self.stdout += buf
else:
self.stderr += buf
if self.tee[s.std_name]:
s.dest.write(buf)
s.dest.flush()
return p.wait()
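
Callers elsewhere in the tree (for example the trace2 event log further down this page) drive this class roughly as follows; a minimal sketch:

```python
from git_command import GitCommand

p = GitCommand(None, ['config', '--get', 'user.name'],
               capture_stdout=True, capture_stderr=True, bare=True)
if p.Wait() == 0:
  print('user.name = %s' % p.stdout.strip())
```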

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,22 +14,45 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import contextlib
import errno
from http.client import HTTPException
import json
import os
import re
import signal
import ssl
import subprocess
import sys
import urllib.error
import urllib.request
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import time
from pyversion import is_python3
if is_python3():
import urllib.request
import urllib.error
else:
import urllib2
import imp
urllib = imp.new_module('urllib')
urllib.request = urllib2
urllib.error = urllib2
from error import GitError, UploadError
import platform_utils
from repo_trace import Trace
if is_python3():
from http.client import HTTPException
else:
from httplib import HTTPException
from git_command import GitCommand
from git_command import ssh_sock
from git_command import terminate_ssh_clients
from git_refs import R_CHANGES, R_HEADS, R_TAGS
ID_RE = re.compile(r'^[0-9a-f]{40}$')
@ -136,21 +161,6 @@ class GitConfig(object):
except ValueError:
return None
def DumpConfigDict(self):
"""Returns the current configuration dict.
Configuration data is information only (e.g. logging) and
should not be considered a stable data-source.
Returns:
dict of {<key>, <value>} for git configuration cache.
<value> are strings converted by GetString.
"""
config_dict = {}
for key in self._cache:
config_dict[key] = self.GetString(key)
return config_dict
def GetBoolean(self, name):
"""Returns a boolean from the configuration file.
None : The value was not defined, or is not a boolean.
@ -167,12 +177,6 @@ class GitConfig(object):
return False
return None
def SetBoolean(self, name, value):
"""Set the truthy value for a key."""
if value is not None:
value = 'true' if value else 'false'
self.SetString(name, value)
def GetString(self, name, all_keys=False):
"""Get the first value for a key, or None if it is not defined.
@ -341,6 +345,8 @@ class GitConfig(object):
d = self._do('--null', '--list')
if d is None:
return c
if not is_python3():
d = d.decode('utf-8')
for line in d.rstrip('\0').split('\0'):
if '\n' in line:
key, val = line.split('\n', 1)
@ -431,6 +437,127 @@ class RefSpec(object):
return s
_master_processes = []
_master_keys = set()
_ssh_master = True
_master_keys_lock = None
def init_ssh():
"""Should be called once at the start of repo to init ssh master handling.
At the moment, all we do is to create our lock.
"""
global _master_keys_lock
assert _master_keys_lock is None, "Should only call init_ssh once"
_master_keys_lock = _threading.Lock()
def _open_ssh(host, port=None):
global _ssh_master
# Acquire the lock. This is needed to prevent opening multiple masters for
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
# one that was passed to repo init.
_master_keys_lock.acquire()
try:
# Check to see whether we already think that the master is running; if we
# think it's already running, return right away.
if port is not None:
key = '%s:%s' % (host, port)
else:
key = host
if key in _master_keys:
return True
if (not _ssh_master
or 'GIT_SSH' in os.environ
or sys.platform in ('win32', 'cygwin')):
# failed earlier, or cygwin ssh can't do this
#
return False
# We will make two calls to ssh; this is the common part of both calls.
command_base = ['ssh',
'-o', 'ControlPath %s' % ssh_sock(),
host]
if port is not None:
command_base[1:1] = ['-p', str(port)]
# Since the key wasn't in _master_keys, we think that master isn't running.
# ...but before actually starting a master, we'll double-check. This can
# be important because we can't tell that 'git@myhost.com' is the same
# as 'myhost.com' where "User git" is set up in the user's ~/.ssh/config file.
check_command = command_base + ['-O', 'check']
try:
Trace(': %s', ' '.join(check_command))
check_process = subprocess.Popen(check_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
check_process.communicate() # read output, but ignore it...
isnt_running = check_process.wait()
if not isnt_running:
# Our double-check found that the master _was_ in fact running. Add to
# the list of keys.
_master_keys.add(key)
return True
except Exception:
# Ignore exceptions. We will fall back to the normal command and print
# to the log there.
pass
command = command_base[:1] + ['-M', '-N'] + command_base[1:]
try:
Trace(': %s', ' '.join(command))
p = subprocess.Popen(command)
except Exception as e:
_ssh_master = False
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
% (host, port, str(e)), file=sys.stderr)
return False
time.sleep(1)
ssh_died = (p.poll() is not None)
if ssh_died:
return False
_master_processes.append(p)
_master_keys.add(key)
return True
finally:
_master_keys_lock.release()
def close_ssh():
global _master_keys_lock
terminate_ssh_clients()
for p in _master_processes:
try:
os.kill(p.pid, signal.SIGTERM)
p.wait()
except OSError:
pass
del _master_processes[:]
_master_keys.clear()
d = ssh_sock(create=False)
if d:
try:
platform_utils.rmdir(os.path.dirname(d))
except OSError:
pass
# We're done with the lock, so we can delete it.
_master_keys_lock = None
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
@ -482,6 +609,27 @@ def GetUrlCookieFile(url, quiet):
yield cookiefile, None
def _preconnect(url):
m = URI_ALL.match(url)
if m:
scheme = m.group(1)
host = m.group(2)
if ':' in host:
host, port = host.split(':')
else:
port = None
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
return _open_ssh(host, port)
return False
m = URI_SCP.match(url)
if m:
host = m.group(1)
return _open_ssh(host)
return False
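
For reference, how `_preconnect()` classifies URLs with those two regexes (the hosts below are placeholders):

```python
for url in ('ssh://git@gerrit.example.com:29418/platform/manifest',
            'git@github.com:example/repo.git',
            'https://gerrit.example.com/platform/manifest'):
  m = URI_ALL.match(url)
  scp = URI_SCP.match(url)
  if m and m.group(1) in ('ssh', 'git+ssh', 'ssh+git'):
    print(url, '-> ssh master for', m.group(2))
  elif not m and scp:
    print(url, '-> ssh master for', scp.group(1), '(scp-style)')
  else:
    print(url, '-> no ssh preconnect')
```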
class Remote(object):
"""Configuration options related to a remote.
"""
@ -518,23 +666,9 @@ class Remote(object):
return self.url.replace(longest, longestUrl, 1)
def PreConnectFetch(self, ssh_proxy):
"""Run any setup for this remote before we connect to it.
In practice, if the remote is using SSH, we'll attempt to create a new
SSH master session to it for reuse across projects.
Args:
ssh_proxy: The SSH settings for managing master sessions.
Returns:
Whether the preconnect phase for this remote was successful.
"""
if not ssh_proxy:
return True
def PreConnectFetch(self):
connectionUrl = self._InsteadOf()
return ssh_proxy.preconnect(connectionUrl)
return _preconnect(connectionUrl)
def ReviewUrl(self, userEmail, validate_certs):
if self._review_url is None:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -131,14 +133,11 @@ class GitRefs(object):
base = os.path.join(self._gitdir, prefix)
for name in platform_utils.listdir(base):
p = os.path.join(base, name)
# We don't implement the full ref validation algorithm, just the simple
# rules that would show up in local filesystems.
# https://git-scm.com/docs/git-check-ref-format
if name.startswith('.') or name.endswith('.lock'):
pass
elif platform_utils.isdir(p):
if platform_utils.isdir(p):
self._mtime[prefix] = os.path.getmtime(base)
self._ReadLoose(prefix + name + '/')
elif name.endswith('.lock'):
pass
else:
self._ReadLoose1(p, prefix + name)
@ -147,7 +146,7 @@ class GitRefs(object):
with open(path) as fd:
mtime = os.path.getmtime(path)
ref_id = fd.readline()
except (OSError, UnicodeError):
except (IOError, OSError):
return
try:

View File

@ -1,292 +0,0 @@
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provide functionality to get all projects and their commit ids from Superproject.
For more information on superproject, check out:
https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
Examples:
superproject = Superproject()
project_commit_ids = superproject.UpdateProjectsRevisionId(projects)
"""
import hashlib
import os
import sys
from git_command import git_require, GitCommand
from git_refs import R_HEADS
_SUPERPROJECT_GIT_NAME = 'superproject.git'
_SUPERPROJECT_MANIFEST_NAME = 'superproject_override.xml'
class Superproject(object):
"""Get commit ids from superproject.
Initializes a local copy of a superproject for the manifest. This allows
lookup of commit ids for all projects. It contains _project_commit_ids which
is a dictionary with project/commit id entries.
"""
def __init__(self, manifest, repodir, superproject_dir='exp-superproject',
quiet=False):
"""Initializes superproject.
Args:
manifest: A Manifest object that is to be written to a file.
repodir: Path to the .repo/ dir for holding all internal checkout state.
It must be in the top directory of the repo client checkout.
superproject_dir: Relative path under |repodir| to checkout superproject.
quiet: If True then only print the progress messages.
"""
self._project_commit_ids = None
self._manifest = manifest
self._quiet = quiet
self._branch = self._GetBranch()
self._repodir = os.path.abspath(repodir)
self._superproject_dir = superproject_dir
self._superproject_path = os.path.join(self._repodir, superproject_dir)
self._manifest_path = os.path.join(self._superproject_path,
_SUPERPROJECT_MANIFEST_NAME)
git_name = ''
if self._manifest.superproject:
remote_name = self._manifest.superproject['remote'].name
git_name = hashlib.md5(remote_name.encode('utf8')).hexdigest() + '-'
self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
self._work_git = os.path.join(self._superproject_path, self._work_git_name)
@property
def project_commit_ids(self):
"""Returns a dictionary of projects and their commit ids."""
return self._project_commit_ids
def _GetBranch(self):
"""Returns the branch name for getting the approved manifest."""
p = self._manifest.manifestProject
b = p.GetBranch(p.CurrentBranch)
if not b:
return None
branch = b.merge
if branch and branch.startswith(R_HEADS):
branch = branch[len(R_HEADS):]
return branch
def _Init(self):
"""Sets up a local Git repository to get a copy of a superproject.
Returns:
True if initialization is successful, or False.
"""
if not os.path.exists(self._superproject_path):
os.mkdir(self._superproject_path)
if not self._quiet and not os.path.exists(self._work_git):
print('%s: Performing initial setup for superproject; this might take '
'several minutes.' % self._work_git)
cmd = ['init', '--bare', self._work_git_name]
p = GitCommand(None,
cmd,
cwd=self._superproject_path,
capture_stdout=True,
capture_stderr=True)
retval = p.Wait()
if retval:
print('repo: error: git init call failed with return code: %r, stderr: %r' %
(retval, p.stderr), file=sys.stderr)
return False
return True
def _Fetch(self, url):
"""Fetches a local copy of a superproject for the manifest based on url.
Args:
url: superproject's url.
Returns:
True if fetch is successful, or False.
"""
if not os.path.exists(self._work_git):
print('git fetch missing directory: %s' % self._work_git,
file=sys.stderr)
return False
if not git_require((2, 28, 0)):
print('superproject requires a git version 2.28 or later', file=sys.stderr)
return False
cmd = ['fetch', url, '--depth', '1', '--force', '--no-tags', '--filter', 'blob:none']
if self._branch:
cmd += [self._branch + ':' + self._branch]
p = GitCommand(None,
cmd,
cwd=self._work_git,
capture_stdout=True,
capture_stderr=True)
retval = p.Wait()
if retval:
print('repo: error: git fetch call failed with return code: %r, stderr: %r' %
(retval, p.stderr), file=sys.stderr)
return False
return True
def _LsTree(self):
"""Gets the commit ids for all projects.
Works only in git repositories.
Returns:
data: data returned from 'git ls-tree ...', or None on failure.
"""
if not os.path.exists(self._work_git):
print('git ls-tree missing directory: %s' % self._work_git,
file=sys.stderr)
return None
data = None
branch = 'HEAD' if not self._branch else self._branch
cmd = ['ls-tree', '-z', '-r', branch]
p = GitCommand(None,
cmd,
cwd=self._work_git,
capture_stdout=True,
capture_stderr=True)
retval = p.Wait()
if retval == 0:
data = p.stdout
else:
print('repo: error: git ls-tree call failed with return code: %r, stderr: %r' % (
retval, p.stderr), file=sys.stderr)
return data
def Sync(self):
"""Gets a local copy of a superproject for the manifest.
Returns:
True if sync of superproject is successful, or False.
"""
print('WARNING: --use-superproject is experimental and not '
'for general use', file=sys.stderr)
if not self._manifest.superproject:
print('error: superproject tag is not defined in manifest',
file=sys.stderr)
return False
url = self._manifest.superproject['remote'].url
if not url:
print('error: superproject URL is not defined in manifest',
file=sys.stderr)
return False
if not self._Init():
return False
if not self._Fetch(url):
return False
if not self._quiet:
print('%s: Initial setup for superproject completed.' % self._work_git)
return True
def _GetAllProjectsCommitIds(self):
"""Get commit ids for all projects from superproject and save them in _project_commit_ids.
Returns:
A dictionary with the projects/commit ids on success, otherwise None.
"""
if not self.Sync():
return None
data = self._LsTree()
if not data:
print('error: git ls-tree failed to return data for superproject',
file=sys.stderr)
return None
# Parse lines like the following to select lines starting with '160000' and
# build a dictionary with project path (last element) and its commit id (3rd element).
#
# 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
# 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00
commit_ids = {}
for line in data.split('\x00'):
ls_data = line.split(None, 3)
if not ls_data:
break
if ls_data[0] == '160000':
commit_ids[ls_data[3]] = ls_data[2]
self._project_commit_ids = commit_ids
return commit_ids
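
Feeding the sample `ls-tree -z` records from the comment above through the same loop gives, as a standalone sketch:

```python
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
        '120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00')

commit_ids = {}
for line in data.split('\x00'):
  ls_data = line.split(None, 3)
  if not ls_data:
    break
  if ls_data[0] == '160000':
    commit_ids[ls_data[3]] = ls_data[2]

print(commit_ids)  # {'art': '2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea'}
```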
def _WriteManfiestFile(self):
"""Writes manifest to a file.
Returns:
manifest_path: Path name of the file into which the manifest is written, or None on failure.
"""
if not os.path.exists(self._superproject_path):
print('error: missing superproject directory %s' %
self._superproject_path,
file=sys.stderr)
return None
manifest_str = self._manifest.ToXml(groups=self._manifest.GetGroupsStr()).toxml()
manifest_path = self._manifest_path
try:
with open(manifest_path, 'w', encoding='utf-8') as fp:
fp.write(manifest_str)
except IOError as e:
print('error: cannot write manifest to %s:\n%s'
% (manifest_path, e),
file=sys.stderr)
return None
return manifest_path
def UpdateProjectsRevisionId(self, projects):
"""Update revisionId of every project in projects with the commit id.
Args:
projects: List of projects whose revisionId needs to be updated.
Returns:
manifest_path: Path name of the overriding manifest file, or None on failure.
"""
commit_ids = self._GetAllProjectsCommitIds()
if not commit_ids:
print('error: Cannot get project commit ids from manifest', file=sys.stderr)
return None
projects_missing_commit_ids = []
superproject_fetchUrl = self._manifest.superproject['remote'].fetchUrl
for project in projects:
path = project.relpath
if not path:
continue
# Some manifests pull projects from the "chromium" GoB
# (remote="chromium") and have a private manifest that pulls projects
# from both the chromium GoB and the "chrome-internal" GoB (remote="chrome").
# For such projects, one of the remotes will be different from the
# superproject's remote. Until superproject supports multiple remotes,
# don't update the commit ids of remotes that don't match the
# superproject's remote.
if project.remote.fetchUrl != superproject_fetchUrl:
continue
commit_id = commit_ids.get(path)
if commit_id:
project.SetRevisionId(commit_id)
else:
projects_missing_commit_ids.append(path)
if projects_missing_commit_ids:
print('error: please file a bug using %s to report missing commit_ids for: %s' %
(self._manifest.contactinfo.bugurl, projects_missing_commit_ids), file=sys.stderr)
return None
manifest_path = self._WriteManfiestFile()
return manifest_path

View File

@ -1,238 +0,0 @@
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provide event logging in the git trace2 EVENT format.
The git trace2 EVENT format is defined at:
https://www.kernel.org/pub/software/scm/git/docs/technical/api-trace2.html#_event_format
https://git-scm.com/docs/api-trace2#_the_event_format_target
Usage:
git_trace_log = EventLog()
git_trace_log.StartEvent()
...
git_trace_log.ExitEvent()
git_trace_log.Write()
"""
import datetime
import json
import os
import sys
import tempfile
import threading
from git_command import GitCommand, RepoSourceVersion
class EventLog(object):
"""Event log that records events that occurred during a repo invocation.
Events are written to the log as consecutive JSON entries, one per line.
Entries follow the git trace2 EVENT format.
Each entry contains the following common keys:
- event: The event name
- sid: session-id - Unique string to allow process instance to be identified.
- thread: The thread name.
- time: The UTC time of the event.
Valid 'event' names and event specific fields are documented here:
https://git-scm.com/docs/api-trace2#_event_format
"""
def __init__(self, env=None):
"""Initializes the event log."""
self._log = []
# Try to get session-id (sid) from environment (setup in repo launcher).
KEY = 'GIT_TRACE2_PARENT_SID'
if env is None:
env = os.environ
now = datetime.datetime.utcnow()
# Save both our sid component and the complete sid.
# We use our sid component (self._sid) as the unique filename prefix and
# the full sid (self._full_sid) in the log itself.
self._sid = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
parent_sid = env.get(KEY)
# Append our sid component to the parent sid (if it exists).
if parent_sid is not None:
self._full_sid = parent_sid + '/' + self._sid
else:
self._full_sid = self._sid
# Set/update the environment variable.
# Environment handling across systems is messy.
try:
env[KEY] = self._full_sid
except UnicodeEncodeError:
env[KEY] = self._full_sid.encode()
# Add a version event to front of the log.
self._AddVersionEvent()
@property
def full_sid(self):
return self._full_sid
def _AddVersionEvent(self):
"""Adds a 'version' event at the beginning of current log."""
version_event = self._CreateEventDict('version')
version_event['evt'] = "2"
version_event['exe'] = RepoSourceVersion()
self._log.insert(0, version_event)
def _CreateEventDict(self, event_name):
"""Returns a dictionary with the common keys/values for git trace2 events.
Args:
event_name: The event name.
Returns:
Dictionary with the common event fields populated.
"""
return {
'event': event_name,
'sid': self._full_sid,
'thread': threading.currentThread().getName(),
'time': datetime.datetime.utcnow().isoformat() + 'Z',
}
def StartEvent(self):
"""Append a 'start' event to the current log."""
start_event = self._CreateEventDict('start')
start_event['argv'] = sys.argv
self._log.append(start_event)
def ExitEvent(self, result):
"""Append an 'exit' event to the current log.
Args:
result: Exit code of the event
"""
exit_event = self._CreateEventDict('exit')
# Consider 'None' success (consistent with event_log result handling).
if result is None:
result = 0
exit_event['code'] = result
self._log.append(exit_event)
def CommandEvent(self, name, subcommands):
"""Append a 'command' event to the current log.
Args:
name: Name of the primary command (ex: repo, git)
subcommands: List of the sub-commands (ex: version, init, sync)
"""
command_event = self._CreateEventDict('command')
command_event['name'] = name
command_event['subcommands'] = subcommands
self._log.append(command_event)
def DefParamRepoEvents(self, config):
"""Append a 'def_param' event for each repo.* config key to the current log.
Args:
config: Repo configuration dictionary
"""
# Only output the repo.* config parameters.
repo_config = {k: v for k, v in config.items() if k.startswith('repo.')}
for param, value in repo_config.items():
def_param_event = self._CreateEventDict('def_param')
def_param_event['param'] = param
def_param_event['value'] = value
self._log.append(def_param_event)
def _GetEventTargetPath(self):
"""Get the 'trace2.eventtarget' path from git configuration.
Returns:
path: git config's 'trace2.eventtarget' path if it exists, or None
"""
path = None
cmd = ['config', '--get', 'trace2.eventtarget']
# TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
# system git config variables.
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
bare=True)
retval = p.Wait()
if retval == 0:
# Strip the trailing newline from the path.
path = p.stdout.rstrip('\n')
elif retval != 1:
# `git config --get` is documented to produce an exit status of `1` if
# the requested variable is not present in the configuration. Report any
# other return value as an error.
print("repo: error: 'git config --get' call failed with return code: %r, stderr: %r" % (
retval, p.stderr), file=sys.stderr)
return path
def Write(self, path=None):
"""Writes the log out to a file.
Log is only written if 'path' or 'git config --get trace2.eventtarget'
provide a valid path to write logs to.
Logging filename format follows the git trace2 style of being a unique
(exclusive writable) file.
Args:
path: Path to where logs should be written.
Returns:
log_path: Path to the log file if log is written, otherwise None
"""
log_path = None
# If no logging path is specified, get the path from 'trace2.eventtarget'.
if path is None:
path = self._GetEventTargetPath()
# If no logging path is specified, exit.
if path is None:
return None
if isinstance(path, str):
# Get absolute path.
path = os.path.abspath(os.path.expanduser(path))
else:
raise TypeError('path: str required but got %s.' % type(path))
# Git trace2 requires a directory to write log to.
# TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
if not os.path.isdir(path):
return None
# Use NamedTemporaryFile to generate a unique filename as required by git trace2.
try:
with tempfile.NamedTemporaryFile(mode='x', prefix=self._sid, dir=path,
delete=False) as f:
# TODO(https://crbug.com/gerrit/13706): Support writing events as they
# occur.
for e in self._log:
# Dump in compact encoding mode.
# See 'Compact encoding' in Python docs:
# https://docs.python.org/3/library/json.html#module-json
json.dump(e, f, indent=None, separators=(',', ':'))
f.write('\n')
log_path = f.name
except FileExistsError as err:
print('repo: warning: git trace2 logging failed: %r' % err,
file=sys.stderr)
return None
return log_path
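A standalone sketch of the exclusive-create behaviour Write() relies on: mode='x' makes NamedTemporaryFile fail rather than reuse an existing file, so each invocation gets its own log in the target directory (the directory below is a temporary stand-in for trace2.eventtarget):

import os
import tempfile

target_dir = tempfile.mkdtemp()  # stand-in for the trace2.eventtarget directory
with tempfile.NamedTemporaryFile(mode='x', prefix='repo-example-',
                                 dir=target_dir, delete=False) as f:
    f.write('{"event":"version","evt":"2"}\n')
    log_path = f.name
print(log_path, os.path.exists(log_path))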

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,8 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import multiprocessing
import platform
import re
import sys
@ -36,15 +38,6 @@ def parse_clientdir(gitc_fs_path):
return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
def _get_project_revision(args):
"""Worker for _set_project_revisions to lookup one project remote."""
(i, url, expr) = args
gitcmd = git_command.GitCommand(
None, ['ls-remote', url, expr], capture_stdout=True, cwd='/tmp')
rc = gitcmd.Wait()
return (i, rc, gitcmd.stdout.split('\t', 1)[0])
def _set_project_revisions(projects):
"""Sets the revisionExpr for a list of projects.
@ -52,38 +45,49 @@ def _set_project_revisions(projects):
should not be overly large. Recommend calling this function multiple times
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
Args:
projects: List of project objects to set the revisionExpr for.
@param projects: List of project objects to set the revisionExpr for.
"""
# Retrieve the commit id for each project based on its current
# revisionExpr, if it is not already a commit id.
with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool:
results_iter = pool.imap_unordered(
_get_project_revision,
((i, project.remote.url, project.revisionExpr)
for i, project in enumerate(projects)
if not git_config.IsId(project.revisionExpr)),
chunksize=8)
for (i, rc, revisionExpr) in results_iter:
project = projects[i]
if rc:
print('FATAL: Failed to retrieve revisionExpr for %s' % project.name)
pool.terminate()
sys.exit(1)
if not revisionExpr:
pool.terminate()
raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
(project.remote.url, project.revisionExpr))
project.revisionExpr = revisionExpr
project_gitcmds = [(
project, git_command.GitCommand(None,
['ls-remote',
project.remote.url,
project.revisionExpr],
capture_stdout=True, cwd='/tmp'))
for project in projects if not git_config.IsId(project.revisionExpr)]
for proj, gitcmd in project_gitcmds:
if gitcmd.Wait():
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
sys.exit(1)
revisionExpr = gitcmd.stdout.split('\t')[0]
if not revisionExpr:
raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
(proj.remote.url, proj.revisionExpr))
proj.revisionExpr = revisionExpr
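Both versions above boil down to the same lookup: run `git ls-remote <url> <ref>` and take the first tab-separated field of the output. A hedged standalone sketch using subprocess (the commented URL is only an example and needs network access):

import subprocess

def resolve_ref(url, expr):
    # Equivalent of the GitCommand ls-remote call: return the SHA-1 for expr.
    out = subprocess.run(['git', 'ls-remote', url, expr],
                         capture_output=True, text=True, check=True).stdout
    return out.split('\t', 1)[0] if out else None

# print(resolve_ref('https://gerrit.googlesource.com/git-repo', 'HEAD'))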
def _manifest_groups(manifest):
"""Returns the manifest group string that should be synced
This is the same logic used by Command.GetProjects(), which is used during
repo sync
@param manifest: The XmlManifest object
"""
mp = manifest.manifestProject
groups = mp.config.GetString('manifest.groups')
if not groups:
groups = 'default,platform-' + platform.system().lower()
return groups
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
"""Generate a manifest for shafsd to use for this GITC client.
Args:
gitc_manifest: Current gitc manifest, or None if there isn't one yet.
manifest: A GitcManifest object loaded with the current repo manifest.
paths: List of project paths we want to update.
@param gitc_manifest: Current gitc manifest, or None if there isn't one yet.
@param manifest: A GitcManifest object loaded with the current repo manifest.
@param paths: List of project paths we want to update.
"""
print('Generating GITC Manifest by fetching revision SHAs for each '
@ -91,7 +95,7 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
if paths is None:
paths = list(manifest.paths.keys())
groups = [x for x in re.split(r'[,\s]+', manifest.GetGroupsStr()) if x]
groups = [x for x in re.split(r'[,\s]+', _manifest_groups(manifest)) if x]
# Convert the paths to projects, and filter them to the matched groups.
projects = [manifest.paths[p] for p in paths]
@ -119,7 +123,11 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
else:
proj.revisionExpr = gitc_proj.revisionExpr
_set_project_revisions(projects)
index = 0
while index < len(projects):
_set_project_revisions(
projects[index:(index + NUM_BATCH_RETRIEVE_REVISIONID)])
index += NUM_BATCH_RETRIEVE_REVISIONID
if gitc_manifest is not None:
for path, proj in gitc_manifest.paths.items():
@ -141,16 +149,13 @@ def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
def save_manifest(manifest, client_dir=None):
"""Save the manifest file in the client_dir.
Args:
manifest: Manifest object to save.
client_dir: Client directory to save the manifest in.
@param client_dir: Client directory to save the manifest in.
@param manifest: Manifest object to save.
"""
if not client_dir:
manifest_file = manifest.manifestFile
else:
manifest_file = os.path.join(client_dir, '.manifest')
with open(manifest_file, 'w') as f:
manifest.Save(f, groups=manifest.GetGroupsStr())
client_dir = manifest.gitc_client_dir
with open(os.path.join(client_dir, '.manifest'), 'w') as f:
manifest.Save(f, groups=_manifest_groups(manifest))
# TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
# Give the GITC filesystem time to register the manifest changes.
time.sleep(3)

154
hooks.py
View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,18 +14,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import json
import os
import re
import subprocess
import sys
import traceback
import urllib.parse
from error import HookError
from git_refs import HEAD
from pyversion import is_python3
if is_python3():
import urllib.parse
else:
import imp
import urlparse
urllib = imp.new_module('urllib')
urllib.parse = urlparse
input = raw_input # noqa: F821
class RepoHook(object):
"""A RepoHook contains information about a script to run as a hook.
@ -37,29 +45,13 @@ class RepoHook(object):
Hooks are always python. When a hook is run, we will load the hook into the
interpreter and execute its main() function.
Combinations of hook option flags:
- no-verify=False, verify=False (DEFAULT):
If stdout is a tty, can prompt about running hooks if needed.
If user denies running hooks, the action is cancelled. If stdout is
not a tty and we would need to prompt about hooks, action is
cancelled.
- no-verify=False, verify=True:
Always run hooks with no prompt.
- no-verify=True, verify=False:
Never run hooks, but run action anyway (AKA bypass hooks).
- no-verify=True, verify=True:
Invalid
"""
def __init__(self,
hook_type,
hooks_project,
repo_topdir,
topdir,
manifest_url,
bypass_hooks=False,
allow_all_hooks=False,
ignore_hooks=False,
abort_if_user_denies=False):
"""RepoHook constructor.
@ -67,27 +59,20 @@ class RepoHook(object):
hook_type: A string representing the type of hook. This is also used
to figure out the name of the file containing the hook. For
example: 'pre-upload'.
hooks_project: The project containing the repo hooks.
If you have a manifest, this is manifest.repo_hooks_project.
OK if this is None, which will make the hook a no-op.
repo_topdir: The top directory of the repo client checkout.
This is the one containing the .repo directory. Scripts will
run with CWD as this directory.
If you have a manifest, this is manifest.topdir.
hooks_project: The project containing the repo hooks. If you have a
manifest, this is manifest.repo_hooks_project. OK if this is None,
which will make the hook a no-op.
topdir: Repo's top directory (the one containing the .repo directory).
Scripts will run with CWD as this directory. If you have a manifest,
this is manifest.topdir
manifest_url: The URL to the manifest git repo.
bypass_hooks: If True, then 'Do not run the hook'.
allow_all_hooks: If True, then 'Run the hook without prompting'.
ignore_hooks: If True, then 'Do not abort action if hooks fail'.
abort_if_user_denies: If True, we'll abort running the hook if the user
abort_if_user_denies: If True, we'll throw a HookError() if the user
doesn't allow us to run the hook.
"""
self._hook_type = hook_type
self._hooks_project = hooks_project
self._repo_topdir = repo_topdir
self._manifest_url = manifest_url
self._bypass_hooks = bypass_hooks
self._allow_all_hooks = allow_all_hooks
self._ignore_hooks = ignore_hooks
self._topdir = topdir
self._abort_if_user_denies = abort_if_user_denies
# Store the full path to the script for convenience.
@ -123,7 +108,7 @@ class RepoHook(object):
# NOTE: Local (non-committed) changes will not be factored into this hash.
# I think this is OK, since we're really only worried about warning the user
# about upstream changes.
return self._hooks_project.work_git.rev_parse(HEAD)
return self._hooks_project.work_git.rev_parse('HEAD')
def _GetMustVerb(self):
"""Return 'must' if the hook is required; 'should' if not."""
@ -362,7 +347,7 @@ context['main'](**kwargs)
try:
# Always run hooks with CWD as topdir.
os.chdir(self._repo_topdir)
os.chdir(self._topdir)
# Put the hook dir as the first item of sys.path so hooks can do
# relative imports. We want to replace the repo dir as [0] so
@ -412,12 +397,7 @@ context['main'](**kwargs)
sys.path = orig_syspath
os.chdir(orig_path)
def _CheckHook(self):
# Bail with a nice error if we can't find the hook.
if not os.path.isfile(self._script_fullpath):
raise HookError('Couldn\'t find repo hook: %s' % self._script_fullpath)
def Run(self, **kwargs):
def Run(self, user_allows_all_hooks, **kwargs):
"""Run the hook.
If the hook doesn't exist (because there is no hooks project or because
@ -430,80 +410,22 @@ context['main'](**kwargs)
to the hook type. For instance, pre-upload hooks will contain
a project_list.
Returns:
True: On success or ignore hooks by user-request
False: The hook failed. The caller should respond with aborting the action.
Some examples in which False is returned:
* Finding the hook failed while it was enabled, or
* the user declined to run a required hook (from _CheckForHookApproval)
In all these cases the user did not pass the proper arguments to
ignore the result through the option combinations as listed in
AddHookOptionGroup().
Raises:
HookError: If there was a problem finding the hook or the user declined
to run a required hook (from _CheckForHookApproval).
"""
# Do not do anything in case bypass_hooks is set, or
# no-op if there is no hooks project or if hook is disabled.
if (self._bypass_hooks or
not self._hooks_project or
self._hook_type not in self._hooks_project.enabled_repo_hooks):
return True
# No-op if there is no hooks project or if hook is disabled.
if ((not self._hooks_project) or (self._hook_type not in
self._hooks_project.enabled_repo_hooks)):
return
passed = True
try:
self._CheckHook()
# Bail with a nice error if we can't find the hook.
if not os.path.isfile(self._script_fullpath):
raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath)
# Make sure the user is OK with running the hook.
if self._allow_all_hooks or self._CheckForHookApproval():
# Run the hook with the same version of python we're using.
self._ExecuteHook(**kwargs)
except SystemExit as e:
passed = False
print('ERROR: %s hooks exited with exit code: %s' % (self._hook_type, str(e)),
file=sys.stderr)
except HookError as e:
passed = False
print('ERROR: %s' % str(e), file=sys.stderr)
# Make sure the user is OK with running the hook.
if (not user_allows_all_hooks) and (not self._CheckForHookApproval()):
return
if not passed and self._ignore_hooks:
print('\nWARNING: %s hooks failed, but continuing anyways.' % self._hook_type,
file=sys.stderr)
passed = True
return passed
@classmethod
def FromSubcmd(cls, manifest, opt, *args, **kwargs):
"""Method to construct the repo hook class
Args:
manifest: The current active manifest for this command from which we
extract a couple of fields.
opt: Contains the commandline options for the action of this hook.
It should contain the options added by AddHookOptionGroup() in which
we are interested in RepoHook execution.
"""
for key in ('bypass_hooks', 'allow_all_hooks', 'ignore_hooks'):
kwargs.setdefault(key, getattr(opt, key))
kwargs.update({
'hooks_project': manifest.repo_hooks_project,
'repo_topdir': manifest.topdir,
'manifest_url': manifest.manifestProject.GetRemote('origin').url,
})
return cls(*args, **kwargs)
@staticmethod
def AddOptionGroup(parser, name):
"""Help options relating to the various hooks."""
# Note that verify and no-verify are NOT opposites of each other, which
# is why they store to different locations. We are using them to match
# 'git commit' syntax.
group = parser.add_option_group(name + ' hooks')
group.add_option('--no-verify',
dest='bypass_hooks', action='store_true',
help='Do not run the %s hook.' % name)
group.add_option('--verify',
dest='allow_all_hooks', action='store_true',
help='Run the %s hook without prompting.' % name)
group.add_option('--ignore-hooks',
action='store_true',
help='Do not abort if %s hooks fail.' % name)
# Run the hook with the same version of python we're using.
self._ExecuteHook(**kwargs)
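The --no-verify/--verify combinations documented above reduce to a small decision; a standalone sketch of that logic (not repo's actual implementation, and ignoring how --ignore-hooks handles failures):

def should_run_hooks(bypass_hooks, allow_all_hooks, stdout_is_tty, user_approves):
    # Mirrors the documented flag combinations for a single hook run.
    if bypass_hooks:        # --no-verify: never run hooks, continue the action
        return False
    if allow_all_hooks:     # --verify: always run without prompting
        return True
    if not stdout_is_tty:   # cannot prompt, so the caller cancels the action
        return False
    return user_approves()  # interactive prompt decides

print(should_run_hooks(False, True, False, lambda: True))   # True
print(should_run_hooks(True, False, True, lambda: False))   # False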

73
main.py
View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
@ -20,6 +21,7 @@ People shouldn't run this directly; instead, they should use the `repo` wrapper
which takes care of execing this entry point.
"""
from __future__ import print_function
import getpass
import netrc
import optparse
@ -28,7 +30,15 @@ import shlex
import sys
import textwrap
import time
import urllib.request
from pyversion import is_python3
if is_python3():
import urllib.request
else:
import imp
import urllib2
urllib = imp.new_module('urllib')
urllib.request = urllib2
try:
import kerberos
@ -39,8 +49,7 @@ from color import SetDefaultColoring
import event_log
from repo_trace import SetTrace
from git_command import user_agent
from git_config import RepoConfig
from git_trace2_event_log import EventLog
from git_config import init_ssh, close_ssh, RepoConfig
from command import InteractiveCommand
from command import MirrorSafeCommand
from command import GitcAvailableCommand, GitcClientCommand
@ -54,12 +63,14 @@ from error import NoManifestException
from error import NoSuchProjectError
from error import RepoChangedException
import gitc_utils
from manifest_xml import GitcClient, RepoClient
from manifest_xml import GitcManifest, XmlManifest
from pager import RunPager, TerminatePager
from wrapper import WrapperPath, Wrapper
from subcmds import all_commands
if not is_python3():
input = raw_input # noqa: F821
# NB: These do not need to be kept in sync with the repo launcher script.
# These may be much newer as it allows the repo launcher to roll between
@ -71,13 +82,12 @@ from subcmds import all_commands
#
# python-3.6 is in Ubuntu Bionic.
MIN_PYTHON_VERSION_SOFT = (3, 6)
MIN_PYTHON_VERSION_HARD = (3, 5)
MIN_PYTHON_VERSION_HARD = (3, 4)
if sys.version_info.major < 3:
print('repo: error: Python 2 is no longer supported; '
print('repo: warning: Python 2 is no longer supported; '
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
file=sys.stderr)
sys.exit(1)
else:
if sys.version_info < MIN_PYTHON_VERSION_HARD:
print('repo: error: Python 3 version is too old; '
@ -119,8 +129,6 @@ global_options.add_option('--version',
global_options.add_option('--event-log',
dest='event_log', action='store',
help='filename of event log to append timeline to')
global_options.add_option('--git-trace2-event-log', action='store',
help='directory to write git trace2 event log to')
class _Repo(object):
@ -202,17 +210,15 @@ class _Repo(object):
file=sys.stderr)
return 1
git_trace2_event_log = EventLog()
cmd.repodir = self.repodir
cmd.client = RepoClient(cmd.repodir)
cmd.manifest = cmd.client.manifest
cmd.manifest = XmlManifest(cmd.repodir)
cmd.gitc_manifest = None
gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
if gitc_client_name:
cmd.gitc_manifest = GitcClient(cmd.repodir, gitc_client_name)
cmd.client.isGitcClient = True
cmd.gitc_manifest = GitcManifest(cmd.repodir, gitc_client_name)
cmd.manifest.isGitcClient = True
Editor.globalConfig = cmd.client.globalConfig
Editor.globalConfig = cmd.manifest.globalConfig
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
print("fatal: '%s' requires a working directory" % name,
@ -240,7 +246,7 @@ class _Repo(object):
return 1
if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
config = cmd.client.globalConfig
config = cmd.manifest.globalConfig
if gopts.pager:
use_pager = True
else:
@ -253,11 +259,7 @@ class _Repo(object):
start = time.time()
cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
cmd.event_log.SetParent(cmd_event)
git_trace2_event_log.StartEvent()
git_trace2_event_log.CommandEvent(name='repo', subcommands=[name])
try:
cmd.CommonValidateOptions(copts, cargs)
cmd.ValidateOptions(copts, cargs)
result = cmd.Execute(copts, cargs)
except (DownloadError, ManifestInvalidRevisionError,
@ -299,15 +301,10 @@ class _Repo(object):
cmd.event_log.FinishEvent(cmd_event, finish,
result is None or result == 0)
git_trace2_event_log.DefParamRepoEvents(
cmd.manifest.manifestProject.config.DumpConfigDict())
git_trace2_event_log.ExitEvent(result)
if gopts.event_log:
cmd.event_log.Write(os.path.abspath(
os.path.expanduser(gopts.event_log)))
git_trace2_event_log.Write(gopts.git_trace2_event_log)
return result
@ -591,16 +588,20 @@ def _Main(argv):
repo = _Repo(opt.repodir)
try:
init_http()
name, gopts, argv = repo._ParseArgs(argv)
run = lambda: repo._Run(name, gopts, argv) or 0
if gopts.trace_python:
import trace
tracer = trace.Trace(count=False, trace=True, timing=True,
ignoredirs=set(sys.path[1:]))
result = tracer.runfunc(run)
else:
result = run()
try:
init_ssh()
init_http()
name, gopts, argv = repo._ParseArgs(argv)
run = lambda: repo._Run(name, gopts, argv) or 0
if gopts.trace_python:
import trace
tracer = trace.Trace(count=False, trace=True, timing=True,
ignoredirs=set(sys.path[1:]))
result = tracer.runfunc(run)
else:
result = run()
finally:
close_ssh()
except KeyboardInterrupt:
print('aborted by user', file=sys.stderr)
result = 1
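Putting the trace2 wiring from this hunk together, the newer per-command lifecycle is roughly the sequence below; a sketch that assumes repo's source tree is on sys.path and git is installed, with a made-up subcommand name:

from git_trace2_event_log import EventLog

git_trace2_event_log = EventLog()          # records the 'version' event
git_trace2_event_log.StartEvent()          # 'start' + sys.argv
git_trace2_event_log.CommandEvent(name='repo', subcommands=['sync'])
result = 0                                 # stand-in for cmd.Execute(...)
git_trace2_event_log.DefParamRepoEvents({'repo.mirror': 'false'})
git_trace2_event_log.ExitEvent(result)     # 'exit' + code
print(git_trace2_event_log.Write(None))    # None unless trace2.eventtarget is set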

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,14 +14,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from __future__ import print_function
import itertools
import os
import platform
import re
import sys
import xml.dom.minidom
import urllib.parse
from pyversion import is_python3
if is_python3():
import urllib.parse
else:
import imp
import urlparse
urllib = imp.new_module('urllib')
urllib.parse = urlparse
import gitc_utils
from git_config import GitConfig, IsId
@ -28,15 +37,11 @@ import platform_utils
from project import RemoteSpec, Project, MetaProject
from error import (ManifestParseError, ManifestInvalidPathError,
ManifestInvalidRevisionError)
from wrapper import Wrapper
MANIFEST_FILE_NAME = 'manifest.xml'
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
# ContactInfo has the self-registered bug url, supplied by the manifest authors.
ContactInfo = collections.namedtuple('ContactInfo', 'bugurl')
# urljoin gets confused if the scheme is not known.
urllib.parse.uses_relative.extend([
'ssh',
@ -182,24 +187,12 @@ class _XmlRemote(object):
class XmlManifest(object):
"""manages the repo configuration file"""
def __init__(self, repodir, manifest_file, local_manifests=None):
"""Initialize.
Args:
repodir: Path to the .repo/ dir for holding all internal checkout state.
It must be in the top directory of the repo client checkout.
manifest_file: Full path to the manifest file to parse. This will usually
be |repodir|/|MANIFEST_FILE_NAME|.
local_manifests: Full path to the directory of local override manifests.
This will usually be |repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
"""
# TODO(vapier): Move this out of this class.
self.globalConfig = GitConfig.ForUser()
def __init__(self, repodir):
self.repodir = os.path.abspath(repodir)
self.topdir = os.path.dirname(self.repodir)
self.manifestFile = manifest_file
self.local_manifests = local_manifests
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
self.globalConfig = GitConfig.ForUser()
self.isGitcClient = False
self._load_local_manifests = True
self.repoProject = MetaProject(self, 'repo',
@ -287,21 +280,18 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
if r.revision is not None:
e.setAttribute('revision', r.revision)
def _ParseList(self, field):
"""Parse fields that contain flattened lists.
def _ParseGroups(self, groups):
return [x for x in re.split(r'[,\s]+', groups) if x]
These are whitespace & comma separated. Empty elements will be discarded.
def Save(self, fd, peg_rev=False, peg_rev_upstream=True, peg_rev_dest_branch=True, groups=None):
"""Write the current manifest out to the given file descriptor.
"""
return [x for x in re.split(r'[,\s]+', field) if x]
def ToXml(self, peg_rev=False, peg_rev_upstream=True, peg_rev_dest_branch=True, groups=None):
"""Return the current manifest XML."""
mp = self.manifestProject
if groups is None:
groups = mp.config.GetString('manifest.groups')
if groups:
groups = self._ParseList(groups)
groups = self._ParseGroups(groups)
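In both spellings the helper is just a comma/whitespace split with empty elements dropped; for example:

import re

def parse_list(field):
    # Same regex as _ParseList/_ParseGroups above.
    return [x for x in re.split(r'[,\s]+', field) if x]

print(parse_list('default,platform-linux  notdefault'))
# ['default', 'platform-linux', 'notdefault']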
doc = xml.dom.minidom.Document()
root = doc.createElement('manifest')
@ -409,8 +399,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
if not revision or revision != p.revisionExpr:
e.setAttribute('revision', p.revisionExpr)
elif p.revisionId:
e.setAttribute('revision', p.revisionId)
if (p.upstream and (p.upstream != p.revisionExpr or
p.upstream != d.upstreamExpr)):
e.setAttribute('upstream', p.upstream)
@ -471,81 +459,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
' '.join(self._repo_hooks_project.enabled_repo_hooks))
root.appendChild(e)
if self._superproject:
root.appendChild(doc.createTextNode(''))
e = doc.createElement('superproject')
e.setAttribute('name', self._superproject['name'])
remoteName = None
if d.remote:
remoteName = d.remote.name
remote = self._superproject.get('remote')
if not d.remote or remote.orig_name != remoteName:
remoteName = remote.orig_name
e.setAttribute('remote', remoteName)
root.appendChild(e)
if self._contactinfo.bugurl != Wrapper().BUG_URL:
root.appendChild(doc.createTextNode(''))
e = doc.createElement('contactinfo')
e.setAttribute('bugurl', self._contactinfo.bugurl)
root.appendChild(e)
return doc
def ToDict(self, **kwargs):
"""Return the current manifest as a dictionary."""
# Elements that may only appear once.
SINGLE_ELEMENTS = {
'notice',
'default',
'manifest-server',
'repo-hooks',
'superproject',
'contactinfo',
}
# Elements that may be repeated.
MULTI_ELEMENTS = {
'remote',
'remove-project',
'project',
'extend-project',
'include',
# These are children of 'project' nodes.
'annotation',
'project',
'copyfile',
'linkfile',
}
doc = self.ToXml(**kwargs)
ret = {}
def append_children(ret, node):
for child in node.childNodes:
if child.nodeType == xml.dom.Node.ELEMENT_NODE:
attrs = child.attributes
element = dict((attrs.item(i).localName, attrs.item(i).value)
for i in range(attrs.length))
if child.nodeName in SINGLE_ELEMENTS:
ret[child.nodeName] = element
elif child.nodeName in MULTI_ELEMENTS:
ret.setdefault(child.nodeName, []).append(element)
else:
raise ManifestParseError('Unhandled element "%s"' % (child.nodeName,))
append_children(element, child)
append_children(ret, doc.firstChild)
return ret
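For a small manifest, the dictionary ToDict() builds has roughly this shape (hand-written illustration with example values, not tool output):

manifest_dict = {
    'remote': [{'name': 'origin', 'fetch': 'https://example.com/'}],
    'default': {'remote': 'origin', 'revision': 'main'},
    'project': [{
        'name': 'foo', 'path': 'foo',
        # children of <project> nodes are nested under their own element name
        'copyfile': [{'src': 'LICENSE', 'dest': 'LICENSE'}],
    }],
}
print(manifest_dict['project'][0]['name'])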
def Save(self, fd, **kwargs):
"""Write the current manifest out to the given file descriptor."""
doc = self.ToXml(**kwargs)
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
def _output_manifest_project_extras(self, p, e):
"""Manifests can modify e if they support extra project attributes."""
pass
@property
def paths(self):
@ -572,16 +490,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
self._Load()
return self._repo_hooks_project
@property
def superproject(self):
self._Load()
return self._superproject
@property
def contactinfo(self):
self._Load()
return self._contactinfo
@property
def notice(self):
self._Load()
@ -606,16 +514,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
return self.manifestProject.config.GetString('repo.clonefilter')
return None
@property
def PartialCloneExclude(self):
exclude = self.manifest.manifestProject.config.GetString(
'repo.partialcloneexclude') or ''
return set(x.strip() for x in exclude.split(','))
@property
def HasLocalManifests(self):
return self._load_local_manifests and self.local_manifests
@property
def IsMirror(self):
return self.manifestProject.config.GetBoolean('repo.mirror')
@ -632,17 +530,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
def HasSubmodules(self):
return self.manifestProject.config.GetBoolean('repo.submodules')
def GetDefaultGroupsStr(self):
"""Returns the default group string for the platform."""
return 'default,platform-' + platform.system().lower()
def GetGroupsStr(self):
"""Returns the manifest group string that should be synced."""
groups = self.manifestProject.config.GetString('manifest.groups')
if not groups:
groups = self.GetDefaultGroupsStr()
return groups
def _Unload(self):
self._loaded = False
self._projects = {}
@ -650,8 +537,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
self._remotes = {}
self._default = None
self._repo_hooks_project = None
self._superproject = {}
self._contactinfo = ContactInfo(Wrapper().BUG_URL)
self._notice = None
self.branch = None
self._manifest_server = None
@ -664,22 +549,25 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
b = b[len(R_HEADS):]
self.branch = b
# The manifestFile was specified by the user which is why we allow include
# paths to point anywhere.
nodes = []
nodes.append(self._ParseManifestXml(
self.manifestFile, self.manifestProject.worktree,
restrict_includes=False))
nodes.append(self._ParseManifestXml(self.manifestFile,
self.manifestProject.worktree))
if self._load_local_manifests and self.local_manifests:
if self._load_local_manifests:
if os.path.exists(os.path.join(self.repodir, LOCAL_MANIFEST_NAME)):
print('error: %s is not supported; put local manifests in `%s` instead'
% (LOCAL_MANIFEST_NAME,
os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
file=sys.stderr)
sys.exit(1)
local_dir = os.path.abspath(os.path.join(self.repodir,
LOCAL_MANIFESTS_DIR_NAME))
try:
for local_file in sorted(platform_utils.listdir(self.local_manifests)):
for local_file in sorted(platform_utils.listdir(local_dir)):
if local_file.endswith('.xml'):
local = os.path.join(self.local_manifests, local_file)
# Since local manifests are entirely managed by the user, allow
# them to point anywhere the user wants.
nodes.append(self._ParseManifestXml(
local, self.repodir, restrict_includes=False))
local = os.path.join(local_dir, local_file)
nodes.append(self._ParseManifestXml(local, self.repodir))
except OSError:
pass
@ -697,19 +585,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
self._loaded = True
def _ParseManifestXml(self, path, include_root, parent_groups='',
restrict_includes=True):
"""Parse a manifest XML and return the computed nodes.
Args:
path: The XML file to read & parse.
include_root: The path to interpret include "name"s relative to.
parent_groups: The groups to apply to this projects.
restrict_includes: Whether to constrain the "name" attribute of includes.
Returns:
List of XML nodes.
"""
def _ParseManifestXml(self, path, include_root):
try:
root = xml.dom.minidom.parse(path)
except (OSError, xml.parsers.expat.ExpatError) as e:
@ -728,35 +604,20 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
for node in manifest.childNodes:
if node.nodeName == 'include':
name = self._reqatt(node, 'name')
if restrict_includes:
msg = self._CheckLocalPath(name)
if msg:
raise ManifestInvalidPathError(
'<include> invalid "name": %s: %s' % (name, msg))
include_groups = ''
if parent_groups:
include_groups = parent_groups
if node.hasAttribute('groups'):
include_groups = node.getAttribute('groups') + ',' + include_groups
fp = os.path.join(include_root, name)
if not os.path.isfile(fp):
raise ManifestParseError("include [%s/]%s doesn't exist or isn't a file"
% (include_root, name))
raise ManifestParseError("include %s doesn't exist or isn't a file"
% (name,))
try:
nodes.extend(self._ParseManifestXml(fp, include_root, include_groups))
nodes.extend(self._ParseManifestXml(fp, include_root))
# should isolate this to the exact exception, but that's
# tricky. actual parsing implementation may vary.
except (KeyboardInterrupt, RuntimeError, SystemExit, ManifestParseError):
except (KeyboardInterrupt, RuntimeError, SystemExit):
raise
except Exception as e:
raise ManifestParseError(
"failed parsing included manifest %s: %s" % (name, e))
else:
if parent_groups and node.nodeName == 'project':
nodeGroups = parent_groups
if node.hasAttribute('groups'):
nodeGroups = node.getAttribute('groups') + ',' + nodeGroups
node.setAttribute('groups', nodeGroups)
nodes.append(node)
return nodes
@ -831,7 +692,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
path = node.getAttribute('path')
groups = node.getAttribute('groups')
if groups:
groups = self._ParseList(groups)
groups = self._ParseGroups(groups)
revision = node.getAttribute('revision')
remote = node.getAttribute('remote')
if remote:
@ -853,7 +714,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
if node.nodeName == 'repo-hooks':
# Get the name of the project and the (space-separated) list of enabled hooks.
repo_hooks_project = self._reqatt(node, 'in-project')
enabled_repo_hooks = self._ParseList(self._reqatt(node, 'enabled-list'))
enabled_repo_hooks = self._reqatt(node, 'enabled-list').split()
# Only one project can be the hooks project
if self._repo_hooks_project is not None:
@ -877,28 +738,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
# Store the enabled hooks in the Project object.
self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
if node.nodeName == 'superproject':
name = self._reqatt(node, 'name')
# There can only be one superproject.
if self._superproject.get('name'):
raise ManifestParseError(
'duplicate superproject in %s' %
(self.manifestFile))
self._superproject['name'] = name
remote_name = node.getAttribute('remote')
if not remote_name:
remote = self._default.remote
else:
remote = self._get_remote(node)
if remote is None:
raise ManifestParseError("no remote for superproject %s within %s" %
(name, self.manifestFile))
self._superproject['remote'] = remote.ToRemoteSpec(name)
if node.nodeName == 'contactinfo':
bugurl = self._reqatt(node, 'bugurl')
# This element can be repeated; later entries will clobber earlier ones.
self._contactinfo = ContactInfo(bugurl)
if node.nodeName == 'remove-project':
name = self._reqatt(node, 'name')
@ -1047,10 +886,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
reads a <project> element from the manifest file
"""
name = self._reqatt(node, 'name')
msg = self._CheckLocalPath(name, dir_ok=True)
if msg:
raise ManifestInvalidPathError(
'<project> invalid "name": %s: %s' % (name, msg))
if parent:
name = self._JoinName(parent.name, name)
@ -1071,12 +906,9 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
path = node.getAttribute('path')
if not path:
path = name
else:
# NB: The "." project is handled specially in Project.Sync_LocalHalf.
msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
if msg:
raise ManifestInvalidPathError(
'<project> invalid "path": %s: %s' % (path, msg))
if path.startswith('/'):
raise ManifestParseError("project %s path cannot be absolute in %s" %
(name, self.manifestFile))
rebase = XmlBool(node, 'rebase', True)
sync_c = XmlBool(node, 'sync-c', False)
@ -1095,7 +927,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
groups = ''
if node.hasAttribute('groups'):
groups = node.getAttribute('groups')
groups = self._ParseList(groups)
groups = self._ParseGroups(groups)
if parent is None:
relpath, worktree, gitdir, objdir, use_git_worktrees = \
@ -1196,38 +1028,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
return relpath, worktree, gitdir, objdir
@staticmethod
def _CheckLocalPath(path, dir_ok=False, cwd_dot_ok=False):
"""Verify |path| is reasonable for use in filesystem paths.
Used with <copyfile> & <linkfile> & <project> elements.
This only validates the |path| in isolation: it does not check against the
current filesystem state. Thus it is suitable as a first pass in a parser.
It enforces a number of constraints:
* No empty paths.
* No "~" in paths.
* No Unicode codepoints that filesystems might elide when normalizing.
* No relative path components like "." or "..".
* No absolute paths.
* No ".git" or ".repo*" path components.
Args:
path: The path name to validate.
dir_ok: Whether |path| may force a directory (e.g. end in a /).
cwd_dot_ok: Whether |path| may be just ".".
Returns:
None if |path| is OK, a failure message otherwise.
"""
if not path:
return 'empty paths not allowed'
def _CheckLocalPath(path, symlink=False):
"""Verify |path| is reasonable for use in <copyfile> & <linkfile>."""
if '~' in path:
return '~ not allowed (due to 8.3 filenames on Windows filesystems)'
path_codepoints = set(path)
# Some filesystems (like Apple's HFS+) try to normalize Unicode codepoints
# which means there are alternative names for ".git". Reject paths containing
# these as there shouldn't be any reasonable need for them here.
@ -1251,17 +1056,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
u'\u206F', # NOMINAL DIGIT SHAPES
u'\uFEFF', # ZERO WIDTH NO-BREAK SPACE
}
if BAD_CODEPOINTS & path_codepoints:
if BAD_CODEPOINTS & set(path):
# This message is more expansive than reality, but should be fine.
return 'Unicode combining characters not allowed'
# Reject newlines as there shouldn't be any legitimate use for them; they'll
# be confusing to users, and they can easily break tools that expect to be
# able to iterate over newline delimited lists. This even applies to our
# own code like .repo/project.list.
if {'\r', '\n'} & path_codepoints:
return 'Newlines not allowed'
# Assume paths might be used on case-insensitive filesystems.
path = path.lower()
@ -1270,18 +1068,16 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
# our constructed logic here. Especially since manifest authors only use
# / in their paths.
resep = re.compile(r'[/%s]' % re.escape(os.path.sep))
# Strip off trailing slashes as those only produce '' elements, and we use
# parts to look for individual bad components.
parts = resep.split(path.rstrip('/'))
parts = resep.split(path)
# Some people use src="." to create stable links to projects. Let's allow
# that but reject all other uses of "." to keep things simple.
if not cwd_dot_ok or parts != ['.']:
if parts != ['.']:
for part in set(parts):
if part in {'.', '..', '.git'} or part.startswith('.repo'):
return 'bad component: %s' % (part,)
if not dir_ok and resep.match(path[-1]):
if not symlink and resep.match(path[-1]):
return 'dirs not allowed'
# NB: The two abspath checks here are to handle platforms with multiple
@ -1313,8 +1109,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
# |src| is the file we read from or path we point to for symlinks.
# It is relative to the top of the git project checkout.
is_linkfile = element == 'linkfile'
msg = cls._CheckLocalPath(src, dir_ok=is_linkfile, cwd_dot_ok=is_linkfile)
msg = cls._CheckLocalPath(src, symlink=element == 'linkfile')
if msg:
raise ManifestInvalidPathError(
'<%s> invalid "src": %s: %s' % (element, src, msg))
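A few concrete inputs against the newer _CheckLocalPath() signature shown above, assuming repo's source tree is on sys.path; each call returns None on success or a short failure message:

from manifest_xml import XmlManifest

check = XmlManifest._CheckLocalPath
print(check('src/platform/foo'))       # None: a plain relative path is fine
print(check('foo/.git/config'))        # 'bad component: .git'
print(check('../escape'))              # 'bad component: ..'
print(check('docs/'))                  # 'dirs not allowed' unless dir_ok=True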
@ -1409,48 +1204,22 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
class GitcManifest(XmlManifest):
"""Parser for GitC (git-in-the-cloud) manifests."""
def __init__(self, repodir, gitc_client_name):
"""Initialize the GitcManifest object."""
super(GitcManifest, self).__init__(repodir)
self.isGitcClient = True
self.gitc_client_name = gitc_client_name
self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
gitc_client_name)
self.manifestFile = os.path.join(self.gitc_client_dir, '.manifest')
def _ParseProject(self, node, parent=None):
"""Override _ParseProject and add support for GITC specific attributes."""
return super()._ParseProject(
return super(GitcManifest, self)._ParseProject(
node, parent=parent, old_revision=node.getAttribute('old-revision'))
def _output_manifest_project_extras(self, p, e):
"""Output GITC Specific Project attributes"""
if p.old_revision:
e.setAttribute('old-revision', str(p.old_revision))
class RepoClient(XmlManifest):
"""Manages a repo client checkout."""
def __init__(self, repodir, manifest_file=None):
self.isGitcClient = False
if os.path.exists(os.path.join(repodir, LOCAL_MANIFEST_NAME)):
print('error: %s is not supported; put local manifests in `%s` instead' %
(LOCAL_MANIFEST_NAME, os.path.join(repodir, LOCAL_MANIFESTS_DIR_NAME)),
file=sys.stderr)
sys.exit(1)
if manifest_file is None:
manifest_file = os.path.join(repodir, MANIFEST_FILE_NAME)
local_manifests = os.path.abspath(os.path.join(repodir, LOCAL_MANIFESTS_DIR_NAME))
super().__init__(repodir, manifest_file, local_manifests)
# TODO: Completely separate manifest logic out of the client.
self.manifest = self
class GitcClient(RepoClient, GitcManifest):
"""Manages a GitC client checkout."""
def __init__(self, repodir, gitc_client_name):
"""Initialize the GitcManifest object."""
self.gitc_client_name = gitc_client_name
self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
gitc_client_name)
super().__init__(repodir, os.path.join(self.gitc_client_dir, '.manifest'))
self.isGitcClient = True
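In the newer layout above, commands no longer construct XmlManifest directly; a hedged sketch of the entry point (the checkout path is an example and must contain an initialized .repo directory):

from manifest_xml import RepoClient

client = RepoClient('/path/to/checkout/.repo')
manifest = client.manifest        # RepoClient aliases itself as .manifest
print(manifest.GetGroupsStr())    # e.g. 'default,platform-linux'
print(sorted(manifest.paths)[:5])  # first few project paths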

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import select
import subprocess

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -15,9 +17,18 @@
import errno
import os
import platform
import select
import shutil
import stat
from pyversion import is_python3
if is_python3():
from queue import Queue
else:
from Queue import Queue
from threading import Thread
def isWindows():
""" Returns True when running with the native port of Python for Windows,
@ -28,6 +39,161 @@ def isWindows():
return platform.system() == "Windows"
class FileDescriptorStreams(object):
""" Platform agnostic abstraction enabling non-blocking I/O over a
collection of file descriptors. This abstraction is required because
fcntl(os.O_NONBLOCK) is not supported on Windows.
"""
@classmethod
def create(cls):
""" Factory method: instantiates the concrete class according to the
current platform.
"""
if isWindows():
return _FileDescriptorStreamsThreads()
else:
return _FileDescriptorStreamsNonBlocking()
def __init__(self):
self.streams = []
def add(self, fd, dest, std_name):
""" Wraps an existing file descriptor as a stream.
"""
self.streams.append(self._create_stream(fd, dest, std_name))
def remove(self, stream):
""" Removes a stream, when done with it.
"""
self.streams.remove(stream)
@property
def is_done(self):
""" Returns True when all streams have been processed.
"""
return len(self.streams) == 0
def select(self):
""" Returns the set of streams that have data available to read.
The returned streams each expose a read() and a close() method.
When done with a stream, call the remove(stream) method.
"""
raise NotImplementedError
def _create_stream(self, fd, dest, std_name):
""" Creates a new stream wrapping an existing file descriptor.
"""
raise NotImplementedError
class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
""" Implementation of FileDescriptorStreams for platforms that support
non blocking I/O.
"""
def __init__(self):
super(_FileDescriptorStreamsNonBlocking, self).__init__()
self._poll = select.poll()
self._fd_to_stream = {}
class Stream(object):
""" Encapsulates a file descriptor """
def __init__(self, fd, dest, std_name):
self.fd = fd
self.dest = dest
self.std_name = std_name
self.set_non_blocking()
def set_non_blocking(self):
import fcntl
flags = fcntl.fcntl(self.fd, fcntl.F_GETFL)
fcntl.fcntl(self.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def fileno(self):
return self.fd.fileno()
def read(self):
return self.fd.read(4096)
def close(self):
self.fd.close()
def _create_stream(self, fd, dest, std_name):
stream = self.Stream(fd, dest, std_name)
self._fd_to_stream[stream.fileno()] = stream
self._poll.register(stream, select.POLLIN)
return stream
def remove(self, stream):
self._poll.unregister(stream)
del self._fd_to_stream[stream.fileno()]
super(_FileDescriptorStreamsNonBlocking, self).remove(stream)
def select(self):
return [self._fd_to_stream[fd] for fd, _ in self._poll.poll()]
class _FileDescriptorStreamsThreads(FileDescriptorStreams):
""" Implementation of FileDescriptorStreams for platforms that don't support
non blocking I/O. This implementation requires creating threads issuing
blocking read operations on file descriptors.
"""
def __init__(self):
super(_FileDescriptorStreamsThreads, self).__init__()
# The queue is shared across all threads so we can simulate the
# behavior of the select() function
self.queue = Queue(10) # Limit incoming data from streams
def _create_stream(self, fd, dest, std_name):
return self.Stream(fd, dest, std_name, self.queue)
def select(self):
# Return only one stream at a time, as it is the most straightforward
# thing to do and it is compatible with the select() function.
item = self.queue.get()
stream = item.stream
stream.data = item.data
return [stream]
class QueueItem(object):
""" Item put in the shared queue """
def __init__(self, stream, data):
self.stream = stream
self.data = data
class Stream(object):
""" Encapsulates a file descriptor """
def __init__(self, fd, dest, std_name, queue):
self.fd = fd
self.dest = dest
self.std_name = std_name
self.queue = queue
self.data = None
self.thread = Thread(target=self.read_to_queue)
self.thread.daemon = True
self.thread.start()
def close(self):
self.fd.close()
def read(self):
data = self.data
self.data = None
return data
def read_to_queue(self):
""" The thread function: reads everything from the file descriptor into
the shared queue and terminates when reaching EOF.
"""
for line in iter(self.fd.readline, b''):
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, line))
self.fd.close()
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, b''))
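A usage sketch of the abstraction above: multiplex a child process's stdout/stderr through FileDescriptorStreams.create(), which is roughly how GitCommand consumes output on both POSIX and Windows (assumes platform_utils is importable and git is installed):

import subprocess
import sys

from platform_utils import FileDescriptorStreams

proc = subprocess.Popen(['git', '--version'],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
streams = FileDescriptorStreams.create()
streams.add(proc.stdout, sys.stdout, 'stdout')
streams.add(proc.stderr, sys.stderr, 'stderr')
while not streams.is_done:
    for stream in streams.select():
        data = stream.read()
        if not data:                 # EOF: stop watching this stream
            stream.close()
            streams.remove(stream)
        else:
            stream.dest.write(data.decode())
proc.wait()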
def symlink(source, link_name):
"""Creates a symbolic link pointing to source named link_name.
Note: On Windows, source must exist on disk, as the implementation needs

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,10 +16,18 @@
import errno
from pyversion import is_python3
from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
from ctypes import c_buffer, c_ubyte, Structure, Union, byref
from ctypes import c_buffer
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG, LPDWORD
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG
if is_python3():
from ctypes import c_ubyte, Structure, Union, byref
from ctypes.wintypes import LPDWORD
else:
# For legacy Python2 different imports are needed.
from ctypes.wintypes import POINTER, c_ubyte, Structure, Union, byref
LPDWORD = POINTER(DWORD)
kernel32 = WinDLL('kernel32', use_last_error=True)
@ -194,15 +204,26 @@ def readlink(path):
'Error reading symbolic link "{}"'.format(path))
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
return rdb.SymbolicLinkReparseBuffer.PrintName
return _preserve_encoding(path, rdb.SymbolicLinkReparseBuffer.PrintName)
elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
return rdb.MountPointReparseBuffer.PrintName
return _preserve_encoding(path, rdb.MountPointReparseBuffer.PrintName)
# Unsupported reparse point type
_raise_winerror(
ERROR_NOT_SUPPORTED,
'Error reading symbolic link "{}"'.format(path))
def _preserve_encoding(source, target):
"""Ensures target is the same string type (i.e. unicode or str) as source."""
if is_python3():
return target
if isinstance(source, unicode): # noqa: F821
return unicode(target) # noqa: F821
return str(target)
def _raise_winerror(code, error_desc):
win_error_desc = FormatError(code).strip()
error_desc = '{}: {}'.format(error_desc, win_error_desc)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -25,52 +27,18 @@ _NOT_TTY = not os.isatty(2)
CSI_ERASE_LINE = '\x1b[2K'
def duration_str(total):
"""A less noisy timedelta.__str__.
The default timedelta stringification contains a lot of leading zeros and
uses microsecond resolution. This makes for noisy output.
"""
hours, rem = divmod(total, 3600)
mins, secs = divmod(rem, 60)
ret = '%.3fs' % (secs,)
if mins:
ret = '%im%s' % (mins, ret)
if hours:
ret = '%ih%s' % (hours, ret)
return ret
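For example, the helper formats a few durations like this (assuming repo's progress module is importable):

from progress import duration_str

print(duration_str(4.2))     # 4.200s
print(duration_str(75))      # 1m15.000s
print(duration_str(3725.5))  # 1h2m5.500s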
class Progress(object):
def __init__(self, title, total=0, units='', print_newline=False, delay=True,
quiet=False):
def __init__(self, title, total=0, units='', print_newline=False,
always_print_percentage=False):
self._title = title
self._total = total
self._done = 0
self._lastp = -1
self._start = time()
self._show = not delay
self._show = False
self._units = units
self._print_newline = print_newline
# Only show the active jobs section if we run more than one in parallel.
self._show_jobs = False
self._active = 0
# When quiet, never show any output. It's a bit hacky, but reusing the
# existing logic that delays initial output keeps the rest of the class
# clean. Basically we set the start time to years in the future.
if quiet:
self._show = False
self._start += 2**32
def start(self, name):
self._active += 1
if not self._show_jobs:
self._show_jobs = self._active > 1
self.update(inc=0, msg='started ' + name)
def finish(self, name):
self.update(msg='finished ' + name)
self._active -= 1
self._always_print_percentage = always_print_percentage
def update(self, inc=1, msg=''):
self._done += inc
@ -92,40 +60,35 @@ class Progress(object):
sys.stderr.flush()
else:
p = (100 * self._done) / self._total
if self._show_jobs:
jobs = '[%d job%s] ' % (self._active, 's' if self._active > 1 else '')
else:
jobs = ''
sys.stderr.write('%s\r%s: %2d%% %s(%d%s/%d%s)%s%s%s' % (
CSI_ERASE_LINE,
self._title,
p,
jobs,
self._done, self._units,
self._total, self._units,
' ' if msg else '', msg,
'\n' if self._print_newline else ''))
sys.stderr.flush()
if self._lastp != p or self._always_print_percentage:
self._lastp = p
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s)%s%s%s' % (
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units,
' ' if msg else '', msg,
"\n" if self._print_newline else ""))
sys.stderr.flush()
def end(self):
if _NOT_TTY or IsTrace() or not self._show:
return
duration = duration_str(time() - self._start)
if self._total <= 0:
sys.stderr.write('%s\r%s: %d, done in %s\n' % (
sys.stderr.write('%s\r%s: %d, done.\n' % (
CSI_ERASE_LINE,
self._title,
self._done,
duration))
self._done))
sys.stderr.flush()
else:
p = (100 * self._done) / self._total
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s), done in %s\n' % (
sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s), done.\n' % (
CSI_ERASE_LINE,
self._title,
p,
self._done, self._units,
self._total, self._units,
duration))
self._total, self._units))
sys.stderr.flush()
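A sketch of driving the newer job-aware Progress API shown above (output goes to stderr and only appears on a tty; the project names are made up):

import time

from progress import Progress

pm = Progress('Syncing', total=3, delay=False)
for name in ('projA', 'projB', 'projC'):
    pm.start(name)      # bumps the active-jobs counter; shown once >1 job runs
    time.sleep(0.1)     # stand-in for real work
    pm.finish(name)     # counts the item as done and reports 'finished <name>'
pm.end()                # prints the summary with the elapsed duration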

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import errno
import filecmp
import glob
@ -25,7 +28,6 @@ import sys
import tarfile
import tempfile
import time
import urllib.parse
from color import Coloring
from git_command import GitCommand, git_require
@ -40,6 +42,16 @@ from repo_trace import IsTrace, Trace
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
from pyversion import is_python3
if is_python3():
import urllib.parse
else:
import imp
import urlparse
urllib = imp.new_module('urllib')
urllib.parse = urlparse
input = raw_input # noqa: F821
# Maximum sleep time allowed during retries.
MAXIMUM_RETRY_SLEEP_SEC = 3600.0
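These constants bound the fetch retry sleeps used later in this file; a standalone sketch of exponential backoff with roughly +/-10% jitter capped at the maximum (it mirrors the described behaviour, not the exact retry loop):

import random

MAXIMUM_RETRY_SLEEP_SEC = 3600.0
RETRY_JITTER_PERCENT = 0.1

def next_sleep(prev_sleep):
    # Double the previous sleep, cap it, then jitter by +/- 10%.
    base = min(prev_sleep * 2, MAXIMUM_RETRY_SLEEP_SEC)
    jitter = base * RETRY_JITTER_PERCENT
    return base + random.uniform(-jitter, jitter)

sleep = 2.0
for attempt in range(4):
    print('attempt %d: sleeping ~%.1fs before retrying' % (attempt, sleep))
    sleep = next_sleep(sleep)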
@ -50,8 +62,7 @@ RETRY_JITTER_PERCENT = 0.1
def _lwrite(path, content):
lock = '%s.lock' % path
# Maintain Unix line endings on all OS's to match git behavior.
with open(lock, 'w', newline='\n') as fd:
with open(lock, 'w') as fd:
fd.write(content)
try:
@ -232,7 +243,7 @@ class ReviewableBranch(object):
class StatusColoring(Coloring):
def __init__(self, config):
super().__init__(config, 'status')
Coloring.__init__(self, config, 'status')
self.project = self.printer('header', attr='bold')
self.branch = self.printer('header', attr='bold')
self.nobranch = self.printer('nobranch', fg='red')
@ -246,7 +257,7 @@ class StatusColoring(Coloring):
class DiffColoring(Coloring):
def __init__(self, config):
super().__init__(config, 'diff')
Coloring.__init__(self, config, 'diff')
self.project = self.printer('header', attr='bold')
self.fail = self.printer('fail', fg='red')
@ -438,7 +449,6 @@ class RemoteSpec(object):
self.orig_name = orig_name
self.fetchUrl = fetchUrl
class Project(object):
# These objects can be shared between several working trees.
shareable_files = ['description', 'info']
@ -500,7 +510,7 @@ class Project(object):
with exponential backoff and jitter.
old_revision: saved git commit id for open GITC projects.
"""
self.client = self.manifest = manifest
self.manifest = manifest
self.name = name
self.remote = remote
self.gitdir = gitdir.replace('\\', '/')
@ -541,7 +551,7 @@ class Project(object):
self.linkfiles = []
self.annotations = []
self.config = GitConfig.ForRepository(gitdir=self.gitdir,
defaults=self.client.globalConfig)
defaults=self.manifest.globalConfig)
if self.worktree:
self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
@ -832,12 +842,10 @@ class Project(object):
return 'DIRTY'
def PrintWorkTreeDiff(self, absolute_paths=False, output_redir=None):
def PrintWorkTreeDiff(self, absolute_paths=False):
"""Prints the status of the repository to stdout.
"""
out = DiffColoring(self.config)
if output_redir:
out.redirect(output_redir)
cmd = ['diff']
if out.is_on:
cmd.append('--color')
@ -851,7 +859,6 @@ class Project(object):
cmd,
capture_stdout=True,
capture_stderr=True)
p.Wait()
except GitError as e:
out.nl()
out.project('project %s/' % self.relpath)
@ -859,11 +866,16 @@ class Project(object):
out.fail('%s', str(e))
out.nl()
return False
if p.stdout:
out.nl()
out.project('project %s/' % self.relpath)
out.nl()
out.write('%s', p.stdout)
has_diff = False
for line in p.process.stdout:
if not hasattr(line, 'encode'):
line = line.decode()
if not has_diff:
out.nl()
out.project('project %s/' % self.relpath)
out.nl()
has_diff = True
print(line[:-1])
return p.Wait() == 0
# Publish / Upload ##
@ -1014,11 +1026,10 @@ class Project(object):
if GitCommand(self, cmd, bare=True).Wait() != 0:
raise UploadError('Upload failed')
if not dryrun:
msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
self.bare_git.UpdateRef(R_PUB + branch.name,
R_HEADS + branch.name,
message=msg)
msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
self.bare_git.UpdateRef(R_PUB + branch.name,
R_HEADS + branch.name,
message=msg)
# Sync ##
def _ExtractArchive(self, tarpath, path=None):
@ -1039,20 +1050,17 @@ class Project(object):
def Sync_NetworkHalf(self,
quiet=False,
verbose=False,
output_redir=None,
is_new=None,
current_branch_only=None,
current_branch_only=False,
force_sync=False,
clone_bundle=True,
tags=None,
tags=True,
archive=False,
optimized_fetch=False,
retry_fetches=0,
prune=False,
submodules=False,
ssh_proxy=None,
clone_filter=None,
partial_clone_exclude=set()):
clone_filter=None):
"""Perform only the network IO portion of the sync process.
Local working directory/branch state is not affected.
"""
@ -1083,16 +1091,6 @@ class Project(object):
_warn("Cannot remove archive %s: %s", tarpath, str(e))
self._CopyAndLinkFiles()
return True
# If the shared object dir already exists, don't try to rebootstrap with a
# clone bundle download. We should have the majority of objects already.
if clone_bundle and os.path.exists(self.objdir):
clone_bundle = False
if self.name in partial_clone_exclude:
clone_bundle = True
clone_filter = None
if is_new is None:
is_new = not self.Exists
if is_new:
@ -1117,7 +1115,7 @@ class Project(object):
and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)):
is_new = False
if current_branch_only is None:
if not current_branch_only:
if self.sync_c:
current_branch_only = True
elif not self.manifest._loaded:
@ -1126,8 +1124,8 @@ class Project(object):
elif self.manifest.default.sync_c:
current_branch_only = True
if tags is None:
tags = self.sync_tags
if not self.sync_tags:
tags = False
if self.clone_depth:
depth = self.clone_depth
@ -1139,12 +1137,10 @@ class Project(object):
(ID_RE.match(self.revisionExpr) and
self._CheckForImmutableRevision())):
if not self._RemoteFetch(
initial=is_new,
quiet=quiet, verbose=verbose, output_redir=output_redir,
alt_dir=alt_dir, current_branch_only=current_branch_only,
initial=is_new, quiet=quiet, verbose=verbose, alt_dir=alt_dir,
current_branch_only=current_branch_only,
tags=tags, prune=prune, depth=depth,
submodules=submodules, force_sync=force_sync,
ssh_proxy=ssh_proxy,
clone_filter=clone_filter, retry_fetches=retry_fetches):
return False
@ -1154,11 +1150,7 @@ class Project(object):
alternates_file = os.path.join(self.gitdir, 'objects/info/alternates')
if os.path.exists(alternates_file):
cmd = ['repack', '-a', '-d']
p = GitCommand(self, cmd, bare=True, capture_stdout=bool(output_redir),
merge_output=bool(output_redir))
if p.stdout and output_redir:
output_redir.write(p.stdout)
if p.Wait() != 0:
if GitCommand(self, cmd, bare=True).Wait() != 0:
return False
platform_utils.remove(alternates_file)
@ -1176,7 +1168,7 @@ class Project(object):
self._InitHooks()
def _CopyAndLinkFiles(self):
if self.client.isGitcClient:
if self.manifest.isGitcClient:
return
for copyfile in self.copyfiles:
copyfile._Copy()
@ -1215,12 +1207,6 @@ class Project(object):
raise ManifestInvalidRevisionError('revision %s in %s not found' %
(self.revisionExpr, self.name))
def SetRevisionId(self, revisionId):
if self.clone_depth or self.manifest.manifestProject.config.GetString('repo.depth'):
self.upstream = self.revisionExpr
self.revisionId = revisionId
def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
"""Perform only the local IO portion of the sync process.
Network access is not required.
@ -1237,18 +1223,6 @@ class Project(object):
self.CleanPublishedCache(all_refs)
revid = self.GetRevisionId(all_refs)
# Special case the root of the repo client checkout. Make sure it doesn't
# contain files being checked out to dirs we don't allow.
if self.relpath == '.':
PROTECTED_PATHS = {'.repo'}
paths = set(self.work_git.ls_tree('-z', '--name-only', '--', revid).split('\0'))
bad_paths = paths & PROTECTED_PATHS
if bad_paths:
syncbuf.fail(self,
'Refusing to checkout project that writes to protected '
'paths: %s' % (', '.join(bad_paths),))
return
def _doff():
self._FastForward(revid)
self._CopyAndLinkFiles()
@ -1720,11 +1694,6 @@ class Project(object):
if cb is None or name != cb:
kill.append(name)
# Minor optimization: If there's nothing to prune, then don't try to read
# any project state.
if not kill and not cb:
return []
rev = self.GetRevisionId(left)
if cb is not None \
and not self._revlist(HEAD + '...' + rev) \
@ -1965,8 +1934,7 @@ class Project(object):
try:
# if revision (sha or tag) is not present then following function
# throws an error.
self.bare_git.rev_list('-1', '--missing=allow-any',
'%s^0' % self.revisionExpr, '--')
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
return True
except GitError:
# There is no such persistent revision. We have to fetch it.
@ -1990,13 +1958,11 @@ class Project(object):
initial=False,
quiet=False,
verbose=False,
output_redir=None,
alt_dir=None,
tags=True,
prune=False,
depth=None,
submodules=False,
ssh_proxy=None,
force_sync=False,
clone_filter=None,
retry_fetches=2,
@ -2044,14 +2010,16 @@ class Project(object):
if not name:
name = self.remote.name
ssh_proxy = False
remote = self.GetRemote(name)
if not remote.PreConnectFetch(ssh_proxy):
ssh_proxy = None
if remote.PreConnectFetch():
ssh_proxy = True
if initial:
if alt_dir and 'objects' == os.path.basename(alt_dir):
ref_dir = os.path.dirname(alt_dir)
packed_refs = os.path.join(self.gitdir, 'packed-refs')
remote = self.GetRemote(name)
all_refs = self.bare_ref.all
ids = set(all_refs.values())
@ -2138,8 +2106,6 @@ class Project(object):
# Shallow checkout of a specific commit, fetch from that commit and not
# the heads only as the commit might be deeper in the history.
spec.append(branch)
if self.upstream:
spec.append(self.upstream)
else:
if is_sha1:
branch = self.upstream
@ -2169,27 +2135,29 @@ class Project(object):
ok = prune_tried = False
for try_n in range(retry_fetches):
gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy,
merge_output=True, capture_stdout=quiet or bool(output_redir))
if gitcmd.stdout and not quiet and output_redir:
output_redir.write(gitcmd.stdout)
merge_output=True, capture_stdout=quiet)
ret = gitcmd.Wait()
if ret == 0:
ok = True
break
# Retry later due to HTTP 429 Too Many Requests.
elif (gitcmd.stdout and
'error:' in gitcmd.stdout and
'HTTP 429' in gitcmd.stdout):
# Fallthru to sleep+retry logic at the bottom.
pass
elif ('error:' in gitcmd.stderr and
'HTTP 429' in gitcmd.stderr):
if not quiet:
print('429 received, sleeping: %s sec' % retry_cur_sleep,
file=sys.stderr)
time.sleep(retry_cur_sleep)
retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
MAXIMUM_RETRY_SLEEP_SEC)
retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
RETRY_JITTER_PERCENT))
continue
# Try to prune remote branches once in case there are conflicts.
# For example, if the remote had refs/heads/upstream, but deleted that and
# now has refs/heads/upstream/foo.
elif (gitcmd.stdout and
'error:' in gitcmd.stdout and
'git remote prune' in gitcmd.stdout and
# If this is not last attempt, try 'git remote prune'.
elif (try_n < retry_fetches - 1 and
'error:' in gitcmd.stderr and
'git remote prune' in gitcmd.stderr and
not prune_tried):
prune_tried = True
prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
@ -2197,8 +2165,6 @@ class Project(object):
ret = prunecmd.Wait()
if ret:
break
print('retrying fetch after pruning remote branches', file=output_redir)
# Continue right away so we don't sleep as we shouldn't need to.
continue
elif current_branch_only and is_sha1 and ret == 128:
# Exit code 128 means "couldn't find the ref you asked for"; if we're
@ -2208,18 +2174,9 @@ class Project(object):
elif ret < 0:
# Git died with a signal, exit immediately
break
# Figure out how long to sleep before the next attempt, if there is one.
if not verbose and gitcmd.stdout:
print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir)
if try_n < retry_fetches - 1:
print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep),
file=output_redir)
time.sleep(retry_cur_sleep)
retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
MAXIMUM_RETRY_SLEEP_SEC)
retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
RETRY_JITTER_PERCENT))
if not verbose:
print('%s:\n%s' % (self.name, gitcmd.stdout), file=sys.stderr)
time.sleep(random.randint(30, 45))
if initial:
if alt_dir:
@ -2237,10 +2194,10 @@ class Project(object):
# Sync the current branch only with depth set to None.
# We always pass depth=None down to avoid infinite recursion.
return self._RemoteFetch(
name=name, quiet=quiet, verbose=verbose, output_redir=output_redir,
name=name, quiet=quiet, verbose=verbose,
current_branch_only=current_branch_only and depth,
initial=False, alt_dir=alt_dir,
depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter)
depth=None, clone_filter=clone_filter)
return ok
@ -2445,6 +2402,14 @@ class Project(object):
self.bare_objdir.init()
if self.use_git_worktrees:
# Set up the m/ space to point to the worktree-specific ref space.
# We'll update the worktree-specific ref space on each checkout.
if self.manifest.branch:
self.bare_git.symbolic_ref(
'-m', 'redirecting to worktree scope',
R_M + self.manifest.branch,
R_WORKTREE_M + self.manifest.branch)
# Enable per-worktree config file support if possible. This is more a
# nice-to-have feature for users rather than a hard requirement.
if git_require((2, 20, 0)):
@ -2512,7 +2477,10 @@ class Project(object):
self.config.SetString(key, m.GetString(key))
self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f')
self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip')
self.config.SetBoolean('core.bare', True if self.manifest.IsMirror else None)
if self.manifest.IsMirror:
self.config.SetString('core.bare', 'true')
else:
self.config.SetString('core.bare', None)
except Exception:
if init_obj_dir and os.path.exists(self.objdir):
platform_utils.rmtree(self.objdir)
@ -2581,14 +2549,6 @@ class Project(object):
def _InitMRef(self):
if self.manifest.branch:
if self.use_git_worktrees:
# Set up the m/ space to point to the worktree-specific ref space.
# We'll update the worktree-specific ref space on each checkout.
ref = R_M + self.manifest.branch
if not self.bare_ref.symref(ref):
self.bare_git.symbolic_ref(
'-m', 'redirecting to worktree scope',
ref, R_WORKTREE_M + self.manifest.branch)
# We can't update this ref with git worktrees until it exists.
# We'll wait until the initial checkout to set it.
if not os.path.exists(self.worktree):
@ -2596,8 +2556,6 @@ class Project(object):
base = R_WORKTREE_M
active_git = self.work_git
self._InitAnyMRef(HEAD, self.bare_git, detach=True)
else:
base = R_M
active_git = self.bare_git
@ -2607,7 +2565,7 @@ class Project(object):
def _InitMirrorHead(self):
self._InitAnyMRef(HEAD, self.bare_git)
def _InitAnyMRef(self, ref, active_git, detach=False):
def _InitAnyMRef(self, ref, active_git):
cur = self.bare_ref.symref(ref)
if self.revisionId:
@ -2620,10 +2578,7 @@ class Project(object):
dst = remote.ToLocal(self.revisionExpr)
if cur != dst:
msg = 'manifest set to %s' % self.revisionExpr
if detach:
active_git.UpdateRef(ref, dst, message=msg, detach=True)
else:
active_git.symbolic_ref('-m', msg, ref, dst)
active_git.symbolic_ref('-m', msg, ref, dst)
def _CheckDirReference(self, srcdir, destdir, share_refs):
# Git worktrees don't use symlinks to share at all.
@ -2746,14 +2701,12 @@ class Project(object):
# Some platforms (e.g. Windows) won't let us update dotgit in situ because
# of file permissions. Delete it and recreate it from scratch to avoid.
platform_utils.remove(dotgit)
# Use relative path from checkout->worktree & maintain Unix line endings
# on all OS's to match git behavior.
with open(dotgit, 'w', newline='\n') as fp:
# Use relative path from checkout->worktree.
with open(dotgit, 'w') as fp:
print('gitdir:', os.path.relpath(git_worktree_path, self.worktree),
file=fp)
# Use relative path from worktree->checkout & maintain Unix line endings
# on all OS's to match git behavior.
with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp:
# Use relative path from worktree->checkout.
with open(os.path.join(git_worktree_path, 'gitdir'), 'w') as fp:
print(os.path.relpath(dotgit, git_worktree_path), file=fp)
self._InitMRef()
@ -2909,44 +2862,48 @@ class Project(object):
bare=False,
capture_stdout=True,
capture_stderr=True)
p.Wait()
r = {}
out = p.stdout
if out:
out = iter(out[:-1].split('\0'))
while out:
try:
info = next(out)
path = next(out)
except StopIteration:
break
try:
out = p.process.stdout.read()
if not hasattr(out, 'encode'):
out = out.decode()
r = {}
if out:
out = iter(out[:-1].split('\0'))
while out:
try:
info = next(out)
path = next(out)
except StopIteration:
break
class _Info(object):
class _Info(object):
def __init__(self, path, omode, nmode, oid, nid, state):
self.path = path
self.src_path = None
self.old_mode = omode
self.new_mode = nmode
self.old_id = oid
self.new_id = nid
def __init__(self, path, omode, nmode, oid, nid, state):
self.path = path
self.src_path = None
self.old_mode = omode
self.new_mode = nmode
self.old_id = oid
self.new_id = nid
if len(state) == 1:
self.status = state
self.level = None
else:
self.status = state[:1]
self.level = state[1:]
while self.level.startswith('0'):
self.level = self.level[1:]
if len(state) == 1:
self.status = state
self.level = None
else:
self.status = state[:1]
self.level = state[1:]
while self.level.startswith('0'):
self.level = self.level[1:]
info = info[1:].split(' ')
info = _Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
info.path = next(out)
r[info.path] = info
return r
info = info[1:].split(' ')
info = _Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
info.path = next(out)
r[info.path] = info
return r
finally:
p.Wait()
def GetDotgitPath(self, subpath=None):
"""Return the full path to the .git dir.
@ -3143,7 +3100,7 @@ class _Later(object):
class _SyncColoring(Coloring):
def __init__(self, config):
super().__init__(config, 'reposync')
Coloring.__init__(self, config, 'reposync')
self.project = self.printer('header', attr='bold')
self.info = self.printer('info')
self.fail = self.printer('fail', fg='red')

21
pyversion.py Normal file
View File

@ -0,0 +1,21 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
def is_python3():
return sys.version_info[0] == 3

View File

@ -1,2 +1,5 @@
> **Warning: The "master" branch is no longer used. Use "main" instead.**<br>
> https://gerrit.googlesource.com/git-repo/+/HEAD/release/README.md
These are helper tools for managing official releases.
See the [release process](../docs/release-process.md) document for more details.

241
repo
View File

@ -32,13 +32,6 @@ import subprocess
import sys
# These should never be newer than the main.py version since this needs to be a
# bit more flexible with older systems. See that file for more details on the
# versions we select.
MIN_PYTHON_VERSION_SOFT = (3, 6)
MIN_PYTHON_VERSION_HARD = (3, 5)
# Keep basic logic in sync with repo_trace.py.
class Trace(object):
"""Trace helper logic."""
@ -77,6 +70,8 @@ def check_python_version():
def reexec(prog):
exec_command([prog] + sys.argv)
MIN_PYTHON_VERSION = (3, 6)
ver = sys.version_info
major = ver.major
minor = ver.minor
@ -85,26 +80,19 @@ def check_python_version():
if (major, minor) < (2, 7):
print('repo: error: Your Python version is too old. '
'Please use Python {}.{} or newer instead.'.format(
*MIN_PYTHON_VERSION_SOFT), file=sys.stderr)
*MIN_PYTHON_VERSION), file=sys.stderr)
sys.exit(1)
# Try to re-exec the version specific Python 3 if needed.
if (major, minor) < MIN_PYTHON_VERSION_SOFT:
if (major, minor) < MIN_PYTHON_VERSION:
# Python makes releases ~once a year, so try our min version +10 to help
# bridge the gap. This is the fallback anyways so perf isn't critical.
min_major, min_minor = MIN_PYTHON_VERSION_SOFT
min_major, min_minor = MIN_PYTHON_VERSION
for inc in range(0, 10):
reexec('python{}.{}'.format(min_major, min_minor + inc))
# Fallback to older versions if possible.
for inc in range(MIN_PYTHON_VERSION_SOFT[1] - MIN_PYTHON_VERSION_HARD[1], 0, -1):
# Don't downgrade, and don't reexec ourselves (which would infinite loop).
if (min_major, min_minor - inc) <= (major, minor):
break
reexec('python{}.{}'.format(min_major, min_minor - inc))
# Try the generic Python 3 wrapper, but only if it's new enough. If it
# isn't, we want to just give up below and make the user resolve things.
# Try the generic Python 3 wrapper, but only if it's new enough. We don't
# want to go from (still supported) Python 2.7 to (unsupported) Python 3.5.
try:
proc = subprocess.Popen(
['python3', '-c', 'import sys; '
@ -115,20 +103,18 @@ def check_python_version():
except (OSError, subprocess.CalledProcessError):
python3_ver = None
# If the python3 version looks like it's new enough, give it a try.
if (python3_ver and python3_ver >= MIN_PYTHON_VERSION_HARD
and python3_ver != (major, minor)):
# The python3 version looks like it's new enough, so give it a try.
if python3_ver and python3_ver >= MIN_PYTHON_VERSION:
reexec('python3')
# We're still here, so diagnose things for the user.
if major < 3:
print('repo: error: Python 2 is no longer supported; '
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_HARD),
print('repo: warning: Python 2 is no longer supported; '
'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION),
file=sys.stderr)
sys.exit(1)
elif (major, minor) < MIN_PYTHON_VERSION_HARD:
else:
print('repo: error: Python 3 version is too old; '
'Please use Python {}.{} or newer.'.format(*MIN_PYTHON_VERSION_HARD),
'Please use Python {}.{} or newer.'.format(*MIN_PYTHON_VERSION),
file=sys.stderr)
sys.exit(1)
@ -145,11 +131,9 @@ if not REPO_URL:
REPO_REV = os.environ.get('REPO_REV')
if not REPO_REV:
REPO_REV = 'stable'
# URL to file bug reports for repo tool issues.
BUG_URL = 'https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue'
# increment this whenever we make important changes to this script
VERSION = (2, 15)
VERSION = (2, 8)
# increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (2, 3)
@ -248,7 +232,6 @@ GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
import collections
import errno
import json
import optparse
import re
import shutil
@ -272,18 +255,11 @@ gpg_dir = os.path.join(home_dot_repo, 'gnupg')
def GetParser(gitc_init=False):
"""Setup the CLI parser."""
if gitc_init:
usage = 'repo gitc-init -c client [options] [-u] url'
usage = 'repo gitc-init -u url -c client [options]'
else:
usage = 'repo init [options] [-u] url'
usage = 'repo init -u url [options]'
parser = optparse.OptionParser(usage=usage)
InitParser(parser, gitc_init=gitc_init)
return parser
def InitParser(parser, gitc_init=False):
"""Setup the CLI parser."""
# NB: Keep in sync with command.py:_CommonOptions().
# Logging.
group = parser.add_option_group('Logging options')
@ -298,24 +274,10 @@ def InitParser(parser, gitc_init=False):
group = parser.add_option_group('Manifest options')
group.add_option('-u', '--manifest-url',
help='manifest repository location', metavar='URL')
group.add_option('-b', '--manifest-branch', metavar='REVISION',
help='manifest branch or revision (use HEAD for default)')
group.add_option('-m', '--manifest-name', default='default.xml',
group.add_option('-b', '--manifest-branch',
help='manifest branch or revision', metavar='REVISION')
group.add_option('-m', '--manifest-name',
help='initial manifest file', metavar='NAME.xml')
group.add_option('-g', '--groups', default='default',
help='restrict manifest projects to ones with specified '
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
metavar='GROUP')
group.add_option('-p', '--platform', default='auto',
help='restrict manifest projects to ones with a specified '
'platform group [auto|all|none|linux|darwin|...]',
metavar='PLATFORM')
group.add_option('--submodules', action='store_true',
help='sync any submodules associated with the manifest repo')
# Options that only affect manifest project, and not any of the projects
# specified in the manifest itself.
group = parser.add_option_group('Manifest (only) checkout options')
cbr_opts = ['--current-branch']
# The gitc-init subcommand allocates -c itself, but a lot of init users
# want -c, so try to satisfy both as best we can.
@ -324,29 +286,9 @@ def InitParser(parser, gitc_init=False):
group.add_option(*cbr_opts,
dest='current_branch_only', action='store_true',
help='fetch only current manifest branch from server')
group.add_option('--no-current-branch',
dest='current_branch_only', action='store_false',
help='fetch all manifest branches from server')
group.add_option('--tags',
action='store_true',
help='fetch tags in the manifest')
group.add_option('--no-tags',
dest='tags', action='store_false',
help="don't fetch tags in the manifest")
# These are fundamentally different ways of structuring the checkout.
group = parser.add_option_group('Checkout modes')
group.add_option('--mirror', action='store_true',
help='create a replica of the remote repositories '
'rather than a client working directory')
group.add_option('--archive', action='store_true',
help='checkout an archive instead of a git repository for '
'each project. See git archive.')
group.add_option('--worktree', action='store_true',
help='use git-worktree to manage projects')
# These are fundamentally different ways of structuring the checkout.
group = parser.add_option_group('Project checkout optimizations')
group.add_option('--reference',
help='location of mirror directory', metavar='DIR')
group.add_option('--dissociate', action='store_true',
@ -357,27 +299,32 @@ def InitParser(parser, gitc_init=False):
group.add_option('--partial-clone', action='store_true',
help='perform partial clone (https://git-scm.com/'
'docs/gitrepository-layout#_code_partialclone_code)')
group.add_option('--no-partial-clone', action='store_false',
help='disable use of partial clone (https://git-scm.com/'
'docs/gitrepository-layout#_code_partialclone_code)')
group.add_option('--partial-clone-exclude', action='store',
help='exclude the specified projects (a comma-delimited '
'project names) from partial clone (https://git-scm.com'
'/docs/gitrepository-layout#_code_partialclone_code)')
group.add_option('--clone-filter', action='store', default='blob:none',
help='filter for use with --partial-clone '
'[default: %default]')
group.add_option('--use-superproject', action='store_true', default=None,
help='use the manifest superproject to sync projects')
group.add_option('--no-use-superproject', action='store_false',
dest='use_superproject',
help='disable use of manifest superprojects')
group.add_option('--worktree', action='store_true',
help=optparse.SUPPRESS_HELP)
group.add_option('--archive', action='store_true',
help='checkout an archive instead of a git repository for '
'each project. See git archive.')
group.add_option('--submodules', action='store_true',
help='sync any submodules associated with the manifest repo')
group.add_option('-g', '--groups', default='default',
help='restrict manifest projects to ones with specified '
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
metavar='GROUP')
group.add_option('-p', '--platform', default='auto',
help='restrict manifest projects to ones with a specified '
'platform group [auto|all|none|linux|darwin|...]',
metavar='PLATFORM')
group.add_option('--clone-bundle', action='store_true',
help='enable use of /clone.bundle on HTTP/HTTPS '
'(default if not --partial-clone)')
help='enable use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
group.add_option('--no-clone-bundle',
dest='clone_bundle', action='store_false',
help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
group.add_option('--no-tags',
dest='tags', default=True, action='store_false',
help="don't fetch tags in the manifest")
# Tool.
group = parser.add_option_group('repo Version options')
@ -492,11 +439,9 @@ def get_gitc_manifest_dir():
def gitc_parse_clientdir(gitc_fs_path):
"""Parse a path in the GITC FS and return its client name.
Args:
gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
@param gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
Returns:
The GITC client name.
@returns: The GITC client name
"""
if gitc_fs_path == GITC_FS_ROOT_DIR:
return None
@ -554,11 +499,8 @@ def _Init(args, gitc_init=False):
parser = GetParser(gitc_init=gitc_init)
opt, args = parser.parse_args(args)
if args:
if not opt.manifest_url:
opt.manifest_url = args.pop(0)
if args:
parser.print_usage()
sys.exit(1)
parser.print_usage()
sys.exit(1)
opt.quiet = opt.output_mode is False
opt.verbose = opt.output_mode is True
@ -859,10 +801,11 @@ def _DownloadBundle(url, cwd, quiet, verbose):
try:
r = urllib.request.urlopen(url)
except urllib.error.HTTPError as e:
if e.code not in [400, 401, 403, 404, 501]:
print('warning: Cannot get %s' % url, file=sys.stderr)
print('warning: HTTP error %s' % e.code, file=sys.stderr)
return False
if e.code in [401, 403, 404, 501]:
return False
print('fatal: Cannot get %s' % url, file=sys.stderr)
print('fatal: HTTP error %s' % e.code, file=sys.stderr)
raise CloneFailure()
except urllib.error.URLError as e:
print('fatal: Cannot get %s' % url, file=sys.stderr)
print('fatal: error %s' % e.reason, file=sys.stderr)
@ -1076,90 +1019,6 @@ def _ParseArguments(args):
return cmd, opt, arg
class Requirements(object):
"""Helper for checking repo's system requirements."""
REQUIREMENTS_NAME = 'requirements.json'
def __init__(self, requirements):
"""Initialize.
Args:
requirements: A dictionary of settings.
"""
self.requirements = requirements
@classmethod
def from_dir(cls, path):
return cls.from_file(os.path.join(path, cls.REQUIREMENTS_NAME))
@classmethod
def from_file(cls, path):
try:
with open(path, 'rb') as f:
data = f.read()
except EnvironmentError:
# NB: EnvironmentError is used for Python 2 & 3 compatibility.
# If we couldn't open the file, assume it's an old source tree.
return None
return cls.from_data(data)
@classmethod
def from_data(cls, data):
comment_line = re.compile(br'^ *#')
strip_data = b''.join(x for x in data.splitlines() if not comment_line.match(x))
try:
json_data = json.loads(strip_data)
except Exception: # pylint: disable=broad-except
# If we couldn't parse it, assume it's incompatible.
return None
return cls(json_data)
def _get_soft_ver(self, pkg):
"""Return the soft version for |pkg| if it exists."""
return self.requirements.get(pkg, {}).get('soft', ())
def _get_hard_ver(self, pkg):
"""Return the hard version for |pkg| if it exists."""
return self.requirements.get(pkg, {}).get('hard', ())
@staticmethod
def _format_ver(ver):
"""Return a dotted version from |ver|."""
return '.'.join(str(x) for x in ver)
def assert_ver(self, pkg, curr_ver):
"""Verify |pkg|'s |curr_ver| is new enough."""
curr_ver = tuple(curr_ver)
soft_ver = tuple(self._get_soft_ver(pkg))
hard_ver = tuple(self._get_hard_ver(pkg))
if curr_ver < hard_ver:
print('repo: error: Your version of "%s" (%s) is unsupported; '
'Please upgrade to at least version %s to continue.' %
(pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
file=sys.stderr)
sys.exit(1)
if curr_ver < soft_ver:
print('repo: warning: Your version of "%s" (%s) is no longer supported; '
'Please upgrade to at least version %s to avoid breakage.' %
(pkg, self._format_ver(curr_ver), self._format_ver(soft_ver)),
file=sys.stderr)
def assert_all(self):
"""Assert all of the requirements are satisified."""
# See if we need a repo launcher upgrade first.
self.assert_ver('repo', VERSION)
# Check python before we try to import the repo code.
self.assert_ver('python', sys.version_info)
# Check git while we're at it.
self.assert_ver('git', ParseGitVersion())
def _Usage():
gitc_usage = ""
if get_gitc_manifest_dir():
@ -1178,7 +1037,6 @@ The most commonly used repo commands are:
For access to the full online help, install repo ("repo init").
""")
print('Bug reports:', BUG_URL)
sys.exit(0)
@ -1212,7 +1070,6 @@ def _Version():
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
print('CPU %s (%s)' %
(uname.machine, uname.processor if uname.processor else 'unknown'))
print('Bug reports:', BUG_URL)
sys.exit(0)
@ -1319,10 +1176,6 @@ def main(orig_args):
print("fatal: unable to find repo entry point", file=sys.stderr)
sys.exit(1)
reqs = Requirements.from_dir(os.path.dirname(repo_main))
if reqs:
reqs.assert_all()
ver_str = '.'.join(map(str, VERSION))
me = [sys.executable, repo_main,
'--repo-dir=%s' % rel_repo_dir,

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -17,6 +19,7 @@
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
"""
from __future__ import print_function
import sys
import os

View File

@ -1,57 +0,0 @@
# This file declares various requirements for this version of repo. The
# launcher script will load it and check the constraints before trying to run
# us. This avoids issues of the launcher using an old version of Python (e.g.
# 3.5) while the codebase has moved on to requiring something much newer (e.g.
# 3.8). If the launcher tried to import us, it would fail with syntax errors.
# This is a JSON file with line-level comments allowed.
# Always keep backwards compatibility in mind. The launcher script is robust
# against missing values, but when a field is renamed/removed, it means older
# versions of the launcher script won't be able to enforce the constraint.
# When requiring versions, always use lists as they are easy to parse & compare
# in Python. Strings would require further processing to turn into a list.
# Version constraints should be expressed in pairs: soft & hard. Soft versions
# are when we start warning users that their software is too old and we're planning
# on dropping support for it, so they need to start planning system upgrades.
# Hard versions are when we refuse to work with the tool. Users will be shown an
# error message before we abort entirely.
# When deciding whether to upgrade a version requirement, check out the distro
# lists to see who will be impacted:
# https://gerrit.googlesource.com/git-repo/+/HEAD/docs/release-process.md#Project-References
{
# The repo launcher itself. This allows us to force people to upgrade as some
# ignore the warnings about it being out of date, or install ancient versions
# to start with for whatever reason.
#
# NB: Repo launchers started checking this file with repo-2.12, so listing
# versions older than that won't make a difference.
"repo": {
"hard": [2, 11],
"soft": [2, 11]
},
# Supported Python versions.
#
# python-3.6 is in Ubuntu Bionic.
# python-3.5 is in Debian Stretch.
"python": {
"hard": [3, 5],
"soft": [3, 6]
},
# Supported git versions.
#
# git-1.7.2 is in Debian Squeeze.
# git-1.7.9 is in Ubuntu Precise.
# git-1.9.1 is in Ubuntu Trusty.
# git-1.7.10 is in Debian Wheezy.
"git": {
"hard": [1, 7, 2],
"soft": [1, 9, 1]
}
}
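The soft/hard pairs above are meant to be compared as plain Python tuples. A small illustrative check (the helper name is made up; the constraint values mirror the file):

import sys

PYTHON_REQ = {'hard': (3, 5), 'soft': (3, 6)}  # values from the block above

def check_version(curr, req):
    # Hard minimum: refuse to run at all below it.
    if curr < req['hard']:
        return 'error: version is unsupported, upgrade required'
    # Soft minimum: still works, but warn that support is ending.
    if curr < req['soft']:
        return 'warning: support for this version is ending soon'
    return 'ok'

print(check_version(tuple(sys.version_info[:2]), PYTHON_REQ))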

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -15,28 +16,26 @@
"""Wrapper to run pytest with the right settings."""
from __future__ import print_function
import errno
import os
import shutil
import subprocess
import sys
def find_pytest():
"""Try to locate a good version of pytest."""
# Use the Python 3 version if available.
ret = shutil.which('pytest-3')
if ret:
return ret
# Hopefully this is a Python 3 version.
ret = shutil.which('pytest')
if ret:
return ret
print('%s: unable to find pytest.' % (__file__,), file=sys.stderr)
print('%s: Try installing: sudo apt-get install python-pytest' % (__file__,),
file=sys.stderr)
def run_pytest(cmd, argv):
"""Run the unittests via |cmd|."""
try:
return subprocess.call([cmd] + argv)
except OSError as e:
if e.errno == errno.ENOENT:
print('%s: unable to run `%s`: %s' % (__file__, cmd, e), file=sys.stderr)
print('%s: Try installing pytest: sudo apt-get install python-pytest' %
(__file__,), file=sys.stderr)
return 127
else:
raise
def main(argv):
@ -49,8 +48,7 @@ def main(argv):
pythonpath += os.pathsep + oldpythonpath
os.environ['PYTHONPATH'] = pythonpath
pytest = find_pytest()
return subprocess.run([pytest] + argv, check=False).returncode
return run_pytest('pytest', argv)
if __name__ == '__main__':

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -15,6 +16,8 @@
"""Python packaging for repo."""
from __future__ import print_function
import os
import setuptools
@ -32,7 +35,7 @@ with open(os.path.join(TOPDIR, 'README.md')) as fp:
# https://packaging.python.org/tutorials/packaging-projects/
setuptools.setup(
name='repo',
version='2',
version='1.13.8',
maintainer='Various',
maintainer_email='repo-discuss@googlegroups.com',
description='Repo helps manage many Git repositories',
@ -52,10 +55,9 @@ setuptools.setup(
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows :: Windows 10',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Version Control :: Git',
],
python_requires='>=3.5',
# We support Python 2.7 and Python 3.6+.
python_requires='>=2.7, ' + ', '.join('!=3.%i.*' % x for x in range(0, 6)),
packages=['subcmds'],
)

277
ssh.py
View File

@ -1,277 +0,0 @@
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common SSH management logic."""
import functools
import multiprocessing
import os
import re
import signal
import subprocess
import sys
import tempfile
import time
import platform_utils
from repo_trace import Trace
PROXY_PATH = os.path.join(os.path.dirname(__file__), 'git_ssh')
def _run_ssh_version():
"""run ssh -V to display the version number"""
return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()
def _parse_ssh_version(ver_str=None):
"""parse a ssh version string into a tuple"""
if ver_str is None:
ver_str = _run_ssh_version()
m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
if m:
return tuple(int(x) for x in m.group(1).split('.'))
else:
return ()
@functools.lru_cache(maxsize=None)
def version():
"""return ssh version as a tuple"""
try:
return _parse_ssh_version()
except subprocess.CalledProcessError:
print('fatal: unable to detect ssh version', file=sys.stderr)
sys.exit(1)
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
class ProxyManager:
"""Manage various ssh clients & masters that we spawn.
This will take care of sharing state between multiprocessing children, and
make sure that if we crash, we don't leak any of the ssh sessions.
The code should work with a single-process scenario too, and not add too much
overhead due to the manager.
"""
# Path to the ssh program to run which will pass our master settings along.
# Set here more as a convenience API.
proxy = PROXY_PATH
def __init__(self, manager):
# Protect access to the list of active masters.
self._lock = multiprocessing.Lock()
# List of active masters (pid). These will be spawned on demand, and we are
# responsible for shutting them all down at the end.
self._masters = manager.list()
# Set of active masters indexed by "host:port" information.
# The value isn't used, but multiprocessing doesn't provide a set class.
self._master_keys = manager.dict()
# Whether ssh masters are known to be broken, so we give up entirely.
self._master_broken = manager.Value('b', False)
# List of active ssh sessions. Clients will be added & removed as
# connections finish, so this list is just for safety & cleanup if we crash.
self._clients = manager.list()
# Path to directory for holding master sockets.
self._sock_path = None
def __enter__(self):
"""Enter a new context."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Exit a context & clean up all resources."""
self.close()
def add_client(self, proc):
"""Track a new ssh session."""
self._clients.append(proc.pid)
def remove_client(self, proc):
"""Remove a completed ssh session."""
try:
self._clients.remove(proc.pid)
except ValueError:
pass
def add_master(self, proc):
"""Track a new master connection."""
self._masters.append(proc.pid)
def _terminate(self, procs):
"""Kill all |procs|."""
for pid in procs:
try:
os.kill(pid, signal.SIGTERM)
os.waitpid(pid, 0)
except OSError:
pass
# The multiprocessing.list() API doesn't provide many standard list()
# methods, so we have to manually clear the list.
while True:
try:
procs.pop(0)
except:
break
def close(self):
"""Close this active ssh session.
Kill all ssh clients & masters we created, and nuke the socket dir.
"""
self._terminate(self._clients)
self._terminate(self._masters)
d = self.sock(create=False)
if d:
try:
platform_utils.rmdir(os.path.dirname(d))
except OSError:
pass
def _open_unlocked(self, host, port=None):
"""Make sure a ssh master session exists for |host| & |port|.
If one doesn't exist already, we'll create it.
We won't grab any locks, so the caller has to do that. This helps keep the
business logic of actually creating the master separate from grabbing locks.
"""
# Check to see whether we already think that the master is running; if we
# think it's already running, return right away.
if port is not None:
key = '%s:%s' % (host, port)
else:
key = host
if key in self._master_keys:
return True
if self._master_broken.value or 'GIT_SSH' in os.environ:
# Failed earlier, so don't retry.
return False
# We will make two calls to ssh; this is the common part of both calls.
command_base = ['ssh', '-o', 'ControlPath %s' % self.sock(), host]
if port is not None:
command_base[1:1] = ['-p', str(port)]
# Since the key wasn't in _master_keys, we think that master isn't running.
# ...but before actually starting a master, we'll double-check. This can
# be important because we can't tell that that 'git@myhost.com' is the same
# as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
check_command = command_base + ['-O', 'check']
try:
Trace(': %s', ' '.join(check_command))
check_process = subprocess.Popen(check_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
check_process.communicate() # read output, but ignore it...
isnt_running = check_process.wait()
if not isnt_running:
# Our double-check found that the master _was_ in fact running. Add to
# the list of keys.
self._master_keys[key] = True
return True
except Exception:
# Ignore exceptions. We will fall back to the normal command and print
# to the log there.
pass
command = command_base[:1] + ['-M', '-N'] + command_base[1:]
try:
Trace(': %s', ' '.join(command))
p = subprocess.Popen(command)
except Exception as e:
self._master_broken.value = True
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
% (host, port, str(e)), file=sys.stderr)
return False
time.sleep(1)
ssh_died = (p.poll() is not None)
if ssh_died:
return False
self.add_master(p)
self._master_keys[key] = True
return True
def _open(self, host, port=None):
"""Make sure a ssh master session exists for |host| & |port|.
If one doesn't exist already, we'll create it.
This will obtain any necessary locks to avoid inter-process races.
"""
# Bail before grabbing the lock if we already know that we aren't going to
# try creating new masters below.
if sys.platform in ('win32', 'cygwin'):
return False
# Acquire the lock. This is needed to prevent opening multiple masters for
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
# one that was passed to repo init.
with self._lock:
return self._open_unlocked(host, port)
def preconnect(self, url):
"""If |uri| will create a ssh connection, setup the ssh master for it."""
m = URI_ALL.match(url)
if m:
scheme = m.group(1)
host = m.group(2)
if ':' in host:
host, port = host.split(':')
else:
port = None
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
return self._open(host, port)
return False
m = URI_SCP.match(url)
if m:
host = m.group(1)
return self._open(host)
return False
def sock(self, create=True):
"""Return the path to the ssh socket dir.
This has all the master sockets so clients can talk to them.
"""
if self._sock_path is None:
if not create:
return None
tmp_dir = '/tmp'
if not os.path.exists(tmp_dir):
tmp_dir = tempfile.gettempdir()
if version() < (6, 7):
tokens = '%r@%h:%p'
else:
tokens = '%C' # hash of %l%h%p%r
self._sock_path = os.path.join(
tempfile.mkdtemp('', 'ssh-', tmp_dir),
'master-' + tokens)
return self._sock_path

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,12 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from collections import defaultdict
import functools
import itertools
import sys
from command import Command, DEFAULT_LOCAL_JOBS
from command import Command
from git_command import git
from progress import Progress
@ -33,9 +35,11 @@ deleting it (and all its history) from your local repository.
It is equivalent to "git branch -D <branchname>".
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _Options(self, p):
p.add_option('-q', '--quiet',
action='store_true', default=False,
help='be quiet')
p.add_option('--all',
dest='all', action='store_true',
help='delete all branches in all projects')
@ -51,44 +55,35 @@ It is equivalent to "git branch -D <branchname>".
else:
args.insert(0, "'All local branches'")
def _ExecuteOne(self, all_branches, nb, project):
"""Abandon one project."""
if all_branches:
branches = project.GetBranches()
else:
branches = [nb]
ret = {}
for name in branches:
status = project.AbandonBranch(name)
if status is not None:
ret[name] = status
return (ret, project)
def Execute(self, opt, args):
nb = args[0]
err = defaultdict(list)
success = defaultdict(list)
all_projects = self.GetProjects(args[1:])
def _ProcessResults(_pool, pm, states):
for (results, project) in states:
for branch, status in results.items():
pm = Progress('Abandon %s' % nb, len(all_projects))
for project in all_projects:
pm.update()
if opt.all:
branches = list(project.GetBranches().keys())
else:
branches = [nb]
for name in branches:
status = project.AbandonBranch(name)
if status is not None:
if status:
success[branch].append(project)
success[name].append(project)
else:
err[branch].append(project)
pm.update()
err[name].append(project)
pm.end()
self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, opt.all, nb),
all_projects,
callback=_ProcessResults,
output=Progress('Abandon %s' % (nb,), len(all_projects), quiet=opt.quiet))
width = 25
for name in branches:
if width < len(name):
width = len(name)
width = max(itertools.chain(
[25], (len(x) for x in itertools.chain(success, err))))
if err:
for br in err.keys():
err_msg = "error: cannot abandon %s" % br

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,11 +14,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
from __future__ import print_function
import sys
from color import Coloring
from command import Command, DEFAULT_LOCAL_JOBS
from command import Command
class BranchColoring(Coloring):
@ -95,7 +96,6 @@ the branch appears in, or does not appear in. If no project list
is shown, then the branch appears in all projects.
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def Execute(self, opt, args):
projects = self.GetProjects(args)
@ -103,19 +103,14 @@ is shown, then the branch appears in all projects.
all_branches = {}
project_cnt = len(projects)
def _ProcessResults(_pool, _output, results):
for name, b in itertools.chain.from_iterable(results):
for project in projects:
for name, b in project.GetBranches().items():
b.project = project
if name not in all_branches:
all_branches[name] = BranchInfo(name)
all_branches[name].add(b)
self.ExecuteInParallel(
opt.jobs,
expand_project_to_branches,
projects,
callback=_ProcessResults)
names = sorted(all_branches)
names = list(sorted(all_branches))
if not names:
print(' (no branches)', file=sys.stderr)
@ -185,19 +180,3 @@ is shown, then the branch appears in all projects.
else:
out.write(' in all projects')
out.nl()
def expand_project_to_branches(project):
"""Expands a project into a list of branch names & associated information.
Args:
project: project.Project
Returns:
List[Tuple[str, git_config.Branch]]
"""
branches = []
for name, b in project.GetBranches().items():
b.project = project
branches.append((name, b))
return branches
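The parallel path maps expand_project_to_branches over projects and flattens the per-project lists while merging, as in the _ProcessResults callback above. A tiny standalone sketch of that flatten-and-merge step (BranchInfo is replaced by a plain list here):

import itertools
from collections import defaultdict

# One list of (branch_name, branch) tuples per project, as returned by
# expand_project_to_branches above; the values are illustrative.
results = [[('main', 'proj1')], [('main', 'proj2'), ('dev', 'proj2')]]

all_branches = defaultdict(list)  # simplified stand-in for BranchInfo
for name, b in itertools.chain.from_iterable(results):
    all_branches[name].append(b)

# all_branches == {'main': ['proj1', 'proj2'], 'dev': ['proj2']}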

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,10 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from __future__ import print_function
import sys
from command import Command, DEFAULT_LOCAL_JOBS
from command import Command
from progress import Progress
@ -33,37 +34,28 @@ The command is equivalent to:
repo forall [<project>...] -c git checkout <branchname>
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def ValidateOptions(self, opt, args):
if not args:
self.Usage()
def _ExecuteOne(self, nb, project):
"""Checkout one project."""
return (project.CheckoutBranch(nb), project)
def Execute(self, opt, args):
nb = args[0]
err = []
success = []
all_projects = self.GetProjects(args[1:])
def _ProcessResults(_pool, pm, results):
for status, project in results:
if status is not None:
if status:
success.append(project)
else:
err.append(project)
pm.update()
pm = Progress('Checkout %s' % nb, len(all_projects))
for project in all_projects:
pm.update()
self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, nb),
all_projects,
callback=_ProcessResults,
output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet))
status = project.CheckoutBranch(nb)
if status is not None:
if status:
success.append(project)
else:
err.append(project)
pm.end()
if err:
for p in err:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import re
import sys
from command import Command
@ -32,6 +35,9 @@ The change id will be updated, and a reference to the old
change id will be added.
"""
def _Options(self, p):
pass
def ValidateOptions(self, opt, args):
if len(args) != 1:
self.Usage()
@ -69,9 +75,11 @@ change id will be added.
new_msg = self._Reformat(old_msg, sha1)
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
input=new_msg,
provide_stdin=True,
capture_stdout=True,
capture_stderr=True)
p.stdin.write(new_msg)
p.stdin.close()
if p.Wait() != 0:
print("error: Failed to update commit message", file=sys.stderr)
sys.exit(1)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,10 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import io
from command import DEFAULT_LOCAL_JOBS, PagedCommand
from command import PagedCommand
class Diff(PagedCommand):
@ -28,42 +27,15 @@ The -u option causes '%prog' to generate diff output with file paths
relative to the repository root, so the output can be applied
to the Unix 'patch' command.
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _Options(self, p):
p.add_option('-u', '--absolute',
dest='absolute', action='store_true',
help='paths are relative to the repository root')
def _ExecuteOne(self, absolute, project):
"""Obtains the diff for a specific project.
Args:
absolute: Paths are relative to the root.
project: Project to get status of.
Returns:
The status of the project.
"""
buf = io.StringIO()
ret = project.PrintWorkTreeDiff(absolute, output_redir=buf)
return (ret, buf.getvalue())
help='Paths are relative to the repository root')
def Execute(self, opt, args):
all_projects = self.GetProjects(args)
def _ProcessResults(_pool, _output, results):
ret = 0
for (state, output) in results:
if output:
print(output, end='')
if not state:
ret = 1
return ret
return self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, opt.absolute),
all_projects,
callback=_ProcessResults,
ordered=True)
ret = 0
for project in self.GetProjects(args):
if not project.PrintWorkTreeDiff(opt.absolute):
ret = 1
return ret

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,7 +16,7 @@
from color import Coloring
from command import PagedCommand
from manifest_xml import RepoClient
from manifest_xml import XmlManifest
class _Coloring(Coloring):
@ -68,10 +70,10 @@ synced and their revisions won't be found.
def _Options(self, p):
p.add_option('--raw',
dest='raw', action='store_true',
help='display raw diff')
help='Display raw diff.')
p.add_option('--no-color',
dest='color', action='store_false', default=True,
help='does not display the diff in color')
help='does not display the diff in color.')
p.add_option('--pretty-format',
dest='pretty_format', action='store',
metavar='<FORMAT>',
@ -181,7 +183,7 @@ synced and their revisions won't be found.
self.OptionParser.error('missing manifests to diff')
def Execute(self, opt, args):
self.out = _Coloring(self.client.globalConfig)
self.out = _Coloring(self.manifest.globalConfig)
self.printText = self.out.nofmt_printer('text')
if opt.color:
self.printProject = self.out.nofmt_printer('project', attr='bold')
@ -191,12 +193,12 @@ synced and their revisions won't be found.
else:
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
manifest1 = RepoClient(self.repodir)
manifest1 = XmlManifest(self.manifest.repodir)
manifest1.Override(args[0], load_local_manifests=False)
if len(args) == 1:
manifest2 = self.manifest
else:
manifest2 = RepoClient(self.repodir)
manifest2 = XmlManifest(self.manifest.repodir)
manifest2.Override(args[1], load_local_manifests=False)
diff = manifest1.projectsDiff(manifest2)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,11 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import re
import sys
from command import Command
from error import GitError, NoSuchProjectError
from error import GitError
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
@ -60,7 +63,6 @@ If no project is specified try to use current directory as a project.
if m:
if not project:
project = self.GetProjects(".")[0]
print('Defaulting to cwd project', project.name)
chg_id = int(m.group(1))
if m.group(2):
ps_id = int(m.group(2))
@ -77,23 +79,7 @@ If no project is specified try to use current directory as a project.
ps_id = max(int(match.group(1)), ps_id)
to_get.append((project, chg_id, ps_id))
else:
projects = self.GetProjects([a])
if len(projects) > 1:
# If the cwd is one of the projects, assume they want that.
try:
project = self.GetProjects('.')[0]
except NoSuchProjectError:
project = None
if project not in projects:
print('error: %s matches too many projects; please re-run inside '
'the project checkout.' % (a,), file=sys.stderr)
for project in projects:
print(' %s/ @ %s' % (project.relpath, project.revisionExpr),
file=sys.stderr)
sys.exit(1)
else:
project = projects[0]
print('Defaulting to cwd project', project.name)
project = self.GetProjects([a])[0]
return to_get
def ValidateOptions(self, opt, args):

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,9 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import errno
import functools
import io
import multiprocessing
import re
import os
@ -23,8 +24,8 @@ import sys
import subprocess
from color import Coloring
from command import DEFAULT_LOCAL_JOBS, Command, MirrorSafeCommand, WORKER_BATCH_SIZE
from error import ManifestInvalidRevisionError
from command import Command, MirrorSafeCommand
import platform_utils
_CAN_COLOR = [
'branch',
@ -45,7 +46,7 @@ class Forall(Command, MirrorSafeCommand):
helpSummary = "Run a shell command in each project"
helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
%prog -r str1 [str2] ... -c <command> [<arg>...]
%prog -r str1 [str2] ... -c <command> [<arg>...]"
"""
helpDescription = """
Executes the same shell command in each project.
@ -53,11 +54,6 @@ Executes the same shell command in each project.
The -r option allows running the command only on projects matching
regex or wildcard expression.
By default, projects are processed non-interactively in parallel. If you want
to run interactive commands, make sure to pass --interactive to force --jobs 1.
While the processing order of projects is not guaranteed, the order of project
output is stable.
# Output Formatting
The -p option causes '%prog' to bind pipes to the command's stdin,
@ -120,48 +116,73 @@ terminal and are not redirected.
If -e is used, when a command exits unsuccessfully, '%prog' will abort
without iterating through the remaining projects.
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
@staticmethod
def _cmd_option(option, _opt_str, _value, parser):
setattr(parser.values, option.dest, list(parser.rargs))
while parser.rargs:
del parser.rargs[0]
def _Options(self, p):
def cmd(option, opt_str, value, parser):
setattr(parser.values, option.dest, list(parser.rargs))
while parser.rargs:
del parser.rargs[0]
p.add_option('-r', '--regex',
dest='regex', action='store_true',
help='execute the command only on projects matching regex or wildcard expression')
help="Execute the command only on projects matching regex or wildcard expression")
p.add_option('-i', '--inverse-regex',
dest='inverse_regex', action='store_true',
help='execute the command only on projects not matching regex or '
'wildcard expression')
help="Execute the command only on projects not matching regex or "
"wildcard expression")
p.add_option('-g', '--groups',
dest='groups',
help='execute the command only on projects matching the specified groups')
help="Execute the command only on projects matching the specified groups")
p.add_option('-c', '--command',
help='command (and arguments) to execute',
help='Command (and arguments) to execute',
dest='command',
action='callback',
callback=self._cmd_option)
callback=cmd)
p.add_option('-e', '--abort-on-errors',
dest='abort_on_errors', action='store_true',
help='abort if a command exits unsuccessfully')
help='Abort if a command exits unsuccessfully')
p.add_option('--ignore-missing', action='store_true',
help='silently skip & do not exit non-zero due to missing '
help='Silently skip & do not exit non-zero due to missing '
'checkouts')
g = p.get_option_group('--quiet')
g = p.add_option_group('Output')
g.add_option('-p',
dest='project_header', action='store_true',
help='show project headers before output')
p.add_option('--interactive',
action='store_true',
help='force interactive usage')
help='Show project headers before output')
g.add_option('-v', '--verbose',
dest='verbose', action='store_true',
help='Show command error messages')
g.add_option('-j', '--jobs',
dest='jobs', action='store', type='int', default=1,
help='number of commands to execute simultaneously')
def WantPager(self, opt):
return opt.project_header and opt.jobs == 1
def _SerializeProject(self, project):
""" Serialize a project._GitGetByExec instance.
project._GitGetByExec is not pickle-able. Instead of trying to pass it
around between processes, make a dict ourselves containing only the
attributes that we need.
"""
if not self.manifest.IsMirror:
lrev = project.GetRevisionId()
else:
lrev = None
return {
'name': project.name,
'relpath': project.relpath,
'remote_name': project.remote.name,
'lrev': lrev,
'rrev': project.revisionExpr,
'annotations': dict((a.name, a.value) for a in project.annotations),
'gitdir': project.gitdir,
'worktree': project.worktree,
'upstream': project.upstream,
'dest_branch': project.dest_branch,
}
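The docstring above is why the older code ships a plain dict to the workers: multiprocessing pickles every argument, and project objects carry members the pickle module rejects. A small self-contained sketch of that workaround (toy Project class, not repo's):

    import multiprocessing

    class Project:
        """Toy stand-in; real project objects hold handles pickle rejects."""
        def __init__(self, name, relpath):
            self.name = name
            self.relpath = relpath
            self._log = open(__file__)  # an unpicklable attribute

    def serialize(project):
        # Keep only the plain fields the worker needs; a dict pickles fine.
        return {'name': project.name, 'relpath': project.relpath}

    def describe(d):
        return '%s checked out at %s/' % (d['name'], d['relpath'])

    if __name__ == '__main__':
        projects = [Project('platform/build', 'build')]
        with multiprocessing.Pool(2) as pool:
            print(pool.map(describe, [serialize(p) for p in projects]))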
def ValidateOptions(self, opt, args):
if not opt.command:
self.Usage()
@ -177,11 +198,6 @@ without iterating through the remaining projects.
cmd.append(cmd[0])
cmd.extend(opt.command[1:])
# Historically, forall operated interactively, and in serial. If the user
# has selected 1 job, then default to interactive mode.
if opt.jobs == 1:
opt.interactive = True
if opt.project_header \
and not shell \
and cmd[0] == 'git':
@ -221,50 +237,60 @@ without iterating through the remaining projects.
os.environ['REPO_COUNT'] = str(len(projects))
pool = multiprocessing.Pool(opt.jobs, InitWorker)
try:
config = self.manifest.manifestProject.config
with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
results_it = pool.imap(
functools.partial(DoWorkWrapper, mirror, opt, cmd, shell, config),
enumerate(projects),
chunksize=WORKER_BATCH_SIZE)
first = True
for (r, output) in results_it:
if output:
if first:
first = False
elif opt.project_header:
print()
# To simplify the DoWorkWrapper, take care of automatic newlines.
end = '\n'
if output[-1] == '\n':
end = ''
print(output, end=end)
rc = rc or r
if r != 0 and opt.abort_on_errors:
raise Exception('Aborting due to previous error')
results_it = pool.imap(
DoWorkWrapper,
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
pool.close()
for r in results_it:
rc = rc or r
if r != 0 and opt.abort_on_errors:
raise Exception('Aborting due to previous error')
except (KeyboardInterrupt, WorkerKeyboardInterrupt):
# Catch KeyboardInterrupt raised inside and outside of workers
print('Interrupted - terminating the pool')
pool.terminate()
rc = rc or errno.EINTR
except Exception as e:
# Catch any other exceptions raised
print('forall: unhandled error, terminating the pool: %s: %s' %
print('Got an error, terminating the pool: %s: %s' %
(type(e).__name__, e),
file=sys.stderr)
pool.terminate()
rc = rc or getattr(e, 'errno', 1)
finally:
pool.join()
if rc != 0:
sys.exit(rc)
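Both versions of Execute above lean on the same two multiprocessing details: an initializer that makes workers ignore Ctrl-C (so only the parent decides when to terminate the pool), and imap(), which yields results in submission order so the printed output stays stable even though the work runs in parallel. A minimal runnable sketch of that pattern, with made-up work:

    import functools
    import multiprocessing
    import signal

    def init_worker():
        # Workers ignore SIGINT; the parent catches KeyboardInterrupt and
        # terminates the pool in one place.
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def run_one(prefix, item):
        return '%s %d -> %d' % (prefix, item, item * item)

    if __name__ == '__main__':
        with multiprocessing.Pool(4, init_worker) as pool:
            # imap() preserves submission order; chunksize batches the IPC.
            results = pool.imap(functools.partial(run_one, 'square:'),
                                range(8), chunksize=2)
            for line in results:
                print(line)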
def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
for cnt, p in enumerate(projects):
try:
project = self._SerializeProject(p)
except Exception as e:
print('Project list error on project %s: %s: %s' %
(p.name, type(e).__name__, e),
file=sys.stderr)
return
except KeyboardInterrupt:
print('Project list interrupted',
file=sys.stderr)
return
yield [mirror, opt, cmd, shell, cnt, config, project]
class WorkerKeyboardInterrupt(Exception):
""" Keyboard interrupt exception for worker processes. """
pass
def InitWorker():
signal.signal(signal.SIGINT, signal.SIG_IGN)
def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
def DoWorkWrapper(args):
""" A wrapper around the DoWork() method.
Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
@ -272,11 +298,11 @@ def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
and making the parent hang indefinitely.
"""
cnt, project = args
project = args.pop()
try:
return DoWork(project, mirror, opt, cmd, shell, cnt, config)
return DoWork(project, *args)
except KeyboardInterrupt:
print('%s: Worker interrupted' % project.name)
print('%s: Worker interrupted' % project['name'])
raise WorkerKeyboardInterrupt()
@ -288,65 +314,94 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config):
val = ''
env[name] = val
setenv('REPO_PROJECT', project.name)
setenv('REPO_PATH', project.relpath)
setenv('REPO_REMOTE', project.remote.name)
try:
# If we aren't in a fully synced state and we don't have the ref the manifest
# wants, then this will fail. Ignore it for the purposes of this code.
lrev = '' if mirror else project.GetRevisionId()
except ManifestInvalidRevisionError:
lrev = ''
setenv('REPO_LREV', lrev)
setenv('REPO_RREV', project.revisionExpr)
setenv('REPO_UPSTREAM', project.upstream)
setenv('REPO_DEST_BRANCH', project.dest_branch)
setenv('REPO_PROJECT', project['name'])
setenv('REPO_PATH', project['relpath'])
setenv('REPO_REMOTE', project['remote_name'])
setenv('REPO_LREV', project['lrev'])
setenv('REPO_RREV', project['rrev'])
setenv('REPO_UPSTREAM', project['upstream'])
setenv('REPO_DEST_BRANCH', project['dest_branch'])
setenv('REPO_I', str(cnt + 1))
for annotation in project.annotations:
setenv("REPO__%s" % (annotation.name), annotation.value)
for name in project['annotations']:
setenv("REPO__%s" % (name), project['annotations'][name])
if mirror:
setenv('GIT_DIR', project.gitdir)
cwd = project.gitdir
setenv('GIT_DIR', project['gitdir'])
cwd = project['gitdir']
else:
cwd = project.worktree
cwd = project['worktree']
if not os.path.exists(cwd):
# Allow the user to silently ignore missing checkouts so they can run on
# partial checkouts (good for infra recovery tools).
if opt.ignore_missing:
return (0, '')
output = ''
return 0
if ((opt.project_header and opt.verbose)
or not opt.project_header):
output = 'skipping %s/' % project.relpath
return (1, output)
print('skipping %s/' % project['relpath'], file=sys.stderr)
return 1
if opt.verbose:
stderr = subprocess.STDOUT
else:
stderr = subprocess.DEVNULL
stdin = None if opt.interactive else subprocess.DEVNULL
result = subprocess.run(
cmd, cwd=cwd, shell=shell, env=env, check=False,
encoding='utf-8', errors='replace',
stdin=stdin, stdout=subprocess.PIPE, stderr=stderr)
output = result.stdout
if opt.project_header:
if output:
buf = io.StringIO()
out = ForallColoring(config)
out.redirect(buf)
if mirror:
project_header_path = project.name
else:
project_header_path = project.relpath
out.project('project %s/' % project_header_path)
out.nl()
buf.write(output)
output = buf.getvalue()
return (result.returncode, output)
stdin = subprocess.PIPE
stdout = subprocess.PIPE
stderr = subprocess.PIPE
else:
stdin = None
stdout = None
stderr = None
p = subprocess.Popen(cmd,
cwd=cwd,
shell=shell,
env=env,
stdin=stdin,
stdout=stdout,
stderr=stderr)
if opt.project_header:
out = ForallColoring(config)
out.redirect(sys.stdout)
empty = True
errbuf = ''
p.stdin.close()
s_in = platform_utils.FileDescriptorStreams.create()
s_in.add(p.stdout, sys.stdout, 'stdout')
s_in.add(p.stderr, sys.stderr, 'stderr')
while not s_in.is_done:
in_ready = s_in.select()
for s in in_ready:
buf = s.read().decode()
if not buf:
s_in.remove(s)
s.close()
continue
if not opt.verbose:
if s.std_name == 'stderr':
errbuf += buf
continue
if empty and out:
if not cnt == 0:
out.nl()
if mirror:
project_header_path = project['name']
else:
project_header_path = project['relpath']
out.project('project %s/', project_header_path)
out.nl()
out.flush()
if errbuf:
sys.stderr.write(errbuf)
sys.stderr.flush()
errbuf = ''
empty = False
s.dest.write(buf)
s.dest.flush()
r = p.wait()
return r
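The newer DoWork above replaces the Popen-plus-select loop with a single subprocess.run() call that captures the child's output. A standalone sketch of that capture pattern (the function name and flag names here are illustrative only):

    import subprocess
    import sys

    def run_captured(cmd, cwd='.', interactive=False, verbose=False):
        # Non-interactive children read from /dev/null so they cannot hang
        # waiting for input; stderr is folded into stdout only when verbose.
        result = subprocess.run(
            cmd, cwd=cwd, check=False,
            encoding='utf-8', errors='replace',
            stdin=None if interactive else subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT if verbose else subprocess.DEVNULL)
        return result.returncode, result.stdout

    if __name__ == '__main__':
        rc, out = run_captured([sys.executable, '-c', 'print("hello")'])
        print(rc, out, end='')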

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,11 +14,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from command import Command, GitcClientCommand
import platform_utils
from pyversion import is_python3
if not is_python3():
input = raw_input # noqa: F821
class GitcDelete(Command, GitcClientCommand):
common = True
@ -33,7 +40,7 @@ and all locally downloaded sources.
def _Options(self, p):
p.add_option('-f', '--force',
dest='force', action='store_true',
help='force the deletion (no prompt)')
help='Force the deletion (no prompt).')
def Execute(self, opt, args):
if not opt.force:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
@ -47,7 +50,14 @@ use for this GITC client.
"""
def _Options(self, p):
super()._Options(p, gitc_init=True)
super(GitcInit, self)._Options(p, gitc_init=True)
g = p.add_option_group('GITC options')
g.add_option('-f', '--manifest-file',
dest='manifest_file',
help='Optional manifest file to use for this GITC client.')
g.add_option('-c', '--gitc-client',
dest='gitc_client',
help='The name of the gitc_client instance to create or modify.')
def Execute(self, opt, args):
gitc_client = gitc_utils.parse_clientdir(os.getcwd())
@ -57,7 +67,7 @@ use for this GITC client.
sys.exit(1)
self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
gitc_client)
super().Execute(opt, args)
super(GitcInit, self).Execute(opt, args)
manifest_file = self.manifest.manifestFile
if opt.manifest_file:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,11 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from __future__ import print_function
import sys
from color import Coloring
from command import DEFAULT_LOCAL_JOBS, PagedCommand
from command import PagedCommand
from error import GitError
from git_command import GitCommand
@ -62,33 +65,30 @@ contain a line that matches both expressions:
repo grep --all-match -e NODE -e Unexpected
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
@staticmethod
def _carry_option(_option, opt_str, value, parser):
pt = getattr(parser.values, 'cmd_argv', None)
if pt is None:
pt = []
setattr(parser.values, 'cmd_argv', pt)
if opt_str == '-(':
pt.append('(')
elif opt_str == '-)':
pt.append(')')
else:
pt.append(opt_str)
if value is not None:
pt.append(value)
def _CommonOptions(self, p):
"""Override common options slightly."""
super()._CommonOptions(p, opt_v=False)
def _Options(self, p):
def carry(option,
opt_str,
value,
parser):
pt = getattr(parser.values, 'cmd_argv', None)
if pt is None:
pt = []
setattr(parser.values, 'cmd_argv', pt)
if opt_str == '-(':
pt.append('(')
elif opt_str == '-)':
pt.append(')')
else:
pt.append(opt_str)
if value is not None:
pt.append(value)
g = p.add_option_group('Sources')
g.add_option('--cached',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Search the index, instead of the work tree')
g.add_option('-r', '--revision',
dest='revision', action='append', metavar='TREEish',
@ -96,134 +96,68 @@ contain a line that matches both expressions:
g = p.add_option_group('Pattern')
g.add_option('-e',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
metavar='PATTERN', type='str',
help='Pattern to search for')
g.add_option('-i', '--ignore-case',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Ignore case differences')
g.add_option('-a', '--text',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help="Process binary files as if they were text")
g.add_option('-I',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help="Don't match the pattern in binary files")
g.add_option('-w', '--word-regexp',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Match the pattern only at word boundaries')
g.add_option('-v', '--invert-match',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Select non-matching lines')
g.add_option('-G', '--basic-regexp',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Use POSIX basic regexp for patterns (default)')
g.add_option('-E', '--extended-regexp',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Use POSIX extended regexp for patterns')
g.add_option('-F', '--fixed-strings',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Use fixed strings (not regexp) for pattern')
g = p.add_option_group('Pattern Grouping')
g.add_option('--all-match',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Limit match to lines that have all patterns')
g.add_option('--and', '--or', '--not',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Boolean operators to combine patterns')
g.add_option('-(', '-)',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Boolean operator grouping')
g = p.add_option_group('Output')
g.add_option('-n',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Prefix the line number to matching lines')
g.add_option('-C',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines around match')
g.add_option('-B',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines before match')
g.add_option('-A',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines after match')
g.add_option('-l', '--name-only', '--files-with-matches',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Show only file names containing matching lines')
g.add_option('-L', '--files-without-match',
action='callback', callback=self._carry_option,
action='callback', callback=carry,
help='Show only file names not containing matching lines')
def _ExecuteOne(self, cmd_argv, project):
"""Process one project."""
try:
p = GitCommand(project,
cmd_argv,
bare=False,
capture_stdout=True,
capture_stderr=True)
except GitError as e:
return (project, -1, None, str(e))
return (project, p.Wait(), p.stdout, p.stderr)
@staticmethod
def _ProcessResults(full_name, have_rev, _pool, out, results):
git_failed = False
bad_rev = False
have_match = False
for project, rc, stdout, stderr in results:
if rc < 0:
git_failed = True
out.project('--- project %s ---' % project.relpath)
out.nl()
out.fail('%s', stderr)
out.nl()
continue
if rc:
# no results
if stderr:
if have_rev and 'fatal: ambiguous argument' in stderr:
bad_rev = True
else:
out.project('--- project %s ---' % project.relpath)
out.nl()
out.fail('%s', stderr.strip())
out.nl()
continue
have_match = True
# We cut the last element, to avoid a blank line.
r = stdout.split('\n')
r = r[0:-1]
if have_rev and full_name:
for line in r:
rev, line = line.split(':', 1)
out.write("%s", rev)
out.write(':')
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
elif full_name:
for line in r:
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
else:
for line in r:
print(line)
return (git_failed, bad_rev, have_match)
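The result handler above re-inserts the project path into each match line; git grep prints "rev:file:match" when given a revision, so a single split on ':' is enough to splice the path in after the rev. A tiny sketch of that rewrite (hypothetical helper, not repo code):

    def rewrite(line, relpath, have_rev=True):
        # 'v1.0:Makefile:all:' -> 'v1.0:build/Makefile:all:'
        if have_rev:
            rev, rest = line.split(':', 1)
            return '%s:%s/%s' % (rev, relpath, rest)
        return '%s/%s' % (relpath, line)

    print(rewrite('v1.0:Makefile:all:', 'build'))
    print(rewrite('Makefile:all:', 'build', have_rev=False))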
def Execute(self, opt, args):
out = GrepColoring(self.manifest.manifestProject.config)
@ -255,13 +189,62 @@ contain a line that matches both expressions:
cmd_argv.extend(opt.revision)
cmd_argv.append('--')
git_failed, bad_rev, have_match = self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, cmd_argv),
projects,
callback=functools.partial(self._ProcessResults, full_name, have_rev),
output=out,
ordered=True)
git_failed = False
bad_rev = False
have_match = False
for project in projects:
try:
p = GitCommand(project,
cmd_argv,
bare=False,
capture_stdout=True,
capture_stderr=True)
except GitError as e:
git_failed = True
out.project('--- project %s ---' % project.relpath)
out.nl()
out.fail('%s', str(e))
out.nl()
continue
if p.Wait() != 0:
# no results
#
if p.stderr:
if have_rev and 'fatal: ambiguous argument' in p.stderr:
bad_rev = True
else:
out.project('--- project %s ---' % project.relpath)
out.nl()
out.fail('%s', p.stderr.strip())
out.nl()
continue
have_match = True
# We cut the last element, to avoid a blank line.
#
r = p.stdout.split('\n')
r = r[0:-1]
if have_rev and full_name:
for line in r:
rev, line = line.split(':', 1)
out.write("%s", rev)
out.write(':')
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
elif full_name:
for line in r:
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
else:
for line in r:
print(line)
if git_failed:
sys.exit(1)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,15 +14,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import re
import sys
import textwrap
from formatter import AbstractFormatter, DumbWriter
from subcmds import all_commands
from color import Coloring
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
import gitc_utils
from wrapper import Wrapper
class Help(PagedCommand, MirrorSafeCommand):
@ -63,7 +65,7 @@ Displays detailed usage information about a command.
def gitc_supported(cmd):
if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
return True
if self.client.isGitcClient:
if self.manifest.isGitcClient:
return True
if isinstance(cmd, GitcClientCommand):
return False
@ -79,14 +81,14 @@ Displays detailed usage information about a command.
print(
"See 'repo help <command>' for more information on a specific command.\n"
"See 'repo help --all' for a complete list of recognized commands.")
print('Bug reports:', Wrapper().BUG_URL)
def _PrintCommandHelp(self, cmd, header_prefix=''):
class _Out(Coloring):
def __init__(self, gc):
Coloring.__init__(self, gc, 'help')
self.heading = self.printer('heading', attr='bold')
self._first = True
self.wrap = AbstractFormatter(DumbWriter())
def _PrintSection(self, heading, bodyAttr):
try:
@ -96,9 +98,7 @@ Displays detailed usage information about a command.
if body == '' or body is None:
return
if not self._first:
self.nl()
self._first = False
self.nl()
self.heading('%s%s', header_prefix, heading)
self.nl()
@ -108,8 +108,7 @@ Displays detailed usage information about a command.
body = body.strip()
body = body.replace('%prog', me)
# Extract the title, but skip any trailing {#anchors}.
asciidoc_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$')
asciidoc_hdr = re.compile(r'^\n?#+ (.+)$')
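The comment explains the regex change: the newer pattern stops the captured title before a trailing "{#anchor}" instead of swallowing it. A quick standalone comparison of the two patterns above:

    import re

    old_hdr = re.compile(r'^\n?#+ (.+)$')
    new_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$')

    line = '# Synopsis {#synopsis}'
    print(old_hdr.match(line).group(1))  # 'Synopsis {#synopsis}'
    print(new_hdr.match(line).group(1))  # 'Synopsis ' (anchor dropped)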
for para in body.split("\n\n"):
if para.startswith(' '):
self.write('%s', para)
@ -124,14 +123,11 @@ Displays detailed usage information about a command.
self.nl()
continue
lines = textwrap.wrap(para.replace(' ', ' '), width=80,
break_long_words=False, break_on_hyphens=False)
for line in lines:
self.write('%s', line)
self.nl()
self.nl()
self.wrap.add_flowing_data(para)
self.wrap.end_paragraph(1)
self.wrap.end_paragraph(0)
out = _Out(self.client.globalConfig)
out = _Out(self.manifest.globalConfig)
out._PrintSection('Summary', 'helpSummary')
cmd.OptionParser.print_help()
out._PrintSection('Description', 'helpDescription')

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,8 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
from command import PagedCommand
from color import Coloring
from git_refs import R_M, R_HEADS
@ -27,7 +27,7 @@ class _Coloring(Coloring):
class Info(PagedCommand):
common = True
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
helpUsage = "%prog [-dl] [-o [-b]] [<project>...]"
def _Options(self, p):
p.add_option('-d', '--diff',
@ -36,22 +36,15 @@ class Info(PagedCommand):
p.add_option('-o', '--overview',
dest='overview', action='store_true',
help='show overview of all local commits')
p.add_option('-c', '--current-branch',
p.add_option('-b', '--current-branch',
dest="current_branch", action="store_true",
help="consider only checked out branches")
p.add_option('--no-current-branch',
dest='current_branch', action='store_false',
help='consider all local branches')
# Turn this into a warning & remove this someday.
p.add_option('-b',
dest='current_branch', action='store_true',
help=optparse.SUPPRESS_HELP)
p.add_option('-l', '--local-only',
dest="local", action="store_true",
help="disable all remote operations")
help="Disable all remote operations")
def Execute(self, opt, args):
self.out = _Coloring(self.client.globalConfig)
self.out = _Coloring(self.manifest.globalConfig)
self.heading = self.out.printer('heading', attr='bold')
self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
self.redtext = self.out.printer('redtext', fg='red')

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,12 +14,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import optparse
import os
import platform
import re
import sys
import urllib.parse
from pyversion import is_python3
if is_python3():
import urllib.parse
else:
import imp
import urlparse
urllib = imp.new_module('urllib')
urllib.parse = urlparse
from color import Coloring
from command import InteractiveCommand, MirrorSafeCommand
@ -25,16 +37,15 @@ from error import ManifestParseError
from project import SyncBuffer
from git_config import GitConfig
from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
import git_superproject
import platform_utils
from wrapper import Wrapper
class Init(InteractiveCommand, MirrorSafeCommand):
common = True
helpSummary = "Initialize a repo client checkout in the current directory"
helpSummary = "Initialize repo in the current directory"
helpUsage = """
%prog [options] [manifest url]
%prog [options]
"""
helpDescription = """
The '%prog' command is run once to install and initialize repo.
@ -42,13 +53,9 @@ The latest repo source code and manifest collection is downloaded
from the server and is installed in the .repo/ directory in the
current working directory.
When creating a new checkout, the manifest URL is the only required setting.
It may be specified using the --manifest-url option, or as the first optional
argument.
The optional -b argument can be used to select the manifest branch
to checkout and use. If no branch is specified, the remote's default
branch is used. This is equivalent to using -b HEAD.
branch is used.
The optional -m argument can be used to specify an alternate manifest
to be used. If no manifest is specified, the manifest default.xml
@ -79,36 +86,115 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
to update the working directory files.
"""
def _CommonOptions(self, p):
"""Disable due to re-use of Wrapper()."""
def _Options(self, p, gitc_init=False):
Wrapper().InitParser(p, gitc_init=gitc_init)
# Logging
g = p.add_option_group('Logging options')
g.add_option('-v', '--verbose',
dest='output_mode', action='store_true',
help='show all output')
g.add_option('-q', '--quiet',
dest='output_mode', action='store_false',
help='only show errors')
# Manifest
g = p.add_option_group('Manifest options')
g.add_option('-u', '--manifest-url',
dest='manifest_url',
help='manifest repository location', metavar='URL')
g.add_option('-b', '--manifest-branch',
dest='manifest_branch',
help='manifest branch or revision', metavar='REVISION')
cbr_opts = ['--current-branch']
# The gitc-init subcommand allocates -c itself, but a lot of init users
# want -c, so try to satisfy both as best we can.
if not gitc_init:
cbr_opts += ['-c']
g.add_option(*cbr_opts,
dest='current_branch_only', action='store_true',
help='fetch only current manifest branch from server')
g.add_option('-m', '--manifest-name',
dest='manifest_name', default='default.xml',
help='initial manifest file', metavar='NAME.xml')
g.add_option('--mirror',
dest='mirror', action='store_true',
help='create a replica of the remote repositories '
'rather than a client working directory')
g.add_option('--reference',
dest='reference',
help='location of mirror directory', metavar='DIR')
g.add_option('--dissociate',
dest='dissociate', action='store_true',
help='dissociate from reference mirrors after clone')
g.add_option('--depth', type='int', default=None,
dest='depth',
help='create a shallow clone with given depth; see git clone')
g.add_option('--partial-clone', action='store_true',
dest='partial_clone',
help='perform partial clone (https://git-scm.com/'
'docs/gitrepository-layout#_code_partialclone_code)')
g.add_option('--clone-filter', action='store', default='blob:none',
dest='clone_filter',
help='filter for use with --partial-clone [default: %default]')
# TODO(vapier): Expose option with real help text once this has been in the
# wild for a while w/out significant bug reports. Goal is by ~Sep 2020.
g.add_option('--worktree', action='store_true',
help=optparse.SUPPRESS_HELP)
g.add_option('--archive',
dest='archive', action='store_true',
help='checkout an archive instead of a git repository for '
'each project. See git archive.')
g.add_option('--submodules',
dest='submodules', action='store_true',
help='sync any submodules associated with the manifest repo')
g.add_option('-g', '--groups',
dest='groups', default='default',
help='restrict manifest projects to ones with specified '
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
metavar='GROUP')
g.add_option('-p', '--platform',
dest='platform', default='auto',
help='restrict manifest projects to ones with a specified '
'platform group [auto|all|none|linux|darwin|...]',
metavar='PLATFORM')
g.add_option('--clone-bundle', action='store_true',
help='force use of /clone.bundle on HTTP/HTTPS (default if not --partial-clone)')
g.add_option('--no-clone-bundle',
dest='clone_bundle', action='store_false',
help='disable use of /clone.bundle on HTTP/HTTPS (default if --partial-clone)')
g.add_option('--no-tags',
dest='tags', default=True, action='store_false',
help="don't fetch tags in the manifest")
# Tool
g = p.add_option_group('repo Version options')
g.add_option('--repo-url',
dest='repo_url',
help='repo repository location', metavar='URL')
g.add_option('--repo-rev', metavar='REV',
help='repo branch or revision')
g.add_option('--repo-branch', dest='repo_rev',
help=optparse.SUPPRESS_HELP)
g.add_option('--no-repo-verify',
dest='repo_verify', default=True, action='store_false',
help='do not verify repo source code')
# Other
g = p.add_option_group('Other options')
g.add_option('--config-name',
dest='config_name', action="store_true", default=False,
help='Always prompt for name/e-mail')
def _RegisteredEnvironmentOptions(self):
return {'REPO_MANIFEST_URL': 'manifest_url',
'REPO_MIRROR_LOCATION': 'reference'}
def _CloneSuperproject(self, opt):
"""Clone the superproject based on the superproject's url and branch.
Args:
opt: Program options returned from optparse. See _Options().
"""
superproject = git_superproject.Superproject(self.manifest,
self.repodir,
quiet=opt.quiet)
if not superproject.Sync():
print('error: git update of superproject failed', file=sys.stderr)
sys.exit(1)
def _SyncManifest(self, opt):
m = self.manifest.manifestProject
is_new = not m.Exists
if is_new:
if not opt.manifest_url:
print('fatal: manifest url is required.', file=sys.stderr)
print('fatal: manifest url (-u) is required.', file=sys.stderr)
sys.exit(1)
if not opt.quiet:
@ -140,11 +226,6 @@ to update the working directory files.
r.Save()
if opt.manifest_branch:
if opt.manifest_branch == 'HEAD':
opt.manifest_branch = m.ResolveRemoteHead()
if opt.manifest_branch is None:
print('fatal: unable to resolve HEAD', file=sys.stderr)
sys.exit(1)
m.revisionExpr = opt.manifest_branch
else:
if is_new:
@ -173,7 +254,7 @@ to update the working directory files.
groups = [x for x in groups if x]
groupstr = ','.join(groups)
if opt.platform == 'auto' and groupstr == self.manifest.GetDefaultGroupsStr():
if opt.platform == 'auto' and groupstr == 'default,platform-' + platform.system().lower():
groupstr = None
m.config.SetString('manifest.groups', groupstr)
@ -181,7 +262,7 @@ to update the working directory files.
m.config.SetString('repo.reference', opt.reference)
if opt.dissociate:
m.config.SetBoolean('repo.dissociate', opt.dissociate)
m.config.SetString('repo.dissociate', 'true')
if opt.worktree:
if opt.mirror:
@ -192,14 +273,14 @@ to update the working directory files.
print('fatal: --submodules and --worktree are incompatible',
file=sys.stderr)
sys.exit(1)
m.config.SetBoolean('repo.worktree', opt.worktree)
m.config.SetString('repo.worktree', 'true')
if is_new:
m.use_git_worktrees = True
print('warning: --worktree is experimental!', file=sys.stderr)
if opt.archive:
if is_new:
m.config.SetBoolean('repo.archive', opt.archive)
m.config.SetString('repo.archive', 'true')
else:
print('fatal: --archive is only supported when initializing a new '
'workspace.', file=sys.stderr)
@ -209,7 +290,7 @@ to update the working directory files.
if opt.mirror:
if is_new:
m.config.SetBoolean('repo.mirror', opt.mirror)
m.config.SetString('repo.mirror', 'true')
else:
print('fatal: --mirror is only supported when initializing a new '
'workspace.', file=sys.stderr)
@ -217,39 +298,30 @@ to update the working directory files.
'in another location.', file=sys.stderr)
sys.exit(1)
if opt.partial_clone is not None:
if opt.partial_clone:
if opt.mirror:
print('fatal: --mirror and --partial-clone are mutually exclusive',
file=sys.stderr)
sys.exit(1)
m.config.SetBoolean('repo.partialclone', opt.partial_clone)
m.config.SetString('repo.partialclone', 'true')
if opt.clone_filter:
m.config.SetString('repo.clonefilter', opt.clone_filter)
elif m.config.GetBoolean('repo.partialclone'):
opt.clone_filter = m.config.GetString('repo.clonefilter')
else:
opt.clone_filter = None
if opt.partial_clone_exclude is not None:
m.config.SetString('repo.partialcloneexclude', opt.partial_clone_exclude)
if opt.clone_bundle is None:
opt.clone_bundle = False if opt.partial_clone else True
else:
m.config.SetBoolean('repo.clonebundle', opt.clone_bundle)
m.config.SetString('repo.clonebundle', 'true' if opt.clone_bundle else 'false')
if opt.submodules:
m.config.SetBoolean('repo.submodules', opt.submodules)
if opt.use_superproject is not None:
m.config.SetBoolean('repo.superproject', opt.use_superproject)
m.config.SetString('repo.submodules', 'true')
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet, verbose=opt.verbose,
clone_bundle=opt.clone_bundle,
current_branch_only=opt.current_branch_only,
tags=opt.tags, submodules=opt.submodules,
clone_filter=opt.clone_filter,
partial_clone_exclude=self.manifest.PartialCloneExclude):
clone_filter=opt.clone_filter):
r = m.GetRemote(m.remote.name)
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
@ -293,7 +365,7 @@ to update the working directory files.
return a
def _ShouldConfigureUser(self, opt):
gc = self.client.globalConfig
gc = self.manifest.globalConfig
mp = self.manifest.manifestProject
# If we don't have local settings, get from global.
@ -342,7 +414,7 @@ to update the working directory files.
return False
def _ConfigureColor(self):
gc = self.client.globalConfig
gc = self.manifest.globalConfig
if self._HasColorSet(gc):
return
@ -421,15 +493,7 @@ to update the working directory files.
self.OptionParser.error('--mirror and --archive cannot be used together.')
if args:
if opt.manifest_url:
self.OptionParser.error(
'--manifest-url option and URL argument both specified: only use '
'one to select the manifest URL.')
opt.manifest_url = args.pop(0)
if args:
self.OptionParser.error('too many arguments to init')
self.OptionParser.error('init takes no arguments')
def Execute(self, opt, args):
git_require(MIN_GIT_VERSION_HARD, fail=True)
@ -439,6 +503,9 @@ to update the working directory files.
% ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),),
file=sys.stderr)
opt.quiet = opt.output_mode is False
opt.verbose = opt.output_mode is True
rp = self.manifest.repoProject
# Handle new --repo-url requests.
@ -454,7 +521,7 @@ to update the working directory files.
rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet)
branch = rp.GetBranch('default')
branch.merge = remote_ref
rp.work_git.reset('--hard', rev)
rp.work_git.update_ref('refs/heads/default', rev)
branch.Save()
if opt.worktree:
@ -464,9 +531,6 @@ to update the working directory files.
self._SyncManifest(opt)
self._LinkManifest(opt.manifest_name)
if self.manifest.manifestProject.config.GetBoolean('repo.superproject'):
self._CloneSuperproject(opt)
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
if opt.config_name or self._ShouldConfigureUser(opt):
self._ConfigureUser(opt)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from command import Command, MirrorSafeCommand
@ -20,38 +24,30 @@ class List(Command, MirrorSafeCommand):
helpSummary = "List projects and their associated directories"
helpUsage = """
%prog [-f] [<project>...]
%prog [-f] -r str1 [str2]...
%prog [-f] -r str1 [str2]..."
"""
helpDescription = """
List all projects; pass '.' to list the project for the cwd.
By default, only projects that currently exist in the checkout are shown. If
you want to list all projects (using the specified filter settings), use the
--all option. If you want to show all projects regardless of the manifest
groups, then also pass --groups all.
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
"""
def _Options(self, p):
p.add_option('-r', '--regex',
dest='regex', action='store_true',
help='filter the project list based on regex or wildcard matching of strings')
help="Filter the project list based on regex or wildcard matching of strings")
p.add_option('-g', '--groups',
dest='groups',
help='filter the project list based on the groups the project is in')
p.add_option('-a', '--all',
action='store_true',
help='show projects regardless of checkout state')
help="Filter the project list based on the groups the project is in")
p.add_option('-f', '--fullpath',
dest='fullpath', action='store_true',
help='display the full work tree path instead of the relative path')
help="Display the full work tree path instead of the relative path")
p.add_option('-n', '--name-only',
dest='name_only', action='store_true',
help='display only the name of the repository')
help="Display only the name of the repository")
p.add_option('-p', '--path-only',
dest='path_only', action='store_true',
help='display only the path of the repository')
help="Display only the path of the repository")
def ValidateOptions(self, opt, args):
if opt.fullpath and opt.name_only:
@ -69,7 +65,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
args: Positional args. Can be a list of projects to list, or empty.
"""
if not opt.regex:
projects = self.GetProjects(args, groups=opt.groups, missing_ok=opt.all)
projects = self.GetProjects(args, groups=opt.groups)
else:
projects = self.FindProjects(args)
@ -87,6 +83,5 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
else:
lines.append("%s : %s" % (_getpath(project), project.name))
if lines:
lines.sort()
print('\n'.join(lines))
lines.sort()
print('\n'.join(lines))

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from __future__ import print_function
import os
import sys
@ -53,27 +55,23 @@ to indicate the remote ref to push changes to via 'repo upload'.
def _Options(self, p):
p.add_option('-r', '--revision-as-HEAD',
dest='peg_rev', action='store_true',
help='save revisions as current HEAD')
help='Save revisions as current HEAD')
p.add_option('-m', '--manifest-name',
help='temporary manifest to use for this sync', metavar='NAME.xml')
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
default=True, action='store_false',
help='if in -r mode, do not write the upstream field '
'(only of use if the branch names for a sha1 manifest are '
'sensitive)')
help='If in -r mode, do not write the upstream field. '
'Only of use if the branch names for a sha1 manifest are '
'sensitive.')
p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
default=True, action='store_false',
help='if in -r mode, do not write the dest-branch field '
'(only of use if the branch names for a sha1 manifest are '
'sensitive)')
p.add_option('--json', default=False, action='store_true',
help='output manifest in JSON format (experimental)')
p.add_option('--pretty', default=False, action='store_true',
help='format output for humans to read')
help='If in -r mode, do not write the dest-branch field. '
'Only of use if the branch names for a sha1 manifest are '
'sensitive.')
p.add_option('-o', '--output-file',
dest='output_file',
default='-',
help='file to save the manifest to',
help='File to save the manifest to',
metavar='-|NAME.xml')
def _Output(self, opt):
@ -85,26 +83,10 @@ to indicate the remote ref to push changes to via 'repo upload'.
fd = sys.stdout
else:
fd = open(opt.output_file, 'w')
if opt.json:
print('warning: --json is experimental!', file=sys.stderr)
doc = self.manifest.ToDict(peg_rev=opt.peg_rev,
peg_rev_upstream=opt.peg_rev_upstream,
peg_rev_dest_branch=opt.peg_rev_dest_branch)
json_settings = {
# JSON style guide says Unicode characters are fully allowed.
'ensure_ascii': False,
# We use 2 space indent to match JSON style guide.
'indent': 2 if opt.pretty else None,
'separators': (',', ': ') if opt.pretty else (',', ':'),
'sort_keys': True,
}
fd.write(json.dumps(doc, **json_settings))
else:
self.manifest.Save(fd,
peg_rev=opt.peg_rev,
peg_rev_upstream=opt.peg_rev_upstream,
peg_rev_dest_branch=opt.peg_rev_dest_branch)
self.manifest.Save(fd,
peg_rev=opt.peg_rev,
peg_rev_upstream=opt.peg_rev_upstream,
peg_rev_dest_branch=opt.peg_rev_dest_branch)
fd.close()
if opt.output_file != '-':
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
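The --json branch above is just json.dumps() with a few deliberate settings: no ASCII escaping, sorted keys, and either compact separators or a 2-space indent when --pretty is given. A small sketch of those knobs on a toy manifest dict:

    import json

    doc = {'project': [{'name': 'platform/build', 'path': 'build'}]}
    for pretty in (False, True):
        print(json.dumps(doc,
                         ensure_ascii=False,
                         indent=2 if pretty else None,
                         separators=(',', ': ') if pretty else (',', ':'),
                         sort_keys=True))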

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,8 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
from __future__ import print_function
from color import Coloring
from command import PagedCommand
@ -28,22 +29,15 @@ class Overview(PagedCommand):
The '%prog' command is used to display an overview of the projects' branches,
and list any local commits that have not yet been merged into the project.
The -c/--current-branch option can be used to restrict the output to only
The -b/--current-branch option can be used to restrict the output to only
branches currently checked out in each project. By default, all branches
are displayed.
"""
def _Options(self, p):
p.add_option('-c', '--current-branch',
p.add_option('-b', '--current-branch',
dest="current_branch", action="store_true",
help="consider only checked out branches")
p.add_option('--no-current-branch',
dest='current_branch', action='store_false',
help='consider all local branches')
# Turn this into a warning & remove this someday.
p.add_option('-b',
dest='current_branch', action='store_true',
help=optparse.SUPPRESS_HELP)
help="Consider only checked out branches")
def Execute(self, opt, args):
all_branches = []

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,10 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
from __future__ import print_function
from color import Coloring
from command import DEFAULT_LOCAL_JOBS, PagedCommand
from command import PagedCommand
class Prune(PagedCommand):
@ -24,26 +25,11 @@ class Prune(PagedCommand):
helpUsage = """
%prog [<project>...]
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _ExecuteOne(self, project):
"""Process one project."""
return project.PruneHeads()
def Execute(self, opt, args):
projects = self.GetProjects(args)
# NB: Should be able to refactor this module to display summary as results
# come back from children.
def _ProcessResults(_pool, _output, results):
return list(itertools.chain.from_iterable(results))
all_branches = self.ExecuteInParallel(
opt.jobs,
self._ExecuteOne,
projects,
callback=_ProcessResults,
ordered=True)
all_branches = []
for project in self.GetProjects(args):
all_branches.extend(project.PruneHeads())
if not all_branches:
return

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from color import Coloring
@ -39,34 +42,36 @@ branch but need to incorporate new upstream changes "underneath" them.
"""
def _Options(self, p):
g = p.get_option_group('--quiet')
g.add_option('-i', '--interactive',
p.add_option('-i', '--interactive',
dest="interactive", action="store_true",
help="interactive rebase (single project only)")
p.add_option('--fail-fast',
dest='fail_fast', action='store_true',
help='stop rebasing after first error is hit')
help='Stop rebasing after first error is hit')
p.add_option('-f', '--force-rebase',
dest='force_rebase', action='store_true',
help='pass --force-rebase to git rebase')
help='Pass --force-rebase to git rebase')
p.add_option('--no-ff',
dest='ff', default=True, action='store_false',
help='pass --no-ff to git rebase')
help='Pass --no-ff to git rebase')
p.add_option('-q', '--quiet',
dest='quiet', action='store_true',
help='Pass --quiet to git rebase')
p.add_option('--autosquash',
dest='autosquash', action='store_true',
help='pass --autosquash to git rebase')
help='Pass --autosquash to git rebase')
p.add_option('--whitespace',
dest='whitespace', action='store', metavar='WS',
help='pass --whitespace to git rebase')
help='Pass --whitespace to git rebase')
p.add_option('--auto-stash',
dest='auto_stash', action='store_true',
help='stash local modifications before starting')
help='Stash local modifications before starting')
p.add_option('-m', '--onto-manifest',
dest='onto_manifest', action='store_true',
help='rebase onto the manifest version instead of upstream '
'HEAD (this helps to make sure the local tree stays '
'consistent if you previously synced to a manifest)')
help='Rebase onto the manifest version instead of upstream '
'HEAD. This helps to make sure the local tree stays '
'consistent if you previously synced to a manifest.')
def Execute(self, opt, args):
all_projects = self.GetProjects(args)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from optparse import SUPPRESS_HELP
import sys

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from color import Coloring
@ -38,8 +41,7 @@ The '%prog' command stages files to prepare the next commit.
"""
def _Options(self, p):
g = p.get_option_group('--quiet')
g.add_option('-i', '--interactive',
p.add_option('-i', '--interactive',
dest='interactive', action='store_true',
help='use interactive staging')

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,11 +14,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from __future__ import print_function
import os
import sys
from command import Command, DEFAULT_LOCAL_JOBS
from command import Command
from git_config import IsImmutable
from git_command import git
import gitc_utils
@ -34,7 +36,6 @@ class Start(Command):
'%prog' begins a new branch of development, starting from the
revision specified in the manifest.
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _Options(self, p):
p.add_option('--all',
@ -42,8 +43,7 @@ revision specified in the manifest.
help='begin branch in all projects')
p.add_option('-r', '--rev', '--revision', dest='revision',
help='point branch at this revision instead of upstream')
p.add_option('--head', '--HEAD',
dest='revision', action='store_const', const='HEAD',
p.add_option('--head', dest='revision', action='store_const', const='HEAD',
help='abbreviation for --rev HEAD')
def ValidateOptions(self, opt, args):
@ -54,26 +54,6 @@ revision specified in the manifest.
if not git.check_ref_format('heads/%s' % nb):
self.OptionParser.error("'%s' is not a valid name" % nb)
def _ExecuteOne(self, revision, nb, project):
"""Start one project."""
# If the current revision is immutable, such as a SHA1, a tag or
# a change, then we can't push back to it. Substitute with
# dest_branch, if defined; or with manifest default revision instead.
branch_merge = ''
if IsImmutable(project.revisionExpr):
if project.dest_branch:
branch_merge = project.dest_branch
else:
branch_merge = self.manifest.default.revisionExpr
try:
ret = project.StartBranch(
nb, branch_merge=branch_merge, revision=revision)
except Exception as e:
print('error: unable to checkout %s: %s' % (project.name, e), file=sys.stderr)
ret = False
return (ret, project)
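The comment block above describes the substitution: an immutable revision (a sha1, a tag, or a change) cannot be pushed back to, so the merge target falls back to dest_branch or the manifest's default revision. A toy sketch of that decision (the is_immutable check here is simplified; repo's real one lives in git_config.IsImmutable):

    def pick_branch_merge(revision_expr, dest_branch, default_rev):
        def is_immutable(rev):
            # Simplified: treat full sha1s and tag refs as unpushable.
            return len(rev) == 40 or rev.startswith('refs/tags/')
        if not is_immutable(revision_expr):
            return ''
        return dest_branch or default_rev

    print(pick_branch_merge('a' * 40, '', 'refs/heads/main'))  # default rev
    print(pick_branch_merge('refs/heads/dev', 'x', 'y'))       # '' (mutable)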
def Execute(self, opt, args):
nb = args[0]
err = []
@ -105,8 +85,11 @@ revision specified in the manifest.
if not os.path.exists(os.getcwd()):
os.chdir(self.manifest.topdir)
pm = Progress('Syncing %s' % nb, len(all_projects), quiet=opt.quiet)
for project in all_projects:
pm = Progress('Starting %s' % nb, len(all_projects))
for project in all_projects:
pm.update()
if self.gitc_manifest:
gitc_project = self.gitc_manifest.paths[project.relpath]
# Sync projects that have not been opened.
if not gitc_project.already_synced:
@ -119,21 +102,21 @@ revision specified in the manifest.
sync_buf = SyncBuffer(self.manifest.manifestProject.config)
project.Sync_LocalHalf(sync_buf)
project.revisionId = gitc_project.old_revision
pm.update()
pm.end()
def _ProcessResults(_pool, pm, results):
for (result, project) in results:
if not result:
err.append(project)
pm.update()
# If the current revision is immutable, such as a SHA1, a tag or
# a change, then we can't push back to it. Substitute with
# dest_branch, if defined; or with manifest default revision instead.
branch_merge = ''
if IsImmutable(project.revisionExpr):
if project.dest_branch:
branch_merge = project.dest_branch
else:
branch_merge = self.manifest.default.revisionExpr
self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, opt.revision, nb),
all_projects,
callback=_ProcessResults,
output=Progress('Starting %s' % (nb,), len(all_projects), quiet=opt.quiet))
if not project.StartBranch(
nb, branch_merge=branch_merge, revision=opt.revision):
err.append(project)
pm.end()
if err:
for p in err:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,12 +14,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import functools
import glob
import io
import multiprocessing
import os
from command import DEFAULT_LOCAL_JOBS, PagedCommand
from command import PagedCommand
from color import Coloring
import platform_utils
@ -76,12 +80,16 @@ the following meanings:
d: deleted ( in index, not in work tree )
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _Options(self, p):
p.add_option('-j', '--jobs',
dest='jobs', action='store', type='int', default=2,
help="number of projects to check simultaneously")
p.add_option('-o', '--orphans',
dest='orphans', action='store_true',
help="include objects in working directory outside of repo projects")
p.add_option('-q', '--quiet', action='store_true',
help="only print the name of modified projects")
def _StatusHelper(self, quiet, project):
"""Obtains the status for a specific project.
@ -96,9 +104,7 @@ the following meanings:
Returns:
The status of the project.
"""
buf = io.StringIO()
ret = project.PrintWorkTreeStatus(quiet=quiet, output_redir=buf)
return (ret, buf.getvalue())
return project.PrintWorkTreeStatus(quiet=quiet)
def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
"""find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
@ -118,23 +124,17 @@ the following meanings:
def Execute(self, opt, args):
all_projects = self.GetProjects(args)
counter = 0
def _ProcessResults(_pool, _output, results):
ret = 0
for (state, output) in results:
if output:
print(output, end='')
if opt.jobs == 1:
for project in all_projects:
state = project.PrintWorkTreeStatus(quiet=opt.quiet)
if state == 'CLEAN':
ret += 1
return ret
counter = self.ExecuteInParallel(
opt.jobs,
functools.partial(self._StatusHelper, opt.quiet),
all_projects,
callback=_ProcessResults,
ordered=True)
counter += 1
else:
with multiprocessing.Pool(opt.jobs) as pool:
states = pool.map(functools.partial(self._StatusHelper, opt.quiet), all_projects)
counter += states.count('CLEAN')
if not opt.quiet and len(all_projects) == counter:
print('nothing to commit (working directory clean)')
@ -165,7 +165,7 @@ the following meanings:
proj_dirs, proj_dirs_parents, outstring)
if outstring:
output = StatusColoring(self.client.globalConfig)
output = StatusColoring(self.manifest.globalConfig)
output.project('Objects not within a project (orphans)')
output.nl()
for entry in outstring:

File diff suppressed because it is too large

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,19 +14,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import copy
import functools
import optparse
import re
import sys
from command import DEFAULT_LOCAL_JOBS, InteractiveCommand
from command import InteractiveCommand
from editor import Editor
from error import UploadError
from error import HookError, UploadError
from git_command import GitCommand
from git_refs import R_HEADS
from hooks import RepoHook
from pyversion import is_python3
if not is_python3():
input = raw_input # noqa: F821
else:
unicode = str
UNUSUAL_COMMIT_THRESHOLD = 5
@ -147,67 +153,85 @@ https://gerrit-review.googlesource.com/Documentation/user-upload.html#notify
Gerrit Code Review: https://www.gerritcodereview.com/
"""
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
def _Options(self, p):
p.add_option('-t',
dest='auto_topic', action='store_true',
help='send local branch name to Gerrit Code Review')
help='Send local branch name to Gerrit Code Review')
p.add_option('--hashtag', '--ht',
dest='hashtags', action='append', default=[],
help='add hashtags (comma delimited) to the review')
help='Add hashtags (comma delimited) to the review.')
p.add_option('--hashtag-branch', '--htb',
action='store_true',
help='add local branch name as a hashtag')
help='Add local branch name as a hashtag.')
p.add_option('-l', '--label',
dest='labels', action='append', default=[],
help='add a label when uploading')
help='Add a label when uploading.')
p.add_option('--re', '--reviewers',
type='string', action='append', dest='reviewers',
help='request reviews from these people')
help='Request reviews from these people.')
p.add_option('--cc',
type='string', action='append', dest='cc',
help='also send email to these email addresses')
p.add_option('--br', '--branch',
help='Also send email to these email addresses.')
p.add_option('--br',
type='string', action='store', dest='branch',
help='(local) branch to upload')
p.add_option('-c', '--current-branch',
help='Branch to upload.')
p.add_option('--cbr', '--current-branch',
dest='current_branch', action='store_true',
help='upload current git branch')
p.add_option('--no-current-branch',
dest='current_branch', action='store_false',
help='upload all git branches')
# Turn this into a warning & remove this someday.
p.add_option('--cbr',
dest='current_branch', action='store_true',
help=optparse.SUPPRESS_HELP)
help='Upload current git branch.')
p.add_option('--ne', '--no-emails',
action='store_false', dest='notify', default=True,
help='do not send e-mails on upload')
help='If specified, do not send emails on upload.')
p.add_option('-p', '--private',
action='store_true', dest='private', default=False,
help='upload as a private change (deprecated; use --wip)')
help='If specified, upload as a private change.')
p.add_option('-w', '--wip',
action='store_true', dest='wip', default=False,
help='upload as a work-in-progress change')
help='If specified, upload as a work-in-progress change.')
p.add_option('-o', '--push-option',
type='string', action='append', dest='push_options',
default=[],
help='additional push options to transmit')
help='Additional push options to transmit')
p.add_option('-D', '--destination', '--dest',
type='string', action='store', dest='dest_branch',
metavar='BRANCH',
help='submit for review on this target branch')
help='Submit for review on this target branch.')
p.add_option('-n', '--dry-run',
dest='dryrun', default=False, action='store_true',
help='do everything except actually upload the CL')
help='Do everything except actually upload the CL.')
p.add_option('-y', '--yes',
default=False, action='store_true',
help='answer yes to all safe prompts')
help='Answer yes to all safe prompts.')
p.add_option('--no-cert-checks',
dest='validate_certs', action='store_false', default=True,
help='disable verifying ssl certs (unsafe)')
RepoHook.AddOptionGroup(p, 'pre-upload')
help='Disable verifying ssl certs (unsafe).')
# Options relating to upload hook. Note that verify and no-verify are NOT
# opposites of each other, which is why they store to different locations.
# We are using them to match 'git commit' syntax.
#
# Combinations:
# - no-verify=False, verify=False (DEFAULT):
# If stdout is a tty, can prompt about running upload hooks if needed.
# If user denies running hooks, the upload is cancelled. If stdout is
# not a tty and we would need to prompt about upload hooks, upload is
# cancelled.
# - no-verify=False, verify=True:
# Always run upload hooks with no prompt.
# - no-verify=True, verify=False:
# Never run upload hooks, but upload anyway (AKA bypass hooks).
# - no-verify=True, verify=True:
# Invalid
g = p.add_option_group('Upload hooks')
g.add_option('--no-verify',
dest='bypass_hooks', action='store_true',
help='Do not run the upload hook.')
g.add_option('--verify',
dest='allow_all_hooks', action='store_true',
help='Run the upload hook without prompting.')
g.add_option('--ignore-hooks',
dest='ignore_hooks', action='store_true',
help='Do not abort uploading if upload hooks fail.')
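The four flag combinations described in the comment above boil down to a small decision. A minimal sketch, assuming a hypothetical helper name (resolve_hook_mode is illustrative and not part of repo's code):
# Illustrative sketch only; repo's real logic lives in the hook plumbing below.
def resolve_hook_mode(bypass_hooks, allow_all_hooks):
    """Map --no-verify/--verify flags to a hook-handling mode."""
    if bypass_hooks and allow_all_hooks:
        raise ValueError('--no-verify and --verify are mutually exclusive')
    if bypass_hooks:
        return 'skip'    # never run hooks, but upload anyway
    if allow_all_hooks:
        return 'run'     # always run hooks without prompting
    return 'prompt'      # prompt on a tty; otherwise cancel if hooks are needed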
def _SingleBranch(self, opt, branch, people):
project = branch.project
@ -512,60 +536,72 @@ Gerrit Code Review: https://www.gerritcodereview.com/
merge_branch = p.stdout.strip()
return merge_branch
@staticmethod
def _GatherOne(opt, project):
"""Figure out the upload status for |project|."""
if opt.current_branch:
cbr = project.CurrentBranch
up_branch = project.GetUploadableBranch(cbr)
avail = [up_branch] if up_branch else None
else:
avail = project.GetUploadableBranches(opt.branch)
return (project, avail)
def Execute(self, opt, args):
projects = self.GetProjects(args)
project_list = self.GetProjects(args)
pending = []
reviewers = []
cc = []
branch = None
def _ProcessResults(_pool, _out, results):
pending = []
for result in results:
project, avail = result
if avail is None:
print('repo: error: %s: Unable to upload branch "%s". '
'You might be able to fix the branch by running:\n'
' git branch --set-upstream-to m/%s' %
(project.relpath, project.CurrentBranch, self.manifest.branch),
if opt.branch:
branch = opt.branch
for project in project_list:
if opt.current_branch:
cbr = project.CurrentBranch
up_branch = project.GetUploadableBranch(cbr)
if up_branch:
avail = [up_branch]
else:
avail = None
print('ERROR: Current branch (%s) not uploadable. '
'You may be able to type '
'"git branch --set-upstream-to m/master" to fix '
'your branch.' % str(cbr),
file=sys.stderr)
elif avail:
pending.append(result)
return pending
pending = self.ExecuteInParallel(
opt.jobs,
functools.partial(self._GatherOne, opt),
projects,
callback=_ProcessResults)
else:
avail = project.GetUploadableBranches(branch)
if avail:
pending.append((project, avail))
if not pending:
if opt.branch is None:
if branch is None:
print('repo: error: no branches ready for upload', file=sys.stderr)
else:
print('repo: error: no branches named "%s" ready for upload' %
(opt.branch,), file=sys.stderr)
(branch,), file=sys.stderr)
return 1
pending_proj_names = [project.name for (project, available) in pending]
pending_worktrees = [project.worktree for (project, available) in pending]
hook = RepoHook.FromSubcmd(
hook_type='pre-upload', manifest=self.manifest,
opt=opt, abort_if_user_denies=True)
if not hook.Run(
project_list=pending_proj_names,
worktree_list=pending_worktrees):
return 1
if not opt.bypass_hooks:
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
self.manifest.topdir,
self.manifest.manifestProject.GetRemote('origin').url,
abort_if_user_denies=True)
pending_proj_names = [project.name for (project, available) in pending]
pending_worktrees = [project.worktree for (project, available) in pending]
passed = True
try:
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
worktree_list=pending_worktrees)
except SystemExit:
passed = False
if not opt.ignore_hooks:
raise
except HookError as e:
passed = False
print("ERROR: %s" % str(e), file=sys.stderr)
reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else []
cc = _SplitEmails(opt.cc) if opt.cc else []
if not passed:
if opt.ignore_hooks:
print('\nWARNING: pre-upload hooks failed, but uploading anyways.',
file=sys.stderr)
else:
return 1
if opt.reviewers:
reviewers = _SplitEmails(opt.reviewers)
if opt.cc:
cc = _SplitEmails(opt.cc)
people = (reviewers, cc)
if len(pending) == 1 and len(pending[0][1]) == 1:

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,13 +14,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import platform
import sys
from command import Command, MirrorSafeCommand
from git_command import git, RepoSourceVersion, user_agent
from git_refs import HEAD
from wrapper import Wrapper
class Version(Command, MirrorSafeCommand):
@ -34,14 +37,12 @@ class Version(Command, MirrorSafeCommand):
def Execute(self, opt, args):
rp = self.manifest.repoProject
rem = rp.GetRemote(rp.remote.name)
branch = rp.GetBranch('default')
# These might not be the same. Report them both.
src_ver = RepoSourceVersion()
rp_ver = rp.bare_git.describe(HEAD)
print('repo version %s' % rp_ver)
print(' (from %s)' % rem.url)
print(' (tracking %s)' % branch.merge)
print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD))
if self.wrapper_path is not None:
@ -63,4 +64,3 @@ class Version(Command, MirrorSafeCommand):
print('OS %s %s (%s)' % (uname.system, uname.release, uname.version))
print('CPU %s (%s)' %
(uname.machine, uname.processor if uname.processor else 'unknown'))
print('Bug reports:', Wrapper().BUG_URL)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,6 +16,8 @@
"""Unittests for the editor.py module."""
from __future__ import print_function
import unittest
from editor import Editor

View File

@ -1,53 +0,0 @@
# Copyright 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the error.py module."""
import inspect
import pickle
import unittest
import error
class PickleTests(unittest.TestCase):
"""Make sure all our custom exceptions can be pickled."""
def getExceptions(self):
"""Return all our custom exceptions."""
for name in dir(error):
cls = getattr(error, name)
if isinstance(cls, type) and issubclass(cls, Exception):
yield cls
def testExceptionLookup(self):
"""Make sure our introspection logic works."""
classes = list(self.getExceptions())
self.assertIn(error.HookError, classes)
# Don't assert the exact number to avoid being a change-detector test.
self.assertGreater(len(classes), 10)
def testPickle(self):
"""Try to pickle all the exceptions."""
for cls in self.getExceptions():
args = inspect.getfullargspec(cls.__init__).args[1:]
obj = cls(*args)
p = pickle.dumps(obj)
try:
newobj = pickle.loads(p)
except Exception as e: # pylint: disable=broad-except
self.fail('Class %s is unable to be pickled: %s\n'
'Incomplete super().__init__(...) call?' % (cls, e))
self.assertIsInstance(newobj, cls)
self.assertEqual(str(obj), str(newobj))

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,6 +16,8 @@
"""Unittests for the git_command.py module."""
from __future__ import print_function
import re
import unittest
@ -26,6 +30,33 @@ import git_command
import wrapper
class SSHUnitTest(unittest.TestCase):
"""Tests the ssh functions."""
def test_ssh_version(self):
"""Check ssh_version() handling."""
ver = git_command._parse_ssh_version('Unknown\n')
self.assertEqual(ver, ())
ver = git_command._parse_ssh_version('OpenSSH_1.0\n')
self.assertEqual(ver, (1, 0))
ver = git_command._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
self.assertEqual(ver, (6, 6, 1))
ver = git_command._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
self.assertEqual(ver, (7, 6))
def test_ssh_sock(self):
"""Check ssh_sock() function."""
with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
# old ssh version uses port
with mock.patch('git_command.ssh_version', return_value=(6, 6)):
self.assertTrue(git_command.ssh_sock().endswith('%p'))
git_command._ssh_sock_path = None
# new ssh version uses hash
with mock.patch('git_command.ssh_version', return_value=(6, 7)):
self.assertTrue(git_command.ssh_sock().endswith('%C'))
git_command._ssh_sock_path = None
class GitCallUnitTest(unittest.TestCase):
"""Tests the _GitCall class (via git_command.git)."""

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,8 +16,9 @@
"""Unittests for the git_config.py module."""
from __future__ import print_function
import os
import tempfile
import unittest
import git_config
@ -27,8 +30,9 @@ def fixture(*paths):
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
class GitConfigReadOnlyTests(unittest.TestCase):
"""Read-only tests of the GitConfig class."""
class GitConfigUnitTest(unittest.TestCase):
"""Tests the GitConfig class.
"""
def setUp(self):
"""Create a GitConfig object using the test.gitconfig fixture.
@ -105,69 +109,5 @@ class GitConfigReadOnlyTests(unittest.TestCase):
self.assertEqual(value, self.config.GetInt('section.%s' % (key,)))
class GitConfigReadWriteTests(unittest.TestCase):
"""Read/write tests of the GitConfig class."""
def setUp(self):
self.tmpfile = tempfile.NamedTemporaryFile()
self.config = self.get_config()
def get_config(self):
"""Get a new GitConfig instance."""
return git_config.GitConfig(self.tmpfile.name)
def test_SetString(self):
"""Test SetString behavior."""
# Set a value.
self.assertIsNone(self.config.GetString('foo.bar'))
self.config.SetString('foo.bar', 'val')
self.assertEqual('val', self.config.GetString('foo.bar'))
# Make sure the value was actually written out.
config = self.get_config()
self.assertEqual('val', config.GetString('foo.bar'))
# Update the value.
self.config.SetString('foo.bar', 'valll')
self.assertEqual('valll', self.config.GetString('foo.bar'))
config = self.get_config()
self.assertEqual('valll', config.GetString('foo.bar'))
# Delete the value.
self.config.SetString('foo.bar', None)
self.assertIsNone(self.config.GetString('foo.bar'))
config = self.get_config()
self.assertIsNone(config.GetString('foo.bar'))
def test_SetBoolean(self):
"""Test SetBoolean behavior."""
# Set a true value.
self.assertIsNone(self.config.GetBoolean('foo.bar'))
for val in (True, 1):
self.config.SetBoolean('foo.bar', val)
self.assertTrue(self.config.GetBoolean('foo.bar'))
# Make sure the value was actually written out.
config = self.get_config()
self.assertTrue(config.GetBoolean('foo.bar'))
self.assertEqual('true', config.GetString('foo.bar'))
# Set a false value.
for val in (False, 0):
self.config.SetBoolean('foo.bar', val)
self.assertFalse(self.config.GetBoolean('foo.bar'))
# Make sure the value was actually written out.
config = self.get_config()
self.assertFalse(config.GetBoolean('foo.bar'))
self.assertEqual('false', config.GetString('foo.bar'))
# Delete the value.
self.config.SetBoolean('foo.bar', None)
self.assertIsNone(self.config.GetBoolean('foo.bar'))
config = self.get_config()
self.assertIsNone(config.GetBoolean('foo.bar'))
if __name__ == '__main__':
unittest.main()

View File

@ -1,228 +0,0 @@
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the git_superproject.py module."""
import os
import platform
import tempfile
import unittest
from unittest import mock
import git_superproject
import manifest_xml
import platform_utils
from test_manifest_xml import sort_attributes
class SuperprojectTestCase(unittest.TestCase):
"""TestCase for the Superproject module."""
def setUp(self):
"""Set up superproject every time."""
self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
self.repodir = os.path.join(self.tempdir, '.repo')
self.manifest_file = os.path.join(
self.repodir, manifest_xml.MANIFEST_FILE_NAME)
os.mkdir(self.repodir)
self.platform = platform.system().lower()
# The manifest parsing really wants a git repo currently.
gitdir = os.path.join(self.repodir, 'manifests.git')
os.mkdir(gitdir)
with open(os.path.join(gitdir, 'config'), 'w') as fp:
fp.write("""[remote "origin"]
url = https://localhost:0/manifest
""")
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
<project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
" /></manifest>
""")
self._superproject = git_superproject.Superproject(manifest, self.repodir)
def tearDown(self):
"""Tear down superproject every time."""
platform_utils.rmtree(self.tempdir)
def getXmlManifest(self, data):
"""Helper to initialize a manifest for testing."""
with open(self.manifest_file, 'w') as fp:
fp.write(data)
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
def test_superproject_get_superproject_no_superproject(self):
"""Test with no url."""
manifest = self.getXmlManifest("""
<manifest>
</manifest>
""")
superproject = git_superproject.Superproject(manifest, self.repodir)
self.assertFalse(superproject.Sync())
def test_superproject_get_superproject_invalid_url(self):
"""Test with an invalid url."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
</manifest>
""")
superproject = git_superproject.Superproject(manifest, self.repodir)
self.assertFalse(superproject.Sync())
def test_superproject_get_superproject_invalid_branch(self):
"""Test with an invalid branch."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
</manifest>
""")
superproject = git_superproject.Superproject(manifest, self.repodir)
with mock.patch.object(self._superproject, '_GetBranch', return_value='junk'):
self.assertFalse(superproject.Sync())
def test_superproject_get_superproject_mock_init(self):
"""Test with _Init failing."""
with mock.patch.object(self._superproject, '_Init', return_value=False):
self.assertFalse(self._superproject.Sync())
def test_superproject_get_superproject_mock_fetch(self):
"""Test with _Fetch failing."""
with mock.patch.object(self._superproject, '_Init', return_value=True):
os.mkdir(self._superproject._superproject_path)
with mock.patch.object(self._superproject, '_Fetch', return_value=False):
self.assertFalse(self._superproject.Sync())
def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
"""Test with LsTree being a mock."""
data = ('120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00'
'160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00'
'120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00'
'160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00')
with mock.patch.object(self._superproject, '_Init', return_value=True):
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
with mock.patch.object(self._superproject, '_LsTree', return_value=data):
commit_ids = self._superproject._GetAllProjectsCommitIds()
self.assertEqual(commit_ids, {
'art': '2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea',
'bootable/recovery': 'e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06',
'build/bazel': 'ade9b7a0d874e25fff4bf2552488825c6f111928'
})
def test_superproject_write_manifest_file(self):
"""Test with writing manifest to a file after setting revisionId."""
self.assertEqual(len(self._superproject._manifest.projects), 1)
project = self._superproject._manifest.projects[0]
project.SetRevisionId('ABCDEF')
# Create temporary directory so that it can write the file.
os.mkdir(self._superproject._superproject_path)
manifest_path = self._superproject._WriteManfiestFile()
self.assertIsNotNone(manifest_path)
with open(manifest_path, 'r') as fp:
manifest_xml = fp.read()
self.assertEqual(
sort_attributes(manifest_xml),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project groups="notdefault,platform-' + self.platform + '" '
'name="platform/art" path="art" revision="ABCDEF"/>'
'<superproject name="superproject"/>'
'</manifest>')
def test_superproject_update_project_revision_id(self):
"""Test with LsTree being a mock."""
self.assertEqual(len(self._superproject._manifest.projects), 1)
projects = self._superproject._manifest.projects
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00')
with mock.patch.object(self._superproject, '_Init', return_value=True):
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
with mock.patch.object(self._superproject,
'_LsTree',
return_value=data):
# Create temporary directory so that it can write the file.
os.mkdir(self._superproject._superproject_path)
manifest_path = self._superproject.UpdateProjectsRevisionId(projects)
self.assertIsNotNone(manifest_path)
with open(manifest_path, 'r') as fp:
manifest_xml = fp.read()
self.assertEqual(
sort_attributes(manifest_xml),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project groups="notdefault,platform-' + self.platform + '" '
'name="platform/art" path="art" '
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea"/>'
'<superproject name="superproject"/>'
'</manifest>')
def test_superproject_update_project_revision_id_with_different_remotes(self):
"""Test update of commit ids of a manifest with mutiple remotes."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<remote name="goog" fetch="http://localhost2" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
<project path="vendor/x" name="platform/vendor/x" remote="goog" groups="vendor"
revision="master-with-vendor" clone-depth="1" />
<project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
" /></manifest>
""")
self.maxDiff = None
self._superproject = git_superproject.Superproject(manifest, self.repodir)
self.assertEqual(len(self._superproject._manifest.projects), 2)
projects = self._superproject._manifest.projects
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00')
with mock.patch.object(self._superproject, '_Init', return_value=True):
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
with mock.patch.object(self._superproject,
'_LsTree',
return_value=data):
# Create temporary directory so that it can write the file.
os.mkdir(self._superproject._superproject_path)
manifest_path = self._superproject.UpdateProjectsRevisionId(projects)
self.assertIsNotNone(manifest_path)
with open(manifest_path, 'r') as fp:
manifest_xml = fp.read()
self.assertEqual(
sort_attributes(manifest_xml),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<remote fetch="http://localhost2" name="goog"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project groups="notdefault,platform-' + self.platform + '" '
'name="platform/art" path="art" '
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea"/>'
'<project clone-depth="1" groups="vendor" '
'name="platform/vendor/x" path="vendor/x" remote="goog" '
'revision="master-with-vendor"/>'
'<superproject name="superproject"/>'
'</manifest>')
if __name__ == '__main__':
unittest.main()

View File

@ -1,261 +0,0 @@
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the git_trace2_event_log.py module."""
import json
import os
import tempfile
import unittest
from unittest import mock
import git_trace2_event_log
class EventLogTestCase(unittest.TestCase):
"""TestCase for the EventLog module."""
PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
PARENT_SID_VALUE = 'parent_sid'
SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
def setUp(self):
"""Load the event_log module every time."""
self._event_log_module = None
# By default we initialize with the expected case where
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
env = {
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
}
self._event_log_module = git_trace2_event_log.EventLog(env=env)
self._log_data = None
def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
"""Helper function to verify common event log keys."""
self.assertIn('event', log_entry)
self.assertIn('sid', log_entry)
self.assertIn('thread', log_entry)
self.assertIn('time', log_entry)
# Do basic data format validation.
self.assertEqual(expected_event_name, log_entry['event'])
if full_sid:
self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
else:
self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
def readLog(self, log_path):
"""Helper function to read log data into a list."""
log_data = []
with open(log_path, mode='rb') as f:
for line in f:
log_data.append(json.loads(line))
return log_data
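For reference, each line these helpers read is one JSON object; an illustrative (made-up) 'version' entry carrying the keys the assertions below check might look like:
# Illustrative only: shape of a single trace2 'version' event entry.
example_version_event = {
    'event': 'version',
    'sid': 'parent_sid/repo-20210101T120000Z-P000012ab',
    'thread': 'MainThread',
    'time': '2021-01-01T12:00:00.123456Z',
    'evt': '2',       # event format version, asserted to be a str
    'exe': '2.29.9',  # executable version string (value made up)
}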
def test_initial_state_with_parent_sid(self):
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
def test_initial_state_no_parent_sid(self):
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
# Setup an empty environment dict (no parent sid).
self._event_log_module = git_trace2_event_log.EventLog(env={})
self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
def test_version_event(self):
"""Test 'version' event data is valid.
Verify that the 'version' event is written even when no other
events are added.

Expected event log:
<version event>
"""
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
# A log with no added events should only have the version entry.
self.assertEqual(len(self._log_data), 1)
version_event = self._log_data[0]
self.verifyCommonKeys(version_event, expected_event_name='version')
# Check for 'version' event specific fields.
self.assertIn('evt', version_event)
self.assertIn('exe', version_event)
# Verify "evt" version field is a string.
self.assertIsInstance(version_event['evt'], str)
def test_start_event(self):
"""Test and validate 'start' event data is valid.
Expected event log:
<version event>
<start event>
"""
self._event_log_module.StartEvent()
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 2)
start_event = self._log_data[1]
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
self.verifyCommonKeys(start_event, expected_event_name='start')
# Check for 'start' event specific fields.
self.assertIn('argv', start_event)
self.assertTrue(isinstance(start_event['argv'], list))
def test_exit_event_result_none(self):
"""Test 'exit' event data is valid when result is None.
We expect None result to be converted to 0 in the exit event data.
Expected event log:
<version event>
<exit event>
"""
self._event_log_module.ExitEvent(None)
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 2)
exit_event = self._log_data[1]
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
self.verifyCommonKeys(exit_event, expected_event_name='exit')
# Check for 'exit' event specific fields.
self.assertIn('code', exit_event)
# 'None' result should convert to 0 (successful) return code.
self.assertEqual(exit_event['code'], 0)
def test_exit_event_result_integer(self):
"""Test 'exit' event data is valid when result is an integer.
Expected event log:
<version event>
<exit event>
"""
self._event_log_module.ExitEvent(2)
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 2)
exit_event = self._log_data[1]
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
self.verifyCommonKeys(exit_event, expected_event_name='exit')
# Check for 'exit' event specific fields.
self.assertIn('code', exit_event)
self.assertEqual(exit_event['code'], 2)
def test_command_event(self):
"""Test and validate 'command' event data is valid.
Expected event log:
<version event>
<command event>
"""
name = 'repo'
subcommands = ['init' 'this']
self._event_log_module.CommandEvent(name='repo', subcommands=subcommands)
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 2)
command_event = self._log_data[1]
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
self.verifyCommonKeys(command_event, expected_event_name='command')
# Check for 'command' event specific fields.
self.assertIn('name', command_event)
self.assertIn('subcommands', command_event)
self.assertEqual(command_event['name'], name)
self.assertEqual(command_event['subcommands'], subcommands)
def test_def_params_event_repo_config(self):
"""Test 'def_params' event data outputs only repo config keys.
Expected event log:
<version event>
<def_param event>
<def_param event>
"""
config = {
'git.foo': 'bar',
'repo.partialclone': 'true',
'repo.partialclonefilter': 'blob:none',
}
self._event_log_module.DefParamRepoEvents(config)
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 3)
def_param_events = self._log_data[1:]
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
for event in def_param_events:
self.verifyCommonKeys(event, expected_event_name='def_param')
# Check for 'def_param' event specific fields.
self.assertIn('param', event)
self.assertIn('value', event)
self.assertTrue(event['param'].startswith('repo.'))
def test_def_params_event_no_repo_config(self):
"""Test 'def_params' event data won't output non-repo config keys.
Expected event log:
<version event>
"""
config = {
'git.foo': 'bar',
'git.core.foo2': 'baz',
}
self._event_log_module.DefParamRepoEvents(config)
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
log_path = self._event_log_module.Write(path=tempdir)
self._log_data = self.readLog(log_path)
self.assertEqual(len(self._log_data), 1)
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
def test_write_with_filename(self):
"""Test Write() with a path to a file exits with None."""
self.assertIsNone(self._event_log_module.Write(path='path/to/file'))
def test_write_with_git_config(self):
"""Test Write() uses the git config path when 'git config' call succeeds."""
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
with mock.patch.object(self._event_log_module,
'_GetEventTargetPath', return_value=tempdir):
self.assertEqual(os.path.dirname(self._event_log_module.Write()), tempdir)
def test_write_no_git_config(self):
"""Test Write() with no git config variable present exits with None."""
with mock.patch.object(self._event_log_module,
'_GetEventTargetPath', return_value=None):
self.assertIsNone(self._event_log_module.Write())
def test_write_non_string(self):
"""Test Write() with non-string type for |path| throws TypeError."""
with self.assertRaises(TypeError):
self._event_log_module.Write(path=1234)
if __name__ == '__main__':
unittest.main()

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,6 +16,8 @@
"""Unittests for the hooks.py module."""
from __future__ import print_function
import hooks
import unittest
@ -24,6 +28,7 @@ class RepoHookShebang(unittest.TestCase):
"""Lines w/out shebangs should be rejected."""
DATA = (
'',
'# -*- coding:utf-8 -*-\n',
'#\n# foo\n',
'# Bad shebang in script\n#!/foo\n'
)

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,11 +16,9 @@
"""Unittests for the manifest_xml.py module."""
from __future__ import print_function
import os
import platform
import re
import shutil
import tempfile
import unittest
import xml.dom.minidom
@ -26,105 +26,6 @@ import error
import manifest_xml
# Invalid paths that we don't want in the filesystem.
INVALID_FS_PATHS = (
'',
'.',
'..',
'../',
'./',
'.//',
'foo/',
'./foo',
'../foo',
'foo/./bar',
'foo/../../bar',
'/foo',
'./../foo',
'.git/foo',
# Check case folding.
'.GIT/foo',
'blah/.git/foo',
'.repo/foo',
'.repoconfig',
# Block ~ due to 8.3 filenames on Windows filesystems.
'~',
'foo~',
'blah/foo~',
# Block Unicode characters that get normalized out by filesystems.
u'foo\u200Cbar',
# Block newlines.
'f\n/bar',
'f\r/bar',
)
# Make sure platforms that use path separators (e.g. Windows) are also
# rejected properly.
if os.path.sep != '/':
INVALID_FS_PATHS += tuple(x.replace('/', os.path.sep) for x in INVALID_FS_PATHS)
def sort_attributes(manifest):
"""Sort the attributes of all elements alphabetically.
This is needed because different versions of the toxml() function from
xml.dom.minidom output the attributes of elements in different orders.
Before Python 3.8 they were output alphabetically; later versions preserve
the order specified by the user.
Args:
manifest: String containing an XML manifest.
Returns:
The XML manifest with the attributes of all elements sorted alphabetically.
"""
new_manifest = ''
# This will find every element in the XML manifest, whether they have
# attributes or not. This simplifies recreating the manifest below.
matches = re.findall(r'(<[/?]?[a-z-]+\s*)((?:\S+?="[^"]+"\s*?)*)(\s*[/?]?>)', manifest)
for head, attrs, tail in matches:
m = re.findall(r'\S+?="[^"]+"', attrs)
new_manifest += head + ' '.join(sorted(m)) + tail
return new_manifest
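As a rough usage example of the helper just defined (input and output values are illustrative), attribute order is normalized so the string comparisons in these tests stay stable across Python versions:
# Illustrative usage of sort_attributes() defined above.
print(sort_attributes('<project path="art" name="platform/art"/>'))
# -> <project name="platform/art" path="art"/>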
class ManifestParseTestCase(unittest.TestCase):
"""TestCase for parsing manifests."""
def setUp(self):
self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
self.repodir = os.path.join(self.tempdir, '.repo')
self.manifest_dir = os.path.join(self.repodir, 'manifests')
self.manifest_file = os.path.join(
self.repodir, manifest_xml.MANIFEST_FILE_NAME)
self.local_manifest_dir = os.path.join(
self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
os.mkdir(self.repodir)
os.mkdir(self.manifest_dir)
# The manifest parsing really wants a git repo currently.
gitdir = os.path.join(self.repodir, 'manifests.git')
os.mkdir(gitdir)
with open(os.path.join(gitdir, 'config'), 'w') as fp:
fp.write("""[remote "origin"]
url = https://localhost:0/manifest
""")
def tearDown(self):
shutil.rmtree(self.tempdir, ignore_errors=True)
def getXmlManifest(self, data):
"""Helper to initialize a manifest for testing."""
with open(self.manifest_file, 'w') as fp:
fp.write(data)
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
@staticmethod
def encodeXmlAttr(attr):
"""Encode |attr| using XML escape rules."""
return attr.replace('\r', '&#x000d;').replace('\n', '&#x000a;')
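For example, the escaping performed by encodeXmlAttr keeps embedded newlines representable inside an XML attribute (the value here is illustrative):
# Illustrative usage of encodeXmlAttr() defined above.
print(ManifestParseTestCase.encodeXmlAttr('bad\nname'))
# -> bad&#x000a;name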
class ManifestValidateFilePaths(unittest.TestCase):
"""Check _ValidateFilePaths helper.
@ -155,7 +56,36 @@ class ManifestValidateFilePaths(unittest.TestCase):
def test_bad_paths(self):
"""Make sure bad paths (src & dest) are rejected."""
for path in INVALID_FS_PATHS:
PATHS = (
'..',
'../',
'./',
'foo/',
'./foo',
'../foo',
'foo/./bar',
'foo/../../bar',
'/foo',
'./../foo',
'.git/foo',
# Check case folding.
'.GIT/foo',
'blah/.git/foo',
'.repo/foo',
'.repoconfig',
# Block ~ due to 8.3 filenames on Windows filesystems.
'~',
'foo~',
'blah/foo~',
# Block Unicode characters that get normalized out by filesystems.
u'foo\u200Cbar',
)
# Make sure platforms that use path separators (e.g. Windows) are also
# rejected properly.
if os.path.sep != '/':
PATHS += tuple(x.replace('/', os.path.sep) for x in PATHS)
for path in PATHS:
self.assertRaises(
error.ManifestInvalidPathError, self.check_both, path, 'a')
self.assertRaises(
@ -216,394 +146,3 @@ class ValueTests(unittest.TestCase):
with self.assertRaises(error.ManifestParseError):
node = self._get_node('<node a="xx"/>')
manifest_xml.XmlInt(node, 'a')
class XmlManifestTests(ManifestParseTestCase):
"""Check manifest processing."""
def test_empty(self):
"""Parse an 'empty' manifest file."""
manifest = self.getXmlManifest(
'<?xml version="1.0" encoding="UTF-8"?>'
'<manifest></manifest>')
self.assertEqual(manifest.remotes, {})
self.assertEqual(manifest.projects, [])
def test_link(self):
"""Verify Link handling with new names."""
manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
with open(os.path.join(self.manifest_dir, 'foo.xml'), 'w') as fp:
fp.write('<manifest></manifest>')
manifest.Link('foo.xml')
with open(self.manifest_file) as fp:
self.assertIn('<include name="foo.xml" />', fp.read())
def test_toxml_empty(self):
"""Verify the ToXml() helper."""
manifest = self.getXmlManifest(
'<?xml version="1.0" encoding="UTF-8"?>'
'<manifest></manifest>')
self.assertEqual(manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>')
def test_todict_empty(self):
"""Verify the ToDict() helper."""
manifest = self.getXmlManifest(
'<?xml version="1.0" encoding="UTF-8"?>'
'<manifest></manifest>')
self.assertEqual(manifest.ToDict(), {})
def test_repo_hooks(self):
"""Check repo-hooks settings."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="repohooks" path="src/repohooks"/>
<repo-hooks in-project="repohooks" enabled-list="a, b"/>
</manifest>
""")
self.assertEqual(manifest.repo_hooks_project.name, 'repohooks')
self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b'])
def test_unknown_tags(self):
"""Check superproject settings."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
<iankaz value="unknown (possible) future tags are ignored"/>
<x-custom-tag>X tags are always ignored</x-custom-tag>
</manifest>
""")
self.assertEqual(manifest.superproject['name'], 'superproject')
self.assertEqual(manifest.superproject['remote'].name, 'test-remote')
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="test-remote"/>'
'<default remote="test-remote" revision="refs/heads/main"/>'
'<superproject name="superproject"/>'
'</manifest>')
class IncludeElementTests(ManifestParseTestCase):
"""Tests for <include>."""
def test_group_levels(self):
root_m = os.path.join(self.manifest_dir, 'root.xml')
with open(root_m, 'w') as fp:
fp.write("""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<include name="level1.xml" groups="level1-group" />
<project name="root-name1" path="root-path1" />
<project name="root-name2" path="root-path2" groups="r2g1,r2g2" />
</manifest>
""")
with open(os.path.join(self.manifest_dir, 'level1.xml'), 'w') as fp:
fp.write("""
<manifest>
<include name="level2.xml" groups="level2-group" />
<project name="level1-name1" path="level1-path1" />
</manifest>
""")
with open(os.path.join(self.manifest_dir, 'level2.xml'), 'w') as fp:
fp.write("""
<manifest>
<project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" />
</manifest>
""")
include_m = manifest_xml.XmlManifest(self.repodir, root_m)
for proj in include_m.projects:
if proj.name == 'root-name1':
# Check include group not set on root level proj.
self.assertNotIn('level1-group', proj.groups)
if proj.name == 'root-name2':
# Check root proj group not removed.
self.assertIn('r2g1', proj.groups)
if proj.name == 'level1-name1':
# Check level1 proj has inherited group level 1.
self.assertIn('level1-group', proj.groups)
if proj.name == 'level2-name1':
# Check level2 proj has inherited group levels 1 and 2.
self.assertIn('level1-group', proj.groups)
self.assertIn('level2-group', proj.groups)
# Check level2 proj group not removed.
self.assertIn('l2g1', proj.groups)
def test_allow_bad_name_from_user(self):
"""Check handling of bad name attribute from the user's input."""
def parse(name):
name = self.encodeXmlAttr(name)
manifest = self.getXmlManifest(f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<include name="{name}" />
</manifest>
""")
# Force the manifest to be parsed.
manifest.ToXml()
# Setup target of the include.
target = os.path.join(self.tempdir, 'target.xml')
with open(target, 'w') as fp:
fp.write('<manifest></manifest>')
# Include with absolute path.
parse(os.path.abspath(target))
# Include with relative path.
parse(os.path.relpath(target, self.manifest_dir))
def test_bad_name_checks(self):
"""Check handling of bad name attribute."""
def parse(name):
name = self.encodeXmlAttr(name)
# Setup target of the include.
with open(os.path.join(self.manifest_dir, 'target.xml'), 'w') as fp:
fp.write(f'<manifest><include name="{name}"/></manifest>')
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<include name="target.xml" />
</manifest>
""")
# Force the manifest to be parsed.
manifest.ToXml()
# Handle empty name explicitly because a different codepath rejects it.
with self.assertRaises(error.ManifestParseError):
parse('')
for path in INVALID_FS_PATHS:
if not path:
continue
with self.assertRaises(error.ManifestInvalidPathError):
parse(path)
class ProjectElementTests(ManifestParseTestCase):
"""Tests for <project>."""
def test_group(self):
"""Check project group settings."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="test-name" path="test-path"/>
<project name="extras" path="path" groups="g1,g2,g1"/>
</manifest>
""")
self.assertEqual(len(manifest.projects), 2)
# Ordering isn't guaranteed.
result = {
manifest.projects[0].name: manifest.projects[0].groups,
manifest.projects[1].name: manifest.projects[1].groups,
}
project = manifest.projects[0]
self.assertCountEqual(
result['test-name'],
['name:test-name', 'all', 'path:test-path'])
self.assertCountEqual(
result['extras'],
['g1', 'g2', 'g1', 'name:extras', 'all', 'path:path'])
groupstr = 'default,platform-' + platform.system().lower()
self.assertEqual(groupstr, manifest.GetGroupsStr())
groupstr = 'g1,g2,g1'
manifest.manifestProject.config.SetString('manifest.groups', groupstr)
self.assertEqual(groupstr, manifest.GetGroupsStr())
def test_set_revision_id(self):
"""Check setting of project's revisionId."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="test-name"/>
</manifest>
""")
self.assertEqual(len(manifest.projects), 1)
project = manifest.projects[0]
project.SetRevisionId('ABCDEF')
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project name="test-name" revision="ABCDEF"/>'
'</manifest>')
def test_trailing_slash(self):
"""Check handling of trailing slashes in attributes."""
def parse(name, path):
name = self.encodeXmlAttr(name)
path = self.encodeXmlAttr(path)
return self.getXmlManifest(f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
""")
manifest = parse('a/path/', 'foo')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
manifest = parse('a/path', 'foo/')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
manifest = parse('a/path', 'foo//////')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
def test_toplevel_path(self):
"""Check handling of path=. specially."""
def parse(name, path):
name = self.encodeXmlAttr(name)
path = self.encodeXmlAttr(path)
return self.getXmlManifest(f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
""")
for path in ('.', './', './/', './//'):
manifest = parse('server/path', path)
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/..git'))
def test_bad_path_name_checks(self):
"""Check handling of bad path & name attributes."""
def parse(name, path):
name = self.encodeXmlAttr(name)
path = self.encodeXmlAttr(path)
manifest = self.getXmlManifest(f"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="{name}" path="{path}" />
</manifest>
""")
# Force the manifest to be parsed.
manifest.ToXml()
# Verify the parser is valid by default to avoid buggy tests below.
parse('ok', 'ok')
# Handle empty name explicitly because a different codepath rejects it.
# Empty path is OK because it defaults to the name field.
with self.assertRaises(error.ManifestParseError):
parse('', 'ok')
for path in INVALID_FS_PATHS:
if not path or path.endswith('/'):
continue
with self.assertRaises(error.ManifestInvalidPathError):
parse(path, 'ok')
# We have a dedicated test for path=".".
if path not in {'.'}:
with self.assertRaises(error.ManifestInvalidPathError):
parse('ok', path)
class SuperProjectElementTests(ManifestParseTestCase):
"""Tests for <superproject>."""
def test_superproject(self):
"""Check superproject settings."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<superproject name="superproject"/>
</manifest>
""")
self.assertEqual(manifest.superproject['name'], 'superproject')
self.assertEqual(manifest.superproject['remote'].name, 'test-remote')
self.assertEqual(manifest.superproject['remote'].url, 'http://localhost/superproject')
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="test-remote"/>'
'<default remote="test-remote" revision="refs/heads/main"/>'
'<superproject name="superproject"/>'
'</manifest>')
def test_remote(self):
"""Check superproject settings with a remote."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<remote name="superproject-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="platform/superproject" remote="superproject-remote"/>
</manifest>
""")
self.assertEqual(manifest.superproject['name'], 'platform/superproject')
self.assertEqual(manifest.superproject['remote'].name, 'superproject-remote')
self.assertEqual(manifest.superproject['remote'].url, 'http://localhost/platform/superproject')
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<remote fetch="http://localhost" name="superproject-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<superproject name="platform/superproject" remote="superproject-remote"/>'
'</manifest>')
def test_defalut_remote(self):
"""Check superproject settings with a default remote."""
manifest = self.getXmlManifest("""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<superproject name="superproject" remote="default-remote"/>
</manifest>
""")
self.assertEqual(manifest.superproject['name'], 'superproject')
self.assertEqual(manifest.superproject['remote'].name, 'default-remote')
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<superproject name="superproject"/>'
'</manifest>')
class ContactinfoElementTests(ManifestParseTestCase):
"""Tests for <contactinfo>."""
def test_contactinfo(self):
"""Check contactinfo settings."""
bugurl = 'http://localhost/contactinfo'
manifest = self.getXmlManifest(f"""
<manifest>
<contactinfo bugurl="{bugurl}"/>
</manifest>
""")
self.assertEqual(manifest.contactinfo.bugurl, bugurl)
self.assertEqual(
manifest.ToXml().toxml(),
'<?xml version="1.0" ?><manifest>'
f'<contactinfo bugurl="{bugurl}"/>'
'</manifest>')

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,6 +16,8 @@
"""Unittests for the project.py module."""
from __future__ import print_function
import contextlib
import os
import shutil
@ -22,7 +26,6 @@ import tempfile
import unittest
import error
import git_command
import git_config
import platform_utils
import project
@ -35,19 +38,7 @@ def TempGitTree():
# Python 2 support entirely.
try:
tempdir = tempfile.mkdtemp(prefix='repo-tests')
# Tests need to assume that main is the default branch at init,
# which is not supported in config until 2.28.
cmd = ['git', 'init']
if git_command.git_require((2, 28, 0)):
cmd += ['--initial-branch=main']
else:
# Use template dir for init.
templatedir = tempfile.mkdtemp(prefix='.test-template')
with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
fp.write('ref: refs/heads/main\n')
cmd += ['--template', templatedir]
subprocess.check_call(cmd, cwd=tempdir)
subprocess.check_call(['git', 'init'], cwd=tempdir)
yield tempdir
finally:
platform_utils.rmtree(tempdir)
@ -86,7 +77,7 @@ class ReviewableBranchTests(unittest.TestCase):
# Start off with the normal details.
rb = project.ReviewableBranch(
fakeproj, fakeproj.config.GetBranch('work'), 'main')
fakeproj, fakeproj.config.GetBranch('work'), 'master')
self.assertEqual('work', rb.name)
self.assertEqual(1, len(rb.commits))
self.assertIn('Del file', rb.commits[0])
@ -99,9 +90,9 @@ class ReviewableBranchTests(unittest.TestCase):
self.assertTrue(rb.date)
# Now delete the tracking branch!
fakeproj.work_git.branch('-D', 'main')
fakeproj.work_git.branch('-D', 'master')
rb = project.ReviewableBranch(
fakeproj, fakeproj.config.GetBranch('work'), 'main')
fakeproj, fakeproj.config.GetBranch('work'), 'master')
self.assertEqual(0, len(rb.commits))
self.assertFalse(rb.base_exists)
# Hard to assert anything useful about this.

View File

@ -1,74 +0,0 @@
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the ssh.py module."""
import multiprocessing
import subprocess
import unittest
from unittest import mock
import ssh
class SshTests(unittest.TestCase):
"""Tests the ssh functions."""
def test_parse_ssh_version(self):
"""Check _parse_ssh_version() handling."""
ver = ssh._parse_ssh_version('Unknown\n')
self.assertEqual(ver, ())
ver = ssh._parse_ssh_version('OpenSSH_1.0\n')
self.assertEqual(ver, (1, 0))
ver = ssh._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
self.assertEqual(ver, (6, 6, 1))
ver = ssh._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
self.assertEqual(ver, (7, 6))
def test_version(self):
"""Check version() handling."""
with mock.patch('ssh._run_ssh_version', return_value='OpenSSH_1.2\n'):
self.assertEqual(ssh.version(), (1, 2))
def test_context_manager_empty(self):
"""Verify context manager with no clients works correctly."""
with multiprocessing.Manager() as manager:
with ssh.ProxyManager(manager):
pass
def test_context_manager_child_cleanup(self):
"""Verify orphaned clients & masters get cleaned up."""
with multiprocessing.Manager() as manager:
with ssh.ProxyManager(manager) as ssh_proxy:
client = subprocess.Popen(['sleep', '964853320'])
ssh_proxy.add_client(client)
master = subprocess.Popen(['sleep', '964853321'])
ssh_proxy.add_master(master)
# If the process still exists, these will throw timeout errors.
client.wait(0)
master.wait(0)
def test_ssh_sock(self):
"""Check sock() function."""
manager = multiprocessing.Manager()
proxy = ssh.ProxyManager(manager)
with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
# old ssh version uses port
with mock.patch('ssh.version', return_value=(6, 6)):
self.assertTrue(proxy.sock().endswith('%p'))
proxy._sock_path = None
# new ssh version uses hash
with mock.patch('ssh.version', return_value=(6, 7)):
self.assertTrue(proxy.sock().endswith('%C'))

View File

@ -14,7 +14,6 @@
"""Unittests for the subcmds module (mostly __init__.py than subcommands)."""
import optparse
import unittest
import subcmds
@ -42,32 +41,3 @@ class AllCommands(unittest.TestCase):
# Reject internal python paths like "__init__".
self.assertFalse(cmd.startswith('__'))
def test_help_desc_style(self):
"""Force some consistency in option descriptions.
Python's optparse & argparse have a few default options like --help. Their
option description text uses lowercase sentence fragments, so we enforce the
same style in our options so the UI is consistent.
We enforce:
* Text starts with lowercase.
* Text doesn't end with period.
"""
for name, cls in subcmds.all_commands.items():
cmd = cls()
parser = cmd.OptionParser
for option in parser.option_list:
if option.help == optparse.SUPPRESS_HELP:
continue
c = option.help[0]
self.assertEqual(
c.lower(), c,
msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
f'should start with lowercase: "{option.help}"')
self.assertNotEqual(
option.help[-1], '.',
msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
f'should not end in a period: "{option.help}"')

View File

@ -38,7 +38,7 @@ class InitCommand(unittest.TestCase):
"""Check invalid command line options."""
ARGV = (
# Too many arguments.
['url', 'asdf'],
['asdf'],
# Conflicting options.
['--mirror', '--archive'],

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,22 +16,28 @@
"""Unittests for the wrapper.py module."""
from __future__ import print_function
import contextlib
from io import StringIO
import os
import re
import shutil
import sys
import tempfile
import unittest
from unittest import mock
import git_command
import main
import platform_utils
from pyversion import is_python3
import wrapper
if is_python3():
from unittest import mock
from io import StringIO
else:
import mock
from StringIO import StringIO
@contextlib.contextmanager
def TemporaryDirectory():
"""Create a new empty git checkout for testing."""
@ -56,6 +64,9 @@ class RepoWrapperTestCase(unittest.TestCase):
wrapper._wrapper_module = None
self.wrapper = wrapper.Wrapper()
if not is_python3():
self.assertRegex = self.assertRegexpMatches
class RepoWrapperUnitTest(RepoWrapperTestCase):
"""Tests helper functions in the repo wrapper
@ -71,16 +82,6 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
self.assertEqual('', stderr.getvalue())
self.assertIn('repo launcher version', stdout.getvalue())
def test_python_constraints(self):
"""The launcher should never require newer than main.py."""
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
wrapper.MIN_PYTHON_VERSION_HARD)
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
wrapper.MIN_PYTHON_VERSION_SOFT)
# Make sure the versions are themselves in sync.
self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
wrapper.MIN_PYTHON_VERSION_HARD)
def test_init_parser(self):
"""Make sure 'init' GetParser works."""
parser = self.wrapper.GetParser(gitc_init=False)
@ -256,81 +257,6 @@ class CheckGitVersion(RepoWrapperTestCase):
self.wrapper._CheckGitVersion()
class Requirements(RepoWrapperTestCase):
"""Check Requirements handling."""
def test_missing_file(self):
"""Don't crash if the file is missing (old version)."""
testdir = os.path.dirname(os.path.realpath(__file__))
self.assertIsNone(self.wrapper.Requirements.from_dir(testdir))
self.assertIsNone(self.wrapper.Requirements.from_file(
os.path.join(testdir, 'xxxxxxxxxxxxxxxxxxxxxxxx')))
def test_corrupt_data(self):
"""If the file can't be parsed, don't blow up."""
self.assertIsNone(self.wrapper.Requirements.from_file(__file__))
self.assertIsNone(self.wrapper.Requirements.from_data(b'x'))
def test_valid_data(self):
"""Make sure we can parse the file we ship."""
self.assertIsNotNone(self.wrapper.Requirements.from_data(b'{}'))
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
self.assertIsNotNone(self.wrapper.Requirements.from_dir(rootdir))
self.assertIsNotNone(self.wrapper.Requirements.from_file(os.path.join(
rootdir, 'requirements.json')))
def test_format_ver(self):
"""Check format_ver can format."""
self.assertEqual('1.2.3', self.wrapper.Requirements._format_ver((1, 2, 3)))
self.assertEqual('1', self.wrapper.Requirements._format_ver([1]))
def test_assert_all_unknown(self):
"""Check assert_all works with incompatible file."""
reqs = self.wrapper.Requirements({})
reqs.assert_all()
def test_assert_all_new_repo(self):
"""Check assert_all accepts new enough repo."""
reqs = self.wrapper.Requirements({'repo': {'hard': [1, 0]}})
reqs.assert_all()
def test_assert_all_old_repo(self):
"""Check assert_all rejects old repo."""
reqs = self.wrapper.Requirements({'repo': {'hard': [99999, 0]}})
with self.assertRaises(SystemExit):
reqs.assert_all()
def test_assert_all_new_python(self):
"""Check assert_all accepts new enough python."""
reqs = self.wrapper.Requirements({'python': {'hard': sys.version_info}})
reqs.assert_all()
def test_assert_all_old_python(self):
"""Check assert_all rejects old python."""
reqs = self.wrapper.Requirements({'python': {'hard': [99999, 0]}})
with self.assertRaises(SystemExit):
reqs.assert_all()
def test_assert_ver_unknown(self):
"""Check assert_ver works with incompatible file."""
reqs = self.wrapper.Requirements({})
reqs.assert_ver('xxx', (1, 0))
def test_assert_ver_new(self):
"""Check assert_ver allows new enough versions."""
reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}})
reqs.assert_ver('git', (1, 0))
reqs.assert_ver('git', (1, 5))
reqs.assert_ver('git', (2, 0))
reqs.assert_ver('git', (2, 5))
def test_assert_ver_old(self):
"""Check assert_ver rejects old versions."""
reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}})
with self.assertRaises(SystemExit):
reqs.assert_ver('git', (0, 5))
class NeedSetupGnuPG(RepoWrapperTestCase):
"""Check NeedSetupGnuPG behavior."""
@ -431,19 +357,7 @@ class GitCheckoutTestCase(RepoWrapperTestCase):
remote = os.path.join(cls.GIT_DIR, 'remote')
os.mkdir(remote)
# Tests need to assume that main is the default branch at init,
# which is not supported in config until 2.28.
if git_command.git_require((2, 28, 0)):
initstr = '--initial-branch=main'
else:
# Use template dir for init.
templatedir = tempfile.mkdtemp(prefix='.test-template')
with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
fp.write('ref: refs/heads/main\n')
initstr = '--template=' + templatedir
run_git('init', initstr, cwd=remote)
run_git('init', cwd=remote)
run_git('commit', '--allow-empty', '-minit', cwd=remote)
run_git('branch', 'stable', cwd=remote)
run_git('tag', 'v1.0', cwd=remote)
@ -488,8 +402,8 @@ class ResolveRepoRev(GitCheckoutTestCase):
self.assertEqual('refs/heads/stable', rrev)
self.assertEqual(self.REV_LIST[1], lrev)
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'main')
self.assertEqual('refs/heads/main', rrev)
rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'master')
self.assertEqual('refs/heads/master', rrev)
self.assertEqual(self.REV_LIST[0], lrev)
def test_tag_name(self):

View File

@ -15,15 +15,13 @@
# https://tox.readthedocs.io/
[tox]
envlist = py35, py36, py37, py38, py39
envlist = py36, py37, py38
[gh-actions]
python =
3.5: py35
3.6: py36
3.7: py37
3.8: py38
3.9: py39
[testenv]
deps = pytest

View File

@ -1,3 +1,5 @@
# -*- coding:utf-8 -*-
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
try:
from importlib.machinery import SourceFileLoader
_loader = lambda *args: SourceFileLoader(*args).load_module()