Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00)

Compare commits: 42 commits
SHA1:
a621254b26, f159ce0f9e, 802cd0c601, 100a214315, 8051cdb629, 43549d8d08,
55b7125d6a, d793553804, ea5239ddd9, 1b8714937c, 50a2c0e368, 35af2f8daf,
e287fa760b, 3593a10643, 003684b6e5, 0297f8312c, 7b3afcab7a, eda6b1ead7,
4364a79088, a98a5ebc6d, f8d342beac, 6d2e8c8237, a24185ee6c, d686365449,
d3cadf1856, fa90f7a36f, bee4efb874, f8af33c9f0, ed25be569e, afd767103e,
b240d28bc0, 47020ba249, 5ed8c63942, 24c6314fca, 7efab539f0, a3ff64cae5,
776138a938, 5fb9c6a5b3, 859d3d9580, fa8d939c8f, a6c52f566a, 0d130d2da0
.github/workflows/test-ci.yml (vendored, 8 changed lines)

@@ -14,18 +14,18 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
     runs-on: ${{ matrix.os }}

     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        pip install tox tox-gh-actions
+        python -m pip install tox tox-gh-actions
     - name: Test with tox
       run: tox
@@ -143,23 +143,14 @@ internal processes for accessing the restricted keys.

***

```sh
-# Set the gpg key directory.
-$ export GNUPGHOME=~/.gnupg/repo/
-
-# Verify the listed key is “Repo Maintainer”.
-$ gpg -K
-
-# Pick whatever branch or commit you want to tag.
-$ r=main
-
 # Pick the new version.
-$ t=1.12.10
+$ t=v2.30

-# Create the signed tag.
-$ git tag -s v$t -u "Repo Maintainer <repo@android.kernel.org>" -m "repo $t" $r
+# Create a new signed tag with the current HEAD.
+$ ./release/sign-tag.py $t

 # Verify the signed tag.
-$ git show v$t
+$ git show $t
```

### Push the new release

@@ -168,11 +159,11 @@ Once you're ready to make the release available to everyone, push it to the
`stable` branch.

Make sure you never push the tag itself to the stable branch!
-Only push the commit -- notice the use of `$t` and `$r` below.
+Only push the commit -- note the use of `^0` below.

```sh
-$ git push https://gerrit-review.googlesource.com/git-repo v$t
-$ git push https://gerrit-review.googlesource.com/git-repo $r:stable
+$ git push https://gerrit-review.googlesource.com/git-repo $t
+$ git push https://gerrit-review.googlesource.com/git-repo $t^0:stable
```

If something goes horribly wrong, you can force push the previous version to the

@@ -195,7 +186,9 @@ You can create a short changelog using the command:

```sh
 # If you haven't pushed to the stable branch yet, you can use origin/stable.
 # If you have pushed, change origin/stable to the previous release tag.
-$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
+# This assumes "main" is the current tagged release. If it's newer, change it
+# to the current release tag too.
+$ git log --format="%h (%aN) %s" --no-merges origin/stable..main
```

## Project References
git_command.py (130 changed lines)

@@ -16,6 +16,7 @@ import functools
 import os
 import sys
 import subprocess
+from typing import Any, Optional

 from error import GitError
 from git_refs import HEAD

@@ -157,6 +158,53 @@ def git_require(min_version, fail=False, msg=''):
   return False


+def _build_env(
+    _kwargs_only=(),
+    bare: Optional[bool] = False,
+    disable_editor: Optional[bool] = False,
+    ssh_proxy: Optional[Any] = None,
+    gitdir: Optional[str] = None,
+    objdir: Optional[str] = None
+):
+  """Constructs an env dict for command execution."""
+
+  assert _kwargs_only == (), '_build_env only accepts keyword arguments.'
+
+  env = GitCommand._GetBasicEnv()
+
+  if disable_editor:
+    env['GIT_EDITOR'] = ':'
+  if ssh_proxy:
+    env['REPO_SSH_SOCK'] = ssh_proxy.sock()
+    env['GIT_SSH'] = ssh_proxy.proxy
+    env['GIT_SSH_VARIANT'] = 'ssh'
+  if 'http_proxy' in env and 'darwin' == sys.platform:
+    s = "'http.proxy=%s'" % (env['http_proxy'],)
+    p = env.get('GIT_CONFIG_PARAMETERS')
+    if p is not None:
+      s = p + ' ' + s
+    env['GIT_CONFIG_PARAMETERS'] = s
+  if 'GIT_ALLOW_PROTOCOL' not in env:
+    env['GIT_ALLOW_PROTOCOL'] = (
+        'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
+  env['GIT_HTTP_USER_AGENT'] = user_agent.git
+
+  if objdir:
+    # Set to the place we want to save the objects.
+    env['GIT_OBJECT_DIRECTORY'] = objdir
+
+    alt_objects = os.path.join(gitdir, 'objects') if gitdir else None
+    if (alt_objects and
+        os.path.realpath(alt_objects) != os.path.realpath(objdir)):
+      # Allow git to search the original place in case of local or unique refs
+      # that git will attempt to resolve even if we aren't fetching them.
+      env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_objects
+  if bare and gitdir is not None:
+    env[GIT_DIR] = gitdir
+
+  return env
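As a quick orientation, here is a minimal sketch of how the new `_build_env` helper can be exercised on its own; it mirrors the unit tests added near the end of this change, and the `zap`/`wow` directory names are just placeholders.

```python
import os

import git_command

# Matching gitdir/objdir: only GIT_OBJECT_DIRECTORY is set.
env = git_command._build_env(objdir=os.path.join('zap', 'objects'), gitdir='zap')
assert env.get('GIT_OBJECT_DIRECTORY') == os.path.join('zap', 'objects')
assert env.get('GIT_ALTERNATE_OBJECT_DIRECTORIES') is None

# Different gitdir/objdir: the gitdir's objects become an alternate object store.
env = git_command._build_env(objdir=os.path.join('wow', 'objects'), gitdir='zap')
assert env.get('GIT_ALTERNATE_OBJECT_DIRECTORIES') == os.path.join('zap', 'objects')
```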
class GitCommand(object):
  """Wrapper around a single git invocation."""

@@ -173,30 +221,13 @@ class GitCommand(object):
                cwd=None,
                gitdir=None,
                objdir=None):
-    env = self._GetBasicEnv()
-
-    if disable_editor:
-      env['GIT_EDITOR'] = ':'
-    if ssh_proxy:
-      env['REPO_SSH_SOCK'] = ssh_proxy.sock()
-      env['GIT_SSH'] = ssh_proxy.proxy
-      env['GIT_SSH_VARIANT'] = 'ssh'
-    if 'http_proxy' in env and 'darwin' == sys.platform:
-      s = "'http.proxy=%s'" % (env['http_proxy'],)
-      p = env.get('GIT_CONFIG_PARAMETERS')
-      if p is not None:
-        s = p + ' ' + s
-      env['GIT_CONFIG_PARAMETERS'] = s
-    if 'GIT_ALLOW_PROTOCOL' not in env:
-      env['GIT_ALLOW_PROTOCOL'] = (
-          'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
-    env['GIT_HTTP_USER_AGENT'] = user_agent.git
-
     if project:
       if not cwd:
         cwd = project.worktree
       if not gitdir:
         gitdir = project.gitdir

     # Git on Windows wants its paths only using / for reliability.
     if platform_utils.isWindows():
       if objdir:
@@ -204,18 +235,16 @@ class GitCommand(object):
       if gitdir:
         gitdir = gitdir.replace('\\', '/')

-    if objdir:
-      # Set to the place we want to save the objects.
-      env['GIT_OBJECT_DIRECTORY'] = objdir
-    if gitdir:
-      # Allow git to search the original place in case of local or unique refs
-      # that git will attempt to resolve even if we aren't fetching them.
-      env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = gitdir + '/objects'
+    env = _build_env(
+        disable_editor=disable_editor,
+        ssh_proxy=ssh_proxy,
+        objdir=objdir,
+        gitdir=gitdir,
+        bare=bare,
+    )

     command = [GIT]
     if bare:
-      if gitdir:
-        env[GIT_DIR] = gitdir
       cwd = None
     command.append(cmdv[0])
     # Need to use the --progress flag for fetch/clone so output will be
@@ -230,12 +259,11 @@ class GitCommand(object):
     stderr = (subprocess.STDOUT if merge_output else
               (subprocess.PIPE if capture_stderr else None))

+    dbg = ''
     if IsTrace():
       global LAST_CWD
       global LAST_GITDIR

-      dbg = ''
-
       if cwd and LAST_CWD != cwd:
         if LAST_GITDIR or LAST_CWD:
           dbg += '\n'
@@ -263,31 +291,31 @@ class GitCommand(object):
         dbg += ' 2>|'
       elif stderr == subprocess.STDOUT:
         dbg += ' 2>&1'
-      Trace('%s', dbg)
-
-    try:
-      p = subprocess.Popen(command,
-                           cwd=cwd,
-                           env=env,
-                           encoding='utf-8',
-                           errors='backslashreplace',
-                           stdin=stdin,
-                           stdout=stdout,
-                           stderr=stderr)
-    except Exception as e:
-      raise GitError('%s: %s' % (command[1], e))
+    with Trace('git command %s %s with debug: %s', LAST_GITDIR, command, dbg):
+      try:
+        p = subprocess.Popen(command,
+                             cwd=cwd,
+                             env=env,
+                             encoding='utf-8',
+                             errors='backslashreplace',
+                             stdin=stdin,
+                             stdout=stdout,
+                             stderr=stderr)
+      except Exception as e:
+        raise GitError('%s: %s' % (command[1], e))

-    if ssh_proxy:
-      ssh_proxy.add_client(p)
-
-    self.process = p
-
-    try:
-      self.stdout, self.stderr = p.communicate(input=input)
-    finally:
-      if ssh_proxy:
-        ssh_proxy.remove_client(p)
-      self.rc = p.wait()
+      if ssh_proxy:
+        ssh_proxy.add_client(p)
+
+      self.process = p
+
+      try:
+        self.stdout, self.stderr = p.communicate(input=input)
+      finally:
+        if ssh_proxy:
+          ssh_proxy.remove_client(p)
+        self.rc = p.wait()

   @staticmethod
   def _GetBasicEnv():
@@ -22,6 +22,7 @@ import re
 import ssl
 import subprocess
 import sys
+from typing import Union
 import urllib.error
 import urllib.request

@@ -117,7 +118,7 @@ class GitConfig(object):
       return self.defaults.Has(name, include_defaults=True)
     return False

-  def GetInt(self, name):
+  def GetInt(self, name: str) -> Union[int, None]:
     """Returns an integer from the configuration file.

     This follows the git config syntax.
@@ -126,7 +127,7 @@ class GitConfig(object):
       name: The key to lookup.

     Returns:
-      None if the value was not defined, or is not a boolean.
+      None if the value was not defined, or is not an int.
       Otherwise, the number itself.
     """
     v = self.GetString(name)
@@ -152,6 +153,9 @@ class GitConfig(object):
     try:
       return int(v, base=base) * mult
     except ValueError:
+      print(
+          f"warning: expected {name} to represent an integer, got {v} instead",
+          file=sys.stderr)
       return None
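The new warning fires when the value cannot be parsed even after git's size-suffix handling (the surrounding, unchanged code computes `base` and `mult`). As a rough standalone illustration of that syntax, not the repo implementation itself, here is a sketch that applies the same k/m/g scaling:

```python
def parse_git_int(value):
  """Sketch of git-config integers: optional k/m/g suffix scales by powers of 1024."""
  suffixes = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}
  mult = 1
  if value and value[-1].lower() in suffixes:
    mult = suffixes[value[-1].lower()]
    value = value[:-1]
  try:
    return int(value, base=0) * mult  # base=0 honors 0x.. and 0.. prefixes
  except ValueError:
    return None  # the case the new warning reports

print(parse_git_int('10k'))  # 10240
print(parse_git_int('ten'))  # None
```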
   def DumpConfigDict(self):
@@ -169,7 +173,7 @@ class GitConfig(object):
       config_dict[key] = self.GetString(key)
     return config_dict

-  def GetBoolean(self, name):
+  def GetBoolean(self, name: str) -> Union[str, None]:
     """Returns a boolean from the configuration file.
       None : The value was not defined, or is not a boolean.
       True : The value was set to true or yes.
@@ -183,6 +187,8 @@ class GitConfig(object):
       return True
     if v in ('false', 'no'):
       return False
+    print(f"warning: expected {name} to represent a boolean, got {v} instead",
+          file=sys.stderr)
     return None

   def SetBoolean(self, name, value):
@@ -191,7 +197,7 @@ class GitConfig(object):
       value = 'true' if value else 'false'
     self.SetString(name, value)

-  def GetString(self, name, all_keys=False):
+  def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
     """Get the first value for a key, or None if it is not defined.

     This configuration file is used first, if the key is not
@@ -219,8 +225,8 @@ class GitConfig(object):
     """Set the value(s) for a key.
     Only this configuration file is modified.

-    The supplied value should be either a string,
-    or a list of strings (to store multiple values).
+    The supplied value should be either a string, or a list of strings (to
+    store multiple values), or None (to delete the key).
     """
     key = _key(name)

@@ -349,9 +355,9 @@ class GitConfig(object):
     except OSError:
       return None
     try:
-      Trace(': parsing %s', self.file)
-      with open(self._json) as fd:
-        return json.load(fd)
+      with Trace(': parsing %s', self.file):
+        with open(self._json) as fd:
+          return json.load(fd)
     except (IOError, ValueError):
       platform_utils.remove(self._json, missing_ok=True)
       return None
git_refs.py (51 changed lines)

@@ -67,38 +67,37 @@ class GitRefs(object):
     self._LoadAll()

   def _NeedUpdate(self):
-    Trace(': scan refs %s', self._gitdir)
-
-    for name, mtime in self._mtime.items():
-      try:
-        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
-          return True
-      except OSError:
-        return True
-    return False
+    with Trace(': scan refs %s', self._gitdir):
+      for name, mtime in self._mtime.items():
+        try:
+          if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
+            return True
+        except OSError:
+          return True
+      return False

   def _LoadAll(self):
-    Trace(': load refs %s', self._gitdir)
+    with Trace(': load refs %s', self._gitdir):

-    self._phyref = {}
-    self._symref = {}
-    self._mtime = {}
+      self._phyref = {}
+      self._symref = {}
+      self._mtime = {}

-    self._ReadPackedRefs()
-    self._ReadLoose('refs/')
-    self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
+      self._ReadPackedRefs()
+      self._ReadLoose('refs/')
+      self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)

-    scan = self._symref
-    attempts = 0
-    while scan and attempts < 5:
-      scan_next = {}
-      for name, dest in scan.items():
-        if dest in self._phyref:
-          self._phyref[name] = self._phyref[dest]
-        else:
-          scan_next[name] = dest
-      scan = scan_next
-      attempts += 1
+      scan = self._symref
+      attempts = 0
+      while scan and attempts < 5:
+        scan_next = {}
+        for name, dest in scan.items():
+          if dest in self._phyref:
+            self._phyref[name] = self._phyref[dest]
+          else:
+            scan_next[name] = dest
+        scan = scan_next
+        attempts += 1

   def _ReadPackedRefs(self):
     path = os.path.join(self._gitdir, 'packed-refs')
@@ -110,7 +110,7 @@ class EventLog(object):
     return {
         'event': event_name,
         'sid': self._full_sid,
-        'thread': threading.currentThread().getName(),
+        'thread': threading.current_thread().name,
         'time': datetime.datetime.utcnow().isoformat() + 'Z',
     }
main.py (40 changed lines)

@@ -37,7 +37,7 @@ except ImportError:

 from color import SetDefaultColoring
 import event_log
-from repo_trace import SetTrace
+from repo_trace import SetTrace, Trace, SetTraceToStderr
 from git_command import user_agent
 from git_config import RepoConfig
 from git_trace2_event_log import EventLog
@@ -109,6 +109,9 @@ global_options.add_option('--color',
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution (REPO_TRACE=1)')
+global_options.add_option('--trace-to-stderr',
+                          dest='trace_to_stderr', action='store_true',
+                          help='trace outputs go to stderr in addition to .repo/TRACE_FILE')
 global_options.add_option('--trace-python',
                           dest='trace_python', action='store_true',
                           help='trace python command execution')
@@ -198,9 +201,6 @@ class _Repo(object):
     """Execute the requested subcommand."""
     result = 0

-    if gopts.trace:
-      SetTrace()
-
     # Handle options that terminate quickly first.
     if gopts.help or gopts.help_all:
       self._PrintHelp(short=False, all_commands=gopts.help_all)
@@ -216,6 +216,21 @@ class _Repo(object):
       self._PrintHelp(short=True)
       return 1

+    run = lambda: self._RunLong(name, gopts, argv) or 0
+    with Trace('starting new command: %s', ', '.join([name] + argv),
+               first_trace=True):
+      if gopts.trace_python:
+        import trace
+        tracer = trace.Trace(count=False, trace=True, timing=True,
+                             ignoredirs=set(sys.path[1:]))
+        result = tracer.runfunc(run)
+      else:
+        result = run()
+    return result
+
+  def _RunLong(self, name, gopts, argv):
+    """Execute the (longer running) requested subcommand."""
+    result = 0
     SetDefaultColoring(gopts.color)

     git_trace2_event_log = EventLog()
@@ -652,17 +667,18 @@ def _Main(argv):
     Version.wrapper_path = opt.wrapper_path

   repo = _Repo(opt.repodir)

   try:
     init_http()
     name, gopts, argv = repo._ParseArgs(argv)
-    run = lambda: repo._Run(name, gopts, argv) or 0
-    if gopts.trace_python:
-      import trace
-      tracer = trace.Trace(count=False, trace=True, timing=True,
-                           ignoredirs=set(sys.path[1:]))
-      result = tracer.runfunc(run)
-    else:
-      result = run()
+
+    if gopts.trace:
+      SetTrace()
+
+    if gopts.trace_to_stderr:
+      SetTraceToStderr()
+
+    result = repo._Run(name, gopts, argv) or 0
   except KeyboardInterrupt:
     print('aborted by user', file=sys.stderr)
     result = 1
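For readers unfamiliar with the standard-library tracer that `--trace-python` relies on (the `tracer.runfunc(run)` calls above), here is a minimal self-contained sketch of the same pattern; `work()` is only a placeholder for the repo subcommand being traced.

```python
import sys
import trace

def work():
  """Stand-in for the subcommand body."""
  return sum(range(5))

# Same configuration as in _Repo._Run: print each executed line with timing,
# and ignore everything living under the interpreter's library paths.
tracer = trace.Trace(count=False, trace=True, timing=True,
                     ignoredirs=set(sys.path[1:]))
result = tracer.runfunc(work)
print(result)  # 10
```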
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "August 2022" "repo smartsync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
 .SH NAME
 repo \- repo smartsync - manual page for repo smartsync
 .SH SYNOPSIS
@@ -105,6 +105,13 @@ delete refs that no longer exist on the remote
 .TP
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
+.TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
 .SS Logging options:
 .TP
 \fB\-v\fR, \fB\-\-verbose\fR

@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "August 2022" "repo sync" "Repo Manual"
+.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
 .SH NAME
 repo \- repo sync - manual page for repo sync
 .SH SYNOPSIS
@@ -106,6 +106,13 @@ delete refs that no longer exist on the remote
 \fB\-\-no\-prune\fR
 do not delete refs that no longer exist on the remote
 .TP
+\fB\-\-auto\-gc\fR
+run garbage collection on all synced projects
+.TP
+\fB\-\-no\-auto\-gc\fR
+do not run garbage collection on any projects
+(default)
+.TP
 \fB\-s\fR, \fB\-\-smart\-sync\fR
 smart sync using manifest from the latest known good
 build
@@ -200,6 +207,9 @@ to a sha1 revision if the sha1 revision does not already exist locally.
 The \fB\-\-prune\fR option can be used to remove any refs that no longer exist on the
 remote.
 .PP
+The \fB\-\-auto\-gc\fR option can be used to trigger garbage collection on all projects.
+By default, repo does not run garbage collection.
+.PP
 SSH Connections
 .PP
 If at least one project remote URL uses an SSH connection (ssh://, git+ssh://,

@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "July 2022" "repo" "Repo Manual"
+.TH REPO "1" "November 2022" "repo" "Repo Manual"
 .SH NAME
 repo \- repository management tool built on top of git
 .SH SYNOPSIS
@@ -25,6 +25,10 @@ control color usage: auto, always, never
 \fB\-\-trace\fR
 trace git command execution (REPO_TRACE=1)
 .TP
+\fB\-\-trace\-to\-stderr\fR
+trace outputs go to stderr in addition to
+\&.repo/TRACE_FILE
+.TP
 \fB\-\-trace\-python\fR
 trace python command execution
 .TP
@@ -15,7 +15,7 @@
 import os
 import sys
 from time import time
-from repo_trace import IsTrace
+from repo_trace import IsTraceToStderr

 _NOT_TTY = not os.isatty(2)

@@ -80,7 +80,7 @@ class Progress(object):
   def update(self, inc=1, msg=''):
     self._done += inc

-    if _NOT_TTY or IsTrace():
+    if _NOT_TTY or IsTraceToStderr():
       return

     if not self._show:
@@ -113,7 +113,7 @@ class Progress(object):
     sys.stderr.flush()

   def end(self):
-    if _NOT_TTY or IsTrace() or not self._show:
+    if _NOT_TTY or IsTraceToStderr() or not self._show:
       return

     duration = duration_str(time() - self._start)
project.py (26 changed lines)

@@ -41,7 +41,7 @@ from error import ManifestInvalidRevisionError, ManifestInvalidPathError
 from error import NoManifestException, ManifestParseError
 import platform_utils
 import progress
-from repo_trace import IsTrace, Trace
+from repo_trace import Trace

 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M

@@ -59,7 +59,7 @@ MAXIMUM_RETRY_SLEEP_SEC = 3600.0
 # +-10% random jitter is added to each Fetches retry sleep duration.
 RETRY_JITTER_PERCENT = 0.1

-# Whether to use alternates.
+# Whether to use alternates. Switching back and forth is *NOT* supported.
 # TODO(vapier): Remove knob once behavior is verified.
 _ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1'

@@ -2416,16 +2416,16 @@ class Project(object):
       srcUrl = 'http' + srcUrl[len('persistent-http'):]
     cmd += [srcUrl]

-    if IsTrace():
-      Trace('%s', ' '.join(cmd))
-    if verbose:
-      print('%s: Downloading bundle: %s' % (self.name, srcUrl))
-    stdout = None if verbose else subprocess.PIPE
-    stderr = None if verbose else subprocess.STDOUT
-    try:
-      proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
-    except OSError:
-      return False
+    proc = None
+    with Trace('Fetching bundle: %s', ' '.join(cmd)):
+      if verbose:
+        print('%s: Downloading bundle: %s' % (self.name, srcUrl))
+      stdout = None if verbose else subprocess.PIPE
+      stderr = None if verbose else subprocess.STDOUT
+      try:
+        proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
+      except OSError:
+        return False

     (output, _) = proc.communicate()
     curlret = proc.returncode
@@ -3505,7 +3505,7 @@ class ManifestProject(MetaProject):
   @property
   def partial_clone_exclude(self):
     """Partial clone exclude string"""
-    return self.config.GetBoolean('repo.partialcloneexclude')
+    return self.config.GetString('repo.partialcloneexclude')

   @property
   def manifest_platform(self):
@@ -83,11 +83,6 @@ def main(argv):
   with multiprocessing.Pool() as pool:
     pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)

-  regex = (
-      (r'(It was generated by help2man) [0-9.]+', '\g<1>.'),
-      (r'^\.IP\n(.*:)\n', '.SS \g<1>\n'),
-      (r'^\.PP\nDescription', '.SH DETAILS'),
-  )
   for tmp_path in MANDIR.glob('*.1.tmp'):
     path = tmp_path.parent / tmp_path.stem
     old_data = path.read_text() if path.exists() else ''
@@ -95,8 +90,7 @@ def main(argv):
     data = tmp_path.read_text()
     tmp_path.unlink()

-    for pattern, replacement in regex:
-      data = re.sub(pattern, replacement, data, flags=re.M)
+    data = replace_regex(data)

     # If the only thing that changed was the date, don't refresh. This avoids
     # a lot of noise when only one file actually updates.
@@ -106,5 +100,25 @@ def main(argv):
       path.write_text(data)


+def replace_regex(data):
+  """Replace semantically null regexes in the data.
+
+  Args:
+    data: manpage text.
+
+  Returns:
+    Updated manpage text.
+  """
+  regex = (
+      (r'(It was generated by help2man) [0-9.]+', '\g<1>.'),
+      (r'^\033\[[0-9;]*m([^\033]*)\033\[m', '\g<1>'),
+      (r'^\.IP\n(.*:)\n', '.SS \g<1>\n'),
+      (r'^\.PP\nDescription', '.SH DETAILS'),
+  )
+  for pattern, replacement in regex:
+    data = re.sub(pattern, replacement, data, flags=re.M)
+  return data
+
+
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
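To see one of the rewrite rules gathered into `replace_regex` in isolation, here is a small stdlib-only check of the `.IP` heading promotion; the sample man-page snippet is made up.

```python
import re

# The ".IP\n<heading>:" -> ".SS <heading>:" rule from the tuple above.
pattern, replacement = r'^\.IP\n(.*:)\n', r'.SS \g<1>\n'
sample = '.PP\nintro text\n.IP\nLogging options:\n.TP\n'
print(re.sub(pattern, replacement, sample, flags=re.M), end='')
# .PP
# intro text
# .SS Logging options:
# .TP
```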
repo (2 changed lines)

@@ -149,7 +149,7 @@ if not REPO_REV:
 BUG_URL = 'https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue'

 # increment this whenever we make important changes to this script
-VERSION = (2, 29)
+VERSION = (2, 30)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
repo_trace.py (115 changed lines)

@@ -15,26 +15,133 @@
 """Logic for tracing repo interactions.

 Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
+
+Temporary: Tracing is always on. Set `REPO_TRACE=0` to turn off.
+To also include trace outputs in stderr do `repo --trace-to-stderr ...`
 """

 import sys
 import os
+import time
+from contextlib import ContextDecorator
+
+import platform_utils

 # Env var to implicitly turn on tracing.
 REPO_TRACE = 'REPO_TRACE'

-_TRACE = os.environ.get(REPO_TRACE) == '1'
+# Temporarily set tracing to always on unless user explicitly sets to 0.
+_TRACE = os.environ.get(REPO_TRACE) != '0'
+_TRACE_TO_STDERR = False
+_TRACE_FILE = None
+_TRACE_FILE_NAME = 'TRACE_FILE'
+_MAX_SIZE = 70  # in mb
+_NEW_COMMAND_SEP = '+++++++++++++++NEW COMMAND+++++++++++++++++++'
+
+
+def IsTraceToStderr():
+  return _TRACE_TO_STDERR


 def IsTrace():
   return _TRACE


+def SetTraceToStderr():
+  global _TRACE_TO_STDERR
+  _TRACE_TO_STDERR = True
+
+
 def SetTrace():
   global _TRACE
   _TRACE = True


-def Trace(fmt, *args):
-  if IsTrace():
-    print(fmt % args, file=sys.stderr)
+def _SetTraceFile(quiet):
+  global _TRACE_FILE
+  _TRACE_FILE = _GetTraceFile(quiet)
+
+
+class Trace(ContextDecorator):
+
+  def _time(self):
+    """Generate nanoseconds of time in a py3.6 safe way"""
+    return int(time.time() * 1e+9)
+
+  def __init__(self, fmt, *args, first_trace=False, quiet=True):
+    """Initialize the object.
+
+    Args:
+      fmt: The format string for the trace.
+      *args: Arguments to pass to formatting.
+      first_trace: Whether this is the first trace of a `repo` invocation.
+      quiet: Whether to suppress notification of trace file location.
+    """
+    if not IsTrace():
+      return
+    self._trace_msg = fmt % args
+
+    if not _TRACE_FILE:
+      _SetTraceFile(quiet)
+
+    if first_trace:
+      _ClearOldTraces()
+      self._trace_msg = f'{_NEW_COMMAND_SEP} {self._trace_msg}'
+
+  def __enter__(self):
+    if not IsTrace():
+      return self
+
+    print_msg = f'PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n'
+
+    with open(_TRACE_FILE, 'a') as f:
+      print(print_msg, file=f)
+
+    if _TRACE_TO_STDERR:
+      print(print_msg, file=sys.stderr)
+
+    return self
+
+  def __exit__(self, *exc):
+    if not IsTrace():
+      return False
+
+    print_msg = f'PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n'
+
+    with open(_TRACE_FILE, 'a') as f:
+      print(print_msg, file=f)
+
+    if _TRACE_TO_STDERR:
+      print(print_msg, file=sys.stderr)
+
+    return False
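Because the new `Trace` derives from `contextlib.ContextDecorator`, call sites can wrap either a block (as the changes in `main.py`, `git_config.py`, and `git_refs.py` do) or an entire function. A small sketch of both styles, assuming `repo_trace` is importable as in the repo source tree; the message strings are placeholders.

```python
import repo_trace
from repo_trace import Trace

repo_trace.SetTrace()  # or rely on the temporary always-on default

# Context-manager style, mirroring the call sites added in this change.
with Trace('starting new command: %s', 'sync'):
  pass  # ... the work being traced ...

# Decorator style, which ContextDecorator provides for free.
@Trace('scanning refs')
def scan_refs():
  return []

scan_refs()
```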
+def _GetTraceFile(quiet):
+  """Get the trace file or create one."""
+  # TODO: refactor to pass repodir to Trace.
+  repo_dir = os.path.dirname(os.path.dirname(__file__))
+  trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
+  if not quiet:
+    print(f'Trace outputs in {trace_file}', file=sys.stderr)
+  return trace_file
+
+
+def _ClearOldTraces():
+  """Clear the oldest commands if trace file is too big.
+
+  Note: If the trace file contains output from two `repo`
+        commands that were running at the same time, this
+        will not work precisely.
+  """
+  if os.path.isfile(_TRACE_FILE):
+    while os.path.getsize(_TRACE_FILE) / (1024 * 1024) > _MAX_SIZE:
+      temp_file = _TRACE_FILE + '.tmp'
+      with open(_TRACE_FILE, 'r', errors='ignore') as fin:
+        with open(temp_file, 'w') as tf:
+          trace_lines = fin.readlines()
+          for i, l in enumerate(trace_lines):
+            if 'END:' in l and _NEW_COMMAND_SEP in l:
+              tf.writelines(trace_lines[i + 1:])
+              break
+      platform_utils.rename(temp_file, _TRACE_FILE)
@@ -20,6 +20,7 @@ import os
 import shutil
 import subprocess
 import sys
+import repo_trace


 def find_pytest():
ssh.py (37 changed lines)

@@ -182,28 +182,29 @@ class ProxyManager:
     # be important because we can't tell that that 'git@myhost.com' is the same
     # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
     check_command = command_base + ['-O', 'check']
-    try:
-      Trace(': %s', ' '.join(check_command))
-      check_process = subprocess.Popen(check_command,
-                                       stdout=subprocess.PIPE,
-                                       stderr=subprocess.PIPE)
-      check_process.communicate()  # read output, but ignore it...
-      isnt_running = check_process.wait()
+    with Trace('Call to ssh (check call): %s', ' '.join(check_command)):
+      try:
+        check_process = subprocess.Popen(check_command,
+                                         stdout=subprocess.PIPE,
+                                         stderr=subprocess.PIPE)
+        check_process.communicate()  # read output, but ignore it...
+        isnt_running = check_process.wait()

-      if not isnt_running:
-        # Our double-check found that the master _was_ in fact running. Add to
-        # the list of keys.
-        self._master_keys[key] = True
-        return True
-    except Exception:
-      # Ignore exceptions. We will fall back to the normal command and print
-      # to the log there.
-      pass
+        if not isnt_running:
+          # Our double-check found that the master _was_ in fact running. Add to
+          # the list of keys.
+          self._master_keys[key] = True
+          return True
+      except Exception:
+        # Ignore exceptions. We will fall back to the normal command and
+        # print to the log there.
+        pass

     command = command_base[:1] + ['-M', '-N'] + command_base[1:]
     p = None
     try:
-      Trace(': %s', ' '.join(command))
-      p = subprocess.Popen(command)
+      with Trace('Call to ssh: %s', ' '.join(command)):
+        p = subprocess.Popen(command)
     except Exception as e:
       self._master_broken.value = True
       print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
@@ -155,11 +155,11 @@ is shown, then the branch appears in all projects.
         if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
           in_type = 'in'
           for b in i.projects:
-            relpath = b.project.relpath
+            relpath = _RelPath(b.project)
             if not i.IsSplitCurrent or b.current:
-              paths.append(_RelPath(b.project))
+              paths.append(relpath)
             else:
-              non_cur_paths.append(_RelPath(b.project))
+              non_cur_paths.append(relpath)
         else:
           fmt = out.notinproject
           in_type = 'not in'
@@ -77,33 +77,35 @@ synced and their revisions won't be found.
                  metavar='<FORMAT>',
                  help='print the log using a custom git pretty format string')

-  def _printRawDiff(self, diff, pretty_format=None):
+  def _printRawDiff(self, diff, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     for project in diff['added']:
-      self.printText("A %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("A %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()

     for project in diff['removed']:
-      self.printText("R %s %s" % (project.relpath, project.revisionExpr))
+      self.printText("R %s %s" % (_RelPath(project), project.revisionExpr))
       self.out.nl()

     for project, otherProject in diff['changed']:
-      self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()
       self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)

     for project, otherProject in diff['unreachable']:
-      self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
+      self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr,
                                      otherProject.revisionExpr))
       self.out.nl()

-  def _printDiff(self, diff, color=True, pretty_format=None):
+  def _printDiff(self, diff, color=True, pretty_format=None, local=False):
+    _RelPath = lambda p: p.RelPath(local=local)
     if diff['added']:
       self.out.nl()
       self.printText('added projects : \n')
       self.out.nl()
       for project in diff['added']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -113,7 +115,7 @@ synced and their revisions won't be found.
       self.printText('removed projects : \n')
       self.out.nl()
       for project in diff['removed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -123,7 +125,7 @@ synced and their revisions won't be found.
       self.printText('missing projects : \n')
       self.out.nl()
       for project in diff['missing']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' at revision ')
         self.printRevision(project.revisionExpr)
         self.out.nl()
@@ -133,7 +135,7 @@ synced and their revisions won't be found.
       self.printText('changed projects : \n')
       self.out.nl()
       for project, otherProject in diff['changed']:
-        self.printProject('\t%s' % (project.relpath))
+        self.printProject('\t%s' % (_RelPath(project)))
         self.printText(' changed from ')
         self.printRevision(project.revisionExpr)
         self.printText(' to ')
@@ -148,7 +150,7 @@ synced and their revisions won't be found.
       self.printText('projects with unreachable revisions : \n')
       self.out.nl()
       for project, otherProject in diff['unreachable']:
-        self.printProject('\t%s ' % (project.relpath))
+        self.printProject('\t%s ' % (_RelPath(project)))
         self.printRevision(project.revisionExpr)
         self.printText(' or ')
         self.printRevision(otherProject.revisionExpr)
@@ -214,6 +216,8 @@ synced and their revisions won't be found.

     diff = manifest1.projectsDiff(manifest2)
     if opt.raw:
-      self._printRawDiff(diff, pretty_format=opt.pretty_format)
+      self._printRawDiff(diff, pretty_format=opt.pretty_format,
+                         local=opt.this_manifest_only)
     else:
-      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
+      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format,
+                      local=opt.this_manifest_only)
@@ -68,7 +68,8 @@ use for this GITC client.
       sys.exit(1)
     manifest_file = opt.manifest_file

-    manifest = GitcManifest(self.repodir, gitc_client)
+    manifest = GitcManifest(self.repodir, os.path.join(self.client_dir,
+                                                       '.manifest'))
     manifest.Override(manifest_file)
     gitc_utils.generate_gitc_manifest(None, manifest)
     print('Please run `cd %s` to view your GITC client.' %
subcmds/sync.py (257 changed lines)

@@ -21,7 +21,6 @@ import multiprocessing
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import shutil
 import socket
 import sys
 import tempfile
@@ -60,16 +59,17 @@ from error import RepoChangedException, GitError, ManifestParseError
 import platform_utils
 from project import SyncBuffer
 from progress import Progress
-from repo_trace import IsTrace, Trace
+from repo_trace import Trace
 import ssh
 from wrapper import Wrapper
 from manifest_xml import GitcManifest

 _ONE_DAY_S = 24 * 60 * 60
-# Env var to implicitly turn off object backups.
-REPO_BACKUP_OBJECTS = 'REPO_BACKUP_OBJECTS'
-
-_BACKUP_OBJECTS = os.environ.get(REPO_BACKUP_OBJECTS) != '0'
+# Env var to implicitly turn auto-gc back on. This was added to allow a user to
+# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
+_REPO_AUTO_GC = 'REPO_AUTO_GC'
+_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'


 class _FetchOneResult(NamedTuple):
@@ -200,6 +200,9 @@ exist locally.
 The --prune option can be used to remove any refs that no longer
 exist on the remote.

+The --auto-gc option can be used to trigger garbage collection on all
+projects. By default, repo does not run garbage collection.
+
 # SSH Connections

 If at least one project remote URL uses an SSH connection (ssh://,
@@ -309,6 +312,10 @@ later is required to fix a server side protocol bug.
                  help='delete refs that no longer exist on the remote (default)')
     p.add_option('--no-prune', dest='prune', action='store_false',
                  help='do not delete refs that no longer exist on the remote')
+    p.add_option('--auto-gc', action='store_true', default=None,
+                 help='run garbage collection on all synced projects')
+    p.add_option('--no-auto-gc', dest='auto_gc', action='store_false',
+                 help='do not run garbage collection on any projects (default)')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -463,6 +470,7 @@ later is required to fix a server side protocol bug.
     """
     start = time.time()
     success = False
+    remote_fetched = False
     buf = io.StringIO()
     try:
       sync_result = project.Sync_NetworkHalf(
@@ -480,6 +488,7 @@ later is required to fix a server side protocol bug.
           clone_filter=project.manifest.CloneFilter,
           partial_clone_exclude=project.manifest.PartialCloneExclude)
       success = sync_result.success
+      remote_fetched = sync_result.remote_fetched

       output = buf.getvalue()
       if (opt.verbose or not success) and output:
@@ -497,8 +506,7 @@ later is required to fix a server side protocol bug.
       raise

     finish = time.time()
-    return _FetchOneResult(success, project, start, finish,
-                           sync_result.remote_fetched)
+    return _FetchOneResult(success, project, start, finish, remote_fetched)

   @classmethod
   def _FetchInitChild(cls, ssh_proxy):
@@ -713,7 +721,7 @@ later is required to fix a server side protocol bug.
       # ...we'll let existing jobs finish, though.
       if not success:
         ret = False
-        err_results.append(project.relpath)
+        err_results.append(project.RelPath(local=opt.this_manifest_only))
         if opt.fail_fast:
           if pool:
             pool.close()
@@ -728,63 +736,99 @@ later is required to fix a server side protocol bug.
         callback=_ProcessResults,
         output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results

-  def _backup_cruft(self, bare_git):
-    """Save a copy of any cruft from `git gc`."""
-    # Find any cruft packs in the current gitdir, and save them.
-    # b/221065125 (repo sync complains that objects are missing). This does
-    # not prevent that state, but makes it so that the missing objects are
-    # available.
-    objdir = bare_git._project.objdir
-    pack_dir = os.path.join(objdir, 'pack')
-    bak_dir = os.path.join(objdir, '.repo', 'pack.bak')
-    if not _BACKUP_OBJECTS or not platform_utils.isdir(pack_dir):
-      return
-    saved = []
-    files = set(platform_utils.listdir(pack_dir))
-    to_backup = []
-    for f in files:
-      base, ext = os.path.splitext(f)
-      if base + '.mtimes' in files:
-        to_backup.append(f)
-    if to_backup:
-      os.makedirs(bak_dir, exist_ok=True)
-    for fname in to_backup:
-      bak_fname = os.path.join(bak_dir, fname)
-      if not os.path.exists(bak_fname):
-        saved.append(fname)
-        # Use a tmp file so that we are sure of a complete copy.
-        shutil.copy(os.path.join(pack_dir, fname), bak_fname + '.tmp')
-        shutil.move(bak_fname + '.tmp', bak_fname)
-    if saved:
-      Trace('%s saved %s', bare_git._project.name, ' '.join(saved))
+  @staticmethod
+  def _GetPreciousObjectsState(project: Project, opt):
+    """Get the preciousObjects state for the project.
+
+    Args:
+      project (Project): the project to examine, and possibly correct.
+      opt (optparse.Values): options given to sync.
+
+    Returns:
+      Expected state of extensions.preciousObjects:
+        False: Should be disabled. (not present)
+        True: Should be enabled.
+    """
+    if project.use_git_worktrees:
+      return False
+    projects = project.manifest.GetProjectsWithName(project.name,
+                                                    all_manifests=True)
+    if len(projects) == 1:
+      return False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+    if len(projects) > 1:
+      # Objects are potentially shared with another project.
+      # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
+      # - When False, shared projects share (via symlink)
+      #   .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
+      #   directory. All objects are precious, since there is no project with a
+      #   complete set of refs.
+      # - When True, shared projects share (via info/alternates)
+      #   .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
+      #   which is written only on the first clone of the project, and is not
+      #   written subsequently. (When Sync_NetworkHalf sees that it exists, it
+      #   makes sure that the alternates file points there, and uses a
+      #   project-local .git/objects directory for all syncs going forward.
+      # We do not support switching between the options. The environment
+      # variable is present for testing and migration only.
+      return not project.UseAlternates
+    print(f'\r{relpath}: project not found in manifest.', file=sys.stderr)
+    return False
+
+  def _SetPreciousObjectsState(self, project: Project, opt):
+    """Correct the preciousObjects state for the project.
+
+    Args:
+      project: the project to examine, and possibly correct.
+      opt: options given to sync.
+    """
+    expected = self._GetPreciousObjectsState(project, opt)
+    actual = project.config.GetBoolean('extensions.preciousObjects') or False
+    relpath = project.RelPath(local=opt.this_manifest_only)
+
+    if expected != actual:
+      # If this is unexpected, log it and repair.
+      Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
+      if expected:
+        if not opt.quiet:
+          print('\r%s: Shared project %s found, disabling pruning.' %
+                (relpath, project.name))
+        if git_require((2, 7, 0)):
+          project.EnableRepositoryExtension('preciousObjects')
+        else:
+          # This isn't perfect, but it's the best we can do with old git.
+          print('\r%s: WARNING: shared projects are unreliable when using '
+                'old versions of git; please upgrade to git-2.7.0+.'
+                % (relpath,),
+                file=sys.stderr)
+          project.config.SetString('gc.pruneExpire', 'never')
+      else:
+        if not opt.quiet:
+          print(f'\r{relpath}: not shared, disabling pruning.')
+        project.config.SetString('extensions.preciousObjects', None)
+        project.config.SetString('gc.pruneExpire', None)
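Background for the helpers above: `extensions.preciousObjects` is a standard git repository extension that makes git refuse object-destroying operations (such as `git prune`), which is why repo wants it enabled for shared object stores that do not use alternates. As a rough illustration only, here is roughly what `EnableRepositoryExtension('preciousObjects')` amounts to for a plain git directory; the path is a placeholder, and the real code goes through repo's own config wrappers.

```python
import subprocess

GITDIR = '/path/to/.repo/project-objects/example.git'  # placeholder path

# Repository extensions only take effect with format version 1; once
# preciousObjects is set, git rejects destructive commands that could drop
# objects other (shared) checkouts may still reference.
subprocess.run(['git', '--git-dir', GITDIR, 'config',
                'core.repositoryformatversion', '1'], check=True)
subprocess.run(['git', '--git-dir', GITDIR, 'config',
                'extensions.preciousObjects', 'true'], check=True)
```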
   def _GCProjects(self, projects, opt, err_event):
-    pm = Progress('Garbage collecting', len(projects), delay=False, quiet=opt.quiet)
+    """Perform garbage collection.
+
+    If we are skipping garbage collection (opt.auto_gc not set), we still want
+    to potentially mark objects precious, so that `git gc` does not discard
+    shared objects.
+    """
+    if not opt.auto_gc:
+      # Just repair preciousObjects state, and return.
+      for project in projects:
+        self._SetPreciousObjectsState(project, opt)
+      return
+
+    pm = Progress('Garbage collecting', len(projects), delay=False,
+                  quiet=opt.quiet)
     pm.update(inc=0, msg='prescan')

     tidy_dirs = {}
     for project in projects:
-      # Make sure pruning never kicks in with shared projects that do not use
-      # alternates to avoid corruption.
-      if (not project.use_git_worktrees and
-              len(project.manifest.GetProjectsWithName(project.name, all_manifests=True)) > 1):
-        if project.UseAlternates:
-          # Undo logic set by previous versions of repo.
-          project.config.SetString('extensions.preciousObjects', None)
-          project.config.SetString('gc.pruneExpire', None)
-        else:
-          if not opt.quiet:
-            print('\r%s: Shared project %s found, disabling pruning.' %
-                  (project.relpath, project.name))
-          if git_require((2, 7, 0)):
-            project.EnableRepositoryExtension('preciousObjects')
-          else:
-            # This isn't perfect, but it's the best we can do with old git.
-            print('\r%s: WARNING: shared projects are unreliable when using old '
-                  'versions of git; please upgrade to git-2.7.0+.'
-                  % (project.relpath,),
-                  file=sys.stderr)
-            project.config.SetString('gc.pruneExpire', 'never')
+      self._SetPreciousObjectsState(project, opt)

       project.config.SetString('gc.autoDetach', 'false')
       # Only call git gc once per objdir, but call pack-refs for the remainder.
       if project.objdir not in tidy_dirs:
@@ -800,22 +844,14 @@ later is required to fix a server side protocol bug.

     jobs = opt.jobs

-    gc_args = ['--auto']
-    backup_cruft = False
-    if git_require((2, 37, 0)):
-      gc_args.append('--cruft')
-      backup_cruft = True
-    pack_refs_args = ()
     if jobs < 2:
       for (run_gc, bare_git) in tidy_dirs.values():
         pm.update(msg=bare_git._project.name)

         if run_gc:
-          bare_git.gc(*gc_args)
+          bare_git.gc('--auto')
         else:
-          bare_git.pack_refs(*pack_refs_args)
-        if backup_cruft:
-          self._backup_cruft(bare_git)
+          bare_git.pack_refs()
       pm.end()
       return

@@ -830,17 +866,15 @@ later is required to fix a server side protocol bug.
       try:
         try:
           if run_gc:
-            bare_git.gc(*gc_args, config=config)
+            bare_git.gc('--auto', config=config)
           else:
-            bare_git.pack_refs(*pack_refs_args, config=config)
+            bare_git.pack_refs(config=config)
         except GitError:
           err_event.set()
         except Exception:
           err_event.set()
           raise
       finally:
-        if backup_cruft:
-          self._backup_cruft(bare_git)
         pm.finish(bare_git._project.name)
         sem.release()

@@ -1150,6 +1184,51 @@ later is required to fix a server side protocol bug.
     if opt.prune is None:
       opt.prune = True

+    if opt.auto_gc is None and _AUTO_GC:
+      print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
+            f'{_REPO_AUTO_GC} is deprecated and will be removed in a future',
+            'release. Use `--auto-gc` instead.', file=sys.stderr)
+      opt.auto_gc = True
+
+  def _ValidateOptionsWithManifest(self, opt, mp):
+    """Like ValidateOptions, but after we've updated the manifest.
+
+    Needed to handle sync-xxx option defaults in the manifest.
+
+    Args:
+      opt: The options to process.
+      mp: The manifest project to pull defaults from.
+    """
+    if not opt.jobs:
+      # If the user hasn't made a choice, use the manifest value.
+      opt.jobs = mp.manifest.default.sync_j
+    if opt.jobs:
+      # If --jobs has a non-default value, propagate it as the default for
+      # --jobs-xxx flags too.
+      if not opt.jobs_network:
+        opt.jobs_network = opt.jobs
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = opt.jobs
+    else:
+      # Neither user nor manifest have made a choice, so setup defaults.
+      if not opt.jobs_network:
+        opt.jobs_network = 1
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
+      opt.jobs = os.cpu_count()
+
+    # Try to stay under user rlimit settings.
+    #
+    # Since each worker requires at least 3 file descriptors to run `git fetch`,
+    # use that to scale down the number of jobs. Unfortunately there isn't an
+    # easy way to determine this reliably as systems change, but it was last
+    # measured by hand in 2011.
+    soft_limit, _ = _rlimit_nofile()
+    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
+    opt.jobs = min(opt.jobs, jobs_soft_limit)
+    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
+    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
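To make the file-descriptor cap above concrete, here is a tiny worked example of the same formula; the soft limit value is only an assumed example.

```python
soft_limit = 1024  # e.g. a common `ulimit -n` soft limit
jobs_soft_limit = max(1, (soft_limit - 5) // 3)
print(jobs_soft_limit)  # 339: roughly 3 descriptors per `git fetch` worker
```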
   def Execute(self, opt, args):
     manifest = self.outer_manifest
     if not opt.outer_manifest:
@@ -1200,35 +1279,9 @@ later is required to fix a server side protocol bug.
       else:
         print('Skipping update of local manifest project.')

-    # Now that the manifests are up-to-date, setup the jobs value.
-    if opt.jobs is None:
-      # User has not made a choice, so use the manifest settings.
-      opt.jobs = mp.default.sync_j
-    if opt.jobs is not None:
-      # Neither user nor manifest have made a choice.
-      if opt.jobs_network is None:
-        opt.jobs_network = opt.jobs
-      if opt.jobs_checkout is None:
-        opt.jobs_checkout = opt.jobs
-    # Setup defaults if jobs==0.
-    if not opt.jobs:
-      if not opt.jobs_network:
-        opt.jobs_network = 1
-      if not opt.jobs_checkout:
-        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
-      opt.jobs = os.cpu_count()
-
-    # Try to stay under user rlimit settings.
-    #
-    # Since each worker requires at 3 file descriptors to run `git fetch`, use
-    # that to scale down the number of jobs. Unfortunately there isn't an easy
-    # way to determine this reliably as systems change, but it was last measured
-    # by hand in 2011.
-    soft_limit, _ = _rlimit_nofile()
-    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
-    opt.jobs = min(opt.jobs, jobs_soft_limit)
-    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
-    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+    # Now that the manifests are up-to-date, setup options whose defaults might
+    # be in the manifest.
+    self._ValidateOptionsWithManifest(opt, mp)

     superproject_logging_data = {}
     self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
@@ -278,8 +278,9 @@ Gerrit Code Review: https://www.gerritcodereview.com/
     script = []
     script.append('# Uncomment the branches to upload:')
     for project, avail in pending:
+      project_path = project.RelPath(local=opt.this_manifest_only)
       script.append('#')
-      script.append('# project %s/:' % project.RelPath(local=opt.this_manifest_only))
+      script.append(f'# project {project_path}/:')

       b = {}
       for branch in avail:
@@ -302,8 +303,8 @@ Gerrit Code Review: https://www.gerritcodereview.com/
           script.append('# %s' % commit)
         b[name] = branch

-      projects[project.RelPath(local=opt.this_manifest_only)] = project
-      branches[project.name] = b
+      projects[project_path] = project
+      branches[project_path] = b
     script.append('')

     script = Editor.EditString("\n".join(script)).split("\n")
@@ -328,9 +329,10 @@ Gerrit Code Review: https://www.gerritcodereview.com/
         name = m.group(1)
         if not project:
           _die('project for branch %s not in script', name)
-        branch = branches[project.name].get(name)
+        project_path = project.RelPath(local=opt.this_manifest_only)
+        branch = branches[project_path].get(name)
         if not branch:
-          _die('branch %s not in %s', name, project.RelPath(local=opt.this_manifest_only))
+          _die('branch %s not in %s', name, project_path)
         todo.append(branch)
     if not todo:
       _die("nothing uncommented for upload")
@@ -15,6 +15,7 @@
 """Unittests for the git_command.py module."""

 import re
+import os
 import unittest

 try:
@@ -26,6 +27,38 @@ import git_command
 import wrapper


+class GitCommandTest(unittest.TestCase):
+  """Tests the GitCommand class (via git_command.git)."""
+
+  def setUp(self):
+
+    def realpath_mock(val):
+      return val
+
+    mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start()
+
+  def tearDown(self):
+    mock.patch.stopall()
+
+  def test_alternative_setting_when_matching(self):
+    r = git_command._build_env(
+        objdir = os.path.join('zap', 'objects'),
+        gitdir = 'zap'
+    )
+
+    self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
+
+  def test_alternative_setting_when_different(self):
+    r = git_command._build_env(
+        objdir = os.path.join('wow', 'objects'),
+        gitdir = 'zap'
+    )
+
+    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
+
+
 class GitCallUnitTest(unittest.TestCase):
   """Tests the _GitCall class (via git_command.git)."""

@@ -84,7 +117,8 @@ class GitRequireTests(unittest.TestCase):
   """Test the git_require helper."""

   def setUp(self):
-    ver = wrapper.GitVersion(1, 2, 3, 4)
+    self.wrapper = wrapper.Wrapper()
+    ver = self.wrapper.GitVersion(1, 2, 3, 4)
     mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()

   def tearDown(self):
@ -19,6 +19,7 @@ import tempfile
import unittest

import git_config
import repo_trace


def fixture(*paths):
@ -33,9 +34,16 @@ class GitConfigReadOnlyTests(unittest.TestCase):
def setUp(self):
"""Create a GitConfig object using the test.gitconfig fixture.
"""

self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')

config_fixture = fixture('test.gitconfig')
self.config = git_config.GitConfig(config_fixture)

def tearDown(self):
self.tempdirobj.cleanup()

def test_GetString_with_empty_config_values(self):
"""
Test config entries with no value.
@ -109,9 +117,15 @@ class GitConfigReadWriteTests(unittest.TestCase):
"""Read/write tests of the GitConfig class."""

def setUp(self):
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')

self.tmpfile = tempfile.NamedTemporaryFile()
self.config = self.get_config()

def tearDown(self):
self.tempdirobj.cleanup()

def get_config(self):
"""Get a new GitConfig instance."""
return git_config.GitConfig(self.tmpfile.name)
@ -186,7 +200,3 @@ class GitConfigReadWriteTests(unittest.TestCase):
for key, value in TESTS:
self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])


if __name__ == '__main__':
unittest.main()

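Several test classes in this series gain the same `setUp`/`tearDown` pair: redirect `repo_trace._TRACE_FILE` into a throwaway `TemporaryDirectory` so the unit tests never write trace output into the developer's real repo state. A sketch of that shared pattern as a hypothetical base class (the patch repeats it inline in each test case instead):

```python
import os
import tempfile
import unittest

import repo_trace


class TraceRedirectingTestCase(unittest.TestCase):
    # Point the global trace file at a temporary directory for the duration of
    # each test, then clean the directory up afterwards.
    def setUp(self):
        self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
        repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')

    def tearDown(self):
        self.tempdirobj.cleanup()
```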
@ -24,6 +24,7 @@ from unittest import mock
import git_superproject
import git_trace2_event_log
import manifest_xml
import repo_trace
from test_manifest_xml import sort_attributes


@ -39,6 +40,7 @@ class SuperprojectTestCase(unittest.TestCase):
"""Set up superproject every time."""
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
self.tempdir = self.tempdirobj.name
repo_trace._TRACE_FILE = os.path.join(self.tempdir, 'TRACE_FILE_from_test')
self.repodir = os.path.join(self.tempdir, '.repo')
self.manifest_file = os.path.join(
self.repodir, manifest_xml.MANIFEST_FILE_NAME)
@ -364,7 +366,3 @@ class SuperprojectTestCase(unittest.TestCase):
'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
'<superproject name="superproject"/>'
'</manifest>')


if __name__ == '__main__':
unittest.main()

@ -369,7 +369,7 @@ class EventLogTestCase(unittest.TestCase):
server_thread.start()

with server_ready:
server_ready.wait()
server_ready.wait(timeout=120)

self._event_log_module.StartEvent()
path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
@ -385,7 +385,3 @@ class EventLogTestCase(unittest.TestCase):
# Check for 'start' event specific fields.
self.assertIn('argv', start_event)
self.assertIsInstance(start_event['argv'], list)


if __name__ == '__main__':
unittest.main()

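Replacing the bare `server_ready.wait()` with `wait(timeout=120)` bounds how long the event-log test can block if the helper server thread never signals readiness; together with the `pytest-timeout` setting added to `tox.ini` further down, a stuck test now fails instead of hanging CI. A small sketch of the bounded-wait idiom (names here are illustrative):

```python
import threading

server_ready = threading.Condition()

def wait_for_server(timeout=120):
    # Condition.wait() with no timeout can block forever if notify() is never
    # called; with a timeout it returns False instead, so the caller can fail
    # fast with a clear assertion rather than hang.
    with server_ready:
        return server_ready.wait(timeout=timeout)
```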
@ -23,6 +23,7 @@ import xml.dom.minidom

import error
import manifest_xml
import repo_trace


# Invalid paths that we don't want in the filesystem.
@ -93,6 +94,7 @@ class ManifestParseTestCase(unittest.TestCase):
def setUp(self):
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
self.tempdir = self.tempdirobj.name
repo_trace._TRACE_FILE = os.path.join(self.tempdir, 'TRACE_FILE_from_test')
self.repodir = os.path.join(self.tempdir, '.repo')
self.manifest_dir = os.path.join(self.repodir, 'manifests')
self.manifest_file = os.path.join(
@ -115,7 +117,7 @@ class ManifestParseTestCase(unittest.TestCase):

def getXmlManifest(self, data):
"""Helper to initialize a manifest for testing."""
with open(self.manifest_file, 'w') as fp:
with open(self.manifest_file, 'w', encoding="utf-8") as fp:
fp.write(data)
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)

@ -262,10 +264,10 @@ class XmlManifestTests(ManifestParseTestCase):
'<project name="r" groups="keep"/>'
'</manifest>')
self.assertEqual(
manifest.ToXml(omit_local=True).toxml(),
sort_attributes(manifest.ToXml(omit_local=True).toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
'<project name="q"/><project name="r" groups="keep"/></manifest>')
'<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
'<project name="q"/><project groups="keep" name="r"/></manifest>')

def test_toxml_with_local(self):
"""Does include local_manifests projects when omit_local=False."""
@ -277,11 +279,11 @@ class XmlManifestTests(ManifestParseTestCase):
'<project name="r" groups="keep"/>'
'</manifest>')
self.assertEqual(
manifest.ToXml(omit_local=False).toxml(),
sort_attributes(manifest.ToXml(omit_local=False).toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
'<project name="p" groups="local::me"/>'
'<project name="q"/><project name="r" groups="keep"/></manifest>')
'<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
'<project groups="local::me" name="p"/>'
'<project name="q"/><project groups="keep" name="r"/></manifest>')

def test_repo_hooks(self):
"""Check repo-hooks settings."""
@ -426,7 +428,7 @@ class IncludeElementTests(ManifestParseTestCase):
def parse(name):
name = self.encodeXmlAttr(name)
# Setup target of the include.
with open(os.path.join(self.manifest_dir, 'target.xml'), 'w') as fp:
with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp:
fp.write(f'<manifest><include name="{name}"/></manifest>')

manifest = self.getXmlManifest("""
@ -517,22 +519,22 @@ class ProjectElementTests(ManifestParseTestCase):
""")

manifest = parse('a/path/', 'foo')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))

manifest = parse('a/path', 'foo/')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))

manifest = parse('a/path', 'foo//////')
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/foo.git'))
self.assertEqual(manifest.projects[0].objdir,
os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))

def test_toplevel_path(self):
"""Check handling of path=. specially."""
@ -549,8 +551,8 @@ class ProjectElementTests(ManifestParseTestCase):

for path in ('.', './', './/', './//'):
manifest = parse('server/path', path)
self.assertEqual(manifest.projects[0].gitdir,
os.path.join(self.tempdir, '.repo/projects/..git'))
self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
os.path.join(self.tempdir, '.repo', 'projects', '..git'))

def test_bad_path_name_checks(self):
"""Check handling of bad path & name attributes."""
@ -576,7 +578,7 @@ class ProjectElementTests(ManifestParseTestCase):
parse('', 'ok')

for path in INVALID_FS_PATHS:
if not path or path.endswith('/'):
if not path or path.endswith('/') or path.endswith(os.path.sep):
continue

with self.assertRaises(error.ManifestInvalidPathError):

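The switch from hard-coded `'.repo/projects/foo.git'` strings to `os.path.normpath(...)` compared against `os.path.join(...)` makes these assertions hold on Windows, where the native separator is a backslash. A quick illustration of why the normalized comparison is portable:

```python
import os.path

# A hand-written POSIX-style path only equals the os.path.join() result on
# platforms whose separator is '/'; normalizing it first makes the comparison
# hold everywhere, which is what the assertions above switch to.
expected = os.path.join('.repo', 'projects', 'foo.git')
assert os.path.normpath('.repo/projects/foo.git') == expected
```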
@ -22,10 +22,12 @@ import tempfile
import unittest

import error
import manifest_xml
import git_command
import git_config
import platform_utils
import project
import repo_trace


@contextlib.contextmanager
@ -64,6 +66,13 @@ class FakeProject(object):
class ReviewableBranchTests(unittest.TestCase):
"""Check ReviewableBranch behavior."""

def setUp(self):
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
repo_trace._TRACE_FILE = os.path.join(self.tempdirobj.name, 'TRACE_FILE_from_test')

def tearDown(self):
self.tempdirobj.cleanup()

def test_smoke(self):
"""A quick run through everything."""
with TempGitTree() as tempdir:
@ -376,7 +385,7 @@ class MigrateWorkTreeTests(unittest.TestCase):

# Make sure the dir was transformed into a symlink.
self.assertTrue(dotgit.is_symlink())
self.assertEqual(os.readlink(dotgit), '../../.repo/projects/src/test.git')
self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))

# Make sure files were moved over.
gitdir = tempdir / '.repo/projects/src/test.git'
@ -403,3 +412,81 @@ class MigrateWorkTreeTests(unittest.TestCase):
self.assertTrue((dotgit / name).is_file())
for name in self._SYMLINKS:
self.assertTrue((dotgit / name).is_symlink())


class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
"""Ensure properties are fetched properly."""

def setUpManifest(self, tempdir):
repo_trace._TRACE_FILE = os.path.join(tempdir, 'TRACE_FILE_from_test')

repodir = os.path.join(tempdir, '.repo')
manifest_dir = os.path.join(repodir, 'manifests')
manifest_file = os.path.join(
repodir, manifest_xml.MANIFEST_FILE_NAME)
local_manifest_dir = os.path.join(
repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
os.mkdir(repodir)
os.mkdir(manifest_dir)
manifest = manifest_xml.XmlManifest(repodir, manifest_file)

return project.ManifestProject(
manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)

def test_manifest_config_properties(self):
"""Test we are fetching the manifest config properties correctly."""

with TempGitTree() as tempdir:
fakeproj = self.setUpManifest(tempdir)

# Set property using the expected Set method, then ensure
# the property functions are using the correct Get methods.
fakeproj.config.SetString(
'manifest.standalone', 'https://chicken/manifest.git')
self.assertEqual(
fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')

fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')

fakeproj.config.SetString('repo.reference', 'mirror/ref')
self.assertEqual(fakeproj.reference, 'mirror/ref')

fakeproj.config.SetBoolean('repo.dissociate', False)
self.assertFalse(fakeproj.dissociate)

fakeproj.config.SetBoolean('repo.archive', False)
self.assertFalse(fakeproj.archive)

fakeproj.config.SetBoolean('repo.mirror', False)
self.assertFalse(fakeproj.mirror)

fakeproj.config.SetBoolean('repo.worktree', False)
self.assertFalse(fakeproj.use_worktree)

fakeproj.config.SetBoolean('repo.clonebundle', False)
self.assertFalse(fakeproj.clone_bundle)

fakeproj.config.SetBoolean('repo.submodules', False)
self.assertFalse(fakeproj.submodules)

fakeproj.config.SetBoolean('repo.git-lfs', False)
self.assertFalse(fakeproj.git_lfs)

fakeproj.config.SetBoolean('repo.superproject', False)
self.assertFalse(fakeproj.use_superproject)

fakeproj.config.SetBoolean('repo.partialclone', False)
self.assertFalse(fakeproj.partial_clone)

fakeproj.config.SetString('repo.depth', '48')
self.assertEqual(fakeproj.depth, '48')

fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')

fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')

fakeproj.config.SetString('manifest.platform', 'auto')
self.assertEqual(fakeproj.manifest_platform, 'auto')

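The `ManifestPropertiesFetchedCorrectly` test writes values through `config.SetString`/`SetBoolean` and reads them back through the matching properties. A hedged sketch of the pattern it is checking, using a hypothetical class rather than the real `ManifestProject`, and assuming the usual typed getters on the config object:

```python
class ConfigBackedProject:
    """Sketch only: each property delegates to the matching typed config getter."""

    def __init__(self, config):
        self.config = config

    @property
    def depth(self):
        # Returns whatever string was stored under repo.depth (e.g. '48').
        return self.config.GetString('repo.depth')

    @property
    def use_worktree(self):
        # Returns the boolean stored under repo.worktree.
        return self.config.GetBoolean('repo.worktree')
```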
@ -11,27 +11,26 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the subcmds/sync.py module."""

import os
import unittest
from unittest import mock

import pytest

import command
from subcmds import sync


@pytest.mark.parametrize(
'use_superproject, cli_args, result',
[
@pytest.mark.parametrize('use_superproject, cli_args, result', [
(True, ['--current-branch'], True),
(True, ['--no-current-branch'], True),
(True, [], True),
(False, ['--current-branch'], True),
(False, ['--no-current-branch'], False),
(False, [], None),
]
)
])
def test_get_current_branch_only(use_superproject, cli_args, result):
"""Test Sync._GetCurrentBranchOnly logic.

@ -41,5 +40,94 @@ def test_get_current_branch_only(use_superproject, cli_args, result):
cmd = sync.Sync()
opts, _ = cmd.OptionParser.parse_args(cli_args)

with mock.patch('git_superproject.UseSuperproject', return_value=use_superproject):
with mock.patch('git_superproject.UseSuperproject',
return_value=use_superproject):
assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result


# Used to patch os.cpu_count() for reliable results.
OS_CPU_COUNT = 24

@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
# No user or manifest settings.
([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
# No user settings, so manifest settings control.
([], 3, 3, 3, 3),
# User settings, but no manifest.
(['--jobs=4'], None, 4, 4, 4),
(['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
(['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
(['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
(['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
(['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
(['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
# User settings with manifest settings.
(['--jobs=4'], 3, 4, 4, 4),
(['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
(['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
(['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
(['--jobs-network=5'], 3, 3, 5, 3),
(['--jobs-checkout=6'], 3, 3, 3, 6),
(['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
# Settings that exceed rlimits get capped.
(['--jobs=1000000'], None, 83, 83, 83),
([], 1000000, 83, 83, 83),
])
def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
"""Tests --jobs option behavior."""
mp = mock.MagicMock()
mp.manifest.default.sync_j = jobs_manifest

cmd = sync.Sync()
opts, args = cmd.OptionParser.parse_args(argv)
cmd.ValidateOptions(opts, args)

with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
cmd._ValidateOptionsWithManifest(opts, mp)
assert opts.jobs == jobs
assert opts.jobs_network == jobs_net
assert opts.jobs_checkout == jobs_check


class GetPreciousObjectsState(unittest.TestCase):
"""Tests for _GetPreciousObjectsState."""

def setUp(self):
"""Common setup."""
self.cmd = sync.Sync()
self.project = p = mock.MagicMock(use_git_worktrees=False,
UseAlternates=False)
p.manifest.GetProjectsWithName.return_value = [p]

self.opt = mock.Mock(spec_set=['this_manifest_only'])
self.opt.this_manifest_only = False

def test_worktrees(self):
"""False for worktrees."""
self.project.use_git_worktrees = True
self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))

def test_not_shared(self):
"""Singleton project."""
self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))

def test_shared(self):
"""Shared project."""
self.project.manifest.GetProjectsWithName.return_value = [
self.project, self.project
]
self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt))

def test_shared_with_alternates(self):
"""Shared project, with alternates."""
self.project.manifest.GetProjectsWithName.return_value = [
self.project, self.project
]
self.project.UseAlternates = True
self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))

def test_not_found(self):
"""Project not found in manifest."""
self.project.manifest.GetProjectsWithName.return_value = []
self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))

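The last two `test_cli_jobs` cases check that absurd job counts get capped when the mocked file-descriptor soft limit is 256. The expected value of 83 is consistent with a cap of roughly `(soft_limit - 5) // 3`, though that exact formula is an inference from the test data rather than a quote from `subcmds/sync.py`:

```python
# Inferred from the parametrize rows above, not from sync.py itself: with a
# soft NOFILE limit of 256, a cap of (soft - 5) // 3 yields exactly 83.
soft_limit = 256
assert (soft_limit - 5) // 3 == 83
```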
27
tests/test_update_manpages.py
Normal file
27
tests/test_update_manpages.py
Normal file
@ -0,0 +1,27 @@
# Copyright 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the update_manpages module."""

import unittest
import tests.update_manpages as um


class UpdateManpagesTest(unittest.TestCase):
"""Tests the update-manpages code."""

def test_replace_regex(self):
"""Check that replace_regex works."""
data = '\n\033[1mSummary\033[m\n'
self.assertEqual(um.replace_regex(data),'\nSummary\n')

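The new `test_replace_regex` feeds `replace_regex` a string containing ANSI bold escape sequences and expects plain text back. A hypothetical stand-in that would satisfy this test (the real implementation lives in `release/update-manpages` and may differ):

```python
import re

def strip_ansi_sgr(data):
    # Strip ANSI SGR escape sequences such as '\033[1m' (bold on) and '\033[m'
    # (reset) that formatted help output embeds in the text.
    return re.sub(r'\033\[[0-9;]*m', '', data)

assert strip_ansi_sgr('\n\033[1mSummary\033[m\n') == '\nSummary\n'
```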
@ -59,12 +59,12 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
def test_python_constraints(self):
"""The launcher should never require newer than main.py."""
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
wrapper.MIN_PYTHON_VERSION_HARD)
self.wrapper.MIN_PYTHON_VERSION_HARD)
self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
wrapper.MIN_PYTHON_VERSION_SOFT)
self.wrapper.MIN_PYTHON_VERSION_SOFT)
# Make sure the versions are themselves in sync.
self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
wrapper.MIN_PYTHON_VERSION_HARD)
self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT,
self.wrapper.MIN_PYTHON_VERSION_HARD)

def test_init_parser(self):
"""Make sure 'init' GetParser works."""
@ -159,7 +159,9 @@ class RunCommand(RepoWrapperTestCase):
def test_capture(self):
"""Check capture_output handling."""
ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
self.assertEqual(ret.stdout, 'hi\n')
# echo command appends OS specific linesep, but on Windows + Git Bash
# we get UNIX ending, so we allow both.
self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n'])

def test_check(self):
"""Check check handling."""
@ -456,7 +458,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
self.assertEqual('refs/heads/stable', rrev)
self.assertEqual(self.REV_LIST[1], lrev)

with self.assertRaises(wrapper.CloneFailure):
with self.assertRaises(self.wrapper.CloneFailure):
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')

def test_explicit_tag(self):
@ -465,7 +467,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
self.assertEqual('refs/tags/v1.0', rrev)
self.assertEqual(self.REV_LIST[1], lrev)

with self.assertRaises(wrapper.CloneFailure):
with self.assertRaises(self.wrapper.CloneFailure):
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')

def test_branch_name(self):
@ -500,7 +502,7 @@ class ResolveRepoRev(GitCheckoutTestCase):

def test_unknown(self):
"""Check unknown ref/commit argument."""
with self.assertRaises(wrapper.CloneFailure):
with self.assertRaises(self.wrapper.CloneFailure):
self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')


@ -551,7 +553,3 @@ class CheckRepoRev(GitCheckoutTestCase):
rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
self.assertEqual('refs/heads/stable', rrev)
self.assertEqual(self.REV_LIST[1], lrev)


if __name__ == '__main__':
unittest.main()

1
tests/update_manpages.py
Symbolic link
@ -0,0 +1 @@
../release/update-manpages

12
tox.ini
@ -15,7 +15,7 @@
# https://tox.readthedocs.io/

[tox]
envlist = py36, py37, py38, py39
envlist = py36, py37, py38, py39, py310

[gh-actions]
python =
@ -23,11 +23,17 @@ python =
3.7: py37
3.8: py38
3.9: py39
3.10: py310

[testenv]
deps = pytest
commands = {envpython} run_tests
deps =
pytest
pytest-timeout
commands = {envpython} run_tests {posargs}
setenv =
GIT_AUTHOR_NAME = Repo test author
GIT_COMMITTER_NAME = Repo test committer
EMAIL = repo@gerrit.nodomain

[pytest]
timeout = 300

14
wrapper.py
@ -12,12 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

try:
from importlib.machinery import SourceFileLoader
_loader = lambda *args: SourceFileLoader(*args).load_module()
except ImportError:
import imp
_loader = lambda *args: imp.load_source(*args)
import importlib.machinery
import importlib.util
import os


@ -31,5 +27,9 @@ _wrapper_module = None
def Wrapper():
global _wrapper_module
if not _wrapper_module:
_wrapper_module = _loader('wrapper', WrapperPath())
modname = 'wrapper'
loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
spec = importlib.util.spec_from_loader(modname, loader)
_wrapper_module = importlib.util.module_from_spec(spec)
loader.exec_module(_wrapper_module)
return _wrapper_module

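The `wrapper.py` hunk drops the Python 2-era `imp` fallback and loads the launcher through the modern `importlib` machinery. The same recipe works for loading any Python source file by path; the helper name and example path below are illustrative, not part of the patch:

```python
import importlib.machinery
import importlib.util

def load_module_from_path(modname, path):
    # Same importlib recipe the new Wrapper() uses: build a loader for the file,
    # wrap it in a spec, materialize a module object, then execute the code in it.
    loader = importlib.machinery.SourceFileLoader(modname, path)
    spec = importlib.util.spec_from_loader(modname, loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    return module

# e.g. load_module_from_path('wrapper', '/path/to/repo')  # path is illustrative
```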