mirror of
https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00
Compare commits
89 Commits
Author | SHA1 | Date | |
---|---|---|---|
a9f11b3cb2 | |||
7bdbde7af8 | |||
b2bd91c99b | |||
3f5ea0b182 | |||
b148ac9d9a | |||
a67df63ef1 | |||
f91074881f | |||
75ee0570da | |||
88b86728a4 | |||
e66291f6d0 | |||
7ba25bedf9 | |||
3794a78b80 | |||
33949c34d2 | |||
8f62fb7bd3 | |||
98ffba1401 | |||
c1b86a2323 | |||
cecd1d864f | |||
fc241240d8 | |||
9f3406ea46 | |||
b1525bffae | |||
685f080d62 | |||
8898e2f26d | |||
52f1e5d911 | |||
8e3d355d44 | |||
4a4776e9ab | |||
2fa715f8b5 | |||
6287543e35 | |||
b0936b0e20 | |||
0b8df7be79 | |||
717ece9d81 | |||
5566ae5dde | |||
2d5a0df798 | |||
f7fc8a95be | |||
1ad7b555df | |||
7e6dd2dff0 | |||
8d070cfb25 | |||
a6053d54f1 | |||
e072a92a9b | |||
7601ee2608 | |||
1f7627fd3c | |||
b42b4746af | |||
e21526754b | |||
60798a32f6 | |||
1d947b3034 | |||
2d113f3546 | |||
de7eae4826 | |||
2fe99e8820 | |||
cd81dd6403 | |||
80d2ceb222 | |||
c5aa4d3528 | |||
bed45f9400 | |||
55e4d464a7 | |||
75cc353380 | |||
c9129d90de | |||
57365c98cc | |||
dc96476af3 | |||
2577cec095 | |||
e48d34659e | |||
ab8f911a67 | |||
608aff7f62 | |||
13657c407d | |||
e4ed8f65f3 | |||
fdb44479f8 | |||
188572170e | |||
d75c669fac | |||
091f893625 | |||
d947858325 | |||
67700e9b90 | |||
a5be53f9c8 | |||
9ed12c5d9c | |||
4f7bdea9d2 | |||
69998b0c6f | |||
5c6eeac8f0 | |||
e98607248e | |||
2f6ab7f5b8 | |||
3a6cd4200e | |||
25f17682ca | |||
8a68ff9605 | |||
297e7c6ee6 | |||
e3b1c45aeb | |||
7119f94aba | |||
01f443d75a | |||
b926116a14 | |||
3ff9decfd4 | |||
14a6674e32 | |||
9779565abf | |||
cf76b1bcec | |||
e00aa6b923 | |||
86d973d24e |
@ -5,6 +5,6 @@
|
||||
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
|
||||
<path>/repo</path>
|
||||
</pydev_pathproperty>
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.4</pydev_property>
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
|
||||
</pydev_project>
|
||||
|
301
.pylintrc
Normal file
301
.pylintrc
Normal file
@ -0,0 +1,301 @@
|
||||
# lint Python modules using external checkers.
|
||||
#
|
||||
# This is the main checker controling the other ones and the reports
|
||||
# generation. It is itself both a raw checker and an astng checker in order
|
||||
# to:
|
||||
# * handle message activation / deactivation at the module level
|
||||
# * handle some basic but necessary stats'data (number of classes, methods...)
|
||||
#
|
||||
[MASTER]
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
|
||||
# Add <file or directory> to the black list. It should be a base name, not a
|
||||
# path. You may set this option multiple times.
|
||||
ignore=SVN
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Set the cache size for astng objects.
|
||||
cache-size=500
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Enable only checker(s) with the given id(s). This option conflicts with the
|
||||
# disable-checker option
|
||||
#enable-checker=
|
||||
|
||||
# Enable all checker(s) except those with the given id(s). This option
|
||||
# conflicts with the enable-checker option
|
||||
#disable-checker=
|
||||
|
||||
# Enable all messages in the listed categories.
|
||||
#enable-msg-cat=
|
||||
|
||||
# Disable all messages in the listed categories.
|
||||
#disable-msg-cat=
|
||||
|
||||
# Enable the message(s) with the given id(s).
|
||||
enable=RP0004
|
||||
|
||||
# Disable the message(s) with the given id(s).
|
||||
disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html
|
||||
output-format=text
|
||||
|
||||
# Include message's id in output
|
||||
include-ids=yes
|
||||
|
||||
# Put messages in a separate file for each module / package specified on the
|
||||
# command line instead of printing them on stdout. Reports (if any) will be
|
||||
# written in a file name "pylint_global.[txt|html]".
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note).You have access to the variables errors warning, statement which
|
||||
# respectivly contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (R0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Add a comment according to your evaluation note. This is used by the global
|
||||
# evaluation report (R0004).
|
||||
comment=no
|
||||
|
||||
# checks for
|
||||
# * unused variables / imports
|
||||
# * undefined variables
|
||||
# * redefinition of variable from builtins or from an outer scope
|
||||
# * use of variable before assigment
|
||||
#
|
||||
[VARIABLES]
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# A regular expression matching names used for dummy variables (i.e. not used).
|
||||
dummy-variables-rgx=_|dummy
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
|
||||
# try to find bugs in the code using type inference
|
||||
#
|
||||
[TYPECHECK]
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# List of classes names for which member attributes should not be checked
|
||||
# (useful for classes with attributes dynamicaly set).
|
||||
ignored-classes=SQLObject
|
||||
|
||||
# When zope mode is activated, consider the acquired-members option to ignore
|
||||
# access to some undefined attributes.
|
||||
zope=no
|
||||
|
||||
# List of members which are usually get through zope's acquisition mecanism and
|
||||
# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
|
||||
acquired-members=REQUEST,acl_users,aq_parent
|
||||
|
||||
|
||||
# checks for :
|
||||
# * doc strings
|
||||
# * modules / classes / functions / methods / arguments / variables name
|
||||
# * number of arguments, local variables, branchs, returns and statements in
|
||||
# functions, methods
|
||||
# * required module attributes
|
||||
# * dangerous default values as arguments
|
||||
# * redefinition of function / method / class
|
||||
# * uses of the global statement
|
||||
#
|
||||
[BASIC]
|
||||
|
||||
# Required attributes for module, separated by a comma
|
||||
required-attributes=
|
||||
|
||||
# Regular expression which should only match functions or classes name which do
|
||||
# not require a docstring
|
||||
no-docstring-rgx=_main|__.*__
|
||||
|
||||
# Regular expression which should only match correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression which should only match correct module level names
|
||||
const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$
|
||||
|
||||
# Regular expression which should only match correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression which should only match correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct instance attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct list comprehension /
|
||||
# generator expression variable names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata
|
||||
|
||||
# List of builtins function names that should not be used, separated by a comma
|
||||
bad-functions=map,filter,apply,input
|
||||
|
||||
|
||||
# checks for sign of poor/misdesign:
|
||||
# * number of methods, attributes, local variables...
|
||||
# * size, complexity of functions, methods
|
||||
#
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branchs=12
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=20
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=30
|
||||
|
||||
|
||||
# checks for
|
||||
# * external modules dependencies
|
||||
# * relative / wildcard imports
|
||||
# * cyclic imports
|
||||
# * uses of deprecated modules
|
||||
#
|
||||
[IMPORTS]
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report R0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of external dependencies in the given file (report R0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report R0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
|
||||
# checks for :
|
||||
# * methods without self as first argument
|
||||
# * overridden methods signature
|
||||
# * access only to existant members via self
|
||||
# * attributes not defined in the __init__ method
|
||||
# * supported interfaces implementation
|
||||
# * unreachable code
|
||||
#
|
||||
[CLASSES]
|
||||
|
||||
# List of interface methods to ignore, separated by a comma. This is used for
|
||||
# instance to not check methods defines in Zope's Interface base class.
|
||||
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
|
||||
# checks for similarities and duplicated code. This computation may be
|
||||
# memory / CPU intensive, so you should disable it if you experiments some
|
||||
# problems.
|
||||
#
|
||||
[SIMILARITIES]
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
|
||||
# checks for:
|
||||
# * warning notes in the code like FIXME, XXX
|
||||
# * PEP 263: source code with non ascii character but no encoding declaration
|
||||
#
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,XXX,TODO
|
||||
|
||||
|
||||
# checks for :
|
||||
# * unauthorized constructions
|
||||
# * strict indentation
|
||||
# * line length
|
||||
# * use of <> instead of !=
|
||||
#
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=80
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab). In repo it is 2 spaces.
|
||||
indent-string=' '
|
@ -2,6 +2,7 @@ Short Version:
|
||||
|
||||
- Make small logical changes.
|
||||
- Provide a meaningful commit message.
|
||||
- Check for coding errors with pylint
|
||||
- Make sure all code is under the Apache License, 2.0.
|
||||
- Publish your changes for review:
|
||||
|
||||
@ -33,7 +34,14 @@ If your description starts to get too long, that's a sign that you
|
||||
probably need to split up your commit to finer grained pieces.
|
||||
|
||||
|
||||
(2) Check the license
|
||||
(2) Check for coding errors with pylint
|
||||
|
||||
Run pylint on changed modules using the provided configuration:
|
||||
|
||||
pylint --rcfile=.pylintrc file.py
|
||||
|
||||
|
||||
(3) Check the license
|
||||
|
||||
repo is licensed under the Apache License, 2.0.
|
||||
|
||||
@ -49,7 +57,7 @@ your patch. It is virtually impossible to remove a patch once it
|
||||
has been applied and pushed out.
|
||||
|
||||
|
||||
(3) Sending your patches.
|
||||
(4) Sending your patches.
|
||||
|
||||
Do not email your patches to anyone.
|
||||
|
||||
|
82
color.py
82
color.py
@ -36,52 +36,56 @@ ATTRS = {None :-1,
|
||||
'blink' : 5,
|
||||
'reverse': 7}
|
||||
|
||||
RESET = "\033[m"
|
||||
RESET = "\033[m" # pylint: disable=W1401
|
||||
# backslash is not anomalous
|
||||
|
||||
def is_color(s): return s in COLORS
|
||||
def is_attr(s): return s in ATTRS
|
||||
def is_color(s):
|
||||
return s in COLORS
|
||||
|
||||
def is_attr(s):
|
||||
return s in ATTRS
|
||||
|
||||
def _Color(fg = None, bg = None, attr = None):
|
||||
fg = COLORS[fg]
|
||||
bg = COLORS[bg]
|
||||
attr = ATTRS[attr]
|
||||
fg = COLORS[fg]
|
||||
bg = COLORS[bg]
|
||||
attr = ATTRS[attr]
|
||||
|
||||
if attr >= 0 or fg >= 0 or bg >= 0:
|
||||
need_sep = False
|
||||
code = "\033["
|
||||
if attr >= 0 or fg >= 0 or bg >= 0:
|
||||
need_sep = False
|
||||
code = "\033[" #pylint: disable=W1401
|
||||
|
||||
if attr >= 0:
|
||||
code += chr(ord('0') + attr)
|
||||
need_sep = True
|
||||
if attr >= 0:
|
||||
code += chr(ord('0') + attr)
|
||||
need_sep = True
|
||||
|
||||
if fg >= 0:
|
||||
if need_sep:
|
||||
code += ';'
|
||||
need_sep = True
|
||||
if fg >= 0:
|
||||
if need_sep:
|
||||
code += ';'
|
||||
need_sep = True
|
||||
|
||||
if fg < 8:
|
||||
code += '3%c' % (ord('0') + fg)
|
||||
else:
|
||||
code += '38;5;%d' % fg
|
||||
if fg < 8:
|
||||
code += '3%c' % (ord('0') + fg)
|
||||
else:
|
||||
code += '38;5;%d' % fg
|
||||
|
||||
if bg >= 0:
|
||||
if need_sep:
|
||||
code += ';'
|
||||
need_sep = True
|
||||
if bg >= 0:
|
||||
if need_sep:
|
||||
code += ';'
|
||||
need_sep = True
|
||||
|
||||
if bg < 8:
|
||||
code += '4%c' % (ord('0') + bg)
|
||||
else:
|
||||
code += '48;5;%d' % bg
|
||||
code += 'm'
|
||||
else:
|
||||
code = ''
|
||||
return code
|
||||
if bg < 8:
|
||||
code += '4%c' % (ord('0') + bg)
|
||||
else:
|
||||
code += '48;5;%d' % bg
|
||||
code += 'm'
|
||||
else:
|
||||
code = ''
|
||||
return code
|
||||
|
||||
|
||||
class Coloring(object):
|
||||
def __init__(self, config, type):
|
||||
self._section = 'color.%s' % type
|
||||
def __init__(self, config, section_type):
|
||||
self._section = 'color.%s' % section_type
|
||||
self._config = config
|
||||
self._out = sys.stdout
|
||||
|
||||
@ -126,8 +130,8 @@ class Coloring(object):
|
||||
if self._on:
|
||||
c = self._parse(opt, fg, bg, attr)
|
||||
def f(fmt, *args):
|
||||
str = fmt % args
|
||||
return ''.join([c, str, RESET])
|
||||
output = fmt % args
|
||||
return ''.join([c, output, RESET])
|
||||
return f
|
||||
else:
|
||||
def f(fmt, *args):
|
||||
@ -151,8 +155,10 @@ class Coloring(object):
|
||||
have_fg = False
|
||||
for a in v.split(' '):
|
||||
if is_color(a):
|
||||
if have_fg: bg = a
|
||||
else: fg = a
|
||||
if have_fg:
|
||||
bg = a
|
||||
else:
|
||||
fg = a
|
||||
elif is_attr(a):
|
||||
attr = a
|
||||
|
||||
|
124
command.py
124
command.py
@ -22,6 +22,7 @@ import sys
|
||||
from error import NoSuchProjectError
|
||||
from error import InvalidProjectGroupsError
|
||||
|
||||
|
||||
class Command(object):
|
||||
"""Base class for any command line action in repo.
|
||||
"""
|
||||
@ -33,6 +34,27 @@ class Command(object):
|
||||
def WantPager(self, opt):
|
||||
return False
|
||||
|
||||
def ReadEnvironmentOptions(self, opts):
|
||||
""" Set options from environment variables. """
|
||||
|
||||
env_options = self._RegisteredEnvironmentOptions()
|
||||
|
||||
for env_key, opt_key in env_options.items():
|
||||
# Get the user-set option value if any
|
||||
opt_value = getattr(opts, opt_key)
|
||||
|
||||
# If the value is set, it means the user has passed it as a command
|
||||
# line option, and we should use that. Otherwise we can try to set it
|
||||
# with the value from the corresponding environment variable.
|
||||
if opt_value is not None:
|
||||
continue
|
||||
|
||||
env_value = os.environ.get(env_key)
|
||||
if env_value is not None:
|
||||
setattr(opts, opt_key, env_value)
|
||||
|
||||
return opts
|
||||
|
||||
@property
|
||||
def OptionParser(self):
|
||||
if self._optparse is None:
|
||||
@ -49,6 +71,24 @@ class Command(object):
|
||||
"""Initialize the option parser.
|
||||
"""
|
||||
|
||||
def _RegisteredEnvironmentOptions(self):
|
||||
"""Get options that can be set from environment variables.
|
||||
|
||||
Return a dictionary mapping environment variable name
|
||||
to option key name that it can override.
|
||||
|
||||
Example: {'REPO_MY_OPTION': 'my_option'}
|
||||
|
||||
Will allow the option with key value 'my_option' to be set
|
||||
from the value in the environment variable named 'REPO_MY_OPTION'.
|
||||
|
||||
Note: This does not work properly for options that are explicitly
|
||||
set to None by the user, or options that are defined with a
|
||||
default value other than None.
|
||||
|
||||
"""
|
||||
return {}
|
||||
|
||||
def Usage(self):
|
||||
"""Display usage and terminate.
|
||||
"""
|
||||
@ -60,10 +100,36 @@ class Command(object):
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def GetProjects(self, args, missing_ok=False):
|
||||
def _ResetPathToProjectMap(self, projects):
|
||||
self._by_path = dict((p.worktree, p) for p in projects)
|
||||
|
||||
def _UpdatePathToProjectMap(self, project):
|
||||
self._by_path[project.worktree] = project
|
||||
|
||||
def _GetProjectByPath(self, path):
|
||||
project = None
|
||||
if os.path.exists(path):
|
||||
oldpath = None
|
||||
while path \
|
||||
and path != oldpath \
|
||||
and path != self.manifest.topdir:
|
||||
try:
|
||||
project = self._by_path[path]
|
||||
break
|
||||
except KeyError:
|
||||
oldpath = path
|
||||
path = os.path.dirname(path)
|
||||
else:
|
||||
try:
|
||||
project = self._by_path[path]
|
||||
except KeyError:
|
||||
pass
|
||||
return project
|
||||
|
||||
def GetProjects(self, args, missing_ok=False, submodules_ok=False):
|
||||
"""A list of projects that match the arguments.
|
||||
"""
|
||||
all = self.manifest.projects
|
||||
all_projects = self.manifest.projects
|
||||
result = []
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
@ -71,43 +137,40 @@ class Command(object):
|
||||
groups = mp.config.GetString('manifest.groups')
|
||||
if not groups:
|
||||
groups = 'all,-notdefault,platform-' + platform.system().lower()
|
||||
groups = [x for x in re.split('[,\s]+', groups) if x]
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
if not args:
|
||||
for project in all.values():
|
||||
all_projects_list = all_projects.values()
|
||||
derived_projects = {}
|
||||
for project in all_projects_list:
|
||||
if submodules_ok or project.sync_s:
|
||||
derived_projects.update((p.name, p)
|
||||
for p in project.GetDerivedSubprojects())
|
||||
all_projects_list.extend(derived_projects.values())
|
||||
for project in all_projects_list:
|
||||
if ((missing_ok or project.Exists) and
|
||||
project.MatchesGroups(groups)):
|
||||
result.append(project)
|
||||
else:
|
||||
by_path = None
|
||||
self._ResetPathToProjectMap(all_projects.values())
|
||||
|
||||
for arg in args:
|
||||
project = all.get(arg)
|
||||
project = all_projects.get(arg)
|
||||
|
||||
if not project:
|
||||
path = os.path.abspath(arg).replace('\\', '/')
|
||||
project = self._GetProjectByPath(path)
|
||||
|
||||
if not by_path:
|
||||
by_path = dict()
|
||||
for p in all.values():
|
||||
by_path[p.worktree] = p
|
||||
|
||||
if os.path.exists(path):
|
||||
oldpath = None
|
||||
while path \
|
||||
and path != oldpath \
|
||||
and path != self.manifest.topdir:
|
||||
try:
|
||||
project = by_path[path]
|
||||
break
|
||||
except KeyError:
|
||||
oldpath = path
|
||||
path = os.path.dirname(path)
|
||||
else:
|
||||
try:
|
||||
project = by_path[path]
|
||||
except KeyError:
|
||||
pass
|
||||
# If it's not a derived project, update path->project mapping and
|
||||
# search again, as arg might actually point to a derived subproject.
|
||||
if (project and not project.Derived and
|
||||
(submodules_ok or project.sync_s)):
|
||||
search_again = False
|
||||
for subproject in project.GetDerivedSubprojects():
|
||||
self._UpdatePathToProjectMap(subproject)
|
||||
search_again = True
|
||||
if search_again:
|
||||
project = self._GetProjectByPath(path) or project
|
||||
|
||||
if not project:
|
||||
raise NoSuchProjectError(arg)
|
||||
@ -123,6 +186,11 @@ class Command(object):
|
||||
result.sort(key=_getpath)
|
||||
return result
|
||||
|
||||
# pylint: disable=W0223
|
||||
# Pylint warns that the `InteractiveCommand` and `PagedCommand` classes do not
|
||||
# override method `Execute` which is abstract in `Command`. Since that method
|
||||
# is always implemented in classes derived from `InteractiveCommand` and
|
||||
# `PagedCommand`, this warning can be suppressed.
|
||||
class InteractiveCommand(Command):
|
||||
"""Command which requires user interaction on the tty and
|
||||
must not run within a pager, even if the user asks to.
|
||||
@ -137,6 +205,8 @@ class PagedCommand(Command):
|
||||
def WantPager(self, opt):
|
||||
return True
|
||||
|
||||
# pylint: enable=W0223
|
||||
|
||||
class MirrorSafeCommand(object):
|
||||
"""Command permits itself to run within a mirror,
|
||||
and does not require a working directory.
|
||||
|
@ -41,17 +41,20 @@ following DTD:
|
||||
<!ATTLIST default revision CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-j CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-s CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT manifest-server (EMPTY)>
|
||||
<!ATTLIST url CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT project (annotation?)>
|
||||
<!ELEMENT project (annotation?,
|
||||
project*)>
|
||||
<!ATTLIST project name CDATA #REQUIRED>
|
||||
<!ATTLIST project path CDATA #IMPLIED>
|
||||
<!ATTLIST project remote IDREF #IMPLIED>
|
||||
<!ATTLIST project revision CDATA #IMPLIED>
|
||||
<!ATTLIST project groups CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-s CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT annotation (EMPTY)>
|
||||
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||
@ -152,7 +155,10 @@ Element project
|
||||
|
||||
One or more project elements may be specified. Each element
|
||||
describes a single Git repository to be cloned into the repo
|
||||
client workspace.
|
||||
client workspace. You may specify Git-submodules by creating a
|
||||
nested project. Git-submodules will be automatically
|
||||
recognized and inherit their parent's attributes, but those
|
||||
may be overridden by an explicitly specified project element.
|
||||
|
||||
Attribute `name`: A unique name for this project. The project's
|
||||
name is appended onto its remote's fetch URL to generate the actual
|
||||
@ -163,7 +169,8 @@ URL to configure the Git remote with. The URL gets formed as:
|
||||
where ${remote_fetch} is the remote's fetch attribute and
|
||||
${project_name} is the project's name attribute. The suffix ".git"
|
||||
is always appended as repo assumes the upstream is a forest of
|
||||
bare Git repositories.
|
||||
bare Git repositories. If the project has a parent element, its
|
||||
name will be prefixed by the parent's.
|
||||
|
||||
The project name must match the name Gerrit knows, if Gerrit is
|
||||
being used for code reviews.
|
||||
@ -171,6 +178,8 @@ being used for code reviews.
|
||||
Attribute `path`: An optional path relative to the top directory
|
||||
of the repo client where the Git working directory for this project
|
||||
should be placed. If not supplied the project name is used.
|
||||
If the project has a parent element, its path will be prefixed
|
||||
by the parent's.
|
||||
|
||||
Attribute `remote`: Name of a previously defined remote element.
|
||||
If not supplied the remote given by the default element is used.
|
||||
@ -190,6 +199,8 @@ its name:`name` and path:`path`. E.g. for
|
||||
definition is implicitly in the following manifest groups:
|
||||
default, name:monkeys, and path:barrel-of. If you place a project in the
|
||||
group "notdefault", it will not be automatically downloaded by repo.
|
||||
If the project has a parent element, the `name` and `path` here
|
||||
are the prefixed ones.
|
||||
|
||||
Element annotation
|
||||
------------------
|
||||
@ -209,7 +220,7 @@ Deletes the named project from the internal manifest table, possibly
|
||||
allowing a subsequent project element in the same manifest file to
|
||||
replace the project with a different source.
|
||||
|
||||
This element is mostly useful in the local_manifest.xml, where
|
||||
This element is mostly useful in a local manifest file, where
|
||||
the user can remove a project, and possibly replace it with their
|
||||
own definition.
|
||||
|
||||
@ -218,21 +229,25 @@ Element include
|
||||
|
||||
This element provides the capability of including another manifest
|
||||
file into the originating manifest. Normal rules apply for the
|
||||
target manifest to include- it must be a usable manifest on it's own.
|
||||
target manifest to include - it must be a usable manifest on its own.
|
||||
|
||||
Attribute `name`; the manifest to include, specified relative to
|
||||
the manifest repositories root.
|
||||
Attribute `name`: the manifest to include, specified relative to
|
||||
the manifest repository's root.
|
||||
|
||||
|
||||
Local Manifest
|
||||
==============
|
||||
Local Manifests
|
||||
===============
|
||||
|
||||
Additional remotes and projects may be added through a local
|
||||
manifest, stored in `$TOP_DIR/.repo/local_manifest.xml`.
|
||||
Additional remotes and projects may be added through local manifest
|
||||
files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
|
||||
|
||||
For example:
|
||||
|
||||
$ cat .repo/local_manifest.xml
|
||||
$ ls .repo/local_manifests
|
||||
local_manifest.xml
|
||||
another_local_manifest.xml
|
||||
|
||||
$ cat .repo/local_manifests/local_manifest.xml
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<manifest>
|
||||
<project path="manifest"
|
||||
@ -241,6 +256,17 @@ For example:
|
||||
name="platform/manifest" />
|
||||
</manifest>
|
||||
|
||||
Users may add projects to the local manifest prior to a `repo sync`
|
||||
Users may add projects to the local manifest(s) prior to a `repo sync`
|
||||
invocation, instructing repo to automatically download and manage
|
||||
these extra projects.
|
||||
|
||||
Manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml` will
|
||||
be loaded in alphabetical order.
|
||||
|
||||
Additional remotes and projects may also be added through a local
|
||||
manifest, stored in `$TOP_DIR/.repo/local_manifest.xml`. This method
|
||||
is deprecated in favor of using multiple manifest files as mentioned
|
||||
above.
|
||||
|
||||
If `$TOP_DIR/.repo/local_manifest.xml` exists, it will be loaded before
|
||||
any manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@ -53,10 +54,10 @@ class Editor(object):
|
||||
return e
|
||||
|
||||
if os.getenv('TERM') == 'dumb':
|
||||
print >>sys.stderr,\
|
||||
print(
|
||||
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
|
||||
Tried to fall back to vi but terminal is dumb. Please configure at
|
||||
least one of these before using this command."""
|
||||
least one of these before using this command.""", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
return 'vi'
|
||||
@ -67,7 +68,7 @@ least one of these before using this command."""
|
||||
|
||||
Args:
|
||||
data : the text to edit
|
||||
|
||||
|
||||
Returns:
|
||||
new value of edited text; None if editing did not succeed
|
||||
"""
|
||||
@ -91,7 +92,7 @@ least one of these before using this command."""
|
||||
|
||||
try:
|
||||
rc = subprocess.Popen(args, shell=shell).wait()
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
raise EditorError('editor failed, %s: %s %s'
|
||||
% (str(e), editor, path))
|
||||
if rc != 0:
|
||||
|
25
error.py
25
error.py
@ -21,10 +21,15 @@ class ManifestInvalidRevisionError(Exception):
|
||||
"""The revision value in a project is incorrect.
|
||||
"""
|
||||
|
||||
class NoManifestException(Exception):
|
||||
"""The required manifest does not exist.
|
||||
"""
|
||||
|
||||
class EditorError(Exception):
|
||||
"""Unspecified error from the user's text editor.
|
||||
"""
|
||||
def __init__(self, reason):
|
||||
super(EditorError, self).__init__()
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
@ -34,24 +39,17 @@ class GitError(Exception):
|
||||
"""Unspecified internal error from git.
|
||||
"""
|
||||
def __init__(self, command):
|
||||
super(GitError, self).__init__()
|
||||
self.command = command
|
||||
|
||||
def __str__(self):
|
||||
return self.command
|
||||
|
||||
class ImportError(Exception):
|
||||
"""An import from a non-Git format cannot be performed.
|
||||
"""
|
||||
def __init__(self, reason):
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
class UploadError(Exception):
|
||||
"""A bundle upload to Gerrit did not succeed.
|
||||
"""
|
||||
def __init__(self, reason):
|
||||
super(UploadError, self).__init__()
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
@ -61,6 +59,7 @@ class DownloadError(Exception):
|
||||
"""Cannot download a repository.
|
||||
"""
|
||||
def __init__(self, reason):
|
||||
super(DownloadError, self).__init__()
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
@ -70,6 +69,7 @@ class NoSuchProjectError(Exception):
|
||||
"""A specified project does not exist in the work tree.
|
||||
"""
|
||||
def __init__(self, name=None):
|
||||
super(NoSuchProjectError, self).__init__()
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
@ -82,6 +82,7 @@ class InvalidProjectGroupsError(Exception):
|
||||
"""A specified project is not suitable for the specified groups
|
||||
"""
|
||||
def __init__(self, name=None):
|
||||
super(InvalidProjectGroupsError, self).__init__()
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
@ -94,12 +95,12 @@ class RepoChangedException(Exception):
|
||||
repo or manifest repositories. In this special case we must
|
||||
use exec to re-execute repo with the new code and manifest.
|
||||
"""
|
||||
def __init__(self, extra_args=[]):
|
||||
self.extra_args = extra_args
|
||||
def __init__(self, extra_args=None):
|
||||
super(RepoChangedException, self).__init__()
|
||||
self.extra_args = extra_args or []
|
||||
|
||||
class HookError(Exception):
|
||||
"""Thrown if a 'repo-hook' could not be run.
|
||||
|
||||
The common case is that the file wasn't present when we tried to run it.
|
||||
"""
|
||||
pass
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
@ -37,11 +38,11 @@ def ssh_sock(create=True):
|
||||
if _ssh_sock_path is None:
|
||||
if not create:
|
||||
return None
|
||||
dir = '/tmp'
|
||||
if not os.path.exists(dir):
|
||||
dir = tempfile.gettempdir()
|
||||
tmp_dir = '/tmp'
|
||||
if not os.path.exists(tmp_dir):
|
||||
tmp_dir = tempfile.gettempdir()
|
||||
_ssh_sock_path = os.path.join(
|
||||
tempfile.mkdtemp('', 'ssh-', dir),
|
||||
tempfile.mkdtemp('', 'ssh-', tmp_dir),
|
||||
'master-%r@%h:%p')
|
||||
return _ssh_sock_path
|
||||
|
||||
@ -88,11 +89,11 @@ class _GitCall(object):
|
||||
ver_str = git.version()
|
||||
if ver_str.startswith('git version '):
|
||||
_git_version = tuple(
|
||||
map(lambda x: int(x),
|
||||
map(int,
|
||||
ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
|
||||
))
|
||||
else:
|
||||
print >>sys.stderr, 'fatal: "%s" unsupported' % ver_str
|
||||
print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return _git_version
|
||||
|
||||
@ -110,8 +111,8 @@ def git_require(min_version, fail=False):
|
||||
if min_version <= git_version:
|
||||
return True
|
||||
if fail:
|
||||
need = '.'.join(map(lambda x: str(x), min_version))
|
||||
print >>sys.stderr, 'fatal: git %s or later required' % need
|
||||
need = '.'.join(map(str, min_version))
|
||||
print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return False
|
||||
|
||||
@ -132,15 +133,15 @@ class GitCommand(object):
|
||||
gitdir = None):
|
||||
env = os.environ.copy()
|
||||
|
||||
for e in [REPO_TRACE,
|
||||
for key in [REPO_TRACE,
|
||||
GIT_DIR,
|
||||
'GIT_ALTERNATE_OBJECT_DIRECTORIES',
|
||||
'GIT_OBJECT_DIRECTORY',
|
||||
'GIT_WORK_TREE',
|
||||
'GIT_GRAFT_FILE',
|
||||
'GIT_INDEX_FILE']:
|
||||
if e in env:
|
||||
del env[e]
|
||||
if key in env:
|
||||
del env[key]
|
||||
|
||||
if disable_editor:
|
||||
_setenv(env, 'GIT_EDITOR', ':')
|
||||
@ -217,7 +218,7 @@ class GitCommand(object):
|
||||
stdin = stdin,
|
||||
stdout = stdout,
|
||||
stderr = stderr)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise GitError('%s: %s' % (command[1], e))
|
||||
|
||||
if ssh_proxy:
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import cPickle
|
||||
import os
|
||||
import re
|
||||
@ -23,7 +24,18 @@ try:
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
import time
|
||||
import urllib2
|
||||
try:
|
||||
import urllib2
|
||||
except ImportError:
|
||||
# For python3
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
else:
|
||||
# For python2
|
||||
import imp
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
urllib.error = urllib2
|
||||
|
||||
from signal import SIGTERM
|
||||
from error import GitError, UploadError
|
||||
@ -35,7 +47,7 @@ from git_command import terminate_ssh_clients
|
||||
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
ID_RE = re.compile('^[0-9a-f]{40}$')
|
||||
ID_RE = re.compile(r'^[0-9a-f]{40}$')
|
||||
|
||||
REVIEW_CACHE = dict()
|
||||
|
||||
@ -56,16 +68,16 @@ class GitConfig(object):
|
||||
@classmethod
|
||||
def ForUser(cls):
|
||||
if cls._ForUser is None:
|
||||
cls._ForUser = cls(file = os.path.expanduser('~/.gitconfig'))
|
||||
cls._ForUser = cls(configfile = os.path.expanduser('~/.gitconfig'))
|
||||
return cls._ForUser
|
||||
|
||||
@classmethod
|
||||
def ForRepository(cls, gitdir, defaults=None):
|
||||
return cls(file = os.path.join(gitdir, 'config'),
|
||||
return cls(configfile = os.path.join(gitdir, 'config'),
|
||||
defaults = defaults)
|
||||
|
||||
def __init__(self, file, defaults=None, pickleFile=None):
|
||||
self.file = file
|
||||
def __init__(self, configfile, defaults=None, pickleFile=None):
|
||||
self.file = configfile
|
||||
self.defaults = defaults
|
||||
self._cache_dict = None
|
||||
self._section_dict = None
|
||||
@ -104,20 +116,20 @@ class GitConfig(object):
|
||||
return False
|
||||
return None
|
||||
|
||||
def GetString(self, name, all=False):
|
||||
def GetString(self, name, all_keys=False):
|
||||
"""Get the first value for a key, or None if it is not defined.
|
||||
|
||||
This configuration file is used first, if the key is not
|
||||
defined or all = True then the defaults are also searched.
|
||||
defined or all_keys = True then the defaults are also searched.
|
||||
"""
|
||||
try:
|
||||
v = self._cache[_key(name)]
|
||||
except KeyError:
|
||||
if self.defaults:
|
||||
return self.defaults.GetString(name, all = all)
|
||||
return self.defaults.GetString(name, all_keys = all_keys)
|
||||
v = []
|
||||
|
||||
if not all:
|
||||
if not all_keys:
|
||||
if v:
|
||||
return v[0]
|
||||
return None
|
||||
@ -125,7 +137,7 @@ class GitConfig(object):
|
||||
r = []
|
||||
r.extend(v)
|
||||
if self.defaults:
|
||||
r.extend(self.defaults.GetString(name, all = True))
|
||||
r.extend(self.defaults.GetString(name, all_keys = True))
|
||||
return r
|
||||
|
||||
def SetString(self, name, value):
|
||||
@ -157,7 +169,7 @@ class GitConfig(object):
|
||||
elif old != value:
|
||||
self._cache[key] = list(value)
|
||||
self._do('--replace-all', name, value[0])
|
||||
for i in xrange(1, len(value)):
|
||||
for i in range(1, len(value)):
|
||||
self._do('--add', name, value[i])
|
||||
|
||||
elif len(old) != 1 or old[0] != value:
|
||||
@ -288,12 +300,13 @@ class GitConfig(object):
|
||||
d = self._do('--null', '--list')
|
||||
if d is None:
|
||||
return c
|
||||
for line in d.rstrip('\0').split('\0'):
|
||||
for line in d.rstrip('\0').split('\0'): # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
if '\n' in line:
|
||||
key, val = line.split('\n', 1)
|
||||
key, val = line.split('\n', 1)
|
||||
else:
|
||||
key = line
|
||||
val = None
|
||||
key = line
|
||||
val = None
|
||||
|
||||
if key in c:
|
||||
c[key].append(val)
|
||||
@ -418,7 +431,7 @@ def _open_ssh(host, port=None):
|
||||
'-o','ControlPath %s' % ssh_sock(),
|
||||
host]
|
||||
if port is not None:
|
||||
command_base[1:1] = ['-p',str(port)]
|
||||
command_base[1:1] = ['-p', str(port)]
|
||||
|
||||
# Since the key wasn't in _master_keys, we think that master isn't running.
|
||||
# ...but before actually starting a master, we'll double-check. This can
|
||||
@ -449,11 +462,10 @@ def _open_ssh(host, port=None):
|
||||
try:
|
||||
Trace(': %s', ' '.join(command))
|
||||
p = subprocess.Popen(command)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
_ssh_master = False
|
||||
print >>sys.stderr, \
|
||||
'\nwarn: cannot enable ssh control master for %s:%s\n%s' \
|
||||
% (host,port, str(e))
|
||||
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
|
||||
% (host,port, str(e)), file=sys.stderr)
|
||||
return False
|
||||
|
||||
_master_processes.append(p)
|
||||
@ -525,8 +537,8 @@ class Remote(object):
|
||||
self.url = self._Get('url')
|
||||
self.review = self._Get('review')
|
||||
self.projectname = self._Get('projectname')
|
||||
self.fetch = map(lambda x: RefSpec.FromString(x),
|
||||
self._Get('fetch', all=True))
|
||||
self.fetch = map(RefSpec.FromString,
|
||||
self._Get('fetch', all_keys=True))
|
||||
self._review_url = None
|
||||
|
||||
def _InsteadOf(self):
|
||||
@ -537,7 +549,7 @@ class Remote(object):
|
||||
|
||||
for url in urlList:
|
||||
key = "url." + url + ".insteadOf"
|
||||
insteadOfList = globCfg.GetString(key, all=True)
|
||||
insteadOfList = globCfg.GetString(key, all_keys=True)
|
||||
|
||||
for insteadOf in insteadOfList:
|
||||
if self.url.startswith(insteadOf) \
|
||||
@ -567,7 +579,7 @@ class Remote(object):
|
||||
if u.endswith('/ssh_info'):
|
||||
u = u[:len(u) - len('/ssh_info')]
|
||||
if not u.endswith('/'):
|
||||
u += '/'
|
||||
u += '/'
|
||||
http_url = u
|
||||
|
||||
if u in REVIEW_CACHE:
|
||||
@ -579,7 +591,7 @@ class Remote(object):
|
||||
else:
|
||||
try:
|
||||
info_url = u + 'ssh_info'
|
||||
info = urllib2.urlopen(info_url).read()
|
||||
info = urllib.request.urlopen(info_url).read()
|
||||
if '<' in info:
|
||||
# Assume the server gave us some sort of HTML
|
||||
# response back, like maybe a login page.
|
||||
@ -592,9 +604,9 @@ class Remote(object):
|
||||
else:
|
||||
host, port = info.split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
except urllib2.HTTPError, e:
|
||||
except urllib.error.HTTPError as e:
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
except urllib2.URLError, e:
|
||||
except urllib.error.URLError as e:
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
@ -645,15 +657,15 @@ class Remote(object):
|
||||
self._Set('url', self.url)
|
||||
self._Set('review', self.review)
|
||||
self._Set('projectname', self.projectname)
|
||||
self._Set('fetch', map(lambda x: str(x), self.fetch))
|
||||
self._Set('fetch', map(str, self.fetch))
|
||||
|
||||
def _Set(self, key, value):
|
||||
key = 'remote.%s.%s' % (self.name, key)
|
||||
return self._config.SetString(key, value)
|
||||
|
||||
def _Get(self, key, all=False):
|
||||
def _Get(self, key, all_keys=False):
|
||||
key = 'remote.%s.%s' % (self.name, key)
|
||||
return self._config.GetString(key, all = all)
|
||||
return self._config.GetString(key, all_keys = all_keys)
|
||||
|
||||
|
||||
class Branch(object):
|
||||
@ -703,6 +715,6 @@ class Branch(object):
|
||||
key = 'branch.%s.%s' % (self.name, key)
|
||||
return self._config.SetString(key, value)
|
||||
|
||||
def _Get(self, key, all=False):
|
||||
def _Get(self, key, all_keys=False):
|
||||
key = 'branch.%s.%s' % (self.name, key)
|
||||
return self._config.GetString(key, all = all)
|
||||
return self._config.GetString(key, all_keys = all_keys)
|
||||
|
20
git_refs.py
20
git_refs.py
@ -115,10 +115,10 @@ class GitRefs(object):
|
||||
|
||||
line = line[:-1]
|
||||
p = line.split(' ')
|
||||
id = p[0]
|
||||
ref_id = p[0]
|
||||
name = p[1]
|
||||
|
||||
self._phyref[name] = id
|
||||
self._phyref[name] = ref_id
|
||||
finally:
|
||||
fd.close()
|
||||
self._mtime['packed-refs'] = mtime
|
||||
@ -138,24 +138,24 @@ class GitRefs(object):
|
||||
def _ReadLoose1(self, path, name):
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
except:
|
||||
except IOError:
|
||||
return
|
||||
|
||||
try:
|
||||
try:
|
||||
mtime = os.path.getmtime(path)
|
||||
id = fd.readline()
|
||||
except:
|
||||
ref_id = fd.readline()
|
||||
except (IOError, OSError):
|
||||
return
|
||||
finally:
|
||||
fd.close()
|
||||
|
||||
if not id:
|
||||
if not ref_id:
|
||||
return
|
||||
id = id[:-1]
|
||||
ref_id = ref_id[:-1]
|
||||
|
||||
if id.startswith('ref: '):
|
||||
self._symref[name] = id[5:]
|
||||
if ref_id.startswith('ref: '):
|
||||
self._symref[name] = ref_id[5:]
|
||||
else:
|
||||
self._phyref[name] = id
|
||||
self._phyref[name] = ref_id
|
||||
self._mtime[name] = mtime
|
||||
|
147
hooks/commit-msg
147
hooks/commit-msg
@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 2.1.2-rc2-33-g7e30c72
|
||||
# From Gerrit Code Review 2.5-rc0
|
||||
#
|
||||
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
|
||||
#
|
||||
@ -24,71 +24,144 @@ MSG="$1"
|
||||
# Check for, and add if missing, a unique Change-Id
|
||||
#
|
||||
add_ChangeId() {
|
||||
clean_message=$(sed -e '
|
||||
clean_message=`sed -e '
|
||||
/^diff --git a\/.*/{
|
||||
s///
|
||||
q
|
||||
}
|
||||
/^Signed-off-by:/d
|
||||
/^#/d
|
||||
' "$MSG" | git stripspace)
|
||||
' "$MSG" | git stripspace`
|
||||
if test -z "$clean_message"
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
# Does Change-Id: already exist? if so, exit (no change).
|
||||
if grep -i '^Change-Id:' "$MSG" >/dev/null
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
id=$(_gen_ChangeId)
|
||||
perl -e '
|
||||
$MSG = shift;
|
||||
$id = shift;
|
||||
$CHANGE_ID_AFTER = shift;
|
||||
id=`_gen_ChangeId`
|
||||
T="$MSG.tmp.$$"
|
||||
AWK=awk
|
||||
if [ -x /usr/xpg4/bin/awk ]; then
|
||||
# Solaris AWK is just too broken
|
||||
AWK=/usr/xpg4/bin/awk
|
||||
fi
|
||||
|
||||
undef $/;
|
||||
open(I, $MSG); $_ = <I>; close I;
|
||||
s|^diff --git a/.*||ms;
|
||||
s|^#.*$||mg;
|
||||
exit unless $_;
|
||||
# How this works:
|
||||
# - parse the commit message as (textLine+ blankLine*)*
|
||||
# - assume textLine+ to be a footer until proven otherwise
|
||||
# - exception: the first block is not footer (as it is the title)
|
||||
# - read textLine+ into a variable
|
||||
# - then count blankLines
|
||||
# - once the next textLine appears, print textLine+ blankLine* as these
|
||||
# aren't footer
|
||||
# - in END, the last textLine+ block is available for footer parsing
|
||||
$AWK '
|
||||
BEGIN {
|
||||
# while we start with the assumption that textLine+
|
||||
# is a footer, the first block is not.
|
||||
isFooter = 0
|
||||
footerComment = 0
|
||||
blankLines = 0
|
||||
}
|
||||
|
||||
@message = split /\n/;
|
||||
$haveFooter = 0;
|
||||
$startFooter = @message;
|
||||
for($line = @message - 1; $line >= 0; $line--) {
|
||||
$_ = $message[$line];
|
||||
# Skip lines starting with "#" without any spaces before it.
|
||||
/^#/ { next }
|
||||
|
||||
($haveFooter++, next) if /^[a-zA-Z0-9-]+:/;
|
||||
next if /^[ []/;
|
||||
$startFooter = $line if ($haveFooter && /^\r?$/);
|
||||
last;
|
||||
# Skip the line starting with the diff command and everything after it,
|
||||
# up to the end of the file, assuming it is only patch data.
|
||||
# If more than one line before the diff was empty, strip all but one.
|
||||
/^diff --git a/ {
|
||||
blankLines = 0
|
||||
while (getline) { }
|
||||
next
|
||||
}
|
||||
|
||||
# Count blank lines outside footer comments
|
||||
/^$/ && (footerComment == 0) {
|
||||
blankLines++
|
||||
next
|
||||
}
|
||||
|
||||
# Catch footer comment
|
||||
/^\[[a-zA-Z0-9-]+:/ && (isFooter == 1) {
|
||||
footerComment = 1
|
||||
}
|
||||
|
||||
/]$/ && (footerComment == 1) {
|
||||
footerComment = 2
|
||||
}
|
||||
|
||||
# We have a non-blank line after blank lines. Handle this.
|
||||
(blankLines > 0) {
|
||||
print lines
|
||||
for (i = 0; i < blankLines; i++) {
|
||||
print ""
|
||||
}
|
||||
|
||||
@footer = @message[$startFooter+1..@message];
|
||||
@message = @message[0..$startFooter];
|
||||
push(@footer, "") unless @footer;
|
||||
lines = ""
|
||||
blankLines = 0
|
||||
isFooter = 1
|
||||
footerComment = 0
|
||||
}
|
||||
|
||||
for ($line = 0; $line < @footer; $line++) {
|
||||
$_ = $footer[$line];
|
||||
next if /^($CHANGE_ID_AFTER):/i;
|
||||
last;
|
||||
# Detect that the current block is not the footer
|
||||
(footerComment == 0) && (!/^\[?[a-zA-Z0-9-]+:/ || /^[a-zA-Z0-9-]+:\/\//) {
|
||||
isFooter = 0
|
||||
}
|
||||
|
||||
{
|
||||
# We need this information about the current last comment line
|
||||
if (footerComment == 2) {
|
||||
footerComment = 0
|
||||
}
|
||||
splice(@footer, $line, 0, "Change-Id: I$id");
|
||||
if (lines != "") {
|
||||
lines = lines "\n";
|
||||
}
|
||||
lines = lines $0
|
||||
}
|
||||
|
||||
$_ = join("\n", @message, @footer);
|
||||
open(O, ">$MSG"); print O; close O;
|
||||
' "$MSG" "$id" "$CHANGE_ID_AFTER"
|
||||
# Footer handling:
|
||||
# If the last block is considered a footer, splice in the Change-Id at the
|
||||
# right place.
|
||||
# Look for the right place to inject Change-Id by considering
|
||||
# CHANGE_ID_AFTER. Keys listed in it (case insensitive) come first,
|
||||
# then Change-Id, then everything else (eg. Signed-off-by:).
|
||||
#
|
||||
# Otherwise just print the last block, a new line and the Change-Id as a
|
||||
# block of its own.
|
||||
END {
|
||||
unprinted = 1
|
||||
if (isFooter == 0) {
|
||||
print lines "\n"
|
||||
lines = ""
|
||||
}
|
||||
changeIdAfter = "^(" tolower("'"$CHANGE_ID_AFTER"'") "):"
|
||||
numlines = split(lines, footer, "\n")
|
||||
for (line = 1; line <= numlines; line++) {
|
||||
if (unprinted && match(tolower(footer[line]), changeIdAfter) != 1) {
|
||||
unprinted = 0
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
print footer[line]
|
||||
}
|
||||
if (unprinted) {
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
}' "$MSG" > $T && mv $T "$MSG" || rm -f $T
|
||||
}
|
||||
_gen_ChangeIdInput() {
|
||||
echo "tree $(git write-tree)"
|
||||
if parent=$(git rev-parse HEAD^0 2>/dev/null)
|
||||
echo "tree `git write-tree`"
|
||||
if parent=`git rev-parse "HEAD^0" 2>/dev/null`
|
||||
then
|
||||
echo "parent $parent"
|
||||
fi
|
||||
echo "author $(git var GIT_AUTHOR_IDENT)"
|
||||
echo "committer $(git var GIT_COMMITTER_IDENT)"
|
||||
echo "author `git var GIT_AUTHOR_IDENT`"
|
||||
echo "committer `git var GIT_COMMITTER_IDENT`"
|
||||
echo
|
||||
printf '%s' "$clean_message"
|
||||
}
|
||||
|
194
main.py
194
main.py
@ -1,4 +1,4 @@
|
||||
#!/bin/sh
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
@ -14,21 +14,23 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
magic='--calling-python-from-/bin/sh--'
|
||||
"""exec" python -E "$0" "$@" """#$magic"
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
if sys.argv[-1] == '#%s' % magic:
|
||||
del sys.argv[-1]
|
||||
del magic
|
||||
|
||||
from __future__ import print_function
|
||||
import getpass
|
||||
import imp
|
||||
import netrc
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import urllib2
|
||||
try:
|
||||
import urllib2
|
||||
except ImportError:
|
||||
# For python3
|
||||
import urllib.request
|
||||
else:
|
||||
# For python2
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
|
||||
from trace import SetTrace
|
||||
from git_command import git, GitCommand
|
||||
@ -39,12 +41,14 @@ from subcmds.version import Version
|
||||
from editor import Editor
|
||||
from error import DownloadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import ManifestParseError
|
||||
from error import NoManifestException
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
from manifest_xml import XmlManifest
|
||||
from pager import RunPager
|
||||
|
||||
from subcmds import all as all_commands
|
||||
from subcmds import all_commands
|
||||
|
||||
global_options = optparse.OptionParser(
|
||||
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
|
||||
@ -77,7 +81,7 @@ class _Repo(object):
|
||||
name = None
|
||||
glob = []
|
||||
|
||||
for i in xrange(0, len(argv)):
|
||||
for i in range(len(argv)):
|
||||
if not argv[i].startswith('-'):
|
||||
name = argv[i]
|
||||
if i > 0:
|
||||
@ -88,7 +92,7 @@ class _Repo(object):
|
||||
glob = argv
|
||||
name = 'help'
|
||||
argv = []
|
||||
gopts, gargs = global_options.parse_args(glob)
|
||||
gopts, _gargs = global_options.parse_args(glob)
|
||||
|
||||
if gopts.trace:
|
||||
SetTrace()
|
||||
@ -96,15 +100,14 @@ class _Repo(object):
|
||||
if name == 'help':
|
||||
name = 'version'
|
||||
else:
|
||||
print >>sys.stderr, 'fatal: invalid usage of --version'
|
||||
print('fatal: invalid usage of --version', file=sys.stderr)
|
||||
return 1
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
except KeyError:
|
||||
print >>sys.stderr,\
|
||||
"repo: '%s' is not a repo command. See 'repo help'."\
|
||||
% name
|
||||
print("repo: '%s' is not a repo command. See 'repo help'." % name,
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
cmd.repodir = self.repodir
|
||||
@ -112,12 +115,12 @@ class _Repo(object):
|
||||
Editor.globalConfig = cmd.manifest.globalConfig
|
||||
|
||||
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
|
||||
print >>sys.stderr, \
|
||||
"fatal: '%s' requires a working directory"\
|
||||
% name
|
||||
print("fatal: '%s' requires a working directory" % name,
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
@ -130,33 +133,35 @@ class _Repo(object):
|
||||
if use_pager:
|
||||
RunPager(config)
|
||||
|
||||
start = time.time()
|
||||
try:
|
||||
start = time.time()
|
||||
try:
|
||||
result = cmd.Execute(copts, cargs)
|
||||
finally:
|
||||
elapsed = time.time() - start
|
||||
hours, remainder = divmod(elapsed, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
if gopts.time:
|
||||
if hours == 0:
|
||||
print >>sys.stderr, 'real\t%dm%.3fs' \
|
||||
% (minutes, seconds)
|
||||
else:
|
||||
print >>sys.stderr, 'real\t%dh%dm%.3fs' \
|
||||
% (hours, minutes, seconds)
|
||||
except DownloadError, e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
return 1
|
||||
except ManifestInvalidRevisionError, e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
return 1
|
||||
except NoSuchProjectError, e:
|
||||
result = cmd.Execute(copts, cargs)
|
||||
except DownloadError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except ManifestInvalidRevisionError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except NoManifestException as e:
|
||||
print('error: manifest required for this command -- please run init',
|
||||
file=sys.stderr)
|
||||
result = 1
|
||||
except NoSuchProjectError as e:
|
||||
if e.name:
|
||||
print >>sys.stderr, 'error: project %s not found' % e.name
|
||||
print('error: project %s not found' % e.name, file=sys.stderr)
|
||||
else:
|
||||
print >>sys.stderr, 'error: no project in current directory'
|
||||
return 1
|
||||
print('error: no project in current directory', file=sys.stderr)
|
||||
result = 1
|
||||
finally:
|
||||
elapsed = time.time() - start
|
||||
hours, remainder = divmod(elapsed, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
if gopts.time:
|
||||
if hours == 0:
|
||||
print('real\t%dm%.3fs' % (minutes, seconds), file=sys.stderr)
|
||||
else:
|
||||
print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
|
||||
file=sys.stderr)
|
||||
|
||||
return result
|
||||
|
||||
@ -166,53 +171,51 @@ def _MyRepoPath():
|
||||
def _MyWrapperPath():
|
||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
||||
|
||||
_wrapper_module = None
|
||||
def WrapperModule():
|
||||
global _wrapper_module
|
||||
if not _wrapper_module:
|
||||
_wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
|
||||
return _wrapper_module
|
||||
|
||||
def _CurrentWrapperVersion():
|
||||
VERSION = None
|
||||
pat = re.compile(r'^VERSION *=')
|
||||
fd = open(_MyWrapperPath())
|
||||
for line in fd:
|
||||
if pat.match(line):
|
||||
fd.close()
|
||||
exec line
|
||||
return VERSION
|
||||
raise NameError, 'No VERSION in repo script'
|
||||
return WrapperModule().VERSION
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
if not repo_path:
|
||||
repo_path = '~/bin/repo'
|
||||
|
||||
if not ver:
|
||||
print >>sys.stderr, 'no --wrapper-version argument'
|
||||
sys.exit(1)
|
||||
print('no --wrapper-version argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
exp = _CurrentWrapperVersion()
|
||||
ver = tuple(map(lambda x: int(x), ver.split('.')))
|
||||
ver = tuple(map(int, ver.split('.')))
|
||||
if len(ver) == 1:
|
||||
ver = (0, ver[0])
|
||||
|
||||
exp_str = '.'.join(map(str, exp))
|
||||
if exp[0] > ver[0] or ver < (0, 4):
|
||||
exp_str = '.'.join(map(lambda x: str(x), exp))
|
||||
print >>sys.stderr, """
|
||||
print("""
|
||||
!!! A new repo command (%5s) is available. !!!
|
||||
!!! You must upgrade before you can continue: !!!
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path)
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if exp > ver:
|
||||
exp_str = '.'.join(map(lambda x: str(x), exp))
|
||||
print >>sys.stderr, """
|
||||
print("""
|
||||
... A new repo command (%5s) is available.
|
||||
... You should upgrade soon:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path)
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
|
||||
def _CheckRepoDir(dir):
|
||||
if not dir:
|
||||
print >>sys.stderr, 'no --repo-dir argument'
|
||||
sys.exit(1)
|
||||
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
print('no --repo-dir argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _PruneOptions(argv, opt):
|
||||
i = 0
|
||||
@ -263,11 +266,11 @@ def _UserAgent():
|
||||
_user_agent = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||
repo_version,
|
||||
os_name,
|
||||
'.'.join(map(lambda d: str(d), git.version_tuple())),
|
||||
'.'.join(map(str, git.version_tuple())),
|
||||
py_version[0], py_version[1], py_version[2])
|
||||
return _user_agent
|
||||
|
||||
class _UserAgentHandler(urllib2.BaseHandler):
|
||||
class _UserAgentHandler(urllib.request.BaseHandler):
|
||||
def http_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
@ -276,7 +279,25 @@ class _UserAgentHandler(urllib2.BaseHandler):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
|
||||
class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
|
||||
def _AddPasswordFromUserInput(handler, msg, req):
|
||||
# If repo could not find auth info from netrc, try to get it from user input
|
||||
url = req.get_full_url()
|
||||
user, password = handler.passwd.find_user_password(None, url)
|
||||
if user is None:
|
||||
print(msg)
|
||||
try:
|
||||
user = raw_input('User: ')
|
||||
password = getpass.getpass()
|
||||
except KeyboardInterrupt:
|
||||
return
|
||||
handler.passwd.add_password(None, url, user, password)
|
||||
|
||||
class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPBasicAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
@ -284,7 +305,7 @@ class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib2.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||
return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||
self, authreq, host, req, headers)
|
||||
except:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
@ -294,7 +315,12 @@ class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
class _DigestAuthHandler(urllib2.HTTPDigestAuthHandler):
|
||||
class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib.request.HTTPDigestAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
@ -302,7 +328,7 @@ class _DigestAuthHandler(urllib2.HTTPDigestAuthHandler):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib2.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||
return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||
self, auth_header, host, req, headers)
|
||||
except:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
@@ -315,7 +341,7 @@ class _DigestAuthHandler(urllib2.HTTPDigestAuthHandler):
def init_http():
handlers = [_UserAgentHandler()]

mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
try:
n = netrc.netrc()
for host in n.hosts:
@@ -331,11 +357,11 @@ def init_http():

if 'http_proxy' in os.environ:
url = os.environ['http_proxy']
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
if 'REPO_CURL_VERBOSE' in os.environ:
handlers.append(urllib2.HTTPHandler(debuglevel=1))
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
urllib2.install_opener(urllib2.build_opener(*handlers))
handlers.append(urllib.request.HTTPHandler(debuglevel=1))
handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
urllib.request.install_opener(urllib.request.build_opener(*handlers))

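init_http() above collects the user-agent, basic/digest auth, and proxy handlers into one opener and installs it process-wide, so every later urllib.request.urlopen() call picks them up. A reduced sketch of that mechanism using only the standard library; the handler list here is illustrative, not repo's exact set:

    import os

    try:
      import urllib.request as urllib_request   # Python 3
    except ImportError:
      import urllib2 as urllib_request          # Python 2


    def init_http_sketch():
      handlers = []

      # One password manager backs both auth handlers.
      mgr = urllib_request.HTTPPasswordMgrWithDefaultRealm()
      handlers.append(urllib_request.HTTPBasicAuthHandler(mgr))
      handlers.append(urllib_request.HTTPDigestAuthHandler(mgr))

      # Honour an http_proxy environment variable, as the real code does.
      proxy = os.environ.get('http_proxy')
      if proxy:
        handlers.append(urllib_request.ProxyHandler({'http': proxy, 'https': proxy}))

      # Installed openers apply to every subsequent urlopen() in the process.
      urllib_request.install_opener(urllib_request.build_opener(*handlers))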
def _Main(argv):
result = 0
@@ -365,17 +391,21 @@ def _Main(argv):
finally:
close_ssh()
except KeyboardInterrupt:
print('aborted by user', file=sys.stderr)
result = 1
except RepoChangedException, rce:
except ManifestParseError as mpe:
print('fatal: %s' % mpe, file=sys.stderr)
result = 1
except RepoChangedException as rce:
# If repo changed, re-exec ourselves.
#
argv = list(sys.argv)
argv.extend(rce.extra_args)
try:
os.execv(__file__, argv)
except OSError, e:
print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
print >>sys.stderr, 'fatal: %s' % e
except OSError as e:
print('fatal: cannot restart repo after upgrade', file=sys.stderr)
print('fatal: %s' % e, file=sys.stderr)
result = 128

sys.exit(result)

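RepoChangedException in the hunk above is how a self-update takes effect: _Main() extends sys.argv with the extra arguments carried by the exception and re-execs itself, so the freshly synced copy of repo takes over the same process. A stand-alone sketch of that restart pattern; unlike the real code it execs the interpreter explicitly instead of relying on the script being executable:

    from __future__ import print_function

    import os
    import sys


    def restart_self(extra_args=None):
      """Replace the current process with a fresh copy of this script."""
      argv = [sys.executable, os.path.abspath(__file__)] + sys.argv[1:]
      argv.extend(extra_args or [])
      try:
        os.execv(sys.executable, argv)   # does not return on success
      except OSError as e:
        print('fatal: cannot restart after upgrade', file=sys.stderr)
        print('fatal: %s' % e, file=sys.stderr)
        sys.exit(128)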
manifest_xml.py (256 lines changed)
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
@ -21,11 +22,13 @@ import urlparse
|
||||
import xml.dom.minidom
|
||||
|
||||
from git_config import GitConfig
|
||||
from project import RemoteSpec, Project, MetaProject, R_HEADS, HEAD
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
|
||||
|
||||
urlparse.uses_relative.extend(['ssh', 'git'])
|
||||
urlparse.uses_netloc.extend(['ssh', 'git'])
|
||||
@ -37,6 +40,7 @@ class _Default(object):
|
||||
remote = None
|
||||
sync_j = 1
|
||||
sync_c = False
|
||||
sync_s = False
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
@@ -52,15 +56,28 @@ class _XmlRemote(object):
self.reviewUrl = review
self.resolvedFetchUrl = self._resolveFetchUrl()

def __eq__(self, other):
return self.__dict__ == other.__dict__

def __ne__(self, other):
return self.__dict__ != other.__dict__

def _resolveFetchUrl(self):
url = self.fetchUrl.rstrip('/')
manifestUrl = self.manifestUrl.rstrip('/')
p = manifestUrl.startswith('persistent-http')
if p:
manifestUrl = manifestUrl[len('persistent-'):]

# urljoin will get confused if there is no scheme in the base url
# ie, if manifestUrl is of the form <hostname:port>
if manifestUrl.find(':') != manifestUrl.find('/') - 1:
manifestUrl = 'gopher://' + manifestUrl
manifestUrl = 'gopher://' + manifestUrl
url = urlparse.urljoin(manifestUrl, url)
return re.sub(r'^gopher://', '', url)
url = re.sub(r'^gopher://', '', url)
if p:
url = 'persistent-' + url
return url

def ToRemoteSpec(self, projectName):
url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
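The `gopher://` dance in _resolveFetchUrl works around urljoin treating a scheme-less base such as `host.example.com:9418` as a relative path; the change above keeps the trick but now also preserves a `persistent-` prefix around it. A short illustration of why the placeholder scheme is needed (all values are made up):

    import re

    try:
      from urlparse import urljoin       # Python 2
    except ImportError:
      from urllib.parse import urljoin   # Python 3

    base = 'host.example.com:9418/manifests'   # scheme-less manifest URL
    rel = '..'                                 # a typical fetch="" value

    # Without a scheme, 'host.example.com' is parsed as the scheme itself and
    # urljoin gives up, returning the relative part unchanged.
    print(urljoin(base, rel))                        # -> '..'

    # With a throwaway scheme the relative resolution works, and the scheme is
    # stripped again afterwards, just as _resolveFetchUrl does.
    joined = urljoin('gopher://' + base, rel)
    print(re.sub(r'^gopher://', '', joined))         # -> 'host.example.com:9418/'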
@ -112,7 +129,7 @@ class XmlManifest(object):
|
||||
if os.path.exists(self.manifestFile):
|
||||
os.remove(self.manifestFile)
|
||||
os.symlink('manifests/%s' % name, self.manifestFile)
|
||||
except OSError, e:
|
||||
except OSError:
|
||||
raise ManifestParseError('cannot link manifest %s' % name)
|
||||
|
||||
def _RemoteToXml(self, r, doc, root):
|
||||
@ -123,7 +140,7 @@ class XmlManifest(object):
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
|
||||
def Save(self, fd, peg_rev=False):
|
||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
"""
|
||||
mp = self.manifestProject
|
||||
@ -169,6 +186,9 @@ class XmlManifest(object):
|
||||
if d.sync_c:
|
||||
have_default = True
|
||||
e.setAttribute('sync-c', 'true')
|
||||
if d.sync_s:
|
||||
have_default = True
|
||||
e.setAttribute('sync-s', 'true')
|
||||
if have_default:
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
@ -179,29 +199,38 @@ class XmlManifest(object):
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
sort_projects = list(self.projects.keys())
|
||||
sort_projects.sort()
|
||||
|
||||
for p in sort_projects:
|
||||
p = self.projects[p]
|
||||
def output_projects(parent, parent_node, projects):
|
||||
for p in projects:
|
||||
output_project(parent, parent_node, self.projects[p])
|
||||
|
||||
def output_project(parent, parent_node, p):
|
||||
if not p.MatchesGroups(groups):
|
||||
continue
|
||||
return
|
||||
|
||||
name = p.name
|
||||
relpath = p.relpath
|
||||
if parent:
|
||||
name = self._UnjoinName(parent.name, name)
|
||||
relpath = self._UnjoinRelpath(parent.relpath, relpath)
|
||||
|
||||
e = doc.createElement('project')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', p.name)
|
||||
if p.relpath != p.name:
|
||||
e.setAttribute('path', p.relpath)
|
||||
parent_node.appendChild(e)
|
||||
e.setAttribute('name', name)
|
||||
if relpath != name:
|
||||
e.setAttribute('path', relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
e.setAttribute('revision',
|
||||
p.bare_git.rev_parse(p.revisionExpr + '^0'))
|
||||
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
|
||||
else:
|
||||
e.setAttribute('revision',
|
||||
p.work_git.rev_parse(HEAD + '^0'))
|
||||
value = p.work_git.rev_parse(HEAD + '^0')
|
||||
e.setAttribute('revision', value)
|
||||
if peg_rev_upstream and value != p.revisionExpr:
|
||||
# Only save the origin if the origin is not a sha1, and the default
|
||||
# isn't our value, and the if the default doesn't already have that
|
||||
# covered.
|
||||
e.setAttribute('upstream', p.revisionExpr)
|
||||
elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
|
||||
e.setAttribute('revision', p.revisionExpr)
|
||||
|
||||
@ -226,6 +255,19 @@ class XmlManifest(object):
|
||||
if p.sync_c:
|
||||
e.setAttribute('sync-c', 'true')
|
||||
|
||||
if p.sync_s:
|
||||
e.setAttribute('sync-s', 'true')
|
||||
|
||||
if p.subprojects:
|
||||
sort_projects = [subp.name for subp in p.subprojects]
|
||||
sort_projects.sort()
|
||||
output_projects(p, e, sort_projects)
|
||||
|
||||
sort_projects = [key for key in self.projects.keys()
|
||||
if not self.projects[key].parent]
|
||||
sort_projects.sort()
|
||||
output_projects(None, root, sort_projects)
|
||||
|
||||
if self._repo_hooks_project:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
e = doc.createElement('repo-hooks')
|
||||
@ -294,8 +336,22 @@ class XmlManifest(object):
|
||||
|
||||
local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
|
||||
if os.path.exists(local):
|
||||
print('warning: %s is deprecated; put local manifests in %s instead'
|
||||
% (LOCAL_MANIFEST_NAME, LOCAL_MANIFESTS_DIR_NAME),
|
||||
file=sys.stderr)
|
||||
nodes.append(self._ParseManifestXml(local, self.repodir))
|
||||
|
||||
local_dir = os.path.abspath(os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME))
|
||||
try:
|
||||
for local_file in sorted(os.listdir(local_dir)):
|
||||
if local_file.endswith('.xml'):
|
||||
try:
|
||||
nodes.append(self._ParseManifestXml(local_file, self.repodir))
|
||||
except ManifestParseError as e:
|
||||
print('%s' % str(e), file=sys.stderr)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
self._ParseManifest(nodes)
|
||||
|
||||
if self.IsMirror:
|
||||
@ -305,7 +361,11 @@ class XmlManifest(object):
|
||||
self._loaded = True
|
||||
|
||||
def _ParseManifestXml(self, path, include_root):
|
||||
root = xml.dom.minidom.parse(path)
|
||||
try:
|
||||
root = xml.dom.minidom.parse(path)
|
||||
except (OSError, xml.parsers.expat.ExpatError) as e:
|
||||
raise ManifestParseError("error parsing manifest %s: %s" % (path, e))
|
||||
|
||||
if not root or not root.childNodes:
|
||||
raise ManifestParseError("no root node in %s" % (path,))
|
||||
|
||||
@ -316,36 +376,40 @@ class XmlManifest(object):
|
||||
raise ManifestParseError("no <manifest> in %s" % (path,))
|
||||
|
||||
nodes = []
|
||||
for node in manifest.childNodes:
|
||||
if node.nodeName == 'include':
|
||||
name = self._reqatt(node, 'name')
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError, \
|
||||
"include %s doesn't exist or isn't a file" % \
|
||||
(name,)
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
# tricky. actual parsing implementation may vary.
|
||||
except (KeyboardInterrupt, RuntimeError, SystemExit):
|
||||
raise
|
||||
except Exception, e:
|
||||
raise ManifestParseError(
|
||||
"failed parsing included manifest %s: %s", (name, e))
|
||||
else:
|
||||
nodes.append(node)
|
||||
for node in manifest.childNodes: # pylint:disable=W0631
|
||||
# We only get here if manifest is initialised
|
||||
if node.nodeName == 'include':
|
||||
name = self._reqatt(node, 'name')
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError, \
|
||||
"include %s doesn't exist or isn't a file" % \
|
||||
(name,)
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
# tricky. actual parsing implementation may vary.
|
||||
except (KeyboardInterrupt, RuntimeError, SystemExit):
|
||||
raise
|
||||
except Exception as e:
|
||||
raise ManifestParseError(
|
||||
"failed parsing included manifest %s: %s", (name, e))
|
||||
else:
|
||||
nodes.append(node)
|
||||
return nodes
|
||||
|
||||
def _ParseManifest(self, node_list):
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'remote':
|
||||
remote = self._ParseRemote(node)
|
||||
if self._remotes.get(remote.name):
|
||||
raise ManifestParseError(
|
||||
'duplicate remote %s in %s' %
|
||||
(remote.name, self.manifestFile))
|
||||
self._remotes[remote.name] = remote
|
||||
if remote:
|
||||
if remote.name in self._remotes:
|
||||
if remote != self._remotes[remote.name]:
|
||||
raise ManifestParseError(
|
||||
'remote %s already exists with different attributes' %
|
||||
(remote.name))
|
||||
else:
|
||||
self._remotes[remote.name] = remote
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'default':
|
||||
@ -369,19 +433,24 @@ class XmlManifest(object):
|
||||
if node.nodeName == 'manifest-server':
|
||||
url = self._reqatt(node, 'url')
|
||||
if self._manifest_server is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate manifest-server in %s' %
|
||||
(self.manifestFile))
|
||||
raise ManifestParseError(
|
||||
'duplicate manifest-server in %s' %
|
||||
(self.manifestFile))
|
||||
self._manifest_server = url
|
||||
|
||||
def recursively_add_projects(project):
|
||||
if self._projects.get(project.name):
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
for subproject in project.subprojects:
|
||||
recursively_add_projects(subproject)
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'project':
|
||||
project = self._ParseProject(node)
|
||||
if self._projects.get(project.name):
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
recursively_add_projects(project)
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
@ -408,9 +477,8 @@ class XmlManifest(object):
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found' %
|
||||
(name))
|
||||
raise ManifestParseError('remove-project element specifies non-existent '
|
||||
'project: %s' % name)
|
||||
|
||||
# If the manifest removes the hooks project, treat it as if it deleted
|
||||
# the repo-hooks element too.
|
||||
@ -490,6 +558,12 @@ class XmlManifest(object):
|
||||
d.sync_c = False
|
||||
else:
|
||||
d.sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_s = node.getAttribute('sync-s')
|
||||
if not sync_s:
|
||||
d.sync_s = False
|
||||
else:
|
||||
d.sync_s = sync_s.lower() in ("yes", "true", "1")
|
||||
return d
|
||||
|
||||
def _ParseNotice(self, node):
|
||||
@ -531,11 +605,19 @@ class XmlManifest(object):
|
||||
|
||||
return '\n'.join(cleanLines)
|
||||
|
||||
def _ParseProject(self, node):
|
||||
def _JoinName(self, parent_name, name):
|
||||
return os.path.join(parent_name, name)
|
||||
|
||||
def _UnjoinName(self, parent_name, name):
|
||||
return os.path.relpath(name, parent_name)
|
||||
|
||||
def _ParseProject(self, node, parent = None):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
if parent:
|
||||
name = self._JoinName(parent.name, name)
|
||||
|
||||
remote = self._get_remote(node)
|
||||
if remote is None:
|
||||
@ -573,42 +655,80 @@ class XmlManifest(object):
|
||||
else:
|
||||
sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_s = node.getAttribute('sync-s')
|
||||
if not sync_s:
|
||||
sync_s = self._default.sync_s
|
||||
else:
|
||||
sync_s = sync_s.lower() in ("yes", "true", "1")
|
||||
|
||||
upstream = node.getAttribute('upstream')
|
||||
|
||||
groups = ''
|
||||
if node.hasAttribute('groups'):
|
||||
groups = node.getAttribute('groups')
|
||||
groups = [x for x in re.split('[,\s]+', groups) if x]
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
default_groups = ['default', 'name:%s' % name, 'path:%s' % path]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
|
||||
if self.IsMirror:
|
||||
relpath = None
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
if parent is None:
|
||||
relpath, worktree, gitdir = self.GetProjectPaths(name, path)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
||||
relpath, worktree, gitdir = self.GetSubprojectPaths(parent, path)
|
||||
|
||||
default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
worktree = worktree,
|
||||
relpath = path,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
revisionId = None,
|
||||
rebase = rebase,
|
||||
groups = groups,
|
||||
sync_c = sync_c)
|
||||
sync_c = sync_c,
|
||||
sync_s = sync_s,
|
||||
upstream = upstream,
|
||||
parent = parent)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
project.subprojects.append(self._ParseProject(n, parent = project))
|
||||
|
||||
return project
|
||||
|
||||
def GetProjectPaths(self, name, path):
|
||||
relpath = path
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects', '%s.git' % path)
|
||||
return relpath, worktree, gitdir
|
||||
|
||||
def GetSubprojectName(self, parent, submodule_path):
|
||||
return os.path.join(parent.name, submodule_path)
|
||||
|
||||
def _JoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.join(parent_relpath, relpath)
|
||||
|
||||
def _UnjoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.relpath(relpath, parent_relpath)
|
||||
|
||||
def GetSubprojectPaths(self, parent, path):
|
||||
relpath = self._JoinRelpath(parent.relpath, path)
|
||||
gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
else:
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
|
pager.py (9 lines changed)
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import os
import select
import sys
@@ -49,8 +50,8 @@ def RunPager(globalConfig):

_BecomePager(pager)
except Exception:
print >>sys.stderr, "fatal: cannot start pager '%s'" % pager
os.exit(255)
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
sys.exit(255)

def _SelectPager(globalConfig):
try:
@@ -74,11 +75,11 @@ def _BecomePager(pager):
# ready works around a long-standing bug in popularly
# available versions of 'less', a better 'more'.
#
a, b, c = select.select([0], [], [0])
_a, _b, _c = select.select([0], [], [0])

os.environ['LESS'] = 'FRSX'

try:
os.execvp(pager, [pager])
except OSError, e:
except OSError:
os.execv('/bin/sh', ['sh', '-c', pager])

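_BecomePager() above waits on select() until the parent has written something to the pager's stdin, then replaces itself with the pager, falling back to a shell so configured values like "less -R" still work. A condensed, stand-alone sketch of that exec path, assuming a POSIX system; the hard-coded pager choice is illustrative only:

    import os
    import select


    def become_pager(pager='less'):
      # Block until data is ready on stdin; exec'ing some versions of 'less'
      # against a still-empty pipe triggers a long-standing display bug.
      select.select([0], [], [0])

      os.environ['LESS'] = 'FRSX'
      try:
        os.execvp(pager, [pager])          # replaces this process on success
      except OSError:
        # A value such as "less -R" is not a plain executable name, so let the
        # shell split and run it instead.
        os.execv('/bin/sh', ['sh', '-c', pager])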
project.py (460 lines changed)
@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import traceback
|
||||
import errno
|
||||
import filecmp
|
||||
@ -22,13 +23,15 @@ import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from color import Coloring
|
||||
from git_command import GitCommand
|
||||
from git_command import GitCommand, git_require
|
||||
from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
|
||||
from error import GitError, HookError, UploadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import NoManifestException
|
||||
from trace import IsTrace, Trace
|
||||
|
||||
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
|
||||
@ -50,7 +53,7 @@ def _lwrite(path, content):
|
||||
|
||||
def _error(fmt, *args):
|
||||
msg = fmt % args
|
||||
print >>sys.stderr, 'error: %s' % msg
|
||||
print('error: %s' % msg, file=sys.stderr)
|
||||
|
||||
def not_rev(r):
|
||||
return '^' + r
|
||||
@ -209,9 +212,9 @@ class _CopyFile:
|
||||
if os.path.exists(dest):
|
||||
os.remove(dest)
|
||||
else:
|
||||
dir = os.path.dirname(dest)
|
||||
if not os.path.isdir(dir):
|
||||
os.makedirs(dir)
|
||||
dest_dir = os.path.dirname(dest)
|
||||
if not os.path.isdir(dest_dir):
|
||||
os.makedirs(dest_dir)
|
||||
shutil.copy(src, dest)
|
||||
# make the file read-only
|
||||
mode = os.stat(dest)[stat.ST_MODE]
|
||||
@ -328,7 +331,6 @@ class RepoHook(object):
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the consturctor.
|
||||
"""
|
||||
hooks_dir = self._hooks_project.worktree
|
||||
hooks_config = self._hooks_project.config
|
||||
git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
|
||||
|
||||
@ -360,7 +362,7 @@ class RepoHook(object):
|
||||
'(yes/yes-never-ask-again/NO)? ') % (
|
||||
self._GetMustVerb(), self._script_fullpath)
|
||||
response = raw_input(prompt).lower()
|
||||
print
|
||||
print()
|
||||
|
||||
# User is doing a one-time approval.
|
||||
if response in ('y', 'yes'):
|
||||
@ -484,7 +486,31 @@ class Project(object):
|
||||
revisionId,
|
||||
rebase = True,
|
||||
groups = None,
|
||||
sync_c = False):
|
||||
sync_c = False,
|
||||
sync_s = False,
|
||||
upstream = None,
|
||||
parent = None,
|
||||
is_derived = False):
|
||||
"""Init a Project object.
|
||||
|
||||
Args:
|
||||
manifest: The XmlManifest object.
|
||||
name: The `name` attribute of manifest.xml's project element.
|
||||
remote: RemoteSpec object specifying its remote's properties.
|
||||
gitdir: Absolute path of git directory.
|
||||
worktree: Absolute path of git working tree.
|
||||
relpath: Relative path of git working tree to repo's top directory.
|
||||
revisionExpr: The `revision` attribute of manifest.xml's project element.
|
||||
revisionId: git commit id for checking out.
|
||||
rebase: The `rebase` attribute of manifest.xml's project element.
|
||||
groups: The `groups` attribute of manifest.xml's project element.
|
||||
sync_c: The `sync-c` attribute of manifest.xml's project element.
|
||||
sync_s: The `sync-s` attribute of manifest.xml's project element.
|
||||
upstream: The `upstream` attribute of manifest.xml's project element.
|
||||
parent: The parent Project object.
|
||||
is_derived: False if the project was explicitly defined in the manifest;
|
||||
True if the project is a discovered submodule.
|
||||
"""
|
||||
self.manifest = manifest
|
||||
self.name = name
|
||||
self.remote = remote
|
||||
@ -506,6 +532,11 @@ class Project(object):
|
||||
self.rebase = rebase
|
||||
self.groups = groups
|
||||
self.sync_c = sync_c
|
||||
self.sync_s = sync_s
|
||||
self.upstream = upstream
|
||||
self.parent = parent
|
||||
self.is_derived = is_derived
|
||||
self.subprojects = []
|
||||
|
||||
self.snapshots = {}
|
||||
self.copyfiles = []
|
||||
@ -525,6 +556,10 @@ class Project(object):
|
||||
# project containing repo hooks.
|
||||
self.enabled_repo_hooks = []
|
||||
|
||||
@property
|
||||
def Derived(self):
|
||||
return self.is_derived
|
||||
|
||||
@property
|
||||
def Exists(self):
|
||||
return os.path.isdir(self.gitdir)
|
||||
@ -554,7 +589,7 @@ class Project(object):
|
||||
'--unmerged',
|
||||
'--ignore-missing',
|
||||
'--refresh')
|
||||
if self.work_git.DiffZ('diff-index','-M','--cached',HEAD):
|
||||
if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD):
|
||||
return True
|
||||
if self.work_git.DiffZ('diff-files'):
|
||||
return True
|
||||
@ -583,14 +618,14 @@ class Project(object):
|
||||
return self._userident_email
|
||||
|
||||
def _LoadUserIdentity(self):
|
||||
u = self.bare_git.var('GIT_COMMITTER_IDENT')
|
||||
m = re.compile("^(.*) <([^>]*)> ").match(u)
|
||||
if m:
|
||||
self._userident_name = m.group(1)
|
||||
self._userident_email = m.group(2)
|
||||
else:
|
||||
self._userident_name = ''
|
||||
self._userident_email = ''
|
||||
u = self.bare_git.var('GIT_COMMITTER_IDENT')
|
||||
m = re.compile("^(.*) <([^>]*)> ").match(u)
|
||||
if m:
|
||||
self._userident_name = m.group(1)
|
||||
self._userident_email = m.group(2)
|
||||
else:
|
||||
self._userident_name = ''
|
||||
self._userident_email = ''
|
||||
|
||||
def GetRemote(self, name):
|
||||
"""Get the configuration for a single remote.
|
||||
@ -606,25 +641,24 @@ class Project(object):
|
||||
"""Get all existing local branches.
|
||||
"""
|
||||
current = self.CurrentBranch
|
||||
all = self._allrefs
|
||||
all_refs = self._allrefs
|
||||
heads = {}
|
||||
pubd = {}
|
||||
|
||||
for name, id in all.iteritems():
|
||||
for name, ref_id in all_refs.iteritems():
|
||||
if name.startswith(R_HEADS):
|
||||
name = name[len(R_HEADS):]
|
||||
b = self.GetBranch(name)
|
||||
b.current = name == current
|
||||
b.published = None
|
||||
b.revision = id
|
||||
b.revision = ref_id
|
||||
heads[name] = b
|
||||
|
||||
for name, id in all.iteritems():
|
||||
for name, ref_id in all_refs.iteritems():
|
||||
if name.startswith(R_PUB):
|
||||
name = name[len(R_PUB):]
|
||||
b = heads.get(name)
|
||||
if b:
|
||||
b.published = id
|
||||
b.published = ref_id
|
||||
|
||||
return heads
|
||||
|
||||
@ -683,9 +717,9 @@ class Project(object):
|
||||
if not os.path.isdir(self.worktree):
|
||||
if output_redir == None:
|
||||
output_redir = sys.stdout
|
||||
print >>output_redir, ''
|
||||
print >>output_redir, 'project %s/' % self.relpath
|
||||
print >>output_redir, ' missing (run "repo sync")'
|
||||
print(file=output_redir)
|
||||
print('project %s/' % self.relpath, file=output_redir)
|
||||
print(' missing (run "repo sync")', file=output_redir)
|
||||
return
|
||||
|
||||
self.work_git.update_index('-q',
|
||||
@ -724,17 +758,25 @@ class Project(object):
|
||||
paths.sort()
|
||||
|
||||
for p in paths:
|
||||
try: i = di[p]
|
||||
except KeyError: i = None
|
||||
try:
|
||||
i = di[p]
|
||||
except KeyError:
|
||||
i = None
|
||||
|
||||
try: f = df[p]
|
||||
except KeyError: f = None
|
||||
try:
|
||||
f = df[p]
|
||||
except KeyError:
|
||||
f = None
|
||||
|
||||
if i: i_status = i.status.upper()
|
||||
else: i_status = '-'
|
||||
if i:
|
||||
i_status = i.status.upper()
|
||||
else:
|
||||
i_status = '-'
|
||||
|
||||
if f: f_status = f.status.lower()
|
||||
else: f_status = '-'
|
||||
if f:
|
||||
f_status = f.status.lower()
|
||||
else:
|
||||
f_status = '-'
|
||||
|
||||
if i and i.src_path:
|
||||
line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
|
||||
@ -777,46 +819,46 @@ class Project(object):
|
||||
out.project('project %s/' % self.relpath)
|
||||
out.nl()
|
||||
has_diff = True
|
||||
print line[:-1]
|
||||
print(line[:-1])
|
||||
p.Wait()
|
||||
|
||||
|
||||
## Publish / Upload ##
|
||||
|
||||
def WasPublished(self, branch, all=None):
|
||||
def WasPublished(self, branch, all_refs=None):
|
||||
"""Was the branch published (uploaded) for code review?
|
||||
If so, returns the SHA-1 hash of the last published
|
||||
state for the branch.
|
||||
"""
|
||||
key = R_PUB + branch
|
||||
if all is None:
|
||||
if all_refs is None:
|
||||
try:
|
||||
return self.bare_git.rev_parse(key)
|
||||
except GitError:
|
||||
return None
|
||||
else:
|
||||
try:
|
||||
return all[key]
|
||||
return all_refs[key]
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def CleanPublishedCache(self, all=None):
|
||||
def CleanPublishedCache(self, all_refs=None):
|
||||
"""Prunes any stale published refs.
|
||||
"""
|
||||
if all is None:
|
||||
all = self._allrefs
|
||||
if all_refs is None:
|
||||
all_refs = self._allrefs
|
||||
heads = set()
|
||||
canrm = {}
|
||||
for name, id in all.iteritems():
|
||||
for name, ref_id in all_refs.iteritems():
|
||||
if name.startswith(R_HEADS):
|
||||
heads.add(name)
|
||||
elif name.startswith(R_PUB):
|
||||
canrm[name] = id
|
||||
canrm[name] = ref_id
|
||||
|
||||
for name, id in canrm.iteritems():
|
||||
for name, ref_id in canrm.iteritems():
|
||||
n = name[len(R_PUB):]
|
||||
if R_HEADS + n not in heads:
|
||||
self.bare_git.DeleteRef(name, id)
|
||||
self.bare_git.DeleteRef(name, ref_id)
|
||||
|
||||
def GetUploadableBranches(self, selected_branch=None):
|
||||
"""List any branches which can be uploaded for review.
|
||||
@ -824,15 +866,15 @@ class Project(object):
|
||||
heads = {}
|
||||
pubed = {}
|
||||
|
||||
for name, id in self._allrefs.iteritems():
|
||||
for name, ref_id in self._allrefs.iteritems():
|
||||
if name.startswith(R_HEADS):
|
||||
heads[name[len(R_HEADS):]] = id
|
||||
heads[name[len(R_HEADS):]] = ref_id
|
||||
elif name.startswith(R_PUB):
|
||||
pubed[name[len(R_PUB):]] = id
|
||||
pubed[name[len(R_PUB):]] = ref_id
|
||||
|
||||
ready = []
|
||||
for branch, id in heads.iteritems():
|
||||
if branch in pubed and pubed[branch] == id:
|
||||
for branch, ref_id in heads.iteritems():
|
||||
if branch in pubed and pubed[branch] == ref_id:
|
||||
continue
|
||||
if selected_branch and branch != selected_branch:
|
||||
continue
|
||||
@ -976,18 +1018,18 @@ class Project(object):
|
||||
self._InitHooks()
|
||||
|
||||
def _CopyFiles(self):
|
||||
for file in self.copyfiles:
|
||||
file._Copy()
|
||||
for copyfile in self.copyfiles:
|
||||
copyfile._Copy()
|
||||
|
||||
def GetRevisionId(self, all=None):
|
||||
def GetRevisionId(self, all_refs=None):
|
||||
if self.revisionId:
|
||||
return self.revisionId
|
||||
|
||||
rem = self.GetRemote(self.remote.name)
|
||||
rev = rem.ToLocal(self.revisionExpr)
|
||||
|
||||
if all is not None and rev in all:
|
||||
return all[rev]
|
||||
if all_refs is not None and rev in all_refs:
|
||||
return all_refs[rev]
|
||||
|
||||
try:
|
||||
return self.bare_git.rev_parse('--verify', '%s^0' % rev)
|
||||
@ -1000,16 +1042,20 @@ class Project(object):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
Network access is not required.
|
||||
"""
|
||||
all = self.bare_ref.all
|
||||
self.CleanPublishedCache(all)
|
||||
revid = self.GetRevisionId(all)
|
||||
all_refs = self.bare_ref.all
|
||||
self.CleanPublishedCache(all_refs)
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
|
||||
def _doff():
|
||||
self._FastForward(revid)
|
||||
self._CopyFiles()
|
||||
|
||||
self._InitWorkTree()
|
||||
head = self.work_git.GetHead()
|
||||
if head.startswith(R_HEADS):
|
||||
branch = head[len(R_HEADS):]
|
||||
try:
|
||||
head = all[head]
|
||||
head = all_refs[head]
|
||||
except KeyError:
|
||||
head = None
|
||||
else:
|
||||
@ -1036,7 +1082,7 @@ class Project(object):
|
||||
|
||||
try:
|
||||
self._Checkout(revid, quiet=True)
|
||||
except GitError, e:
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
@ -1058,14 +1104,14 @@ class Project(object):
|
||||
branch.name)
|
||||
try:
|
||||
self._Checkout(revid, quiet=True)
|
||||
except GitError, e:
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
return
|
||||
|
||||
upstream_gain = self._revlist(not_rev(HEAD), revid)
|
||||
pub = self.WasPublished(branch.name, all)
|
||||
pub = self.WasPublished(branch.name, all_refs)
|
||||
if pub:
|
||||
not_merged = self._revlist(not_rev(revid), pub)
|
||||
if not_merged:
|
||||
@ -1082,9 +1128,6 @@ class Project(object):
|
||||
# All published commits are merged, and thus we are a
|
||||
# strict subset. We can fast-forward safely.
|
||||
#
|
||||
def _doff():
|
||||
self._FastForward(revid)
|
||||
self._CopyFiles()
|
||||
syncbuf.later1(self, _doff)
|
||||
return
|
||||
|
||||
@ -1143,13 +1186,10 @@ class Project(object):
|
||||
try:
|
||||
self._ResetHard(revid)
|
||||
self._CopyFiles()
|
||||
except GitError, e:
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
else:
|
||||
def _doff():
|
||||
self._FastForward(revid)
|
||||
self._CopyFiles()
|
||||
syncbuf.later1(self, _doff)
|
||||
|
||||
def AddCopyFile(self, src, dest, absdest):
|
||||
@ -1169,7 +1209,7 @@ class Project(object):
|
||||
cmd = ['fetch', remote.name]
|
||||
cmd.append('refs/changes/%2.2d/%d/%d' \
|
||||
% (change_id % 100, change_id, patch_id))
|
||||
cmd.extend(map(lambda x: str(x), remote.fetch))
|
||||
cmd.extend(map(str, remote.fetch))
|
||||
if GitCommand(self, cmd, bare=True).Wait() != 0:
|
||||
return None
|
||||
return DownloadedChange(self,
|
||||
@ -1188,8 +1228,8 @@ class Project(object):
|
||||
if head == (R_HEADS + name):
|
||||
return True
|
||||
|
||||
all = self.bare_ref.all
|
||||
if (R_HEADS + name) in all:
|
||||
all_refs = self.bare_ref.all
|
||||
if (R_HEADS + name) in all_refs:
|
||||
return GitCommand(self,
|
||||
['checkout', name, '--'],
|
||||
capture_stdout = True,
|
||||
@ -1198,11 +1238,11 @@ class Project(object):
|
||||
branch = self.GetBranch(name)
|
||||
branch.remote = self.GetRemote(self.remote.name)
|
||||
branch.merge = self.revisionExpr
|
||||
revid = self.GetRevisionId(all)
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
|
||||
if head.startswith(R_HEADS):
|
||||
try:
|
||||
head = all[head]
|
||||
head = all_refs[head]
|
||||
except KeyError:
|
||||
head = None
|
||||
|
||||
@ -1243,9 +1283,9 @@ class Project(object):
|
||||
#
|
||||
return True
|
||||
|
||||
all = self.bare_ref.all
|
||||
all_refs = self.bare_ref.all
|
||||
try:
|
||||
revid = all[rev]
|
||||
revid = all_refs[rev]
|
||||
except KeyError:
|
||||
# Branch does not exist in this project
|
||||
#
|
||||
@ -1253,7 +1293,7 @@ class Project(object):
|
||||
|
||||
if head.startswith(R_HEADS):
|
||||
try:
|
||||
head = all[head]
|
||||
head = all_refs[head]
|
||||
except KeyError:
|
||||
head = None
|
||||
|
||||
@ -1281,8 +1321,8 @@ class Project(object):
|
||||
didn't exist.
|
||||
"""
|
||||
rev = R_HEADS + name
|
||||
all = self.bare_ref.all
|
||||
if rev not in all:
|
||||
all_refs = self.bare_ref.all
|
||||
if rev not in all_refs:
|
||||
# Doesn't exist
|
||||
return None
|
||||
|
||||
@ -1291,9 +1331,9 @@ class Project(object):
|
||||
# We can't destroy the branch while we are sitting
|
||||
# on it. Switch to a detached HEAD.
|
||||
#
|
||||
head = all[head]
|
||||
head = all_refs[head]
|
||||
|
||||
revid = self.GetRevisionId(all)
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
if head == revid:
|
||||
_lwrite(os.path.join(self.worktree, '.git', HEAD),
|
||||
'%s\n' % revid)
|
||||
@ -1362,6 +1402,149 @@ class Project(object):
|
||||
return kept
|
||||
|
||||
|
||||
## Submodule Management ##
|
||||
|
||||
def GetRegisteredSubprojects(self):
|
||||
result = []
|
||||
def rec(subprojects):
|
||||
if not subprojects:
|
||||
return
|
||||
result.extend(subprojects)
|
||||
for p in subprojects:
|
||||
rec(p.subprojects)
|
||||
rec(self.subprojects)
|
||||
return result
|
||||
|
||||
def _GetSubmodules(self):
|
||||
# Unfortunately we cannot call `git submodule status --recursive` here
|
||||
# because the working tree might not exist yet, and it cannot be used
|
||||
# without a working tree in its current implementation.
|
||||
|
||||
def get_submodules(gitdir, rev):
|
||||
# Parse .gitmodules for submodule sub_paths and sub_urls
|
||||
sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
|
||||
if not sub_paths:
|
||||
return []
|
||||
# Run `git ls-tree` to read SHAs of submodule object, which happen to be
|
||||
# revision of submodule repository
|
||||
sub_revs = git_ls_tree(gitdir, rev, sub_paths)
|
||||
submodules = []
|
||||
for sub_path, sub_url in zip(sub_paths, sub_urls):
|
||||
try:
|
||||
sub_rev = sub_revs[sub_path]
|
||||
except KeyError:
|
||||
# Ignore non-exist submodules
|
||||
continue
|
||||
submodules.append((sub_rev, sub_path, sub_url))
|
||||
return submodules
|
||||
|
||||
re_path = re.compile(r'^submodule\.([^.]+)\.path=(.*)$')
|
||||
re_url = re.compile(r'^submodule\.([^.]+)\.url=(.*)$')
|
||||
def parse_gitmodules(gitdir, rev):
|
||||
cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
|
||||
try:
|
||||
p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
|
||||
bare = True, gitdir = gitdir)
|
||||
except GitError:
|
||||
return [], []
|
||||
if p.Wait() != 0:
|
||||
return [], []
|
||||
|
||||
gitmodules_lines = []
|
||||
fd, temp_gitmodules_path = tempfile.mkstemp()
|
||||
try:
|
||||
os.write(fd, p.stdout)
|
||||
os.close(fd)
|
||||
cmd = ['config', '--file', temp_gitmodules_path, '--list']
|
||||
p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
|
||||
bare = True, gitdir = gitdir)
|
||||
if p.Wait() != 0:
|
||||
return [], []
|
||||
gitmodules_lines = p.stdout.split('\n')
|
||||
except GitError:
|
||||
return [], []
|
||||
finally:
|
||||
os.remove(temp_gitmodules_path)
|
||||
|
||||
names = set()
|
||||
paths = {}
|
||||
urls = {}
|
||||
for line in gitmodules_lines:
|
||||
if not line:
|
||||
continue
|
||||
m = re_path.match(line)
|
||||
if m:
|
||||
names.add(m.group(1))
|
||||
paths[m.group(1)] = m.group(2)
|
||||
continue
|
||||
m = re_url.match(line)
|
||||
if m:
|
||||
names.add(m.group(1))
|
||||
urls[m.group(1)] = m.group(2)
|
||||
continue
|
||||
names = sorted(names)
|
||||
return ([paths.get(name, '') for name in names],
|
||||
[urls.get(name, '') for name in names])
|
||||
|
||||
def git_ls_tree(gitdir, rev, paths):
|
||||
cmd = ['ls-tree', rev, '--']
|
||||
cmd.extend(paths)
|
||||
try:
|
||||
p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
|
||||
bare = True, gitdir = gitdir)
|
||||
except GitError:
|
||||
return []
|
||||
if p.Wait() != 0:
|
||||
return []
|
||||
objects = {}
|
||||
for line in p.stdout.split('\n'):
|
||||
if not line.strip():
|
||||
continue
|
||||
object_rev, object_path = line.split()[2:4]
|
||||
objects[object_path] = object_rev
|
||||
return objects
|
||||
|
||||
try:
|
||||
rev = self.GetRevisionId()
|
||||
except GitError:
|
||||
return []
|
||||
return get_submodules(self.gitdir, rev)
|
||||
|
||||
def GetDerivedSubprojects(self):
|
||||
result = []
|
||||
if not self.Exists:
|
||||
# If git repo does not exist yet, querying its submodules will
|
||||
# mess up its states; so return here.
|
||||
return result
|
||||
for rev, path, url in self._GetSubmodules():
|
||||
name = self.manifest.GetSubprojectName(self, path)
|
||||
project = self.manifest.projects.get(name)
|
||||
if project:
|
||||
result.extend(project.GetDerivedSubprojects())
|
||||
continue
|
||||
relpath, worktree, gitdir = self.manifest.GetSubprojectPaths(self, path)
|
||||
remote = RemoteSpec(self.remote.name,
|
||||
url = url,
|
||||
review = self.remote.review)
|
||||
subproject = Project(manifest = self.manifest,
|
||||
name = name,
|
||||
remote = remote,
|
||||
gitdir = gitdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = self.revisionExpr,
|
||||
revisionId = rev,
|
||||
rebase = self.rebase,
|
||||
groups = self.groups,
|
||||
sync_c = self.sync_c,
|
||||
sync_s = self.sync_s,
|
||||
parent = self,
|
||||
is_derived = True)
|
||||
result.append(subproject)
|
||||
result.extend(subproject.GetDerivedSubprojects())
|
||||
return result
|
||||
|
||||
|
||||
## Direct Git Commands ##
|
||||
|
||||
def _RemoteFetch(self, name=None,
|
||||
@@ -1373,6 +1556,16 @@ class Project(object):
is_sha1 = False
tag_name = None

def CheckForSha1():
try:
# if revision (sha or tag) is not present then following function
# throws an error.
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
return True
except GitError:
# There is no such persistent revision. We have to fetch it.
return False

if current_branch_only:
if ID_RE.match(self.revisionExpr) is not None:
is_sha1 = True
@@ -1381,14 +1574,10 @@ class Project(object):
tag_name = self.revisionExpr[len(R_TAGS):]

if is_sha1 or tag_name is not None:
try:
# if revision (sha or tag) is not present then following function
# throws an error.
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
if CheckForSha1():
return True
except GitError:
# There is no such persistent revision. We have to fetch it.
pass
if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)):
current_branch_only = False

if not name:
name = self.remote.name
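CheckForSha1() above leans on `git rev-parse --verify <rev>^0` failing when the object is missing, which lets sync skip the network fetch when a pinned sha1 or tag is already present locally, and triggers a full fetch later if the optimistic single-branch fetch did not deliver it. An equivalent stand-alone check using subprocess; the repository path and revision here are placeholders:

    import os
    import subprocess


    def have_revision(gitdir, rev):
      """Return True if rev already resolves to a commit object in gitdir."""
      cmd = ['git', '--git-dir', gitdir, 'rev-parse', '--verify', '--quiet',
             '%s^0' % rev]
      with open(os.devnull, 'wb') as devnull:
        # rev-parse exits non-zero when the sha1/tag is absent, which is the
        # signal that a real fetch is still needed.
        return subprocess.call(cmd, stdout=devnull, stderr=devnull) == 0


    # Hypothetical usage:
    #   if not have_revision('/path/to/project.git', 'v1.0'):
    #       ...fall back to "git fetch"...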
|
||||
@ -1404,33 +1593,33 @@ class Project(object):
|
||||
packed_refs = os.path.join(self.gitdir, 'packed-refs')
|
||||
remote = self.GetRemote(name)
|
||||
|
||||
all = self.bare_ref.all
|
||||
ids = set(all.values())
|
||||
all_refs = self.bare_ref.all
|
||||
ids = set(all_refs.values())
|
||||
tmp = set()
|
||||
|
||||
for r, id in GitRefs(ref_dir).all.iteritems():
|
||||
if r not in all:
|
||||
for r, ref_id in GitRefs(ref_dir).all.iteritems():
|
||||
if r not in all_refs:
|
||||
if r.startswith(R_TAGS) or remote.WritesTo(r):
|
||||
all[r] = id
|
||||
ids.add(id)
|
||||
all_refs[r] = ref_id
|
||||
ids.add(ref_id)
|
||||
continue
|
||||
|
||||
if id in ids:
|
||||
if ref_id in ids:
|
||||
continue
|
||||
|
||||
r = 'refs/_alt/%s' % id
|
||||
all[r] = id
|
||||
ids.add(id)
|
||||
r = 'refs/_alt/%s' % ref_id
|
||||
all_refs[r] = ref_id
|
||||
ids.add(ref_id)
|
||||
tmp.add(r)
|
||||
|
||||
ref_names = list(all.keys())
|
||||
ref_names = list(all_refs.keys())
|
||||
ref_names.sort()
|
||||
|
||||
tmp_packed = ''
|
||||
old_packed = ''
|
||||
|
||||
for r in ref_names:
|
||||
line = '%s %s\n' % (all[r], r)
|
||||
line = '%s %s\n' % (all_refs[r], r)
|
||||
tmp_packed += line
|
||||
if r not in tmp:
|
||||
old_packed += line
|
||||
@ -1453,7 +1642,7 @@ class Project(object):
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(name)
|
||||
|
||||
if not current_branch_only or is_sha1:
|
||||
if not current_branch_only:
|
||||
# Fetch whole repo
|
||||
cmd.append('--tags')
|
||||
cmd.append((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*'))
|
||||
@ -1462,15 +1651,23 @@ class Project(object):
|
||||
cmd.append(tag_name)
|
||||
else:
|
||||
branch = self.revisionExpr
|
||||
if is_sha1:
|
||||
branch = self.upstream
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
cmd.append((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch))
|
||||
|
||||
ok = False
|
||||
for i in range(2):
|
||||
if GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait() == 0:
|
||||
for _i in range(2):
|
||||
ret = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait()
|
||||
if ret == 0:
|
||||
ok = True
|
||||
break
|
||||
elif current_branch_only and is_sha1 and ret == 128:
|
||||
# Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
|
||||
# mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
|
||||
# abort the optimization attempt and do a full sync.
|
||||
break
|
||||
time.sleep(random.randint(30, 45))
|
||||
|
||||
if initial:
|
||||
@ -1480,6 +1677,15 @@ class Project(object):
|
||||
else:
|
||||
os.remove(packed_refs)
|
||||
self.bare_git.pack_refs('--all', '--prune')
|
||||
|
||||
if is_sha1 and current_branch_only and self.upstream:
|
||||
# We just synced the upstream given branch; verify we
|
||||
# got what we wanted, else trigger a second run of all
|
||||
# refs.
|
||||
if not CheckForSha1():
|
||||
return self._RemoteFetch(name=name, current_branch_only=False,
|
||||
initial=False, quiet=quiet, alt_dir=alt_dir)
|
||||
|
||||
return ok
|
||||
|
||||
def _ApplyCloneBundle(self, initial=False, quiet=False):
|
||||
@ -1557,7 +1763,8 @@ class Project(object):
|
||||
# returned another error with the HTTP error code being 400 or above.
|
||||
# This return code only appears if -f, --fail is used.
|
||||
if not quiet:
|
||||
print >> sys.stderr, "Server does not provide clone.bundle; ignoring."
|
||||
print("Server does not provide clone.bundle; ignoring.",
|
||||
file=sys.stderr)
|
||||
return False
|
||||
|
||||
if os.path.exists(tmpPath):
|
||||
@ -1692,7 +1899,7 @@ class Project(object):
|
||||
continue
|
||||
try:
|
||||
os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
@ -1755,7 +1962,7 @@ class Project(object):
|
||||
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
|
||||
else:
|
||||
raise GitError('cannot overwrite a local work tree')
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
@ -1805,7 +2012,8 @@ class Project(object):
|
||||
if p.Wait() == 0:
|
||||
out = p.stdout
|
||||
if out:
|
||||
return out[:-1].split("\0")
|
||||
return out[:-1].split('\0') # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
return []
|
||||
|
||||
def DiffZ(self, name, *args):
|
||||
@ -1821,7 +2029,7 @@ class Project(object):
|
||||
out = p.process.stdout.read()
|
||||
r = {}
|
||||
if out:
|
||||
out = iter(out[:-1].split('\0'))
|
||||
out = iter(out[:-1].split('\0')) # pylint: disable=W1401
|
||||
while out:
|
||||
try:
|
||||
info = out.next()
|
||||
@ -1848,7 +2056,7 @@ class Project(object):
|
||||
self.level = self.level[1:]
|
||||
|
||||
info = info[1:].split(' ')
|
||||
info =_Info(path, *info)
|
||||
info = _Info(path, *info)
|
||||
if info.status in ('R', 'C'):
|
||||
info.src_path = info.path
|
||||
info.path = out.next()
|
||||
@ -1862,7 +2070,10 @@ class Project(object):
|
||||
path = os.path.join(self._project.gitdir, HEAD)
|
||||
else:
|
||||
path = os.path.join(self._project.worktree, '.git', HEAD)
|
||||
fd = open(path, 'rb')
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
except IOError:
|
||||
raise NoManifestException(path)
|
||||
try:
|
||||
line = fd.read()
|
||||
finally:
|
||||
@@ -1938,7 +2149,9 @@ class Project(object):

Since we don't have a 'rev_parse' method defined, the __getattr__ will
run. We'll replace the '_' with a '-' and try to run a git command.
Any other arguments will be passed to the git command.
Any other positional arguments will be passed to the git command, and the
following keyword arguments are supported:
config: An optional dict of git config options to be passed with '-c'.

Args:
name: The name of the git command to call. Any '_' characters will
@@ -1948,8 +2161,20 @@ class Project(object):
A callable object that will try to call git with the named command.
"""
name = name.replace('_', '-')
def runner(*args):
cmdv = [name]
def runner(*args, **kwargs):
cmdv = []
config = kwargs.pop('config', None)
for k in kwargs:
raise TypeError('%s() got an unexpected keyword argument %r'
% (name, k))
if config is not None:
if not git_require((1, 7, 2)):
raise ValueError('cannot set config on command line for %s()'
% name)
for k, v in config.iteritems():
cmdv.append('-c')
cmdv.append('%s=%s' % (k, v))
cmdv.append(name)
cmdv.extend(args)
p = GitCommand(self._project,
cmdv,
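The widened runner() above accepts an optional `config` dict and prepends each entry as a `-c key=value` pair before the git sub-command, refusing to do so on git older than 1.7.2 (where `git -c` does not exist). A reduced sketch of just the argument assembly; the helper name is made up:

    def build_git_argv(name, args, config=None):
      """Mimic the runner above: ['-c', 'k=v', ..., '<sub-command>', args...]."""
      cmdv = []
      for k, v in (config or {}).items():
        cmdv.append('-c')
        cmdv.append('%s=%s' % (k, v))
      cmdv.append(name.replace('_', '-'))   # rev_parse -> rev-parse
      cmdv.extend(args)
      return cmdv


    # build_git_argv('rev_parse', ['HEAD'], config={'http.proxy': 'proxy:8080'})
    # -> ['-c', 'http.proxy=proxy:8080', 'rev-parse', 'HEAD']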
|
||||
@ -2009,7 +2234,7 @@ class _Later(object):
|
||||
self.action()
|
||||
out.nl()
|
||||
return True
|
||||
except GitError, e:
|
||||
except GitError:
|
||||
out.nl()
|
||||
return False
|
||||
|
||||
@ -2079,7 +2304,6 @@ class MetaProject(Project):
|
||||
"""A special project housed under .repo.
|
||||
"""
|
||||
def __init__(self, manifest, name, gitdir, worktree):
|
||||
repodir = manifest.repodir
|
||||
Project.__init__(self,
|
||||
manifest = manifest,
|
||||
name = name,
|
||||
@ -2131,12 +2355,12 @@ class MetaProject(Project):
|
||||
if not self.remote or not self.revisionExpr:
|
||||
return False
|
||||
|
||||
all = self.bare_ref.all
|
||||
revid = self.GetRevisionId(all)
|
||||
all_refs = self.bare_ref.all
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
head = self.work_git.GetHead()
|
||||
if head.startswith(R_HEADS):
|
||||
try:
|
||||
head = all[head]
|
||||
head = all_refs[head]
|
||||
except KeyError:
|
||||
head = None
|
||||
|
||||
|
repo (290 lines changed)
@@ -1,9 +1,10 @@
#!/bin/sh
#!/usr/bin/env python

## repo default configuration
##
REPO_URL='https://gerrit.googlesource.com/git-repo'
REPO_REV='stable'
from __future__ import print_function
REPO_URL = 'https://gerrit.googlesource.com/git-repo'
REPO_REV = 'stable'
|
||||
|
||||
# Copyright (C) 2008 Google Inc.
|
||||
#
|
||||
@ -19,19 +20,11 @@ REPO_REV='stable'
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
magic='--calling-python-from-/bin/sh--'
|
||||
"""exec" python -E "$0" "$@" """#$magic"
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
if sys.argv[-1] == '#%s' % magic:
|
||||
del sys.argv[-1]
|
||||
del magic
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 17)
|
||||
VERSION = (1, 19)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1,0)
|
||||
KEYRING_VERSION = (1, 1)
|
||||
MAINTAINER_KEYS = """
|
||||
|
||||
Repo Maintainer <repo@android.kernel.org>
|
||||
@ -74,13 +67,45 @@ HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W
|
||||
zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp
|
||||
TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
|
||||
=CMiZ
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
Conley Owens <cco3@android.com>
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: GnuPG v1.4.11 (GNU/Linux)
|
||||
|
||||
mQENBFBiLPwBCACvISTASOgFXwADw2GYRH2I2z9RvYkYoZ6ThTTNlMXbbYYKO2Wo
|
||||
a9LQDNW0TbCEekg5UKk0FD13XOdWaqUt4Gtuvq9c43GRSjMO6NXH+0BjcQ8vUtY2
|
||||
/W4CYUevwdo4nQ1+1zsOCu1XYe/CReXq0fdugv3hgmRmh3sz1soo37Q44W2frxxg
|
||||
U7Rz3Da4FjgAL0RQ8qndD+LwRHXTY7H7wYM8V/3cYFZV7pSodd75q3MAXYQLf0ZV
|
||||
QR1XATu5l1QnXrxgHvz7MmDwb1D+jX3YPKnZveaukigQ6hDHdiVcePBiGXmk8LZC
|
||||
2jQkdXeF7Su1ZYpr2nnEHLJ6vOLcCpPGb8gDABEBAAG0H0NvbmxleSBPd2VucyA8
|
||||
Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlBiLPwCGwMGCwkIBwMCBhUIAgkK
|
||||
CwQWAgMBAh4BAheAAAoJEBkmlFUziHGkHVkH/2Hks2Cif5i2xPtv2IFZcjL42joU
|
||||
T7lO5XFqUYS9ZNHpGa/V0eiPt7rHoO16glR83NZtwlrq2cSN89i9HfOhMYV/qLu8
|
||||
fLCHcV2muw+yCB5s5bxnI5UkToiNZyBNqFkcOt/Kbj9Hpy68A1kmc6myVEaUYebq
|
||||
2Chx/f3xuEthan099t746v1K+/6SvQGDNctHuaMr9cWdxZtHjdRf31SQRc99Phe5
|
||||
w+ZGR/ebxNDKRK9mKgZT8wVFHlXerJsRqWIqtx1fsW1UgLgbpcpe2MChm6B5wTu0
|
||||
s1ltzox3l4q71FyRRPUJxXyvGkDLZWpK7EpiHSCOYq/KP3HkKeXU3xqHpcG5AQ0E
|
||||
UGIs/AEIAKzO/7lO9cB6dshmZYo8Vy/b7aGicThE+ChcDSfhvyOXVdEM2GKAjsR+
|
||||
rlBWbTFX3It301p2HwZPFEi9nEvJxVlqqBiW0bPmNMkDRR55l2vbWg35wwkg6RyE
|
||||
Bc5/TQjhXI2w8IvlimoGoUff4t3JmMOnWrnKSvL+5iuRj12p9WmanCHzw3Ee7ztf
|
||||
/aU/q+FTpr3DLerb6S8xbv86ySgnJT6o5CyL2DCWRtnYQyGVi0ZmLzEouAYiO0hs
|
||||
z0AAu28Mj+12g2WwePRz6gfM9rHtI37ylYW3oT/9M9mO9ei/Bc/1D7Dz6qNV+0vg
|
||||
uSVJxM2Bl6GalHPZLhHntFEdIA6EdoUAEQEAAYkBHwQYAQIACQUCUGIs/AIbDAAK
|
||||
CRAZJpRVM4hxpNfkB/0W/hP5WK/NETXBlWXXW7JPaWO2c5kGwD0lnj5RRmridyo1
|
||||
vbm5PdM91jOsDQYqRu6YOoYBnDnEhB2wL2bPh34HWwwrA+LwB8hlcAV2z1bdwyfl
|
||||
3R823fReKN3QcvLHzmvZPrF4Rk97M9UIyKS0RtnfTWykRgDWHIsrtQPoNwsXrWoT
|
||||
9LrM2v+1+9mp3vuXnE473/NHxmiWEQH9Ez+O/mOxQ7rSOlqGRiKq/IBZCfioJOtV
|
||||
fTQeIu/yASZnsLBqr6SJEGwYBoWcyjG++k4fyw8ocOAo4uGDYbxgN7yYfNQ0OH7o
|
||||
V6pfUgqKLWa/aK7/N1ZHnPdFLD8Xt0Dmy4BPwrKC
|
||||
=O7am
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
"""
|
||||
|
||||
GIT = 'git' # our git command
|
||||
MIN_GIT_VERSION = (1, 5, 4) # minimum supported git version
|
||||
MIN_GIT_VERSION = (1, 7, 2) # minimum supported git version
|
||||
repodir = '.repo' # name of repo's private directory
|
||||
S_repo = 'repo' # special repo reposiory
|
||||
S_repo = 'repo' # special repo repository
|
||||
S_manifests = 'manifests' # special manifest repository
|
||||
REPO_MAIN = S_repo + '/main.py' # main script
|
||||
|
||||
@@ -88,10 +113,21 @@ REPO_MAIN = S_repo + '/main.py' # main script
import optparse
import os
import re
import readline
import stat
import subprocess
import sys
import urllib2
try:
import urllib2
except ImportError:
# For python3
import urllib.request
import urllib.error
else:
# For python2
import imp
urllib = imp.new_module('urllib')
urllib.request = urllib2
urllib.error = urllib2

home_dot_repo = os.path.expanduser('~/.repoconfig')
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
@ -118,7 +154,8 @@ group.add_option('-m', '--manifest-name',
|
||||
help='initial manifest file', metavar='NAME.xml')
|
||||
group.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forrest')
|
||||
help='create a replica of the remote repositories '
|
||||
'rather than a client working directory')
|
||||
group.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
@ -131,7 +168,7 @@ group.add_option('-g', '--groups',
|
||||
metavar='GROUP')
|
||||
group.add_option('-p', '--platform',
|
||||
dest='platform', default="auto",
|
||||
help='restrict manifest projects to ones with a specified'
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
|
||||
@ -180,25 +217,24 @@ def _Init(args):
|
||||
if branch.startswith('refs/heads/'):
|
||||
branch = branch[len('refs/heads/'):]
|
||||
if branch.startswith('refs/'):
|
||||
print >>sys.stderr, "fatal: invalid branch name '%s'" % branch
|
||||
print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
if not os.path.isdir(repodir):
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
repodir, e.strerror)
|
||||
# Don't faise CloneFailure; that would delete the
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s'
|
||||
% (repodir, e.strerror), file=sys.stderr)
|
||||
# Don't raise CloneFailure; that would delete the
|
||||
# name. Instead exit immediately.
|
||||
#
|
||||
sys.exit(1)
|
||||
|
||||
_CheckGitVersion()
|
||||
try:
|
||||
if _NeedSetupGnuPG():
|
||||
can_verify = _SetupGnuPG(opt.quiet)
|
||||
if NeedSetupGnuPG():
|
||||
can_verify = SetupGnuPG(opt.quiet)
|
||||
else:
|
||||
can_verify = True
|
||||
|
||||
@ -213,8 +249,8 @@ def _Init(args):
|
||||
_Checkout(dst, branch, rev, opt.quiet)
|
||||
except CloneFailure:
|
||||
if opt.quiet:
|
||||
print >>sys.stderr, \
|
||||
'fatal: repo init failed; run without --quiet to see why'
|
||||
print('fatal: repo init failed; run without --quiet to see why',
|
||||
file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
@ -222,13 +258,13 @@ def _CheckGitVersion():
|
||||
cmd = [GIT, '--version']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
except OSError, e:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, "fatal: '%s' is not available" % GIT
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, 'Please make sure %s is installed'\
|
||||
' and in your path.' % GIT
|
||||
except OSError as e:
|
||||
print(file=sys.stderr)
|
||||
print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
print('fatal: %s' % e, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print('Please make sure %s is installed and in your path.' % GIT,
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = proc.stdout.read().strip()
|
||||
@ -236,18 +272,18 @@ def _CheckGitVersion():
|
||||
proc.wait()
|
||||
|
||||
if not ver_str.startswith('git version '):
|
||||
print >>sys.stderr, 'error: "%s" unsupported' % ver_str
|
||||
print('error: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = ver_str[len('git version '):].strip()
|
||||
ver_act = tuple(map(lambda x: int(x), ver_str.split('.')[0:3]))
|
||||
ver_act = tuple(map(int, ver_str.split('.')[0:3]))
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
need = '.'.join(map(lambda x: str(x), MIN_GIT_VERSION))
|
||||
print >>sys.stderr, 'fatal: git %s or later required' % need
|
||||
need = '.'.join(map(str, MIN_GIT_VERSION))
|
||||
print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
|
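As an aside on the version check above: it reduces to parsing the output of "git --version" into an integer tuple and comparing it against MIN_GIT_VERSION. A minimal standalone sketch of that comparison (illustrative only, not part of the commit; the helper name is made up):

import subprocess

MIN_GIT_VERSION = (1, 7, 2)

def git_version_ok():
  out = subprocess.check_output(['git', '--version']).decode().strip()
  # e.g. "git version 1.7.2.5" -> "1.7.2.5"; keep at most three numeric fields
  ver_str = out[len('git version '):].split()[0]
  ver_act = tuple(map(int, ver_str.split('.')[0:3]))
  return ver_act >= MIN_GIT_VERSION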
||||
def _NeedSetupGnuPG():
|
||||
def NeedSetupGnuPG():
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
return True
|
||||
|
||||
@ -259,29 +295,27 @@ def _NeedSetupGnuPG():
|
||||
if not kv:
|
||||
return True
|
||||
|
||||
kv = tuple(map(lambda x: int(x), kv.split('.')))
|
||||
kv = tuple(map(int, kv.split('.')))
|
||||
if kv < KEYRING_VERSION:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _SetupGnuPG(quiet):
|
||||
def SetupGnuPG(quiet):
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
home_dot_repo, e.strerror)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s'
|
||||
% (home_dot_repo, e.strerror), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isdir(gpg_dir):
|
||||
try:
|
||||
os.mkdir(gpg_dir, 0700)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
gpg_dir, e.strerror)
|
||||
os.mkdir(gpg_dir, stat.S_IRWXU)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
env = os.environ.copy()
|
||||
@ -292,23 +326,23 @@ def _SetupGnuPG(quiet):
|
||||
proc = subprocess.Popen(cmd,
|
||||
env = env,
|
||||
stdin = subprocess.PIPE)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'warning: gpg (GnuPG) is not available.'
|
||||
print >>sys.stderr, 'warning: Installing it is strongly encouraged.'
|
||||
print >>sys.stderr
|
||||
print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
|
||||
print('warning: Installing it is strongly encouraged.', file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
return False
|
||||
|
||||
proc.stdin.write(MAINTAINER_KEYS)
|
||||
proc.stdin.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print >>sys.stderr, 'fatal: registering repo maintainer keys failed'
|
||||
print('fatal: registering repo maintainer keys failed', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
print
|
||||
print()
|
||||
|
||||
fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
|
||||
fd.write('.'.join(map(lambda x: str(x), KEYRING_VERSION)) + '\n')
|
||||
fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
|
||||
fd.close()
|
||||
return True
|
||||
|
||||
@ -324,7 +358,7 @@ def _SetConfig(local, name, value):
|
||||
def _InitHttp():
|
||||
handlers = []
|
||||
|
||||
mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
|
||||
try:
|
||||
import netrc
|
||||
n = netrc.netrc()
|
||||
@ -334,20 +368,20 @@ def _InitHttp():
|
||||
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||
except:
|
||||
pass
|
||||
handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
|
||||
handlers.append(urllib2.HTTPDigestAuthHandler(mgr))
|
||||
handlers.append(urllib.request.HTTPBasicAuthHandler(mgr))
|
||||
handlers.append(urllib.request.HTTPDigestAuthHandler(mgr))
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
|
||||
handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
|
||||
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||
handlers.append(urllib2.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
|
||||
urllib2.install_opener(urllib2.build_opener(*handlers))
|
||||
handlers.append(urllib.request.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
|
||||
urllib.request.install_opener(urllib.request.build_opener(*handlers))
|
||||
|
||||
def _Fetch(url, local, src, quiet):
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
print('Get %s' % url, file=sys.stderr)
|
||||
|
||||
cmd = [GIT, 'fetch']
|
||||
if quiet:
|
||||
@ -392,20 +426,20 @@ def _DownloadBundle(url, local, quiet):
|
||||
dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
|
||||
try:
|
||||
try:
|
||||
r = urllib2.urlopen(url)
|
||||
except urllib2.HTTPError, e:
|
||||
r = urllib.request.urlopen(url)
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.code == 404:
|
||||
return False
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: HTTP error %s' % e.code
|
||||
print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
print('fatal: HTTP error %s' % e.code, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
except urllib2.URLError, e:
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: error %s' % e.reason
|
||||
except urllib.error.URLError as e:
|
||||
print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
print('fatal: error %s' % e.reason, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
try:
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
print('Get %s' % url, file=sys.stderr)
|
||||
while True:
|
||||
buf = r.read(8192)
|
||||
if buf == '':
|
||||
@ -428,25 +462,24 @@ def _Clone(url, local, quiet):
|
||||
"""
|
||||
try:
|
||||
os.mkdir(local)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' \
|
||||
% (local, e.strerror)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s' % (local, e.strerror),
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
cmd = [GIT, 'init', '--quiet']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, cwd = local)
|
||||
except OSError, e:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, "fatal: '%s' is not available" % GIT
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, 'Please make sure %s is installed'\
|
||||
' and in your path.' % GIT
|
||||
except OSError as e:
|
||||
print(file=sys.stderr)
|
||||
print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
print('fatal: %s' % e, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print('Please make sure %s is installed and in your path.' % GIT,
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
if proc.wait() != 0:
|
||||
print >>sys.stderr, 'fatal: could not create %s' % local
|
||||
print('fatal: could not create %s' % local, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
_InitHttp()
|
||||
@ -474,21 +507,18 @@ def _Verify(cwd, branch, quiet):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0 or not cur:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr,\
|
||||
"fatal: branch '%s' has not been signed" \
|
||||
% branch
|
||||
print(file=sys.stderr)
|
||||
print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
|
||||
if m:
|
||||
cur = m.group(1)
|
||||
if not quiet:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, \
|
||||
"info: Ignoring branch '%s'; using tagged release '%s'" \
|
||||
% (branch, cur)
|
||||
print >>sys.stderr
|
||||
print(file=sys.stderr)
|
||||
print("info: Ignoring branch '%s'; using tagged release '%s'"
|
||||
% (branch, cur), file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
@ -506,10 +536,10 @@ def _Verify(cwd, branch, quiet):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, out
|
||||
print >>sys.stderr, err
|
||||
print >>sys.stderr
|
||||
print(file=sys.stderr)
|
||||
print(out, file=sys.stderr)
|
||||
print(err, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
return '%s^0' % cur
|
||||
|
||||
@ -539,19 +569,19 @@ def _Checkout(cwd, branch, rev, quiet):
|
||||
def _FindRepo():
|
||||
"""Look for a repo installation, starting at the current directory.
|
||||
"""
|
||||
dir = os.getcwd()
|
||||
curdir = os.getcwd()
|
||||
repo = None
|
||||
|
||||
olddir = None
|
||||
while dir != '/' \
|
||||
and dir != olddir \
|
||||
while curdir != '/' \
|
||||
and curdir != olddir \
|
||||
and not repo:
|
||||
repo = os.path.join(dir, repodir, REPO_MAIN)
|
||||
repo = os.path.join(curdir, repodir, REPO_MAIN)
|
||||
if not os.path.isfile(repo):
|
||||
repo = None
|
||||
olddir = dir
|
||||
dir = os.path.dirname(dir)
|
||||
return (repo, os.path.join(dir, repodir))
|
||||
olddir = curdir
|
||||
curdir = os.path.dirname(curdir)
|
||||
return (repo, os.path.join(curdir, repodir))
|
||||
|
||||
|
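The _FindRepo change above is just a rename of the loop variable; the underlying behaviour is a walk from the current directory toward the filesystem root that stops at the first directory containing .repo/repo/main.py. A compact sketch of that walk (illustrative; the function name is made up):

import os

def find_repo(start=None):
  curdir = os.path.abspath(start or os.getcwd())
  while True:
    candidate = os.path.join(curdir, '.repo', 'repo', 'main.py')
    if os.path.isfile(candidate):
      return candidate, os.path.join(curdir, '.repo')
    parent = os.path.dirname(curdir)
    if parent == curdir:  # reached the filesystem root
      return None, None
    curdir = parent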
||||
class _Options:
|
||||
@ -563,7 +593,7 @@ def _ParseArguments(args):
|
||||
opt = _Options()
|
||||
arg = []
|
||||
|
||||
for i in xrange(0, len(args)):
|
||||
for i in range(len(args)):
|
||||
a = args[i]
|
||||
if a == '-h' or a == '--help':
|
||||
opt.help = True
|
||||
@ -576,7 +606,7 @@ def _ParseArguments(args):
|
||||
|
||||
|
||||
def _Usage():
|
||||
print >>sys.stderr,\
|
||||
print(
|
||||
"""usage: repo COMMAND [ARGS]
|
||||
|
||||
repo is not yet installed. Use "repo init" to install it here.
|
||||
@ -587,7 +617,7 @@ The most commonly used repo commands are:
|
||||
help Display detailed help on a command
|
||||
|
||||
For access to the full online help, install repo ("repo init").
|
||||
"""
|
||||
""", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -597,25 +627,23 @@ def _Help(args):
|
||||
init_optparse.print_help()
|
||||
sys.exit(0)
|
||||
else:
|
||||
print >>sys.stderr,\
|
||||
"error: '%s' is not a bootstrap command.\n"\
|
||||
' For access to online help, install repo ("repo init").'\
|
||||
% args[0]
|
||||
print("error: '%s' is not a bootstrap command.\n"
|
||||
' For access to online help, install repo ("repo init").'
|
||||
% args[0], file=sys.stderr)
|
||||
else:
|
||||
_Usage()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _NotInstalled():
|
||||
print >>sys.stderr,\
|
||||
'error: repo is not installed. Use "repo init" to install it here.'
|
||||
print('error: repo is not installed. Use "repo init" to install it here.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _NoCommands(cmd):
|
||||
print >>sys.stderr,\
|
||||
"""error: command '%s' requires repo to be installed first.
|
||||
Use "repo init" to install it here.""" % cmd
|
||||
print("""error: command '%s' requires repo to be installed first.
|
||||
Use "repo init" to install it here.""" % cmd, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -652,18 +680,18 @@ def _SetDefaultsTo(gitdir):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print >>sys.stderr, 'fatal: %s has no current branch' % gitdir
|
||||
print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def main(orig_args):
|
||||
main, dir = _FindRepo()
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
cmd, opt, args = _ParseArguments(orig_args)
|
||||
|
||||
wrapper_path = os.path.abspath(__file__)
|
||||
my_main, my_git = _RunSelf(wrapper_path)
|
||||
|
||||
if not main:
|
||||
if not repo_main:
|
||||
if opt.help:
|
||||
_Usage()
|
||||
if cmd == 'help':
|
||||
@ -683,26 +711,26 @@ def main(orig_args):
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(repodir)
|
||||
sys.exit(1)
|
||||
main, dir = _FindRepo()
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
else:
|
||||
_NoCommands(cmd)
|
||||
|
||||
if my_main:
|
||||
main = my_main
|
||||
repo_main = my_main
|
||||
|
||||
ver_str = '.'.join(map(lambda x: str(x), VERSION))
|
||||
me = [main,
|
||||
'--repo-dir=%s' % dir,
|
||||
ver_str = '.'.join(map(str, VERSION))
|
||||
me = [repo_main,
|
||||
'--repo-dir=%s' % rel_repo_dir,
|
||||
'--wrapper-version=%s' % ver_str,
|
||||
'--wrapper-path=%s' % wrapper_path,
|
||||
'--']
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
try:
|
||||
os.execv(main, me)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, "fatal: unable to start %s" % main
|
||||
print >>sys.stderr, "fatal: %s" % e
|
||||
os.execv(repo_main, me)
|
||||
except OSError as e:
|
||||
print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
print("fatal: %s" % e, file=sys.stderr)
|
||||
sys.exit(148)
|
||||
|
||||
|
||||
|
@ -15,7 +15,7 @@
|
||||
|
||||
import os
|
||||
|
||||
all = {}
|
||||
all_commands = {}
|
||||
|
||||
my_dir = os.path.dirname(__file__)
|
||||
for py in os.listdir(my_dir):
|
||||
@ -43,7 +43,7 @@ for py in os.listdir(my_dir):
|
||||
|
||||
name = name.replace('_', '-')
|
||||
cmd.NAME = name
|
||||
all[name] = cmd
|
||||
all_commands[name] = cmd
|
||||
|
||||
if 'help' in all:
|
||||
all['help'].commands = all
|
||||
if 'help' in all_commands:
|
||||
all_commands['help'].commands = all_commands
|
||||
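For context on the all -> all_commands rename above: the registry is built by scanning the subcmds directory and instantiating one command class per module. A generic sketch of that discovery pattern (the importlib call and the class-name convention are assumptions for illustration, not a quote of repo's code):

import importlib
import os

all_commands = {}
my_dir = os.path.dirname(__file__)
for py in sorted(os.listdir(my_dir)):
  if py == '__init__.py' or not py.endswith('.py'):
    continue
  name = py[:-3]
  mod = importlib.import_module('.' + name, package=__package__)
  # assumed convention: module foo_bar defines a command class FooBar
  clsname = name.replace('_', ' ').title().replace(' ', '')
  cmd = getattr(mod, clsname)()
  cmd.NAME = name.replace('_', '-')
  all_commands[cmd.NAME] = cmd

if 'help' in all_commands:
  all_commands['help'].commands = all_commands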
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import git
|
||||
@ -36,16 +37,16 @@ It is equivalent to "git branch -D <branchname>".
|
||||
|
||||
nb = args[0]
|
||||
if not git.check_ref_format('heads/%s' % nb):
|
||||
print >>sys.stderr, "error: '%s' is not a valid name" % nb
|
||||
print("error: '%s' is not a valid name" % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all = self.GetProjects(args[1:])
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
|
||||
pm = Progress('Abandon %s' % nb, len(all))
|
||||
for project in all:
|
||||
pm = Progress('Abandon %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
|
||||
status = project.AbandonBranch(nb)
|
||||
@ -58,13 +59,13 @@ It is equivalent to "git branch -D <branchname>".
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot abandon %s" \
|
||||
% (p.relpath, nb)
|
||||
print("error: %s/: cannot abandon %s" % (p.relpath, nb),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
print('error: no project has branch %s' % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >>sys.stderr, 'Abandoned in %d project(s):\n %s' % (
|
||||
len(success), '\n '.join(p.relpath for p in success))
|
||||
print('Abandoned in %d project(s):\n %s'
|
||||
% (len(success), '\n '.join(p.relpath for p in success)),
|
||||
file=sys.stderr)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
@ -93,21 +94,21 @@ is shown, then the branch appears in all projects.
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all = {}
|
||||
all_branches = {}
|
||||
project_cnt = len(projects)
|
||||
|
||||
for project in projects:
|
||||
for name, b in project.GetBranches().iteritems():
|
||||
b.project = project
|
||||
if name not in all:
|
||||
all[name] = BranchInfo(name)
|
||||
all[name].add(b)
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
|
||||
names = all.keys()
|
||||
names = all_branches.keys()
|
||||
names.sort()
|
||||
|
||||
if not names:
|
||||
print >>sys.stderr, ' (no branches)'
|
||||
print(' (no branches)', file=sys.stderr)
|
||||
return
|
||||
|
||||
width = 25
|
||||
@ -116,7 +117,7 @@ is shown, then the branch appears in all projects.
|
||||
width = len(name)
|
||||
|
||||
for name in names:
|
||||
i = all[name]
|
||||
i = all_branches[name]
|
||||
in_cnt = len(i.projects)
|
||||
|
||||
if i.IsCurrent:
|
||||
@ -140,12 +141,12 @@ is shown, then the branch appears in all projects.
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
type = 'in'
|
||||
in_type = 'in'
|
||||
for b in i.projects:
|
||||
paths.append(b.project.relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
type = 'not in'
|
||||
in_type = 'not in'
|
||||
have = set()
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
@ -153,11 +154,11 @@ is shown, then the branch appears in all projects.
|
||||
if not p in have:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (type, ', '.join(paths))
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
if width + 7 + len(s) < 80:
|
||||
fmt(s)
|
||||
else:
|
||||
fmt(' %s:' % type)
|
||||
fmt(' %s:' % in_type)
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command
|
||||
from progress import Progress
|
||||
@ -39,10 +40,10 @@ The command is equivalent to:
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all = self.GetProjects(args[1:])
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
|
||||
pm = Progress('Checkout %s' % nb, len(all))
|
||||
for project in all:
|
||||
pm = Progress('Checkout %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
|
||||
status = project.CheckoutBranch(nb)
|
||||
@ -55,10 +56,9 @@ The command is equivalent to:
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot checkout %s" \
|
||||
% (p.relpath, nb)
|
||||
print("error: %s/: cannot checkout %s" % (p.relpath, nb),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
print('error: no project has branch %s' % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -13,7 +13,9 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys, re
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
|
||||
@ -45,13 +47,13 @@ change id will be added.
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, p.stderr
|
||||
print(p.stderr, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
sha1 = p.stdout.strip()
|
||||
|
||||
p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to retrieve old commit message"
|
||||
print("error: Failed to retrieve old commit message", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
old_msg = self._StripHeader(p.stdout)
|
||||
|
||||
@ -61,8 +63,8 @@ change id will be added.
|
||||
capture_stderr = True)
|
||||
status = p.Wait()
|
||||
|
||||
print >>sys.stdout, p.stdout
|
||||
print >>sys.stderr, p.stderr
|
||||
print(p.stdout, file=sys.stdout)
|
||||
print(p.stderr, file=sys.stderr)
|
||||
|
||||
if status == 0:
|
||||
# The cherry-pick was applied correctly. We just need to edit the
|
||||
@ -75,16 +77,14 @@ change id will be added.
|
||||
capture_stderr = True)
|
||||
p.stdin.write(new_msg)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to update commit message"
|
||||
print("error: Failed to update commit message", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
print >>sys.stderr, """\
|
||||
NOTE: When committing (please see above) and editing the commit message,
|
||||
please remove the old Change-Id-line and add:
|
||||
"""
|
||||
print >>sys.stderr, self._GetReference(sha1)
|
||||
print >>sys.stderr
|
||||
print('NOTE: When committing (please see above) and editing the commit '
'message, please remove the old Change-Id-line and add:')
print(self._GetReference(sha1), file=sys.stderr)
print(file=sys.stderr)
|
||||
|
||||
def _IsChangeId(self, line):
|
||||
return CHANGE_ID_RE.match(line)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import sys
|
||||
|
||||
@ -32,13 +33,13 @@ makes it available in your project's local working directory.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-c','--cherry-pick',
|
||||
p.add_option('-c', '--cherry-pick',
|
||||
dest='cherrypick', action='store_true',
|
||||
help="cherry-pick instead of checkout")
|
||||
p.add_option('-r','--revert',
|
||||
p.add_option('-r', '--revert',
|
||||
dest='revert', action='store_true',
|
||||
help="revert instead of checkout")
|
||||
p.add_option('-f','--ff-only',
|
||||
p.add_option('-f', '--ff-only',
|
||||
dest='ffonly', action='store_true',
|
||||
help="force fast-forward merge")
|
||||
|
||||
@ -68,23 +69,23 @@ makes it available in your project's local working directory.
|
||||
for project, change_id, ps_id in self._ParseChangeIds(args):
|
||||
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||
if not dl:
|
||||
print >>sys.stderr, \
|
||||
'[%s] change %d/%d not found' \
|
||||
% (project.name, change_id, ps_id)
|
||||
print('[%s] change %d/%d not found'
|
||||
% (project.name, change_id, ps_id),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.revert and not dl.commits:
|
||||
print >>sys.stderr, \
|
||||
'[%s] change %d/%d has already been merged' \
|
||||
% (project.name, change_id, ps_id)
|
||||
print('[%s] change %d/%d has already been merged'
|
||||
% (project.name, change_id, ps_id),
|
||||
file=sys.stderr)
|
||||
continue
|
||||
|
||||
if len(dl.commits) > 1:
|
||||
print >>sys.stderr, \
|
||||
'[%s] %d/%d depends on %d unmerged changes:' \
|
||||
% (project.name, change_id, ps_id, len(dl.commits))
|
||||
print('[%s] %d/%d depends on %d unmerged changes:'
|
||||
% (project.name, change_id, ps_id, len(dl.commits)),
|
||||
file=sys.stderr)
|
||||
for c in dl.commits:
|
||||
print >>sys.stderr, ' %s' % (c)
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit)
|
||||
elif opt.revert:
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import fcntl
|
||||
import re
|
||||
import os
|
||||
@ -92,6 +93,9 @@ following <command>.
|
||||
|
||||
Unless -p is used, stdin, stdout, stderr are inherited from the
|
||||
terminal and are not redirected.
|
||||
|
||||
If -e is used, when a command exits unsuccessfully, '%prog' will abort
|
||||
without iterating through the remaining projects.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
@ -104,6 +108,9 @@ terminal and are not redirected.
|
||||
dest='command',
|
||||
action='callback',
|
||||
callback=cmd)
|
||||
p.add_option('-e', '--abort-on-errors',
|
||||
dest='abort_on_errors', action='store_true',
|
||||
help='Abort if a command exits unsuccessfully')
|
||||
|
||||
g = p.add_option_group('Output')
|
||||
g.add_option('-p',
|
||||
@ -141,12 +148,16 @@ terminal and are not redirected.
|
||||
for cn in cmd[1:]:
|
||||
if not cn.startswith('-'):
|
||||
break
|
||||
if cn in _CAN_COLOR:
|
||||
else:
|
||||
cn = None
|
||||
# pylint: disable=W0631
|
||||
if cn and cn in _CAN_COLOR:
|
||||
class ColorCmd(Coloring):
|
||||
def __init__(self, config, cmd):
|
||||
Coloring.__init__(self, config, cmd)
|
||||
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
|
||||
cmd.insert(cmd.index(cn) + 1, '--color')
|
||||
# pylint: enable=W0631
|
||||
|
||||
mirror = self.manifest.IsMirror
|
||||
out = ForallColoring(self.manifest.manifestProject.config)
|
||||
@ -179,7 +190,7 @@ terminal and are not redirected.
|
||||
if not os.path.exists(cwd):
|
||||
if (opt.project_header and opt.verbose) \
|
||||
or not opt.project_header:
|
||||
print >>sys.stderr, 'skipping %s/' % project.relpath
|
||||
print('skipping %s/' % project.relpath, file=sys.stderr)
|
||||
continue
|
||||
|
||||
if opt.project_header:
|
||||
@ -208,7 +219,6 @@ terminal and are not redirected.
|
||||
return self.fd.fileno()
|
||||
|
||||
empty = True
|
||||
didout = False
|
||||
errbuf = ''
|
||||
|
||||
p.stdin.close()
|
||||
@ -220,7 +230,7 @@ terminal and are not redirected.
|
||||
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
while s_in:
|
||||
in_ready, out_ready, err_ready = select.select(s_in, [], [])
|
||||
in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
|
||||
for s in in_ready:
|
||||
buf = s.fd.read(4096)
|
||||
if not buf:
|
||||
@ -229,9 +239,7 @@ terminal and are not redirected.
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
if s.fd == p.stdout:
|
||||
didout = True
|
||||
else:
|
||||
if s.fd != p.stdout:
|
||||
errbuf += buf
|
||||
continue
|
||||
|
||||
@ -253,7 +261,12 @@ terminal and are not redirected.
|
||||
s.dest.flush()
|
||||
|
||||
r = p.wait()
|
||||
if r != 0 and r != rc:
|
||||
rc = r
|
||||
if r != 0:
|
||||
if r != rc:
|
||||
rc = r
|
||||
if opt.abort_on_errors:
|
||||
print("error: %s: Aborting due to previous error" % project.relpath,
|
||||
file=sys.stderr)
|
||||
sys.exit(r)
|
||||
if rc != 0:
|
||||
sys.exit(rc)
|
||||
|
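The stream handling above (non-blocking pipes plus select) is what lets forall interleave a child's stdout and stderr without deadlocking on either pipe. A trimmed, self-contained sketch of the same pattern (POSIX only; the git command is just an example):

import fcntl
import os
import select
import subprocess

p = subprocess.Popen(['git', '--version'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
streams = [p.stdout, p.stderr]
for s in streams:
  flags = fcntl.fcntl(s, fcntl.F_GETFL)
  fcntl.fcntl(s, fcntl.F_SETFL, flags | os.O_NONBLOCK)

while streams:
  ready, _, _ = select.select(streams, [], [])
  for s in ready:
    buf = s.read(4096)
    if not buf:  # EOF on this stream
      s.close()
      streams.remove(s)
      continue
    # forward the chunk; stderr could instead be buffered for later
    os.write(1 if s is p.stdout else 2, buf)
p.wait()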
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
@ -51,7 +52,7 @@ Examples
|
||||
|
||||
Look for a line that has '#define' and either 'MAX_PATH or 'PATH_MAX':
|
||||
|
||||
repo grep -e '#define' --and -\( -e MAX_PATH -e PATH_MAX \)
|
||||
repo grep -e '#define' --and -\\( -e MAX_PATH -e PATH_MAX \\)
|
||||
|
||||
Look for a line that has 'NODE' or 'Unexpected' in files that
|
||||
contain a line that matches both expressions:
|
||||
@ -84,7 +85,7 @@ contain a line that matches both expressions:
|
||||
g.add_option('--cached',
|
||||
action='callback', callback=carry,
|
||||
help='Search the index, instead of the work tree')
|
||||
g.add_option('-r','--revision',
|
||||
g.add_option('-r', '--revision',
|
||||
dest='revision', action='append', metavar='TREEish',
|
||||
help='Search TREEish, instead of the work tree')
|
||||
|
||||
@ -96,7 +97,7 @@ contain a line that matches both expressions:
|
||||
g.add_option('-i', '--ignore-case',
|
||||
action='callback', callback=carry,
|
||||
help='Ignore case differences')
|
||||
g.add_option('-a','--text',
|
||||
g.add_option('-a', '--text',
|
||||
action='callback', callback=carry,
|
||||
help="Process binary files as if they were text")
|
||||
g.add_option('-I',
|
||||
@ -125,7 +126,7 @@ contain a line that matches both expressions:
|
||||
g.add_option('--and', '--or', '--not',
|
||||
action='callback', callback=carry,
|
||||
help='Boolean operators to combine patterns')
|
||||
g.add_option('-(','-)',
|
||||
g.add_option('-(', '-)',
|
||||
action='callback', callback=carry,
|
||||
help='Boolean operator grouping')
|
||||
|
||||
@ -145,10 +146,10 @@ contain a line that matches both expressions:
|
||||
action='callback', callback=carry,
|
||||
metavar='CONTEXT', type='str',
|
||||
help='Show CONTEXT lines after match')
|
||||
g.add_option('-l','--name-only','--files-with-matches',
|
||||
g.add_option('-l', '--name-only', '--files-with-matches',
|
||||
action='callback', callback=carry,
|
||||
help='Show only file names containing matching lines')
|
||||
g.add_option('-L','--files-without-match',
|
||||
g.add_option('-L', '--files-without-match',
|
||||
action='callback', callback=carry,
|
||||
help='Show only file names not containing matching lines')
|
||||
|
||||
@ -157,9 +158,9 @@ contain a line that matches both expressions:
|
||||
out = GrepColoring(self.manifest.manifestProject.config)
|
||||
|
||||
cmd_argv = ['grep']
|
||||
if out.is_on and git_require((1,6,3)):
|
||||
if out.is_on and git_require((1, 6, 3)):
|
||||
cmd_argv.append('--color')
|
||||
cmd_argv.extend(getattr(opt,'cmd_argv',[]))
|
||||
cmd_argv.extend(getattr(opt, 'cmd_argv', []))
|
||||
|
||||
if '-e' not in cmd_argv:
|
||||
if not args:
|
||||
@ -178,8 +179,7 @@ contain a line that matches both expressions:
|
||||
have_rev = False
|
||||
if opt.revision:
|
||||
if '--cached' in cmd_argv:
|
||||
print >>sys.stderr,\
|
||||
'fatal: cannot combine --cached and --revision'
|
||||
print('fatal: cannot combine --cached and --revision', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
have_rev = True
|
||||
cmd_argv.extend(opt.revision)
|
||||
@ -230,13 +230,13 @@ contain a line that matches both expressions:
|
||||
out.nl()
|
||||
else:
|
||||
for line in r:
|
||||
print line
|
||||
print(line)
|
||||
|
||||
if have_match:
|
||||
sys.exit(0)
|
||||
elif have_rev and bad_rev:
|
||||
for r in opt.revision:
|
||||
print >>sys.stderr, "error: can't search revision %s" % r
|
||||
print("error: can't search revision %s" % r, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
sys.exit(1)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import sys
|
||||
from formatter import AbstractFormatter, DumbWriter
|
||||
@ -31,10 +32,8 @@ Displays detailed usage information about a command.
|
||||
"""
|
||||
|
||||
def _PrintAllCommands(self):
|
||||
print 'usage: repo COMMAND [ARGS]'
|
||||
print """
|
||||
The complete list of recognized repo commands are:
|
||||
"""
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = self.commands.keys()
|
||||
commandNames.sort()
|
||||
|
||||
@ -49,17 +48,14 @@ The complete list of recognized repo commands are:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print fmt % (name, summary)
|
||||
print """
|
||||
See 'repo help <command>' for more information on a specific command.
|
||||
"""
|
||||
print(fmt % (name, summary))
|
||||
print("See 'repo help <command>' for more information on a"
|
||||
'specific command.')
|
||||
|
||||
def _PrintCommonCommands(self):
|
||||
print 'usage: repo COMMAND [ARGS]'
|
||||
print """
|
||||
The most commonly used repo commands are:
|
||||
"""
|
||||
commandNames = [name
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The most commonly used repo commands are:')
|
||||
commandNames = [name
|
||||
for name in self.commands.keys()
|
||||
if self.commands[name].common]
|
||||
commandNames.sort()
|
||||
@ -75,11 +71,10 @@ The most commonly used repo commands are:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print fmt % (name, summary)
|
||||
print """
|
||||
See 'repo help <command>' for more information on a specific command.
|
||||
See 'repo help --all' for a complete list of recognized commands.
|
||||
"""
|
||||
print(fmt % (name, summary))
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
|
||||
def _PrintCommandHelp(self, cmd):
|
||||
class _Out(Coloring):
|
||||
@ -120,8 +115,8 @@ See 'repo help --all' for a complete list of recognized commands.
|
||||
m = asciidoc_hdr.match(para)
|
||||
if m:
|
||||
title = m.group(1)
|
||||
type = m.group(2)
|
||||
if type[0] in ('=', '-'):
|
||||
section_type = m.group(2)
|
||||
if section_type[0] in ('=', '-'):
|
||||
p = self.heading
|
||||
else:
|
||||
def _p(fmt, *args):
|
||||
@ -131,7 +126,7 @@ See 'repo help --all' for a complete list of recognized commands.
|
||||
|
||||
p('%s', title)
|
||||
self.nl()
|
||||
p('%s', ''.ljust(len(title),type[0]))
|
||||
p('%s', ''.ljust(len(title), section_type[0]))
|
||||
self.nl()
|
||||
continue
|
||||
|
||||
@ -162,7 +157,7 @@ See 'repo help --all' for a complete list of recognized commands.
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
except KeyError:
|
||||
print >>sys.stderr, "repo: '%s' is not a repo command." % name
|
||||
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
cmd.manifest = self.manifest
|
||||
|
subcmds/info.py (new file, 195 lines)
@ -0,0 +1,195 @@
|
||||
#
|
||||
# Copyright (C) 2012 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from command import PagedCommand
|
||||
from color import Coloring
|
||||
from error import NoSuchProjectError
|
||||
from git_refs import R_M
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
class Info(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||
helpUsage = "%prog [-dl] [-o [-b]] [<project>...]"
|
||||
|
||||
def _Options(self, p, show_smart=True):
|
||||
p.add_option('-d', '--diff',
|
||||
dest='all', action='store_true',
|
||||
help="show full info and commit diff including remote branches")
|
||||
p.add_option('-o', '--overview',
|
||||
dest='overview', action='store_true',
|
||||
help='show overview of all local commits')
|
||||
p.add_option('-b', '--current-branch',
|
||||
dest="current_branch", action="store_true",
|
||||
help="consider only checked out branches")
|
||||
p.add_option('-l', '--local-only',
|
||||
dest="local", action="store_true",
|
||||
help="Disable all remote operations")
|
||||
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.heading = self.out.printer('heading', attr = 'bold')
|
||||
self.headtext = self.out.printer('headtext', fg = 'yellow')
|
||||
self.redtext = self.out.printer('redtext', fg = 'red')
|
||||
self.sha = self.out.printer("sha", fg = 'yellow')
|
||||
self.text = self.out.printer('text')
|
||||
self.dimtext = self.out.printer('dimtext', attr = 'dim')
|
||||
|
||||
self.opt = opt
|
||||
|
||||
mergeBranch = self.manifest.manifestProject.config.GetBranch("default").merge
|
||||
|
||||
self.heading("Manifest branch: ")
|
||||
self.headtext(self.manifest.default.revisionExpr)
|
||||
self.out.nl()
|
||||
self.heading("Manifest merge branch: ")
|
||||
self.headtext(mergeBranch)
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
if not opt.overview:
|
||||
self.printDiffInfo(args)
|
||||
else:
|
||||
self.printCommitOverview(args)
|
||||
|
||||
def printSeparator(self):
|
||||
self.text("----------------------------")
|
||||
self.out.nl()
|
||||
|
||||
def printDiffInfo(self, args):
|
||||
try:
|
||||
projs = self.GetProjects(args)
|
||||
except NoSuchProjectError:
|
||||
return
|
||||
|
||||
for p in projs:
|
||||
self.heading("Project: ")
|
||||
self.headtext(p.name)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Mount path: ")
|
||||
self.headtext(p.worktree)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Current revision: ")
|
||||
self.headtext(p.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
localBranches = p.GetBranches().keys()
|
||||
self.heading("Local Branches: ")
|
||||
self.redtext(str(len(localBranches)))
|
||||
if len(localBranches) > 0:
|
||||
self.text(" [")
|
||||
self.text(", ".join(localBranches))
|
||||
self.text("]")
|
||||
self.out.nl()
|
||||
|
||||
if self.opt.all:
|
||||
self.findRemoteLocalDiff(p)
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
def findRemoteLocalDiff(self, project):
|
||||
# Fetch all the latest commits
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
logTarget = R_M + self.manifest.default.revisionExpr
|
||||
|
||||
bareTmp = project.bare_git._bare
|
||||
project.bare_git._bare = False
|
||||
localCommits = project.bare_git.rev_list(
|
||||
'--abbrev=8',
|
||||
'--abbrev-commit',
|
||||
'--pretty=oneline',
|
||||
logTarget + "..",
|
||||
'--')
|
||||
|
||||
originCommits = project.bare_git.rev_list(
|
||||
'--abbrev=8',
|
||||
'--abbrev-commit',
|
||||
'--pretty=oneline',
|
||||
".." + logTarget,
|
||||
'--')
|
||||
project.bare_git._bare = bareTmp
|
||||
|
||||
self.heading("Local Commits: ")
|
||||
self.redtext(str(len(localCommits)))
|
||||
self.dimtext(" (on current branch)")
|
||||
self.out.nl()
|
||||
|
||||
for c in localCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text("".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
self.heading("Remote Commits: ")
|
||||
self.redtext(str(len(originCommits)))
|
||||
self.out.nl()
|
||||
|
||||
for c in originCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text("".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
def printCommitOverview(self, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches().keys()]
|
||||
br = [x for x in br if x]
|
||||
if self.opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
self.out.nl()
|
||||
self.heading('Projects Overview')
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
self.out.nl()
|
||||
self.headtext(project.relpath)
|
||||
self.out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
self.text('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
self.out.nl()
|
||||
|
||||
for commit in commits:
|
||||
split = commit.split()
|
||||
self.text('{0:38}{1} '.format('','-'))
|
||||
self.sha(split[0] + " ")
|
||||
self.text("".join(split[1:]))
|
||||
self.out.nl()
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
@ -117,18 +118,22 @@ to update the working directory files.
|
||||
dest='config_name', action="store_true", default=False,
|
||||
help='Always prompt for name/e-mail')
|
||||
|
||||
def _RegisteredEnvironmentOptions(self):
|
||||
return {'REPO_MANIFEST_URL': 'manifest_url',
|
||||
'REPO_MIRROR_LOCATION': 'reference'}
|
||||
|
||||
def _SyncManifest(self, opt):
|
||||
m = self.manifest.manifestProject
|
||||
is_new = not m.Exists
|
||||
|
||||
if is_new:
|
||||
if not opt.manifest_url:
|
||||
print >>sys.stderr, 'fatal: manifest url (-u) is required.'
|
||||
print('fatal: manifest url (-u) is required.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print >>sys.stderr, 'Get %s' \
|
||||
% GitConfig.ForUser().UrlInsteadOf(opt.manifest_url)
|
||||
print('Get %s' % GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),
|
||||
file=sys.stderr)
|
||||
m._InitGitDir()
|
||||
|
||||
if opt.manifest_branch:
|
||||
@ -147,7 +152,7 @@ to update the working directory files.
|
||||
r.ResetFetch()
|
||||
r.Save()
|
||||
|
||||
groups = re.split('[,\s]+', opt.groups)
|
||||
groups = re.split(r'[,\s]+', opt.groups)
|
||||
all_platforms = ['linux', 'darwin']
|
||||
platformize = lambda x: 'platform-' + x
|
||||
if opt.platform == 'auto':
|
||||
@ -159,7 +164,7 @@ to update the working directory files.
|
||||
elif opt.platform in all_platforms:
|
||||
groups.extend(platformize(opt.platform))
|
||||
elif opt.platform != 'none':
|
||||
print >>sys.stderr, 'fatal: invalid platform flag'
|
||||
print('fatal: invalid platform flag', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
groups = [x for x in groups if x]
|
||||
@ -175,12 +180,13 @@ to update the working directory files.
|
||||
if is_new:
|
||||
m.config.SetString('repo.mirror', 'true')
|
||||
else:
|
||||
print >>sys.stderr, 'fatal: --mirror not supported on existing client'
|
||||
print('fatal: --mirror not supported on existing client',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not m.Sync_NetworkHalf(is_new=is_new):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
|
||||
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
|
||||
|
||||
# Better delete the manifest git dir if we created it; otherwise next
|
||||
# time (when user fixes problems) we won't go through the "is_new" logic.
|
||||
@ -197,24 +203,22 @@ to update the working directory files.
|
||||
|
||||
if is_new or m.CurrentBranch is None:
|
||||
if not m.StartBranch('default'):
|
||||
print >>sys.stderr, 'fatal: cannot create default in manifest'
|
||||
print('fatal: cannot create default in manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _LinkManifest(self, name):
|
||||
if not name:
|
||||
print >>sys.stderr, 'fatal: manifest name (-m) is required.'
|
||||
print('fatal: manifest name (-m) is required.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
self.manifest.Link(name)
|
||||
except ManifestParseError, e:
|
||||
print >>sys.stderr, "fatal: manifest '%s' not available" % name
|
||||
print >>sys.stderr, 'fatal: %s' % str(e)
|
||||
except ManifestParseError as e:
|
||||
print("fatal: manifest '%s' not available" % name, file=sys.stderr)
|
||||
print('fatal: %s' % str(e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _Prompt(self, prompt, value):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
sys.stdout.write('%-10s [%s]: ' % (prompt, value))
|
||||
a = sys.stdin.readline().strip()
|
||||
if a == '':
|
||||
@ -233,24 +237,24 @@ to update the working directory files.
|
||||
mp.config.SetString('user.name', gc.GetString('user.name'))
|
||||
mp.config.SetString('user.email', gc.GetString('user.email'))
|
||||
|
||||
print ''
|
||||
print 'Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||
mp.config.GetString('user.email'))
|
||||
print 'If you want to change this, please re-run \'repo init\' with --config-name'
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||
mp.config.GetString('user.email')))
|
||||
print('If you want to change this, please re-run \'repo init\' with --config-name')
|
||||
return False
|
||||
|
||||
def _ConfigureUser(self):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
while True:
|
||||
print ''
|
||||
print()
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
|
||||
print ''
|
||||
print 'Your identity is: %s <%s>' % (name, email)
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (name, email))
|
||||
sys.stdout.write('is this correct [y/N]? ')
|
||||
a = sys.stdin.readline().strip()
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ('yes', 'y', 't', 'true'):
|
||||
break
|
||||
|
||||
@ -276,17 +280,17 @@ to update the working directory files.
|
||||
self._on = True
|
||||
out = _Test()
|
||||
|
||||
print ''
|
||||
print "Testing colorized output (for 'repo diff', 'repo status'):"
|
||||
print()
|
||||
print("Testing colorized output (for 'repo diff', 'repo status'):")
|
||||
|
||||
for c in ['black','red','green','yellow','blue','magenta','cyan']:
|
||||
for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']:
|
||||
out.write(' ')
|
||||
out.printer(fg=c)(' %-6s ', c)
|
||||
out.write(' ')
|
||||
out.printer(fg='white', bg='black')(' %s ' % 'white')
|
||||
out.nl()
|
||||
|
||||
for c in ['bold','dim','ul','reverse']:
|
||||
for c in ['bold', 'dim', 'ul', 'reverse']:
|
||||
out.write(' ')
|
||||
out.printer(fg='black', attr=c)(' %-6s ', c)
|
||||
out.nl()
|
||||
@ -315,8 +319,29 @@ to update the working directory files.
|
||||
# We store the depth in the main manifest project.
|
||||
self.manifest.manifestProject.config.SetString('repo.depth', depth)
|
||||
|
||||
def _DisplayResult(self):
|
||||
if self.manifest.IsMirror:
|
||||
init_type = 'mirror '
|
||||
else:
|
||||
init_type = ''
|
||||
|
||||
print()
|
||||
print('repo %shas been initialized in %s'
|
||||
% (init_type, self.manifest.topdir))
|
||||
|
||||
current_dir = os.getcwd()
|
||||
if current_dir != self.manifest.topdir:
|
||||
print('If this is not the directory in which you want to initialize '
'repo, please run:')
|
||||
print(' rm -r %s/.repo' % self.manifest.topdir)
|
||||
print('and try again.')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
git_require(MIN_GIT_VERSION, fail=True)
|
||||
|
||||
if opt.reference:
|
||||
opt.reference = os.path.expanduser(opt.reference)
|
||||
|
||||
self._SyncManifest(opt)
|
||||
self._LinkManifest(opt.manifest_name)
|
||||
|
||||
@ -327,10 +352,4 @@ to update the working directory files.
|
||||
|
||||
self._ConfigureDepth(opt)
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
type = 'mirror '
|
||||
else:
|
||||
type = ''
|
||||
|
||||
print ''
|
||||
print 'repo %sinitialized in %s' % (type, self.manifest.topdir)
|
||||
self._DisplayResult()
|
||||
|
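The -p/--platform handling added above boils down to expanding the flag into extra 'platform-<name>' manifest groups. A sketch of that expansion (illustrative helper, not repo's own function):

import platform
import re

def expand_groups(groups_opt, platform_opt='auto'):
  groups = [g for g in re.split(r'[,\s]+', groups_opt) if g]
  all_platforms = ['linux', 'darwin']
  platformize = lambda x: 'platform-' + x
  if platform_opt == 'auto':
    groups.append(platformize(platform.system().lower()))
  elif platform_opt == 'all':
    # one whole group name per platform (extend with a list, not a bare string)
    groups.extend([platformize(x) for x in all_platforms])
  elif platform_opt in all_platforms:
    groups.append(platformize(platform_opt))
  elif platform_opt != 'none':
    raise ValueError('invalid platform flag')
  return groups

# expand_groups('default', 'auto') on a Linux host -> ['default', 'platform-linux']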
@ -13,13 +13,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import re
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
|
||||
class List(Command, MirrorSafeCommand):
|
||||
common = True
|
||||
helpSummary = "List projects and their associated directories"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
%prog [-f] [<project>...]
|
||||
%prog [-f] -r str1 [str2]...
|
||||
"""
|
||||
helpDescription = """
|
||||
List all projects; pass '.' to list the project for the cwd.
|
||||
@ -27,6 +31,14 @@ List all projects; pass '.' to list the project for the cwd.
|
||||
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
"""
|
||||
|
||||
def _Options(self, p, show_smart=True):
|
||||
p.add_option('-r', '--regex',
|
||||
dest='regex', action='store_true',
|
||||
help="Filter the project list based on regex or wildcard matching of strings")
|
||||
p.add_option('-f', '--fullpath',
|
||||
dest='fullpath', action='store_true',
|
||||
help="Display the full work tree path instead of the relative path")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
"""List all projects and the associated directories.
|
||||
|
||||
@ -35,14 +47,33 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
discoverable.
|
||||
|
||||
Args:
|
||||
opt: The options. We don't take any.
|
||||
opt: The options.
|
||||
args: Positional args. Can be a list of projects to list, or empty.
|
||||
"""
|
||||
projects = self.GetProjects(args)
|
||||
if not opt.regex:
|
||||
projects = self.GetProjects(args)
|
||||
else:
|
||||
projects = self.FindProjects(args)
|
||||
|
||||
def _getpath(x):
|
||||
if opt.fullpath:
|
||||
return x.worktree
|
||||
return x.relpath
|
||||
|
||||
lines = []
|
||||
for project in projects:
|
||||
lines.append("%s : %s" % (project.relpath, project.name))
|
||||
lines.append("%s : %s" % (_getpath(project), project.name))
|
||||
|
||||
lines.sort()
|
||||
print '\n'.join(lines)
|
||||
print('\n'.join(lines))
|
||||
|
||||
def FindProjects(self, args):
|
||||
result = []
|
||||
for project in self.GetProjects(''):
|
||||
for arg in args:
|
||||
pattern = re.compile(r'%s' % arg, re.IGNORECASE)
|
||||
if pattern.search(project.name) or pattern.search(project.relpath):
|
||||
result.append(project)
|
||||
break
|
||||
result.sort(key=lambda project: project.relpath)
|
||||
return result
|
||||
|
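The FindProjects helper added above filters projects by treating each argument as a case-insensitive regular expression and matching it against both the project name and its path. A standalone sketch with made-up project data:

import re

projects = [('platform/build', 'build'), ('kernel/common', 'kernel')]

def find(args):
  result = []
  for relpath, name in projects:
    for arg in args:
      pattern = re.compile(arg, re.IGNORECASE)
      if pattern.search(name) or pattern.search(relpath):
        result.append(relpath)
        break
  return sorted(result)

print(find(['kern', 'BUILD']))  # ['kernel/common', 'platform/build']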
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
|
||||
@ -35,19 +36,24 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
|
||||
@property
|
||||
def helpDescription(self):
|
||||
help = self._helpDescription + '\n'
|
||||
helptext = self._helpDescription + '\n'
|
||||
r = os.path.dirname(__file__)
|
||||
r = os.path.dirname(r)
|
||||
fd = open(os.path.join(r, 'docs', 'manifest-format.txt'))
|
||||
for line in fd:
|
||||
help += line
|
||||
helptext += line
|
||||
fd.close()
|
||||
return help
|
||||
return helptext
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-r', '--revision-as-HEAD',
|
||||
dest='peg_rev', action='store_true',
|
||||
help='Save revisions as current HEAD')
|
||||
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
|
||||
default=True, action='store_false',
|
||||
help='If in -r mode, do not write the upstream field. '
|
||||
'Only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive.')
|
||||
p.add_option('-o', '--output-file',
|
||||
dest='output_file',
|
||||
default='-',
|
||||
@ -60,10 +66,11 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
else:
|
||||
fd = open(opt.output_file, 'w')
|
||||
self.manifest.Save(fd,
|
||||
peg_rev = opt.peg_rev)
|
||||
peg_rev = opt.peg_rev,
|
||||
peg_rev_upstream = opt.peg_rev_upstream)
|
||||
fd.close()
|
||||
if opt.output_file != '-':
|
||||
print >>sys.stderr, 'Saved manifest to %s' % opt.output_file
|
||||
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if args:
|
||||
@ -73,6 +80,6 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
self._Output(opt)
|
||||
return
|
||||
|
||||
print >>sys.stderr, 'error: no operation to perform'
|
||||
print >>sys.stderr, 'error: see repo help manifest'
|
||||
print('error: no operation to perform', file=sys.stderr)
|
||||
print('error: see repo help manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
|
||||
@ -38,30 +39,33 @@ are displayed.
|
||||
help="Consider only checked out branches")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = []
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches().keys()]
|
||||
br = [x for x in br if x]
|
||||
if opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all.extend(br)
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all:
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
self.text = self.printer('text')
|
||||
|
||||
out = Report(all[0].project.config)
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.text("Deprecated. See repo info -o.")
|
||||
out.nl()
|
||||
out.project('Projects Overview')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all:
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
@ -70,11 +74,11 @@ are displayed.
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print '%s %-33s (%2d commit%s, %s)' % (
|
||||
print('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or ' ',
|
||||
date)
|
||||
date))
|
||||
for commit in commits:
|
||||
print '%-35s - %s' % ('', commit)
|
||||
print('%-35s - %s' % ('', commit))
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
|
||||
@ -24,11 +25,11 @@ class Prune(PagedCommand):
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = []
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
all.extend(project.PruneHeads())
|
||||
all_branches.extend(project.PruneHeads())
|
||||
|
||||
if not all:
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
@ -36,13 +37,13 @@ class Prune(PagedCommand):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
|
||||
out = Report(all[0].project.config)
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.project('Pending Branches')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all:
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
@ -51,9 +52,9 @@ class Prune(PagedCommand):
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print '%s %-33s (%2d commit%s, %s)' % (
|
||||
print('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or ' ',
|
||||
date)
|
||||
date))
|
||||
|
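The summary lines above keep the old `cond and 'x' or 'y'` idiom when picking the '*' marker and the plural suffix. A conditional expression says the same thing and does not misbehave when the "true" value happens to be falsy; a small illustrative sketch with made-up branch names:

def summary_line(name, commit_count, is_current):
  marker = '*' if is_current else ' '
  plural = 's' if commit_count != 1 else ' '
  return '%s %-33s (%2d commit%s)' % (marker, name, commit_count, plural)

print(summary_line('stable-fix', 1, True))
print(summary_line('feature-x', 3, False))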
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
@ -55,18 +56,20 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
help='Stash local modifications before starting')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = self.GetProjects(args)
|
||||
one_project = len(all) == 1
|
||||
all_projects = self.GetProjects(args)
|
||||
one_project = len(all_projects) == 1
|
||||
|
||||
if opt.interactive and not one_project:
|
||||
print >>sys.stderr, 'error: interactive rebase not supported with multiple projects'
|
||||
print('error: interactive rebase not supported with multiple projects',
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
|
||||
for project in all:
|
||||
for project in all_projects:
|
||||
cb = project.CurrentBranch
|
||||
if not cb:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s has a detatched HEAD" % project.relpath
|
||||
print("error: project %s has a detatched HEAD" % project.relpath,
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
# ignore branches with detached HEADs
|
||||
continue
|
||||
@ -74,7 +77,8 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
upbranch = project.GetBranch(cb)
|
||||
if not upbranch.LocalMerge:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s does not track any remote branches" % project.relpath
|
||||
print("error: project %s does not track any remote branches"
|
||||
% project.relpath, file=sys.stderr)
|
||||
return -1
|
||||
# ignore branches without remotes
|
||||
continue
|
||||
@ -101,8 +105,8 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
|
||||
args.append(upbranch.LocalMerge)
|
||||
|
||||
print >>sys.stderr, '# %s: rebasing %s -> %s' % \
|
||||
(project.relpath, cb, upbranch.LocalMerge)
|
||||
print('# %s: rebasing %s -> %s'
|
||||
% (project.relpath, cb, upbranch.LocalMerge), file=sys.stderr)
|
||||
|
||||
needs_stash = False
|
||||
if opt.auto_stash:
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from optparse import SUPPRESS_HELP
|
||||
import sys
|
||||
|
||||
@ -52,7 +53,7 @@ need to be performed by an end-user.
|
||||
|
||||
else:
|
||||
if not rp.Sync_NetworkHalf():
|
||||
print >>sys.stderr, "error: can't update repo"
|
||||
print("error: can't update repo", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
rp.bare_git.gc('--auto')
|
||||
|
@ -13,7 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from sync import Sync
|
||||
from subcmds.sync import Sync
|
||||
|
||||
class Smartsync(Sync):
|
||||
common = True
|
||||
|
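The smartsync change above replaces an implicit relative import with a package-qualified one. Implicit relative imports were removed in Python 3, so a module inside the subcmds package has to spell out the package; a sketch of the two forms (the subclass name below is hypothetical):

# Python 2 only: resolves 'sync' to subcmds/sync.py merely because the
# importing module happens to live in the same package.
#   from sync import Sync

# Works under both Python 2 and Python 3:
from subcmds.sync import Sync

class ExampleSmartsync(Sync):
  common = True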
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
|
||||
from color import Coloring
|
||||
@ -48,9 +49,9 @@ The '%prog' command stages files to prepare the next commit.
|
||||
self.Usage()
|
||||
|
||||
def _Interactive(self, opt, args):
|
||||
all = filter(lambda x: x.IsDirty(), self.GetProjects(args))
|
||||
if not all:
|
||||
print >>sys.stderr,'no projects have uncommitted modifications'
|
||||
all_projects = filter(lambda x: x.IsDirty(), self.GetProjects(args))
|
||||
if not all_projects:
|
||||
print('no projects have uncommitted modifications', file=sys.stderr)
|
||||
return
|
||||
|
||||
out = _ProjectList(self.manifest.manifestProject.config)
|
||||
@ -58,8 +59,8 @@ The '%prog' command stages files to prepare the next commit.
|
||||
out.header(' %s', 'project')
|
||||
out.nl()
|
||||
|
||||
for i in xrange(0, len(all)):
|
||||
p = all[i]
|
||||
for i in range(len(all_projects)):
|
||||
p = all_projects[i]
|
||||
out.write('%3d: %s', i + 1, p.relpath + '/')
|
||||
out.nl()
|
||||
out.nl()
|
||||
@ -93,15 +94,15 @@ The '%prog' command stages files to prepare the next commit.
|
||||
if a_index is not None:
|
||||
if a_index == 0:
|
||||
break
|
||||
if 0 < a_index and a_index <= len(all):
|
||||
_AddI(all[a_index - 1])
|
||||
if 0 < a_index and a_index <= len(all_projects):
|
||||
_AddI(all_projects[a_index - 1])
|
||||
continue
|
||||
|
||||
p = filter(lambda x: x.name == a or x.relpath == a, all)
|
||||
p = filter(lambda x: x.name == a or x.relpath == a, all_projects)
|
||||
if len(p) == 1:
|
||||
_AddI(p[0])
|
||||
continue
|
||||
print 'Bye.'
|
||||
print('Bye.')
|
||||
|
||||
def _AddI(project):
|
||||
p = GitCommand(project, ['add', '--interactive'], bare=False)
|
||||
|
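The stage hunks swap xrange() for range(), but the surrounding code still takes len() of, and indexes into, the result of filter(). That is fine on Python 2, where filter() returns a list; on Python 3 it returns a lazy iterator and needs an explicit list() wrapper. A toy sketch of the difference (not repo code):

projects = ['build/', 'kernel/', 'manifest/']

lazy = filter(lambda p: p.startswith('k'), projects)
# Python 2: lazy is a list, so len(lazy) == 1.
# Python 3: lazy is a filter object and len(lazy) raises TypeError.

dirty = list(filter(lambda p: p.startswith('k'), projects))  # portable
for i in range(len(dirty)):
  print('%3d: %s' % (i + 1, dirty[i]))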
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command
|
||||
from git_config import IsId
|
||||
@ -41,7 +42,7 @@ revision specified in the manifest.
|
||||
|
||||
nb = args[0]
|
||||
if not git.check_ref_format('heads/%s' % nb):
|
||||
print >>sys.stderr, "error: '%s' is not a valid name" % nb
|
||||
print("error: '%s' is not a valid name" % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
err = []
|
||||
@ -49,13 +50,13 @@ revision specified in the manifest.
|
||||
if not opt.all:
|
||||
projects = args[1:]
|
||||
if len(projects) < 1:
|
||||
print >>sys.stderr, "error: at least one project must be specified"
|
||||
print("error: at least one project must be specified", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
all = self.GetProjects(projects)
|
||||
all_projects = self.GetProjects(projects)
|
||||
|
||||
pm = Progress('Starting %s' % nb, len(all))
|
||||
for project in all:
|
||||
pm = Progress('Starting %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
# If the current revision is a specific SHA1 then we can't push back
|
||||
# to it so substitute the manifest default revision instead.
|
||||
@ -67,7 +68,6 @@ revision specified in the manifest.
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot start %s" \
|
||||
% (p.relpath, nb)
|
||||
print("error: %s/: cannot start %s" % (p.relpath, nb),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
@ -98,18 +98,18 @@ the following meanings:
|
||||
sem.release()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = self.GetProjects(args)
|
||||
all_projects = self.GetProjects(args)
|
||||
counter = itertools.count()
|
||||
|
||||
if opt.jobs == 1:
|
||||
for project in all:
|
||||
for project in all_projects:
|
||||
state = project.PrintWorkTreeStatus()
|
||||
if state == 'CLEAN':
|
||||
counter.next()
|
||||
else:
|
||||
sem = _threading.Semaphore(opt.jobs)
|
||||
threads_and_output = []
|
||||
for project in all:
|
||||
for project in all_projects:
|
||||
sem.acquire()
|
||||
|
||||
class BufList(StringIO.StringIO):
|
||||
@ -128,5 +128,5 @@ the following meanings:
|
||||
t.join()
|
||||
output.dump(sys.stdout)
|
||||
output.close()
|
||||
if len(all) == counter.next():
|
||||
print 'nothing to commit (working directory clean)'
|
||||
if len(all_projects) == counter.next():
|
||||
print('nothing to commit (working directory clean)')
|
||||
|
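The status command above counts clean projects with itertools.count() and calls counter.next(), which only exists on Python 2 iterators; the portable spelling is the next() built-in. A minimal, single-threaded sketch of the counting pattern:

import itertools

counter = itertools.count()
states = ['CLEAN', 'CLEAN']

for state in states:
  if state == 'CLEAN':
    next(counter)               # portable equivalent of counter.next()

clean = next(counter)           # value == number of clean projects seen so far
if clean == len(states):
  print('nothing to commit (working directory clean)')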
485
subcmds/sync.py
@ -13,14 +13,18 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import netrc
|
||||
from optparse import SUPPRESS_HELP
|
||||
import os
|
||||
import pickle
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import urlparse
|
||||
import xmlrpclib
|
||||
|
||||
try:
|
||||
@ -36,15 +40,23 @@ except ImportError:
|
||||
def _rlimit_nofile():
|
||||
return (256, 256)
|
||||
|
||||
from git_command import GIT
|
||||
try:
|
||||
import multiprocessing
|
||||
except ImportError:
|
||||
multiprocessing = None
|
||||
|
||||
from git_command import GIT, git_require
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from main import WrapperModule
|
||||
from project import Project
|
||||
from project import RemoteSpec
|
||||
from command import Command, MirrorSafeCommand
|
||||
from error import RepoChangedException, GitError
|
||||
from error import RepoChangedException, GitError, ManifestParseError
|
||||
from project import SyncBuffer
|
||||
from progress import Progress
|
||||
|
||||
_ONE_DAY_S = 24 * 60 * 60
|
||||
|
||||
class _FetchError(Exception):
|
||||
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
|
||||
pass
|
||||
@ -81,6 +93,18 @@ build as specified by the manifest-server element in the current
|
||||
manifest. The -t/--smart-tag option is similar and allows you to
|
||||
specify a custom tag/label.
|
||||
|
||||
The -u/--manifest-server-username and -p/--manifest-server-password
|
||||
options can be used to specify a username and password to authenticate
|
||||
with the manifest server when using the -s or -t option.
|
||||
|
||||
If -u and -p are not specified when using the -s or -t option, '%prog'
|
||||
will attempt to read authentication credentials for the manifest server
|
||||
from the user's .netrc file.
|
||||
|
||||
'%prog' will not use authentication credentials from -u/-p or .netrc
|
||||
if the manifest server specified in the manifest file already includes
|
||||
credentials.
|
||||
|
||||
The -f/--force-broken option can be used to proceed with syncing
|
||||
other projects if a project sync fails.
|
||||
|
||||
@ -90,6 +114,9 @@ resumeable bundle file on a content delivery network. This
|
||||
may be necessary if there are problems with the local Python
|
||||
HTTP client or proxy configuration, but the Git binary works.
|
||||
|
||||
The --fetch-submodules option enables fetching Git submodules
|
||||
of a project from server.
|
||||
|
||||
SSH Connections
|
||||
---------------
|
||||
|
||||
@ -121,27 +148,30 @@ later is required to fix a server side protocol bug.
|
||||
"""
|
||||
|
||||
def _Options(self, p, show_smart=True):
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
try:
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
except ManifestParseError:
|
||||
self.jobs = 1
|
||||
|
||||
p.add_option('-f', '--force-broken',
|
||||
dest='force_broken', action='store_true',
|
||||
help="continue sync even if a project fails to sync")
|
||||
p.add_option('-l','--local-only',
|
||||
p.add_option('-l', '--local-only',
|
||||
dest='local_only', action='store_true',
|
||||
help="only update working tree, don't fetch")
|
||||
p.add_option('-n','--network-only',
|
||||
p.add_option('-n', '--network-only',
|
||||
dest='network_only', action='store_true',
|
||||
help="fetch only, don't update working tree")
|
||||
p.add_option('-d','--detach',
|
||||
p.add_option('-d', '--detach',
|
||||
dest='detach_head', action='store_true',
|
||||
help='detach projects back to manifest revision')
|
||||
p.add_option('-c','--current-branch',
|
||||
p.add_option('-c', '--current-branch',
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current branch from server')
|
||||
p.add_option('-q','--quiet',
|
||||
p.add_option('-q', '--quiet',
|
||||
dest='quiet', action='store_true',
|
||||
help='be more quiet')
|
||||
p.add_option('-j','--jobs',
|
||||
p.add_option('-j', '--jobs',
|
||||
dest='jobs', action='store', type='int',
|
||||
help="projects to fetch simultaneously (default %d)" % self.jobs)
|
||||
p.add_option('-m', '--manifest-name',
|
||||
@ -150,6 +180,15 @@ later is required to fix a server side protocol bug.
|
||||
p.add_option('--no-clone-bundle',
|
||||
dest='no_clone_bundle', action='store_true',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
p.add_option('-u', '--manifest-server-username', action='store',
|
||||
dest='manifest_server_username',
|
||||
help='username to authenticate with the manifest server')
|
||||
p.add_option('-p', '--manifest-server-password', action='store',
|
||||
dest='manifest_server_password',
|
||||
help='password to authenticate with the manifest server')
|
||||
p.add_option('--fetch-submodules',
|
||||
dest='fetch_submodules', action='store_true',
|
||||
help='fetch submodules from server')
|
||||
if show_smart:
|
||||
p.add_option('-s', '--smart-sync',
|
||||
dest='smart_sync', action='store_true',
|
||||
@ -167,59 +206,62 @@ later is required to fix a server side protocol bug.
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
|
||||
"""Main function of the fetch threads when jobs are > 1.
|
||||
"""Main function of the fetch threads when jobs are > 1.
|
||||
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
project: Project object for the project to fetch.
|
||||
lock: Lock for accessing objects that are shared amongst multiple
|
||||
_FetchHelper() threads.
|
||||
fetched: set object that we will add project.gitdir to when we're done
|
||||
(with our lock held).
|
||||
pm: Instance of a Project object. We will call pm.update() (with our
|
||||
lock held).
|
||||
sem: We'll release() this semaphore when we exit so that another thread
|
||||
can be started up.
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
"""
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
project: Project object for the project to fetch.
|
||||
lock: Lock for accessing objects that are shared amongst multiple
|
||||
_FetchHelper() threads.
|
||||
fetched: set object that we will add project.gitdir to when we're done
|
||||
(with our lock held).
|
||||
pm: Instance of a Project object. We will call pm.update() (with our
|
||||
lock held).
|
||||
sem: We'll release() this semaphore when we exit so that another thread
|
||||
can be started up.
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
"""
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
|
||||
# Encapsulate everything in a try/except/finally so that:
|
||||
# - We always set err_event in the case of an exception.
|
||||
# - We always make sure we call sem.release().
|
||||
# - We always make sure we unlock the lock if we locked it.
|
||||
# Encapsulate everything in a try/except/finally so that:
|
||||
# - We always set err_event in the case of an exception.
|
||||
# - We always make sure we call sem.release().
|
||||
# - We always make sure we unlock the lock if we locked it.
|
||||
try:
|
||||
try:
|
||||
try:
|
||||
success = project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle)
|
||||
start = time.time()
|
||||
success = project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
# and Progress.update() are not thread safe.
|
||||
lock.acquire()
|
||||
did_lock = True
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
# and Progress.update() are not thread safe.
|
||||
lock.acquire()
|
||||
did_lock = True
|
||||
|
||||
if not success:
|
||||
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
|
||||
if opt.force_broken:
|
||||
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
|
||||
else:
|
||||
raise _FetchError()
|
||||
if not success:
|
||||
print('error: Cannot fetch %s' % project.name, file=sys.stderr)
|
||||
if opt.force_broken:
|
||||
print('warn: --force-broken, continuing to sync',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
raise _FetchError()
|
||||
|
||||
fetched.add(project.gitdir)
|
||||
pm.update()
|
||||
except _FetchError:
|
||||
err_event.set()
|
||||
except:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
if did_lock:
|
||||
lock.release()
|
||||
sem.release()
|
||||
fetched.add(project.gitdir)
|
||||
pm.update()
|
||||
except _FetchError:
|
||||
err_event.set()
|
||||
except:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
if did_lock:
|
||||
lock.release()
|
||||
sem.release()
|
||||
|
||||
def _Fetch(self, projects, opt):
|
||||
fetched = set()
|
||||
@ -234,9 +276,9 @@ later is required to fix a server side protocol bug.
|
||||
clone_bundle=not opt.no_clone_bundle):
|
||||
fetched.add(project.gitdir)
|
||||
else:
|
||||
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
|
||||
print('error: Cannot fetch %s' % project.name, file=sys.stderr)
|
||||
if opt.force_broken:
|
||||
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
|
||||
print('warn: --force-broken, continuing to sync', file=sys.stderr)
|
||||
else:
|
||||
sys.exit(1)
|
||||
else:
|
||||
@ -269,14 +311,62 @@ later is required to fix a server side protocol bug.
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print >>sys.stderr, '\nerror: Exited sync due to fetch errors'
|
||||
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
pm.end()
|
||||
for project in projects:
|
||||
project.bare_git.gc('--auto')
|
||||
self._fetch_times.Save()
|
||||
|
||||
self._GCProjects(projects)
|
||||
return fetched
|
||||
|
||||
def _GCProjects(self, projects):
|
||||
has_dash_c = git_require((1, 7, 2))
|
||||
if multiprocessing and has_dash_c:
|
||||
cpu_count = multiprocessing.cpu_count()
|
||||
else:
|
||||
cpu_count = 1
|
||||
jobs = min(self.jobs, cpu_count)
|
||||
|
||||
if jobs < 2:
|
||||
for project in projects:
|
||||
project.bare_git.gc('--auto')
|
||||
return
|
||||
|
||||
config = {'pack.threads': cpu_count / jobs if cpu_count > jobs else 1}
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(jobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
def GC(project):
|
||||
try:
|
||||
try:
|
||||
project.bare_git.gc('--auto', config=config)
|
||||
except GitError:
|
||||
err_event.set()
|
||||
except:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
for project in projects:
|
||||
if err_event.isSet():
|
||||
break
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target=GC, args=(project,))
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
t.start()
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to gc errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
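The _GCProjects() helper above throttles concurrent `git gc --auto` runs with a semaphore sized to the job count and latches failures in an Event. A stripped-down sketch of that fan-out pattern with a placeholder work function (standard library only, not the commit's code):

import threading

def run_bounded(items, jobs, work):
  """Run work(item) on at most `jobs` threads at a time."""
  sem = threading.Semaphore(jobs)
  err_event = threading.Event()
  threads = []

  def worker(item):
    try:
      work(item)
    except Exception:
      err_event.set()            # remember that something failed
    finally:
      sem.release()              # always free the slot for the next item

  for item in items:
    if err_event.is_set():
      break                      # stop launching new work after a failure
    sem.acquire()
    t = threading.Thread(target=worker, args=(item,))
    t.daemon = True
    threads.append(t)
    t.start()

  for t in threads:
    t.join()
  return not err_event.is_set()

Called as run_bounded(projects, jobs, lambda p: p.bare_git.gc('--auto')), this would mirror the shape of the loop above, minus the per-call pack.threads configuration.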
||||
def UpdateProjectList(self):
|
||||
new_project_paths = []
|
||||
for project in self.GetProjects(None, missing_ok=True):
|
||||
@ -296,37 +386,38 @@ later is required to fix a server side protocol bug.
|
||||
if not path:
|
||||
continue
|
||||
if path not in new_project_paths:
|
||||
"""If the path has already been deleted, we don't need to do it
|
||||
"""
|
||||
# If the path has already been deleted, we don't need to do it
|
||||
if os.path.exists(self.manifest.topdir + '/' + path):
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = os.path.join(self.manifest.topdir,
|
||||
path, '.git'),
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
revisionId = None,
|
||||
groups = None)
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = os.path.join(self.manifest.topdir,
|
||||
path, '.git'),
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
revisionId = None,
|
||||
groups = None)
|
||||
|
||||
if project.IsDirty():
|
||||
print >>sys.stderr, 'error: Cannot remove project "%s": \
|
||||
uncommitted changes are present' % project.relpath
|
||||
print >>sys.stderr, ' commit changes, then run sync again'
|
||||
return -1
|
||||
else:
|
||||
print >>sys.stderr, 'Deleting obsolete path %s' % project.worktree
|
||||
shutil.rmtree(project.worktree)
|
||||
# Try deleting parent subdirs if they are empty
|
||||
dir = os.path.dirname(project.worktree)
|
||||
while dir != self.manifest.topdir:
|
||||
try:
|
||||
os.rmdir(dir)
|
||||
except OSError:
|
||||
break
|
||||
dir = os.path.dirname(dir)
|
||||
if project.IsDirty():
|
||||
print('error: Cannot remove project "%s": uncommitted changes '
|
||||
'are present' % project.relpath, file=sys.stderr)
|
||||
print(' commit changes, then run sync again',
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
else:
|
||||
print('Deleting obsolete path %s' % project.worktree,
|
||||
file=sys.stderr)
|
||||
shutil.rmtree(project.worktree)
|
||||
# Try deleting parent subdirs if they are empty
|
||||
project_dir = os.path.dirname(project.worktree)
|
||||
while project_dir != self.manifest.topdir:
|
||||
try:
|
||||
os.rmdir(project_dir)
|
||||
except OSError:
|
||||
break
|
||||
project_dir = os.path.dirname(project_dir)
|
||||
|
||||
new_project_paths.sort()
|
||||
fd = open(file_path, 'w')
|
||||
@ -345,28 +436,70 @@ uncommitted changes are present' % project.relpath
|
||||
self.jobs = min(self.jobs, (soft_limit - 5) / 3)
|
||||
|
||||
if opt.network_only and opt.detach_head:
|
||||
print >>sys.stderr, 'error: cannot combine -n and -d'
|
||||
print('error: cannot combine -n and -d', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.network_only and opt.local_only:
|
||||
print >>sys.stderr, 'error: cannot combine -n and -l'
|
||||
print('error: cannot combine -n and -l', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_sync:
|
||||
print >>sys.stderr, 'error: cannot combine -m and -s'
|
||||
print('error: cannot combine -m and -s', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_tag:
|
||||
print >>sys.stderr, 'error: cannot combine -m and -t'
|
||||
print('error: cannot combine -m and -t', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.manifest_server_username or opt.manifest_server_password:
|
||||
if not (opt.smart_sync or opt.smart_tag):
|
||||
print('error: -u and -p may only be combined with -s or -t',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if None in [opt.manifest_server_username, opt.manifest_server_password]:
|
||||
print('error: both -u and -p must be given', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name)
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
if not self.manifest.manifest_server:
|
||||
print >>sys.stderr, \
|
||||
'error: cannot smart sync: no manifest server defined in manifest'
|
||||
print('error: cannot smart sync: no manifest server defined in '
|
||||
'manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
manifest_server = self.manifest.manifest_server
|
||||
|
||||
if not '@' in manifest_server:
|
||||
username = None
|
||||
password = None
|
||||
if opt.manifest_server_username and opt.manifest_server_password:
|
||||
username = opt.manifest_server_username
|
||||
password = opt.manifest_server_password
|
||||
else:
|
||||
try:
|
||||
info = netrc.netrc()
|
||||
except IOError:
|
||||
print('.netrc file does not exist or could not be opened',
|
||||
file=sys.stderr)
|
||||
else:
|
||||
try:
|
||||
parse_result = urlparse.urlparse(manifest_server)
|
||||
if parse_result.hostname:
|
||||
username, _account, password = \
|
||||
info.authenticators(parse_result.hostname)
|
||||
except TypeError:
|
||||
# TypeError is raised when the given hostname is not present
|
||||
# in the .netrc file.
|
||||
print('No credentials found for %s in .netrc'
|
||||
% parse_result.hostname, file=sys.stderr)
|
||||
except netrc.NetrcParseError as e:
|
||||
print('Error parsing .netrc file: %s' % e, file=sys.stderr)
|
||||
|
||||
if (username and password):
|
||||
manifest_server = manifest_server.replace('://', '://%s:%s@' %
|
||||
(username, password),
|
||||
1)
|
||||
|
||||
try:
|
||||
server = xmlrpclib.Server(self.manifest.manifest_server)
|
||||
server = xmlrpclib.Server(manifest_server)
|
||||
if opt.smart_sync:
|
||||
p = self.manifest.manifestProject
|
||||
b = p.GetBranch(p.CurrentBranch)
|
||||
@ -397,20 +530,21 @@ uncommitted changes are present' % project.relpath
|
||||
finally:
|
||||
f.close()
|
||||
except IOError:
|
||||
print >>sys.stderr, 'error: cannot write manifest to %s' % \
|
||||
manifest_path
|
||||
print('error: cannot write manifest to %s' % manifest_path,
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self.manifest.Override(manifest_name)
|
||||
else:
|
||||
print >>sys.stderr, 'error: %s' % manifest_str
|
||||
print('error: %s' % manifest_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except (socket.error, IOError, xmlrpclib.Fault), e:
|
||||
print >>sys.stderr, 'error: cannot connect to manifest server %s:\n%s' % (
|
||||
self.manifest.manifest_server, e)
|
||||
except (socket.error, IOError, xmlrpclib.Fault) as e:
|
||||
print('error: cannot connect to manifest server %s:\n%s'
|
||||
% (self.manifest.manifest_server, e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except xmlrpclib.ProtocolError, e:
|
||||
print >>sys.stderr, 'error: cannot connect to manifest server %s:\n%d %s' % (
|
||||
self.manifest.manifest_server, e.errcode, e.errmsg)
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
print('error: cannot connect to manifest server %s:\n%d %s'
|
||||
% (self.manifest.manifest_server, e.errcode, e.errmsg),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
@ -420,7 +554,7 @@ uncommitted changes are present' % project.relpath
|
||||
mp.PreSync()
|
||||
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest)
|
||||
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
|
||||
|
||||
if not opt.local_only:
|
||||
mp.Sync_NetworkHalf(quiet=opt.quiet,
|
||||
@ -434,14 +568,18 @@ uncommitted changes are present' % project.relpath
|
||||
self.manifest._Unload()
|
||||
if opt.jobs is None:
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
all = self.GetProjects(args, missing_ok=True)
|
||||
all_projects = self.GetProjects(args,
|
||||
missing_ok=True,
|
||||
submodules_ok=opt.fetch_submodules)
|
||||
|
||||
self._fetch_times = _FetchTimes(self.manifest)
|
||||
if not opt.local_only:
|
||||
to_fetch = []
|
||||
now = time.time()
|
||||
if (24 * 60 * 60) <= (now - rp.LastFetch):
|
||||
if _ONE_DAY_S <= (now - rp.LastFetch):
|
||||
to_fetch.append(rp)
|
||||
to_fetch.extend(all)
|
||||
to_fetch.extend(all_projects)
|
||||
to_fetch.sort(key=self._fetch_times.Get, reverse=True)
|
||||
|
||||
fetched = self._Fetch(to_fetch, opt)
|
||||
_PostRepoFetch(rp, opt.no_repo_verify)
|
||||
@ -449,13 +587,26 @@ uncommitted changes are present' % project.relpath
|
||||
# bail out now; the rest touches the working tree
|
||||
return
|
||||
|
||||
# Iteratively fetch missing and/or nested unregistered submodules
|
||||
previously_missing_set = set()
|
||||
while True:
|
||||
self.manifest._Unload()
|
||||
all = self.GetProjects(args, missing_ok=True)
|
||||
all_projects = self.GetProjects(args,
|
||||
missing_ok=True,
|
||||
submodules_ok=opt.fetch_submodules)
|
||||
missing = []
|
||||
for project in all:
|
||||
for project in all_projects:
|
||||
if project.gitdir not in fetched:
|
||||
missing.append(project)
|
||||
self._Fetch(missing, opt)
|
||||
if not missing:
|
||||
break
|
||||
# Stop fetching forever when repos are genuinely missing: if the set of
|
||||
# missing repos has not changed since the last fetch, give up and break.
|
||||
missing_set = set(p.name for p in missing)
|
||||
if previously_missing_set == missing_set:
|
||||
break
|
||||
previously_missing_set = missing_set
|
||||
fetched.update(self._Fetch(missing, opt))
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
# bail out now, we have no working tree
|
||||
@ -466,49 +617,53 @@ uncommitted changes are present' % project.relpath
|
||||
|
||||
syncbuf = SyncBuffer(mp.config,
|
||||
detach_head = opt.detach_head)
|
||||
pm = Progress('Syncing work tree', len(all))
|
||||
for project in all:
|
||||
pm = Progress('Syncing work tree', len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
if project.worktree:
|
||||
project.Sync_LocalHalf(syncbuf)
|
||||
pm.end()
|
||||
print >>sys.stderr
|
||||
print(file=sys.stderr)
|
||||
if not syncbuf.Finish():
|
||||
sys.exit(1)
|
||||
|
||||
# If there's a notice that's supposed to print at the end of the sync, print
|
||||
# it now...
|
||||
if self.manifest.notice:
|
||||
print self.manifest.notice
|
||||
print(self.manifest.notice)
|
||||
|
||||
def _PostRepoUpgrade(manifest):
|
||||
def _PostRepoUpgrade(manifest, quiet=False):
|
||||
wrapper = WrapperModule()
|
||||
if wrapper.NeedSetupGnuPG():
|
||||
wrapper.SetupGnuPG(quiet)
|
||||
for project in manifest.projects.values():
|
||||
if project.Exists:
|
||||
project.PostRepoUpgrade()
|
||||
|
||||
def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
|
||||
if rp.HasChanges:
|
||||
print >>sys.stderr, 'info: A new version of repo is available'
|
||||
print >>sys.stderr, ''
|
||||
print('info: A new version of repo is available', file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
if no_repo_verify or _VerifyTag(rp):
|
||||
syncbuf = SyncBuffer(rp.config)
|
||||
rp.Sync_LocalHalf(syncbuf)
|
||||
if not syncbuf.Finish():
|
||||
sys.exit(1)
|
||||
print >>sys.stderr, 'info: Restarting repo with latest version'
|
||||
print('info: Restarting repo with latest version', file=sys.stderr)
|
||||
raise RepoChangedException(['--repo-upgraded'])
|
||||
else:
|
||||
print >>sys.stderr, 'warning: Skipped upgrade to unverified version'
|
||||
print('warning: Skipped upgrade to unverified version', file=sys.stderr)
|
||||
else:
|
||||
if verbose:
|
||||
print >>sys.stderr, 'repo version %s is current' % rp.work_git.describe(HEAD)
|
||||
print('repo version %s is current' % rp.work_git.describe(HEAD),
|
||||
file=sys.stderr)
|
||||
|
||||
def _VerifyTag(project):
|
||||
gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
|
||||
if not os.path.exists(gpg_dir):
|
||||
print >>sys.stderr,\
|
||||
"""warning: GnuPG was not available during last "repo init"
|
||||
warning: Cannot automatically authenticate repo."""
|
||||
print('warning: GnuPG was not available during last "repo init"\n'
|
||||
'warning: Cannot automatically authenticate repo.',
|
||||
file=sys.stderr)
|
||||
return True
|
||||
|
||||
try:
|
||||
@ -522,10 +677,9 @@ warning: Cannot automatically authenticate repo."""
|
||||
if rev.startswith(R_HEADS):
|
||||
rev = rev[len(R_HEADS):]
|
||||
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr,\
|
||||
"warning: project '%s' branch '%s' is not signed" \
|
||||
% (project.name, rev)
|
||||
print(file=sys.stderr)
|
||||
print("warning: project '%s' branch '%s' is not signed"
|
||||
% (project.name, rev), file=sys.stderr)
|
||||
return False
|
||||
|
||||
env = os.environ.copy()
|
||||
@ -544,9 +698,72 @@ warning: Cannot automatically authenticate repo."""
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, out
|
||||
print >>sys.stderr, err
|
||||
print >>sys.stderr
|
||||
print(file=sys.stderr)
|
||||
print(out, file=sys.stderr)
|
||||
print(err, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
return False
|
||||
return True
|
||||
|
||||
class _FetchTimes(object):
|
||||
_ALPHA = 0.5
|
||||
|
||||
def __init__(self, manifest):
|
||||
self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
|
||||
self._times = None
|
||||
self._seen = set()
|
||||
|
||||
def Get(self, project):
|
||||
self._Load()
|
||||
return self._times.get(project.name, _ONE_DAY_S)
|
||||
|
||||
def Set(self, project, t):
|
||||
self._Load()
|
||||
name = project.name
|
||||
old = self._times.get(name, t)
|
||||
self._seen.add(name)
|
||||
a = self._ALPHA
|
||||
self._times[name] = (a*t) + ((1-a) * old)
|
||||
|
||||
def _Load(self):
|
||||
if self._times is None:
|
||||
try:
|
||||
f = open(self._path)
|
||||
except IOError:
|
||||
self._times = {}
|
||||
return self._times
|
||||
try:
|
||||
try:
|
||||
self._times = pickle.load(f)
|
||||
except IOError:
|
||||
try:
|
||||
os.remove(self._path)
|
||||
except OSError:
|
||||
pass
|
||||
self._times = {}
|
||||
finally:
|
||||
f.close()
|
||||
return self._times
|
||||
|
||||
def Save(self):
|
||||
if self._times is None:
|
||||
return
|
||||
|
||||
to_delete = []
|
||||
for name in self._times:
|
||||
if name not in self._seen:
|
||||
to_delete.append(name)
|
||||
for name in to_delete:
|
||||
del self._times[name]
|
||||
|
||||
try:
|
||||
f = open(self._path, 'wb')
|
||||
try:
|
||||
pickle.dump(self._times, f)
|
||||
except (IOError, OSError, pickle.PickleError):
|
||||
try:
|
||||
os.remove(self._path)
|
||||
except OSError:
|
||||
pass
|
||||
finally:
|
||||
f.close()
|
||||
|
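The _FetchTimes class above keeps a per-project exponential moving average of fetch duration (alpha = 0.5) so the slowest projects can be fetched first on the next sync, and drops entries for projects not seen on the latest run. The update rule in isolation, as a small sketch with an invented project name:

ALPHA = 0.5

def update_fetch_time(times, name, elapsed, alpha=ALPHA):
  # new = alpha * latest + (1 - alpha) * previous; the first sample stands alone.
  old = times.get(name, elapsed)
  times[name] = (alpha * elapsed) + ((1 - alpha) * old)
  return times[name]

times = {}
update_fetch_time(times, 'platform/build', 40.0)   # -> 40.0
update_fetch_time(times, 'platform/build', 10.0)   # -> 25.0
print(times['platform/build'])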
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import copy
|
||||
import re
|
||||
import sys
|
||||
@ -26,28 +27,30 @@ UNUSUAL_COMMIT_THRESHOLD = 5
|
||||
|
||||
def _ConfirmManyUploads(multiple_branches=False):
|
||||
if multiple_branches:
|
||||
print "ATTENTION: One or more branches has an unusually high number of commits."
|
||||
print('ATTENTION: One or more branches has an unusually high number '
|
||||
'of commits.')
|
||||
else:
|
||||
print "ATTENTION: You are uploading an unusually high number of commits."
|
||||
print "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across branches?)"
|
||||
print('ATTENTION: You are uploading an unusually high number of commits.')
|
||||
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
|
||||
'branches?)')
|
||||
answer = raw_input("If you are sure you intend to do this, type 'yes': ").strip()
|
||||
return answer == "yes"
|
||||
|
||||
def _die(fmt, *args):
|
||||
msg = fmt % args
|
||||
print >>sys.stderr, 'error: %s' % msg
|
||||
print('error: %s' % msg, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _SplitEmails(values):
|
||||
result = []
|
||||
for str in values:
|
||||
result.extend([s.strip() for s in str.split(',')])
|
||||
for value in values:
|
||||
result.extend([s.strip() for s in value.split(',')])
|
||||
return result
|
||||
|
||||
class Upload(InteractiveCommand):
|
||||
common = True
|
||||
helpSummary = "Upload changes for code review"
|
||||
helpUsage="""
|
||||
helpUsage = """
|
||||
%prog [--re --cc] [<project>]...
|
||||
"""
|
||||
helpDescription = """
|
||||
@ -174,20 +177,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
|
||||
if answer is None:
|
||||
date = branch.date
|
||||
list = branch.commits
|
||||
commit_list = branch.commits
|
||||
|
||||
print 'Upload project %s/ to remote branch %s:' % (project.relpath, project.revisionExpr)
|
||||
print ' branch %s (%2d commit%s, %s):' % (
|
||||
print('Upload project %s/ to remote branch %s:' % (project.relpath, project.revisionExpr))
|
||||
print(' branch %s (%2d commit%s, %s):' % (
|
||||
name,
|
||||
len(list),
|
||||
len(list) != 1 and 's' or '',
|
||||
date)
|
||||
for commit in list:
|
||||
print ' %s' % commit
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date))
|
||||
for commit in commit_list:
|
||||
print(' %s' % commit)
|
||||
|
||||
sys.stdout.write('to %s (y/N)? ' % remote.review)
|
||||
answer = sys.stdin.readline().strip()
|
||||
answer = answer in ('y', 'Y', 'yes', '1', 'true', 't')
|
||||
answer = sys.stdin.readline().strip().lower()
|
||||
answer = answer in ('y', 'yes', '1', 'true', 't')
|
||||
|
||||
if answer:
|
||||
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
|
||||
@ -212,17 +215,17 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
for branch in avail:
|
||||
name = branch.name
|
||||
date = branch.date
|
||||
list = branch.commits
|
||||
commit_list = branch.commits
|
||||
|
||||
if b:
|
||||
script.append('#')
|
||||
script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % (
|
||||
name,
|
||||
len(list),
|
||||
len(list) != 1 and 's' or '',
|
||||
len(commit_list),
|
||||
len(commit_list) != 1 and 's' or '',
|
||||
date,
|
||||
project.revisionExpr))
|
||||
for commit in list:
|
||||
for commit in commit_list:
|
||||
script.append('# %s' % commit)
|
||||
b[name] = branch
|
||||
|
||||
@ -297,7 +300,7 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
try:
|
||||
# refs/changes/XYZ/N --> XYZ
|
||||
return refs.get(last_pub).split('/')[-2]
|
||||
except:
|
||||
except (AttributeError, IndexError):
|
||||
return ""
|
||||
|
||||
def _UploadAndReport(self, opt, todo, original_people):
|
||||
@ -309,33 +312,33 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
|
||||
# Check if there are local changes that may have been forgotten
|
||||
if branch.project.HasChanges():
|
||||
key = 'review.%s.autoupload' % branch.project.remote.review
|
||||
answer = branch.project.config.GetBoolean(key)
|
||||
key = 'review.%s.autoupload' % branch.project.remote.review
|
||||
answer = branch.project.config.GetBoolean(key)
|
||||
|
||||
# if they want to auto upload, let's not ask because it could be automated
|
||||
if answer is None:
|
||||
sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a not in ('y', 'yes', 't', 'true', 'on'):
|
||||
print >>sys.stderr, "skipping upload"
|
||||
branch.uploaded = False
|
||||
branch.error = 'User aborted'
|
||||
continue
|
||||
# if they want to auto upload, let's not ask because it could be automated
|
||||
if answer is None:
|
||||
sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a not in ('y', 'yes', 't', 'true', 'on'):
|
||||
print("skipping upload", file=sys.stderr)
|
||||
branch.uploaded = False
|
||||
branch.error = 'User aborted'
|
||||
continue
|
||||
|
||||
# Check if topic branches should be sent to the server during upload
|
||||
if opt.auto_topic is not True:
|
||||
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||
|
||||
branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft)
|
||||
branch.uploaded = True
|
||||
except UploadError, e:
|
||||
except UploadError as e:
|
||||
branch.error = e
|
||||
branch.uploaded = False
|
||||
have_errors = True
|
||||
|
||||
print >>sys.stderr, ''
|
||||
print >>sys.stderr, '----------------------------------------------------------------------'
|
||||
print(file=sys.stderr)
|
||||
print('----------------------------------------------------------------------', file=sys.stderr)
|
||||
|
||||
if have_errors:
|
||||
for branch in todo:
|
||||
@ -344,17 +347,19 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
fmt = ' (%s)'
|
||||
else:
|
||||
fmt = '\n (%s)'
|
||||
print >>sys.stderr, ('[FAILED] %-15s %-15s' + fmt) % (
|
||||
print(('[FAILED] %-15s %-15s' + fmt) % (
|
||||
branch.project.relpath + '/', \
|
||||
branch.name, \
|
||||
str(branch.error))
|
||||
print >>sys.stderr, ''
|
||||
str(branch.error)),
|
||||
file=sys.stderr)
|
||||
print()
|
||||
|
||||
for branch in todo:
|
||||
if branch.uploaded:
|
||||
print >>sys.stderr, '[OK ] %-15s %s' % (
|
||||
branch.project.relpath + '/',
|
||||
branch.name)
|
||||
if branch.uploaded:
|
||||
print('[OK ] %-15s %s' % (
|
||||
branch.project.relpath + '/',
|
||||
branch.name),
|
||||
file=sys.stderr)
|
||||
|
||||
if have_errors:
|
||||
sys.exit(1)
|
||||
@ -384,18 +389,18 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
pending_proj_names = [project.name for (project, avail) in pending]
|
||||
try:
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
|
||||
except HookError, e:
|
||||
print >>sys.stderr, "ERROR: %s" % str(e)
|
||||
except HookError as e:
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
return
|
||||
|
||||
if opt.reviewers:
|
||||
reviewers = _SplitEmails(opt.reviewers)
|
||||
if opt.cc:
|
||||
cc = _SplitEmails(opt.cc)
|
||||
people = (reviewers,cc)
|
||||
people = (reviewers, cc)
|
||||
|
||||
if not pending:
|
||||
print >>sys.stdout, "no branches ready for upload"
|
||||
print("no branches ready for upload", file=sys.stderr)
|
||||
elif len(pending) == 1 and len(pending[0][1]) == 1:
|
||||
self._SingleBranch(opt, pending[0][1][0], people)
|
||||
else:
|
||||
|
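The upload changes above move from the old `except UploadError, e` form to `except UploadError as e`, the only form Python 3 accepts, while still calling raw_input(), which Python 3 renames to input(). A small compatibility sketch (illustrative only, not part of this commit):

try:
  input_fn = raw_input           # Python 2
except NameError:
  input_fn = input               # Python 3

def confirm(prompt):
  try:
    answer = input_fn(prompt).strip().lower()
  except EOFError as e:          # 'as' syntax works on Python 2.6+ and Python 3
    print('no answer: %s' % e)
    return False
  return answer in ('y', 'yes', '1', 'true', 't')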
@ -13,10 +13,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from command import Command, MirrorSafeCommand
|
||||
from git_command import git
|
||||
from project import HEAD
|
||||
from git_refs import HEAD
|
||||
|
||||
class Version(Command, MirrorSafeCommand):
|
||||
wrapper_version = None
|
||||
@ -32,12 +33,12 @@ class Version(Command, MirrorSafeCommand):
|
||||
rp = self.manifest.repoProject
|
||||
rem = rp.GetRemote(rp.remote.name)
|
||||
|
||||
print 'repo version %s' % rp.work_git.describe(HEAD)
|
||||
print ' (from %s)' % rem.url
|
||||
print('repo version %s' % rp.work_git.describe(HEAD))
|
||||
print(' (from %s)' % rem.url)
|
||||
|
||||
if Version.wrapper_path is not None:
|
||||
print 'repo launcher version %s' % Version.wrapper_version
|
||||
print ' (from %s)' % Version.wrapper_path
|
||||
print('repo launcher version %s' % Version.wrapper_version)
|
||||
print(' (from %s)' % Version.wrapper_path)
|
||||
|
||||
print git.version().strip()
|
||||
print 'Python %s' % sys.version
|
||||
print(git.version().strip())
|
||||
print('Python %s' % sys.version)
|
||||
|
@ -4,49 +4,49 @@ import unittest
|
||||
import git_config
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to test/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
"""Return a path relative to test/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
|
||||
class GitConfigUnitTest(unittest.TestCase):
|
||||
"""Tests the GitConfig class.
|
||||
"""Tests the GitConfig class.
|
||||
"""
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture.
|
||||
"""
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture.
|
||||
"""
|
||||
config_fixture = fixture('test.gitconfig')
|
||||
self.config = git_config.GitConfig(config_fixture)
|
||||
config_fixture = fixture('test.gitconfig')
|
||||
self.config = git_config.GitConfig(config_fixture)
|
||||
|
||||
def test_GetString_with_empty_config_values(self):
|
||||
"""
|
||||
Test config entries with no value.
|
||||
def test_GetString_with_empty_config_values(self):
|
||||
"""
|
||||
Test config entries with no value.
|
||||
|
||||
[section]
|
||||
empty
|
||||
[section]
|
||||
empty
|
||||
|
||||
"""
|
||||
val = self.config.GetString('section.empty')
|
||||
self.assertEqual(val, None)
|
||||
"""
|
||||
val = self.config.GetString('section.empty')
|
||||
self.assertEqual(val, None)
|
||||
|
||||
def test_GetString_with_true_value(self):
|
||||
"""
|
||||
Test config entries with a string value.
|
||||
def test_GetString_with_true_value(self):
|
||||
"""
|
||||
Test config entries with a string value.
|
||||
|
||||
[section]
|
||||
nonempty = true
|
||||
[section]
|
||||
nonempty = true
|
||||
|
||||
"""
|
||||
val = self.config.GetString('section.nonempty')
|
||||
self.assertEqual(val, 'true')
|
||||
"""
|
||||
val = self.config.GetString('section.nonempty')
|
||||
self.assertEqual(val, 'true')
|
||||
|
||||
def test_GetString_from_missing_file(self):
|
||||
"""
|
||||
Test missing config file
|
||||
"""
|
||||
config_fixture = fixture('not.present.gitconfig')
|
||||
config = git_config.GitConfig(config_fixture)
|
||||
val = config.GetString('empty')
|
||||
self.assertEqual(val, None)
|
||||
def test_GetString_from_missing_file(self):
|
||||
"""
|
||||
Test missing config file
|
||||
"""
|
||||
config_fixture = fixture('not.present.gitconfig')
|
||||
config = git_config.GitConfig(config_fixture)
|
||||
val = config.GetString('empty')
|
||||
self.assertEqual(val, None)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
unittest.main()
|
||||
|
3
trace.py
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
REPO_TRACE = 'REPO_TRACE'
|
||||
@ -31,4 +32,4 @@ def SetTrace():
|
||||
|
||||
def Trace(fmt, *args):
|
||||
if IsTrace():
|
||||
print >>sys.stderr, fmt % args
|
||||
print(fmt % args, file=sys.stderr)
|
||||
|
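The trace module above prints a formatted message to stderr only when tracing is enabled, either by calling SetTrace() or (per the REPO_TRACE constant) via the environment. A short usage sketch, assuming repo's trace.py is the module found on sys.path rather than the stdlib module of the same name:

import trace                     # repo's trace.py, shown above

trace.SetTrace()                 # turn tracing on for this process
trace.Trace('git %s (%.1fs)', 'fetch origin', 2.5)
# Prints "git fetch origin (2.5s)" to stderr; with tracing off, Trace() is a no-op.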