Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00)
Compare commits
330 Commits
SHA1 | Author | Date | |
---|---|---|---|
55e4d464a7 | |||
c9129d90de | |||
57365c98cc | |||
dc96476af3 | |||
2577cec095 | |||
e48d34659e | |||
ab8f911a67 | |||
608aff7f62 | |||
13657c407d | |||
e4ed8f65f3 | |||
fdb44479f8 | |||
188572170e | |||
d75c669fac | |||
091f893625 | |||
d947858325 | |||
67700e9b90 | |||
a5be53f9c8 | |||
9ed12c5d9c | |||
4f7bdea9d2 | |||
69998b0c6f | |||
5c6eeac8f0 | |||
e98607248e | |||
2f6ab7f5b8 | |||
3a6cd4200e | |||
25f17682ca | |||
8a68ff9605 | |||
297e7c6ee6 | |||
e3b1c45aeb | |||
7119f94aba | |||
01f443d75a | |||
b926116a14 | |||
3ff9decfd4 | |||
14a6674e32 | |||
9779565abf | |||
cf76b1bcec | |||
e00aa6b923 | |||
86d973d24e | |||
34acdd2534 | |||
d94aaef39e | |||
bd489c4eaa | |||
2dc810c2e4 | |||
bb1b5f5f86 | |||
e2126652a3 | |||
9a27d0111d | |||
918ff85c1e | |||
3d07da82ab | |||
e15c65abc2 | |||
daa851f6cd | |||
a43f42f9ff | |||
bb8337fe0f | |||
17f85eab24 | |||
b9477bc2dd | |||
5e7127d00b | |||
5d0efdb14a | |||
f35b2d9c31 | |||
e0904f721b | |||
9830553748 | |||
2bc7f5cb3a | |||
b292b98c3e | |||
2f127de752 | |||
7da1314e38 | |||
435370c6f0 | |||
e8f75fa368 | |||
87636f2ac2 | |||
337aee0a9c | |||
7cf1b36bcd | |||
5e57234ec6 | |||
5d016502eb | |||
475a47d531 | |||
62d0b10a7b | |||
d666e93ecc | |||
3f61950f01 | |||
4fd38ecc3a | |||
9fae805e04 | |||
6a927c5d19 | |||
eca119e5d6 | |||
6ba6ba0ef3 | |||
23acdd3f14 | |||
2644874d9d | |||
3d125940f6 | |||
a94f162b9f | |||
e5a2122e64 | |||
ccf86432b3 | |||
79770d269e | |||
c39864f5e1 | |||
5465727e53 | |||
d21720db31 | |||
971de8ea7b | |||
24c1308840 | |||
b962a1f5e0 | |||
5acde75e5d | |||
d67872d2f4 | |||
e9d6b611c5 | |||
c3d2f2b76f | |||
cd7c5deca0 | |||
e02ac0af2e | |||
898e12a2d9 | |||
ae0a36c9a5 | |||
76abcc1d1e | |||
d315382572 | |||
43bda84362 | |||
9b017dab46 | |||
e9dc3b3368 | |||
c9571423f8 | |||
34fb20f67c | |||
ecff4f17b0 | |||
cc14fa9820 | |||
3ce2a6b46b | |||
841be34968 | |||
ee1c2f5717 | |||
6a1f737380 | |||
e9311273dd | |||
605a9a487b | |||
2a32f6afa6 | |||
498fe90b45 | |||
53d6f4d17e | |||
9d8f914fe8 | |||
ceea368e88 | |||
b660539c4a | |||
752371d91b | |||
1a68dc58eb | |||
df5ee52050 | |||
fab96c68e3 | |||
bf1fbb20ab | |||
29472463ba | |||
c325dc35f6 | |||
f322b9abb4 | |||
db728cd866 | |||
c4657969eb | |||
7b947de1ee | |||
6392c87945 | |||
97d2b2f7a0 | |||
3a0e782790 | |||
490d09a314 | |||
13111b4e97 | |||
bd0312a484 | |||
334851e4b6 | |||
014d060989 | |||
44da16e8a0 | |||
65e0f35fda | |||
08c880db18 | |||
a101f1c167 | |||
49cd59bc86 | |||
30d452905f | |||
d6c93a28ca | |||
d572a13021 | |||
3ba5f95b46 | |||
2630dd9787 | |||
dafb1d68d3 | |||
4655e81a75 | |||
723c5dc3d6 | |||
e6a0eeb80d | |||
0960b5b53d | |||
fc06ced9f9 | |||
fce89f218a | |||
37282b4b9c | |||
835cd6888f | |||
8ced8641c8 | |||
2536f80625 | |||
e7a3bcbbb8 | |||
0ce6ca9c7b | |||
25b51d8cb7 | |||
0fc3a39829 | |||
cef005c3e8 | |||
c7c57e34db | |||
0d2b61f11d | |||
2bf9db0d3b | |||
f00e0ce556 | |||
1b5a4a0c5d | |||
de8b2c4276 | |||
727ee98a40 | |||
df14a70c45 | |||
71cab95b4c | |||
f18cb76173 | |||
d3fd537ea5 | |||
9275fd4329 | |||
0048b69c03 | |||
13f3da50d4 | |||
3218c13205 | |||
b0f9a02394 | |||
2b8db3ce3e | |||
5df6de075e | |||
a0de6e8eab | |||
16614f86b3 | |||
88443387b1 | |||
99482ae58a | |||
ec1df9b7f6 | |||
06d029c1c8 | |||
b715b14807 | |||
60829ba72f | |||
a22f99ae41 | |||
3575b8f8bd | |||
a5ece0e050 | |||
cc50bac8c7 | |||
0cb1b3f687 | |||
9e426aa432 | |||
08a3f68d38 | |||
feb39d61ef | |||
7198572dd7 | |||
2daf66740b | |||
f4f04d9fa8 | |||
18afd7f679 | |||
6623b21e10 | |||
ca8c32cd7a | |||
f0a9a1a30e | |||
879a9a5cf0 | |||
ff6929dde8 | |||
1c85f4e43b | |||
719965af35 | |||
5732e47ebb | |||
f3fdf823cf | |||
a1bfd2cd72 | |||
6d7508b3d5 | |||
69b1e8aa65 | |||
9452e4ec09 | |||
4c50deea28 | |||
d63060fc95 | |||
b6ea3bfcc3 | |||
aa4982e4c9 | |||
9bb1816bdc | |||
840ed0fab7 | |||
c024912fb8 | |||
15f6579eb3 | |||
d4cd69bdef | |||
d2dfac81ad | |||
4719901067 | |||
a949fa5d20 | |||
0afac0856c | |||
4c0f670465 | |||
33f0e786bb | |||
57272ba82e | |||
0125ae2fda | |||
a7ce096047 | |||
87bda12e85 | |||
5f947bba69 | |||
b3d2c9214b | |||
7354d88914 | |||
ce86abbe8a | |||
75b87c8a51 | |||
abb7a3dfec | |||
cc6c79643e | |||
2095179bee | |||
b0ca41e19a | |||
1875ddd47c | |||
446c4e5556 | |||
67f4563acb | |||
050e4fd591 | |||
60e679209a | |||
f1a6b14fdc | |||
ca3d8ff4fc | |||
98ea26b8d8 | |||
c24c720b61 | |||
2d1a396897 | |||
1dcb58a7d0 | |||
37dbf2bf0f | |||
438c54713a | |||
e020ebee4e | |||
21c5c34ee2 | |||
54fccd71fb | |||
fb5c8fd948 | |||
26120ca18d | |||
7da73d6f3b | |||
f0d4c36701 | |||
2ec00b9272 | |||
2a3a81b51f | |||
7b4f43542a | |||
9fb29ce123 | |||
3a68bb4c7f | |||
cd1d7ff81e | |||
da88ff4411 | |||
8135cdc53c | |||
4f2517ff11 | |||
fe200eeb52 | |||
078a8b270f | |||
3c8dea1f8d | |||
8ad8a0e61d | |||
d1f70d9929 | |||
c8a300f639 | |||
1b34c9118e | |||
366ad214b8 | |||
242b52690d | |||
4cc70ce501 | |||
498a0e8a79 | |||
bc7ef67d9b | |||
2f968c943b | |||
2b5b4ac292 | |||
6f6cd77a50 | |||
896d5dffd3 | |||
9360966bd2 | |||
ef9ce1d0a5 | |||
05f66b6836 | |||
eb7af87bcf | |||
938d608c9c | |||
d63bbf44dc | |||
a8421a128a | |||
fb2316146f | |||
8bd5e60b16 | |||
3d2cdd0ea5 | |||
4e3d6739a1 | |||
552ac89929 | |||
89e717d948 | |||
0f0dfa3930 | |||
76ca9f8145 | |||
accc56d82b | |||
db45da1208 | |||
50fa1ac6db | |||
5da554f294 | |||
77bb4af241 | |||
fd89b67f5c | |||
a490f03dc2 | |||
deec0536d6 | |||
06e556d202 | |||
8225cdc56b | |||
337fb9c7e9 | |||
9bb9617858 | |||
f690687671 | |||
336f7bd0ed | |||
2810cbc778 | |||
6ed4e28346 | |||
ad3193a0e5 | |||
b81ac9e654 | |||
0f3dd233ec | |||
c12c360f89 | |||
fbcde472ca | |||
d237b69865 | |||
5b23f24881 | |||
66bdd46871 | |||
a608fb024b | |||
f8e3273dec | |||
006734b798 |
1 .gitignore vendored
@@ -1 +1,2 @@
*.pyc
.repopickle_*
17 .project Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>repo</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
    <buildCommand>
      <name>org.python.pydev.PyDevBuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.python.pydev.pythonNature</nature>
  </natures>
</projectDescription>
10 .pydevproject Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>

<pydev_project>
  <pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
    <path>/repo</path>
  </pydev_pathproperty>
  <pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
  <pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
</pydev_project>
301 .pylintrc Normal file
@@ -0,0 +1,301 @@
# lint Python modules using external checkers.
|
||||
#
|
||||
# This is the main checker controling the other ones and the reports
|
||||
# generation. It is itself both a raw checker and an astng checker in order
|
||||
# to:
|
||||
# * handle message activation / deactivation at the module level
|
||||
# * handle some basic but necessary stats'data (number of classes, methods...)
|
||||
#
|
||||
[MASTER]
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
|
||||
# Add <file or directory> to the black list. It should be a base name, not a
|
||||
# path. You may set this option multiple times.
|
||||
ignore=SVN
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Set the cache size for astng objects.
|
||||
cache-size=500
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Enable only checker(s) with the given id(s). This option conflicts with the
|
||||
# disable-checker option
|
||||
#enable-checker=
|
||||
|
||||
# Enable all checker(s) except those with the given id(s). This option
|
||||
# conflicts with the enable-checker option
|
||||
#disable-checker=
|
||||
|
||||
# Enable all messages in the listed categories.
|
||||
#enable-msg-cat=
|
||||
|
||||
# Disable all messages in the listed categories.
|
||||
#disable-msg-cat=
|
||||
|
||||
# Enable the message(s) with the given id(s).
|
||||
enable=RP0004
|
||||
|
||||
# Disable the message(s) with the given id(s).
|
||||
disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,C0323,C0322,C0324,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,W0311,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html
|
||||
output-format=text
|
||||
|
||||
# Include message's id in output
|
||||
include-ids=yes
|
||||
|
||||
# Put messages in a separate file for each module / package specified on the
|
||||
# command line instead of printing them on stdout. Reports (if any) will be
|
||||
# written in a file name "pylint_global.[txt|html]".
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note).You have access to the variables errors warning, statement which
|
||||
# respectivly contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (R0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Add a comment according to your evaluation note. This is used by the global
|
||||
# evaluation report (R0004).
|
||||
comment=no
|
||||
|
||||
# checks for
|
||||
# * unused variables / imports
|
||||
# * undefined variables
|
||||
# * redefinition of variable from builtins or from an outer scope
|
||||
# * use of variable before assigment
|
||||
#
|
||||
[VARIABLES]
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# A regular expression matching names used for dummy variables (i.e. not used).
|
||||
dummy-variables-rgx=_|dummy
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
|
||||
# try to find bugs in the code using type inference
|
||||
#
|
||||
[TYPECHECK]
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# List of classes names for which member attributes should not be checked
|
||||
# (useful for classes with attributes dynamicaly set).
|
||||
ignored-classes=SQLObject
|
||||
|
||||
# When zope mode is activated, consider the acquired-members option to ignore
|
||||
# access to some undefined attributes.
|
||||
zope=no
|
||||
|
||||
# List of members which are usually get through zope's acquisition mecanism and
|
||||
# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
|
||||
acquired-members=REQUEST,acl_users,aq_parent
|
||||
|
||||
|
||||
# checks for :
|
||||
# * doc strings
|
||||
# * modules / classes / functions / methods / arguments / variables name
|
||||
# * number of arguments, local variables, branchs, returns and statements in
|
||||
# functions, methods
|
||||
# * required module attributes
|
||||
# * dangerous default values as arguments
|
||||
# * redefinition of function / method / class
|
||||
# * uses of the global statement
|
||||
#
|
||||
[BASIC]
|
||||
|
||||
# Required attributes for module, separated by a comma
|
||||
required-attributes=
|
||||
|
||||
# Regular expression which should only match functions or classes name which do
|
||||
# not require a docstring
|
||||
no-docstring-rgx=_main|__.*__
|
||||
|
||||
# Regular expression which should only match correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression which should only match correct module level names
|
||||
const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$
|
||||
|
||||
# Regular expression which should only match correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression which should only match correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct instance attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match correct list comprehension /
|
||||
# generator expression variable names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata
|
||||
|
||||
# List of builtins function names that should not be used, separated by a comma
|
||||
bad-functions=map,filter,apply,input
|
||||
|
||||
|
||||
# checks for sign of poor/misdesign:
|
||||
# * number of methods, attributes, local variables...
|
||||
# * size, complexity of functions, methods
|
||||
#
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branchs=12
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=20
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=30
|
||||
|
||||
|
||||
# checks for
|
||||
# * external modules dependencies
|
||||
# * relative / wildcard imports
|
||||
# * cyclic imports
|
||||
# * uses of deprecated modules
|
||||
#
|
||||
[IMPORTS]
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report R0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of external dependencies in the given file (report R0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report R0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
|
||||
# checks for :
|
||||
# * methods without self as first argument
|
||||
# * overridden methods signature
|
||||
# * access only to existant members via self
|
||||
# * attributes not defined in the __init__ method
|
||||
# * supported interfaces implementation
|
||||
# * unreachable code
|
||||
#
|
||||
[CLASSES]
|
||||
|
||||
# List of interface methods to ignore, separated by a comma. This is used for
|
||||
# instance to not check methods defines in Zope's Interface base class.
|
||||
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
|
||||
# checks for similarities and duplicated code. This computation may be
|
||||
# memory / CPU intensive, so you should disable it if you experiments some
|
||||
# problems.
|
||||
#
|
||||
[SIMILARITIES]
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
|
||||
# checks for:
|
||||
# * warning notes in the code like FIXME, XXX
|
||||
# * PEP 263: source code with non ascii character but no encoding declaration
|
||||
#
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,XXX,TODO
|
||||
|
||||
|
||||
# checks for :
|
||||
# * unauthorized constructions
|
||||
# * strict indentation
|
||||
# * line length
|
||||
# * use of <> instead of !=
|
||||
#
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=80
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab). In repo it is 2 spaces.
|
||||
indent-string=' '
|
87 SUBMITTING_PATCHES Normal file
@@ -0,0 +1,87 @@
Short Version:

 - Make small logical changes.
 - Provide a meaningful commit message.
 - Check for coding errors with pylint
 - Make sure all code is under the Apache License, 2.0.
 - Publish your changes for review:

   git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master


Long Version:

I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution
(Documentation/SubmittingPatches), which itself was based on the
patch submission guidelines for the Linux kernel.

However there are some differences, so please review and familiarize
yourself with the following relevant bits:


(1) Make separate commits for logically separate changes.

Unless your patch is really trivial, you should not be sending
out a patch that was generated between your working tree and your
commit head. Instead, always make a commit with complete commit
message and generate a series of patches from your repository.
It is a good discipline.

Describe the technical detail of the change(s).

If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.


(2) Check for coding errors with pylint

Run pylint on changed modules using the provided configuration:

  pylint --rcfile=.pylintrc file.py


(3) Check the license

repo is licensed under the Apache License, 2.0.

Because of this licensing model *every* file within the project
*must* list the license that covers it in the header of the file.
Any new contributions to an existing file *must* be submitted under
the current license of that file. Any new files *must* clearly
indicate which license they are provided under in the file header.

Please verify that you are legally allowed and willing to submit your
changes under the license covering each file *prior* to submitting
your patch. It is virtually impossible to remove a patch once it
has been applied and pushed out.


(4) Sending your patches.

Do not email your patches to anyone.

Instead, login to the Gerrit Code Review tool at:

  https://gerrit-review.googlesource.com/

Ensure you have completed one of the necessary contributor
agreements, providing documentation to the project maintainers that
they have right to redistribute your work under the Apache License:

  https://gerrit-review.googlesource.com/#/settings/agreements

Ensure you have obtained an HTTP password to authenticate:

  https://gerrit-review.googlesource.com/new-password

Push your patches over HTTPS to the review server, possibly through
a remembered remote to make this easier in the future:

  git config remote.review.url https://gerrit-review.googlesource.com/git-repo
  git config remote.review.push HEAD:refs/for/master

  git push review

You will be automatically emailed a copy of your commits, and any
comments made by the project maintainers.
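The pylint check in step (2) above can be scripted. Below is a minimal sketch of such a
helper; it is not part of the repo sources, and the script name and the choice of
origin/master as the diff base are assumptions:

  #!/usr/bin/env python
  # Hypothetical helper: run pylint, with the project's .pylintrc, over every
  # Python file modified on the current branch.

  import subprocess
  import sys

  def changed_python_files(base='origin/master'):
    # Ask git which files differ from the given upstream branch.
    out = subprocess.Popen(['git', 'diff', '--name-only', base],
                           stdout=subprocess.PIPE).communicate()[0]
    return [f for f in out.splitlines() if f.endswith('.py')]

  def main():
    files = changed_python_files()
    if not files:
      print 'no Python files changed'
      return 0
    # Same invocation as documented above: pylint --rcfile=.pylintrc file.py
    return subprocess.call(['pylint', '--rcfile=.pylintrc'] + files)

  if __name__ == '__main__':
    sys.exit(main())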
25 color.py
@@ -17,7 +17,6 @@ import os
import sys

import pager
from git_config import GitConfig

COLORS = {None :-1,
          'normal' :-1,
@@ -39,8 +38,11 @@ ATTRS = {None :-1,

RESET = "\033[m"

def is_color(s): return s in COLORS
def is_attr(s): return s in ATTRS
def is_color(s):
  return s in COLORS

def is_attr(s):
  return s in ATTRS

def _Color(fg = None, bg = None, attr = None):
  fg = COLORS[fg]
@@ -81,8 +83,8 @@ def _Color(fg = None, bg = None, attr = None):


class Coloring(object):
  def __init__(self, config, type):
    self._section = 'color.%s' % type
  def __init__(self, config, section_type):
    self._section = 'color.%s' % section_type
    self._config = config
    self._out = sys.stdout

@@ -110,6 +112,9 @@ class Coloring(object):
  def write(self, fmt, *args):
    self._out.write(fmt % args)

  def flush(self):
    self._out.flush()

  def nl(self):
    self._out.write('\n')

@@ -124,8 +129,8 @@ class Coloring(object):
    if self._on:
      c = self._parse(opt, fg, bg, attr)
      def f(fmt, *args):
        str = fmt % args
        return ''.join([c, str, RESET])
        output = fmt % args
        return ''.join([c, output, RESET])
      return f
    else:
      def f(fmt, *args):
@@ -149,8 +154,10 @@ class Coloring(object):
      have_fg = False
      for a in v.split(' '):
        if is_color(a):
          if have_fg: bg = a
          else: fg = a
          if have_fg:
            bg = a
          else:
            fg = a
        elif is_attr(a):
          attr = a
105 command.py
@@ -15,9 +15,12 @@

import os
import optparse
import platform
import re
import sys

from error import NoSuchProjectError
from error import InvalidProjectGroupsError

class Command(object):
  """Base class for any command line action in repo.
@@ -27,6 +30,9 @@ class Command(object):
  manifest = None
  _optparse = None

  def WantPager(self, opt):
    return False

  @property
  def OptionParser(self):
    if self._optparse is None:
@@ -53,50 +59,86 @@ class Command(object):
    """Perform the action, after option parsing is complete.
    """
    raise NotImplementedError

  def _ResetPathToProjectMap(self, projects):
    self._by_path = dict((p.worktree, p) for p in projects)

  def _UpdatePathToProjectMap(self, project):
    self._by_path[project.worktree] = project

  def _GetProjectByPath(self, path):
    project = None
    if os.path.exists(path):
      oldpath = None
      while path \
            and path != oldpath \
            and path != self.manifest.topdir:
        try:
          project = self._by_path[path]
          break
        except KeyError:
          oldpath = path
          path = os.path.dirname(path)
    else:
      try:
        project = self._by_path[path]
      except KeyError:
        pass
    return project

  def GetProjects(self, args, missing_ok=False):
    """A list of projects that match the arguments.
    """
    all = self.manifest.projects
    all_projects = self.manifest.projects
    result = []

    mp = self.manifest.manifestProject

    groups = mp.config.GetString('manifest.groups')
    if not groups:
      groups = 'all,-notdefault,platform-' + platform.system().lower()
    groups = [x for x in re.split('[,\s]+', groups) if x]

    if not args:
      for project in all.values():
        if missing_ok or project.Exists:
      all_projects_list = all_projects.values()
      derived_projects = []
      for project in all_projects_list:
        if project.Registered:
          # Do not search registered subproject for derived projects
          # since its parent has been searched already
          continue
        derived_projects.extend(project.GetDerivedSubprojects())
      all_projects_list.extend(derived_projects)
      for project in all_projects_list:
        if ((missing_ok or project.Exists) and
            project.MatchesGroups(groups)):
          result.append(project)
    else:
      by_path = None
      self._ResetPathToProjectMap(all_projects.values())

      for arg in args:
        project = all.get(arg)
        project = all_projects.get(arg)

        if not project:
          path = os.path.abspath(arg)
          path = os.path.abspath(arg).replace('\\', '/')
          project = self._GetProjectByPath(path)

          if not by_path:
            by_path = dict()
            for p in all.values():
              by_path[p.worktree] = p

          if os.path.exists(path):
            while path \
                  and path != '/' \
                  and path != self.manifest.topdir:
              try:
                project = by_path[path]
                break
              except KeyError:
                path = os.path.dirname(path)
          else:
            try:
              project = by_path[path]
            except KeyError:
              pass
          # If it's not a derived project, update path->project mapping and
          # search again, as arg might actually point to a derived subproject.
          if project and not project.Derived:
            search_again = False
            for subproject in project.GetDerivedSubprojects():
              self._UpdatePathToProjectMap(subproject)
              search_again = True
            if search_again:
              project = self._GetProjectByPath(path) or project

        if not project:
          raise NoSuchProjectError(arg)
        if not missing_ok and not project.Exists:
          raise NoSuchProjectError(arg)
        if not project.MatchesGroups(groups):
          raise InvalidProjectGroupsError(arg)

        result.append(project)

@@ -105,15 +147,26 @@ class Command(object):
    result.sort(key=_getpath)
    return result

# pylint: disable=W0223
# Pylint warns that the `InteractiveCommand` and `PagedCommand` classes do not
# override method `Execute` which is abstract in `Command`. Since that method
# is always implemented in classes derived from `InteractiveCommand` and
# `PagedCommand`, this warning can be suppressed.
class InteractiveCommand(Command):
  """Command which requires user interaction on the tty and
     must not run within a pager, even if the user asks to.
  """
  def WantPager(self, opt):
    return False

class PagedCommand(Command):
  """Command which defaults to output in a pager, as its
     display tends to be larger than one screen full.
  """
  def WantPager(self, opt):
    return True

# pylint: enable=W0223

class MirrorSafeCommand(object):
  """Command permits itself to run within a mirror,
@@ -20,37 +20,54 @@ A manifest XML file (e.g. 'default.xml') roughly conforms to the
following DTD:

  <!DOCTYPE manifest [
    <!ELEMENT manifest (remote*,
    <!ELEMENT manifest (notice?,
                        remote*,
                        default?,
                        manifest-server?,
                        remove-project*,
                        project*,
                        add-remote*)>
                        repo-hooks?)>

    <!ELEMENT notice (#PCDATA)>

    <!ELEMENT remote (EMPTY)>
    <!ATTLIST remote name ID #REQUIRED>
    <!ATTLIST remote alias CDATA #IMPLIED>
    <!ATTLIST remote fetch CDATA #REQUIRED>
    <!ATTLIST remote review CDATA #IMPLIED>
    <!ATTLIST remote project-name CDATA #IMPLIED>

    <!ELEMENT default (EMPTY)>
    <!ATTLIST default remote IDREF #IMPLIED>
    <!ATTLIST default revision CDATA #IMPLIED>
    <!ATTLIST default sync-j CDATA #IMPLIED>
    <!ATTLIST default sync-c CDATA #IMPLIED>

    <!ELEMENT manifest-server (EMPTY)>
    <!ATTLIST url CDATA #REQUIRED>

    <!ELEMENT project (remote*)>
    <!ELEMENT project (annotation?,
                       project*)>
    <!ATTLIST project name CDATA #REQUIRED>
    <!ATTLIST project path CDATA #IMPLIED>
    <!ATTLIST project remote IDREF #IMPLIED>
    <!ATTLIST project revision CDATA #IMPLIED>

    <!ELEMENT add-remote (EMPTY)>
    <!ATTLIST add-remote to-project ID #REQUIRED>
    <!ATTLIST add-remote name ID #REQUIRED>
    <!ATTLIST add-remote fetch CDATA #REQUIRED>
    <!ATTLIST add-remote review CDATA #IMPLIED>
    <!ATTLIST add-remote project-name CDATA #IMPLIED>
    <!ATTLIST project groups CDATA #IMPLIED>
    <!ATTLIST project sync-c CDATA #IMPLIED>

    <!ELEMENT annotation (EMPTY)>
    <!ATTLIST annotation name CDATA #REQUIRED>
    <!ATTLIST annotation value CDATA #REQUIRED>
    <!ATTLIST annotation keep CDATA "true">

    <!ELEMENT remove-project (EMPTY)>
    <!ATTLIST remove-project name CDATA #REQUIRED>

    <!ELEMENT repo-hooks (EMPTY)>
    <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
    <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>

    <!ELEMENT include (EMPTY)>
    <!ATTLIST include name CDATA #REQUIRED>
  ]>

A description of the elements and their attributes follows.
@@ -74,6 +91,12 @@ name specified here is used as the remote name in each project's
.git/config, and is therefore automatically available to commands
like `git fetch`, `git remote`, `git pull` and `git push`.

Attribute `alias`: The alias, if specified, is used to override
`name` to be set as the remote name in each project's .git/config.
Its value can be duplicated while attribute `name` has to be unique
in the manifest file. This helps each project to be able to have
same remote name which actually points to different remote url.

Attribute `fetch`: The Git URL prefix for all projects which use
this remote. Each project's name is appended to this prefix to
form the actual URL used to clone the project.
@@ -82,25 +105,6 @@ Attribute `review`: Hostname of the Gerrit server where reviews
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.

Attribute `project-name`: Specifies the name of this project used
by the review server given in the review attribute of this element.
Only permitted when the remote element is nested inside of a project
element (see below). If not given, defaults to the name supplied
in the project's name attribute.

Element add-remote
------------------

Adds a remote to an existing project, whose name is given by the
to-project attribute. This is functionally equivalent to nesting
a remote element under the project, but has the advantage that it
can be specified in the uesr's `local_manifest.xml` to add a remote
to a project declared by the normal manifest.

The element can be used to add a fork of an existing project that
the user needs to work with.


Element default
---------------

@@ -117,12 +121,42 @@ Attribute `revision`: Name of a Git branch (e.g. `master` or
revision attribute will use this revision.


Element manifest-server
-----------------------

At most one manifest-server may be specified. The url attribute
is used to specify the URL of a manifest server, which is an
XML RPC service.

The manifest server should implement the following RPC methods:

  GetApprovedManifest(branch, target)

Return a manifest in which each project is pegged to a known good revision
for the current branch and target.

The target to use is defined by environment variables TARGET_PRODUCT
and TARGET_BUILD_VARIANT. These variables are used to create a string
of the form $TARGET_PRODUCT-$TARGET_BUILD_VARIANT, e.g. passion-userdebug.
If one of those variables or both are not present, the program will call
GetApprovedManifest without the target parameter and the manifest server
should choose a reasonable default target.

  GetManifest(tag)

Return a manifest in which each project is pegged to the revision at
the specified tag.
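As a minimal sketch of how a client might exercise this RPC interface (the server URL
below is a placeholder, and the snippet is illustrative rather than part of repo itself),
using Python's standard xmlrpclib:

  import os
  import xmlrpclib  # Python 2 standard library, matching repo's Python 2.6 target

  # Placeholder URL; in practice it comes from the manifest-server element's url attribute.
  server = xmlrpclib.ServerProxy('http://manifest-server.example.com/xmlrpc')

  product = os.environ.get('TARGET_PRODUCT')
  variant = os.environ.get('TARGET_BUILD_VARIANT')

  if product and variant:
    # Both variables present: request a pinned manifest for this exact target.
    manifest = server.GetApprovedManifest('master', '%s-%s' % (product, variant))
  else:
    # Otherwise let the server choose a reasonable default target.
    manifest = server.GetApprovedManifest('master')

  print manifest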
Element project
---------------

One or more project elements may be specified. Each element
describes a single Git repository to be cloned into the repo
client workspace.
client workspace. You may specify Git-submodules by creating a
nested project. Git-submodules will be automatically
recognized and inherit their parent's attributes, but those
may be overridden by an explicitly specified project element.

Attribute `name`: A unique name for this project. The project's
name is appended onto its remote's fetch URL to generate the actual
@@ -132,8 +166,9 @@ URL to configure the Git remote with. The URL gets formed as:

where ${remote_fetch} is the remote's fetch attribute and
${project_name} is the project's name attribute. The suffix ".git"
is always appended as repo assumes the upstream is a forrest of
bare Git repositories.
is always appended as repo assumes the upstream is a forest of
bare Git repositories. If the project has a parent element, its
name will be prefixed by the parent's.

The project name must match the name Gerrit knows, if Gerrit is
being used for code reviews.
@@ -141,6 +176,8 @@ being used for code reviews.
Attribute `path`: An optional path relative to the top directory
of the repo client where the Git working directory for this project
should be placed. If not supplied the project name is used.
If the project has a parent element, its path will be prefixed
by the parent's.

Attribute `remote`: Name of a previously defined remote element.
If not supplied the remote given by the default element is used.
@@ -152,12 +189,27 @@ Tags and/or explicit SHA-1s should work in theory, but have not
been extensively tested. If not supplied the revision given by
the default element is used.

Child element `remote`: Described like the top-level remote element,
but adds an additional remote to only this project. These additional
remotes are fetched from first on the initial `repo sync`, causing
the majority of the project's object database to be obtained through
these additional remotes.
Attribute `groups`: List of groups to which this project belongs,
whitespace or comma separated. All projects belong to the group
"all", and each project automatically belongs to a group of
its name:`name` and path:`path`. E.g. for
<project name="monkeys" path="barrel-of"/>, that project
definition is implicitly in the following manifest groups:
default, name:monkeys, and path:barrel-of. If you place a project in the
group "notdefault", it will not be automatically downloaded by repo.
If the project has a parent element, the `name` and `path` here
are the prefixed ones.

Element annotation
------------------

Zero or more annotation elements may be specified as children of a
project element. Each element describes a name-value pair that will be
exported into each project's environment during a 'forall' command,
prefixed with REPO__. In addition, there is an optional attribute
"keep" which accepts the case insensitive values "true" (default) or
"false". This attribute determines whether or not the annotation will
be kept when exported with the manifest subcommand.

Element remove-project
----------------------
@@ -170,6 +222,16 @@ This element is mostly useful in the local_manifest.xml, where
the user can remove a project, and possibly replace it with their
own definition.

Element include
---------------

This element provides the capability of including another manifest
file into the originating manifest. Normal rules apply for the
target manifest to include- it must be a usable manifest on it's own.

Attribute `name`; the manifest to include, specified relative to
the manifest repositories root.


Local Manifest
==============

@@ -191,8 +253,3 @@ For example:
Users may add projects to the local manifest prior to a `repo sync`
invocation, instructing repo to automatically download and manage
these extra projects.

Currently the only supported feature of a local manifest is to
add new remotes and/or projects. In the future a local manifest
may support picking different revisions of a project, or deleting
projects specified in the default manifest.
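For illustration only, a local manifest that adds one extra project can be generated with
a few lines of Python. The project name, path, and remote below are placeholders, and the
.repo/local_manifest.xml location is assumed to be the conventional one (the "For example:"
excerpt referenced above was elided from this diff):

  import xml.dom.minidom as minidom

  doc = minidom.Document()
  manifest = doc.createElement('manifest')
  doc.appendChild(manifest)

  # Placeholder values; they must refer to a remote and project that make
  # sense for your manifest.
  project = doc.createElement('project')
  project.setAttribute('name', 'platform/external/example')
  project.setAttribute('path', 'external/example')
  project.setAttribute('remote', 'origin')
  manifest.appendChild(project)

  fd = open('.repo/local_manifest.xml', 'w')
  try:
    fd.write(doc.toprettyxml(indent='  '))
  finally:
    fd.close()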
38 editor.py
@@ -14,6 +14,7 @@
# limitations under the License.

import os
import re
import sys
import subprocess
import tempfile
@@ -38,9 +39,10 @@ class Editor(object):
    if e:
      return e

    e = cls.globalConfig.GetString('core.editor')
    if e:
      return e
    if cls.globalConfig:
      e = cls.globalConfig.GetString('core.editor')
      if e:
        return e

    e = os.getenv('VISUAL')
    if e:
@@ -69,16 +71,38 @@ least one of these before using this command."""
    Returns:
      new value of edited text; None if editing did not succeed
    """
    editor = cls._GetEditor().split()
    editor = cls._GetEditor()
    if editor == ':':
      return data

    fd, path = tempfile.mkstemp()
    try:
      os.write(fd, data)
      os.close(fd)
      fd = None

      if subprocess.Popen(editor + [path]).wait() != 0:
        raise EditorError()
      return open(path).read()
      if re.compile("^.*[$ \t'].*$").match(editor):
        args = [editor + ' "$@"', 'sh']
        shell = True
      else:
        args = [editor]
        shell = False
      args.append(path)

      try:
        rc = subprocess.Popen(args, shell=shell).wait()
      except OSError as e:
        raise EditorError('editor failed, %s: %s %s'
                          % (str(e), editor, path))
      if rc != 0:
        raise EditorError('editor failed with exit status %d: %s %s'
                          % (rc, editor, path))

      fd2 = open(path)
      try:
        return fd2.read()
      finally:
        fd2.close()
    finally:
      if fd:
        os.close(fd)
60 error.py
@@ -24,29 +24,38 @@ class ManifestInvalidRevisionError(Exception):
class EditorError(Exception):
  """Unspecified error from the user's text editor.
  """

class GitError(Exception):
  """Unspecified internal error from git.
  """
  def __init__(self, command):
    self.command = command

  def __str__(self):
    return self.command

class ImportError(Exception):
  """An import from a non-Git format cannot be performed.
  """
  def __init__(self, reason):
    super(EditorError, self).__init__()
    self.reason = reason

  def __str__(self):
    return self.reason

class GitError(Exception):
  """Unspecified internal error from git.
  """
  def __init__(self, command):
    super(GitError, self).__init__()
    self.command = command

  def __str__(self):
    return self.command

class UploadError(Exception):
  """A bundle upload to Gerrit did not succeed.
  """
  def __init__(self, reason):
    super(UploadError, self).__init__()
    self.reason = reason

  def __str__(self):
    return self.reason

class DownloadError(Exception):
  """Cannot download a repository.
  """
  def __init__(self, reason):
    super(DownloadError, self).__init__()
    self.reason = reason

  def __str__(self):
@@ -56,6 +65,20 @@ class NoSuchProjectError(Exception):
  """A specified project does not exist in the work tree.
  """
  def __init__(self, name=None):
    super(NoSuchProjectError, self).__init__()
    self.name = name

  def __str__(self):
    if self.Name is None:
      return 'in current directory'
    return self.name


class InvalidProjectGroupsError(Exception):
  """A specified project is not suitable for the specified groups
  """
  def __init__(self, name=None):
    super(InvalidProjectGroupsError, self).__init__()
    self.name = name

  def __str__(self):
@@ -68,5 +91,12 @@ class RepoChangedException(Exception):
     repo or manifest repositories. In this special case we must
     use exec to re-execute repo with the new code and manifest.
  """
  def __init__(self, extra_args=[]):
    self.extra_args = extra_args
  def __init__(self, extra_args=None):
    super(RepoChangedException, self).__init__()
    self.extra_args = extra_args or []

class HookError(Exception):
  """Thrown if a 'repo-hook' could not be run.

  The common case is that the file wasn't present when we tried to run it.
  """
132 git_command.py
@@ -16,20 +16,63 @@
import os
import sys
import subprocess
import tempfile
from signal import SIGTERM
from error import GitError
from trace import REPO_TRACE, IsTrace, Trace

GIT = 'git'
MIN_GIT_VERSION = (1, 5, 4)
GIT_DIR = 'GIT_DIR'
REPO_TRACE = 'REPO_TRACE'

LAST_GITDIR = None
LAST_CWD = None
try:
  TRACE = os.environ[REPO_TRACE] == '1'
except KeyError:
  TRACE = False

_ssh_proxy_path = None
_ssh_sock_path = None
_ssh_clients = []

def ssh_sock(create=True):
  global _ssh_sock_path
  if _ssh_sock_path is None:
    if not create:
      return None
    tmp_dir = '/tmp'
    if not os.path.exists(tmp_dir):
      tmp_dir = tempfile.gettempdir()
    _ssh_sock_path = os.path.join(
      tempfile.mkdtemp('', 'ssh-', tmp_dir),
      'master-%r@%h:%p')
  return _ssh_sock_path

def _ssh_proxy():
  global _ssh_proxy_path
  if _ssh_proxy_path is None:
    _ssh_proxy_path = os.path.join(
      os.path.dirname(__file__),
      'git_ssh')
  return _ssh_proxy_path

def _add_ssh_client(p):
  _ssh_clients.append(p)

def _remove_ssh_client(p):
  try:
    _ssh_clients.remove(p)
  except ValueError:
    pass

def terminate_ssh_clients():
  global _ssh_clients
  for p in _ssh_clients:
    try:
      os.kill(p.pid, SIGTERM)
      p.wait()
    except OSError:
      pass
  _ssh_clients = []

_git_version = None

class _GitCall(object):
  def version(self):
@@ -38,6 +81,21 @@ class _GitCall(object):
      return p.stdout
    return None

  def version_tuple(self):
    global _git_version

    if _git_version is None:
      ver_str = git.version()
      if ver_str.startswith('git version '):
        _git_version = tuple(
          map(lambda x: int(x),
            ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
          ))
      else:
        print >>sys.stderr, 'fatal: "%s" unsupported' % ver_str
        sys.exit(1)
    return _git_version

  def __getattr__(self, name):
    name = name.replace('_','-')
    def fun(*cmdv):
@@ -47,6 +105,19 @@ class _GitCall(object):
    return fun
git = _GitCall()

def git_require(min_version, fail=False):
  git_version = git.version_tuple()
  if min_version <= git_version:
    return True
  if fail:
    need = '.'.join(map(lambda x: str(x), min_version))
    print >>sys.stderr, 'fatal: git %s or later required' % need
    sys.exit(1)
  return False

def _setenv(env, name, value):
  env[name] = value.encode()

class GitCommand(object):
  def __init__(self,
               project,
@@ -56,9 +127,10 @@ class GitCommand(object):
               capture_stdout = False,
               capture_stderr = False,
               disable_editor = False,
               ssh_proxy = False,
               cwd = None,
               gitdir = None):
    env = dict(os.environ)
    env = os.environ.copy()

    for e in [REPO_TRACE,
              GIT_DIR,
@@ -71,7 +143,16 @@ class GitCommand(object):
        del env[e]

    if disable_editor:
      env['GIT_EDITOR'] = ':'
      _setenv(env, 'GIT_EDITOR', ':')
    if ssh_proxy:
      _setenv(env, 'REPO_SSH_SOCK', ssh_sock())
      _setenv(env, 'GIT_SSH', _ssh_proxy())
    if 'http_proxy' in env and 'darwin' == sys.platform:
      s = "'http.proxy=%s'" % (env['http_proxy'],)
      p = env.get('GIT_CONFIG_PARAMETERS')
      if p is not None:
        s = p + ' ' + s
      _setenv(env, 'GIT_CONFIG_PARAMETERS', s)

    if project:
      if not cwd:
@@ -82,7 +163,7 @@ class GitCommand(object):
    command = [GIT]
    if bare:
      if gitdir:
        env[GIT_DIR] = gitdir
        _setenv(env, GIT_DIR, gitdir)
      cwd = None
    command.extend(cmdv)

@@ -101,7 +182,7 @@ class GitCommand(object):
    else:
      stderr = None

    if TRACE:
    if IsTrace():
      global LAST_CWD
      global LAST_GITDIR

@@ -127,7 +208,7 @@ class GitCommand(object):
        dbg += ' 1>|'
      if stderr == subprocess.PIPE:
        dbg += ' 2>|'
      print >>sys.stderr, dbg
      Trace('%s', dbg)

    try:
      p = subprocess.Popen(command,
@@ -136,29 +217,20 @@ class GitCommand(object):
                           stdin = stdin,
                           stdout = stdout,
                           stderr = stderr)
    except Exception, e:
    except Exception as e:
      raise GitError('%s: %s' % (command[1], e))

    if ssh_proxy:
      _add_ssh_client(p)

    self.process = p
    self.stdin = p.stdin

  def Wait(self):
    p = self.process

    if p.stdin:
      p.stdin.close()
      self.stdin = None

    if p.stdout:
      self.stdout = p.stdout.read()
      p.stdout.close()
    else:
      p.stdout = None

    if p.stderr:
      self.stderr = p.stderr.read()
      p.stderr.close()
    else:
      p.stderr = None

    return self.process.wait()
    try:
      p = self.process
      (self.stdout, self.stderr) = p.communicate()
      rc = p.returncode
    finally:
      _remove_ssh_client(p)
    return rc
444 git_config.py
@@ -13,12 +13,25 @@
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import cPickle
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from urllib2 import urlopen, HTTPError
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
from signal import SIGTERM
|
||||
from error import GitError, UploadError
|
||||
from trace import Trace
|
||||
|
||||
from git_command import GitCommand
|
||||
from git_command import ssh_sock
|
||||
from git_command import terminate_ssh_clients
|
||||
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
@ -29,6 +42,13 @@ REVIEW_CACHE = dict()
|
||||
def IsId(rev):
|
||||
return ID_RE.match(rev)
|
||||
|
||||
def _key(name):
|
||||
parts = name.split('.')
|
||||
if len(parts) < 2:
|
||||
return name.lower()
|
||||
parts[ 0] = parts[ 0].lower()
|
||||
parts[-1] = parts[-1].lower()
|
||||
return '.'.join(parts)
|
||||
|
||||
class GitConfig(object):
|
||||
_ForUser = None
|
||||
@ -36,26 +56,33 @@ class GitConfig(object):
|
||||
@classmethod
|
||||
def ForUser(cls):
|
||||
if cls._ForUser is None:
|
||||
cls._ForUser = cls(file = os.path.expanduser('~/.gitconfig'))
|
||||
cls._ForUser = cls(configfile = os.path.expanduser('~/.gitconfig'))
|
||||
return cls._ForUser
|
||||
|
||||
@classmethod
|
||||
def ForRepository(cls, gitdir, defaults=None):
|
||||
return cls(file = os.path.join(gitdir, 'config'),
|
||||
return cls(configfile = os.path.join(gitdir, 'config'),
|
||||
defaults = defaults)
|
||||
|
||||
def __init__(self, file, defaults=None):
|
||||
self.file = file
|
||||
def __init__(self, configfile, defaults=None, pickleFile=None):
|
||||
self.file = configfile
|
||||
self.defaults = defaults
|
||||
self._cache_dict = None
|
||||
self._section_dict = None
|
||||
self._remotes = {}
|
||||
self._branches = {}
|
||||
|
||||
if pickleFile is None:
|
||||
self._pickle = os.path.join(
|
||||
os.path.dirname(self.file),
|
||||
'.repopickle_' + os.path.basename(self.file))
|
||||
else:
|
||||
self._pickle = pickleFile
|
||||
|
||||
def Has(self, name, include_defaults = True):
|
||||
"""Return true if this configuration file has the key.
|
||||
"""
|
||||
name = name.lower()
|
||||
if name in self._cache:
|
||||
if _key(name) in self._cache:
|
||||
return True
|
||||
if include_defaults and self.defaults:
|
||||
return self.defaults.Has(name, include_defaults = True)
|
||||
@ -77,22 +104,20 @@ class GitConfig(object):
|
||||
return False
|
||||
return None
|
||||
|
||||
def GetString(self, name, all=False):
|
||||
def GetString(self, name, all_keys=False):
|
||||
"""Get the first value for a key, or None if it is not defined.
|
||||
|
||||
This configuration file is used first, if the key is not
|
||||
defined or all = True then the defaults are also searched.
|
||||
defined or all_keys = True then the defaults are also searched.
|
||||
"""
|
||||
name = name.lower()
|
||||
|
||||
try:
|
||||
v = self._cache[name]
|
||||
v = self._cache[_key(name)]
|
||||
except KeyError:
|
||||
if self.defaults:
|
||||
return self.defaults.GetString(name, all = all)
|
||||
return self.defaults.GetString(name, all_keys = all_keys)
|
||||
v = []
|
||||
|
||||
if not all:
|
||||
if not all_keys:
|
||||
if v:
|
||||
return v[0]
|
||||
return None
|
||||
@ -100,7 +125,7 @@ class GitConfig(object):
|
||||
r = []
|
||||
r.extend(v)
|
||||
if self.defaults:
|
||||
r.extend(self.defaults.GetString(name, all = True))
|
||||
r.extend(self.defaults.GetString(name, all_keys = True))
|
||||
return r
|
||||
|
||||
def SetString(self, name, value):
|
||||
@ -110,16 +135,16 @@ class GitConfig(object):
|
||||
The supplied value should be either a string,
|
||||
or a list of strings (to store multiple values).
|
||||
"""
|
||||
name = name.lower()
|
||||
key = _key(name)
|
||||
|
||||
try:
|
||||
old = self._cache[name]
|
||||
old = self._cache[key]
|
||||
except KeyError:
|
||||
old = []
|
||||
|
||||
if value is None:
|
||||
if old:
|
||||
del self._cache[name]
|
||||
del self._cache[key]
|
||||
self._do('--unset-all', name)
|
||||
|
||||
elif isinstance(value, list):
|
||||
@ -130,13 +155,13 @@ class GitConfig(object):
|
||||
self.SetString(name, value[0])
|
||||
|
||||
elif old != value:
|
||||
self._cache[name] = list(value)
|
||||
self._cache[key] = list(value)
|
||||
self._do('--replace-all', name, value[0])
|
||||
for i in xrange(1, len(value)):
|
||||
self._do('--add', name, value[i])
|
||||
|
||||
elif len(old) != 1 or old[0] != value:
|
||||
self._cache[name] = [value]
|
||||
self._cache[key] = [value]
|
||||
self._do('--replace-all', name, value)
|
||||
|
||||
def GetRemote(self, name):
|
||||
@ -159,6 +184,47 @@ class GitConfig(object):
|
||||
self._branches[b.name] = b
|
||||
return b
|
||||
|
||||
def GetSubSections(self, section):
|
||||
"""List all subsection names matching $section.*.*
|
||||
"""
|
||||
return self._sections.get(section, set())
|
||||
|
||||
def HasSection(self, section, subsection = ''):
|
||||
"""Does at least one key in section.subsection exist?
|
||||
"""
|
||||
try:
|
||||
return subsection in self._sections[section]
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def UrlInsteadOf(self, url):
|
||||
"""Resolve any url.*.insteadof references.
|
||||
"""
|
||||
for new_url in self.GetSubSections('url'):
|
||||
old_url = self.GetString('url.%s.insteadof' % new_url)
|
||||
if old_url is not None and url.startswith(old_url):
|
||||
return new_url + url[len(old_url):]
|
||||
return url
|
||||
|
||||
@property
|
||||
def _sections(self):
|
||||
d = self._section_dict
|
||||
if d is None:
|
||||
d = {}
|
||||
for name in self._cache.keys():
|
||||
p = name.split('.')
|
||||
if 2 == len(p):
|
||||
section = p[0]
|
||||
subsect = ''
|
||||
else:
|
||||
section = p[0]
|
||||
subsect = '.'.join(p[1:-1])
|
||||
if section not in d:
|
||||
d[section] = set()
|
||||
d[section].add(subsect)
|
||||
self._section_dict = d
|
||||
return d
|
||||
|
||||
@property
|
||||
def _cache(self):
|
||||
if self._cache_dict is None:
|
||||
@ -166,21 +232,74 @@ class GitConfig(object):
|
||||
return self._cache_dict
|
||||
|
||||
def _Read(self):
|
||||
d = self._do('--null', '--list')
|
||||
c = {}
|
||||
while d:
|
||||
lf = d.index('\n')
|
||||
nul = d.index('\0', lf + 1)
|
||||
d = self._ReadPickle()
|
||||
if d is None:
|
||||
d = self._ReadGit()
|
||||
self._SavePickle(d)
|
||||
return d
|
||||
|
||||
key = d[0:lf]
|
||||
val = d[lf + 1:nul]
|
||||
def _ReadPickle(self):
|
||||
try:
|
||||
if os.path.getmtime(self._pickle) \
|
||||
<= os.path.getmtime(self.file):
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
except OSError:
|
||||
return None
|
||||
try:
|
||||
Trace(': unpickle %s', self.file)
|
||||
fd = open(self._pickle, 'rb')
|
||||
try:
|
||||
return cPickle.load(fd)
|
||||
finally:
|
||||
fd.close()
|
||||
except EOFError:
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
except IOError:
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
except cPickle.PickleError:
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
|
||||
def _SavePickle(self, cache):
|
||||
try:
|
||||
fd = open(self._pickle, 'wb')
|
||||
try:
|
||||
cPickle.dump(cache, fd, cPickle.HIGHEST_PROTOCOL)
|
||||
finally:
|
||||
fd.close()
|
||||
except IOError:
|
||||
if os.path.exists(self._pickle):
|
||||
os.remove(self._pickle)
|
||||
except cPickle.PickleError:
|
||||
if os.path.exists(self._pickle):
|
||||
os.remove(self._pickle)
|
||||
|
||||
def _ReadGit(self):
|
||||
"""
|
||||
Read configuration data from git.
|
||||
|
||||
This internal method populates the GitConfig cache.
|
||||
|
||||
"""
|
||||
c = {}
|
||||
d = self._do('--null', '--list')
|
||||
if d is None:
|
||||
return c
|
||||
for line in d.rstrip('\0').split('\0'):
|
||||
if '\n' in line:
|
||||
key, val = line.split('\n', 1)
|
||||
else:
|
||||
key = line
|
||||
val = None
|
||||
|
||||
if key in c:
|
||||
c[key].append(val)
|
||||
else:
|
||||
c[key] = [val]
|
||||
|
||||
d = d[nul + 1:]
|
||||
return c
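The --null --list output parsed above is a stream of NUL-terminated entries, each holding a key and an optional value separated by the first newline. A short self-contained sketch over a hand-written sample string (the keys and values are invented):

# Parse `git config --null --list` style output: entries are NUL-separated and
# the key is split from its (possibly multi-line) value at the first '\n'.
sample = 'core.bare\nfalse\0remote.origin.url\ngit://example.org/repo.git\0'
cfg = {}
for entry in sample.rstrip('\0').split('\0'):
  if '\n' in entry:
    key, val = entry.split('\n', 1)
  else:
    key, val = entry, None
  cfg.setdefault(key, []).append(val)
print(cfg['remote.origin.url'])  # ['git://example.org/repo.git']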
|
||||
|
||||
def _do(self, *args):
|
||||
@ -253,6 +372,150 @@ class RefSpec(object):
|
||||
return s
|
||||
|
||||
|
||||
_master_processes = []
|
||||
_master_keys = set()
|
||||
_ssh_master = True
|
||||
_master_keys_lock = None
|
||||
|
||||
def init_ssh():
|
||||
"""Should be called once at the start of repo to init ssh master handling.
|
||||
|
||||
At the moment, all we do is to create our lock.
|
||||
"""
|
||||
global _master_keys_lock
|
||||
assert _master_keys_lock is None, "Should only call init_ssh once"
|
||||
_master_keys_lock = _threading.Lock()
|
||||
|
||||
def _open_ssh(host, port=None):
|
||||
global _ssh_master
|
||||
|
||||
# Acquire the lock. This is needed to prevent opening multiple masters for
|
||||
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
|
||||
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
|
||||
# one that was passed to repo init.
|
||||
_master_keys_lock.acquire()
|
||||
try:
|
||||
|
||||
# Check to see whether we already think that the master is running; if we
|
||||
# think it's already running, return right away.
|
||||
if port is not None:
|
||||
key = '%s:%s' % (host, port)
|
||||
else:
|
||||
key = host
|
||||
|
||||
if key in _master_keys:
|
||||
return True
|
||||
|
||||
if not _ssh_master \
|
||||
or 'GIT_SSH' in os.environ \
|
||||
or sys.platform in ('win32', 'cygwin'):
|
||||
# failed earlier, or cygwin ssh can't do this
|
||||
#
|
||||
return False
|
||||
|
||||
# We will make two calls to ssh; this is the common part of both calls.
|
||||
command_base = ['ssh',
|
||||
'-o','ControlPath %s' % ssh_sock(),
|
||||
host]
|
||||
if port is not None:
|
||||
command_base[1:1] = ['-p',str(port)]
|
||||
|
||||
# Since the key wasn't in _master_keys, we think that master isn't running.
|
||||
# ...but before actually starting a master, we'll double-check. This can
|
||||
# be important because we can't tell that 'git@myhost.com' is the same
|
||||
# as 'myhost.com' where "User git" is set up in the user's ~/.ssh/config file.
|
||||
check_command = command_base + ['-O','check']
|
||||
try:
|
||||
Trace(': %s', ' '.join(check_command))
|
||||
check_process = subprocess.Popen(check_command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
check_process.communicate() # read output, but ignore it...
|
||||
isnt_running = check_process.wait()
|
||||
|
||||
if not isnt_running:
|
||||
# Our double-check found that the master _was_ in fact running. Add to
|
||||
# the list of keys.
|
||||
_master_keys.add(key)
|
||||
return True
|
||||
except Exception:
|
||||
# Ignore exceptions. We will fall back to the normal command and print
|
||||
# to the log there.
|
||||
pass
|
||||
|
||||
command = command_base[:1] + \
|
||||
['-M', '-N'] + \
|
||||
command_base[1:]
|
||||
try:
|
||||
Trace(': %s', ' '.join(command))
|
||||
p = subprocess.Popen(command)
|
||||
except Exception as e:
|
||||
_ssh_master = False
|
||||
print >>sys.stderr, \
|
||||
'\nwarn: cannot enable ssh control master for %s:%s\n%s' \
|
||||
% (host,port, str(e))
|
||||
return False
|
||||
|
||||
_master_processes.append(p)
|
||||
_master_keys.add(key)
|
||||
time.sleep(1)
|
||||
return True
|
||||
finally:
|
||||
_master_keys_lock.release()
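For reference, the two ssh invocations assembled above come out roughly as follows; the host, port and control socket path are assumed examples, not values produced by ssh_sock():

# Illustration only: rebuild the check and master command lines for an assumed
# host/port and socket path.
sock = '/tmp/ssh-repo-XXXXXX/master-%r@%h:%p'
command_base = ['ssh', '-o', 'ControlPath %s' % sock, 'gerrit.example.com']
command_base[1:1] = ['-p', '29418']

check_command = command_base + ['-O', 'check']                        # probe for a running master
master_command = command_base[:1] + ['-M', '-N'] + command_base[1:]   # start a master, run no command

print(' '.join(check_command))
print(' '.join(master_command))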
|
||||
|
||||
def close_ssh():
|
||||
global _master_keys_lock
|
||||
|
||||
terminate_ssh_clients()
|
||||
|
||||
for p in _master_processes:
|
||||
try:
|
||||
os.kill(p.pid, SIGTERM)
|
||||
p.wait()
|
||||
except OSError:
|
||||
pass
|
||||
del _master_processes[:]
|
||||
_master_keys.clear()
|
||||
|
||||
d = ssh_sock(create=False)
|
||||
if d:
|
||||
try:
|
||||
os.rmdir(os.path.dirname(d))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# We're done with the lock, so we can delete it.
|
||||
_master_keys_lock = None
|
||||
|
||||
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
|
||||
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
|
||||
|
||||
def GetSchemeFromUrl(url):
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
return m.group(1)
|
||||
return None
|
||||
|
||||
def _preconnect(url):
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
scheme = m.group(1)
|
||||
host = m.group(2)
|
||||
if ':' in host:
|
||||
host, port = host.split(':')
|
||||
else:
|
||||
port = None
|
||||
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
|
||||
return _open_ssh(host, port)
|
||||
return False
|
||||
|
||||
m = URI_SCP.match(url)
|
||||
if m:
|
||||
host = m.group(1)
|
||||
return _open_ssh(host)
|
||||
|
||||
return False
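A self-contained sketch of the URL classification _preconnect performs, reusing the two regular expressions defined above; the URLs passed in are made-up examples:

import re

URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')

def classify(url):
  # Full URLs (scheme://user@host[:port]/...) are matched first; otherwise
  # try the scp-like form (user@host:path).
  m = URI_ALL.match(url)
  if m:
    return 'url', m.group(1), m.group(2)
  m = URI_SCP.match(url)
  if m:
    return 'scp', None, m.group(1)
  return 'other', None, None

print(classify('ssh://git@review.example.com:29418/platform/manifest'))
print(classify('git@git.example.org:team/project.git'))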
|
||||
|
||||
class Remote(object):
|
||||
"""Configuration options related to a remote.
|
||||
"""
|
||||
@ -263,12 +526,36 @@ class Remote(object):
|
||||
self.review = self._Get('review')
|
||||
self.projectname = self._Get('projectname')
|
||||
self.fetch = map(lambda x: RefSpec.FromString(x),
|
||||
self._Get('fetch', all=True))
|
||||
self._review_protocol = None
|
||||
self._Get('fetch', all_keys=True))
|
||||
self._review_url = None
|
||||
|
||||
@property
|
||||
def ReviewProtocol(self):
|
||||
if self._review_protocol is None:
|
||||
def _InsteadOf(self):
|
||||
globCfg = GitConfig.ForUser()
|
||||
urlList = globCfg.GetSubSections('url')
|
||||
longest = ""
|
||||
longestUrl = ""
|
||||
|
||||
for url in urlList:
|
||||
key = "url." + url + ".insteadOf"
|
||||
insteadOfList = globCfg.GetString(key, all_keys=True)
|
||||
|
||||
for insteadOf in insteadOfList:
|
||||
if self.url.startswith(insteadOf) \
|
||||
and len(insteadOf) > len(longest):
|
||||
longest = insteadOf
|
||||
longestUrl = url
|
||||
|
||||
if len(longest) == 0:
|
||||
return self.url
|
||||
|
||||
return self.url.replace(longest, longestUrl, 1)
|
||||
|
||||
def PreConnectFetch(self):
|
||||
connectionUrl = self._InsteadOf()
|
||||
return _preconnect(connectionUrl)
|
||||
|
||||
def ReviewUrl(self, userEmail):
|
||||
if self._review_url is None:
|
||||
if self.review is None:
|
||||
return None
|
||||
|
||||
@ -277,52 +564,47 @@ class Remote(object):
|
||||
u = 'http://%s' % u
|
||||
if u.endswith('/Gerrit'):
|
||||
u = u[:len(u) - len('/Gerrit')]
|
||||
if not u.endswith('/ssh_info'):
|
||||
if not u.endswith('/'):
|
||||
u += '/'
|
||||
u += 'ssh_info'
|
||||
if u.endswith('/ssh_info'):
|
||||
u = u[:len(u) - len('/ssh_info')]
|
||||
if not u.endswith('/'):
|
||||
u += '/'
|
||||
http_url = u
|
||||
|
||||
if u in REVIEW_CACHE:
|
||||
info = REVIEW_CACHE[u]
|
||||
self._review_protocol = info[0]
|
||||
self._review_host = info[1]
|
||||
self._review_port = info[2]
|
||||
self._review_url = REVIEW_CACHE[u]
|
||||
elif 'REPO_HOST_PORT_INFO' in os.environ:
|
||||
host, port = os.environ['REPO_HOST_PORT_INFO'].split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
else:
|
||||
try:
|
||||
info = urlopen(u).read()
|
||||
if info == 'NOT_AVAILABLE':
|
||||
raise UploadError('Upload over ssh unavailable')
|
||||
info_url = u + 'ssh_info'
|
||||
info = urllib2.urlopen(info_url).read()
|
||||
if '<' in info:
|
||||
# Assume the server gave us some sort of HTML
|
||||
# response back, like maybe a login page.
|
||||
#
|
||||
raise UploadError('Cannot read %s:\n%s' % (u, info))
|
||||
raise UploadError('%s: Cannot parse response' % info_url)
|
||||
|
||||
self._review_protocol = 'ssh'
|
||||
self._review_host = info.split(" ")[0]
|
||||
self._review_port = info.split(" ")[1]
|
||||
except HTTPError, e:
|
||||
if e.code == 404:
|
||||
self._review_protocol = 'http-post'
|
||||
self._review_host = None
|
||||
self._review_port = None
|
||||
if info == 'NOT_AVAILABLE':
|
||||
# Assume HTTP if SSH is not enabled.
|
||||
self._review_url = http_url + 'p/'
|
||||
else:
|
||||
raise UploadError('Cannot guess Gerrit version')
|
||||
host, port = info.split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
except urllib2.HTTPError as e:
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
except urllib2.URLError as e:
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
|
||||
REVIEW_CACHE[u] = (
|
||||
self._review_protocol,
|
||||
self._review_host,
|
||||
self._review_port)
|
||||
return self._review_protocol
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
return self._review_url + self.projectname
|
||||
|
||||
def SshReviewUrl(self, userEmail):
|
||||
if self.ReviewProtocol != 'ssh':
|
||||
return None
|
||||
return 'ssh://%s@%s:%s/%s' % (
|
||||
userEmail.split("@")[0],
|
||||
self._review_host,
|
||||
self._review_port,
|
||||
self.projectname)
|
||||
def _SshReviewUrl(self, userEmail, host, port):
|
||||
username = self._config.GetString('review.%s.username' % self.review)
|
||||
if username is None:
|
||||
username = userEmail.split('@')[0]
|
||||
return 'ssh://%s@%s:%s/' % (username, host, port)
|
||||
|
||||
def ToLocal(self, rev):
|
||||
"""Convert a remote revision string to something we have locally.
|
||||
@ -369,9 +651,9 @@ class Remote(object):
|
||||
key = 'remote.%s.%s' % (self.name, key)
|
||||
return self._config.SetString(key, value)
|
||||
|
||||
def _Get(self, key, all=False):
|
||||
def _Get(self, key, all_keys=False):
|
||||
key = 'remote.%s.%s' % (self.name, key)
|
||||
return self._config.GetString(key, all = all)
|
||||
return self._config.GetString(key, all_keys = all_keys)
|
||||
|
||||
|
||||
class Branch(object):
|
||||
@ -399,16 +681,28 @@ class Branch(object):
|
||||
def Save(self):
|
||||
"""Save this branch back into the configuration.
|
||||
"""
|
||||
self._Set('merge', self.merge)
|
||||
if self.remote:
|
||||
self._Set('remote', self.remote.name)
|
||||
if self._config.HasSection('branch', self.name):
|
||||
if self.remote:
|
||||
self._Set('remote', self.remote.name)
|
||||
else:
|
||||
self._Set('remote', None)
|
||||
self._Set('merge', self.merge)
|
||||
|
||||
else:
|
||||
self._Set('remote', None)
|
||||
fd = open(self._config.file, 'ab')
|
||||
try:
|
||||
fd.write('[branch "%s"]\n' % self.name)
|
||||
if self.remote:
|
||||
fd.write('\tremote = %s\n' % self.remote.name)
|
||||
if self.merge:
|
||||
fd.write('\tmerge = %s\n' % self.merge)
|
||||
finally:
|
||||
fd.close()
|
||||
|
||||
def _Set(self, key, value):
|
||||
key = 'branch.%s.%s' % (self.name, key)
|
||||
return self._config.SetString(key, value)
|
||||
|
||||
def _Get(self, key, all=False):
|
||||
def _Get(self, key, all_keys=False):
|
||||
key = 'branch.%s.%s' % (self.name, key)
|
||||
return self._config.GetString(key, all = all)
|
||||
return self._config.GetString(key, all_keys = all_keys)
|
||||
|
git_refs.py (new normal file, 161 lines)
@ -0,0 +1,161 @@
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from trace import Trace
|
||||
|
||||
HEAD = 'HEAD'
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
R_PUB = 'refs/published/'
|
||||
R_M = 'refs/remotes/m/'
|
||||
|
||||
|
||||
class GitRefs(object):
|
||||
def __init__(self, gitdir):
|
||||
self._gitdir = gitdir
|
||||
self._phyref = None
|
||||
self._symref = None
|
||||
self._mtime = {}
|
||||
|
||||
@property
|
||||
def all(self):
|
||||
self._EnsureLoaded()
|
||||
return self._phyref
|
||||
|
||||
def get(self, name):
|
||||
try:
|
||||
return self.all[name]
|
||||
except KeyError:
|
||||
return ''
|
||||
|
||||
def deleted(self, name):
|
||||
if self._phyref is not None:
|
||||
if name in self._phyref:
|
||||
del self._phyref[name]
|
||||
|
||||
if name in self._symref:
|
||||
del self._symref[name]
|
||||
|
||||
if name in self._mtime:
|
||||
del self._mtime[name]
|
||||
|
||||
def symref(self, name):
|
||||
try:
|
||||
self._EnsureLoaded()
|
||||
return self._symref[name]
|
||||
except KeyError:
|
||||
return ''
|
||||
|
||||
def _EnsureLoaded(self):
|
||||
if self._phyref is None or self._NeedUpdate():
|
||||
self._LoadAll()
|
||||
|
||||
def _NeedUpdate(self):
|
||||
Trace(': scan refs %s', self._gitdir)
|
||||
|
||||
for name, mtime in self._mtime.iteritems():
|
||||
try:
|
||||
if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
|
||||
return True
|
||||
except OSError:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _LoadAll(self):
|
||||
Trace(': load refs %s', self._gitdir)
|
||||
|
||||
self._phyref = {}
|
||||
self._symref = {}
|
||||
self._mtime = {}
|
||||
|
||||
self._ReadPackedRefs()
|
||||
self._ReadLoose('refs/')
|
||||
self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
|
||||
|
||||
scan = self._symref
|
||||
attempts = 0
|
||||
while scan and attempts < 5:
|
||||
scan_next = {}
|
||||
for name, dest in scan.iteritems():
|
||||
if dest in self._phyref:
|
||||
self._phyref[name] = self._phyref[dest]
|
||||
else:
|
||||
scan_next[name] = dest
|
||||
scan = scan_next
|
||||
attempts += 1
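The loop above flattens chains of symbolic refs onto their physical ids in at most five passes. A toy run with invented ref data illustrates the idea:

# Toy illustration of the bounded symref-resolution pass (names and ids invented).
phyref = {'refs/heads/master': '1111111111111111111111111111111111111111'}
symref = {'HEAD': 'refs/heads/master', 'refs/remotes/m/master': 'HEAD'}

scan = symref
attempts = 0
while scan and attempts < 5:
  scan_next = {}
  for name, dest in scan.items():
    if dest in phyref:
      phyref[name] = phyref[dest]
    else:
      scan_next[name] = dest
  scan = scan_next
  attempts += 1

print(phyref['refs/remotes/m/master'])  # resolved through HEAD to the master id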
|
||||
|
||||
def _ReadPackedRefs(self):
|
||||
path = os.path.join(self._gitdir, 'packed-refs')
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
mtime = os.path.getmtime(path)
|
||||
except IOError:
|
||||
return
|
||||
except OSError:
|
||||
return
|
||||
try:
|
||||
for line in fd:
|
||||
if line[0] == '#':
|
||||
continue
|
||||
if line[0] == '^':
|
||||
continue
|
||||
|
||||
line = line[:-1]
|
||||
p = line.split(' ')
|
||||
ref_id = p[0]
|
||||
name = p[1]
|
||||
|
||||
self._phyref[name] = ref_id
|
||||
finally:
|
||||
fd.close()
|
||||
self._mtime['packed-refs'] = mtime
|
||||
|
||||
def _ReadLoose(self, prefix):
|
||||
base = os.path.join(self._gitdir, prefix)
|
||||
for name in os.listdir(base):
|
||||
p = os.path.join(base, name)
|
||||
if os.path.isdir(p):
|
||||
self._mtime[prefix] = os.path.getmtime(base)
|
||||
self._ReadLoose(prefix + name + '/')
|
||||
elif name.endswith('.lock'):
|
||||
pass
|
||||
else:
|
||||
self._ReadLoose1(p, prefix + name)
|
||||
|
||||
def _ReadLoose1(self, path, name):
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
except:
|
||||
return
|
||||
|
||||
try:
|
||||
try:
|
||||
mtime = os.path.getmtime(path)
|
||||
ref_id = fd.readline()
|
||||
except:
|
||||
return
|
||||
finally:
|
||||
fd.close()
|
||||
|
||||
if not ref_id:
|
||||
return
|
||||
ref_id = ref_id[:-1]
|
||||
|
||||
if ref_id.startswith('ref: '):
|
||||
self._symref[name] = ref_id[5:]
|
||||
else:
|
||||
self._phyref[name] = ref_id
|
||||
self._mtime[name] = mtime
|
git_ssh (new executable file, 2 lines)
@ -0,0 +1,2 @@
#!/bin/sh
exec ssh -o "ControlMaster no" -o "ControlPath $REPO_SSH_SOCK" "$@"
hooks/commit-msg (new executable file, 174 lines)
@ -0,0 +1,174 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 2.5-rc0
|
||||
#
|
||||
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
CHANGE_ID_AFTER="Bug|Issue"
|
||||
MSG="$1"
|
||||
|
||||
# Check for, and add if missing, a unique Change-Id
|
||||
#
|
||||
add_ChangeId() {
|
||||
clean_message=`sed -e '
|
||||
/^diff --git a\/.*/{
|
||||
s///
|
||||
q
|
||||
}
|
||||
/^Signed-off-by:/d
|
||||
/^#/d
|
||||
' "$MSG" | git stripspace`
|
||||
if test -z "$clean_message"
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
# Does Change-Id: already exist? if so, exit (no change).
|
||||
if grep -i '^Change-Id:' "$MSG" >/dev/null
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
id=`_gen_ChangeId`
|
||||
T="$MSG.tmp.$$"
|
||||
AWK=awk
|
||||
if [ -x /usr/xpg4/bin/awk ]; then
|
||||
# Solaris AWK is just too broken
|
||||
AWK=/usr/xpg4/bin/awk
|
||||
fi
|
||||
|
||||
# How this works:
|
||||
# - parse the commit message as (textLine+ blankLine*)*
|
||||
# - assume textLine+ to be a footer until proven otherwise
|
||||
# - exception: the first block is not footer (as it is the title)
|
||||
# - read textLine+ into a variable
|
||||
# - then count blankLines
|
||||
# - once the next textLine appears, print textLine+ blankLine* as these
|
||||
# aren't footer
|
||||
# - in END, the last textLine+ block is available for footer parsing
|
||||
$AWK '
|
||||
BEGIN {
|
||||
# while we start with the assumption that textLine+
|
||||
# is a footer, the first block is not.
|
||||
isFooter = 0
|
||||
footerComment = 0
|
||||
blankLines = 0
|
||||
}
|
||||
|
||||
# Skip lines starting with "#" without any spaces before it.
|
||||
/^#/ { next }
|
||||
|
||||
# Skip the line starting with the diff command and everything after it,
|
||||
# up to the end of the file, assuming it is only patch data.
|
||||
# If more than one line before the diff was empty, strip all but one.
|
||||
/^diff --git a/ {
|
||||
blankLines = 0
|
||||
while (getline) { }
|
||||
next
|
||||
}
|
||||
|
||||
# Count blank lines outside footer comments
|
||||
/^$/ && (footerComment == 0) {
|
||||
blankLines++
|
||||
next
|
||||
}
|
||||
|
||||
# Catch footer comment
|
||||
/^\[[a-zA-Z0-9-]+:/ && (isFooter == 1) {
|
||||
footerComment = 1
|
||||
}
|
||||
|
||||
/]$/ && (footerComment == 1) {
|
||||
footerComment = 2
|
||||
}
|
||||
|
||||
# We have a non-blank line after blank lines. Handle this.
|
||||
(blankLines > 0) {
|
||||
print lines
|
||||
for (i = 0; i < blankLines; i++) {
|
||||
print ""
|
||||
}
|
||||
|
||||
lines = ""
|
||||
blankLines = 0
|
||||
isFooter = 1
|
||||
footerComment = 0
|
||||
}
|
||||
|
||||
# Detect that the current block is not the footer
|
||||
(footerComment == 0) && (!/^\[?[a-zA-Z0-9-]+:/ || /^[a-zA-Z0-9-]+:\/\//) {
|
||||
isFooter = 0
|
||||
}
|
||||
|
||||
{
|
||||
# We need this information about the current last comment line
|
||||
if (footerComment == 2) {
|
||||
footerComment = 0
|
||||
}
|
||||
if (lines != "") {
|
||||
lines = lines "\n";
|
||||
}
|
||||
lines = lines $0
|
||||
}
|
||||
|
||||
# Footer handling:
|
||||
# If the last block is considered a footer, splice in the Change-Id at the
|
||||
# right place.
|
||||
# Look for the right place to inject Change-Id by considering
|
||||
# CHANGE_ID_AFTER. Keys listed in it (case insensitive) come first,
|
||||
# then Change-Id, then everything else (eg. Signed-off-by:).
|
||||
#
|
||||
# Otherwise just print the last block, a new line and the Change-Id as a
|
||||
# block of its own.
|
||||
END {
|
||||
unprinted = 1
|
||||
if (isFooter == 0) {
|
||||
print lines "\n"
|
||||
lines = ""
|
||||
}
|
||||
changeIdAfter = "^(" tolower("'"$CHANGE_ID_AFTER"'") "):"
|
||||
numlines = split(lines, footer, "\n")
|
||||
for (line = 1; line <= numlines; line++) {
|
||||
if (unprinted && match(tolower(footer[line]), changeIdAfter) != 1) {
|
||||
unprinted = 0
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
print footer[line]
|
||||
}
|
||||
if (unprinted) {
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
}' "$MSG" > $T && mv $T "$MSG" || rm -f $T
|
||||
}
|
||||
_gen_ChangeIdInput() {
|
||||
echo "tree `git write-tree`"
|
||||
if parent=`git rev-parse "HEAD^0" 2>/dev/null`
|
||||
then
|
||||
echo "parent $parent"
|
||||
fi
|
||||
echo "author `git var GIT_AUTHOR_IDENT`"
|
||||
echo "committer `git var GIT_COMMITTER_IDENT`"
|
||||
echo
|
||||
printf '%s' "$clean_message"
|
||||
}
|
||||
_gen_ChangeId() {
|
||||
_gen_ChangeIdInput |
|
||||
git hash-object -t commit --stdin
|
||||
}
|
||||
|
||||
|
||||
add_ChangeId
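The Change-Id this hook produces is just a commit-style hash over the tree, optional parent, author/committer identities and the cleaned message. A rough Python 2 sketch of the same computation (assuming git is on PATH and the code runs inside a repository; this is an illustration, not part of the hook):

import subprocess

def gen_change_id(clean_message):
  # Assemble the same input _gen_ChangeIdInput produces, then hash it as a
  # commit object; the leading 'I' matches the Change-Id convention.
  lines = ['tree %s' % subprocess.check_output(['git', 'write-tree']).strip()]
  try:
    parent = subprocess.check_output(['git', 'rev-parse', 'HEAD^0']).strip()
    lines.append('parent %s' % parent)
  except subprocess.CalledProcessError:
    pass
  lines.append('author %s' % subprocess.check_output(['git', 'var', 'GIT_AUTHOR_IDENT']).strip())
  lines.append('committer %s' % subprocess.check_output(['git', 'var', 'GIT_COMMITTER_IDENT']).strip())
  payload = '\n'.join(lines) + '\n\n' + clean_message
  p = subprocess.Popen(['git', 'hash-object', '-t', 'commit', '--stdin'],
                       stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  out, _ = p.communicate(payload)
  return 'I' + out.strip()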
|
@ -38,6 +38,11 @@ elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
|
||||
grep -q "Currently drawing from 'AC Power'"
|
||||
then
|
||||
exit 0
|
||||
elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
|
||||
"$(find /sys/bus/acpi/drivers/battery/ -type l | wc -l)";
|
||||
then
|
||||
# No battery exists.
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Auto packing deferred; not on AC"
|
||||
|
main.py (252 lines changed)
@ -22,23 +22,31 @@ if __name__ == '__main__':
|
||||
del sys.argv[-1]
|
||||
del magic
|
||||
|
||||
import getpass
|
||||
import imp
|
||||
import netrc
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
import git_command
|
||||
from trace import SetTrace
|
||||
from git_command import git, GitCommand
|
||||
from git_config import init_ssh, close_ssh
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from command import PagedCommand
|
||||
from subcmds.version import Version
|
||||
from editor import Editor
|
||||
from error import DownloadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
from manifest import Manifest
|
||||
from manifest_xml import XmlManifest
|
||||
from pager import RunPager
|
||||
|
||||
from subcmds import all as all_commands
|
||||
from subcmds import all_commands
|
||||
|
||||
global_options = optparse.OptionParser(
|
||||
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
|
||||
@ -52,6 +60,9 @@ global_options.add_option('--no-pager',
|
||||
global_options.add_option('--trace',
|
||||
dest='trace', action='store_true',
|
||||
help='trace git command execution')
|
||||
global_options.add_option('--time',
|
||||
dest='time', action='store_true',
|
||||
help='time repo command execution')
|
||||
global_options.add_option('--version',
|
||||
dest='show_version', action='store_true',
|
||||
help='display this version of repo')
|
||||
@ -60,8 +71,11 @@ class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
self.repodir = repodir
|
||||
self.commands = all_commands
|
||||
# add 'branch' as an alias for 'branches'
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
||||
def _Run(self, argv):
|
||||
result = 0
|
||||
name = None
|
||||
glob = []
|
||||
|
||||
@ -76,16 +90,16 @@ class _Repo(object):
|
||||
glob = argv
|
||||
name = 'help'
|
||||
argv = []
|
||||
gopts, gargs = global_options.parse_args(glob)
|
||||
gopts, _gargs = global_options.parse_args(glob)
|
||||
|
||||
if gopts.trace:
|
||||
git_command.TRACE = True
|
||||
SetTrace()
|
||||
if gopts.show_version:
|
||||
if name == 'help':
|
||||
name = 'version'
|
||||
else:
|
||||
print >>sys.stderr, 'fatal: invalid usage of --version'
|
||||
sys.exit(1)
|
||||
return 1
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
@ -93,17 +107,19 @@ class _Repo(object):
|
||||
print >>sys.stderr,\
|
||||
"repo: '%s' is not a repo command. See 'repo help'."\
|
||||
% name
|
||||
sys.exit(1)
|
||||
return 1
|
||||
|
||||
cmd.repodir = self.repodir
|
||||
cmd.manifest = Manifest(cmd.repodir)
|
||||
cmd.manifest = XmlManifest(cmd.repodir)
|
||||
Editor.globalConfig = cmd.manifest.globalConfig
|
||||
|
||||
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
|
||||
print >>sys.stderr, \
|
||||
"fatal: '%s' requires a working directory"\
|
||||
% name
|
||||
sys.exit(1)
|
||||
return 1
|
||||
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
@ -112,44 +128,63 @@ class _Repo(object):
|
||||
else:
|
||||
use_pager = config.GetBoolean('pager.%s' % name)
|
||||
if use_pager is None:
|
||||
use_pager = isinstance(cmd, PagedCommand)
|
||||
use_pager = cmd.WantPager(copts)
|
||||
if use_pager:
|
||||
RunPager(config)
|
||||
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
try:
|
||||
cmd.Execute(copts, cargs)
|
||||
except ManifestInvalidRevisionError, e:
|
||||
start = time.time()
|
||||
try:
|
||||
result = cmd.Execute(copts, cargs)
|
||||
finally:
|
||||
elapsed = time.time() - start
|
||||
hours, remainder = divmod(elapsed, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
if gopts.time:
|
||||
if hours == 0:
|
||||
print >>sys.stderr, 'real\t%dm%.3fs' \
|
||||
% (minutes, seconds)
|
||||
else:
|
||||
print >>sys.stderr, 'real\t%dh%dm%.3fs' \
|
||||
% (hours, minutes, seconds)
|
||||
except DownloadError as e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
sys.exit(1)
|
||||
except NoSuchProjectError, e:
|
||||
return 1
|
||||
except ManifestInvalidRevisionError as e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
return 1
|
||||
except NoSuchProjectError as e:
|
||||
if e.name:
|
||||
print >>sys.stderr, 'error: project %s not found' % e.name
|
||||
else:
|
||||
print >>sys.stderr, 'error: no project in current directory'
|
||||
sys.exit(1)
|
||||
return 1
|
||||
|
||||
return result
|
||||
|
||||
def _MyRepoPath():
|
||||
return os.path.dirname(__file__)
|
||||
|
||||
def _MyWrapperPath():
|
||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
||||
|
||||
_wrapper_module = None
|
||||
def WrapperModule():
|
||||
global _wrapper_module
|
||||
if not _wrapper_module:
|
||||
_wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
|
||||
return _wrapper_module
|
||||
|
||||
def _CurrentWrapperVersion():
|
||||
VERSION = None
|
||||
pat = re.compile(r'^VERSION *=')
|
||||
fd = open(_MyWrapperPath())
|
||||
for line in fd:
|
||||
if pat.match(line):
|
||||
fd.close()
|
||||
exec line
|
||||
return VERSION
|
||||
raise NameError, 'No VERSION in repo script'
|
||||
return WrapperModule().VERSION
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
if not repo_path:
|
||||
repo_path = '~/bin/repo'
|
||||
|
||||
if not ver:
|
||||
print >>sys.stderr, 'no --wrapper-version argument'
|
||||
sys.exit(1)
|
||||
print >>sys.stderr, 'no --wrapper-version argument'
|
||||
sys.exit(1)
|
||||
|
||||
exp = _CurrentWrapperVersion()
|
||||
ver = tuple(map(lambda x: int(x), ver.split('.')))
|
||||
@ -175,10 +210,10 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path)
|
||||
|
||||
def _CheckRepoDir(dir):
|
||||
if not dir:
|
||||
print >>sys.stderr, 'no --repo-dir argument'
|
||||
sys.exit(1)
|
||||
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
print >>sys.stderr, 'no --repo-dir argument'
|
||||
sys.exit(1)
|
||||
|
||||
def _PruneOptions(argv, opt):
|
||||
i = 0
|
||||
@ -195,7 +230,140 @@ def _PruneOptions(argv, opt):
|
||||
continue
|
||||
i += 1
|
||||
|
||||
_user_agent = None
|
||||
|
||||
def _UserAgent():
|
||||
global _user_agent
|
||||
|
||||
if _user_agent is None:
|
||||
py_version = sys.version_info
|
||||
|
||||
os_name = sys.platform
|
||||
if os_name == 'linux2':
|
||||
os_name = 'Linux'
|
||||
elif os_name == 'win32':
|
||||
os_name = 'Win32'
|
||||
elif os_name == 'cygwin':
|
||||
os_name = 'Cygwin'
|
||||
elif os_name == 'darwin':
|
||||
os_name = 'Darwin'
|
||||
|
||||
p = GitCommand(
|
||||
None, ['describe', 'HEAD'],
|
||||
cwd = _MyRepoPath(),
|
||||
capture_stdout = True)
|
||||
if p.Wait() == 0:
|
||||
repo_version = p.stdout
|
||||
if len(repo_version) > 0 and repo_version[-1] == '\n':
|
||||
repo_version = repo_version[0:-1]
|
||||
if len(repo_version) > 0 and repo_version[0] == 'v':
|
||||
repo_version = repo_version[1:]
|
||||
else:
|
||||
repo_version = 'unknown'
|
||||
|
||||
_user_agent = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||
repo_version,
|
||||
os_name,
|
||||
'.'.join(map(lambda d: str(d), git.version_tuple())),
|
||||
py_version[0], py_version[1], py_version[2])
|
||||
return _user_agent
|
||||
|
||||
class _UserAgentHandler(urllib2.BaseHandler):
|
||||
def http_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
|
||||
def https_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
|
||||
def _AddPasswordFromUserInput(handler, msg, req):
|
||||
# If repo could not find auth info from netrc, try to get it from user input
|
||||
url = req.get_full_url()
|
||||
user, password = handler.passwd.find_user_password(None, url)
|
||||
if user is None:
|
||||
print msg
|
||||
try:
|
||||
user = raw_input('User: ')
|
||||
password = getpass.getpass()
|
||||
except KeyboardInterrupt:
|
||||
return
|
||||
handler.passwd.add_password(None, url, user, password)
|
||||
|
||||
class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib2.HTTPBasicAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib2.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||
self, authreq, host, req, headers)
|
||||
except:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
elif getattr(self, 'retried', None):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
class _DigestAuthHandler(urllib2.HTTPDigestAuthHandler):
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
_AddPasswordFromUserInput(self, msg, req)
|
||||
return urllib2.HTTPDigestAuthHandler.http_error_401(
|
||||
self, req, fp, code, msg, headers)
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
old_add_header = req.add_header
|
||||
def _add_header(name, val):
|
||||
val = val.replace('\n', '')
|
||||
old_add_header(name, val)
|
||||
req.add_header = _add_header
|
||||
return urllib2.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||
self, auth_header, host, req, headers)
|
||||
except:
|
||||
reset = getattr(self, 'reset_retry_count', None)
|
||||
if reset is not None:
|
||||
reset()
|
||||
elif getattr(self, 'retried', None):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
try:
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||
except netrc.NetrcParseError:
|
||||
pass
|
||||
except IOError:
|
||||
pass
|
||||
handlers.append(_BasicAuthHandler(mgr))
|
||||
handlers.append(_DigestAuthHandler(mgr))
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
|
||||
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||
handlers.append(urllib2.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
|
||||
urllib2.install_opener(urllib2.build_opener(*handlers))
|
||||
|
||||
def _Main(argv):
|
||||
result = 0
|
||||
|
||||
opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
|
||||
opt.add_option("--repo-dir", dest="repodir",
|
||||
help="path to .repo/")
|
||||
@ -209,22 +377,32 @@ def _Main(argv):
|
||||
_CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
|
||||
_CheckRepoDir(opt.repodir)
|
||||
|
||||
Version.wrapper_version = opt.wrapper_version
|
||||
Version.wrapper_path = opt.wrapper_path
|
||||
|
||||
repo = _Repo(opt.repodir)
|
||||
try:
|
||||
repo._Run(argv)
|
||||
try:
|
||||
init_ssh()
|
||||
init_http()
|
||||
result = repo._Run(argv) or 0
|
||||
finally:
|
||||
close_ssh()
|
||||
except KeyboardInterrupt:
|
||||
sys.exit(1)
|
||||
except RepoChangedException, rce:
|
||||
result = 1
|
||||
except RepoChangedException as rce:
|
||||
# If repo changed, re-exec ourselves.
|
||||
#
|
||||
argv = list(sys.argv)
|
||||
argv.extend(rce.extra_args)
|
||||
try:
|
||||
os.execv(__file__, argv)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
sys.exit(128)
|
||||
result = 128
|
||||
|
||||
sys.exit(result)
|
||||
|
||||
if __name__ == '__main__':
|
||||
_Main(sys.argv[1:])
|
||||
|
manifest.py (deleted, 420 lines)
@ -1,420 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import xml.dom.minidom
|
||||
|
||||
from git_config import GitConfig, IsId
|
||||
from project import Project, MetaProject, R_HEADS, HEAD
|
||||
from remote import Remote
|
||||
from error import ManifestParseError
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
|
||||
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
revision = None
|
||||
remote = None
|
||||
|
||||
|
||||
class Manifest(object):
|
||||
"""manages the repo configuration file"""
|
||||
|
||||
def __init__(self, repodir):
|
||||
self.repodir = os.path.abspath(repodir)
|
||||
self.topdir = os.path.dirname(self.repodir)
|
||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
||||
self.globalConfig = GitConfig.ForUser()
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
worktree = os.path.join(repodir, 'repo'))
|
||||
|
||||
self.manifestProject = MetaProject(self, 'manifests',
|
||||
gitdir = os.path.join(repodir, 'manifests.git'),
|
||||
worktree = os.path.join(repodir, 'manifests'))
|
||||
|
||||
self._Unload()
|
||||
|
||||
def Link(self, name):
|
||||
"""Update the repo metadata to use a different manifest.
|
||||
"""
|
||||
path = os.path.join(self.manifestProject.worktree, name)
|
||||
if not os.path.isfile(path):
|
||||
raise ManifestParseError('manifest %s not found' % name)
|
||||
|
||||
old = self.manifestFile
|
||||
try:
|
||||
self.manifestFile = path
|
||||
self._Unload()
|
||||
self._Load()
|
||||
finally:
|
||||
self.manifestFile = old
|
||||
|
||||
try:
|
||||
if os.path.exists(self.manifestFile):
|
||||
os.remove(self.manifestFile)
|
||||
os.symlink('manifests/%s' % name, self.manifestFile)
|
||||
except OSError, e:
|
||||
raise ManifestParseError('cannot link manifest %s' % name)
|
||||
|
||||
def _RemoteToXml(self, r, doc, root):
|
||||
e = doc.createElement('remote')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', r.name)
|
||||
e.setAttribute('fetch', r.fetchUrl)
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
if r.projectName is not None:
|
||||
e.setAttribute('project-name', r.projectName)
|
||||
|
||||
def Save(self, fd, peg_rev=False):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
"""
|
||||
doc = xml.dom.minidom.Document()
|
||||
root = doc.createElement('manifest')
|
||||
doc.appendChild(root)
|
||||
|
||||
d = self.default
|
||||
sort_remotes = list(self.remotes.keys())
|
||||
sort_remotes.sort()
|
||||
|
||||
for r in sort_remotes:
|
||||
self._RemoteToXml(self.remotes[r], doc, root)
|
||||
if self.remotes:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
have_default = False
|
||||
e = doc.createElement('default')
|
||||
if d.remote:
|
||||
have_default = True
|
||||
e.setAttribute('remote', d.remote.name)
|
||||
if d.revision:
|
||||
have_default = True
|
||||
e.setAttribute('revision', d.revision)
|
||||
if have_default:
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
sort_projects = list(self.projects.keys())
|
||||
sort_projects.sort()
|
||||
|
||||
for p in sort_projects:
|
||||
p = self.projects[p]
|
||||
e = doc.createElement('project')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', p.name)
|
||||
if p.relpath != p.name:
|
||||
e.setAttribute('path', p.relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
e.setAttribute('revision',
|
||||
p.bare_git.rev_parse(p.revision + '^0'))
|
||||
else:
|
||||
e.setAttribute('revision',
|
||||
p.work_git.rev_parse(HEAD + '^0'))
|
||||
elif not d.revision or p.revision != d.revision:
|
||||
e.setAttribute('revision', p.revision)
|
||||
|
||||
for r in p.extraRemotes:
|
||||
self._RemoteToXml(p.extraRemotes[r], doc, e)
|
||||
for c in p.copyfiles:
|
||||
ce = doc.createElement('copyfile')
|
||||
ce.setAttribute('src', c.src)
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
||||
@property
|
||||
def projects(self):
|
||||
self._Load()
|
||||
return self._projects
|
||||
|
||||
@property
|
||||
def remotes(self):
|
||||
self._Load()
|
||||
return self._remotes
|
||||
|
||||
@property
|
||||
def default(self):
|
||||
self._Load()
|
||||
return self._default
|
||||
|
||||
@property
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
def _Unload(self):
|
||||
self._loaded = False
|
||||
self._projects = {}
|
||||
self._remotes = {}
|
||||
self._default = None
|
||||
self.branch = None
|
||||
|
||||
def _Load(self):
|
||||
if not self._loaded:
|
||||
m = self.manifestProject
|
||||
b = m.GetBranch(m.CurrentBranch).merge
|
||||
if b.startswith(R_HEADS):
|
||||
b = b[len(R_HEADS):]
|
||||
self.branch = b
|
||||
|
||||
self._ParseManifest(True)
|
||||
|
||||
local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
|
||||
if os.path.exists(local):
|
||||
try:
|
||||
real = self.manifestFile
|
||||
self.manifestFile = local
|
||||
self._ParseManifest(False)
|
||||
finally:
|
||||
self.manifestFile = real
|
||||
|
||||
if self.IsMirror:
|
||||
self._AddMetaProjectMirror(self.repoProject)
|
||||
self._AddMetaProjectMirror(self.manifestProject)
|
||||
|
||||
self._loaded = True
|
||||
|
||||
def _ParseManifest(self, is_root_file):
|
||||
root = xml.dom.minidom.parse(self.manifestFile)
|
||||
if not root or not root.childNodes:
|
||||
raise ManifestParseError, \
|
||||
"no root node in %s" % \
|
||||
self.manifestFile
|
||||
|
||||
config = root.childNodes[0]
|
||||
if config.nodeName != 'manifest':
|
||||
raise ManifestParseError, \
|
||||
"no <manifest> in %s" % \
|
||||
self.manifestFile
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remove-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
raise ManifestParseError, \
|
||||
'project %s not found' % \
|
||||
(name)
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remote':
|
||||
remote = self._ParseRemote(node)
|
||||
if self._remotes.get(remote.name):
|
||||
raise ManifestParseError, \
|
||||
'duplicate remote %s in %s' % \
|
||||
(remote.name, self.manifestFile)
|
||||
self._remotes[remote.name] = remote
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'default':
|
||||
if self._default is not None:
|
||||
raise ManifestParseError, \
|
||||
'duplicate default in %s' % \
|
||||
(self.manifestFile)
|
||||
self._default = self._ParseDefault(node)
|
||||
if self._default is None:
|
||||
self._default = _Default()
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'project':
|
||||
project = self._ParseProject(node)
|
||||
if self._projects.get(project.name):
|
||||
raise ManifestParseError, \
|
||||
'duplicate project %s in %s' % \
|
||||
(project.name, self.manifestFile)
|
||||
self._projects[project.name] = project
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'add-remote':
|
||||
pn = self._reqatt(node, 'to-project')
|
||||
project = self._projects.get(pn)
|
||||
if not project:
|
||||
raise ManifestParseError, \
|
||||
'project %s not defined in %s' % \
|
||||
(pn, self.manifestFile)
|
||||
self._ParseProjectExtraRemote(project, node)
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
if m_url.endswith('/.git'):
|
||||
raise ManifestParseError, 'refusing to mirror %s' % m_url
|
||||
|
||||
if self._default and self._default.remote:
|
||||
url = self._default.remote.fetchUrl
|
||||
if not url.endswith('/'):
|
||||
url += '/'
|
||||
if m_url.startswith(url):
|
||||
remote = self._default.remote
|
||||
name = m_url[len(url):]
|
||||
|
||||
if name is None:
|
||||
s = m_url.rindex('/') + 1
|
||||
remote = Remote('origin', fetch = m_url[:s])
|
||||
name = m_url[s:]
|
||||
|
||||
if name.endswith('.git'):
|
||||
name = name[:-4]
|
||||
|
||||
if name not in self._projects:
|
||||
m.PreSync()
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote,
|
||||
gitdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = None,
|
||||
revision = m.revision)
|
||||
self._projects[project.name] = project
|
||||
|
||||
def _ParseRemote(self, node):
|
||||
"""
|
||||
reads a <remote> element from the manifest file
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
fetch = self._reqatt(node, 'fetch')
|
||||
review = node.getAttribute('review')
|
||||
if review == '':
|
||||
review = None
|
||||
|
||||
projectName = node.getAttribute('project-name')
|
||||
if projectName == '':
|
||||
projectName = None
|
||||
|
||||
r = Remote(name=name,
|
||||
fetch=fetch,
|
||||
review=review,
|
||||
projectName=projectName)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'require':
|
||||
r.requiredCommits.append(self._reqatt(n, 'commit'))
|
||||
|
||||
return r
|
||||
|
||||
def _ParseDefault(self, node):
|
||||
"""
|
||||
reads a <default> element from the manifest file
|
||||
"""
|
||||
d = _Default()
|
||||
d.remote = self._get_remote(node)
|
||||
d.revision = node.getAttribute('revision')
|
||||
if d.revision == '':
|
||||
d.revision = None
|
||||
return d
|
||||
|
||||
def _ParseProject(self, node):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
|
||||
remote = self._get_remote(node)
|
||||
if remote is None:
|
||||
remote = self._default.remote
|
||||
if remote is None:
|
||||
raise ManifestParseError, \
|
||||
"no remote for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
revision = node.getAttribute('revision')
|
||||
if not revision:
|
||||
revision = self._default.revision
|
||||
if not revision:
|
||||
raise ManifestParseError, \
|
||||
"no revision for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
path = node.getAttribute('path')
|
||||
if not path:
|
||||
path = name
|
||||
if path.startswith('/'):
|
||||
raise ManifestParseError, \
|
||||
"project %s path cannot be absolute in %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
if self.IsMirror:
|
||||
relpath = None
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path)
|
||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote,
|
||||
gitdir = gitdir,
|
||||
worktree = worktree,
|
||||
relpath = path,
|
||||
revision = revision)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'remote':
|
||||
self._ParseProjectExtraRemote(project, n)
|
||||
elif n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
|
||||
return project
|
||||
|
||||
def _ParseProjectExtraRemote(self, project, n):
|
||||
r = self._ParseRemote(n)
|
||||
if project.extraRemotes.get(r.name) \
|
||||
or project.remote.name == r.name:
|
||||
raise ManifestParseError, \
|
||||
'duplicate remote %s in project %s in %s' % \
|
||||
(r.name, project.name, self.manifestFile)
|
||||
project.extraRemotes[r.name] = r
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _get_remote(self, node):
|
||||
name = node.getAttribute('remote')
|
||||
if not name:
|
||||
return None
|
||||
|
||||
v = self._remotes.get(name)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"remote %s not defined in %s" % \
|
||||
(name, self.manifestFile)
|
||||
return v
|
||||
|
||||
def _reqatt(self, node, attname):
|
||||
"""
|
||||
reads a required attribute from the node.
|
||||
"""
|
||||
v = node.getAttribute(attname)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"no %s in <%s> within %s" % \
|
||||
(attname, node.nodeName, self.manifestFile)
|
||||
return v
|
manifest_xml.py (new normal file, 716 lines)
@ -0,0 +1,716 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urlparse
|
||||
import xml.dom.minidom
|
||||
|
||||
from git_config import GitConfig
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
|
||||
urlparse.uses_relative.extend(['ssh', 'git'])
|
||||
urlparse.uses_netloc.extend(['ssh', 'git'])
|
||||
|
||||
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
revisionExpr = None
|
||||
remote = None
|
||||
sync_j = 1
|
||||
sync_c = False
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
alias=None,
|
||||
fetch=None,
|
||||
manifestUrl=None,
|
||||
review=None):
|
||||
self.name = name
|
||||
self.fetchUrl = fetch
|
||||
self.manifestUrl = manifestUrl
|
||||
self.remoteAlias = alias
|
||||
self.reviewUrl = review
|
||||
self.resolvedFetchUrl = self._resolveFetchUrl()
|
||||
|
||||
def _resolveFetchUrl(self):
|
||||
url = self.fetchUrl.rstrip('/')
|
||||
manifestUrl = self.manifestUrl.rstrip('/')
|
||||
# urljoin will get confused if there is no scheme in the base url
|
||||
# ie, if manifestUrl is of the form <hostname:port>
|
||||
if manifestUrl.find(':') != manifestUrl.find('/') - 1:
|
||||
manifestUrl = 'gopher://' + manifestUrl
|
||||
url = urlparse.urljoin(manifestUrl, url)
|
||||
return re.sub(r'^gopher://', '', url)
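The temporary gopher:// prefix above exists only so urljoin treats a scheme-less host:path manifest URL as absolute. A quick standalone demonstration; the manifest and fetch values are invented examples:

import re
import urlparse  # Python 2 module, as imported by this file

urlparse.uses_relative.extend(['ssh', 'git'])
urlparse.uses_netloc.extend(['ssh', 'git'])

def resolve_fetch(fetch, manifest_url):
  # Same trick as _resolveFetchUrl: bolt a fake scheme onto host:path style
  # URLs so urljoin can resolve relative fetch paths, then strip it again.
  fetch = fetch.rstrip('/')
  manifest_url = manifest_url.rstrip('/')
  if manifest_url.find(':') != manifest_url.find('/') - 1:
    manifest_url = 'gopher://' + manifest_url
  return re.sub(r'^gopher://', '', urlparse.urljoin(manifest_url, fetch))

print(resolve_fetch('..', 'ssh://gerrit.example.com:29418/platform/manifest'))
print(resolve_fetch('..', 'gerrit.example.com:tools/manifest'))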
|
||||
|
||||
def ToRemoteSpec(self, projectName):
|
||||
url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
|
||||
remoteName = self.name
|
||||
if self.remoteAlias:
|
||||
remoteName = self.remoteAlias
|
||||
return RemoteSpec(remoteName, url, self.reviewUrl)
|
||||
|
||||
class XmlManifest(object):
|
||||
"""manages the repo configuration file"""
|
||||
|
||||
def __init__(self, repodir):
|
||||
self.repodir = os.path.abspath(repodir)
|
||||
self.topdir = os.path.dirname(self.repodir)
|
||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
||||
self.globalConfig = GitConfig.ForUser()
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
worktree = os.path.join(repodir, 'repo'))
|
||||
|
||||
self.manifestProject = MetaProject(self, 'manifests',
|
||||
gitdir = os.path.join(repodir, 'manifests.git'),
|
||||
worktree = os.path.join(repodir, 'manifests'))
|
||||
|
||||
self._Unload()
|
||||
|
||||
def Override(self, name):
|
||||
"""Use a different manifest, just for the current instantiation.
|
||||
"""
|
||||
path = os.path.join(self.manifestProject.worktree, name)
|
||||
if not os.path.isfile(path):
|
||||
raise ManifestParseError('manifest %s not found' % name)
|
||||
|
||||
old = self.manifestFile
|
||||
try:
|
||||
self.manifestFile = path
|
||||
self._Unload()
|
||||
self._Load()
|
||||
finally:
|
||||
self.manifestFile = old
|
||||
|
||||
def Link(self, name):
|
||||
"""Update the repo metadata to use a different manifest.
|
||||
"""
|
||||
self.Override(name)
|
||||
|
||||
try:
|
||||
if os.path.exists(self.manifestFile):
|
||||
os.remove(self.manifestFile)
|
||||
os.symlink('manifests/%s' % name, self.manifestFile)
|
||||
except OSError:
|
||||
raise ManifestParseError('cannot link manifest %s' % name)
|
||||
|
||||
def _RemoteToXml(self, r, doc, root):
|
||||
e = doc.createElement('remote')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', r.name)
|
||||
e.setAttribute('fetch', r.fetchUrl)
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
|
||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
"""
|
||||
mp = self.manifestProject
|
||||
|
||||
groups = mp.config.GetString('manifest.groups')
|
||||
if not groups:
|
||||
groups = 'all'
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
doc = xml.dom.minidom.Document()
|
||||
root = doc.createElement('manifest')
|
||||
doc.appendChild(root)
|
||||
|
||||
# Save out the notice. There's a little bit of work here to give it the
|
||||
# right whitespace, which assumes that the notice is automatically indented
|
||||
# by 4 by minidom.
|
||||
if self.notice:
|
||||
notice_element = root.appendChild(doc.createElement('notice'))
|
||||
notice_lines = self.notice.splitlines()
|
||||
indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
|
||||
notice_element.appendChild(doc.createTextNode(indented_notice))
|
||||
|
||||
d = self.default
|
||||
sort_remotes = list(self.remotes.keys())
|
||||
sort_remotes.sort()
|
||||
|
||||
for r in sort_remotes:
|
||||
self._RemoteToXml(self.remotes[r], doc, root)
|
||||
if self.remotes:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
have_default = False
|
||||
e = doc.createElement('default')
|
||||
if d.remote:
|
||||
have_default = True
|
||||
e.setAttribute('remote', d.remote.name)
|
||||
if d.revisionExpr:
|
||||
have_default = True
|
||||
e.setAttribute('revision', d.revisionExpr)
|
||||
if d.sync_j > 1:
|
||||
have_default = True
|
||||
e.setAttribute('sync-j', '%d' % d.sync_j)
|
||||
if d.sync_c:
|
||||
have_default = True
|
||||
e.setAttribute('sync-c', 'true')
|
||||
if have_default:
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
if self._manifest_server:
|
||||
e = doc.createElement('manifest-server')
|
||||
e.setAttribute('url', self._manifest_server)
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
def output_projects(parent, parent_node, projects):
|
||||
for p in projects:
|
||||
output_project(parent, parent_node, self.projects[p])
|
||||
|
||||
def output_project(parent, parent_node, p):
|
||||
if not p.MatchesGroups(groups):
|
||||
return
|
||||
|
||||
name = p.name
|
||||
relpath = p.relpath
|
||||
if parent:
|
||||
name = self._UnjoinName(parent.name, name)
|
||||
relpath = self._UnjoinRelpath(parent.relpath, relpath)
|
||||
|
||||
e = doc.createElement('project')
|
||||
parent_node.appendChild(e)
|
||||
e.setAttribute('name', name)
|
||||
if relpath != name:
|
||||
e.setAttribute('path', relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
|
||||
else:
|
||||
value = p.work_git.rev_parse(HEAD + '^0')
|
||||
e.setAttribute('revision', value)
|
||||
if peg_rev_upstream and value != p.revisionExpr:
|
||||
# Only save the origin if the origin is not a sha1, and the default
|
||||
# isn't our value, and if the default doesn't already have that
|
||||
# covered.
|
||||
e.setAttribute('upstream', p.revisionExpr)
|
||||
elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
|
||||
e.setAttribute('revision', p.revisionExpr)
|
||||
|
||||
for c in p.copyfiles:
|
||||
ce = doc.createElement('copyfile')
|
||||
ce.setAttribute('src', c.src)
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
|
||||
egroups = [g for g in p.groups if g not in default_groups]
|
||||
if egroups:
|
||||
e.setAttribute('groups', ','.join(egroups))
|
||||
|
||||
for a in p.annotations:
|
||||
if a.keep == "true":
|
||||
ae = doc.createElement('annotation')
|
||||
ae.setAttribute('name', a.name)
|
||||
ae.setAttribute('value', a.value)
|
||||
e.appendChild(ae)
|
||||
|
||||
if p.sync_c:
|
||||
e.setAttribute('sync-c', 'true')
|
||||
|
||||
if p.subprojects:
|
||||
sort_projects = [subp.name for subp in p.subprojects]
|
||||
sort_projects.sort()
|
||||
output_projects(p, e, sort_projects)
|
||||
|
||||
sort_projects = [key for key in self.projects.keys()
|
||||
if not self.projects[key].parent]
|
||||
sort_projects.sort()
|
||||
output_projects(None, root, sort_projects)
|
||||
|
||||
if self._repo_hooks_project:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
e = doc.createElement('repo-hooks')
|
||||
e.setAttribute('in-project', self._repo_hooks_project.name)
|
||||
e.setAttribute('enabled-list',
|
||||
' '.join(self._repo_hooks_project.enabled_repo_hooks))
|
||||
root.appendChild(e)
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
||||
@property
|
||||
def projects(self):
|
||||
self._Load()
|
||||
return self._projects
|
||||
|
||||
@property
|
||||
def remotes(self):
|
||||
self._Load()
|
||||
return self._remotes
|
||||
|
||||
@property
|
||||
def default(self):
|
||||
self._Load()
|
||||
return self._default
|
||||
|
||||
@property
|
||||
def repo_hooks_project(self):
|
||||
self._Load()
|
||||
return self._repo_hooks_project
|
||||
|
||||
@property
|
||||
def notice(self):
|
||||
self._Load()
|
||||
return self._notice
|
||||
|
||||
@property
|
||||
def manifest_server(self):
|
||||
self._Load()
|
||||
return self._manifest_server
|
||||
|
||||
@property
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
def _Unload(self):
|
||||
self._loaded = False
|
||||
self._projects = {}
|
||||
self._remotes = {}
|
||||
self._default = None
|
||||
self._repo_hooks_project = None
|
||||
self._notice = None
|
||||
self.branch = None
|
||||
self._manifest_server = None
|
||||
|
||||
def _Load(self):
|
||||
if not self._loaded:
|
||||
m = self.manifestProject
|
||||
b = m.GetBranch(m.CurrentBranch).merge
|
||||
if b is not None and b.startswith(R_HEADS):
|
||||
b = b[len(R_HEADS):]
|
||||
self.branch = b
|
||||
|
||||
nodes = []
|
||||
nodes.append(self._ParseManifestXml(self.manifestFile,
|
||||
self.manifestProject.worktree))
|
||||
|
||||
local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
|
||||
if os.path.exists(local):
|
||||
nodes.append(self._ParseManifestXml(local, self.repodir))
|
||||
|
||||
self._ParseManifest(nodes)
|
||||
|
||||
if self.IsMirror:
|
||||
self._AddMetaProjectMirror(self.repoProject)
|
||||
self._AddMetaProjectMirror(self.manifestProject)
|
||||
|
||||
self._loaded = True
|
||||
|
||||
def _ParseManifestXml(self, path, include_root):
|
||||
root = xml.dom.minidom.parse(path)
|
||||
if not root or not root.childNodes:
|
||||
raise ManifestParseError("no root node in %s" % (path,))
|
||||
|
||||
for manifest in root.childNodes:
|
||||
if manifest.nodeName == 'manifest':
|
||||
break
|
||||
else:
|
||||
raise ManifestParseError("no <manifest> in %s" % (path,))
|
||||
|
||||
nodes = []
|
||||
for node in manifest.childNodes: # pylint:disable=W0631
|
||||
# We only get here if manifest is initialised
|
||||
if node.nodeName == 'include':
|
||||
name = self._reqatt(node, 'name')
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError, \
|
||||
"include %s doesn't exist or isn't a file" % \
|
||||
(name,)
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
# tricky. actual parsing implementation may vary.
|
||||
except (KeyboardInterrupt, RuntimeError, SystemExit):
|
||||
raise
|
||||
except Exception as e:
|
||||
raise ManifestParseError(
|
||||
"failed parsing included manifest %s: %s", (name, e))
|
||||
else:
|
||||
nodes.append(node)
|
||||
return nodes
|
||||
|
||||
def _ParseManifest(self, node_list):
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'remote':
|
||||
remote = self._ParseRemote(node)
|
||||
if self._remotes.get(remote.name):
|
||||
raise ManifestParseError(
|
||||
'duplicate remote %s in %s' %
|
||||
(remote.name, self.manifestFile))
|
||||
self._remotes[remote.name] = remote
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'default':
|
||||
if self._default is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
self._default = self._ParseDefault(node)
|
||||
if self._default is None:
|
||||
self._default = _Default()
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'notice':
|
||||
if self._notice is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate notice in %s' %
|
||||
(self.manifestFile))
|
||||
self._notice = self._ParseNotice(node)
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'manifest-server':
|
||||
url = self._reqatt(node, 'url')
|
||||
if self._manifest_server is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate manifest-server in %s' %
|
||||
(self.manifestFile))
|
||||
self._manifest_server = url
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'project':
|
||||
project = self._ParseProject(node)
|
||||
def recursively_add_projects(project):
|
||||
if self._projects.get(project.name):
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
for subproject in project.subprojects:
|
||||
recursively_add_projects(subproject)
|
||||
recursively_add_projects(project)
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
enabled_repo_hooks = self._reqatt(node, 'enabled-list').split()
|
||||
|
||||
# Only one project can be the hooks project
|
||||
if self._repo_hooks_project is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate repo-hooks in %s' %
|
||||
(self.manifestFile))
|
||||
|
||||
# Store a reference to the Project.
|
||||
try:
|
||||
self._repo_hooks_project = self._projects[repo_hooks_project]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found for repo-hooks' %
|
||||
(repo_hooks_project))
|
||||
|
||||
# Store the enabled hooks in the Project object.
|
||||
self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
|
||||
if node.nodeName == 'remove-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found' %
|
||||
(name))
|
||||
|
||||
# If the manifest removes the hooks project, treat it as if it deleted
|
||||
# the repo-hooks element too.
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
self._repo_hooks_project = None
|
||||
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
if m_url.endswith('/.git'):
|
||||
raise ManifestParseError, 'refusing to mirror %s' % m_url
|
||||
|
||||
if self._default and self._default.remote:
|
||||
url = self._default.remote.resolvedFetchUrl
|
||||
if not url.endswith('/'):
|
||||
url += '/'
|
||||
if m_url.startswith(url):
|
||||
remote = self._default.remote
|
||||
name = m_url[len(url):]
|
||||
|
||||
if name is None:
|
||||
s = m_url.rindex('/') + 1
|
||||
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
|
||||
remote = _XmlRemote('origin', fetch=m_url[:s], manifestUrl=manifestUrl)
|
||||
name = m_url[s:]
|
||||
|
||||
if name.endswith('.git'):
|
||||
name = name[:-4]
|
||||
|
||||
if name not in self._projects:
|
||||
m.PreSync()
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = None,
|
||||
revisionExpr = m.revisionExpr,
|
||||
revisionId = None)
|
||||
self._projects[project.name] = project
|
||||
|
||||
def _ParseRemote(self, node):
|
||||
"""
|
||||
reads a <remote> element from the manifest file
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
alias = node.getAttribute('alias')
|
||||
if alias == '':
|
||||
alias = None
|
||||
fetch = self._reqatt(node, 'fetch')
|
||||
review = node.getAttribute('review')
|
||||
if review == '':
|
||||
review = None
|
||||
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
|
||||
return _XmlRemote(name, alias, fetch, manifestUrl, review)
|
||||
|
||||
def _ParseDefault(self, node):
|
||||
"""
|
||||
reads a <default> element from the manifest file
|
||||
"""
|
||||
d = _Default()
|
||||
d.remote = self._get_remote(node)
|
||||
d.revisionExpr = node.getAttribute('revision')
|
||||
if d.revisionExpr == '':
|
||||
d.revisionExpr = None
|
||||
|
||||
sync_j = node.getAttribute('sync-j')
|
||||
if sync_j == '' or sync_j is None:
|
||||
d.sync_j = 1
|
||||
else:
|
||||
d.sync_j = int(sync_j)
|
||||
|
||||
sync_c = node.getAttribute('sync-c')
|
||||
if not sync_c:
|
||||
d.sync_c = False
|
||||
else:
|
||||
d.sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
return d
|
||||
|
||||
def _ParseNotice(self, node):
|
||||
"""
|
||||
reads a <notice> element from the manifest file
|
||||
|
||||
The <notice> element is distinct from other tags in the XML in that the
|
||||
data is conveyed between the start and end tag (it's not an empty-element
|
||||
tag).
|
||||
|
||||
The white space (carriage returns, indentation) for the notice element is
|
||||
relevant and is parsed in a way that is based on how python docstrings work.
|
||||
In fact, the code is remarkably similar to here:
|
||||
http://www.python.org/dev/peps/pep-0257/
|
||||
"""
|
||||
# Get the data out of the node...
|
||||
notice = node.childNodes[0].data
|
||||
|
||||
# Figure out minimum indentation, skipping the first line (the same line
|
||||
# as the <notice> tag)...
|
||||
minIndent = sys.maxint
|
||||
lines = notice.splitlines()
|
||||
for line in lines[1:]:
|
||||
lstrippedLine = line.lstrip()
|
||||
if lstrippedLine:
|
||||
indent = len(line) - len(lstrippedLine)
|
||||
minIndent = min(indent, minIndent)
|
||||
|
||||
# Strip leading / trailing blank lines and also indentation.
|
||||
cleanLines = [lines[0].strip()]
|
||||
for line in lines[1:]:
|
||||
cleanLines.append(line[minIndent:].rstrip())
|
||||
|
||||
# Clear completely blank lines from front and back...
|
||||
while cleanLines and not cleanLines[0]:
|
||||
del cleanLines[0]
|
||||
while cleanLines and not cleanLines[-1]:
|
||||
del cleanLines[-1]
|
||||
|
||||
return '\n'.join(cleanLines)
|
||||
|
||||
def _JoinName(self, parent_name, name):
|
||||
return os.path.join(parent_name, name)
|
||||
|
||||
def _UnjoinName(self, parent_name, name):
|
||||
return os.path.relpath(name, parent_name)
|
||||
|
||||
def _ParseProject(self, node, parent = None):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
if parent:
|
||||
name = self._JoinName(parent.name, name)
|
||||
|
||||
remote = self._get_remote(node)
|
||||
if remote is None:
|
||||
remote = self._default.remote
|
||||
if remote is None:
|
||||
raise ManifestParseError, \
|
||||
"no remote for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
revisionExpr = node.getAttribute('revision')
|
||||
if not revisionExpr:
|
||||
revisionExpr = self._default.revisionExpr
|
||||
if not revisionExpr:
|
||||
raise ManifestParseError, \
|
||||
"no revision for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
path = node.getAttribute('path')
|
||||
if not path:
|
||||
path = name
|
||||
if path.startswith('/'):
|
||||
raise ManifestParseError, \
|
||||
"project %s path cannot be absolute in %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
rebase = node.getAttribute('rebase')
|
||||
if not rebase:
|
||||
rebase = True
|
||||
else:
|
||||
rebase = rebase.lower() in ("yes", "true", "1")
|
||||
|
||||
sync_c = node.getAttribute('sync-c')
|
||||
if not sync_c:
|
||||
sync_c = False
|
||||
else:
|
||||
sync_c = sync_c.lower() in ("yes", "true", "1")
|
||||
|
||||
upstream = node.getAttribute('upstream')
|
||||
|
||||
groups = ''
|
||||
if node.hasAttribute('groups'):
|
||||
groups = node.getAttribute('groups')
|
||||
groups = [x for x in re.split('[,\s]+', groups) if x]
|
||||
|
||||
if parent is None:
|
||||
relpath, worktree, gitdir = self.GetProjectPaths(name, path)
|
||||
else:
|
||||
relpath, worktree, gitdir = self.GetSubprojectPaths(parent, path)
|
||||
|
||||
default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
revisionId = None,
|
||||
rebase = rebase,
|
||||
groups = groups,
|
||||
sync_c = sync_c,
|
||||
upstream = upstream,
|
||||
parent = parent)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
project.subprojects.append(self._ParseProject(n, parent = project))
|
||||
|
||||
return project
|
||||
|
||||
def GetProjectPaths(self, name, path):
|
||||
relpath = path
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects', '%s.git' % path)
|
||||
return relpath, worktree, gitdir
|
||||
|
||||
def GetSubprojectName(self, parent, submodule_path):
|
||||
return os.path.join(parent.name, submodule_path)
|
||||
|
||||
def _JoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.join(parent_relpath, relpath)
|
||||
|
||||
def _UnjoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.relpath(relpath, parent_relpath)
|
||||
|
||||
def GetSubprojectPaths(self, parent, path):
|
||||
relpath = self._JoinRelpath(parent.relpath, path)
|
||||
gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
else:
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _ParseAnnotation(self, project, node):
|
||||
name = self._reqatt(node, 'name')
|
||||
value = self._reqatt(node, 'value')
|
||||
try:
|
||||
keep = self._reqatt(node, 'keep').lower()
|
||||
except ManifestParseError:
|
||||
keep = "true"
|
||||
if keep != "true" and keep != "false":
|
||||
raise ManifestParseError, "optional \"keep\" attribute must be \"true\" or \"false\""
|
||||
project.AddAnnotation(name, value, keep)
|
||||
|
||||
def _get_remote(self, node):
|
||||
name = node.getAttribute('remote')
|
||||
if not name:
|
||||
return None
|
||||
|
||||
v = self._remotes.get(name)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"remote %s not defined in %s" % \
|
||||
(name, self.manifestFile)
|
||||
return v
|
||||
|
||||
def _reqatt(self, node, attname):
|
||||
"""
|
||||
reads a required attribute from the node.
|
||||
"""
|
||||
v = node.getAttribute(attname)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"no %s in <%s> within %s" % \
|
||||
(attname, node.nodeName, self.manifestFile)
|
||||
return v
|
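For reference, a minimal sketch of the kind of manifest the parser above consumes. The remote, project, and annotation values below are invented for illustration; only the element and attribute names come from the code above.

import xml.dom.minidom

# Hypothetical manifest text; element and attribute names match what
# _ParseRemote, _ParseDefault and _ParseProject read above.
MANIFEST = """<manifest>
  <remote name="origin" fetch="https://example.com/" review="review.example.com"/>
  <default remote="origin" revision="master" sync-j="4"/>
  <project name="tools/demo" path="demo" groups="tools">
    <copyfile src="README" dest="README.demo"/>
    <annotation name="OWNER" value="build-team" keep="true"/>
  </project>
</manifest>"""

root = xml.dom.minidom.parseString(MANIFEST)
manifest = root.childNodes[0]
for node in manifest.childNodes:
  if node.nodeName == 'project':
    # Same attributes _ParseProject reads from each <project> element.
    print node.getAttribute('name'), node.getAttribute('path'), \
          node.getAttribute('groups')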
pager.py (6 lines changed)

@@ -50,7 +50,7 @@ def RunPager(globalConfig):
    _BecomePager(pager)
  except Exception:
    print >>sys.stderr, "fatal: cannot start pager '%s'" % pager
-    os.exit(255)
+    sys.exit(255)

def _SelectPager(globalConfig):
  try:
@@ -74,11 +74,11 @@ def _BecomePager(pager):
  # ready works around a long-standing bug in popularly
  # available versions of 'less', a better 'more'.
  #
-  a, b, c = select.select([0], [], [0])
+  _a, _b, _c = select.select([0], [], [0])

  os.environ['LESS'] = 'FRSX'

  try:
    os.execvp(pager, [pager])
-  except OSError, e:
+  except OSError:
    os.execv('/bin/sh', ['sh', '-c', pager])
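The pager hunks above show only fragments of _BecomePager; a rough, self-contained sketch of the flow they patch, assuming the post-change behaviour (the structure here is illustrative, not the file's exact text):

import os
import select

def _BecomePager(pager):
  # Wait until there is data on stdin before handing the terminal to the
  # pager; this works around the long-standing 'less' bug the hunk mentions.
  _a, _b, _c = select.select([0], [], [0])

  os.environ['LESS'] = 'FRSX'

  try:
    os.execvp(pager, [pager])
  except OSError:
    # Let the shell resolve the pager command as a fallback.
    os.execv('/bin/sh', ['sh', '-c', pager])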
progress.py (34 lines changed)

@@ -13,18 +13,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
from time import time
from trace import IsTrace

_NOT_TTY = not os.isatty(2)

class Progress(object):
-  def __init__(self, title, total=0):
+  def __init__(self, title, total=0, units=''):
    self._title = title
    self._total = total
    self._done = 0
    self._lastp = -1
    self._start = time()
    self._show = False
    self._units = units

  def update(self, inc=1):
    self._done += inc

    if _NOT_TTY or IsTrace():
      return

    if not self._show:
      if 0.5 <= time() - self._start:
        self._show = True
      else:
        return

    if self._total <= 0:
      sys.stderr.write('\r%s: %d, ' % (
        self._title,
@@ -35,14 +52,17 @@ class Progress(object):

    if self._lastp != p:
      self._lastp = p
-      sys.stderr.write('\r%s: %3d%% (%d/%d) ' % (
+      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s) ' % (
        self._title,
        p,
-        self._done,
-        self._total))
+        self._done, self._units,
+        self._total, self._units))
      sys.stderr.flush()

  def end(self):
    if _NOT_TTY or IsTrace() or not self._show:
      return

    if self._total <= 0:
      sys.stderr.write('\r%s: %d, done.  \n' % (
        self._title,
@@ -50,9 +70,9 @@ class Progress(object):
      sys.stderr.flush()
    else:
      p = (100 * self._done) / self._total
-      sys.stderr.write('\r%s: %3d%% (%d/%d), done.  \n' % (
+      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done.  \n' % (
        self._title,
        p,
-        self._done,
-        self._total))
+        self._done, self._units,
+        self._total, self._units))
      sys.stderr.flush()
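The units argument added to Progress above only changes how the counters are rendered. A short usage sketch, assuming repo's progress module is importable and using made-up totals:

from progress import Progress

pm = Progress('Syncing work tree', 3, units='MB')
for _i in range(3):
  # after ~0.5s on a tty this prints e.g. 'Syncing work tree:  66% (2MB/3MB)'
  pm.update()
pm.end()  # reprints the final count with ', done.' appended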
project.py (1673 lines changed)
File diff suppressed because it is too large
repo (244 lines changed)
@ -2,7 +2,7 @@
|
||||
|
||||
## repo default configuration
|
||||
##
|
||||
REPO_URL='git://android.git.kernel.org/tools/repo.git'
|
||||
REPO_URL='https://gerrit.googlesource.com/git-repo'
|
||||
REPO_REV='stable'
|
||||
|
||||
# Copyright (C) 2008 Google Inc.
|
||||
@ -28,10 +28,10 @@ if __name__ == '__main__':
|
||||
del magic
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 8)
|
||||
VERSION = (1, 18)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1,0)
|
||||
KEYRING_VERSION = (1,1)
|
||||
MAINTAINER_KEYS = """
|
||||
|
||||
Repo Maintainer <repo@android.kernel.org>
|
||||
@ -74,13 +74,45 @@ HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W
|
||||
zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp
|
||||
TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
|
||||
=CMiZ
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
Conley Owens <cco3@android.com>
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: GnuPG v1.4.11 (GNU/Linux)
|
||||
|
||||
mQENBFBiLPwBCACvISTASOgFXwADw2GYRH2I2z9RvYkYoZ6ThTTNlMXbbYYKO2Wo
|
||||
a9LQDNW0TbCEekg5UKk0FD13XOdWaqUt4Gtuvq9c43GRSjMO6NXH+0BjcQ8vUtY2
|
||||
/W4CYUevwdo4nQ1+1zsOCu1XYe/CReXq0fdugv3hgmRmh3sz1soo37Q44W2frxxg
|
||||
U7Rz3Da4FjgAL0RQ8qndD+LwRHXTY7H7wYM8V/3cYFZV7pSodd75q3MAXYQLf0ZV
|
||||
QR1XATu5l1QnXrxgHvz7MmDwb1D+jX3YPKnZveaukigQ6hDHdiVcePBiGXmk8LZC
|
||||
2jQkdXeF7Su1ZYpr2nnEHLJ6vOLcCpPGb8gDABEBAAG0H0NvbmxleSBPd2VucyA8
|
||||
Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlBiLPwCGwMGCwkIBwMCBhUIAgkK
|
||||
CwQWAgMBAh4BAheAAAoJEBkmlFUziHGkHVkH/2Hks2Cif5i2xPtv2IFZcjL42joU
|
||||
T7lO5XFqUYS9ZNHpGa/V0eiPt7rHoO16glR83NZtwlrq2cSN89i9HfOhMYV/qLu8
|
||||
fLCHcV2muw+yCB5s5bxnI5UkToiNZyBNqFkcOt/Kbj9Hpy68A1kmc6myVEaUYebq
|
||||
2Chx/f3xuEthan099t746v1K+/6SvQGDNctHuaMr9cWdxZtHjdRf31SQRc99Phe5
|
||||
w+ZGR/ebxNDKRK9mKgZT8wVFHlXerJsRqWIqtx1fsW1UgLgbpcpe2MChm6B5wTu0
|
||||
s1ltzox3l4q71FyRRPUJxXyvGkDLZWpK7EpiHSCOYq/KP3HkKeXU3xqHpcG5AQ0E
|
||||
UGIs/AEIAKzO/7lO9cB6dshmZYo8Vy/b7aGicThE+ChcDSfhvyOXVdEM2GKAjsR+
|
||||
rlBWbTFX3It301p2HwZPFEi9nEvJxVlqqBiW0bPmNMkDRR55l2vbWg35wwkg6RyE
|
||||
Bc5/TQjhXI2w8IvlimoGoUff4t3JmMOnWrnKSvL+5iuRj12p9WmanCHzw3Ee7ztf
|
||||
/aU/q+FTpr3DLerb6S8xbv86ySgnJT6o5CyL2DCWRtnYQyGVi0ZmLzEouAYiO0hs
|
||||
z0AAu28Mj+12g2WwePRz6gfM9rHtI37ylYW3oT/9M9mO9ei/Bc/1D7Dz6qNV+0vg
|
||||
uSVJxM2Bl6GalHPZLhHntFEdIA6EdoUAEQEAAYkBHwQYAQIACQUCUGIs/AIbDAAK
|
||||
CRAZJpRVM4hxpNfkB/0W/hP5WK/NETXBlWXXW7JPaWO2c5kGwD0lnj5RRmridyo1
|
||||
vbm5PdM91jOsDQYqRu6YOoYBnDnEhB2wL2bPh34HWwwrA+LwB8hlcAV2z1bdwyfl
|
||||
3R823fReKN3QcvLHzmvZPrF4Rk97M9UIyKS0RtnfTWykRgDWHIsrtQPoNwsXrWoT
|
||||
9LrM2v+1+9mp3vuXnE473/NHxmiWEQH9Ez+O/mOxQ7rSOlqGRiKq/IBZCfioJOtV
|
||||
fTQeIu/yASZnsLBqr6SJEGwYBoWcyjG++k4fyw8ocOAo4uGDYbxgN7yYfNQ0OH7o
|
||||
V6pfUgqKLWa/aK7/N1ZHnPdFLD8Xt0Dmy4BPwrKC
|
||||
=O7am
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
"""
|
||||
|
||||
GIT = 'git' # our git command
|
||||
MIN_GIT_VERSION = (1, 5, 4) # minimum supported git version
|
||||
repodir = '.repo' # name of repo's private directory
|
||||
S_repo = 'repo' # special repo reposiory
|
||||
S_repo = 'repo' # special repo repository
|
||||
S_manifests = 'manifests' # special manifest repository
|
||||
REPO_MAIN = S_repo + '/main.py' # main script
|
||||
|
||||
@ -88,9 +120,9 @@ REPO_MAIN = S_repo + '/main.py' # main script
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import readline
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib2
|
||||
|
||||
home_dot_repo = os.path.expanduser('~/.repoconfig')
|
||||
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
@ -118,9 +150,25 @@ group.add_option('-m', '--manifest-name',
|
||||
group.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forrest')
|
||||
group.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
group.add_option('-g', '--groups',
|
||||
dest='groups', default='default',
|
||||
help='restrict manifest projects to ones with a specified group',
|
||||
metavar='GROUP')
|
||||
group.add_option('-p', '--platform',
|
||||
dest='platform', default="auto",
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
|
||||
|
||||
# Tool
|
||||
group = init_optparse.add_option_group('Version options')
|
||||
group = init_optparse.add_option_group('repo Version options')
|
||||
group.add_option('--repo-url',
|
||||
dest='repo_url',
|
||||
help='repo repository location', metavar='URL')
|
||||
@ -131,6 +179,11 @@ group.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
|
||||
# Other
|
||||
group = init_optparse.add_option_group('Other options')
|
||||
group.add_option('--config-name',
|
||||
dest='config_name', action="store_true", default=False,
|
||||
help='Always prompt for name/e-mail')
|
||||
|
||||
class CloneFailure(Exception):
|
||||
"""Indicate the remote clone of repo itself failed.
|
||||
@ -141,7 +194,7 @@ def _Init(args):
|
||||
"""Installs repo by cloning it over the network.
|
||||
"""
|
||||
opt, args = init_optparse.parse_args(args)
|
||||
if args or not opt.manifest_url:
|
||||
if args:
|
||||
init_optparse.print_usage()
|
||||
sys.exit(1)
|
||||
|
||||
@ -164,7 +217,7 @@ def _Init(args):
|
||||
if not os.path.isdir(repodir):
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
repodir, e.strerror)
|
||||
@ -175,15 +228,11 @@ def _Init(args):
|
||||
|
||||
_CheckGitVersion()
|
||||
try:
|
||||
if _NeedSetupGnuPG():
|
||||
can_verify = _SetupGnuPG(opt.quiet)
|
||||
if NeedSetupGnuPG():
|
||||
can_verify = SetupGnuPG(opt.quiet)
|
||||
else:
|
||||
can_verify = True
|
||||
|
||||
if not opt.quiet:
|
||||
print >>sys.stderr, 'Getting repo ...'
|
||||
print >>sys.stderr, ' from %s' % url
|
||||
|
||||
dst = os.path.abspath(os.path.join(repodir, S_repo))
|
||||
_Clone(url, dst, opt.quiet)
|
||||
|
||||
@ -202,7 +251,17 @@ def _Init(args):
|
||||
|
||||
def _CheckGitVersion():
|
||||
cmd = [GIT, '--version']
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
except OSError as e:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, "fatal: '%s' is not available" % GIT
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, 'Please make sure %s is installed'\
|
||||
' and in your path.' % GIT
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = proc.stdout.read().strip()
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
@ -219,7 +278,7 @@ def _CheckGitVersion():
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
def _NeedSetupGnuPG():
|
||||
def NeedSetupGnuPG():
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
return True
|
||||
|
||||
@ -237,11 +296,11 @@ def _NeedSetupGnuPG():
|
||||
return False
|
||||
|
||||
|
||||
def _SetupGnuPG(quiet):
|
||||
def SetupGnuPG(quiet):
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
home_dot_repo, e.strerror)
|
||||
@ -250,21 +309,21 @@ def _SetupGnuPG(quiet):
|
||||
if not os.path.isdir(gpg_dir):
|
||||
try:
|
||||
os.mkdir(gpg_dir, 0700)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' % (
|
||||
gpg_dir, e.strerror)
|
||||
sys.exit(1)
|
||||
|
||||
env = dict(os.environ)
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
|
||||
cmd = ['gpg', '--import']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd,
|
||||
env = env,
|
||||
stdin = subprocess.PIPE)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'warning: gpg (GnuPG) is not available.'
|
||||
print >>sys.stderr, 'warning: Installing it is strongly encouraged.'
|
||||
@ -293,15 +352,43 @@ def _SetConfig(local, name, value):
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
def _Fetch(local, quiet, *args):
|
||||
def _InitHttp():
|
||||
handlers = []
|
||||
|
||||
mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
try:
|
||||
import netrc
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||
except:
|
||||
pass
|
||||
handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
|
||||
handlers.append(urllib2.HTTPDigestAuthHandler(mgr))
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
|
||||
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||
handlers.append(urllib2.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
|
||||
urllib2.install_opener(urllib2.build_opener(*handlers))
|
||||
|
||||
def _Fetch(url, local, src, quiet):
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
|
||||
cmd = [GIT, 'fetch']
|
||||
if quiet:
|
||||
cmd.append('--quiet')
|
||||
err = subprocess.PIPE
|
||||
else:
|
||||
err = None
|
||||
cmd.extend(args)
|
||||
cmd.append('origin')
|
||||
cmd.append(src)
|
||||
cmd.append('+refs/heads/*:refs/remotes/origin/*')
|
||||
cmd.append('refs/tags/*:refs/tags/*')
|
||||
|
||||
proc = subprocess.Popen(cmd, cwd = local, stderr = err)
|
||||
if err:
|
||||
@ -310,13 +397,69 @@ def _Fetch(local, quiet, *args):
|
||||
if proc.wait() != 0:
|
||||
raise CloneFailure()
|
||||
|
||||
def _DownloadBundle(url, local, quiet):
|
||||
if not url.endswith('/'):
|
||||
url += '/'
|
||||
url += 'clone.bundle'
|
||||
|
||||
proc = subprocess.Popen(
|
||||
[GIT, 'config', '--get-regexp', 'url.*.insteadof'],
|
||||
cwd = local,
|
||||
stdout = subprocess.PIPE)
|
||||
for line in proc.stdout:
|
||||
m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
|
||||
if m:
|
||||
new_url = m.group(1)
|
||||
old_url = m.group(2)
|
||||
if url.startswith(old_url):
|
||||
url = new_url + url[len(old_url):]
|
||||
break
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
|
||||
if not url.startswith('http:') and not url.startswith('https:'):
|
||||
return False
|
||||
|
||||
dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
|
||||
try:
|
||||
try:
|
||||
r = urllib2.urlopen(url)
|
||||
except urllib2.HTTPError as e:
|
||||
if e.code == 404:
|
||||
return False
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: HTTP error %s' % e.code
|
||||
raise CloneFailure()
|
||||
except urllib2.URLError as e:
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: error %s' % e.reason
|
||||
raise CloneFailure()
|
||||
try:
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
while True:
|
||||
buf = r.read(8192)
|
||||
if buf == '':
|
||||
return True
|
||||
dest.write(buf)
|
||||
finally:
|
||||
r.close()
|
||||
finally:
|
||||
dest.close()
|
||||
|
||||
def _ImportBundle(local):
|
||||
path = os.path.join(local, '.git', 'clone.bundle')
|
||||
try:
|
||||
_Fetch(local, local, path, True)
|
||||
finally:
|
||||
os.remove(path)
|
||||
|
||||
def _Clone(url, local, quiet):
|
||||
"""Clones a git repository to a new subdirectory of repodir
|
||||
"""
|
||||
try:
|
||||
os.mkdir(local)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr, \
|
||||
'fatal: cannot make %s directory: %s' \
|
||||
% (local, e.strerror)
|
||||
@ -325,7 +468,7 @@ def _Clone(url, local, quiet):
|
||||
cmd = [GIT, 'init', '--quiet']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, cwd = local)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, "fatal: '%s' is not available" % GIT
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
@ -337,11 +480,14 @@ def _Clone(url, local, quiet):
|
||||
print >>sys.stderr, 'fatal: could not create %s' % local
|
||||
raise CloneFailure()
|
||||
|
||||
_InitHttp()
|
||||
_SetConfig(local, 'remote.origin.url', url)
|
||||
_SetConfig(local, 'remote.origin.fetch',
|
||||
'+refs/heads/*:refs/remotes/origin/*')
|
||||
_Fetch(local, quiet)
|
||||
_Fetch(local, quiet, '--tags')
|
||||
if _DownloadBundle(url, local, quiet):
|
||||
_ImportBundle(local)
|
||||
else:
|
||||
_Fetch(url, local, 'origin', quiet)
|
||||
|
||||
|
||||
def _Verify(cwd, branch, quiet):
|
||||
@ -375,8 +521,8 @@ def _Verify(cwd, branch, quiet):
|
||||
% (branch, cur)
|
||||
print >>sys.stderr
|
||||
|
||||
env = dict(os.environ)
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
@ -424,15 +570,19 @@ def _Checkout(cwd, branch, rev, quiet):
|
||||
def _FindRepo():
|
||||
"""Look for a repo installation, starting at the current directory.
|
||||
"""
|
||||
dir = os.getcwd()
|
||||
curdir = os.getcwd()
|
||||
repo = None
|
||||
|
||||
while dir != '/' and not repo:
|
||||
repo = os.path.join(dir, repodir, REPO_MAIN)
|
||||
olddir = None
|
||||
while curdir != '/' \
|
||||
and curdir != olddir \
|
||||
and not repo:
|
||||
repo = os.path.join(curdir, repodir, REPO_MAIN)
|
||||
if not os.path.isfile(repo):
|
||||
repo = None
|
||||
dir = os.path.dirname(dir)
|
||||
return (repo, os.path.join(dir, repodir))
|
||||
olddir = curdir
|
||||
curdir = os.path.dirname(curdir)
|
||||
return (repo, os.path.join(curdir, repodir))
|
||||
|
||||
|
||||
class _Options:
|
||||
@ -476,6 +626,7 @@ def _Help(args):
|
||||
if args:
|
||||
if args[0] == 'init':
|
||||
init_optparse.print_help()
|
||||
sys.exit(0)
|
||||
else:
|
||||
print >>sys.stderr,\
|
||||
"error: '%s' is not a bootstrap command.\n"\
|
||||
@ -505,7 +656,7 @@ def _RunSelf(wrapper_path):
|
||||
my_git = os.path.join(my_dir, '.git')
|
||||
|
||||
if os.path.isfile(my_main) and os.path.isdir(my_git):
|
||||
for name in ['manifest.py',
|
||||
for name in ['git_config.py',
|
||||
'project.py',
|
||||
'subcmds']:
|
||||
if not os.path.exists(os.path.join(my_dir, name)):
|
||||
@ -537,13 +688,13 @@ def _SetDefaultsTo(gitdir):
|
||||
|
||||
|
||||
def main(orig_args):
|
||||
main, dir = _FindRepo()
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
cmd, opt, args = _ParseArguments(orig_args)
|
||||
|
||||
wrapper_path = os.path.abspath(__file__)
|
||||
my_main, my_git = _RunSelf(wrapper_path)
|
||||
|
||||
if not main:
|
||||
if not repo_main:
|
||||
if opt.help:
|
||||
_Usage()
|
||||
if cmd == 'help':
|
||||
@ -563,29 +714,28 @@ def main(orig_args):
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(repodir)
|
||||
sys.exit(1)
|
||||
main, dir = _FindRepo()
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
else:
|
||||
_NoCommands(cmd)
|
||||
|
||||
if my_main:
|
||||
main = my_main
|
||||
repo_main = my_main
|
||||
|
||||
ver_str = '.'.join(map(lambda x: str(x), VERSION))
|
||||
me = [main,
|
||||
'--repo-dir=%s' % dir,
|
||||
me = [repo_main,
|
||||
'--repo-dir=%s' % rel_repo_dir,
|
||||
'--wrapper-version=%s' % ver_str,
|
||||
'--wrapper-path=%s' % wrapper_path,
|
||||
'--']
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
try:
|
||||
os.execv(main, me)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, "fatal: unable to start %s" % main
|
||||
os.execv(repo_main, me)
|
||||
except OSError as e:
|
||||
print >>sys.stderr, "fatal: unable to start %s" % repo_main
|
||||
print >>sys.stderr, "fatal: %s" % e
|
||||
sys.exit(148)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:])
|
||||
|
||||
|
@@ -15,7 +15,7 @@

 import os

-all = {}
+all_commands = {}

 my_dir = os.path.dirname(__file__)
 for py in os.listdir(my_dir):
@@ -43,7 +43,7 @@ for py in os.listdir(my_dir):

   name = name.replace('_', '-')
   cmd.NAME = name
-  all[name] = cmd
+  all_commands[name] = cmd

-if 'help' in all:
-  all['help'].commands = all
+if 'help' in all_commands:
+  all_commands['help'].commands = all_commands
@ -16,6 +16,7 @@
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import git
|
||||
from progress import Progress
|
||||
|
||||
class Abandon(Command):
|
||||
common = True
|
||||
@ -38,5 +39,32 @@ It is equivalent to "git branch -D <branchname>".
|
||||
print >>sys.stderr, "error: '%s' is not a valid name" % nb
|
||||
sys.exit(1)
|
||||
|
||||
for project in self.GetProjects(args[1:]):
|
||||
project.AbandonBranch(nb)
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
|
||||
pm = Progress('Abandon %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
|
||||
status = project.AbandonBranch(nb)
|
||||
if status is not None:
|
||||
if status:
|
||||
success.append(project)
|
||||
else:
|
||||
err.append(project)
|
||||
pm.end()
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot abandon %s" \
|
||||
% (p.relpath, nb)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >>sys.stderr, 'Abandoned in %d project(s):\n %s' % (
|
||||
len(success), '\n '.join(p.relpath for p in success))
|
||||
|
@ -61,40 +61,54 @@ class Branches(Command):
|
||||
%prog [<project>...]
|
||||
|
||||
Summarizes the currently available topic branches.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-a', '--all',
|
||||
dest='all', action='store_true',
|
||||
help='show all branches, not just the majority')
|
||||
Branch Display
|
||||
--------------
|
||||
|
||||
The branch display output by this command is organized into four
|
||||
columns of information; for example:
|
||||
|
||||
*P nocolor | in repo
|
||||
repo2 |
|
||||
|
||||
The first column contains a * if the branch is the currently
|
||||
checked out branch in any of the specified projects, or a blank
|
||||
if no project has the branch checked out.
|
||||
|
||||
The second column contains either blank, p or P, depending upon
|
||||
the upload status of the branch.
|
||||
|
||||
(blank): branch not yet published by repo upload
|
||||
P: all commits were published by repo upload
|
||||
p: only some commits were published by repo upload
|
||||
|
||||
The third column contains the branch name.
|
||||
|
||||
The fourth column (after the | separator) lists the projects that
|
||||
the branch appears in, or does not appear in. If no project list
|
||||
is shown, then the branch appears in all projects.
|
||||
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all = {}
|
||||
all_branches = {}
|
||||
project_cnt = len(projects)
|
||||
|
||||
for project in projects:
|
||||
for name, b in project.GetBranches().iteritems():
|
||||
b.project = project
|
||||
if name not in all:
|
||||
all[name] = BranchInfo(name)
|
||||
all[name].add(b)
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
|
||||
names = all.keys()
|
||||
names = all_branches.keys()
|
||||
names.sort()
|
||||
|
||||
if not opt.all and not args:
|
||||
# No -a and no specific projects listed; try to filter the
|
||||
# results down to only the majority of projects.
|
||||
#
|
||||
n = []
|
||||
for name in names:
|
||||
i = all[name]
|
||||
if i.IsCurrent \
|
||||
or 80 <= (100 * len(i.projects)) / project_cnt:
|
||||
n.append(name)
|
||||
names = n
|
||||
if not names:
|
||||
print >>sys.stderr, ' (no branches)'
|
||||
return
|
||||
|
||||
width = 25
|
||||
for name in names:
|
||||
@ -102,7 +116,7 @@ Summarizes the currently available topic branches.
|
||||
width = len(name)
|
||||
|
||||
for name in names:
|
||||
i = all[name]
|
||||
i = all_branches[name]
|
||||
in_cnt = len(i.projects)
|
||||
|
||||
if i.IsCurrent:
|
||||
@ -122,29 +136,31 @@ Summarizes the currently available topic branches.
|
||||
hdr('%c%c %-*s' % (current, published, width, name))
|
||||
out.write(' |')
|
||||
|
||||
if in_cnt < project_cnt and (in_cnt == 1 or opt.all):
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
type = 'in'
|
||||
in_type = 'in'
|
||||
for b in i.projects:
|
||||
paths.append(b.project.relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
type = 'not in'
|
||||
in_type = 'not in'
|
||||
have = set()
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
paths.append(p.relpath)
|
||||
if not p in have:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (type, ', '.join(paths))
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
if width + 7 + len(s) < 80:
|
||||
fmt(s)
|
||||
else:
|
||||
out.nl()
|
||||
fmt(' %s:' % type)
|
||||
fmt(' %s:' % in_type)
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(' %s' % p)
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
@ -15,6 +15,7 @@
|
||||
|
||||
import sys
|
||||
from command import Command
|
||||
from progress import Progress
|
||||
|
||||
class Checkout(Command):
|
||||
common = True
|
||||
@ -35,13 +36,29 @@ The command is equivalent to:
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
retValue = 0;
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
|
||||
branch = args[0]
|
||||
for project in self.GetProjects(args[1:]):
|
||||
if not project.CheckoutBranch(branch):
|
||||
retValue = 1;
|
||||
print >>sys.stderr, "error: checking out branch '%s' in %s failed" % (branch, project.name)
|
||||
pm = Progress('Checkout %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
|
||||
if (retValue != 0):
|
||||
sys.exit(retValue);
|
||||
status = project.CheckoutBranch(nb)
|
||||
if status is not None:
|
||||
if status:
|
||||
success.append(project)
|
||||
else:
|
||||
err.append(project)
|
||||
pm.end()
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot checkout %s" \
|
||||
% (p.relpath, nb)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
sys.exit(1)
|
||||
|
subcmds/cherry_pick.py (new file, 115 lines)

@@ -0,0 +1,115 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
|
||||
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
|
||||
|
||||
class CherryPick(Command):
|
||||
common = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
helpUsage = """
|
||||
%prog <sha1>
|
||||
"""
|
||||
helpDescription = """
|
||||
'%prog' cherry-picks a change from one branch to another.
|
||||
The change id will be updated, and a reference to the old
|
||||
change id will be added.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
pass
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if len(args) != 1:
|
||||
self.Usage()
|
||||
|
||||
reference = args[0]
|
||||
|
||||
p = GitCommand(None,
|
||||
['rev-parse', '--verify', reference],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, p.stderr
|
||||
sys.exit(1)
|
||||
sha1 = p.stdout.strip()
|
||||
|
||||
p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to retrieve old commit message"
|
||||
sys.exit(1)
|
||||
old_msg = self._StripHeader(p.stdout)
|
||||
|
||||
p = GitCommand(None,
|
||||
['cherry-pick', sha1],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
status = p.Wait()
|
||||
|
||||
print >>sys.stdout, p.stdout
|
||||
print >>sys.stderr, p.stderr
|
||||
|
||||
if status == 0:
|
||||
# The cherry-pick was applied correctly. We just need to edit the
|
||||
# commit message.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
|
||||
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
|
||||
provide_stdin = True,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
p.stdin.write(new_msg)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to update commit message"
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
print >>sys.stderr, """\
|
||||
NOTE: When committing (please see above) and editing the commit message,
|
||||
please remove the old Change-Id-line and add:
|
||||
"""
|
||||
print >>sys.stderr, self._GetReference(sha1)
|
||||
print >>sys.stderr
|
||||
|
||||
def _IsChangeId(self, line):
|
||||
return CHANGE_ID_RE.match(line)
|
||||
|
||||
def _GetReference(self, sha1):
|
||||
return "(cherry picked from commit %s)" % sha1
|
||||
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("")+1:])
|
||||
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
|
||||
for line in old_msg.splitlines():
|
||||
if not self._IsChangeId(line):
|
||||
new_msg.append(line)
|
||||
|
||||
# Add a blank line between the message and the change id/reference
|
||||
try:
|
||||
if new_msg[-1].strip() != "":
|
||||
new_msg.append("")
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
new_msg.append(self._GetReference(sha1))
|
||||
return "\n".join(new_msg)
|
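The commit-message rewrite done by _StripHeader and _Reformat above is easiest to see on an example; the message text and sha1 below are invented purely for illustration:

import re

CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')

# Invented commit message and sha1, only to illustrate _Reformat above.
old_msg = "mmc: fix null deref on resume\n\nChange-Id: I" + "0" * 40
sha1 = "1a2b3c4d" * 5

new_msg = [l for l in old_msg.splitlines() if not CHANGE_ID_RE.match(l)]
if new_msg and new_msg[-1].strip() != "":
  new_msg.append("")
new_msg.append("(cherry picked from commit %s)" % sha1)
print "\n".join(new_msg)
# -> the Change-Id trailer is dropped and a cherry-pick reference is appended.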
@ -20,8 +20,21 @@ class Diff(PagedCommand):
|
||||
helpSummary = "Show changes between commit and working tree"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
|
||||
The -u option causes '%prog' to generate diff output with file paths
|
||||
relative to the repository root, so the output can be applied
|
||||
to the Unix 'patch' command.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
def cmd(option, opt_str, value, parser):
|
||||
setattr(parser.values, option.dest, list(parser.rargs))
|
||||
while parser.rargs:
|
||||
del parser.rargs[0]
|
||||
p.add_option('-u', '--absolute',
|
||||
dest='absolute', action='store_true',
|
||||
help='Paths are relative to the repository root')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
for project in self.GetProjects(args):
|
||||
project.PrintWorkTreeDiff()
|
||||
project.PrintWorkTreeDiff(opt.absolute)
|
||||
|
@ -13,7 +13,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
@ -33,9 +32,20 @@ makes it available in your project's local working directory.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
pass
|
||||
p.add_option('-c','--cherry-pick',
|
||||
dest='cherrypick', action='store_true',
|
||||
help="cherry-pick instead of checkout")
|
||||
p.add_option('-r','--revert',
|
||||
dest='revert', action='store_true',
|
||||
help="revert instead of checkout")
|
||||
p.add_option('-f','--ff-only',
|
||||
dest='ffonly', action='store_true',
|
||||
help="force fast-forward merge")
|
||||
|
||||
def _ParseChangeIds(self, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
to_get = []
|
||||
project = None
|
||||
|
||||
@ -63,7 +73,7 @@ makes it available in your project's local working directory.
|
||||
% (project.name, change_id, ps_id)
|
||||
sys.exit(1)
|
||||
|
||||
if not dl.commits:
|
||||
if not opt.revert and not dl.commits:
|
||||
print >>sys.stderr, \
|
||||
'[%s] change %d/%d has already been merged' \
|
||||
% (project.name, change_id, ps_id)
|
||||
@ -75,4 +85,11 @@ makes it available in your project's local working directory.
|
||||
% (project.name, change_id, ps_id, len(dl.commits))
|
||||
for c in dl.commits:
|
||||
print >>sys.stderr, ' %s' % (c)
|
||||
project._Checkout(dl.commit)
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit)
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
|
@ -13,12 +13,29 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import fcntl
|
||||
import re
|
||||
import os
|
||||
import select
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
from color import Coloring
|
||||
from command import Command, MirrorSafeCommand
|
||||
|
||||
_CAN_COLOR = [
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
]
|
||||
|
||||
class ForallColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'forall')
|
||||
self.project = self.printer('project', attr='bold')
|
||||
|
||||
|
||||
class Forall(Command, MirrorSafeCommand):
|
||||
common = False
|
||||
helpSummary = "Run a shell command in each project"
|
||||
@ -28,6 +45,24 @@ class Forall(Command, MirrorSafeCommand):
|
||||
helpDescription = """
|
||||
Executes the same shell command in each project.
|
||||
|
||||
Output Formatting
|
||||
-----------------
|
||||
|
||||
The -p option causes '%prog' to bind pipes to the command's stdin,
|
||||
stdout and stderr streams, and pipe all output into a continuous
|
||||
stream that is displayed in a single pager session. Project headings
|
||||
are inserted before the output of each command is displayed. If the
|
||||
command produces no output in a project, no heading is displayed.
|
||||
|
||||
The formatting convention used by -p is very suitable for some
|
||||
types of searching, e.g. `repo forall -p -c git log -SFoo` will
|
||||
print all commits that add or remove references to Foo.
|
||||
|
||||
The -v option causes '%prog' to display stderr messages if a
|
||||
command produces output only on stderr. Normally the -p option
|
||||
causes command output to be suppressed until the command produces
|
||||
at least one byte of output on stdout.
|
||||
|
||||
Environment
|
||||
-----------
|
||||
|
||||
@ -47,11 +82,16 @@ revision to a locally executed git command, use REPO_LREV.
|
||||
REPO_RREV is the name of the revision from the manifest, exactly
|
||||
as written in the manifest.
|
||||
|
||||
REPO__* are any extra environment variables, specified by the
|
||||
"annotation" element under any project element. This can be useful
|
||||
for differentiating trees based on user-specific criteria, or simply
|
||||
annotating tree details.
|
||||
|
||||
shell positional arguments ($1, $2, .., $#) are set to any arguments
|
||||
following <command>.
|
||||
|
||||
stdin, stdout, stderr are inherited from the terminal and are
|
||||
not redirected.
|
||||
Unless -p is used, stdin, stdout, stderr are inherited from the
|
||||
terminal and are not redirected.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
@ -65,6 +105,17 @@ not redirected.
|
||||
action='callback',
|
||||
callback=cmd)
|
||||
|
||||
g = p.add_option_group('Output')
|
||||
g.add_option('-p',
|
||||
dest='project_header', action='store_true',
|
||||
help='Show project headers before output')
|
||||
g.add_option('-v', '--verbose',
|
||||
dest='verbose', action='store_true',
|
||||
help='Show command error messages')
|
||||
|
||||
def WantPager(self, opt):
|
||||
return opt.project_header
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not opt.command:
|
||||
self.Usage()
|
||||
@ -79,22 +130,49 @@ not redirected.
|
||||
cmd.append(cmd[0])
|
||||
cmd.extend(opt.command[1:])
|
||||
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
# If this is a direct git command that can enable colorized
|
||||
# output and the user prefers coloring, add --color into the
|
||||
# command line because we are going to wrap the command into
|
||||
# a pipe and git won't know coloring should activate.
|
||||
#
|
||||
for cn in cmd[1:]:
|
||||
if not cn.startswith('-'):
|
||||
break
|
||||
else:
|
||||
cn = None
|
||||
# pylint: disable=W0631
|
||||
if cn and cn in _CAN_COLOR:
|
||||
class ColorCmd(Coloring):
|
||||
def __init__(self, config, cmd):
|
||||
Coloring.__init__(self, config, cmd)
|
||||
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
|
||||
cmd.insert(cmd.index(cn) + 1, '--color')
|
||||
# pylint: enable=W0631
|
||||
|
||||
mirror = self.manifest.IsMirror
|
||||
out = ForallColoring(self.manifest.manifestProject.config)
|
||||
out.redirect(sys.stdout)
|
||||
|
||||
rc = 0
|
||||
first = True
|
||||
|
||||
for project in self.GetProjects(args):
|
||||
env = dict(os.environ.iteritems())
|
||||
env = os.environ.copy()
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
env[name] = val
|
||||
env[name] = val.encode()
|
||||
|
||||
setenv('REPO_PROJECT', project.name)
|
||||
setenv('REPO_PATH', project.relpath)
|
||||
setenv('REPO_REMOTE', project.remote.name)
|
||||
setenv('REPO_LREV', project\
|
||||
.GetRemote(project.remote.name)\
|
||||
.ToLocal(project.revision))
|
||||
setenv('REPO_RREV', project.revision)
|
||||
setenv('REPO_LREV', project.GetRevisionId())
|
||||
setenv('REPO_RREV', project.revisionExpr)
|
||||
for a in project.annotations:
|
||||
setenv("REPO__%s" % (a.name), a.value)
|
||||
|
||||
if mirror:
|
||||
setenv('GIT_DIR', project.gitdir)
|
||||
@ -102,10 +180,79 @@ not redirected.
|
||||
else:
|
||||
cwd = project.worktree
|
||||
|
||||
if not os.path.exists(cwd):
|
||||
if (opt.project_header and opt.verbose) \
|
||||
or not opt.project_header:
|
||||
print >>sys.stderr, 'skipping %s/' % project.relpath
|
||||
continue
|
||||
|
||||
if opt.project_header:
|
||||
stdin = subprocess.PIPE
|
||||
stdout = subprocess.PIPE
|
||||
stderr = subprocess.PIPE
|
||||
else:
|
||||
stdin = None
|
||||
stdout = None
|
||||
stderr = None
|
||||
|
||||
p = subprocess.Popen(cmd,
|
||||
cwd = cwd,
|
||||
shell = shell,
|
||||
env = env)
|
||||
env = env,
|
||||
stdin = stdin,
|
||||
stdout = stdout,
|
||||
stderr = stderr)
|
||||
|
||||
if opt.project_header:
|
||||
class sfd(object):
|
||||
def __init__(self, fd, dest):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
def fileno(self):
|
||||
return self.fd.fileno()
|
||||
|
||||
empty = True
|
||||
errbuf = ''
|
||||
|
||||
p.stdin.close()
|
||||
s_in = [sfd(p.stdout, sys.stdout),
|
||||
sfd(p.stderr, sys.stderr)]
|
||||
|
||||
for s in s_in:
|
||||
flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
while s_in:
|
||||
in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
|
||||
for s in in_ready:
|
||||
buf = s.fd.read(4096)
|
||||
if not buf:
|
||||
s.fd.close()
|
||||
s_in.remove(s)
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
if s.fd != p.stdout:
|
||||
errbuf += buf
|
||||
continue
|
||||
|
||||
if empty:
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
out.nl()
|
||||
out.project('project %s/', project.relpath)
|
||||
out.nl()
|
||||
out.flush()
|
||||
if errbuf:
|
||||
sys.stderr.write(errbuf)
|
||||
sys.stderr.flush()
|
||||
errbuf = ''
|
||||
empty = False
|
||||
|
||||
s.dest.write(buf)
|
||||
s.dest.flush()
|
||||
|
||||
r = p.wait()
|
||||
if r != 0 and r != rc:
|
||||
rc = r
|
||||
|
@ -14,10 +14,9 @@
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from optparse import SUPPRESS_HELP
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
from git_command import GitCommand
|
||||
from git_command import git_require, GitCommand
|
||||
|
||||
class GrepColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
@ -33,8 +32,8 @@ class Grep(PagedCommand):
|
||||
helpDescription = """
|
||||
Search for the specified patterns in all project files.
|
||||
|
||||
Options
|
||||
-------
|
||||
Boolean Options
|
||||
---------------
|
||||
|
||||
The following options can appear as often as necessary to express
|
||||
the pattern to locate:
|
||||
@ -158,7 +157,7 @@ contain a line that matches both expressions:
|
||||
out = GrepColoring(self.manifest.manifestProject.config)
|
||||
|
||||
cmd_argv = ['grep']
|
||||
if out.is_on:
|
||||
if out.is_on and git_require((1,6,3)):
|
||||
cmd_argv.append('--color')
|
||||
cmd_argv.extend(getattr(opt,'cmd_argv',[]))
|
||||
|
||||
@ -204,7 +203,7 @@ contain a line that matches both expressions:
|
||||
else:
|
||||
out.project('--- project %s ---' % project.relpath)
|
||||
out.nl()
|
||||
out.write(p.stderr)
|
||||
out.write("%s", p.stderr)
|
||||
out.nl()
|
||||
continue
|
||||
have_match = True
|
||||
@ -217,17 +216,17 @@ contain a line that matches both expressions:
|
||||
if have_rev and full_name:
|
||||
for line in r:
|
||||
rev, line = line.split(':', 1)
|
||||
out.write(rev)
|
||||
out.write("%s", rev)
|
||||
out.write(':')
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write(line)
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
elif full_name:
|
||||
for line in r:
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write(line)
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
else:
|
||||
for line in r:
|
||||
|
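The out.write(line) -> out.write("%s", line) changes in the grep hunks above matter when a matched line itself contains a % character; a minimal illustration of the difference, approximating the Coloring write path with plain % formatting:

line = 'progress: 100% (3/3)'  # a grep hit that happens to contain '%'

# Old style: the matched line was effectively used as the format string.
try:
  print line % ()
except (TypeError, ValueError), e:
  print 'old style fails: %s' % e

# New style: the matched line is passed as data to a '%s' format.
print '%s' % line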
@ -94,6 +94,8 @@ See 'repo help --all' for a complete list of recognized commands.
body = getattr(cmd, bodyAttr)
except AttributeError:
return
if body == '' or body is None:
return

self.nl()

@ -107,7 +109,7 @@ See 'repo help --all' for a complete list of recognized commands.
body = body.strip()
body = body.replace('%prog', me)

asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n(={2,}|-{2,})$')
asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n([=~-]{2,})$')
for para in body.split("\n\n"):
if para.startswith(' '):
self.write('%s', para)
@ -117,9 +119,19 @@ See 'repo help --all' for a complete list of recognized commands.

m = asciidoc_hdr.match(para)
if m:
self.heading('%s', m.group(1))
title = m.group(1)
section_type = m.group(2)
if section_type[0] in ('=', '-'):
p = self.heading
else:
def _p(fmt, *args):
self.write(' ')
self.heading(fmt, *args)
p = _p

p('%s', title)
self.nl()
self.heading('%s', ''.ljust(len(m.group(1)),'-'))
p('%s', ''.ljust(len(title),section_type[0]))
self.nl()
continue

@ -128,8 +140,8 @@ See 'repo help --all' for a complete list of recognized commands.
self.wrap.end_paragraph(0)

out = _Out(self.manifest.globalConfig)
cmd.OptionParser.print_help()
out._PrintSection('Summary', 'helpSummary')
cmd.OptionParser.print_help()
out._PrintSection('Description', 'helpDescription')

def _Options(self, p):
@ -153,6 +165,7 @@ See 'repo help --all' for a complete list of recognized commands.
print >>sys.stderr, "repo: '%s' is not a repo command." % name
sys.exit(1)

cmd.manifest = self.manifest
self._PrintCommandHelp(cmd)

else:
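The help.py hunks above widen the heading pattern from (={2,}|-{2,}) to ([=~-]{2,}) and print ~-underlined headings through an indenting wrapper, giving command help a nested sub-heading level (the 'Compatibility' block under 'SSH Connections' in the sync help below uses it). A small illustrative check of the widened pattern, assuming nothing beyond the regex itself:

    import re

    asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n([=~-]{2,})$')

    for para in ('Status Display\n--------------', 'Compatibility\n~~~~~~~~~~~~~'):
        m = asciidoc_hdr.match(para)
        # group(1) is the heading text, group(2) its underline; the '~'
        # underline is matched now and is the one rendered indented.
        print m.group(1), m.group(2)[0]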
199
subcmds/init.py
@ -14,14 +14,17 @@
# limitations under the License.

import os
import platform
import re
import shutil
import sys

from color import Coloring
from command import InteractiveCommand, MirrorSafeCommand
from error import ManifestParseError
from remote import Remote
from project import SyncBuffer
from git_command import git, MIN_GIT_VERSION
from git_config import GitConfig
from git_command import git_require, MIN_GIT_VERSION

class Init(InteractiveCommand, MirrorSafeCommand):
common = True
@ -35,9 +38,27 @@ The latest repo source code and manifest collection is downloaded
from the server and is installed in the .repo/ directory in the
current working directory.

The optional <manifest> argument can be used to specify an alternate
manifest to be used. If no manifest is specified, the manifest
default.xml will be used.
The optional -b argument can be used to select the manifest branch
to checkout and use. If no branch is specified, master is assumed.

The optional -m argument can be used to specify an alternate manifest
to be used. If no manifest is specified, the manifest default.xml
will be used.

The --reference option can be used to point to a directory that
has the content of a --mirror sync. This will make the working
directory use as much data as possible from the local reference
directory when fetching from the server. This will make the sync
go a lot faster by reducing data traffic on the network.


Switching Manifest Branches
---------------------------

To switch to another manifest branch, `repo init -b otherbranch`
may be used in an existing client. However, as this only updates the
manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
to update the working directory files.
"""
def _Options(self, p):
|
||||
@ -60,11 +81,26 @@ default.xml will be used.
|
||||
help='initial manifest file', metavar='NAME.xml')
|
||||
g.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forrest')
|
||||
|
||||
help='create a replica of the remote repositories '
|
||||
'rather than a client working directory')
|
||||
g.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
g.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
g.add_option('-g', '--groups',
|
||||
dest='groups', default='all,-notdefault',
|
||||
help='restrict manifest projects to ones with a specified group',
|
||||
metavar='GROUP')
|
||||
g.add_option('-p', '--platform',
|
||||
dest='platform', default='auto',
|
||||
help='restrict manifest projects to ones with a specified '
|
||||
'platform group [auto|all|none|linux|darwin|...]',
|
||||
metavar='PLATFORM')
|
||||
|
||||
# Tool
|
||||
g = p.add_option_group('Version options')
|
||||
g = p.add_option_group('repo Version options')
|
||||
g.add_option('--repo-url',
|
||||
dest='repo_url',
|
||||
help='repo repository location', metavar='URL')
|
||||
@ -75,18 +111,11 @@ default.xml will be used.
|
||||
dest='no_repo_verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
|
||||
def _CheckGitVersion(self):
|
||||
ver_str = git.version()
|
||||
if not ver_str.startswith('git version '):
|
||||
print >>sys.stderr, 'error: "%s" unsupported' % ver_str
|
||||
sys.exit(1)
|
||||
|
||||
ver_str = ver_str[len('git version '):].strip()
|
||||
ver_act = tuple(map(lambda x: int(x), ver_str.split('.')[0:3]))
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
need = '.'.join(map(lambda x: str(x), MIN_GIT_VERSION))
|
||||
print >>sys.stderr, 'fatal: git %s or later required' % need
|
||||
sys.exit(1)
|
||||
# Other
|
||||
g = p.add_option_group('Other options')
|
||||
g.add_option('--config-name',
|
||||
dest='config_name', action="store_true", default=False,
|
||||
help='Always prompt for name/e-mail')
|
||||
|
||||
def _SyncManifest(self, opt):
|
||||
m = self.manifest.manifestProject
|
||||
@ -98,17 +127,17 @@ default.xml will be used.
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print >>sys.stderr, 'Getting manifest ...'
|
||||
print >>sys.stderr, ' from %s' % opt.manifest_url
|
||||
print >>sys.stderr, 'Get %s' \
|
||||
% GitConfig.ForUser().UrlInsteadOf(opt.manifest_url)
|
||||
m._InitGitDir()
|
||||
|
||||
if opt.manifest_branch:
|
||||
m.revision = opt.manifest_branch
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
m.revision = 'refs/heads/master'
|
||||
m.revisionExpr = 'refs/heads/master'
|
||||
else:
|
||||
if opt.manifest_branch:
|
||||
m.revision = opt.manifest_branch
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
m.PreSync()
|
||||
|
||||
@ -118,6 +147,30 @@ default.xml will be used.
|
||||
r.ResetFetch()
|
||||
r.Save()
|
||||
|
||||
groups = re.split('[,\s]+', opt.groups)
|
||||
all_platforms = ['linux', 'darwin']
|
||||
platformize = lambda x: 'platform-' + x
|
||||
if opt.platform == 'auto':
|
||||
if (not opt.mirror and
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
groups.append(platformize(platform.system().lower()))
|
||||
elif opt.platform == 'all':
|
||||
groups.extend(map(platformize, all_platforms))
|
||||
elif opt.platform in all_platforms:
|
||||
groups.extend(platformize(opt.platform))
|
||||
elif opt.platform != 'none':
|
||||
print >>sys.stderr, 'fatal: invalid platform flag'
|
||||
sys.exit(1)
|
||||
|
||||
groups = [x for x in groups if x]
|
||||
groupstr = ','.join(groups)
|
||||
if opt.platform == 'auto' and groupstr == 'all,-notdefault,platform-' + platform.system().lower():
|
||||
groupstr = None
|
||||
m.config.SetString('manifest.groups', groupstr)
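# Illustration, not part of the change above: with the new defaults
# (--groups 'all,-notdefault', --platform auto) on a Linux host the list
# built here is ['all', '-notdefault', 'platform-linux'], i.e.
#   ','.join(groups) == 'all,-notdefault,platform-linux'
# which equals the built-in default, so groupstr is reset to None and
# manifest.groups stays unset instead of being pinned in the config.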
|
||||
|
||||
if opt.reference:
|
||||
m.config.SetString('repo.reference', opt.reference)
|
||||
|
||||
if opt.mirror:
|
||||
if is_new:
|
||||
m.config.SetString('repo.mirror', 'true')
|
||||
@ -125,11 +178,19 @@ default.xml will be used.
|
||||
print >>sys.stderr, 'fatal: --mirror not supported on existing client'
|
||||
sys.exit(1)
|
||||
|
||||
if not m.Sync_NetworkHalf():
|
||||
if not m.Sync_NetworkHalf(is_new=is_new):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
|
||||
|
||||
# Better delete the manifest git dir if we created it; otherwise next
|
||||
# time (when user fixes problems) we won't go through the "is_new" logic.
|
||||
if is_new:
|
||||
shutil.rmtree(m.gitdir)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.manifest_branch:
|
||||
m.MetaBranchSwitch(opt.manifest_branch)
|
||||
|
||||
syncbuf = SyncBuffer(m.config)
|
||||
m.Sync_LocalHalf(syncbuf)
|
||||
syncbuf.Finish()
|
||||
@ -146,25 +207,55 @@ default.xml will be used.
|
||||
|
||||
try:
|
||||
self.manifest.Link(name)
|
||||
except ManifestParseError, e:
|
||||
except ManifestParseError as e:
|
||||
print >>sys.stderr, "fatal: manifest '%s' not available" % name
|
||||
print >>sys.stderr, 'fatal: %s' % str(e)
|
||||
sys.exit(1)
|
||||
|
||||
def _PromptKey(self, prompt, key, value):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
def _Prompt(self, prompt, value):
|
||||
sys.stdout.write('%-10s [%s]: ' % (prompt, value))
|
||||
a = sys.stdin.readline().strip()
|
||||
if a != '' and a != value:
|
||||
mp.config.SetString(key, a)
|
||||
if a == '':
|
||||
return value
|
||||
return a
|
||||
|
||||
def _ShouldConfigureUser(self):
|
||||
gc = self.manifest.globalConfig
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
# If we don't have local settings, get from global.
|
||||
if not mp.config.Has('user.name') or not mp.config.Has('user.email'):
|
||||
if not gc.Has('user.name') or not gc.Has('user.email'):
|
||||
return True
|
||||
|
||||
mp.config.SetString('user.name', gc.GetString('user.name'))
|
||||
mp.config.SetString('user.email', gc.GetString('user.email'))
|
||||
|
||||
print ''
|
||||
print 'Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||
mp.config.GetString('user.email'))
|
||||
print 'If you want to change this, please re-run \'repo init\' with --config-name'
|
||||
return False
|
||||
|
||||
def _ConfigureUser(self):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
print ''
|
||||
self._PromptKey('Your Name', 'user.name', mp.UserName)
|
||||
self._PromptKey('Your Email', 'user.email', mp.UserEmail)
|
||||
while True:
|
||||
print ''
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
|
||||
print ''
|
||||
print 'Your identity is: %s <%s>' % (name, email)
|
||||
sys.stdout.write('is this correct [y/N]? ')
|
||||
a = sys.stdin.readline().strip()
|
||||
if a in ('yes', 'y', 't', 'true'):
|
||||
break
|
||||
|
||||
if name != mp.UserName:
|
||||
mp.config.SetString('user.name', name)
|
||||
if email != mp.UserEmail:
|
||||
mp.config.SetString('user.email', email)
|
||||
|
||||
def _HasColorSet(self, gc):
|
||||
for n in ['ui', 'diff', 'status']:
|
||||
@ -198,24 +289,50 @@ default.xml will be used.
|
||||
out.printer(fg='black', attr=c)(' %-6s ', c)
|
||||
out.nl()
|
||||
|
||||
sys.stdout.write('Enable color display in this user account (y/n)? ')
|
||||
sys.stdout.write('Enable color display in this user account (y/N)? ')
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ('y', 'yes', 't', 'true', 'on'):
|
||||
gc.SetString('color.ui', 'auto')
|
||||
|
||||
def _ConfigureDepth(self, opt):
|
||||
"""Configure the depth we'll sync down.
|
||||
|
||||
Args:
|
||||
opt: Options from optparse. We care about opt.depth.
|
||||
"""
|
||||
# Opt.depth will be non-None if user actually passed --depth to repo init.
|
||||
if opt.depth is not None:
|
||||
if opt.depth > 0:
|
||||
# Positive values will set the depth.
|
||||
depth = str(opt.depth)
|
||||
else:
|
||||
# Negative numbers will clear the depth; passing None to SetString
|
||||
# will do that.
|
||||
depth = None
|
||||
|
||||
# We store the depth in the main manifest project.
|
||||
self.manifest.manifestProject.config.SetString('repo.depth', depth)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self._CheckGitVersion()
|
||||
git_require(MIN_GIT_VERSION, fail=True)
|
||||
|
||||
if opt.reference:
|
||||
opt.reference = os.path.expanduser(opt.reference)
|
||||
|
||||
self._SyncManifest(opt)
|
||||
self._LinkManifest(opt.manifest_name)
|
||||
|
||||
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
|
||||
self._ConfigureUser()
|
||||
if opt.config_name or self._ShouldConfigureUser():
|
||||
self._ConfigureUser()
|
||||
self._ConfigureColor()
|
||||
|
||||
self._ConfigureDepth(opt)
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
type = 'mirror '
|
||||
init_type = 'mirror '
|
||||
else:
|
||||
type = ''
|
||||
init_type = ''
|
||||
|
||||
print ''
|
||||
print 'repo %sinitialized in %s' % (type, self.manifest.topdir)
|
||||
print 'repo %sinitialized in %s' % (init_type, self.manifest.topdir)
|
||||
|
78
subcmds/list.py
Normal file
@ -0,0 +1,78 @@
|
||||
#
|
||||
# Copyright (C) 2011 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
|
||||
class List(Command, MirrorSafeCommand):
|
||||
common = True
|
||||
helpSummary = "List projects and their associated directories"
|
||||
helpUsage = """
|
||||
%prog [-f] [<project>...]
|
||||
%prog [-f] -r str1 [str2]..."
|
||||
"""
|
||||
helpDescription = """
|
||||
List all projects; pass '.' to list the project for the cwd.
|
||||
|
||||
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
"""
|
||||
|
||||
def _Options(self, p, show_smart=True):
|
||||
p.add_option('-r', '--regex',
|
||||
dest='regex', action='store_true',
|
||||
help="Filter the project list based on regex or wildcard matching of strings")
|
||||
p.add_option('-f', '--fullpath',
|
||||
dest='fullpath', action='store_true',
|
||||
help="Display the full work tree path instead of the relative path")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
"""List all projects and the associated directories.
|
||||
|
||||
This may be possible to do with 'repo forall', but repo newbies have
|
||||
trouble figuring that out. The idea here is that it should be more
|
||||
discoverable.
|
||||
|
||||
Args:
|
||||
opt: The options.
|
||||
args: Positional args. Can be a list of projects to list, or empty.
|
||||
"""
|
||||
if not opt.regex:
|
||||
projects = self.GetProjects(args)
|
||||
else:
|
||||
projects = self.FindProjects(args)
|
||||
|
||||
def _getpath(x):
|
||||
if opt.fullpath:
|
||||
return x.worktree
|
||||
return x.relpath
|
||||
|
||||
lines = []
|
||||
for project in projects:
|
||||
lines.append("%s : %s" % (_getpath(project), project.name))
|
||||
|
||||
lines.sort()
|
||||
print '\n'.join(lines)
|
||||
|
||||
def FindProjects(self, args):
|
||||
result = []
|
||||
for project in self.GetProjects(''):
|
||||
for arg in args:
|
||||
pattern = re.compile(r'%s' % arg, re.IGNORECASE)
|
||||
if pattern.search(project.name) or pattern.search(project.relpath):
|
||||
result.append(project)
|
||||
break
|
||||
result.sort(key=lambda project: project.relpath)
|
||||
return result
|
@ -35,21 +35,27 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
|
||||
@property
|
||||
def helpDescription(self):
|
||||
help = self._helpDescription + '\n'
|
||||
helptext = self._helpDescription + '\n'
|
||||
r = os.path.dirname(__file__)
|
||||
r = os.path.dirname(r)
|
||||
fd = open(os.path.join(r, 'docs', 'manifest-format.txt'))
|
||||
for line in fd:
|
||||
help += line
|
||||
helptext += line
|
||||
fd.close()
|
||||
return help
|
||||
return helptext
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-r', '--revision-as-HEAD',
|
||||
dest='peg_rev', action='store_true',
|
||||
help='Save revisions as current HEAD')
|
||||
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
|
||||
default=True, action='store_false',
|
||||
help='If in -r mode, do not write the upstream field. '
|
||||
'Only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive.')
|
||||
p.add_option('-o', '--output-file',
|
||||
dest='output_file',
|
||||
default='-',
|
||||
help='File to save the manifest to',
|
||||
metavar='-|NAME.xml')
|
||||
|
||||
@ -59,7 +65,8 @@ in a Git repository for use during future 'repo init' invocations.
|
||||
else:
|
||||
fd = open(opt.output_file, 'w')
|
||||
self.manifest.Save(fd,
|
||||
peg_rev = opt.peg_rev)
|
||||
peg_rev = opt.peg_rev,
|
||||
peg_rev_upstream = opt.peg_rev_upstream)
|
||||
fd.close()
|
||||
if opt.output_file != '-':
|
||||
print >>sys.stderr, 'Saved manifest to %s' % opt.output_file
|
||||
|
80
subcmds/overview.py
Normal file
@ -0,0 +1,80 @@
|
||||
#
|
||||
# Copyright (C) 2012 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
|
||||
|
||||
class Overview(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Display overview of unmerged project branches"
|
||||
helpUsage = """
|
||||
%prog [--current-branch] [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
The '%prog' command is used to display an overview of the projects branches,
|
||||
and list any local commits that have not yet been merged into the project.
|
||||
|
||||
The -b/--current-branch option can be used to restrict the output to only
|
||||
branches currently checked out in each project. By default, all branches
|
||||
are displayed.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-b', '--current-branch',
|
||||
dest="current_branch", action="store_true",
|
||||
help="Consider only checked out branches")
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches().keys()]
|
||||
br = [x for x in br if x]
|
||||
if opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.project('Projects Overview')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
out.project('project %s/' % project.relpath)
|
||||
out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print '%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or ' ',
|
||||
date)
|
||||
for commit in commits:
|
||||
print '%-35s - %s' % ('', commit)
|
@ -24,11 +24,11 @@ class Prune(PagedCommand):
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = []
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
all.extend(project.PruneHeads())
|
||||
all_branches.extend(project.PruneHeads())
|
||||
|
||||
if not all:
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
@ -36,13 +36,13 @@ class Prune(PagedCommand):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
|
||||
out = Report(all[0].project.config)
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.project('Pending Branches')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all:
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
|
126
subcmds/rebase.py
Normal file
@ -0,0 +1,126 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
|
||||
class Rebase(Command):
|
||||
common = True
|
||||
helpSummary = "Rebase local branches on upstream branch"
|
||||
helpUsage = """
|
||||
%prog {[<project>...] | -i <project>...}
|
||||
"""
|
||||
helpDescription = """
|
||||
'%prog' uses git rebase to move local changes in the current topic branch to
|
||||
the HEAD of the upstream history, useful when you have made commits in a topic
|
||||
branch but need to incorporate new upstream changes "underneath" them.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-i', '--interactive',
|
||||
dest="interactive", action="store_true",
|
||||
help="interactive rebase (single project only)")
|
||||
|
||||
p.add_option('-f', '--force-rebase',
|
||||
dest='force_rebase', action='store_true',
|
||||
help='Pass --force-rebase to git rebase')
|
||||
p.add_option('--no-ff',
|
||||
dest='no_ff', action='store_true',
|
||||
help='Pass --no-ff to git rebase')
|
||||
p.add_option('-q', '--quiet',
|
||||
dest='quiet', action='store_true',
|
||||
help='Pass --quiet to git rebase')
|
||||
p.add_option('--autosquash',
|
||||
dest='autosquash', action='store_true',
|
||||
help='Pass --autosquash to git rebase')
|
||||
p.add_option('--whitespace',
|
||||
dest='whitespace', action='store', metavar='WS',
|
||||
help='Pass --whitespace to git rebase')
|
||||
p.add_option('--auto-stash',
|
||||
dest='auto_stash', action='store_true',
|
||||
help='Stash local modifications before starting')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(args)
|
||||
one_project = len(all_projects) == 1
|
||||
|
||||
if opt.interactive and not one_project:
|
||||
print >>sys.stderr, 'error: interactive rebase not supported with multiple projects'
|
||||
return -1
|
||||
|
||||
for project in all_projects:
|
||||
cb = project.CurrentBranch
|
||||
if not cb:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s has a detached HEAD" % project.relpath
|
||||
return -1
|
||||
# ignore branches with detached HEADs
|
||||
continue
|
||||
|
||||
upbranch = project.GetBranch(cb)
|
||||
if not upbranch.LocalMerge:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s does not track any remote branches" % project.relpath
|
||||
return -1
|
||||
# ignore branches without remotes
|
||||
continue
|
||||
|
||||
args = ["rebase"]
|
||||
|
||||
if opt.whitespace:
|
||||
args.append('--whitespace=%s' % opt.whitespace)
|
||||
|
||||
if opt.quiet:
|
||||
args.append('--quiet')
|
||||
|
||||
if opt.force_rebase:
|
||||
args.append('--force-rebase')
|
||||
|
||||
if opt.no_ff:
|
||||
args.append('--no-ff')
|
||||
|
||||
if opt.autosquash:
|
||||
args.append('--autosquash')
|
||||
|
||||
if opt.interactive:
|
||||
args.append("-i")
|
||||
|
||||
args.append(upbranch.LocalMerge)
|
||||
|
||||
print >>sys.stderr, '# %s: rebasing %s -> %s' % \
|
||||
(project.relpath, cb, upbranch.LocalMerge)
|
||||
|
||||
needs_stash = False
|
||||
if opt.auto_stash:
|
||||
stash_args = ["update-index", "--refresh", "-q"]
|
||||
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
needs_stash = True
|
||||
# Dirty index, requires stash...
|
||||
stash_args = ["stash"]
|
||||
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
return -1
|
||||
|
||||
if GitCommand(project, args).Wait() != 0:
|
||||
return -1
|
||||
|
||||
if needs_stash:
|
||||
stash_args.append('pop')
|
||||
stash_args.append('--quiet')
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
return -1
|
@ -35,10 +35,11 @@ need to be performed by an end-user.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--no-repo-verify',
|
||||
g = p.add_option_group('repo Version options')
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
p.add_option('--repo-upgraded',
|
||||
g.add_option('--repo-upgraded',
|
||||
dest='repo_upgraded', action='store_true',
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
@ -54,6 +55,7 @@ need to be performed by an end-user.
|
||||
print >>sys.stderr, "error: can't update repo"
|
||||
sys.exit(1)
|
||||
|
||||
rp.bare_git.gc('--auto')
|
||||
_PostRepoFetch(rp,
|
||||
no_repo_verify = opt.no_repo_verify,
|
||||
verbose = True)
|
||||
|
33
subcmds/smartsync.py
Normal file
@ -0,0 +1,33 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from subcmds.sync import Sync
|
||||
|
||||
class Smartsync(Sync):
|
||||
common = True
|
||||
helpSummary = "Update working tree to the latest known good revision"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
The '%prog' command is a shortcut for sync -s.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
Sync._Options(self, p, show_smart=False)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
opt.smart_sync = True
|
||||
Sync.Execute(self, opt, args)
|
@ -48,19 +48,19 @@ The '%prog' command stages files to prepare the next commit.
|
||||
self.Usage()
|
||||
|
||||
def _Interactive(self, opt, args):
|
||||
all = filter(lambda x: x.IsDirty(), self.GetProjects(args))
|
||||
if not all:
|
||||
all_projects = filter(lambda x: x.IsDirty(), self.GetProjects(args))
|
||||
if not all_projects:
|
||||
print >>sys.stderr,'no projects have uncommitted modifications'
|
||||
return
|
||||
|
||||
out = _ProjectList(self.manifest.manifestProject.config)
|
||||
while True:
|
||||
out.header(' %-20s %s', 'project', 'path')
|
||||
out.header(' %s', 'project')
|
||||
out.nl()
|
||||
|
||||
for i in xrange(0, len(all)):
|
||||
p = all[i]
|
||||
out.write('%3d: %-20s %s', i + 1, p.name, p.relpath + '/')
|
||||
for i in xrange(0, len(all_projects)):
|
||||
p = all_projects[i]
|
||||
out.write('%3d: %s', i + 1, p.relpath + '/')
|
||||
out.nl()
|
||||
out.nl()
|
||||
|
||||
@ -93,11 +93,11 @@ The '%prog' command stages files to prepare the next commit.
|
||||
if a_index is not None:
|
||||
if a_index == 0:
|
||||
break
|
||||
if 0 < a_index and a_index <= len(all):
|
||||
_AddI(all[a_index - 1])
|
||||
if 0 < a_index and a_index <= len(all_projects):
|
||||
_AddI(all_projects[a_index - 1])
|
||||
continue
|
||||
|
||||
p = filter(lambda x: x.name == a or x.relpath == a, all)
|
||||
p = filter(lambda x: x.name == a or x.relpath == a, all_projects)
|
||||
if len(p) == 1:
|
||||
_AddI(p[0])
|
||||
continue
|
||||
|
@ -15,28 +15,25 @@
|
||||
|
||||
import sys
|
||||
from command import Command
|
||||
from git_config import IsId
|
||||
from git_command import git
|
||||
from progress import Progress
|
||||
|
||||
class Start(Command):
|
||||
common = True
|
||||
helpSummary = "Start a new branch for development"
|
||||
helpUsage = """
|
||||
%prog <newbranchname> [<project>...]
|
||||
|
||||
This subcommand starts a new branch of development that is automatically
|
||||
pulled from a remote branch.
|
||||
|
||||
It is equivalent to the following git commands:
|
||||
|
||||
"git branch --track <newbranchname> m/<codeline>",
|
||||
or
|
||||
"git checkout --track -b <newbranchname> m/<codeline>".
|
||||
|
||||
All three forms set up the config entries that repo bases some of its
|
||||
processing on. Use %prog or git branch or checkout with --track to ensure
|
||||
the configuration data is set up properly.
|
||||
|
||||
%prog <newbranchname> [--all | <project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
'%prog' begins a new branch of development, starting from the
|
||||
revision specified in the manifest.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--all',
|
||||
dest='all', action='store_true',
|
||||
help='begin branch in all projects')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not args:
|
||||
@ -48,12 +45,29 @@ the configuration data is set up properly.
|
||||
sys.exit(1)
|
||||
|
||||
err = []
|
||||
for project in self.GetProjects(args[1:]):
|
||||
projects = []
|
||||
if not opt.all:
|
||||
projects = args[1:]
|
||||
if len(projects) < 1:
|
||||
print >>sys.stderr, "error: at least one project must be specified"
|
||||
sys.exit(1)
|
||||
|
||||
all_projects = self.GetProjects(projects)
|
||||
|
||||
pm = Progress('Starting %s' % nb, len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
# If the current revision is a specific SHA1 then we can't push back
|
||||
# to it so substitute the manifest default revision instead.
|
||||
if IsId(project.revisionExpr):
|
||||
project.revisionExpr = self.manifest.default.revisionExpr
|
||||
if not project.StartBranch(nb):
|
||||
err.append(project)
|
||||
pm.end()
|
||||
|
||||
if err:
|
||||
err.sort()
|
||||
for p in err:
|
||||
print >>sys.stderr, "error: cannot start in %s" % p.relpath
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot start %s" \
|
||||
% (p.relpath, nb)
|
||||
sys.exit(1)
|
||||
|
@ -15,6 +15,15 @@
|
||||
|
||||
from command import PagedCommand
|
||||
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
import itertools
|
||||
import sys
|
||||
import StringIO
|
||||
|
||||
class Status(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Show the working tree status"
|
||||
@ -27,6 +36,9 @@ and the most recent commit on this branch (HEAD), in each project
|
||||
specified. A summary is displayed, one line per file where there
|
||||
is a difference between these three states.
|
||||
|
||||
The -j/--jobs option can be used to run multiple status queries
|
||||
in parallel.
|
||||
|
||||
Status Display
|
||||
--------------
|
||||
|
||||
@ -60,13 +72,61 @@ the following meanings:
|
||||
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = self.GetProjects(args)
|
||||
clean = 0
|
||||
def _Options(self, p):
|
||||
p.add_option('-j', '--jobs',
|
||||
dest='jobs', action='store', type='int', default=2,
|
||||
help="number of projects to check simultaneously")
|
||||
|
||||
for project in all:
|
||||
state = project.PrintWorkTreeStatus()
|
||||
def _StatusHelper(self, project, clean_counter, sem, output):
|
||||
"""Obtains the status for a specific project.
|
||||
|
||||
Obtains the status for a project, redirecting the output to
|
||||
the specified object. It will release the semaphore
|
||||
when done.
|
||||
|
||||
Args:
|
||||
project: Project to get status of.
|
||||
clean_counter: Counter for clean projects.
|
||||
sem: Semaphore, will call release() when complete.
|
||||
output: Where to output the status.
|
||||
"""
|
||||
try:
|
||||
state = project.PrintWorkTreeStatus(output)
|
||||
if state == 'CLEAN':
|
||||
clean += 1
|
||||
if len(all) == clean:
|
||||
clean_counter.next()
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(args)
|
||||
counter = itertools.count()
|
||||
|
||||
if opt.jobs == 1:
|
||||
for project in all_projects:
|
||||
state = project.PrintWorkTreeStatus()
|
||||
if state == 'CLEAN':
|
||||
counter.next()
|
||||
else:
|
||||
sem = _threading.Semaphore(opt.jobs)
|
||||
threads_and_output = []
|
||||
for project in all_projects:
|
||||
sem.acquire()
|
||||
|
||||
class BufList(StringIO.StringIO):
|
||||
def dump(self, ostream):
|
||||
for entry in self.buflist:
|
||||
ostream.write(entry)
|
||||
|
||||
output = BufList()
|
||||
|
||||
t = _threading.Thread(target=self._StatusHelper,
|
||||
args=(project, counter, sem, output))
|
||||
threads_and_output.append((t, output))
|
||||
t.daemon = True
|
||||
t.start()
|
||||
for (t, output) in threads_and_output:
|
||||
t.join()
|
||||
output.dump(sys.stdout)
|
||||
output.close()
|
||||
if len(all_projects) == counter.next():
|
||||
print 'nothing to commit (working directory clean)'
|
||||
|
608
subcmds/sync.py
@ -13,21 +13,55 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import netrc
|
||||
from optparse import SUPPRESS_HELP
|
||||
import os
|
||||
import pickle
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import urlparse
|
||||
import xmlrpclib
|
||||
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
try:
|
||||
import resource
|
||||
def _rlimit_nofile():
|
||||
return resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
except ImportError:
|
||||
def _rlimit_nofile():
|
||||
return (256, 256)
|
||||
|
||||
try:
|
||||
import multiprocessing
|
||||
except ImportError:
|
||||
multiprocessing = None
|
||||
|
||||
from git_command import GIT
|
||||
from project import HEAD
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from main import WrapperModule
|
||||
from project import Project
|
||||
from project import RemoteSpec
|
||||
from command import Command, MirrorSafeCommand
|
||||
from error import RepoChangedException, GitError
|
||||
from project import R_HEADS
|
||||
from project import SyncBuffer
|
||||
from progress import Progress
|
||||
|
||||
_ONE_DAY_S = 24 * 60 * 60
|
||||
|
||||
class _FetchError(Exception):
|
||||
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
|
||||
pass
|
||||
|
||||
class Sync(Command, MirrorSafeCommand):
|
||||
jobs = 1
|
||||
common = True
|
||||
helpSummary = "Update working tree to the latest revision"
|
||||
helpUsage = """
|
||||
@ -52,9 +86,69 @@ The -d/--detach option can be used to switch specified projects
|
||||
back to the manifest revision. This option is especially helpful
|
||||
if the project is currently on a topic branch, but the manifest
|
||||
revision is temporarily needed.
|
||||
|
||||
The -s/--smart-sync option can be used to sync to a known good
|
||||
build as specified by the manifest-server element in the current
|
||||
manifest. The -t/--smart-tag option is similar and allows you to
|
||||
specify a custom tag/label.
|
||||
|
||||
The -u/--manifest-server-username and -p/--manifest-server-password
|
||||
options can be used to specify a username and password to authenticate
|
||||
with the manifest server when using the -s or -t option.
|
||||
|
||||
If -u and -p are not specified when using the -s or -t option, '%prog'
|
||||
will attempt to read authentication credentials for the manifest server
|
||||
from the user's .netrc file.
|
||||
|
||||
'%prog' will not use authentication credentials from -u/-p or .netrc
|
||||
if the manifest server specified in the manifest file already includes
|
||||
credentials.
|
||||
|
||||
The -f/--force-broken option can be used to proceed with syncing
|
||||
other projects if a project sync fails.
|
||||
|
||||
The --no-clone-bundle option disables any attempt to use
|
||||
$URL/clone.bundle to bootstrap a new Git repository from a
|
||||
resumable bundle file on a content delivery network. This
|
||||
may be necessary if there are problems with the local Python
|
||||
HTTP client or proxy configuration, but the Git binary works.
|
||||
|
||||
SSH Connections
|
||||
---------------
|
||||
|
||||
If at least one project remote URL uses an SSH connection (ssh://,
|
||||
git+ssh://, or user@host:path syntax) repo will automatically
|
||||
enable the SSH ControlMaster option when connecting to that host.
|
||||
This feature permits other projects in the same '%prog' session to
|
||||
reuse the same SSH tunnel, saving connection setup overheads.
|
||||
|
||||
To disable this behavior on UNIX platforms, set the GIT_SSH
|
||||
environment variable to 'ssh'. For example:
|
||||
|
||||
export GIT_SSH=ssh
|
||||
%prog
|
||||
|
||||
Compatibility
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
This feature is automatically disabled on Windows, due to the lack
|
||||
of UNIX domain socket support.
|
||||
|
||||
This feature is not compatible with url.insteadof rewrites in the
|
||||
user's ~/.gitconfig. '%prog' is currently not able to perform the
|
||||
rewrite early enough to establish the ControlMaster tunnel.
|
||||
|
||||
If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
|
||||
later is required to fix a server side protocol bug.
|
||||
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
def _Options(self, p, show_smart=True):
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
|
||||
p.add_option('-f', '--force-broken',
|
||||
dest='force_broken', action='store_true',
|
||||
help="continue sync even if a project fails to sync")
|
||||
p.add_option('-l','--local-only',
|
||||
dest='local_only', action='store_true',
|
||||
help="only update working tree, don't fetch")
|
||||
@ -64,35 +158,378 @@ revision is temporarily needed.
|
||||
p.add_option('-d','--detach',
|
||||
dest='detach_head', action='store_true',
|
||||
help='detach projects back to manifest revision')
|
||||
p.add_option('-c','--current-branch',
|
||||
dest='current_branch_only', action='store_true',
|
||||
help='fetch only current branch from server')
|
||||
p.add_option('-q','--quiet',
|
||||
dest='quiet', action='store_true',
|
||||
help='be more quiet')
|
||||
p.add_option('-j','--jobs',
|
||||
dest='jobs', action='store', type='int',
|
||||
help="projects to fetch simultaneously (default %d)" % self.jobs)
|
||||
p.add_option('-m', '--manifest-name',
|
||||
dest='manifest_name',
|
||||
help='temporary manifest to use for this sync', metavar='NAME.xml')
|
||||
p.add_option('--no-clone-bundle',
|
||||
dest='no_clone_bundle', action='store_true',
|
||||
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||
if show_smart:
|
||||
p.add_option('-s', '--smart-sync',
|
||||
dest='smart_sync', action='store_true',
|
||||
help='smart sync using manifest from a known good build')
|
||||
p.add_option('-t', '--smart-tag',
|
||||
dest='smart_tag', action='store',
|
||||
help='smart sync using manifest from a known tag')
|
||||
p.add_option('-u', '--manifest-server-username', action='store',
|
||||
dest='manifest_server_username',
|
||||
help='username to authenticate with the manifest server')
|
||||
p.add_option('-p', '--manifest-server-password', action='store',
|
||||
dest='manifest_server_password',
|
||||
help='password to authenticate with the manifest server')
|
||||
|
||||
p.add_option('--no-repo-verify',
|
||||
g = p.add_option_group('repo Version options')
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='no_repo_verify', action='store_true',
|
||||
help='do not verify repo source code')
|
||||
p.add_option('--repo-upgraded',
|
||||
g.add_option('--repo-upgraded',
|
||||
dest='repo_upgraded', action='store_true',
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
def _Fetch(self, *projects):
|
||||
def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
|
||||
"""Main function of the fetch threads when jobs are > 1.
|
||||
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
project: Project object for the project to fetch.
|
||||
lock: Lock for accessing objects that are shared amongst multiple
|
||||
_FetchHelper() threads.
|
||||
fetched: set object that we will add project.gitdir to when we're done
|
||||
(with our lock held).
|
||||
pm: Instance of a Project object. We will call pm.update() (with our
|
||||
lock held).
|
||||
sem: We'll release() this semaphore when we exit so that another thread
|
||||
can be started up.
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
"""
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
|
||||
# Encapsulate everything in a try/except/finally so that:
|
||||
# - We always set err_event in the case of an exception.
|
||||
# - We always make sure we call sem.release().
|
||||
# - We always make sure we unlock the lock if we locked it.
|
||||
try:
|
||||
try:
|
||||
start = time.time()
|
||||
success = project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
# and Progress.update() are not thread safe.
|
||||
lock.acquire()
|
||||
did_lock = True
|
||||
|
||||
if not success:
|
||||
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
|
||||
if opt.force_broken:
|
||||
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
|
||||
else:
|
||||
raise _FetchError()
|
||||
|
||||
fetched.add(project.gitdir)
|
||||
pm.update()
|
||||
except _FetchError:
|
||||
err_event.set()
|
||||
except:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
if did_lock:
|
||||
lock.release()
|
||||
sem.release()
|
||||
|
||||
def _Fetch(self, projects, opt):
|
||||
fetched = set()
|
||||
pm = Progress('Fetching projects', len(projects))
|
||||
for project in projects:
|
||||
pm.update()
|
||||
|
||||
if project.Sync_NetworkHalf():
|
||||
fetched.add(project.gitdir)
|
||||
else:
|
||||
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
|
||||
if self.jobs == 1:
|
||||
for project in projects:
|
||||
pm.update()
|
||||
if project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle):
|
||||
fetched.add(project.gitdir)
|
||||
else:
|
||||
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
|
||||
if opt.force_broken:
|
||||
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
|
||||
else:
|
||||
sys.exit(1)
|
||||
else:
|
||||
threads = set()
|
||||
lock = _threading.Lock()
|
||||
sem = _threading.Semaphore(self.jobs)
|
||||
err_event = _threading.Event()
|
||||
for project in projects:
|
||||
# Check for any errors before starting any new threads.
|
||||
# ...we'll let existing threads finish, though.
|
||||
if err_event.isSet():
|
||||
break
|
||||
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target = self._FetchHelper,
|
||||
args = (opt,
|
||||
project,
|
||||
lock,
|
||||
fetched,
|
||||
pm,
|
||||
sem,
|
||||
err_event))
|
||||
# Ensure that Ctrl-C will not freeze the repo process.
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
t.start()
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print >>sys.stderr, '\nerror: Exited sync due to fetch errors'
|
||||
sys.exit(1)
|
||||
|
||||
pm.end()
|
||||
self._fetch_times.Save()
|
||||
|
||||
self._GCProjects(projects)
|
||||
return fetched
|
||||
|
||||
def _GCProjects(self, projects):
|
||||
if multiprocessing:
|
||||
cpu_count = multiprocessing.cpu_count()
|
||||
else:
|
||||
cpu_count = 1
|
||||
jobs = min(self.jobs, cpu_count)
|
||||
|
||||
if jobs < 2:
|
||||
for project in projects:
|
||||
project.bare_git.gc('--auto')
|
||||
return
|
||||
|
||||
config = {'pack.threads': cpu_count / jobs if cpu_count > jobs else 1}
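# Illustration, not part of the change above: with 8 CPUs and self.jobs
# capped at 4, each of the 4 concurrent `git gc --auto` calls runs with
# pack.threads = 8 / 4 = 2; once jobs >= cpu_count every gc is limited to
# a single packing thread, so the parallel passes do not oversubscribe
# the machine.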
|
||||
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(jobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
def GC(project):
|
||||
try:
|
||||
try:
|
||||
project.bare_git.gc('--auto', config=config)
|
||||
except GitError:
|
||||
err_event.set()
|
||||
except:
|
||||
err_event.set()
|
||||
raise
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
for project in projects:
|
||||
if err_event.isSet():
|
||||
break
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target=GC, args=(project,))
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
t.start()
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
if err_event.isSet():
|
||||
print >>sys.stderr, '\nerror: Exited sync due to gc errors'
|
||||
sys.exit(1)
|
||||
|
||||
def UpdateProjectList(self):
|
||||
new_project_paths = []
|
||||
for project in self.GetProjects(None, missing_ok=True):
|
||||
if project.relpath:
|
||||
new_project_paths.append(project.relpath)
|
||||
file_name = 'project.list'
|
||||
file_path = os.path.join(self.manifest.repodir, file_name)
|
||||
old_project_paths = []
|
||||
|
||||
if os.path.exists(file_path):
|
||||
fd = open(file_path, 'r')
|
||||
try:
|
||||
old_project_paths = fd.read().split('\n')
|
||||
finally:
|
||||
fd.close()
|
||||
for path in old_project_paths:
|
||||
if not path:
|
||||
continue
|
||||
if path not in new_project_paths:
|
||||
# If the path has already been deleted, we don't need to do it
|
||||
if os.path.exists(self.manifest.topdir + '/' + path):
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = os.path.join(self.manifest.topdir,
|
||||
path, '.git'),
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
revisionId = None,
|
||||
groups = None)
|
||||
|
||||
if project.IsDirty():
|
||||
print >>sys.stderr, 'error: Cannot remove project "%s": \
|
||||
uncommitted changes are present' % project.relpath
|
||||
print >>sys.stderr, ' commit changes, then run sync again'
|
||||
return -1
|
||||
else:
|
||||
print >>sys.stderr, 'Deleting obsolete path %s' % project.worktree
|
||||
shutil.rmtree(project.worktree)
|
||||
# Try deleting parent subdirs if they are empty
|
||||
project_dir = os.path.dirname(project.worktree)
|
||||
while project_dir != self.manifest.topdir:
|
||||
try:
|
||||
os.rmdir(project_dir)
|
||||
except OSError:
|
||||
break
|
||||
project_dir = os.path.dirname(project_dir)
|
||||
|
||||
new_project_paths.sort()
|
||||
fd = open(file_path, 'w')
|
||||
try:
|
||||
fd.write('\n'.join(new_project_paths))
|
||||
fd.write('\n')
|
||||
finally:
|
||||
fd.close()
|
||||
return 0
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if opt.jobs:
|
||||
self.jobs = opt.jobs
|
||||
if self.jobs > 1:
|
||||
soft_limit, _ = _rlimit_nofile()
|
||||
self.jobs = min(self.jobs, (soft_limit - 5) / 3)
|
||||
|
||||
if opt.network_only and opt.detach_head:
|
||||
print >>sys.stderr, 'error: cannot combine -n and -d'
|
||||
sys.exit(1)
|
||||
if opt.network_only and opt.local_only:
|
||||
print >>sys.stderr, 'error: cannot combine -n and -l'
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_sync:
|
||||
print >>sys.stderr, 'error: cannot combine -m and -s'
|
||||
sys.exit(1)
|
||||
if opt.manifest_name and opt.smart_tag:
|
||||
print >>sys.stderr, 'error: cannot combine -m and -t'
|
||||
sys.exit(1)
|
||||
if opt.manifest_server_username or opt.manifest_server_password:
|
||||
if not (opt.smart_sync or opt.smart_tag):
|
||||
print >>sys.stderr, 'error: -u and -p may only be combined with ' \
|
||||
'-s or -t'
|
||||
sys.exit(1)
|
||||
if None in [opt.manifest_server_username, opt.manifest_server_password]:
|
||||
print >>sys.stderr, 'error: both -u and -p must be given'
|
||||
sys.exit(1)
|
||||
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name)
|
||||
|
||||
if opt.smart_sync or opt.smart_tag:
|
||||
if not self.manifest.manifest_server:
|
||||
print >>sys.stderr, \
|
||||
'error: cannot smart sync: no manifest server defined in manifest'
|
||||
sys.exit(1)
|
||||
|
||||
manifest_server = self.manifest.manifest_server
|
||||
|
||||
if not '@' in manifest_server:
|
||||
username = None
|
||||
password = None
|
||||
if opt.manifest_server_username and opt.manifest_server_password:
|
||||
username = opt.manifest_server_username
|
||||
password = opt.manifest_server_password
|
||||
else:
|
||||
try:
|
||||
info = netrc.netrc()
|
||||
except IOError:
|
||||
print >>sys.stderr, '.netrc file does not exist or could not be opened'
|
||||
else:
|
||||
try:
|
||||
parse_result = urlparse.urlparse(manifest_server)
|
||||
if parse_result.hostname:
|
||||
username, _account, password = \
|
||||
info.authenticators(parse_result.hostname)
|
||||
except TypeError:
|
||||
# TypeError is raised when the given hostname is not present
|
||||
# in the .netrc file.
|
||||
print >>sys.stderr, 'No credentials found for %s in .netrc' % \
|
||||
parse_result.hostname
|
||||
except netrc.NetrcParseError as e:
|
||||
print >>sys.stderr, 'Error parsing .netrc file: %s' % e
|
||||
|
||||
if (username and password):
|
||||
manifest_server = manifest_server.replace('://', '://%s:%s@' %
|
||||
(username, password),
|
||||
1)
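# Illustration, not part of the change above: with username 'builder' and
# password 's3cret' (hypothetical values), a manifest server URL such as
#   http://example.com/manifestserver
# is rewritten, at the first '://' only, to
#   http://builder:s3cret@example.com/manifestserver
# before xmlrpclib.Server() is constructed below.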
|
||||
|
||||
try:
|
||||
server = xmlrpclib.Server(manifest_server)
|
||||
if opt.smart_sync:
|
||||
p = self.manifest.manifestProject
|
||||
b = p.GetBranch(p.CurrentBranch)
|
||||
branch = b.merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
|
||||
env = os.environ.copy()
|
||||
if (env.has_key('TARGET_PRODUCT') and
|
||||
env.has_key('TARGET_BUILD_VARIANT')):
|
||||
target = '%s-%s' % (env['TARGET_PRODUCT'],
|
||||
env['TARGET_BUILD_VARIANT'])
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch, target)
|
||||
else:
|
||||
[success, manifest_str] = server.GetApprovedManifest(branch)
|
||||
else:
|
||||
assert(opt.smart_tag)
|
||||
[success, manifest_str] = server.GetManifest(opt.smart_tag)
|
||||
|
||||
if success:
|
||||
manifest_name = "smart_sync_override.xml"
|
||||
manifest_path = os.path.join(self.manifest.manifestProject.worktree,
|
||||
manifest_name)
|
||||
try:
|
||||
f = open(manifest_path, 'w')
|
||||
try:
|
||||
f.write(manifest_str)
|
||||
finally:
|
||||
f.close()
|
||||
except IOError:
|
||||
print >>sys.stderr, 'error: cannot write manifest to %s' % \
|
||||
manifest_path
|
||||
sys.exit(1)
|
||||
self.manifest.Override(manifest_name)
|
||||
else:
|
||||
print >>sys.stderr, 'error: %s' % manifest_str
|
||||
sys.exit(1)
|
||||
except (socket.error, IOError, xmlrpclib.Fault) as e:
|
||||
print >>sys.stderr, 'error: cannot connect to manifest server %s:\n%s' % (
|
||||
self.manifest.manifest_server, e)
|
||||
sys.exit(1)
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
print >>sys.stderr, 'error: cannot connect to manifest server %s:\n%d %s' % (
|
||||
self.manifest.manifest_server, e.errcode, e.errmsg)
|
||||
sys.exit(1)
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
@ -101,35 +538,67 @@ revision is temporarily needed.
|
||||
mp.PreSync()
|
||||
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest)
|
||||
|
||||
all = self.GetProjects(args, missing_ok=True)
|
||||
_PostRepoUpgrade(self.manifest, opt)
|
||||
|
||||
if not opt.local_only:
|
||||
fetched = self._Fetch(rp, mp, *all)
|
||||
mp.Sync_NetworkHalf(quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only)
|
||||
|
||||
if mp.HasChanges:
|
||||
syncbuf = SyncBuffer(mp.config)
|
||||
mp.Sync_LocalHalf(syncbuf)
|
||||
if not syncbuf.Finish():
|
||||
sys.exit(1)
|
||||
self.manifest._Unload()
|
||||
if opt.jobs is None:
|
||||
self.jobs = self.manifest.default.sync_j
|
||||
all_projects = self.GetProjects(args, missing_ok=True)
|
||||
|
||||
self._fetch_times = _FetchTimes(self.manifest)
|
||||
if not opt.local_only:
|
||||
to_fetch = []
|
||||
now = time.time()
|
||||
if _ONE_DAY_S <= (now - rp.LastFetch):
|
||||
to_fetch.append(rp)
|
||||
to_fetch.extend(all_projects)
|
||||
to_fetch.sort(key=self._fetch_times.Get, reverse=True)
|
||||
|
||||
fetched = self._Fetch(to_fetch, opt)
|
||||
_PostRepoFetch(rp, opt.no_repo_verify)
|
||||
if opt.network_only:
|
||||
# bail out now; the rest touches the working tree
|
||||
return
|
||||
|
||||
if mp.HasChanges:
|
||||
syncbuf = SyncBuffer(mp.config)
|
||||
mp.Sync_LocalHalf(syncbuf)
|
||||
if not syncbuf.Finish():
|
||||
sys.exit(1)
|
||||
|
||||
# Iteratively fetch missing and/or nested unregistered submodules
|
||||
previously_missing_set = set()
|
||||
while True:
|
||||
self.manifest._Unload()
|
||||
all = self.GetProjects(args, missing_ok=True)
|
||||
all_projects = self.GetProjects(args, missing_ok=True)
|
||||
missing = []
|
||||
for project in all:
|
||||
for project in all_projects:
|
||||
if project.gitdir not in fetched:
|
||||
missing.append(project)
|
||||
self._Fetch(*missing)
|
||||
if not missing:
|
||||
break
|
||||
# Stop us from non-stopped fetching actually-missing repos: If set of
|
||||
# missing repos has not been changed from last fetch, we break.
|
||||
missing_set = set(p.name for p in missing)
|
||||
if previously_missing_set == missing_set:
|
||||
break
|
||||
previously_missing_set = missing_set
|
||||
fetched.update(self._Fetch(missing, opt))
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
# bail out now, we have no working tree
|
||||
return
|
||||
|
||||
if self.UpdateProjectList():
|
||||
sys.exit(1)
|
||||
|
||||
syncbuf = SyncBuffer(mp.config,
|
||||
detach_head = opt.detach_head)
|
||||
pm = Progress('Syncing work tree', len(all))
|
||||
for project in all:
|
||||
pm = Progress('Syncing work tree', len(all_projects))
|
||||
for project in all_projects:
|
||||
pm.update()
|
||||
if project.worktree:
|
||||
project.Sync_LocalHalf(syncbuf)
|
||||
@ -138,8 +607,15 @@ revision is temporarily needed.
|
||||
if not syncbuf.Finish():
|
||||
sys.exit(1)
|
||||
|
||||
# If there's a notice that's supposed to print at the end of the sync, print
|
||||
# it now...
|
||||
if self.manifest.notice:
|
||||
print self.manifest.notice
|
||||
|
||||
def _PostRepoUpgrade(manifest):
|
||||
def _PostRepoUpgrade(manifest, opt):
|
||||
wrapper = WrapperModule()
|
||||
if wrapper.NeedSetupGnuPG():
|
||||
wrapper.SetupGnuPG(opt.quiet)
|
||||
for project in manifest.projects.values():
|
||||
if project.Exists:
|
||||
project.PostRepoUpgrade()
|
||||
@ -169,17 +645,14 @@ def _VerifyTag(project):
|
||||
warning: Cannot automatically authenticate repo."""
|
||||
return True
|
||||
|
||||
remote = project.GetRemote(project.remote.name)
|
||||
ref = remote.ToLocal(project.revision)
|
||||
|
||||
try:
|
||||
cur = project.bare_git.describe(ref)
|
||||
cur = project.bare_git.describe(project.GetRevisionId())
|
||||
except GitError:
|
||||
cur = None
|
||||
|
||||
if not cur \
|
||||
or re.compile(r'^.*-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur):
|
||||
rev = project.revision
|
||||
rev = project.revisionExpr
|
||||
if rev.startswith(R_HEADS):
|
||||
rev = rev[len(R_HEADS):]
|
||||
|
||||
@ -189,9 +662,9 @@ warning: Cannot automatically authenticate repo."""
|
||||
% (project.name, rev)
|
||||
return False
|
||||
|
||||
env = dict(os.environ)
|
||||
env['GIT_DIR'] = project.gitdir
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
env = os.environ.copy()
|
||||
env['GIT_DIR'] = project.gitdir.encode()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
@ -211,3 +684,66 @@ warning: Cannot automatically authenticate repo."""
|
||||
print >>sys.stderr
|
||||
return False
|
||||
return True
|
||||
|
||||
class _FetchTimes(object):
|
||||
_ALPHA = 0.5
|
||||
|
||||
def __init__(self, manifest):
|
||||
self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
|
||||
self._times = None
|
||||
self._seen = set()
|
||||
|
||||
def Get(self, project):
|
||||
self._Load()
|
||||
return self._times.get(project.name, _ONE_DAY_S)
|
||||
|
||||
def Set(self, project, t):
|
||||
self._Load()
|
||||
name = project.name
|
||||
old = self._times.get(name, t)
|
||||
self._seen.add(name)
|
||||
a = self._ALPHA
|
||||
self._times[name] = (a*t) + ((1-a) * old)
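# Illustration, not part of the change above: with _ALPHA = 0.5 this is an
# exponential moving average.  If the previous estimate was 60s and the
# latest fetch took 20s, the stored value becomes 0.5*20 + 0.5*60 = 40s,
# so a single unusually fast or slow fetch only moves the slowest-first
# ordering used by _Fetch() halfway.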
|
||||
|
||||
def _Load(self):
|
||||
if self._times is None:
|
||||
try:
|
||||
f = open(self._path)
|
||||
except IOError:
|
||||
self._times = {}
|
||||
return self._times
|
||||
try:
|
||||
try:
|
||||
self._times = pickle.load(f)
|
||||
except:
|
||||
try:
|
||||
os.remove(self._path)
|
||||
except OSError:
|
||||
pass
|
||||
self._times = {}
|
||||
finally:
|
||||
f.close()
|
||||
return self._times
|
||||
|
||||
def Save(self):
|
||||
if self._times is None:
|
||||
return
|
||||
|
||||
to_delete = []
|
||||
for name in self._times:
|
||||
if name not in self._seen:
|
||||
to_delete.append(name)
|
||||
for name in to_delete:
|
||||
del self._times[name]
|
||||
|
||||
try:
|
||||
f = open(self._path, 'wb')
|
||||
try:
|
||||
pickle.dump(self._times, f)
|
||||
except (IOError, OSError, pickle.PickleError):
|
||||
try:
|
||||
os.remove(self._path)
|
||||
except OSError:
|
||||
pass
|
||||
finally:
|
||||
f.close()
|
||||
|
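The new _FetchTimes class above persists per-project fetch durations (pickled into .repopickle_fetchtimes) and smooths them with an exponential moving average, new = _ALPHA * t + (1 - _ALPHA) * old, seeded by the first sample. A small self-contained sketch of that update rule with illustrative numbers (not part of the diff):

    ALPHA = 0.5  # same weight as _FetchTimes._ALPHA

    def update(old, t, a=ALPHA):
        # 'old' is the stored estimate, 't' the latest fetch time in seconds.
        return (a * t) + ((1 - a) * old)

    estimate = 40.0  # the first sample seeds the estimate (old defaults to t)
    for sample in (20.0, 10.0, 10.0):
        estimate = update(estimate, sample)
        print('sample %5.1fs -> smoothed %6.2fs' % (sample, estimate))
    # 30.00s, 20.00s, 15.00s: recent fetches dominate, old spikes decay quickly.
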
@@ -13,12 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import re
import sys

from command import InteractiveCommand
from editor import Editor
from error import UploadError
from error import HookError, UploadError
from project import RepoHook

UNUSUAL_COMMIT_THRESHOLD = 5

def _ConfirmManyUploads(multiple_branches=False):
  if multiple_branches:
    print "ATTENTION: One or more branches has an unusually high number of commits."
  else:
    print "ATTENTION: You are uploading an unusually high number of commits."
  print "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across branches?)"
  answer = raw_input("If you are sure you intend to do this, type 'yes': ").strip()
  return answer == "yes"

def _die(fmt, *args):
  msg = fmt % args
@@ -27,76 +40,165 @@ def _die(fmt, *args):

def _SplitEmails(values):
  result = []
  for str in values:
    result.extend([s.strip() for s in str.split(',')])
  for value in values:
    result.extend([s.strip() for s in value.split(',')])
  return result

class Upload(InteractiveCommand):
  common = True
  helpSummary = "Upload changes for code review"
  helpUsage="""
%prog [--re --cc] {[<project>]... | --replace <project>}
%prog [--re --cc] [<project>]...
"""
  helpDescription = """
The '%prog' command is used to send changes to the Gerrit code
review system. It searches for changes in local projects that do
not yet exist in the corresponding remote repository. If multiple
changes are found, '%prog' opens an editor to allow the
user to choose which change to upload. After a successful upload,
repo prints the URL for the change in the Gerrit code review system.
The '%prog' command is used to send changes to the Gerrit Code
Review system. It searches for topic branches in local projects
that have not yet been published for review. If multiple topic
branches are found, '%prog' opens an editor to allow the user to
select which branches to upload.

'%prog' searches for uploadable changes in all projects listed
at the command line. Projects can be specified either by name, or
by a relative or absolute path to the project's local directory. If
no projects are specified, '%prog' will search for uploadable
changes in all projects listed in the manifest.
'%prog' searches for uploadable changes in all projects listed at
the command line. Projects can be specified either by name, or by
a relative or absolute path to the project's local directory. If no
projects are specified, '%prog' will search for uploadable changes
in all projects listed in the manifest.

If the --reviewers or --cc options are passed, those emails are
added to the respective list of users, and emails are sent to any
new users. Users passed to --reviewers must be already registered
new users. Users passed as --reviewers must already be registered
with the code review system, or the upload will fail.

If the --replace option is passed the user can designate which
existing change(s) in Gerrit match up to the commits in the branch
being uploaded. For each matched pair of change,commit the commit
will be added as a new patch set, completely replacing the set of
files and description associated with the change in Gerrit.
Configuration
-------------

review.URL.autoupload:

To disable the "Upload ... (y/N)?" prompt, you can set a per-project
or global Git configuration option. If review.URL.autoupload is set
to "true" then repo will assume you always answer "y" at the prompt,
and will not prompt you further. If it is set to "false" then repo
will assume you always answer "n", and will abort.

review.URL.autocopy:

To automatically copy a user or mailing list to all uploaded reviews,
you can set a per-project or global Git option to do so. Specifically,
review.URL.autocopy can be set to a comma separated list of reviewers
who you always want copied on all uploads with a non-empty --re
argument.

review.URL.username:

Override the username used to connect to Gerrit Code Review.
By default the local part of the email address is used.

The URL must match the review URL listed in the manifest XML file,
or in the .git/config within the project. For example:

  [remote "origin"]
    url = git://git.example.com/project.git
    review = http://review.example.com/

  [review "http://review.example.com/"]
    autoupload = true
    autocopy = johndoe@company.com,my-team-alias@company.com

review.URL.uploadtopic:

To add a topic branch whenever uploading a commit, you can set a
per-project or global Git option to do so. If review.URL.uploadtopic
is set to "true" then repo will assume you always want the equivalent
of the -t option to the repo command. If unset or set to "false" then
repo will make use of only the command line option.

References
----------

Gerrit Code Review: http://code.google.com/p/gerrit/

"""
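review.URL.autoupload described above is read as a tri-state boolean: unset means prompt, "true" means always upload, "false" means refuse. A hedged sketch of that decision logic, with a plain boolean-or-None value standing in for repo's GitConfig lookup (names here are illustrative, not repo APIs):

    def should_upload(autoupload, prompt):
        # autoupload: True, False, or None when review.URL.autoupload is unset.
        if autoupload is None:
            return prompt()      # interactive "Upload ... (y/N)?" question
        if autoupload:
            return True          # autoupload = true: never prompt
        raise SystemExit('upload blocked by review.URL.autoupload = false')

    # Unset key falls back to the prompt; an explicit true skips it.
    print(should_upload(None, prompt=lambda: True))   # True
    print(should_upload(True, prompt=lambda: False))  # True
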

  def _Options(self, p):
    p.add_option('--replace',
                 dest='replace', action='store_true',
                 help='Upload replacement patchesets from this branch')
    p.add_option('-t',
                 dest='auto_topic', action='store_true',
                 help='Send local branch name to Gerrit Code Review')
    p.add_option('--re', '--reviewers',
                 type='string', action='append', dest='reviewers',
                 help='Request reviews from these people.')
    p.add_option('--cc',
                 type='string', action='append', dest='cc',
                 help='Also send email to these email addresses.')
    p.add_option('--br',
                 type='string', action='store', dest='branch',
                 help='Branch to upload.')
    p.add_option('--cbr', '--current-branch',
                 dest='current_branch', action='store_true',
                 help='Upload current git branch.')
    p.add_option('-d', '--draft',
                 action='store_true', dest='draft', default=False,
                 help='If specified, upload as a draft.')

  def _SingleBranch(self, branch, people):
    # Options relating to upload hook. Note that verify and no-verify are NOT
    # opposites of each other, which is why they store to different locations.
    # We are using them to match 'git commit' syntax.
    #
    # Combinations:
    # - no-verify=False, verify=False (DEFAULT):
    #   If stdout is a tty, can prompt about running upload hooks if needed.
    #   If user denies running hooks, the upload is cancelled. If stdout is
    #   not a tty and we would need to prompt about upload hooks, upload is
    #   cancelled.
    # - no-verify=False, verify=True:
    #   Always run upload hooks with no prompt.
    # - no-verify=True, verify=False:
    #   Never run upload hooks, but upload anyway (AKA bypass hooks).
    # - no-verify=True, verify=True:
    #   Invalid
    p.add_option('--no-verify',
                 dest='bypass_hooks', action='store_true',
                 help='Do not run the upload hook.')
    p.add_option('--verify',
                 dest='allow_all_hooks', action='store_true',
                 help='Run the upload hook without prompting.')

  def _SingleBranch(self, opt, branch, people):
    project = branch.project
    name = branch.name
    date = branch.date
    list = branch.commits
    remote = project.GetBranch(name).remote

    print 'Upload project %s/:' % project.relpath
    print ' branch %s (%2d commit%s, %s):' % (
          name,
          len(list),
          len(list) != 1 and 's' or '',
          date)
    for commit in list:
      print ' %s' % commit
    key = 'review.%s.autoupload' % remote.review
    answer = project.config.GetBoolean(key)

    sys.stdout.write('(y/n)? ')
    answer = sys.stdin.readline().strip()
    if answer in ('y', 'Y', 'yes', '1', 'true', 't'):
      self._UploadAndReport([branch], people)
    if answer is False:
      _die("upload blocked by %s = false" % key)

    if answer is None:
      date = branch.date
      commit_list = branch.commits

      print 'Upload project %s/ to remote branch %s:' % (project.relpath, project.revisionExpr)
      print ' branch %s (%2d commit%s, %s):' % (
            name,
            len(commit_list),
            len(commit_list) != 1 and 's' or '',
            date)
      for commit in commit_list:
        print ' %s' % commit

      sys.stdout.write('to %s (y/N)? ' % remote.review)
      answer = sys.stdin.readline().strip()
      answer = answer in ('y', 'Y', 'yes', '1', 'true', 't')

    if answer:
      if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
        answer = _ConfirmManyUploads()

    if answer:
      self._UploadAndReport(opt, [branch], people)
    else:
      _die("upload aborted by user")

  def _MultipleBranches(self, pending, people):
  def _MultipleBranches(self, opt, pending, people):
    projects = {}
    branches = {}

@@ -110,16 +212,17 @@ files and description associated with the change in Gerrit.
      for branch in avail:
        name = branch.name
        date = branch.date
        list = branch.commits
        commit_list = branch.commits

        if b:
          script.append('#')
        script.append('# branch %s (%2d commit%s, %s):' % (
        script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % (
                       name,
                       len(list),
                       len(list) != 1 and 's' or '',
                       date))
        for commit in list:
                       len(commit_list),
                       len(commit_list) != 1 and 's' or '',
                       date,
                       project.revisionExpr))
        for commit in commit_list:
          script.append('# %s' % commit)
        b[name] = branch

@@ -127,6 +230,11 @@ files and description associated with the change in Gerrit.
      branches[project.name] = b
    script.append('')

    script = [ x.encode('utf-8')
               if issubclass(type(x), unicode)
               else x
               for x in script ]

    script = Editor.EditString("\n".join(script)).split("\n")

    project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$')
@@ -155,78 +263,91 @@ files and description associated with the change in Gerrit.
        todo.append(branch)
    if not todo:
      _die("nothing uncommented for upload")
    self._UploadAndReport(todo, people)

  def _ReplaceBranch(self, project, people):
    branch = project.CurrentBranch
    if not branch:
      print >>sys.stdout, "no branches ready for upload"
      return
    branch = project.GetUploadableBranch(branch)
    if not branch:
      print >>sys.stdout, "no branches ready for upload"
      return
    many_commits = False
    for branch in todo:
      if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
        many_commits = True
        break
    if many_commits:
      if not _ConfirmManyUploads(multiple_branches=True):
        _die("upload aborted by user")

    script = []
    script.append('# Replacing from branch %s' % branch.name)
    for commit in branch.commits:
      script.append('[ ] %s' % commit)
    script.append('')
    script.append('# Insert change numbers in the brackets to add a new patch set.')
    script.append('# To create a new change record, leave the brackets empty.')
    self._UploadAndReport(opt, todo, people)

    script = Editor.EditString("\n".join(script)).split("\n")
  def _AppendAutoCcList(self, branch, people):
    """
    Appends the list of users in the CC list in the git project's config if a
    non-empty reviewer list was found.
    """

    change_re = re.compile(r'^\[\s*(\d{1,})\s*\]\s*([0-9a-f]{1,}) .*$')
    to_replace = dict()
    full_hashes = branch.unabbrev_commits
    name = branch.name
    project = branch.project
    key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
    raw_list = project.config.GetString(key)
    if not raw_list is None and len(people[0]) > 0:
      people[1].extend([entry.strip() for entry in raw_list.split(',')])

    for line in script:
      m = change_re.match(line)
      if m:
        c = m.group(1)
        f = m.group(2)
        try:
          f = full_hashes[f]
        except KeyError:
          print 'fh = %s' % full_hashes
          print >>sys.stderr, "error: commit %s not found" % f
          sys.exit(1)
        if c in to_replace:
          print >>sys.stderr,\
            "error: change %s cannot accept multiple commits" % c
          sys.exit(1)
        to_replace[c] = f
  def _FindGerritChange(self, branch):
    last_pub = branch.project.WasPublished(branch.name)
    if last_pub is None:
      return ""

    if not to_replace:
      print >>sys.stderr, "error: no replacements specified"
      print >>sys.stderr, " use 'repo upload' without --replace"
      sys.exit(1)
    refs = branch.GetPublishedRefs()
    try:
      # refs/changes/XYZ/N --> XYZ
      return refs.get(last_pub).split('/')[-2]
    except:
      return ""

    branch.replace_changes = to_replace
    self._UploadAndReport([branch], people)

  def _UploadAndReport(self, todo, people):
  def _UploadAndReport(self, opt, todo, original_people):
    have_errors = False
    for branch in todo:
      try:
        branch.UploadForReview(people)
        people = copy.deepcopy(original_people)
        self._AppendAutoCcList(branch, people)

        # Check if there are local changes that may have been forgotten
        if branch.project.HasChanges():
          key = 'review.%s.autoupload' % branch.project.remote.review
          answer = branch.project.config.GetBoolean(key)

          # if they want to auto upload, let's not ask because it could be automated
          if answer is None:
            sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
            a = sys.stdin.readline().strip().lower()
            if a not in ('y', 'yes', 't', 'true', 'on'):
              print >>sys.stderr, "skipping upload"
              branch.uploaded = False
              branch.error = 'User aborted'
              continue

        # Check if topic branches should be sent to the server during upload
        if opt.auto_topic is not True:
          key = 'review.%s.uploadtopic' % branch.project.remote.review
          opt.auto_topic = branch.project.config.GetBoolean(key)

        branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft)
        branch.uploaded = True
      except UploadError, e:
      except UploadError as e:
        branch.error = e
        branch.uploaded = False
        have_errors = True

    print >>sys.stderr, ''
    print >>sys.stderr, '--------------------------------------------'
    print >>sys.stderr, '----------------------------------------------------------------------'

    if have_errors:
      for branch in todo:
        if not branch.uploaded:
          print >>sys.stderr, '[FAILED] %-15s %-15s (%s)' % (
          if len(str(branch.error)) <= 30:
            fmt = ' (%s)'
          else:
            fmt = '\n (%s)'
          print >>sys.stderr, ('[FAILED] %-15s %-15s' + fmt) % (
            branch.project.relpath + '/', \
            branch.name, \
            branch.error)
            str(branch.error))
      print >>sys.stderr, ''

    for branch in todo:
@@ -243,6 +364,29 @@ files and description associated with the change in Gerrit.
    pending = []
    reviewers = []
    cc = []
    branch = None

    if opt.branch:
      branch = opt.branch

    for project in project_list:
      if opt.current_branch:
        cbr = project.CurrentBranch
        avail = [project.GetUploadableBranch(cbr)] if cbr else None
      else:
        avail = project.GetUploadableBranches(branch)
      if avail:
        pending.append((project, avail))

    if pending and (not opt.bypass_hooks):
      hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
                      self.manifest.topdir, abort_if_user_denies=True)
      pending_proj_names = [project.name for (project, avail) in pending]
      try:
        hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
      except HookError as e:
        print >>sys.stderr, "ERROR: %s" % str(e)
        return

    if opt.reviewers:
      reviewers = _SplitEmails(opt.reviewers)
@@ -250,22 +394,9 @@ files and description associated with the change in Gerrit.
      cc = _SplitEmails(opt.cc)
    people = (reviewers,cc)

    if opt.replace:
      if len(project_list) != 1:
        print >>sys.stderr, \
          'error: --replace requires exactly one project'
        sys.exit(1)
      self._ReplaceBranch(project_list[0], people)
      return

    for project in project_list:
      avail = project.GetUploadableBranches()
      if avail:
        pending.append((project, avail))

    if not pending:
      print >>sys.stdout, "no branches ready for upload"
    elif len(pending) == 1 and len(pending[0][1]) == 1:
      self._SingleBranch(pending[0][1][0], people)
      self._SingleBranch(opt, pending[0][1][0], people)
    else:
      self._MultipleBranches(pending, people)
      self._MultipleBranches(opt, pending, people)
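As a worked example of the --re/--cc handling in the Execute path above: _SplitEmails flattens repeated options and comma-separated values into one flat list. An equivalent standalone sketch with example addresses:

    def split_emails(values):
        # Mirrors _SplitEmails: each option value may itself be comma separated.
        result = []
        for value in values:
            result.extend([s.strip() for s in value.split(',')])
        return result

    # e.g. repo upload --re=alice@example.com,bob@example.com --re=carol@example.com
    print(split_emails(['alice@example.com, bob@example.com', 'carol@example.com']))
    # ['alice@example.com', 'bob@example.com', 'carol@example.com']
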
@@ -16,9 +16,12 @@
import sys
from command import Command, MirrorSafeCommand
from git_command import git
from project import HEAD
from git_refs import HEAD

class Version(Command, MirrorSafeCommand):
  wrapper_version = None
  wrapper_path = None

  common = False
  helpSummary = "Display the version of repo"
  helpUsage = """
@@ -31,5 +34,10 @@ class Version(Command, MirrorSafeCommand):

    print 'repo version %s' % rp.work_git.describe(HEAD)
    print ' (from %s)' % rem.url

    if Version.wrapper_path is not None:
      print 'repo launcher version %s' % Version.wrapper_version
      print ' (from %s)' % Version.wrapper_path

    print git.version().strip()
    print 'Python %s' % sys.version
tests/fixtures/test.gitconfig (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
[section]
  empty
  nonempty = true

tests/test_git_config.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import os
import unittest

import git_config

def fixture(*paths):
  """Return a path relative to test/fixtures.
  """
  return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)

class GitConfigUnitTest(unittest.TestCase):
  """Tests the GitConfig class.
  """
  def setUp(self):
    """Create a GitConfig object using the test.gitconfig fixture.
    """
    config_fixture = fixture('test.gitconfig')
    self.config = git_config.GitConfig(config_fixture)

  def test_GetString_with_empty_config_values(self):
    """
    Test config entries with no value.

    [section]
      empty

    """
    val = self.config.GetString('section.empty')
    self.assertEqual(val, None)

  def test_GetString_with_true_value(self):
    """
    Test config entries with a string value.

    [section]
      nonempty = true

    """
    val = self.config.GetString('section.nonempty')
    self.assertEqual(val, 'true')

  def test_GetString_from_missing_file(self):
    """
    Test missing config file
    """
    config_fixture = fixture('not.present.gitconfig')
    config = git_config.GitConfig(config_fixture)
    val = config.GetString('empty')
    self.assertEqual(val, None)

if __name__ == '__main__':
  unittest.main()
@@ -13,13 +13,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.

class Remote(object):
  def __init__(self, name,
               fetch=None,
               review=None,
               projectName=None):
    self.name = name
    self.fetchUrl = fetch
    self.reviewUrl = review
    self.projectName = projectName
    self.requiredCommits = []
import sys
import os
REPO_TRACE = 'REPO_TRACE'

try:
  _TRACE = os.environ[REPO_TRACE] == '1'
except KeyError:
  _TRACE = False

def IsTrace():
  return _TRACE

def SetTrace():
  global _TRACE
  _TRACE = True

def Trace(fmt, *args):
  if IsTrace():
    print >>sys.stderr, fmt % args
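The trace helpers above are switched on either by exporting REPO_TRACE=1 before running repo or programmatically via SetTrace(); Trace() then writes its formatted message to stderr. A small self-contained sketch of the same gate (module name and message are illustrative):

    import os
    import sys

    REPO_TRACE = 'REPO_TRACE'
    _TRACE = os.environ.get(REPO_TRACE) == '1'  # same switch as the module above

    def Trace(fmt, *args):
        # Only emit when tracing is enabled; otherwise stay silent.
        if _TRACE:
            sys.stderr.write((fmt % args) + '\n')

    # REPO_TRACE=1 python sketch.py  -> prints "trace: git fetch origin"
    Trace('trace: git %s', 'fetch origin')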