mirror of https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00

Compare commits (273 commits)
.gitattributes (2 changed lines)

@@ -1,4 +1,4 @@
 # Prevent /bin/sh scripts from being clobbered by autocrlf=true
 git_ssh text eol=lf
-main.py text eol=lf
 repo text eol=lf
+hooks/* text eol=lf
.mailmap (new file, 11 lines)

Anthony Newnam <anthony.newnam@garmin.com> Anthony <anthony@bnovc.com>
Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com>
Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com>
Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com>
Jia Bi <bijia@xiaomi.com> bijia <bijia@xiaomi.com>
JoonCheol Park <jooncheol@gmail.com> Jooncheol Park <jooncheol@gmail.com>
Sergii Pylypenko <x.pelya.x@gmail.com> pelya <x.pelya.x@gmail.com>
Shawn Pearce <sop@google.com> Shawn O. Pearce <sop@google.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@gmail.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@sonyericsson.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>
.pylintrc (deleted, 301 lines)

The removed .pylintrc contained:

# lint Python modules using external checkers.
#
# This is the main checker controling the other ones and the reports
# generation. It is itself both a raw checker and an astng checker in order
# to:
# * handle message activation / deactivation at the module level
# * handle some basic but necessary stats'data (number of classes, methods...)
#
[MASTER]

# Specify a configuration file.
#rcfile=

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=

# Profiled execution.
profile=no

# Add <file or directory> to the black list. It should be a base name, not a
# path. You may set this option multiple times.
ignore=SVN

# Pickle collected data for later comparisons.
persistent=yes

# Set the cache size for astng objects.
cache-size=500

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=


[MESSAGES CONTROL]

# Enable only checker(s) with the given id(s). This option conflicts with the
# disable-checker option
#enable-checker=

# Enable all checker(s) except those with the given id(s). This option
# conflicts with the enable-checker option
#disable-checker=

# Enable all messages in the listed categories.
#enable-msg-cat=

# Disable all messages in the listed categories.
#disable-msg-cat=

# Enable the message(s) with the given id(s).
enable=RP0004

# Disable the message(s) with the given id(s).
disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801,F0401,E0611,R0801,I0011

[REPORTS]

# set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=text

# Include message's id in output
include-ids=yes

# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no

# Tells whether to display a full report or only the messages
reports=yes

# Python expression which should return a note less than 10 (10 is the highest
# note).You have access to the variables errors warning, statement which
# respectivly contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (R0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Add a comment according to your evaluation note. This is used by the global
# evaluation report (R0004).
comment=no


# checks for
# * unused variables / imports
# * undefined variables
# * redefinition of variable from builtins or from an outer scope
# * use of variable before assigment
#
[VARIABLES]

# Tells whether we should check for unused import in __init__ files.
init-import=no

# A regular expression matching names used for dummy variables (i.e. not used).
dummy-variables-rgx=_|dummy

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=


# try to find bugs in the code using type inference
#
[TYPECHECK]

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamicaly set).
ignored-classes=SQLObject

# When zope mode is activated, consider the acquired-members option to ignore
# access to some undefined attributes.
zope=no

# List of members which are usually get through zope's acquisition mecanism and
# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
acquired-members=REQUEST,acl_users,aq_parent


# checks for :
# * doc strings
# * modules / classes / functions / methods / arguments / variables name
# * number of arguments, local variables, branchs, returns and statements in
#   functions, methods
# * required module attributes
# * dangerous default values as arguments
# * redefinition of function / method / class
# * uses of the global statement
#
[BASIC]

# Required attributes for module, separated by a comma
required-attributes=

# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=_main|__.*__

# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$

# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$

# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$

# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d

# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata

# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input


# checks for sign of poor/misdesign:
# * number of methods, attributes, local variables...
# * size, complexity of functions, methods
#
[DESIGN]

# Maximum number of arguments for function / method
max-args=5

# Maximum number of locals for function / method body
max-locals=15

# Maximum number of return / yield for function / method body
max-returns=6

# Maximum number of branch for function / method body
max-branchs=12

# Maximum number of statements in function / method body
max-statements=50

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of attributes for a class (see R0902).
max-attributes=20

# Minimum number of public methods for a class (see R0903).
min-public-methods=2

# Maximum number of public methods for a class (see R0904).
max-public-methods=30


# checks for
# * external modules dependencies
# * relative / wildcard imports
# * cyclic imports
# * uses of deprecated modules
#
[IMPORTS]

# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report R0402 must not be disabled)
import-graph=

# Create a graph of external dependencies in the given file (report R0402 must
# not be disabled)
ext-import-graph=

# Create a graph of internal dependencies in the given file (report R0402 must
# not be disabled)
int-import-graph=


# checks for :
# * methods without self as first argument
# * overridden methods signature
# * access only to existant members via self
# * attributes not defined in the __init__ method
# * supported interfaces implementation
# * unreachable code
#
[CLASSES]

# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp


# checks for similarities and duplicated code. This computation may be
# memory / CPU intensive, so you should disable it if you experiments some
# problems.
#
[SIMILARITIES]

# Minimum lines number of a similarity.
min-similarity-lines=4

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes


# checks for:
# * warning notes in the code like FIXME, XXX
# * PEP 263: source code with non ascii character but no encoding declaration
#
[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO


# checks for :
# * unauthorized constructions
# * strict indentation
# * line length
# * use of <> instead of !=
#
[FORMAT]

# Maximum number of characters on a single line.
max-line-length=80

# Maximum number of lines in a module
max-module-lines=1000

# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab). In repo it is 2 spaces.
indent-string='  '
README.md (new file, 14 lines)

# repo

Repo is a tool built on top of Git. Repo helps manage many Git repositories,
does the uploads to revision control systems, and automates parts of the
development workflow. Repo is not meant to replace Git, only to make it
easier to work with Git. The repo command is an executable Python script
that you can put anywhere in your path.

* Homepage: https://code.google.com/p/git-repo/
* Bug reports: https://code.google.com/p/git-repo/issues/
* Source: https://code.google.com/p/git-repo/
* Overview: https://source.android.com/source/developing.html
* Docs: https://source.android.com/source/using-repo.html
* [Submitting patches](./SUBMITTING_PATCHES.md)
SUBMITTING_PATCHES (deleted, 87 lines)

The removed plain-text guide read:

Short Version:

 - Make small logical changes.
 - Provide a meaningful commit message.
 - Check for coding errors with pylint
 - Make sure all code is under the Apache License, 2.0.
 - Publish your changes for review:

   git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master


Long Version:

I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution
(Documentation/SubmittingPatches), which itself was based on the
patch submission guidelines for the Linux kernel.

However there are some differences, so please review and familiarize
yourself with the following relevant bits:


(1) Make separate commits for logically separate changes.

Unless your patch is really trivial, you should not be sending
out a patch that was generated between your working tree and your
commit head. Instead, always make a commit with complete commit
message and generate a series of patches from your repository.
It is a good discipline.

Describe the technical detail of the change(s).

If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.


(2) Check for coding errors with pylint

Run pylint on changed modules using the provided configuration:

  pylint --rcfile=.pylintrc file.py


(3) Check the license

repo is licensed under the Apache License, 2.0.

Because of this licensing model *every* file within the project
*must* list the license that covers it in the header of the file.
Any new contributions to an existing file *must* be submitted under
the current license of that file. Any new files *must* clearly
indicate which license they are provided under in the file header.

Please verify that you are legally allowed and willing to submit your
changes under the license covering each file *prior* to submitting
your patch. It is virtually impossible to remove a patch once it
has been applied and pushed out.


(4) Sending your patches.

Do not email your patches to anyone.

Instead, login to the Gerrit Code Review tool at:

  https://gerrit-review.googlesource.com/

Ensure you have completed one of the necessary contributor
agreements, providing documentation to the project maintainers that
they have right to redistribute your work under the Apache License:

  https://gerrit-review.googlesource.com/#/settings/agreements

Ensure you have obtained an HTTP password to authenticate:

  https://gerrit-review.googlesource.com/new-password

Push your patches over HTTPS to the review server, possibly through
a remembered remote to make this easier in the future:

  git config remote.review.url https://gerrit-review.googlesource.com/git-repo
  git config remote.review.push HEAD:refs/for/master

  git push review

You will be automatically emailed a copy of your commits, and any
comments made by the project maintainers.
SUBMITTING_PATCHES.md (new file, 135 lines)

# Short Version

 - Make small logical changes.
 - Provide a meaningful commit message.
 - Check for coding errors and style nits with pyflakes and flake8
 - Make sure all code is under the Apache License, 2.0.
 - Publish your changes for review.
 - Make corrections if requested.
 - Verify your changes on gerrit so they can be submitted.

 `git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`


# Long Version

I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution
(Documentation/SubmittingPatches), which itself was based on the
patch submission guidelines for the Linux kernel.

However there are some differences, so please review and familiarize
yourself with the following relevant bits.


## Make separate commits for logically separate changes.

Unless your patch is really trivial, you should not be sending
out a patch that was generated between your working tree and your
commit head. Instead, always make a commit with complete commit
message and generate a series of patches from your repository.
It is a good discipline.

Describe the technical detail of the change(s).

If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.


## Check for coding errors and style nits with pyflakes and flake8

### Coding errors

Run `pyflakes` on changed modules:

    pyflakes file.py

Ideally there should be no new errors or warnings introduced.

### Style violations

Run `flake8` on changed modules:

    flake8 file.py

Note that repo generally follows [Google's python style guide]
(https://google.github.io/styleguide/pyguide.html) rather than [PEP 8]
(https://www.python.org/dev/peps/pep-0008/), so it's possible that
the output of `flake8` will be quite noisy. It's not mandatory to
avoid all warnings, but at least the maximum line length should be
followed.

If there are many occurrences of the same warning that cannot be
avoided without going against the Google style guide, these may be
suppressed in the included `.flake8` file.

## Check the license

repo is licensed under the Apache License, 2.0.

Because of this licensing model *every* file within the project
*must* list the license that covers it in the header of the file.
Any new contributions to an existing file *must* be submitted under
the current license of that file. Any new files *must* clearly
indicate which license they are provided under in the file header.

Please verify that you are legally allowed and willing to submit your
changes under the license covering each file *prior* to submitting
your patch. It is virtually impossible to remove a patch once it
has been applied and pushed out.


## Sending your patches.

Do not email your patches to anyone.

Instead, login to the Gerrit Code Review tool at:

  https://gerrit-review.googlesource.com/

Ensure you have completed one of the necessary contributor
agreements, providing documentation to the project maintainers that
they have right to redistribute your work under the Apache License:

  https://gerrit-review.googlesource.com/#/settings/agreements

Ensure you have obtained an HTTP password to authenticate:

  https://gerrit-review.googlesource.com/new-password

Ensure that you have the local commit hook installed to automatically
add a ChangeId to your commits:

    curl -Lo `git rev-parse --git-dir`/hooks/commit-msg https://gerrit-review.googlesource.com/tools/hooks/commit-msg
    chmod +x `git rev-parse --git-dir`/hooks/commit-msg

If you have already committed your changes you will need to amend the commit
to get the ChangeId added.

    git commit --amend

Push your patches over HTTPS to the review server, possibly through
a remembered remote to make this easier in the future:

    git config remote.review.url https://gerrit-review.googlesource.com/git-repo
    git config remote.review.push HEAD:refs/for/master

    git push review

You will be automatically emailed a copy of your commits, and any
comments made by the project maintainers.


## Make changes if requested

The project maintainer who reviews your changes might request changes to your
commit. If you make the requested changes you will need to amend your commit
and push it to the review server again.


## Verify your changes on gerrit

After you receive a Code-Review+2 from the maintainer, select the Verified
button on the gerrit page for the change. This verifies that you have tested
your changes and notifies the maintainer that they are ready to be submitted.
The maintainer will then submit your changes to the repository.
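The pyflakes/flake8 step described in SUBMITTING_PATCHES.md above is easy to script. The sketch below is illustrative only (it is not part of this change) and assumes `git`, `pyflakes`, and `flake8` are installed and on PATH; it lints just the Python files modified relative to HEAD.

    #!/usr/bin/env python
    # Illustrative helper: lint only the Python files modified in the working tree.
    # Assumes git, pyflakes and flake8 are installed and on PATH.
    from __future__ import print_function

    import subprocess
    import sys


    def changed_python_files():
      # Files modified relative to HEAD; brand-new files must be `git add`ed first.
      out = subprocess.check_output(['git', 'diff', '--name-only', 'HEAD'])
      if hasattr(out, 'decode'):
        out = out.decode('utf-8')
      return [f for f in out.splitlines() if f.endswith('.py')]


    def main():
      files = changed_python_files()
      if not files:
        print('no modified Python files')
        return 0
      status = 0
      for tool in ('pyflakes', 'flake8'):
        # A non-zero exit only means the tool reported findings; run both anyway.
        status |= subprocess.call([tool] + files)
      return status


    if __name__ == '__main__':
      sys.exit(main())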
color.py (71 changed lines)

@@ -18,41 +18,43 @@ import sys
 import pager

 COLORS = {None: -1,
           'normal': -1,
           'black': 0,
           'red': 1,
           'green': 2,
           'yellow': 3,
           'blue': 4,
           'magenta': 5,
           'cyan': 6,
           'white': 7}

 ATTRS = {None: -1,
          'bold': 1,
          'dim': 2,
          'ul': 4,
          'blink': 5,
          'reverse': 7}
 (in the two dictionaries only the spacing around ':' changed)

-RESET = "\033[m"  # pylint: disable=W1401
-                  # backslash is not anomalous
+RESET = "\033[m"

 def is_color(s):
   return s in COLORS

 def is_attr(s):
   return s in ATTRS

-def _Color(fg = None, bg = None, attr = None):
+
+def _Color(fg=None, bg=None, attr=None):
   fg = COLORS[fg]
   bg = COLORS[bg]
   attr = ATTRS[attr]

   if attr >= 0 or fg >= 0 or bg >= 0:
     need_sep = False
-    code = "\033["  #pylint: disable=W1401
+    code = "\033["

     if attr >= 0:
       code += chr(ord('0') + attr)

@@ -71,7 +73,6 @@ def _Color(fg=None, bg=None, attr=None):
     if bg >= 0:
       if need_sep:
         code += ';'
-      need_sep = True

       if bg < 8:
         code += '4%c' % (ord('0') + bg)

@@ -82,6 +83,27 @@ def _Color(fg=None, bg=None, attr=None):
     code = ''
   return code

+DEFAULT = None
+
+
+def SetDefaultColoring(state):
+  """Set coloring behavior to |state|.
+
+  This is useful for overriding config options via the command line.
+  """
+  if state is None:
+    # Leave it alone -- return quick!
+    return
+
+  global DEFAULT
+  state = state.lower()
+  if state in ('auto',):
+    DEFAULT = state
+  elif state in ('always', 'yes', 'true', True):
+    DEFAULT = 'always'
+  elif state in ('never', 'no', 'false', False):
+    DEFAULT = 'never'
+
+
 class Coloring(object):
   def __init__(self, config, section_type):

@@ -89,9 +111,11 @@ class Coloring(object):
     self._config = config
     self._out = sys.stdout

-    on = self._config.GetString(self._section)
+    on = DEFAULT
     if on is None:
-      on = self._config.GetString('color.ui')
+      on = self._config.GetString(self._section)
+      if on is None:
+        on = self._config.GetString('color.ui')

     if on == 'auto':
       if pager.active or os.isatty(1):

@@ -122,6 +146,7 @@ class Coloring(object):
   def printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.colorer(opt, fg, bg, attr)
+
     def f(fmt, *args):
       s._out.write(c(fmt, *args))
     return f

@@ -129,6 +154,7 @@ class Coloring(object):
   def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.nofmt_colorer(opt, fg, bg, attr)
+
     def f(fmt):
       s._out.write(c(fmt))
     return f

@@ -136,11 +162,13 @@ class Coloring(object):
   def colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt, *args):
         output = fmt % args
         return ''.join([c, output, RESET])
       return f
     else:
+
       def f(fmt, *args):
         return fmt % args
       return f

@@ -148,6 +176,7 @@ class Coloring(object):
   def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt):
         return ''.join([c, fmt, RESET])
       return f
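The new `SetDefaultColoring()` hook above exists so that a command-line switch can take precedence over the `color.<section>`/`color.ui` git-config lookups, which now only run when `DEFAULT` is still `None`. The snippet below is a hedged sketch of how a front end might wire that up; the `--color` option name and the `optparse` setup are assumptions for illustration, not taken from this diff.

    # Illustrative only: feeding a --color flag into color.py's new hook.
    # The option name and parser below are assumptions, not part of this diff.
    import optparse

    import color


    def parse_and_apply(argv):
      parser = optparse.OptionParser()
      parser.add_option('--color', dest='color', default=None,
                        choices=('auto', 'always', 'never'),
                        help='override color.ui configuration')
      opts, args = parser.parse_args(argv)
      # None leaves DEFAULT untouched, so Coloring() falls back to
      # color.<section> and then color.ui from git config.
      color.SetDefaultColoring(opts.color)
      return opts, args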
command.py (71 changed lines)

@@ -31,7 +31,7 @@ class Command(object):
   manifest = None
   _optparse = None

-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return False

   def ReadEnvironmentOptions(self, opts):

@@ -63,7 +63,7 @@ class Command(object):
       usage = self.helpUsage.strip().replace('%prog', me)
     except AttributeError:
       usage = 'repo %s' % self.NAME
-    self._optparse = optparse.OptionParser(usage = usage)
+    self._optparse = optparse.OptionParser(usage=usage)
     self._Options(self._optparse)
     return self._optparse

@@ -106,19 +106,24 @@ class Command(object):
   def _UpdatePathToProjectMap(self, project):
     self._by_path[project.worktree] = project

-  def _GetProjectByPath(self, path):
+  def _GetProjectByPath(self, manifest, path):
     project = None
     if os.path.exists(path):
       oldpath = None
-      while path \
-            and path != oldpath \
-            and path != self.manifest.topdir:
+      while path and \
+            path != oldpath and \
+            path != manifest.topdir:
         try:
           project = self._by_path[path]
           break
         except KeyError:
           oldpath = path
           path = os.path.dirname(path)
+      if not project and path == manifest.topdir:
+        try:
+          project = self._by_path[path]
+        except KeyError:
+          pass
     else:
       try:
         project = self._by_path[path]

@@ -126,15 +131,19 @@ class Command(object):
         pass
     return project

-  def GetProjects(self, args, missing_ok=False, submodules_ok=False):
+  def GetProjects(self, args, manifest=None, groups='', missing_ok=False,
+                  submodules_ok=False):
     """A list of projects that match the arguments.
     """
-    all_projects_list = self.manifest.projects
+    if not manifest:
+      manifest = self.manifest
+    all_projects_list = manifest.projects
     result = []

-    mp = self.manifest.manifestProject
+    mp = manifest.manifestProject

-    groups = mp.config.GetString('manifest.groups')
+    if not groups:
+      groups = mp.config.GetString('manifest.groups')
     if not groups:
       groups = 'default,platform-' + platform.system().lower()
     groups = [x for x in re.split(r'[,\s]+', groups) if x]

@@ -147,29 +156,28 @@ class Command(object):
                                  for p in project.GetDerivedSubprojects())
         all_projects_list.extend(derived_projects.values())
       for project in all_projects_list:
-        if ((missing_ok or project.Exists) and
-            project.MatchesGroups(groups)):
+        if (missing_ok or project.Exists) and project.MatchesGroups(groups):
           result.append(project)
     else:
       self._ResetPathToProjectMap(all_projects_list)

       for arg in args:
-        projects = self.manifest.GetProjectsWithName(arg)
+        projects = manifest.GetProjectsWithName(arg)

         if not projects:
           path = os.path.abspath(arg).replace('\\', '/')
-          project = self._GetProjectByPath(path)
+          project = self._GetProjectByPath(manifest, path)

           # If it's not a derived project, update path->project mapping and
           # search again, as arg might actually point to a derived subproject.
-          if (project and not project.Derived and
-              (submodules_ok or project.sync_s)):
+          if (project and not project.Derived and (submodules_ok or
+                                                   project.sync_s)):
             search_again = False
             for subproject in project.GetDerivedSubprojects():
               self._UpdatePathToProjectMap(subproject)
               search_again = True
             if search_again:
-              project = self._GetProjectByPath(path) or project
+              project = self._GetProjectByPath(manifest, path) or project

         if project:
           projects = [project]

@@ -190,17 +198,24 @@ class Command(object):
     result.sort(key=_getpath)
     return result

-  def FindProjects(self, args):
+  def FindProjects(self, args, inverse=False):
     result = []
     patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args]
     for project in self.GetProjects(''):
       for pattern in patterns:
-        if pattern.search(project.name) or pattern.search(project.relpath):
+        match = pattern.search(project.name) or pattern.search(project.relpath)
+        if not inverse and match:
           result.append(project)
           break
+        if inverse and match:
+          break
+      else:
+        if inverse:
+          result.append(project)
     result.sort(key=lambda project: project.relpath)
     return result


 # pylint: disable=W0223
 # Pylint warns that the `InteractiveCommand` and `PagedCommand` classes do not
 # override method `Execute` which is abstract in `Command`. Since that method

@@ -210,19 +225,33 @@ class InteractiveCommand(Command):
   """Command which requires user interaction on the tty and
   must not run within a pager, even if the user asks to.
   """
-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return False


 class PagedCommand(Command):
   """Command which defaults to output in a pager, as its
   display tends to be larger than one screen full.
   """
-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return True

 # pylint: enable=W0223


 class MirrorSafeCommand(object):
   """Command permits itself to run within a mirror,
   and does not require a working directory.
   """
+
+
+class GitcAvailableCommand(object):
+  """Command that requires GITC to be available, but does
+  not require the local client to be a GITC client.
+  """
+
+
+class GitcClientCommand(object):
+  """Command that requires the local client to be a GITC
+  client.
+  """
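The new `inverse` parameter of `FindProjects()` above relies on Python's `for`/`else`: the `else` suite runs only when the inner loop finishes without `break`, i.e. when no pattern matched the project at all. Here is a self-contained sketch of the same pattern; `DummyProject` is a stand-in, not repo's real `Project` class.

    # Stand-alone illustration of the for/else matching used by FindProjects().
    import re


    class DummyProject(object):
      def __init__(self, name, relpath):
        self.name = name
        self.relpath = relpath


    def find_projects(projects, args, inverse=False):
      patterns = [re.compile(a, re.IGNORECASE) for a in args]
      result = []
      for project in projects:
        for pattern in patterns:
          match = pattern.search(project.name) or pattern.search(project.relpath)
          if not inverse and match:
            result.append(project)   # normal mode: keep the first match
            break
          if inverse and match:
            break                    # inverse mode: any match disqualifies
        else:
          if inverse:
            result.append(project)   # no pattern matched at all
      return sorted(result, key=lambda p: p.relpath)


    projects = [DummyProject('platform/art', 'art'),
                DummyProject('platform/bionic', 'bionic')]
    print([p.relpath for p in find_projects(projects, ['art'])])                # ['art']
    print([p.relpath for p in find_projects(projects, ['art'], inverse=True)])  # ['bionic']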
docs/manifest-format.txt

@@ -26,6 +26,7 @@ following DTD:
                         manifest-server?,
                         remove-project*,
                         project*,
+                        extend-project*,
                         repo-hooks?)>

     <!ELEMENT notice (#PCDATA)>

@@ -34,7 +35,9 @@ following DTD:
     <!ATTLIST remote name         ID    #REQUIRED>
     <!ATTLIST remote alias        CDATA #IMPLIED>
     <!ATTLIST remote fetch        CDATA #REQUIRED>
+    <!ATTLIST remote pushurl      CDATA #IMPLIED>
     <!ATTLIST remote review       CDATA #IMPLIED>
+    <!ATTLIST remote revision     CDATA #IMPLIED>

     <!ELEMENT default (EMPTY)>
     <!ATTLIST default remote IDREF #IMPLIED>

@@ -45,10 +48,12 @@ following DTD:
     <!ATTLIST default sync-s  CDATA #IMPLIED>

     <!ELEMENT manifest-server (EMPTY)>
-    <!ATTLIST url CDATA #REQUIRED>
+    <!ATTLIST manifest-server url CDATA #REQUIRED>

     <!ELEMENT project (annotation*,
-                       project*)>
+                       project*,
+                       copyfile*,
+                       linkfile*)>
     <!ATTLIST project name        CDATA #REQUIRED>
     <!ATTLIST project path        CDATA #IMPLIED>
     <!ATTLIST project remote      IDREF #IMPLIED>

@@ -66,6 +71,19 @@ following DTD:
     <!ATTLIST annotation value CDATA #REQUIRED>
     <!ATTLIST annotation keep  CDATA "true">

+    <!ELEMENT copyfile (EMPTY)>
+    <!ATTLIST copyfile src  CDATA #REQUIRED>
+    <!ATTLIST copyfile dest CDATA #REQUIRED>
+
+    <!ELEMENT linkfile (EMPTY)>
+    <!ATTLIST linkfile src  CDATA #REQUIRED>
+    <!ATTLIST linkfile dest CDATA #REQUIRED>
+
+    <!ELEMENT extend-project (EMPTY)>
+    <!ATTLIST extend-project name   CDATA #REQUIRED>
+    <!ATTLIST extend-project path   CDATA #IMPLIED>
+    <!ATTLIST extend-project groups CDATA #IMPLIED>
+
     <!ELEMENT remove-project (EMPTY)>
     <!ATTLIST remove-project name  CDATA #REQUIRED>

@@ -108,10 +126,20 @@ Attribute `fetch`: The Git URL prefix for all projects which use
 this remote. Each project's name is appended to this prefix to
 form the actual URL used to clone the project.

+Attribute `pushurl`: The Git "push" URL prefix for all projects
+which use this remote. Each project's name is appended to this
+prefix to form the actual URL used to "git push" the project.
+This attribute is optional; if not specified then "git push"
+will use the same URL as the `fetch` attribute.
+
 Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`. This attribute is optional;
 if not specified then `repo upload` will not function.

+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
 Element default
 ---------------

@@ -132,14 +160,14 @@ Project elements not setting their own `dest-branch` will inherit
 this value. If this value is not set, projects will use `revision`
 by default instead.

-Attribute `sync_j`: Number of parallel jobs to use when synching.
+Attribute `sync-j`: Number of parallel jobs to use when synching.

-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
-whole ref space. Project elements lacking a sync_c element of
+whole ref space. Project elements lacking a sync-c element of
 their own will use this value.

-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.


 Element manifest-server

@@ -154,7 +182,8 @@ The manifest server should implement the following RPC methods:
     GetApprovedManifest(branch, target)

 Return a manifest in which each project is pegged to a known good revision
-for the current branch and target.
+for the current branch and target. This is used by repo sync when the
+--smart-sync option is given.

 The target to use is defined by environment variables TARGET_PRODUCT
 and TARGET_BUILD_VARIANT. These variables are used to create a string

@@ -166,7 +195,8 @@ should choose a reasonable default target.
     GetManifest(tag)

 Return a manifest in which each project is pegged to the revision at
-the specified tag.
+the specified tag. This is used by repo sync when the --smart-tag option
+is given.


 Element project

@@ -208,7 +238,8 @@ to track for this project. Names can be relative to refs/heads
 (e.g. just "master") or absolute (e.g. "refs/heads/master").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.

 Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
 When using `repo upload`, changes will be submitted for code

@@ -226,13 +257,13 @@ group "notdefault", it will not be automatically downloaded by repo.
 If the project has a parent element, the `name` and `path` here
 are the prefixed ones.

-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
 whole ref space.

-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.

-Attribute `upstream`: Name of the Git branch in which a sha1
+Attribute `upstream`: Name of the Git ref in which a sha1
 can be found. Used when syncing a revision locked manifest in
 -c mode to avoid having to sync the entire ref space.

@@ -246,6 +277,22 @@ rather than the `name` attribute. This attribute only applies to the
 local mirrors syncing, it will be ignored when syncing the projects in a
 client working directory.

+Element extend-project
+----------------------
+
+Modify the attributes of the named project.
+
+This element is mostly useful in a local manifest file, to modify the
+attributes of an existing project without completely replacing the
+existing project definition. This makes the local manifest more robust
+against changes to the original manifest.
+
+Attribute `path`: If specified, limit the change to projects checked out
+at the specified path, rather than all projects with the given name.
+
+Attribute `groups`: List of additional groups to which this project
+belongs. Same syntax as the corresponding element of `project`.
+
 Element annotation
 ------------------

@@ -257,6 +304,21 @@ prefixed with REPO__. In addition, there is an optional attribute
 "false". This attribute determines whether or not the annotation will
 be kept when exported with the manifest subcommand.

+Element copyfile
+----------------
+
+Zero or more copyfile elements may be specified as children of a
+project element. Each element describes a src-dest pair of files;
+the "src" file will be copied to the "dest" place during 'repo sync'
+command.
+"src" is project relative, "dest" is relative to the top of the tree.
+
+Element linkfile
+----------------
+
+It's just like copyfile and runs at the same time as copyfile but
+instead of copying it creates a symlink.
+
 Element remove-project
 ----------------------
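To make the new manifest pieces concrete, the sketch below assembles a toy manifest that uses the `pushurl` and `revision` remote attributes together with `extend-project`, `copyfile`, and `linkfile`. Every name, URL, and path in it is invented for illustration; it simply emits XML shaped like the DTD documented above.

    # Builds a toy manifest exercising the elements documented above.
    # All names, URLs and paths are invented for illustration.
    import xml.etree.ElementTree as ET

    manifest = ET.Element('manifest')
    ET.SubElement(manifest, 'remote', name='origin',
                  fetch='https://example.com/',
                  pushurl='ssh://example.com/',       # optional push prefix
                  revision='refs/heads/master')       # overrides the default revision
    ET.SubElement(manifest, 'default', remote='origin')

    project = ET.SubElement(manifest, 'project', name='tools/demo', path='demo')
    ET.SubElement(project, 'copyfile', src='default.xml', dest='manifest.xml')
    ET.SubElement(project, 'linkfile', src='hooks', dest='tools/hooks')

    # A local manifest could then tweak the same project without redefining it:
    ET.SubElement(manifest, 'extend-project', name='tools/demo', groups='notdefault')

    print(ET.tostring(manifest).decode('utf-8'))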
error.py (11 changed lines)

@@ -24,6 +24,13 @@ class ManifestInvalidRevisionError(Exception):
 class NoManifestException(Exception):
   """The required manifest does not exist.
   """
+  def __init__(self, path, reason):
+    super(NoManifestException, self).__init__()
+    self.path = path
+    self.reason = reason
+
+  def __str__(self):
+    return self.reason

 class EditorError(Exception):
   """Unspecified error from the user's text editor.

@@ -73,7 +80,7 @@ class NoSuchProjectError(Exception):
     self.name = name

   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name

@@ -86,7 +93,7 @@ class InvalidProjectGroupsError(Exception):
     self.name = name

   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name
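With the new constructor, `NoManifestException` carries both the failing path and a human-readable reason, while `__str__` still prints only the reason. A hypothetical call site, assuming it runs alongside repo's `error.py` module:

    # Hypothetical call site; the real callers live in repo's manifest-loading code.
    from __future__ import print_function

    import os

    from error import NoManifestException


    def require_manifest(path):
      if not os.path.exists(path):
        raise NoManifestException(path, 'manifest required but not found')


    try:
      require_manifest('.repo/manifest.xml')
    except NoManifestException as e:
      print('error: %s: %s' % (e.path, e))   # __str__ returns just the reason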
git_command.py

@@ -14,13 +14,16 @@
 # limitations under the License.

 from __future__ import print_function
+import fcntl
 import os
+import select
 import sys
 import subprocess
 import tempfile
 from signal import SIGTERM
 from error import GitError
 from trace import REPO_TRACE, IsTrace, Trace
+from wrapper import Wrapper

 GIT = 'git'
 MIN_GIT_VERSION = (1, 5, 4)

@@ -75,24 +78,32 @@ def terminate_ssh_clients():

 _git_version = None

+class _sfd(object):
+  """select file descriptor class"""
+  def __init__(self, fd, dest, std_name):
+    assert std_name in ('stdout', 'stderr')
+    self.fd = fd
+    self.dest = dest
+    self.std_name = std_name
+  def fileno(self):
+    return self.fd.fileno()
+
 class _GitCall(object):
   def version(self):
     p = GitCommand(None, ['--version'], capture_stdout=True)
     if p.Wait() == 0:
-      return p.stdout
+      if hasattr(p.stdout, 'decode'):
+        return p.stdout.decode('utf-8')
+      else:
+        return p.stdout
     return None

   def version_tuple(self):
     global _git_version
     if _git_version is None:
-      ver_str = git.version().decode('utf-8')
-      if ver_str.startswith('git version '):
-        _git_version = tuple(
-            map(int,
-              ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
-        ))
-      else:
+      ver_str = git.version()
+      _git_version = Wrapper().ParseGitVersion(ver_str)
+      if _git_version is None:
         print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
         sys.exit(1)
     return _git_version

@@ -143,6 +154,9 @@ class GitCommand(object):
       if key in env:
         del env[key]

+    # If we are not capturing std* then need to print it.
+    self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
+
     if disable_editor:
       _setenv(env, 'GIT_EDITOR', ':')
     if ssh_proxy:

@@ -154,6 +168,9 @@ class GitCommand(object):
       if p is not None:
         s = p + ' ' + s
       _setenv(env, 'GIT_CONFIG_PARAMETERS', s)
+    if 'GIT_ALLOW_PROTOCOL' not in env:
+      _setenv(env, 'GIT_ALLOW_PROTOCOL',
+              'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')

     if project:
       if not cwd:

@@ -166,22 +183,21 @@ class GitCommand(object):
     if gitdir:
       _setenv(env, GIT_DIR, gitdir)
       cwd = None
-    command.extend(cmdv)
+    command.append(cmdv[0])
+    # Need to use the --progress flag for fetch/clone so output will be
+    # displayed as by default git only does progress output if stderr is a TTY.
+    if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
+      if '--progress' not in cmdv and '--quiet' not in cmdv:
+        command.append('--progress')
+    command.extend(cmdv[1:])

     if provide_stdin:
       stdin = subprocess.PIPE
     else:
       stdin = None

-    if capture_stdout:
-      stdout = subprocess.PIPE
-    else:
-      stdout = None
-
-    if capture_stderr:
-      stderr = subprocess.PIPE
-    else:
-      stderr = None
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE

     if IsTrace():
       global LAST_CWD

@@ -230,8 +246,36 @@ class GitCommand(object):
   def Wait(self):
     try:
       p = self.process
-      (self.stdout, self.stderr) = p.communicate()
-      rc = p.returncode
+      rc = self._CaptureOutput()
     finally:
       _remove_ssh_client(p)
     return rc
+
+  def _CaptureOutput(self):
+    p = self.process
+    s_in = [_sfd(p.stdout, sys.stdout, 'stdout'),
+            _sfd(p.stderr, sys.stderr, 'stderr')]
+    self.stdout = ''
+    self.stderr = ''
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _, _ = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s_in.remove(s)
+          continue
+        if not hasattr(buf, 'encode'):
+          buf = buf.decode()
+        if s.std_name == 'stdout':
+          self.stdout += buf
+        else:
+          self.stderr += buf
+        if self.tee[s.std_name]:
+          s.dest.write(buf)
+          s.dest.flush()
+    return p.wait()

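The _CaptureOutput change replaces communicate() with a select loop so output is both recorded and, when not captured, echoed live. Below is a standalone sketch of that non-blocking tee pattern, not repo's code: the command and buffer size are arbitrary, and it is POSIX-only because it relies on fcntl.

import fcntl
import os
import select
import subprocess
import sys

# Run a child process and mirror its stdout/stderr while also buffering them.
p = subprocess.Popen(['git', '--version'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
streams = {p.stdout: (sys.stdout, []), p.stderr: (sys.stderr, [])}

for fd in streams:
  flags = fcntl.fcntl(fd, fcntl.F_GETFL)
  fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)  # avoid blocking reads

while streams:
  ready, _, _ = select.select(list(streams), [], [])
  for fd in ready:
    buf = fd.read(4096)
    if not buf:                 # EOF on this stream
      del streams[fd]
      continue
    dest, chunks = streams[fd]
    text = buf.decode() if isinstance(buf, bytes) else buf
    chunks.append(text)         # keep a copy, like self.stdout / self.stderr
    dest.write(text)            # and tee it to the terminal
    dest.flush()

p.wait()

Making the pipes non-blocking matters because a blocking read on one stream could stall while the child keeps filling the other pipe.
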
git_config.py (134 changed lines)

@@ -15,8 +15,10 @@

 from __future__ import print_function

+import contextlib
+import errno
+import json
 import os
-import pickle
 import re
 import subprocess
 import sys

@@ -80,7 +82,7 @@ class GitConfig(object):
     return cls(configfile = os.path.join(gitdir, 'config'),
                defaults = defaults)

-  def __init__(self, configfile, defaults=None, pickleFile=None):
+  def __init__(self, configfile, defaults=None, jsonFile=None):
     self.file = configfile
     self.defaults = defaults
     self._cache_dict = None

@@ -88,12 +90,11 @@ class GitConfig(object):
     self._remotes = {}
     self._branches = {}

-    if pickleFile is None:
-      self._pickle = os.path.join(
+    self._json = jsonFile
+    if self._json is None:
+      self._json = os.path.join(
         os.path.dirname(self.file),
-        '.repopickle_' + os.path.basename(self.file))
-    else:
-      self._pickle = pickleFile
+        '.repo_' + os.path.basename(self.file) + '.json')

   def Has(self, name, include_defaults = True):
     """Return true if this configuration file has the key.

@@ -217,9 +218,9 @@ class GitConfig(object):
     """Resolve any url.*.insteadof references.
     """
     for new_url in self.GetSubSections('url'):
-      old_url = self.GetString('url.%s.insteadof' % new_url)
+      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
         if old_url is not None and url.startswith(old_url):
           return new_url + url[len(old_url):]
     return url

   @property

@@ -248,50 +249,41 @@ class GitConfig(object):
     return self._cache_dict

   def _Read(self):
-    d = self._ReadPickle()
+    d = self._ReadJson()
     if d is None:
       d = self._ReadGit()
-      self._SavePickle(d)
+      self._SaveJson(d)
     return d

-  def _ReadPickle(self):
+  def _ReadJson(self):
     try:
-      if os.path.getmtime(self._pickle) \
+      if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._pickle)
+        os.remove(self._json)
         return None
     except OSError:
       return None
     try:
-      Trace(': unpickle %s', self.file)
-      fd = open(self._pickle, 'rb')
+      Trace(': parsing %s', self.file)
+      fd = open(self._json)
       try:
-        return pickle.load(fd)
+        return json.load(fd)
       finally:
         fd.close()
-    except EOFError:
-      os.remove(self._pickle)
-      return None
-    except IOError:
-      os.remove(self._pickle)
-      return None
-    except pickle.PickleError:
-      os.remove(self._pickle)
+    except (IOError, ValueError):
+      os.remove(self._json)
       return None

-  def _SavePickle(self, cache):
+  def _SaveJson(self, cache):
     try:
-      fd = open(self._pickle, 'wb')
+      fd = open(self._json, 'w')
       try:
-        pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+        json.dump(cache, fd, indent=2)
       finally:
         fd.close()
-    except IOError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
-    except pickle.PickleError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
+    except (IOError, TypeError):
+      if os.path.exists(self._json):
+        os.remove(self._json)

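The pickle-to-JSON move keeps the original invalidation rule: trust the cache only while it is newer than the config file it mirrors. A self-contained sketch of that rule, with placeholder file names rather than repo's real '.repo_<name>.json' path handling:

import json
import os

CONFIG = 'config'             # placeholder for the git config file
CACHE = '.repo_config.json'   # placeholder for the JSON cache next to it

def read_cache():
  """Return the cached dict, or None if the cache is stale or unreadable."""
  try:
    if os.path.getmtime(CACHE) <= os.path.getmtime(CONFIG):
      os.remove(CACHE)        # stale: the config changed after the cache was written
      return None
  except OSError:
    return None               # cache or config missing
  try:
    with open(CACHE) as fd:
      return json.load(fd)
  except (IOError, ValueError):
    os.remove(CACHE)          # corrupt cache: drop it and re-read git config
    return None

def save_cache(data):
  try:
    with open(CACHE, 'w') as fd:
      json.dump(data, fd, indent=2)
  except (IOError, TypeError):
    if os.path.exists(CACHE):
      os.remove(CACHE)
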
   def _ReadGit(self):
     """

@@ -472,9 +464,13 @@ def _open_ssh(host, port=None):
             % (host,port, str(e)), file=sys.stderr)
       return False

+    time.sleep(1)
+    ssh_died = (p.poll() is not None)
+    if ssh_died:
+      return False
+
     _master_processes.append(p)
     _master_keys.add(key)
-    time.sleep(1)
     return True
   finally:
     _master_keys_lock.release()

@@ -512,6 +508,43 @@ def GetSchemeFromUrl(url):
     return m.group(1)
   return None

+
+@contextlib.contextmanager
+def GetUrlCookieFile(url, quiet):
+  if url.startswith('persistent-'):
+    try:
+      p = subprocess.Popen(
+          ['git-remote-persistent-https', '-print_config', url],
+          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+          stderr=subprocess.PIPE)
+      try:
+        cookieprefix = 'http.cookiefile='
+        proxyprefix = 'http.proxy='
+        cookiefile = None
+        proxy = None
+        for line in p.stdout:
+          line = line.strip()
+          if line.startswith(cookieprefix):
+            cookiefile = line[len(cookieprefix):]
+          if line.startswith(proxyprefix):
+            proxy = line[len(proxyprefix):]
+        # Leave subprocess open, as cookie file may be transient.
+        if cookiefile or proxy:
+          yield cookiefile, proxy
+          return
+      finally:
+        p.stdin.close()
+        if p.wait():
+          err_msg = p.stderr.read()
+          if ' -print_config' in err_msg:
+            pass  # Persistent proxy doesn't support -print_config.
+          elif not quiet:
+            print(err_msg, file=sys.stderr)
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        pass  # No persistent proxy.
+      raise
+  yield GitConfig.ForUser().GetString('http.cookiefile'), None
+
 def _preconnect(url):
   m = URI_ALL.match(url)
   if m:

@@ -539,6 +572,7 @@ class Remote(object):
     self._config = config
     self.name = name
     self.url = self._Get('url')
+    self.pushUrl = self._Get('pushurl')
     self.review = self._Get('review')
     self.projectname = self._Get('projectname')
     self.fetch = list(map(RefSpec.FromString,

@@ -576,7 +610,9 @@ class Remote(object):
       return None

     u = self.review
-    if not u.startswith('http:') and not u.startswith('https:'):
+    if u.startswith('persistent-'):
+      u = u[len('persistent-'):]
+    if u.split(':')[0] not in ('http', 'https', 'sso'):
       u = 'http://%s' % u
     if u.endswith('/Gerrit'):
       u = u[:len(u) - len('/Gerrit')]

@@ -592,6 +628,9 @@ class Remote(object):
         host, port = os.environ['REPO_HOST_PORT_INFO'].split()
         self._review_url = self._SshReviewUrl(userEmail, host, port)
         REVIEW_CACHE[u] = self._review_url
+      elif u.startswith('sso:'):
+        self._review_url = u  # Assume it's right
+        REVIEW_CACHE[u] = self._review_url
       else:
         try:
           info_url = u + 'ssh_info'

@@ -601,10 +640,13 @@ class Remote(object):
             # of HTML response back, like maybe a login page.
             #
             # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
-            self._review_url = http_url + 'p/'
+            self._review_url = http_url
           else:
             host, port = info.split()
-            self._review_url = self._SshReviewUrl(userEmail, host, port)
+            if _open_ssh(host, port):
+              self._review_url = self._SshReviewUrl(userEmail, host, port)
+            else:
+              self._review_url = http_url
         except urllib.error.HTTPError as e:
           raise UploadError('%s: %s' % (self.review, str(e)))
         except urllib.error.URLError as e:

@@ -624,9 +666,7 @@ class Remote(object):
   def ToLocal(self, rev):
     """Convert a remote revision string to something we have locally.
     """
-    if IsId(rev):
-      return rev
-    if rev.startswith(R_TAGS):
+    if self.name == '.' or IsId(rev):
       return rev

     if not rev.startswith('refs/'):

@@ -635,6 +675,10 @@ class Remote(object):
     for spec in self.fetch:
       if spec.SourceMatches(rev):
         return spec.MapSource(rev)
+
+    if not rev.startswith(R_HEADS):
+      return rev
+
     raise GitError('remote %s does not have %s' % (self.name, rev))

   def WritesTo(self, ref):

@@ -658,6 +702,10 @@ class Remote(object):
     """Save this remote to the configuration.
     """
     self._Set('url', self.url)
+    if self.pushUrl is not None:
+      self._Set('pushurl', self.pushUrl + '/' + self.projectname)
+    else:
+      self._Set('pushurl', self.pushUrl)
     self._Set('review', self.review)
     self._Set('projectname', self.projectname)
     self._Set('fetch', list(map(str, self.fetch)))

@@ -704,7 +752,7 @@ class Branch(object):
       self._Set('merge', self.merge)

     else:
-      fd = open(self._config.file, 'ab')
+      fd = open(self._config.file, 'a')
       try:
         fd.write('[branch "%s"]\n' % self.name)
         if self.remote:

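GetUrlCookieFile above is declared with @contextlib.contextmanager, so the intended call site is a with statement; for persistent-https URLs the helper process stays alive for the duration of the block. A hedged usage sketch; the URL is an example and either printed value may be None:

from git_config import GetUrlCookieFile

url = 'persistent-https://gerrit.example.com/platform/build'
with GetUrlCookieFile(url, quiet=True) as (cookiefile, proxy):
  # Values come from git-remote-persistent-https when available,
  # otherwise from the user's http.cookiefile git config.
  print('cookiefile:', cookiefile)
  print('proxy:', proxy)
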
gitc_utils.py (new file, 154 lines; every line below is added)

@@ -0,0 +1,154 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import os
import platform
import re
import sys
import time

import git_command
import git_config
import wrapper

from error import ManifestParseError

NUM_BATCH_RETRIEVE_REVISIONID = 32

def get_gitc_manifest_dir():
  return wrapper.Wrapper().get_gitc_manifest_dir()

def parse_clientdir(gitc_fs_path):
  return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)

def _set_project_revisions(projects):
  """Sets the revisionExpr for a list of projects.

  Because of the limit of open file descriptors allowed, length of projects
  should not be overly large. Recommend calling this function multiple times
  with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.

  @param projects: List of project objects to set the revionExpr for.
  """
  # Retrieve the commit id for each project based off of it's current
  # revisionExpr and it is not already a commit id.
  project_gitcmds = [(
      project, git_command.GitCommand(None,
                                      ['ls-remote',
                                       project.remote.url,
                                       project.revisionExpr],
                                      capture_stdout=True, cwd='/tmp'))
      for project in projects if not git_config.IsId(project.revisionExpr)]
  for proj, gitcmd in project_gitcmds:
    if gitcmd.Wait():
      print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
      sys.exit(1)
    revisionExpr = gitcmd.stdout.split('\t')[0]
    if not revisionExpr:
      raise(ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
                               (proj.remote.url, proj.revisionExpr)))
    proj.revisionExpr = revisionExpr

def _manifest_groups(manifest):
  """Returns the manifest group string that should be synced

  This is the same logic used by Command.GetProjects(), which is used during
  repo sync

  @param manifest: The XmlManifest object
  """
  mp = manifest.manifestProject
  groups = mp.config.GetString('manifest.groups')
  if not groups:
    groups = 'default,platform-' + platform.system().lower()
  return groups

def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
  """Generate a manifest for shafsd to use for this GITC client.

  @param gitc_manifest: Current gitc manifest, or None if there isn't one yet.
  @param manifest: A GitcManifest object loaded with the current repo manifest.
  @param paths: List of project paths we want to update.
  """

  print('Generating GITC Manifest by fetching revision SHAs for each '
        'project.')
  if paths is None:
    paths = manifest.paths.keys()

  groups = [x for x in re.split(r'[,\s]+', _manifest_groups(manifest)) if x]

  # Convert the paths to projects, and filter them to the matched groups.
  projects = [manifest.paths[p] for p in paths]
  projects = [p for p in projects if p.MatchesGroups(groups)]

  if gitc_manifest is not None:
    for path, proj in manifest.paths.iteritems():
      if not proj.MatchesGroups(groups):
        continue

      if not proj.upstream and not git_config.IsId(proj.revisionExpr):
        proj.upstream = proj.revisionExpr

      if not path in gitc_manifest.paths:
        # Any new projects need their first revision, even if we weren't asked
        # for them.
        projects.append(proj)
      elif not path in paths:
        # And copy revisions from the previous manifest if we're not updating
        # them now.
        gitc_proj = gitc_manifest.paths[path]
        if gitc_proj.old_revision:
          proj.revisionExpr = None
          proj.old_revision = gitc_proj.old_revision
        else:
          proj.revisionExpr = gitc_proj.revisionExpr

  index = 0
  while index < len(projects):
    _set_project_revisions(
        projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
    index += NUM_BATCH_RETRIEVE_REVISIONID

  if gitc_manifest is not None:
    for path, proj in gitc_manifest.paths.iteritems():
      if proj.old_revision and path in paths:
        # If we updated a project that has been started, keep the old-revision
        # updated.
        repo_proj = manifest.paths[path]
        repo_proj.old_revision = repo_proj.revisionExpr
        repo_proj.revisionExpr = None

  # Convert URLs from relative to absolute.
  for _name, remote in manifest.remotes.iteritems():
    remote.fetchUrl = remote.resolvedFetchUrl

  # Save the manifest.
  save_manifest(manifest)

def save_manifest(manifest, client_dir=None):
  """Save the manifest file in the client_dir.

  @param client_dir: Client directory to save the manifest in.
  @param manifest: Manifest object to save.
  """
  if not client_dir:
    client_dir = manifest.gitc_client_dir
  with open(os.path.join(client_dir, '.manifest'), 'w') as f:
    manifest.Save(f, groups=_manifest_groups(manifest))
  # TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
  # Give the GITC filesystem time to register the manifest changes.
  time.sleep(3)

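_set_project_revisions asks callers to stay under NUM_BATCH_RETRIEVE_REVISIONID projects per call because each entry holds an open 'git ls-remote' pipe, and the while loop in generate_gitc_manifest slices the list accordingly. The same slicing pattern in isolation, with an illustrative batch size and stand-in items:

BATCH = 32  # mirrors NUM_BATCH_RETRIEVE_REVISIONID

def in_batches(items, size=BATCH):
  """Yield successive slices so no more than `size` pipes are open at once."""
  for start in range(0, len(items), size):
    yield items[start:start + size]

projects = ['project-%d' % i for i in range(100)]  # stand-ins for Project objects
for batch in in_batches(projects):
  # _set_project_revisions(batch) would run one ls-remote per entry here.
  print(len(batch))
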
hooks/commit-msg

@@ -1,7 +1,7 @@
 #!/bin/sh
-# From Gerrit Code Review 2.6
+# From Gerrit Code Review 2.12.1
 #
-# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
+# Part of Gerrit Code Review (https://www.gerritcodereview.com/)
 #
 # Copyright (C) 2009 The Android Open Source Project
 #

@@ -20,14 +20,14 @@

 unset GREP_OPTIONS

-CHANGE_ID_AFTER="Bug|Issue"
+CHANGE_ID_AFTER="Bug|Issue|Test"
 MSG="$1"

 # Check for, and add if missing, a unique Change-Id
 #
 add_ChangeId() {
 	clean_message=`sed -e '
-		/^diff --git a\/.*/{
+		/^diff --git .*/{
 			s///
 			q
 		}

@@ -39,6 +39,17 @@ add_ChangeId() {
 		return
 	fi

+	# Do not add Change-Id to temp commits
+	if echo "$clean_message" | head -1 | grep -q '^\(fixup\|squash\)!'
+	then
+		return
+	fi
+
+	if test "false" = "`git config --bool --get gerrit.createChangeId`"
+	then
+		return
+	fi
+
 	# Does Change-Id: already exist? if so, exit (no change).
 	if grep -i '^Change-Id:' "$MSG" >/dev/null
 	then

@@ -53,6 +64,10 @@ add_ChangeId() {
 		AWK=/usr/xpg4/bin/awk
 	fi

+	# Get core.commentChar from git config or use default symbol
+	commentChar=`git config --get core.commentChar`
+	commentChar=${commentChar:-#}
+
 	# How this works:
 	# - parse the commit message as (textLine+ blankLine*)*
 	# - assume textLine+ to be a footer until proven otherwise

@@ -71,13 +86,13 @@ add_ChangeId() {
 			blankLines = 0
 		}

-		# Skip lines starting with "#" without any spaces before it.
-		/^#/ { next }
+		# Skip lines starting with commentChar without any spaces before it.
+		/^'"$commentChar"'/ { next }

 		# Skip the line starting with the diff command and everything after it,
 		# up to the end of the file, assuming it is only patch data.
 		# If more than one line before the diff was empty, strip all but one.
-		/^diff --git a/ {
+		/^diff --git / {
 			blankLines = 0
 			while (getline) { }
 			next

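The updated hook skips Change-Id insertion for temporary fixup!/squash! commits. The same first-line test, re-expressed in Python purely for illustration; the sample commit subjects are invented:

import re

def wants_change_id(commit_message):
  """Mirror of the hook's new rule: temp commits get no Change-Id."""
  first_line = commit_message.splitlines()[0] if commit_message else ''
  return not re.match(r'^(fixup|squash)!', first_line)

print(wants_change_id('fixup! correct typo in sync'))    # False: left untouched
print(wants_change_id('sync: add --force-broken flag'))  # True: hook adds Change-Id
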
main.py (167 changed lines)

@@ -31,21 +31,31 @@ else:
   urllib = imp.new_module('urllib')
   urllib.request = urllib2

+try:
+  import kerberos
+except ImportError:
+  kerberos = None
+
+from color import SetDefaultColoring
 from trace import SetTrace
 from git_command import git, GitCommand
 from git_config import init_ssh, close_ssh
 from command import InteractiveCommand
 from command import MirrorSafeCommand
+from command import GitcAvailableCommand, GitcClientCommand
 from subcmds.version import Version
 from editor import Editor
 from error import DownloadError
+from error import InvalidProjectGroupsError
 from error import ManifestInvalidRevisionError
 from error import ManifestParseError
 from error import NoManifestException
 from error import NoSuchProjectError
 from error import RepoChangedException
-from manifest_xml import XmlManifest
+import gitc_utils
+from manifest_xml import GitcManifest, XmlManifest
 from pager import RunPager
+from wrapper import WrapperPath, Wrapper

 from subcmds import all_commands

@@ -63,6 +73,9 @@ global_options.add_option('-p', '--paginate',
 global_options.add_option('--no-pager',
                           dest='no_pager', action='store_true',
                           help='disable the pager')
+global_options.add_option('--color',
+                          choices=('auto', 'always', 'never'), default=None,
+                          help='control color usage: auto, always, never')
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution')

@@ -107,6 +120,8 @@ class _Repo(object):
       print('fatal: invalid usage of --version', file=sys.stderr)
       return 1

+    SetDefaultColoring(gopts.color)
+
     try:
       cmd = self.commands[name]
     except KeyError:

@@ -116,6 +131,12 @@ class _Repo(object):
     cmd.repodir = self.repodir
     cmd.manifest = XmlManifest(cmd.repodir)
+    cmd.gitc_manifest = None
+    gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
+    if gitc_client_name:
+      cmd.gitc_manifest = GitcManifest(cmd.repodir, gitc_client_name)
+      cmd.manifest.isGitcClient = True
+
     Editor.globalConfig = cmd.manifest.globalConfig

     if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:

@@ -123,8 +144,25 @@ class _Repo(object):
             file=sys.stderr)
       return 1

-    copts, cargs = cmd.OptionParser.parse_args(argv)
-    copts = cmd.ReadEnvironmentOptions(copts)
+    if isinstance(cmd, GitcAvailableCommand) and not gitc_utils.get_gitc_manifest_dir():
+      print("fatal: '%s' requires GITC to be available" % name,
+            file=sys.stderr)
+      return 1
+
+    if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
+      print("fatal: '%s' requires a GITC client" % name,
+            file=sys.stderr)
+      return 1
+
+    try:
+      copts, cargs = cmd.OptionParser.parse_args(argv)
+      copts = cmd.ReadEnvironmentOptions(copts)
+    except NoManifestException as e:
+      print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+            file=sys.stderr)
+      print('error: manifest missing or unreadable -- please run init',
+            file=sys.stderr)
+      return 1

     if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
       config = cmd.manifest.globalConfig

@@ -140,15 +178,13 @@ class _Repo(object):
     start = time.time()
     try:
       result = cmd.Execute(copts, cargs)
-    except DownloadError as e:
-      print('error: %s' % str(e), file=sys.stderr)
-      result = 1
-    except ManifestInvalidRevisionError as e:
-      print('error: %s' % str(e), file=sys.stderr)
-      result = 1
-    except NoManifestException as e:
-      print('error: manifest required for this command -- please run init',
-            file=sys.stderr)
+    except (DownloadError, ManifestInvalidRevisionError,
+            NoManifestException) as e:
+      print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+            file=sys.stderr)
+      if isinstance(e, NoManifestException):
+        print('error: manifest missing or unreadable -- please run init',
+              file=sys.stderr)
       result = 1
     except NoSuchProjectError as e:
       if e.name:

@@ -156,6 +192,12 @@ class _Repo(object):
       else:
         print('error: no project in current directory', file=sys.stderr)
       result = 1
+    except InvalidProjectGroupsError as e:
+      if e.name:
+        print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
+      else:
+        print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
+      result = 1
     finally:
       elapsed = time.time() - start
       hours, remainder = divmod(elapsed, 3600)

@@ -169,21 +211,10 @@ class _Repo(object):

     return result


 def _MyRepoPath():
   return os.path.dirname(__file__)

-def _MyWrapperPath():
-  return os.path.join(os.path.dirname(__file__), 'repo')
-
-_wrapper_module = None
-def WrapperModule():
-  global _wrapper_module
-  if not _wrapper_module:
-    _wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
-  return _wrapper_module
-
-def _CurrentWrapperVersion():
-  return WrapperModule().VERSION
-
 def _CheckWrapperVersion(ver, repo_path):
   if not repo_path:

@@ -193,7 +224,7 @@ def _CheckWrapperVersion(ver, repo_path):
     print('no --wrapper-version argument', file=sys.stderr)
     sys.exit(1)

-  exp = _CurrentWrapperVersion()
+  exp = Wrapper().VERSION
   ver = tuple(map(int, ver.split('.')))
   if len(ver) == 1:
     ver = (0, ver[0])

@@ -205,7 +236,7 @@ def _CheckWrapperVersion(ver, repo_path):
 !!! You must upgrade before you can continue:   !!!

     cp %s %s
-""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
+""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
     sys.exit(1)

   if exp > ver:

@@ -214,7 +245,7 @@ def _CheckWrapperVersion(ver, repo_path):
 ... You should upgrade soon:

     cp %s %s
-""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
+""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)

 def _CheckRepoDir(repo_dir):
   if not repo_dir:

@@ -342,6 +373,86 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
       self.retried = 0
     raise

+class _KerberosAuthHandler(urllib.request.BaseHandler):
+  def __init__(self):
+    self.retried = 0
+    self.context = None
+    self.handler_order = urllib.request.BaseHandler.handler_order - 50
+
+  def http_error_401(self, req, fp, code, msg, headers): # pylint:disable=unused-argument
+    host = req.get_host()
+    retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
+    return retry
+
+  def http_error_auth_reqed(self, auth_header, host, req, headers):
+    try:
+      spn = "HTTP@%s" % host
+      authdata = self._negotiate_get_authdata(auth_header, headers)
+
+      if self.retried > 3:
+        raise urllib.request.HTTPError(req.get_full_url(), 401,
+          "Negotiate auth failed", headers, None)
+      else:
+        self.retried += 1
+
+      neghdr = self._negotiate_get_svctk(spn, authdata)
+      if neghdr is None:
+        return None
+
+      req.add_unredirected_header('Authorization', neghdr)
+      response = self.parent.open(req)
+
+      srvauth = self._negotiate_get_authdata(auth_header, response.info())
+      if self._validate_response(srvauth):
+        return response
+    except kerberos.GSSError:
+      return None
+    except:
+      self.reset_retry_count()
+      raise
+    finally:
+      self._clean_context()
+
+  def reset_retry_count(self):
+    self.retried = 0
+
+  def _negotiate_get_authdata(self, auth_header, headers):
+    authhdr = headers.get(auth_header, None)
+    if authhdr is not None:
+      for mech_tuple in authhdr.split(","):
+        mech, __, authdata = mech_tuple.strip().partition(" ")
+        if mech.lower() == "negotiate":
+          return authdata.strip()
+    return None
+
+  def _negotiate_get_svctk(self, spn, authdata):
+    if authdata is None:
+      return None
+
+    result, self.context = kerberos.authGSSClientInit(spn)
+    if result < kerberos.AUTH_GSS_COMPLETE:
+      return None
+
+    result = kerberos.authGSSClientStep(self.context, authdata)
+    if result < kerberos.AUTH_GSS_CONTINUE:
+      return None
+
+    response = kerberos.authGSSClientResponse(self.context)
+    return "Negotiate %s" % response
+
+  def _validate_response(self, authdata):
+    if authdata is None:
+      return None
+    result = kerberos.authGSSClientStep(self.context, authdata)
+    if result == kerberos.AUTH_GSS_COMPLETE:
+      return True
+    return None
+
+  def _clean_context(self):
+    if self.context is not None:
+      kerberos.authGSSClientClean(self.context)
+      self.context = None
+
 def init_http():
   handlers = [_UserAgentHandler()]

@@ -358,6 +469,8 @@ def init_http():
     pass
   handlers.append(_BasicAuthHandler(mgr))
   handlers.append(_DigestAuthHandler(mgr))
+  if kerberos:
+    handlers.append(_KerberosAuthHandler())

   if 'http_proxy' in os.environ:
     url = os.environ['http_proxy']

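_KerberosAuthHandler is just another urllib BaseHandler that init_http appends when the kerberos module imports. A minimal, hypothetical sketch of how such handlers get registered with an opener; the logging handler and URL are stand-ins, not repo code:

try:
  import urllib.request as urllib_request   # Python 3
except ImportError:
  import urllib2 as urllib_request          # Python 2 fallback, as main.py does

class _LoggingHandler(urllib_request.BaseHandler):
  """Stand-in for a custom handler such as _KerberosAuthHandler."""
  def http_request(self, req):
    print('requesting', req.get_full_url())
    return req

handlers = [_LoggingHandler()]
opener = urllib_request.build_opener(*handlers)
urllib_request.install_opener(opener)
# urllib_request.urlopen('https://gerrit.example.com/ssh_info') would now pass
# through every installed handler, including auth handlers like the Kerberos
# one when the kerberos module is importable.
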
manifest_xml.py (209 changed lines)

@@ -29,17 +29,19 @@ else:
   urllib = imp.new_module('urllib')
   urllib.parse = urlparse

+import gitc_utils
 from git_config import GitConfig
 from git_refs import R_HEADS, HEAD
 from project import RemoteSpec, Project, MetaProject
-from error import ManifestParseError
+from error import ManifestParseError, ManifestInvalidRevisionError

 MANIFEST_FILE_NAME = 'manifest.xml'
 LOCAL_MANIFEST_NAME = 'local_manifest.xml'
 LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'

-urllib.parse.uses_relative.extend(['ssh', 'git'])
-urllib.parse.uses_netloc.extend(['ssh', 'git'])
+# urljoin gets confused if the scheme is not known.
+urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])

 class _Default(object):
   """Project defaults within the manifest."""

@@ -62,13 +64,17 @@ class _XmlRemote(object):
                name,
                alias=None,
                fetch=None,
+               pushUrl=None,
                manifestUrl=None,
-               review=None):
+               review=None,
+               revision=None):
     self.name = name
     self.fetchUrl = fetch
+    self.pushUrl = pushUrl
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
+    self.revision = revision
     self.resolvedFetchUrl = self._resolveFetchUrl()

   def __eq__(self, other):

@@ -80,18 +86,17 @@ class _XmlRemote(object):
   def _resolveFetchUrl(self):
     url = self.fetchUrl.rstrip('/')
     manifestUrl = self.manifestUrl.rstrip('/')
-    p = manifestUrl.startswith('persistent-http')
-    if p:
-      manifestUrl = manifestUrl[len('persistent-'):]
-
-    # urljoin will get confused if there is no scheme in the base url
-    # ie, if manifestUrl is of the form <hostname:port>
+    # urljoin will gets confused over quite a few things.  The ones we care
+    # about here are:
+    # * no scheme in the base url, like <hostname:port>
+    # We handle no scheme by replacing it with an obscure protocol, gopher
+    # and then replacing it with the original when we are done.
+
     if manifestUrl.find(':') != manifestUrl.find('/') - 1:
-      manifestUrl = 'gopher://' + manifestUrl
-      url = urllib.parse.urljoin(manifestUrl, url)
+      url = urllib.parse.urljoin('gopher://' + manifestUrl, url)
       url = re.sub(r'^gopher://', '', url)
-    if p:
-      url = 'persistent-' + url
+    else:
+      url = urllib.parse.urljoin(manifestUrl, url)
     return url

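The rewritten comment explains the gopher trick: urljoin only resolves relative references for schemes it already knows, so a scheme-less <hostname:port> base borrows gopher:// (which is on urllib's known-scheme lists) and strips it off afterwards. A quick illustration with a made-up host and paths:

import re
try:
  import urllib.parse as urlparse   # Python 3
except ImportError:
  import urlparse                   # Python 2

base = 'example.com:29418/manifests'   # scheme-less base, as in <hostname:port>
relative = 'mirror/platform'

# base.find(':') != base.find('/') - 1 here, so no real scheme is present.
# Borrow a scheme urllib understands, join, then strip it off again.
joined = urlparse.urljoin('gopher://' + base, relative)
print(re.sub(r'^gopher://', '', joined))   # example.com:29418/mirror/platform
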
def ToRemoteSpec(self, projectName):
|
def ToRemoteSpec(self, projectName):
|
||||||
@ -99,7 +104,11 @@ class _XmlRemote(object):
|
|||||||
remoteName = self.name
|
remoteName = self.name
|
||||||
if self.remoteAlias:
|
if self.remoteAlias:
|
||||||
remoteName = self.remoteAlias
|
remoteName = self.remoteAlias
|
||||||
return RemoteSpec(remoteName, url, self.reviewUrl)
|
return RemoteSpec(remoteName,
|
||||||
|
url=url,
|
||||||
|
pushUrl=self.pushUrl,
|
||||||
|
review=self.reviewUrl,
|
||||||
|
orig_name=self.name)
|
||||||
|
|
||||||
class XmlManifest(object):
|
class XmlManifest(object):
|
||||||
"""manages the repo configuration file"""
|
"""manages the repo configuration file"""
|
||||||
@ -110,6 +119,7 @@ class XmlManifest(object):
|
|||||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
||||||
self.globalConfig = GitConfig.ForUser()
|
self.globalConfig = GitConfig.ForUser()
|
||||||
self.localManifestWarning = False
|
self.localManifestWarning = False
|
||||||
|
self.isGitcClient = False
|
||||||
|
|
||||||
self.repoProject = MetaProject(self, 'repo',
|
self.repoProject = MetaProject(self, 'repo',
|
||||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||||
@ -153,19 +163,27 @@ class XmlManifest(object):
|
|||||||
root.appendChild(e)
|
root.appendChild(e)
|
||||||
e.setAttribute('name', r.name)
|
e.setAttribute('name', r.name)
|
||||||
e.setAttribute('fetch', r.fetchUrl)
|
e.setAttribute('fetch', r.fetchUrl)
|
||||||
|
if r.pushUrl is not None:
|
||||||
|
e.setAttribute('pushurl', r.pushUrl)
|
||||||
if r.remoteAlias is not None:
|
if r.remoteAlias is not None:
|
||||||
e.setAttribute('alias', r.remoteAlias)
|
e.setAttribute('alias', r.remoteAlias)
|
||||||
if r.reviewUrl is not None:
|
if r.reviewUrl is not None:
|
||||||
e.setAttribute('review', r.reviewUrl)
|
e.setAttribute('review', r.reviewUrl)
|
||||||
|
if r.revision is not None:
|
||||||
|
e.setAttribute('revision', r.revision)
|
||||||
|
|
||||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
|
def _ParseGroups(self, groups):
|
||||||
|
return [x for x in re.split(r'[,\s]+', groups) if x]
|
||||||
|
|
||||||
|
def Save(self, fd, peg_rev=False, peg_rev_upstream=True, groups=None):
|
||||||
"""Write the current manifest out to the given file descriptor.
|
"""Write the current manifest out to the given file descriptor.
|
||||||
"""
|
"""
|
||||||
mp = self.manifestProject
|
mp = self.manifestProject
|
||||||
|
|
||||||
groups = mp.config.GetString('manifest.groups')
|
if groups is None:
|
||||||
|
groups = mp.config.GetString('manifest.groups')
|
||||||
if groups:
|
if groups:
|
||||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
groups = self._ParseGroups(groups)
|
||||||
|
|
||||||
doc = xml.dom.minidom.Document()
|
doc = xml.dom.minidom.Document()
|
||||||
root = doc.createElement('manifest')
|
root = doc.createElement('manifest')
|
||||||
@ -195,6 +213,9 @@ class XmlManifest(object):
|
|||||||
if d.revisionExpr:
|
if d.revisionExpr:
|
||||||
have_default = True
|
have_default = True
|
||||||
e.setAttribute('revision', d.revisionExpr)
|
e.setAttribute('revision', d.revisionExpr)
|
||||||
|
if d.destBranchExpr:
|
||||||
|
have_default = True
|
||||||
|
e.setAttribute('dest-branch', d.destBranchExpr)
|
||||||
if d.sync_j > 1:
|
if d.sync_j > 1:
|
||||||
have_default = True
|
have_default = True
|
||||||
e.setAttribute('sync-j', '%d' % d.sync_j)
|
e.setAttribute('sync-j', '%d' % d.sync_j)
|
||||||
@ -236,22 +257,32 @@ class XmlManifest(object):
|
|||||||
e.setAttribute('path', relpath)
|
e.setAttribute('path', relpath)
|
||||||
remoteName = None
|
remoteName = None
|
||||||
if d.remote:
|
if d.remote:
|
||||||
remoteName = d.remote.remoteAlias or d.remote.name
|
remoteName = d.remote.name
|
||||||
if not d.remote or p.remote.name != remoteName:
|
if not d.remote or p.remote.orig_name != remoteName:
|
||||||
e.setAttribute('remote', p.remote.name)
|
remoteName = p.remote.orig_name
|
||||||
|
e.setAttribute('remote', remoteName)
|
||||||
if peg_rev:
|
if peg_rev:
|
||||||
if self.IsMirror:
|
if self.IsMirror:
|
||||||
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
|
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
|
||||||
else:
|
else:
|
||||||
value = p.work_git.rev_parse(HEAD + '^0')
|
value = p.work_git.rev_parse(HEAD + '^0')
|
||||||
e.setAttribute('revision', value)
|
e.setAttribute('revision', value)
|
||||||
if peg_rev_upstream and value != p.revisionExpr:
|
if peg_rev_upstream:
|
||||||
# Only save the origin if the origin is not a sha1, and the default
|
if p.upstream:
|
||||||
# isn't our value, and the if the default doesn't already have that
|
e.setAttribute('upstream', p.upstream)
|
||||||
# covered.
|
elif value != p.revisionExpr:
|
||||||
e.setAttribute('upstream', p.revisionExpr)
|
# Only save the origin if the origin is not a sha1, and the default
|
||||||
elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
|
# isn't our value
|
||||||
e.setAttribute('revision', p.revisionExpr)
|
e.setAttribute('upstream', p.revisionExpr)
|
||||||
|
else:
|
||||||
|
revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
|
||||||
|
if not revision or revision != p.revisionExpr:
|
||||||
|
e.setAttribute('revision', p.revisionExpr)
|
||||||
|
if p.upstream and p.upstream != p.revisionExpr:
|
||||||
|
e.setAttribute('upstream', p.upstream)
|
||||||
|
|
||||||
|
if p.dest_branch and p.dest_branch != d.destBranchExpr:
|
||||||
|
e.setAttribute('dest-branch', p.dest_branch)
|
||||||
|
|
||||||
for c in p.copyfiles:
|
for c in p.copyfiles:
|
||||||
ce = doc.createElement('copyfile')
|
ce = doc.createElement('copyfile')
|
||||||
@ -259,6 +290,12 @@ class XmlManifest(object):
|
|||||||
ce.setAttribute('dest', c.dest)
|
ce.setAttribute('dest', c.dest)
|
||||||
e.appendChild(ce)
|
e.appendChild(ce)
|
||||||
|
|
||||||
|
for l in p.linkfiles:
|
||||||
|
le = doc.createElement('linkfile')
|
||||||
|
le.setAttribute('src', l.src)
|
||||||
|
le.setAttribute('dest', l.dest)
|
||||||
|
e.appendChild(le)
|
||||||
|
|
||||||
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
|
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
|
||||||
egroups = [g for g in p.groups if g not in default_groups]
|
egroups = [g for g in p.groups if g not in default_groups]
|
||||||
if egroups:
|
if egroups:
|
||||||
@ -277,6 +314,11 @@ class XmlManifest(object):
|
|||||||
if p.sync_s:
|
if p.sync_s:
|
||||||
e.setAttribute('sync-s', 'true')
|
e.setAttribute('sync-s', 'true')
|
||||||
|
|
||||||
|
if p.clone_depth:
|
||||||
|
e.setAttribute('clone-depth', str(p.clone_depth))
|
||||||
|
|
||||||
|
self._output_manifest_project_extras(p, e)
|
||||||
|
|
||||||
if p.subprojects:
|
if p.subprojects:
|
||||||
subprojects = set(subp.name for subp in p.subprojects)
|
subprojects = set(subp.name for subp in p.subprojects)
|
||||||
output_projects(p, e, list(sorted(subprojects)))
|
output_projects(p, e, list(sorted(subprojects)))
|
||||||
@ -294,6 +336,10 @@ class XmlManifest(object):
|
|||||||
|
|
||||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||||
|
|
||||||
|
def _output_manifest_project_extras(self, p, e):
|
||||||
|
"""Manifests can modify e if they support extra project attributes."""
|
||||||
|
pass
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def paths(self):
|
def paths(self):
|
||||||
self._Load()
|
self._Load()
|
||||||
@ -302,7 +348,7 @@ class XmlManifest(object):
|
|||||||
@property
|
@property
|
||||||
def projects(self):
|
def projects(self):
|
||||||
self._Load()
|
self._Load()
|
||||||
return self._paths.values()
|
return list(self._paths.values())
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def remotes(self):
|
def remotes(self):
|
||||||
@ -490,6 +536,23 @@ class XmlManifest(object):
|
|||||||
if node.nodeName == 'project':
|
if node.nodeName == 'project':
|
||||||
project = self._ParseProject(node)
|
project = self._ParseProject(node)
|
||||||
recursively_add_projects(project)
|
recursively_add_projects(project)
|
||||||
|
if node.nodeName == 'extend-project':
|
||||||
|
name = self._reqatt(node, 'name')
|
||||||
|
|
||||||
|
if name not in self._projects:
|
||||||
|
raise ManifestParseError('extend-project element specifies non-existent '
|
||||||
|
'project: %s' % name)
|
||||||
|
|
||||||
|
path = node.getAttribute('path')
|
||||||
|
groups = node.getAttribute('groups')
|
||||||
|
if groups:
|
||||||
|
groups = self._ParseGroups(groups)
|
||||||
|
|
||||||
|
for p in self._projects[name]:
|
||||||
|
if path and p.relpath != path:
|
||||||
|
continue
|
||||||
|
if groups:
|
||||||
|
p.groups.extend(groups)
|
||||||
if node.nodeName == 'repo-hooks':
|
if node.nodeName == 'repo-hooks':
|
||||||
# Get the name of the project and the (space-separated) list of enabled.
|
# Get the name of the project and the (space-separated) list of enabled.
|
||||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||||
@ -566,10 +629,11 @@ class XmlManifest(object):
|
|||||||
gitdir = gitdir,
|
gitdir = gitdir,
|
||||||
objdir = gitdir,
|
objdir = gitdir,
|
||||||
worktree = None,
|
worktree = None,
|
||||||
relpath = None,
|
relpath = name or None,
|
||||||
revisionExpr = m.revisionExpr,
|
revisionExpr = m.revisionExpr,
|
||||||
revisionId = None)
|
revisionId = None)
|
||||||
self._projects[project.name] = [project]
|
self._projects[project.name] = [project]
|
||||||
|
self._paths[project.relpath] = project
|
||||||
|
|
||||||
def _ParseRemote(self, node):
|
def _ParseRemote(self, node):
|
||||||
"""
|
"""
|
||||||
@ -580,11 +644,17 @@ class XmlManifest(object):
|
|||||||
if alias == '':
|
if alias == '':
|
||||||
alias = None
|
alias = None
|
||||||
fetch = self._reqatt(node, 'fetch')
|
fetch = self._reqatt(node, 'fetch')
|
||||||
|
pushUrl = node.getAttribute('pushurl')
|
||||||
|
if pushUrl == '':
|
||||||
|
pushUrl = None
|
||||||
       review = node.getAttribute('review')
       if review == '':
         review = None
+      revision = node.getAttribute('revision')
+      if revision == '':
+        revision = None
       manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-      return _XmlRemote(name, alias, fetch, manifestUrl, review)
+      return _XmlRemote(name, alias, fetch, pushUrl, manifestUrl, review, revision)
 
   def _ParseDefault(self, node):
     """
@@ -662,7 +732,7 @@ class XmlManifest(object):
   def _UnjoinName(self, parent_name, name):
     return os.path.relpath(name, parent_name)
 
-  def _ParseProject(self, node, parent = None):
+  def _ParseProject(self, node, parent = None, **extra_proj_attrs):
     """
     reads a <project> element from the manifest file
     """
@@ -677,7 +747,7 @@ class XmlManifest(object):
       raise ManifestParseError("no remote for project %s within %s" %
            (name, self.manifestFile))
 
-    revisionExpr = node.getAttribute('revision')
+    revisionExpr = node.getAttribute('revision') or remote.revision
    if not revisionExpr:
      revisionExpr = self._default.revisionExpr
    if not revisionExpr:
@@ -726,7 +796,7 @@ class XmlManifest(object):
    groups = ''
    if node.hasAttribute('groups'):
      groups = node.getAttribute('groups')
-    groups = [x for x in re.split(r'[,\s]+', groups) if x]
+    groups = self._ParseGroups(groups)

    if parent is None:
      relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
@@ -757,11 +827,14 @@ class XmlManifest(object):
                      clone_depth = clone_depth,
                      upstream = upstream,
                      parent = parent,
-                      dest_branch = dest_branch)
+                      dest_branch = dest_branch,
+                      **extra_proj_attrs)

    for n in node.childNodes:
      if n.nodeName == 'copyfile':
        self._ParseCopyFile(project, n)
+      if n.nodeName == 'linkfile':
+        self._ParseLinkFile(project, n)
      if n.nodeName == 'annotation':
        self._ParseAnnotation(project, n)
      if n.nodeName == 'project':
@@ -811,6 +884,14 @@ class XmlManifest(object):
        # dest is relative to the top of the tree
        project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))

+  def _ParseLinkFile(self, project, node):
+    src = self._reqatt(node, 'src')
+    dest = self._reqatt(node, 'dest')
+    if not self.IsMirror:
+      # src is project relative;
+      # dest is relative to the top of the tree
+      project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
  def _ParseAnnotation(self, project, node):
    name = self._reqatt(node, 'name')
    value = self._reqatt(node, 'value')
@@ -843,3 +924,61 @@ class XmlManifest(object):
      raise ManifestParseError("no %s in <%s> within %s" %
           (attname, node.nodeName, self.manifestFile))
    return v

+  def projectsDiff(self, manifest):
+    """return the projects differences between two manifests.
+
+    The diff will be from self to given manifest.
+
+    """
+    fromProjects = self.paths
+    toProjects = manifest.paths
+
+    fromKeys = sorted(fromProjects.keys())
+    toKeys = sorted(toProjects.keys())
+
+    diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
+
+    for proj in fromKeys:
+      if not proj in toKeys:
+        diff['removed'].append(fromProjects[proj])
+      else:
+        fromProj = fromProjects[proj]
+        toProj = toProjects[proj]
+        try:
+          fromRevId = fromProj.GetCommitRevisionId()
+          toRevId = toProj.GetCommitRevisionId()
+        except ManifestInvalidRevisionError:
+          diff['unreachable'].append((fromProj, toProj))
+        else:
+          if fromRevId != toRevId:
+            diff['changed'].append((fromProj, toProj))
+        toKeys.remove(proj)
+
+    for proj in toKeys:
+      diff['added'].append(toProjects[proj])
+
+    return diff
+
+
+class GitcManifest(XmlManifest):
+
+  def __init__(self, repodir, gitc_client_name):
+    """Initialize the GitcManifest object."""
+    super(GitcManifest, self).__init__(repodir)
+    self.isGitcClient = True
+    self.gitc_client_name = gitc_client_name
+    self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
+                                        gitc_client_name)
+    self.manifestFile = os.path.join(self.gitc_client_dir, '.manifest')
+
+  def _ParseProject(self, node, parent = None):
+    """Override _ParseProject and add support for GITC specific attributes."""
+    return super(GitcManifest, self)._ParseProject(
+        node, parent=parent, old_revision=node.getAttribute('old-revision'))
+
+  def _output_manifest_project_extras(self, p, e):
+    """Output GITC Specific Project attributes"""
+    if p.old_revision:
+      e.setAttribute('old-revision', str(p.old_revision))
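The projectsDiff() method above only needs each project's path and a resolvable revision, so its bucketing logic can be tried out in isolation. A minimal sketch using plain dicts in place of the manifest's Project objects; the function name, the sample paths and the use of None to stand in for an unreachable revision are illustrative assumptions, not the repo API:

from __future__ import print_function

# Toy version of projectsDiff(): map path -> revision id, with None standing
# in for a revision that cannot be resolved (hypothetical simplification).
def projects_diff(from_paths, to_paths):
  diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
  to_keys = sorted(to_paths.keys())
  for path in sorted(from_paths.keys()):
    if path not in to_keys:
      diff['removed'].append(path)
      continue
    from_rev, to_rev = from_paths[path], to_paths[path]
    if from_rev is None or to_rev is None:
      diff['unreachable'].append(path)
    elif from_rev != to_rev:
      diff['changed'].append((path, from_rev, to_rev))
    to_keys.remove(path)
  diff['added'] = to_keys        # whatever was never matched is new
  return diff

print(projects_diff({'a': '1111', 'b': '2222'}, {'b': '3333', 'c': '4444'}))
# -> removed: ['a'], changed: [('b', '2222', '3333')], added: ['c']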
project.py: 1208 lines changed (diff suppressed because it is too large).

repo: 256 lines changed.
@@ -1,8 +1,11 @@
 #!/usr/bin/env python
 
-## repo default configuration
-##
-REPO_URL = 'https://gerrit.googlesource.com/git-repo'
+# repo default configuration
+#
+import os
+REPO_URL = os.environ.get('REPO_URL', None)
+if not REPO_URL:
+  REPO_URL = 'https://gerrit.googlesource.com/git-repo'
 REPO_REV = 'stable'
 
 # Copyright (C) 2008 Google Inc.
@@ -20,10 +23,13 @@ REPO_REV = 'stable'
 # limitations under the License.
 
 # increment this whenever we make important changes to this script
-VERSION = (1, 20)
+VERSION = (1, 23)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1, 2)
+
+# Each individual key entry is created by using:
+# gpg --armor --export keyid
 MAINTAINER_KEYS = """
 
      Repo Maintainer <repo@android.kernel.org>
@@ -101,19 +107,21 @@ JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv
 -----END PGP PUBLIC KEY BLOCK-----
 """
 
 GIT = 'git'                      # our git command
 MIN_GIT_VERSION = (1, 7, 2)      # minimum supported git version
 repodir = '.repo'                # name of repo's private directory
 S_repo = 'repo'                  # special repo repository
 S_manifests = 'manifests'        # special manifest repository
 REPO_MAIN = S_repo + '/main.py'  # main script
 MIN_PYTHON_VERSION = (2, 6)      # minimum supported python version
+GITC_CONFIG_FILE = '/gitc/.config'
+GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
 
 
 import errno
 import optparse
-import os
 import re
+import shutil
 import stat
 import subprocess
 import sys
@@ -138,10 +146,6 @@ def _print(*objects, **kwargs):
 
 # Python version check
 ver = sys.version_info
-if ver[0] == 3:
-  _print('warning: Python 3 support is currently experimental. YMMV.\n'
-         'Please use Python 2.6 - 2.7 instead.',
-         file=sys.stderr)
 if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
   _print('error: Python version %s unsupported.\n'
          'Please use Python 2.6 - 2.7 instead.'
@@ -195,6 +199,9 @@ group.add_option('-p', '--platform',
                  help='restrict manifest projects to ones with a specified '
                       'platform group [auto|all|none|linux|darwin|...]',
                  metavar='PLATFORM')
+group.add_option('--no-clone-bundle',
+                 dest='no_clone_bundle', action='store_true',
+                 help='disable use of /clone.bundle on HTTP/HTTPS')
 
 
 # Tool
@@ -215,14 +222,69 @@ group.add_option('--config-name',
                  dest='config_name', action="store_true", default=False,
                  help='Always prompt for name/e-mail')
 
 
+def _GitcInitOptions(init_optparse_arg):
+  init_optparse_arg.set_usage("repo gitc-init -u url -c client [options]")
+  g = init_optparse_arg.add_option_group('GITC options')
+  g.add_option('-f', '--manifest-file',
+               dest='manifest_file',
+               help='Optional manifest file to use for this GITC client.')
+  g.add_option('-c', '--gitc-client',
+               dest='gitc_client',
+               help='The name of the gitc_client instance to create or modify.')
+
+_gitc_manifest_dir = None
+
+
+def get_gitc_manifest_dir():
+  global _gitc_manifest_dir
+  if _gitc_manifest_dir is None:
+    _gitc_manifest_dir = ''
+    try:
+      with open(GITC_CONFIG_FILE, 'r') as gitc_config:
+        for line in gitc_config:
+          match = re.match('gitc_dir=(?P<gitc_manifest_dir>.*)', line)
+          if match:
+            _gitc_manifest_dir = match.group('gitc_manifest_dir')
+    except IOError:
+      pass
+  return _gitc_manifest_dir
+
+
+def gitc_parse_clientdir(gitc_fs_path):
+  """Parse a path in the GITC FS and return its client name.
+
+  @param gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
+
+  @returns: The GITC client name
+  """
+  if gitc_fs_path == GITC_FS_ROOT_DIR:
+    return None
+  if not gitc_fs_path.startswith(GITC_FS_ROOT_DIR):
+    manifest_dir = get_gitc_manifest_dir()
+    if manifest_dir == '':
+      return None
+    if manifest_dir[-1] != '/':
+      manifest_dir += '/'
+    if gitc_fs_path == manifest_dir:
+      return None
+    if not gitc_fs_path.startswith(manifest_dir):
+      return None
+    return gitc_fs_path.split(manifest_dir)[1].split('/')[0]
+  return gitc_fs_path.split(GITC_FS_ROOT_DIR)[1].split('/')[0]
+
+
 class CloneFailure(Exception):
 
   """Indicate the remote clone of repo itself failed.
  """


-def _Init(args):
+def _Init(args, gitc_init=False):
  """Installs repo by cloning it over the network.
  """
+  if gitc_init:
+    _GitcInitOptions(init_optparse)
  opt, args = init_optparse.parse_args(args)
  if args:
    init_optparse.print_usage()
@@ -245,6 +307,26 @@ def _Init(args):
    raise CloneFailure()

  try:
+    if gitc_init:
+      gitc_manifest_dir = get_gitc_manifest_dir()
+      if not gitc_manifest_dir:
+        _print('fatal: GITC filesystem is not available. Exiting...',
+               file=sys.stderr)
+        sys.exit(1)
+      gitc_client = opt.gitc_client
+      if not gitc_client:
+        gitc_client = gitc_parse_clientdir(os.getcwd())
+      if not gitc_client:
+        _print('fatal: GITC client (-c) is required.', file=sys.stderr)
+        sys.exit(1)
+      client_dir = os.path.join(gitc_manifest_dir, gitc_client)
+      if not os.path.exists(client_dir):
+        os.makedirs(client_dir)
+      os.chdir(client_dir)
+      if os.path.exists(repodir):
+        # This GITC Client has already initialized repo so continue.
+        return
+
    os.mkdir(repodir)
  except OSError as e:
    if e.errno != errno.EEXIST:
@@ -263,7 +345,7 @@ def _Init(args):
      can_verify = True

    dst = os.path.abspath(os.path.join(repodir, S_repo))
-    _Clone(url, dst, opt.quiet)
+    _Clone(url, dst, opt.quiet, not opt.no_clone_bundle)

    if can_verify and not opt.no_repo_verify:
      rev = _Verify(dst, branch, opt.quiet)
@@ -278,6 +360,20 @@ def _Init(args):
      raise


+def ParseGitVersion(ver_str):
+  if not ver_str.startswith('git version '):
+    return None
+
+  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
+  to_tuple = []
+  for num_str in num_ver_str.split('.')[:3]:
+    if num_str.isdigit():
+      to_tuple.append(int(num_str))
+    else:
+      to_tuple.append(0)
+  return tuple(to_tuple)
+
+
 def _CheckGitVersion():
  cmd = [GIT, '--version']
  try:
@@ -295,12 +391,11 @@ def _CheckGitVersion():
  proc.stdout.close()
  proc.wait()

-  if not ver_str.startswith('git version '):
+  ver_act = ParseGitVersion(ver_str)
+  if ver_act is None:
    _print('error: "%s" unsupported' % ver_str, file=sys.stderr)
    raise CloneFailure()

-  ver_str = ver_str[len('git version '):].strip()
-  ver_act = tuple(map(int, ver_str.split('.')[0:3]))
  if ver_act < MIN_GIT_VERSION:
    need = '.'.join(map(str, MIN_GIT_VERSION))
    _print('fatal: git %s or later required' % need, file=sys.stderr)
@@ -343,13 +438,16 @@ def SetupGnuPG(quiet):
      sys.exit(1)

  env = os.environ.copy()
-  env['GNUPGHOME'] = gpg_dir.encode()
+  try:
+    env['GNUPGHOME'] = gpg_dir
+  except UnicodeEncodeError:
+    env['GNUPGHOME'] = gpg_dir.encode()

  cmd = ['gpg', '--import']
  try:
    proc = subprocess.Popen(cmd,
-                            env = env,
-                            stdin = subprocess.PIPE)
+                            env=env,
+                            stdin=subprocess.PIPE)
  except OSError as e:
    if not quiet:
      _print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
@@ -375,7 +473,7 @@ def _SetConfig(local, name, value):
  """Set a git configuration option to the specified value.
  """
  cmd = [GIT, 'config', name, value]
-  if subprocess.Popen(cmd, cwd = local).wait() != 0:
+  if subprocess.Popen(cmd, cwd=local).wait() != 0:
    raise CloneFailure()


@@ -388,9 +486,9 @@ def _InitHttp():
    n = netrc.netrc()
    for host in n.hosts:
      p = n.hosts[host]
      mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
      mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
-  except:
+  except:  # pylint: disable=bare-except
    pass
  handlers.append(urllib.request.HTTPBasicAuthHandler(mgr))
  handlers.append(urllib.request.HTTPDigestAuthHandler(mgr))
@@ -403,6 +501,7 @@ def _InitHttp():
    handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
  urllib.request.install_opener(urllib.request.build_opener(*handlers))
+

def _Fetch(url, local, src, quiet):
  if not quiet:
    _print('Get %s' % url, file=sys.stderr)
@@ -417,22 +516,23 @@ def _Fetch(url, local, src, quiet):
  cmd.append('+refs/heads/*:refs/remotes/origin/*')
  cmd.append('refs/tags/*:refs/tags/*')

-  proc = subprocess.Popen(cmd, cwd = local, stderr = err)
+  proc = subprocess.Popen(cmd, cwd=local, stderr=err)
  if err:
    proc.stderr.read()
    proc.stderr.close()
  if proc.wait() != 0:
    raise CloneFailure()


def _DownloadBundle(url, local, quiet):
  if not url.endswith('/'):
    url += '/'
  url += 'clone.bundle'

  proc = subprocess.Popen(
      [GIT, 'config', '--get-regexp', 'url.*.insteadof'],
-      cwd = local,
-      stdout = subprocess.PIPE)
+      cwd=local,
+      stdout=subprocess.PIPE)
  for line in proc.stdout:
    m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
    if m:
@@ -452,7 +552,7 @@ def _DownloadBundle(url, local, quiet):
  try:
    r = urllib.request.urlopen(url)
  except urllib.error.HTTPError as e:
-    if e.code in [403, 404]:
+    if e.code in [401, 403, 404, 501]:
      return False
    _print('fatal: Cannot get %s' % url, file=sys.stderr)
    _print('fatal: HTTP error %s' % e.code, file=sys.stderr)
@@ -474,6 +574,7 @@ def _DownloadBundle(url, local, quiet):
  finally:
    dest.close()
+

def _ImportBundle(local):
  path = os.path.join(local, '.git', 'clone.bundle')
  try:
@@ -481,7 +582,8 @@ def _ImportBundle(local):
  finally:
    os.remove(path)

-def _Clone(url, local, quiet):
+
+def _Clone(url, local, quiet, clone_bundle):
  """Clones a git repository to a new subdirectory of repodir
  """
  try:
@@ -493,14 +595,14 @@ def _Clone(url, local, quiet):

  cmd = [GIT, 'init', '--quiet']
  try:
-    proc = subprocess.Popen(cmd, cwd = local)
+    proc = subprocess.Popen(cmd, cwd=local)
  except OSError as e:
    _print(file=sys.stderr)
    _print("fatal: '%s' is not available" % GIT, file=sys.stderr)
    _print('fatal: %s' % e, file=sys.stderr)
    _print(file=sys.stderr)
    _print('Please make sure %s is installed and in your path.' % GIT,
           file=sys.stderr)
    raise CloneFailure()
  if proc.wait() != 0:
    _print('fatal: could not create %s' % local, file=sys.stderr)
@@ -508,12 +610,12 @@ def _Clone(url, local, quiet):

  _InitHttp()
  _SetConfig(local, 'remote.origin.url', url)
-  _SetConfig(local, 'remote.origin.fetch',
-             '+refs/heads/*:refs/remotes/origin/*')
-  if _DownloadBundle(url, local, quiet):
+  _SetConfig(local,
+             'remote.origin.fetch',
+             '+refs/heads/*:refs/remotes/origin/*')
+  if clone_bundle and _DownloadBundle(url, local, quiet):
    _ImportBundle(local)
-  else:
-    _Fetch(url, local, 'origin', quiet)
+  _Fetch(url, local, 'origin', quiet)


def _Verify(cwd, branch, quiet):
@@ -523,7 +625,7 @@ def _Verify(cwd, branch, quiet):
  proc = subprocess.Popen(cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
-                          cwd = cwd)
+                          cwd=cwd)
  cur = proc.stdout.read().strip()
  proc.stdout.close()

@@ -541,18 +643,21 @@ def _Verify(cwd, branch, quiet):
    if not quiet:
      _print(file=sys.stderr)
      _print("info: Ignoring branch '%s'; using tagged release '%s'"
             % (branch, cur), file=sys.stderr)
      _print(file=sys.stderr)

  env = os.environ.copy()
-  env['GNUPGHOME'] = gpg_dir.encode()
+  try:
+    env['GNUPGHOME'] = gpg_dir
+  except UnicodeEncodeError:
+    env['GNUPGHOME'] = gpg_dir.encode()

  cmd = [GIT, 'tag', '-v', cur]
  proc = subprocess.Popen(cmd,
-                          stdout = subprocess.PIPE,
-                          stderr = subprocess.PIPE,
-                          cwd = cwd,
-                          env = env)
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          cwd=cwd,
+                          env=env)
  out = proc.stdout.read()
  proc.stdout.close()

@@ -572,21 +677,21 @@ def _Checkout(cwd, branch, rev, quiet):
  """Checkout an upstream branch into the repository and track it.
  """
  cmd = [GIT, 'update-ref', 'refs/heads/default', rev]
-  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+  if subprocess.Popen(cmd, cwd=cwd).wait() != 0:
    raise CloneFailure()

  _SetConfig(cwd, 'branch.default.remote', 'origin')
  _SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch)

  cmd = [GIT, 'symbolic-ref', 'HEAD', 'refs/heads/default']
-  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+  if subprocess.Popen(cmd, cwd=cwd).wait() != 0:
    raise CloneFailure()

  cmd = [GIT, 'read-tree', '--reset', '-u']
  if not quiet:
    cmd.append('-v')
  cmd.append('HEAD')
-  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+  if subprocess.Popen(cmd, cwd=cwd).wait() != 0:
    raise CloneFailure()


@@ -598,8 +703,8 @@ def _FindRepo():

  olddir = None
  while curdir != '/' \
        and curdir != olddir \
        and not repo:
    repo = os.path.join(curdir, repodir, REPO_MAIN)
    if not os.path.isfile(repo):
      repo = None
@@ -608,7 +713,7 @@ def _FindRepo():
  return (repo, os.path.join(curdir, repodir))


-class _Options:
+class _Options(object):
  help = False


@@ -630,15 +735,20 @@ def _ParseArguments(args):


def _Usage():
+  gitc_usage = ""
+  if get_gitc_manifest_dir():
+    gitc_usage = "  gitc-init Initialize a GITC Client.\n"
+
  _print(
"""usage: repo COMMAND [ARGS]

repo is not yet installed. Use "repo init" to install it here.

The most commonly used repo commands are:

  init      Install repo in the current working directory
-  help      Display detailed help on a command
+""" + gitc_usage +
+"""  help      Display detailed help on a command

For access to the full online help, install repo ("repo init").
""", file=sys.stderr)
@@ -650,6 +760,10 @@ def _Help(args):
    if args[0] == 'init':
      init_optparse.print_help()
      sys.exit(0)
+    elif args[0] == 'gitc-init':
+      _GitcInitOptions(init_optparse)
+      init_optparse.print_help()
+      sys.exit(0)
    else:
      _print("error: '%s' is not a bootstrap command.\n"
             '       For access to online help, install repo ("repo init").'
@@ -695,8 +809,8 @@ def _SetDefaultsTo(gitdir):
                           '--git-dir=%s' % gitdir,
                           'symbolic-ref',
                           'HEAD'],
-                          stdout = subprocess.PIPE,
-                          stderr = subprocess.PIPE)
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE)
  REPO_REV = proc.stdout.read().strip()
  proc.stdout.close()

@@ -709,12 +823,23 @@ def _SetDefaultsTo(gitdir):


def main(orig_args):
-  repo_main, rel_repo_dir = _FindRepo()
  cmd, opt, args = _ParseArguments(orig_args)

+  repo_main, rel_repo_dir = None, None
+  # Don't use the local repo copy, make sure to switch to the gitc client first.
+  if cmd != 'gitc-init':
+    repo_main, rel_repo_dir = _FindRepo()
+
  wrapper_path = os.path.abspath(__file__)
  my_main, my_git = _RunSelf(wrapper_path)

+  cwd = os.getcwd()
+  if get_gitc_manifest_dir() and cwd.startswith(get_gitc_manifest_dir()):
+    _print('error: repo cannot be used in the GITC local manifest directory.'
+           '\nIf you want to work on this GITC client please rerun this '
+           'command from the corresponding client under /gitc/',
+           file=sys.stderr)
+    sys.exit(1)
  if not repo_main:
    if opt.help:
      _Usage()
@@ -722,18 +847,13 @@ def main(orig_args):
      _Help(args)
    if not cmd:
      _NotInstalled()
-    if cmd == 'init':
+    if cmd == 'init' or cmd == 'gitc-init':
      if my_git:
        _SetDefaultsTo(my_git)
      try:
-        _Init(args)
+        _Init(args, gitc_init=(cmd == 'gitc-init'))
      except CloneFailure:
-        for root, dirs, files in os.walk(repodir, topdown=False):
-          for name in files:
-            os.remove(os.path.join(root, name))
-          for name in dirs:
-            os.rmdir(os.path.join(root, name))
-        os.rmdir(repodir)
+        shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
        sys.exit(1)
      repo_main, rel_repo_dir = _FindRepo()
    else:
@@ -759,4 +879,8 @@ def main(orig_args):


if __name__ == '__main__':
+  if ver[0] == 3:
+    _print('warning: Python 3 support is currently experimental. YMMV.\n'
+           'Please use Python 2.6 - 2.7 instead.',
+           file=sys.stderr)
  main(sys.argv[1:])
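The new ParseGitVersion() above deliberately tolerates suffixes such as "-rc" builds and non-numeric components by mapping them to 0. A standalone sketch of the same parsing plus the comparison _CheckGitVersion() performs; the sample version strings are made up for illustration:

from __future__ import print_function

MIN_GIT_VERSION = (1, 7, 2)  # same constant as in the wrapper above

def parse_git_version(ver_str):
  # Mirrors ParseGitVersion(): strip the prefix, drop any "-suffix", and
  # turn the first three dotted components into integers (0 if not numeric).
  if not ver_str.startswith('git version '):
    return None
  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
  return tuple(int(n) if n.isdigit() else 0
               for n in num_ver_str.split('.')[:3])

for sample in ('git version 2.7.4', 'git version 1.9.rc1', 'not git at all'):
  parsed = parse_git_version(sample)
  ok = parsed is not None and parsed >= MIN_GIT_VERSION
  print('%-22s -> %-12s %s' % (sample, parsed, 'ok' if ok else 'unsupported'))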
@@ -46,6 +46,10 @@ class BranchInfo(object):
  def IsCurrent(self):
    return self.current > 0

+  @property
+  def IsSplitCurrent(self):
+    return self.current != 0 and self.current != len(self.projects)
+
  @property
  def IsPublished(self):
    return self.published > 0
@@ -139,10 +143,14 @@ is shown, then the branch appears in all projects.
      if in_cnt < project_cnt:
        fmt = out.write
        paths = []
-        if in_cnt < project_cnt - in_cnt:
+        non_cur_paths = []
+        if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
          in_type = 'in'
          for b in i.projects:
-            paths.append(b.project.relpath)
+            if not i.IsSplitCurrent or b.current:
+              paths.append(b.project.relpath)
+            else:
+              non_cur_paths.append(b.project.relpath)
        else:
          fmt = out.notinproject
          in_type = 'not in'
@@ -154,13 +162,19 @@ is shown, then the branch appears in all projects.
              paths.append(p.relpath)

        s = ' %s %s' % (in_type, ', '.join(paths))
-        if width + 7 + len(s) < 80:
+        if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
+          fmt = out.current if i.IsCurrent else fmt
          fmt(s)
        else:
          fmt(' %s:' % in_type)
+          fmt = out.current if i.IsCurrent else out.write
          for p in paths:
            out.nl()
            fmt(width*' ' + ' %s' % p)
+          fmt = out.write
+          for p in non_cur_paths:
+            out.nl()
+            fmt(width*' ' + ' %s' % p)
      else:
        out.write(' in all projects')
      out.nl()
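The IsSplitCurrent handling above partitions a branch's projects into those where the branch is checked out and those where it is not, so the two groups can be printed with different colors. A toy sketch of that partitioning; the project paths and checkout flags are invented sample data:

# Partition projects the way the "split current" display does: when a branch
# is current in some projects but not all, list the current ones first.
branch_state = {'proj/a': True, 'proj/b': False, 'proj/c': True}  # hypothetical

current = [p for p, is_cur in sorted(branch_state.items()) if is_cur]
others = [p for p, is_cur in sorted(branch_state.items()) if not is_cur]
split_current = bool(current) and bool(others)

print('split current: %s' % split_current)
print('current in: %s' % ', '.join(current))
if split_current:
  print('not current in: %s' % ', '.join(others))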
@@ -76,6 +76,7 @@ change id will be added.
                           capture_stdout = True,
                           capture_stderr = True)
    p.stdin.write(new_msg)
+    p.stdin.close()
    if p.Wait() != 0:
      print("error: Failed to update commit message", file=sys.stderr)
      sys.exit(1)
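The added p.stdin.close() matters because the child process only sees end-of-file on its stdin once the pipe is closed; without it the subsequent Wait() can block. A generic illustration with the standard subprocess module, using `cat` on a Unix-like system purely as a stand-in child process:

from __future__ import print_function
import subprocess

# Write a message to a child over a pipe, then close stdin so the child sees
# EOF and can exit before we wait on it.
p = subprocess.Popen(['cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
p.stdin.write(b'new commit message\n')
p.stdin.close()                 # without this, p.wait() could hang
print(p.stdout.read().decode().strip())
print('exit code: %d' % p.wait())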
subcmds/diffmanifests.py: new file, 204 lines.

#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from color import Coloring
from command import PagedCommand
from manifest_xml import XmlManifest

class _Coloring(Coloring):
  def __init__(self, config):
    Coloring.__init__(self, config, "status")

class Diffmanifests(PagedCommand):
  """ A command to see logs in projects represented by manifests

  This is used to see deeper differences between manifests. Where a simple
  diff would only show a diff of sha1s for example, this command will display
  the logs of the project between both sha1s, allowing user to see diff at a
  deeper level.
  """

  common = True
  helpSummary = "Manifest diff utility"
  helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""

  helpDescription = """
The %prog command shows differences between project revisions of manifest1 and
manifest2. if manifest2 is not specified, current manifest.xml will be used
instead. Both absolute and relative paths may be used for manifests. Relative
paths start from project's ".repo/manifests" folder.

The --raw option Displays the diff in a way that facilitates parsing, the
project pattern will be <status> <path> <revision from> [<revision to>] and the
commit pattern will be <status> <onelined log> with status values respectively :

  A = Added project
  R = Removed project
  C = Changed project
  U = Project with unreachable revision(s) (revision(s) not found)

for project, and

  A = Added commit
  R = Removed commit

for a commit.

Only changed projects may contain commits, and commit status always starts with
a space, and are part of last printed project.
Unreachable revisions may occur if project is not up to date or if repo has not
been initialized with all the groups, in which case some projects won't be
synced and their revisions won't be found.

"""

  def _Options(self, p):
    p.add_option('--raw',
                 dest='raw', action='store_true',
                 help='Display raw diff.')
    p.add_option('--no-color',
                 dest='color', action='store_false', default=True,
                 help='does not display the diff in color.')
    p.add_option('--pretty-format',
                 dest='pretty_format', action='store',
                 metavar='<FORMAT>',
                 help='print the log using a custom git pretty format string')

  def _printRawDiff(self, diff):
    for project in diff['added']:
      self.printText("A %s %s" % (project.relpath, project.revisionExpr))
      self.out.nl()

    for project in diff['removed']:
      self.printText("R %s %s" % (project.relpath, project.revisionExpr))
      self.out.nl()

    for project, otherProject in diff['changed']:
      self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
                                     otherProject.revisionExpr))
      self.out.nl()
      self._printLogs(project, otherProject, raw=True, color=False)

    for project, otherProject in diff['unreachable']:
      self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
                                     otherProject.revisionExpr))
      self.out.nl()

  def _printDiff(self, diff, color=True, pretty_format=None):
    if diff['added']:
      self.out.nl()
      self.printText('added projects : \n')
      self.out.nl()
      for project in diff['added']:
        self.printProject('\t%s' % (project.relpath))
        self.printText(' at revision ')
        self.printRevision(project.revisionExpr)
        self.out.nl()

    if diff['removed']:
      self.out.nl()
      self.printText('removed projects : \n')
      self.out.nl()
      for project in diff['removed']:
        self.printProject('\t%s' % (project.relpath))
        self.printText(' at revision ')
        self.printRevision(project.revisionExpr)
        self.out.nl()

    if diff['changed']:
      self.out.nl()
      self.printText('changed projects : \n')
      self.out.nl()
      for project, otherProject in diff['changed']:
        self.printProject('\t%s' % (project.relpath))
        self.printText(' changed from ')
        self.printRevision(project.revisionExpr)
        self.printText(' to ')
        self.printRevision(otherProject.revisionExpr)
        self.out.nl()
        self._printLogs(project, otherProject, raw=False, color=color,
                        pretty_format=pretty_format)
        self.out.nl()

    if diff['unreachable']:
      self.out.nl()
      self.printText('projects with unreachable revisions : \n')
      self.out.nl()
      for project, otherProject in diff['unreachable']:
        self.printProject('\t%s ' % (project.relpath))
        self.printRevision(project.revisionExpr)
        self.printText(' or ')
        self.printRevision(otherProject.revisionExpr)
        self.printText(' not found')
        self.out.nl()

  def _printLogs(self, project, otherProject, raw=False, color=True,
                 pretty_format=None):

    logs = project.getAddedAndRemovedLogs(otherProject,
                                          oneline=(pretty_format is None),
                                          color=color,
                                          pretty_format=pretty_format)
    if logs['removed']:
      removedLogs = logs['removed'].split('\n')
      for log in removedLogs:
        if log.strip():
          if raw:
            self.printText(' R ' + log)
            self.out.nl()
          else:
            self.printRemoved('\t\t[-] ')
            self.printText(log)
            self.out.nl()

    if logs['added']:
      addedLogs = logs['added'].split('\n')
      for log in addedLogs:
        if log.strip():
          if raw:
            self.printText(' A ' + log)
            self.out.nl()
          else:
            self.printAdded('\t\t[+] ')
            self.printText(log)
            self.out.nl()

  def Execute(self, opt, args):
    if not args or len(args) > 2:
      self.Usage()

    self.out = _Coloring(self.manifest.globalConfig)
    self.printText = self.out.nofmt_printer('text')
    if opt.color:
      self.printProject = self.out.nofmt_printer('project', attr = 'bold')
      self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
      self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
      self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
    else:
      self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText

    manifest1 = XmlManifest(self.manifest.repodir)
    manifest1.Override(args[0])
    if len(args) == 1:
      manifest2 = self.manifest
    else:
      manifest2 = XmlManifest(self.manifest.repodir)
      manifest2.Override(args[1])

    diff = manifest1.projectsDiff(manifest2)
    if opt.raw:
      self._printRawDiff(diff)
    else:
      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
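The --raw output documented above is line oriented: project lines start with A, R, C or U in the first column, and commit lines start with a space and belong to the preceding project line. A small parser sketch for that format; the sample input below is invented for illustration:

# Parse `repo diffmanifests --raw` style output.
sample = """\
A external/new-project 0f0f0f0
C build/tools 1a2b3c4 5d6e7f8
 A 5d6e7f8 Add gitc support
 R 1a2b3c4 Old workaround
R external/old-project 9abcdef
"""

projects = []
for line in sample.splitlines():
  if not line:
    continue
  if line[0] == ' ':
    # commit line: attach it to the most recent project entry
    projects[-1]['commits'].append(line.strip())
  else:
    status, rest = line[0], line[2:]
    projects.append({'status': status, 'fields': rest.split(), 'commits': []})

for p in projects:
  print('%s %s (%d commit(s))' % (p['status'], p['fields'][0], len(p['commits'])))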
@@ -18,6 +18,7 @@ import re
 import sys
 
 from command import Command
+from error import GitError
 
 CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
 
@@ -87,7 +88,13 @@ makes it available in your project's local working directory.
        for c in dl.commits:
          print(' %s' % (c), file=sys.stderr)
      if opt.cherrypick:
-        project._CherryPick(dl.commit)
+        try:
+          project._CherryPick(dl.commit)
+        except GitError:
+          print('[%s] Could not complete the cherry-pick of %s' \
+                % (project.name, dl.commit), file=sys.stderr)
+          sys.exit(1)
+
      elif opt.revert:
        project._Revert(dl.commit)
      elif opt.ffonly:
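The change above wraps project._CherryPick() so a failed cherry-pick exits with a clear message instead of a traceback. The same catch-report-exit pattern with plain git via subprocess, as a hedged sketch; repo's own GitError and _CherryPick internals are not reproduced here:

from __future__ import print_function
import subprocess
import sys

def cherry_pick(commit, cwd='.'):
  # Return True on success; report and return False on failure, mirroring the
  # "catch the error, print, exit non-zero" handling added above.
  try:
    subprocess.check_call(['git', 'cherry-pick', commit], cwd=cwd)
    return True
  except subprocess.CalledProcessError:
    print('Could not complete the cherry-pick of %s' % commit, file=sys.stderr)
    return False

if __name__ == '__main__' and len(sys.argv) > 1:
  if not cherry_pick(sys.argv[1]):
    sys.exit(1)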
@@ -14,10 +14,13 @@
 # limitations under the License.
 
 from __future__ import print_function
+import errno
 import fcntl
+import multiprocessing
 import re
 import os
 import select
+import signal
 import sys
 import subprocess
 
@@ -31,6 +34,7 @@ _CAN_COLOR = [
   'log',
 ]
 
+
 class ForallColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'forall')
@@ -87,6 +91,12 @@ revision to a locally executed git command, use REPO_LREV.
 REPO_RREV is the name of the revision from the manifest, exactly
 as written in the manifest.
 
+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
+
 REPO__* are any extra environment variables, specified by the
 "annotation" element under any project element. This can be useful
 for differentiating trees based on user-specific criteria, or simply
@@ -110,6 +120,12 @@ without iterating through the remaining projects.
    p.add_option('-r', '--regex',
                 dest='regex', action='store_true',
                 help="Execute the command only on projects matching regex or wildcard expression")
+    p.add_option('-i', '--inverse-regex',
+                 dest='inverse_regex', action='store_true',
+                 help="Execute the command only on projects not matching regex or wildcard expression")
+    p.add_option('-g', '--groups',
+                 dest='groups',
+                 help="Execute the command only on projects matching the specified groups")
    p.add_option('-c', '--command',
                 help='Command (and arguments) to execute',
                 dest='command',
@@ -126,9 +142,35 @@ without iterating through the remaining projects.
    g.add_option('-v', '--verbose',
                 dest='verbose', action='store_true',
                 help='Show command error messages')
+    g.add_option('-j', '--jobs',
+                 dest='jobs', action='store', type='int', default=1,
+                 help='number of commands to execute simultaneously')

  def WantPager(self, opt):
-    return opt.project_header
+    return opt.project_header and opt.jobs == 1
+
+  def _SerializeProject(self, project):
+    """ Serialize a project._GitGetByExec instance.
+
+    project._GitGetByExec is not pickle-able. Instead of trying to pass it
+    around between processes, make a dict ourselves containing only the
+    attributes that we need.
+
+    """
+    if not self.manifest.IsMirror:
+      lrev = project.GetRevisionId()
+    else:
+      lrev = None
+    return {
+      'name': project.name,
+      'relpath': project.relpath,
+      'remote_name': project.remote.name,
+      'lrev': lrev,
+      'rrev': project.revisionExpr,
+      'annotations': dict((a.name, a.value) for a in project.annotations),
+      'gitdir': project.gitdir,
+      'worktree': project.worktree,
+    }

  def Execute(self, opt, args):
    if not opt.command:
@@ -167,123 +209,192 @@ without iterating through the remaining projects.
    # pylint: enable=W0631

    mirror = self.manifest.IsMirror
-    out = ForallColoring(self.manifest.manifestProject.config)
-    out.redirect(sys.stdout)
-
    rc = 0
-    first = True

-    if not opt.regex:
-      projects = self.GetProjects(args)
-    else:
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+      self.manifest.manifestProject.worktree, smart_sync_manifest_name)
+
+    if os.path.isfile(smart_sync_manifest_path):
+      self.manifest.Override(smart_sync_manifest_path)
+
+    if opt.regex:
      projects = self.FindProjects(args)
+    elif opt.inverse_regex:
+      projects = self.FindProjects(args, inverse=True)
+    else:
+      projects = self.GetProjects(args, groups=opt.groups)

-    for project in projects:
-      env = os.environ.copy()
-      def setenv(name, val):
-        if val is None:
-          val = ''
-        env[name] = val.encode()
-
-      setenv('REPO_PROJECT', project.name)
-      setenv('REPO_PATH', project.relpath)
-      setenv('REPO_REMOTE', project.remote.name)
-      setenv('REPO_LREV', project.GetRevisionId())
-      setenv('REPO_RREV', project.revisionExpr)
-      for a in project.annotations:
-        setenv("REPO__%s" % (a.name), a.value)
-
-      if mirror:
-        setenv('GIT_DIR', project.gitdir)
-        cwd = project.gitdir
-      else:
-        cwd = project.worktree
-
-      if not os.path.exists(cwd):
-        if (opt.project_header and opt.verbose) \
-        or not opt.project_header:
-          print('skipping %s/' % project.relpath, file=sys.stderr)
-        continue
-
-      if opt.project_header:
-        stdin = subprocess.PIPE
-        stdout = subprocess.PIPE
-        stderr = subprocess.PIPE
-      else:
-        stdin = None
-        stdout = None
-        stderr = None
-
-      p = subprocess.Popen(cmd,
-                           cwd = cwd,
-                           shell = shell,
-                           env = env,
-                           stdin = stdin,
-                           stdout = stdout,
-                           stderr = stderr)
-
-      if opt.project_header:
-        class sfd(object):
-          def __init__(self, fd, dest):
-            self.fd = fd
-            self.dest = dest
-          def fileno(self):
-            return self.fd.fileno()
-
-        empty = True
-        errbuf = ''
-
-        p.stdin.close()
-        s_in = [sfd(p.stdout, sys.stdout),
-                sfd(p.stderr, sys.stderr)]
-
-        for s in s_in:
-          flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-          fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-        while s_in:
-          in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
-          for s in in_ready:
-            buf = s.fd.read(4096)
-            if not buf:
-              s.fd.close()
-              s_in.remove(s)
-              continue
-
-            if not opt.verbose:
-              if s.fd != p.stdout:
-                errbuf += buf
-                continue
-
-            if empty:
-              if first:
-                first = False
-              else:
-                out.nl()
-
-              if mirror:
-                project_header_path = project.name
-              else:
-                project_header_path = project.relpath
-              out.project('project %s/', project_header_path)
-              out.nl()
-              out.flush()
-              if errbuf:
-                sys.stderr.write(errbuf)
-                sys.stderr.flush()
-                errbuf = ''
-              empty = False
-
-            s.dest.write(buf)
-            s.dest.flush()
-
-      r = p.wait()
-      if r != 0:
-        if r != rc:
-          rc = r
-        if opt.abort_on_errors:
-          print("error: %s: Aborting due to previous error" % project.relpath,
-                file=sys.stderr)
-          sys.exit(r)
+    os.environ['REPO_COUNT'] = str(len(projects))
+
+    pool = multiprocessing.Pool(opt.jobs, InitWorker)
+    try:
+      config = self.manifest.manifestProject.config
+      results_it = pool.imap(
+         DoWorkWrapper,
+         self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
+      pool.close()
+      for r in results_it:
+        rc = rc or r
+        if r != 0 and opt.abort_on_errors:
+          raise Exception('Aborting due to previous error')
+    except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+      # Catch KeyboardInterrupt raised inside and outside of workers
+      print('Interrupted - terminating the pool')
+      pool.terminate()
+      rc = rc or errno.EINTR
+    except Exception as e:
+      # Catch any other exceptions raised
+      print('Got an error, terminating the pool: %s: %s' %
+            (type(e).__name__, e),
+            file=sys.stderr)
+      pool.terminate()
+      rc = rc or getattr(e, 'errno', 1)
+    finally:
+      pool.join()
    if rc != 0:
      sys.exit(rc)

+  def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
+    for cnt, p in enumerate(projects):
+      try:
+        project = self._SerializeProject(p)
+      except Exception as e:
+        print('Project list error on project %s: %s: %s' %
+              (p.name, type(e).__name__, e),
+              file=sys.stderr)
+        return
+      except KeyboardInterrupt:
+        print('Project list interrupted',
+              file=sys.stderr)
+        return
+      yield [mirror, opt, cmd, shell, cnt, config, project]
+
+
+class WorkerKeyboardInterrupt(Exception):
+  """ Keyboard interrupt exception for worker processes. """
+  pass
+
+
+def InitWorker():
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+
+def DoWorkWrapper(args):
+  """ A wrapper around the DoWork() method.
+
+  Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+  ``Exception``-based exception to stop it flooding the console with stacktraces
+  and making the parent hang indefinitely.
+
+  """
+  project = args.pop()
+  try:
+    return DoWork(project, *args)
+  except KeyboardInterrupt:
+    print('%s: Worker interrupted' % project['name'])
+    raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+  env = os.environ.copy()
+  def setenv(name, val):
+    if val is None:
+      val = ''
+    if hasattr(val, 'encode'):
+      val = val.encode()
+    env[name] = val
+
+  setenv('REPO_PROJECT', project['name'])
+  setenv('REPO_PATH', project['relpath'])
+  setenv('REPO_REMOTE', project['remote_name'])
+  setenv('REPO_LREV', project['lrev'])
+  setenv('REPO_RREV', project['rrev'])
+  setenv('REPO_I', str(cnt + 1))
+  for name in project['annotations']:
+    setenv("REPO__%s" % (name), project['annotations'][name])
+
+  if mirror:
+    setenv('GIT_DIR', project['gitdir'])
+    cwd = project['gitdir']
+  else:
+    cwd = project['worktree']
+
+  if not os.path.exists(cwd):
+    if (opt.project_header and opt.verbose) \
+            or not opt.project_header:
+      print('skipping %s/' % project['relpath'], file=sys.stderr)
+    return
+
+  if opt.project_header:
+    stdin = subprocess.PIPE
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
+  else:
+    stdin = None
+    stdout = None
+    stderr = None
+
+  p = subprocess.Popen(cmd,
+                       cwd=cwd,
+                       shell=shell,
+                       env=env,
+                       stdin=stdin,
+                       stdout=stdout,
+                       stderr=stderr)
+
+  if opt.project_header:
+    out = ForallColoring(config)
+    out.redirect(sys.stdout)
+    class sfd(object):
+      def __init__(self, fd, dest):
+        self.fd = fd
+        self.dest = dest
+      def fileno(self):
+        return self.fd.fileno()
+
+    empty = True
+    errbuf = ''
+
+    p.stdin.close()
+    s_in = [sfd(p.stdout, sys.stdout),
+            sfd(p.stderr, sys.stderr)]
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s.fd.close()
+          s_in.remove(s)
+          continue
+
+        if not opt.verbose:
+          if s.fd != p.stdout:
+            errbuf += buf
+            continue
+
+        if empty and out:
+          if not cnt == 0:
+            out.nl()
+
+          if mirror:
+            project_header_path = project['name']
+          else:
+            project_header_path = project['relpath']
+          out.project('project %s/', project_header_path)
+          out.nl()
+          out.flush()
+          if errbuf:
+            sys.stderr.write(errbuf)
+            sys.stderr.flush()
+            errbuf = ''
+          empty = False
+
+        s.dest.write(buf)
+        s.dest.flush()
+
+  r = p.wait()
+  return r
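The rewrite above moves per-project work into a multiprocessing.Pool, with an initializer that makes workers ignore SIGINT so Ctrl-C is handled once by the parent, which then terminates the pool. A self-contained sketch of that pattern; the work function and project names here are placeholders, not repo's DoWork:

from __future__ import print_function
import multiprocessing
import signal

def init_worker():
  # Workers ignore SIGINT; the parent catches Ctrl-C and terminates the pool.
  signal.signal(signal.SIGINT, signal.SIG_IGN)

def do_work(name):
  return '%s: done' % name  # placeholder for the real per-project command

if __name__ == '__main__':
  projects = ['platform/build', 'platform/art', 'tools/repo']  # sample names
  pool = multiprocessing.Pool(2, init_worker)
  try:
    for result in pool.imap(do_work, projects):
      print(result)
    pool.close()
  except KeyboardInterrupt:
    print('Interrupted - terminating the pool')
    pool.terminate()
  finally:
    pool.join()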
55  subcmds/gitc_delete.py  Normal file
@@ -0,0 +1,55 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import os
import shutil
import sys

from command import Command, GitcClientCommand
import gitc_utils

from pyversion import is_python3
if not is_python3():
  # pylint:disable=W0622
  input = raw_input
  # pylint:enable=W0622

class GitcDelete(Command, GitcClientCommand):
  common = True
  visible_everywhere = False
  helpSummary = "Delete a GITC Client."
  helpUsage = """
%prog
"""
  helpDescription = """
This subcommand deletes the current GITC client, deleting the GITC manifest
and all locally downloaded sources.
"""

  def _Options(self, p):
    p.add_option('-f', '--force',
                 dest='force', action='store_true',
                 help='Force the deletion (no prompt).')

  def Execute(self, opt, args):
    if not opt.force:
      prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' %
                self.gitc_manifest.gitc_client_name)
      response = input(prompt).lower()
      if not response == 'yes':
        print('Response was not "yes"\n Exiting...')
        sys.exit(1)
    shutil.rmtree(self.gitc_manifest.gitc_client_dir)
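gitc-delete only removes the client after an explicit "yes" answer, unless -f/--force is given. A standalone sketch of that guard, with a hypothetical path in place of the real client directory:

from __future__ import print_function
import shutil
import sys

def delete_tree(path, force=False):
  # Refuse to delete unless the caller passed force or typed exactly "yes".
  if not force:
    response = input('This will delete %s\nAre you sure? (yes/no) ' % path).lower()
    if response != 'yes':
      print('Response was not "yes"\n Exiting...')
      sys.exit(1)
  shutil.rmtree(path)

# Example (hypothetical path):
# delete_tree('/tmp/example-client', force=True)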
82  subcmds/gitc_init.py  Normal file
@@ -0,0 +1,82 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import os
import sys

import gitc_utils
from command import GitcAvailableCommand
from manifest_xml import GitcManifest
from subcmds import init
import wrapper


class GitcInit(init.Init, GitcAvailableCommand):
  common = True
  helpSummary = "Initialize a GITC Client."
  helpUsage = """
%prog [options] [client name]
"""
  helpDescription = """
The '%prog' command is ran to initialize a new GITC client for use
with the GITC file system.

This command will setup the client directory, initialize repo, just
like repo init does, and then downloads the manifest collection
and installs it in the .repo/directory of the GITC client.

Once this is done, a GITC manifest is generated by pulling the HEAD
SHA for each project and generates the properly formatted XML file
and installs it as .manifest in the GITC client directory.

The -c argument is required to specify the GITC client name.

The optional -f argument can be used to specify the manifest file to
use for this GITC client.
"""

  def _Options(self, p):
    super(GitcInit, self)._Options(p)
    g = p.add_option_group('GITC options')
    g.add_option('-f', '--manifest-file',
                 dest='manifest_file',
                 help='Optional manifest file to use for this GITC client.')
    g.add_option('-c', '--gitc-client',
                 dest='gitc_client',
                 help='The name of the gitc_client instance to create or modify.')

  def Execute(self, opt, args):
    gitc_client = gitc_utils.parse_clientdir(os.getcwd())
    if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client):
      print('fatal: Please update your repo command. See go/gitc for instructions.', file=sys.stderr)
      sys.exit(1)
    self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
                                   gitc_client)
    super(GitcInit, self).Execute(opt, args)

    manifest_file = self.manifest.manifestFile
    if opt.manifest_file:
      if not os.path.exists(opt.manifest_file):
        print('fatal: Specified manifest file %s does not exist.' %
              opt.manifest_file)
        sys.exit(1)
      manifest_file = opt.manifest_file

    manifest = GitcManifest(self.repodir, gitc_client)
    manifest.Override(manifest_file)
    gitc_utils.generate_gitc_manifest(None, manifest)
    print('Please run `cd %s` to view your GITC client.' %
          os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client))
@@ -19,7 +19,8 @@ import sys
 from formatter import AbstractFormatter, DumbWriter

 from color import Coloring
-from command import PagedCommand, MirrorSafeCommand
+from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
+import gitc_utils

 class Help(PagedCommand, MirrorSafeCommand):
   common = False
@@ -54,9 +55,21 @@ Displays detailed usage information about a command.
   def _PrintCommonCommands(self):
     print('usage: repo COMMAND [ARGS]')
     print('The most commonly used repo commands are:')
+
+    def gitc_supported(cmd):
+      if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
+        return True
+      if self.manifest.isGitcClient:
+        return True
+      if isinstance(cmd, GitcClientCommand):
+        return False
+      if gitc_utils.get_gitc_manifest_dir():
+        return True
+      return False
+
     commandNames = list(sorted([name
                                 for name, command in self.commands.items()
-                                if command.common]))
+                                if command.common and gitc_supported(command)]))

     maxlen = 0
     for name in commandNames:
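The nested gitc_supported() helper above hides GITC-only commands unless a GITC manifest directory or client is present. A self-contained sketch of the same isinstance-based filtering, using stand-in marker classes instead of repo's real command hierarchy:

class GitcAvailableCommand(object):
  """Marker: command needs a GITC-capable setup."""

class GitcClientCommand(object):
  """Marker: command only makes sense inside a GITC client."""

class Sync(GitcAvailableCommand):
  common = True

class GitcDelete(GitcClientCommand):
  common = True

class Status(object):
  common = True

def gitc_supported(cmd, is_gitc_client, have_gitc_dir):
  # Plain commands are always listed.
  if not isinstance(cmd, (GitcAvailableCommand, GitcClientCommand)):
    return True
  # Inside a GITC client everything GITC-related is available.
  if is_gitc_client:
    return True
  # Client-only commands are useless outside a client.
  if isinstance(cmd, GitcClientCommand):
    return False
  # Otherwise availability depends on a configured GITC manifest dir.
  return bool(have_gitc_dir)

commands = {'sync': Sync(), 'gitc-delete': GitcDelete(), 'status': Status()}
visible = sorted(name for name, cmd in commands.items()
                 if cmd.common and gitc_supported(cmd, False, False))
print(visible)  # ['status']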
@@ -59,7 +59,8 @@ class Info(PagedCommand):
                  or 'all,-notdefault')

     self.heading("Manifest branch: ")
-    self.headtext(self.manifest.default.revisionExpr)
+    if self.manifest.default.revisionExpr:
+      self.headtext(self.manifest.default.revisionExpr)
     self.out.nl()
     self.heading("Manifest merge branch: ")
     self.headtext(mergeBranch)
@@ -27,7 +27,7 @@ else:
   import imp
   import urlparse
   urllib = imp.new_module('urllib')
-  urllib.parse = urlparse.urlparse
+  urllib.parse = urlparse

 from color import Coloring
 from command import InteractiveCommand, MirrorSafeCommand
@@ -61,6 +61,11 @@ directory use as much data as possible from the local reference
 directory when fetching from the server. This will make the sync
 go a lot faster by reducing data traffic on the network.

+The --no-clone-bundle option disables any attempt to use
+$URL/clone.bundle to bootstrap a new Git repository from a
+resumeable bundle file on a content delivery network. This
+may be necessary if there are problems with the local Python
+HTTP client or proxy configuration, but the Git binary works.
+
 Switching Manifest Branches
 ---------------------------
@@ -113,6 +118,9 @@ to update the working directory files.
                  help='restrict manifest projects to ones with a specified '
                       'platform group [auto|all|none|linux|darwin|...]',
                  metavar='PLATFORM')
+    g.add_option('--no-clone-bundle',
+                 dest='no_clone_bundle', action='store_true',
+                 help='disable use of /clone.bundle on HTTP/HTTPS')

     # Tool
     g = p.add_option_group('repo Version options')
@@ -153,7 +161,7 @@ to update the working directory files.
       # server where this git is located, so let's save that here.
       mirrored_manifest_git = None
       if opt.reference:
-        manifest_git_path = urllib.parse(opt.manifest_url).path[1:]
+        manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:]
         mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
         if not mirrored_manifest_git.endswith(".git"):
           mirrored_manifest_git += ".git"
@@ -179,7 +187,7 @@ to update the working directory files.
       r.Save()

     groups = re.split(r'[,\s]+', opt.groups)
-    all_platforms = ['linux', 'darwin']
+    all_platforms = ['linux', 'darwin', 'windows']
     platformize = lambda x: 'platform-' + x
     if opt.platform == 'auto':
       if (not opt.mirror and
@@ -188,7 +196,7 @@ to update the working directory files.
     elif opt.platform == 'all':
       groups.extend(map(platformize, all_platforms))
     elif opt.platform in all_platforms:
-      groups.extend(platformize(opt.platform))
+      groups.append(platformize(opt.platform))
     elif opt.platform != 'none':
       print('fatal: invalid platform flag', file=sys.stderr)
       sys.exit(1)
@@ -222,7 +230,8 @@ to update the working directory files.
             'in another location.', file=sys.stderr)
       sys.exit(1)

-    if not m.Sync_NetworkHalf(is_new=is_new):
+    if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
+                              clone_bundle=not opt.no_clone_bundle):
       r = m.GetRemote(m.remote.name)
       print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
@@ -233,7 +242,7 @@ to update the working directory files.
       sys.exit(1)

     if opt.manifest_branch:
-      m.MetaBranchSwitch(opt.manifest_branch)
+      m.MetaBranchSwitch()

     syncbuf = SyncBuffer(m.config)
     m.Sync_LocalHalf(syncbuf)
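The change from groups.extend(platformize(opt.platform)) to groups.append(...) fixes a subtle bug: list.extend() iterates its argument, so extending with a string appends one element per character. A quick standalone illustration:

groups = ['all', '-notdefault']

# extend() treats the string as an iterable of characters -- the old bug:
buggy = list(groups)
buggy.extend('platform-linux')
print(buggy[-3:])   # ['n', 'u', 'x']

# append() adds the whole string as a single group -- the fix:
fixed = list(groups)
fixed.append('platform-linux')
print(fixed[-1])    # 'platform-linux'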
@@ -35,6 +35,9 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
     p.add_option('-r', '--regex',
                  dest='regex', action='store_true',
                  help="Filter the project list based on regex or wildcard matching of strings")
+    p.add_option('-g', '--groups',
+                 dest='groups',
+                 help="Filter the project list based on the groups the project is in")
     p.add_option('-f', '--fullpath',
                  dest='fullpath', action='store_true',
                  help="Display the full work tree path instead of the relative path")
@@ -62,7 +65,7 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
       sys.exit(1)

     if not opt.regex:
-      projects = self.GetProjects(args)
+      projects = self.GetProjects(args, groups=opt.groups)
     else:
       projects = self.FindProjects(args)
@@ -54,6 +54,11 @@ branch but need to incorporate new upstream changes "underneath" them.
     p.add_option('--auto-stash',
                  dest='auto_stash', action='store_true',
                  help='Stash local modifications before starting')
+    p.add_option('-m', '--onto-manifest',
+                 dest='onto_manifest', action='store_true',
+                 help='Rebase onto the manifest version instead of upstream '
+                      'HEAD. This helps to make sure the local tree stays '
+                      'consistent if you previously synced to a manifest.')

   def Execute(self, opt, args):
     all_projects = self.GetProjects(args)
@@ -106,6 +111,10 @@ branch but need to incorporate new upstream changes "underneath" them.
       if opt.interactive:
         args.append("-i")

+      if opt.onto_manifest:
+        args.append('--onto')
+        args.append(project.revisionExpr)
+
       args.append(upbranch.LocalMerge)

       print('# %s: rebasing %s -> %s'
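With the new -m/--onto-manifest flag, '--onto <manifest revision>' is inserted ahead of the upstream ref, so the underlying call becomes `git rebase --onto <manifest-rev> <upstream>`. A small sketch of how such an argument vector is assembled (the revision names are placeholders, not repo values):

def rebase_args(interactive=False, onto_manifest=False,
                manifest_rev='refs/heads/master', upstream='refs/remotes/m/master'):
  # Build the argument vector handed to `git rebase`.
  args = ['rebase']
  if interactive:
    args.append('-i')
  if onto_manifest:
    # Replay the branch on top of the manifest revision instead of upstream HEAD.
    args.extend(['--onto', manifest_rev])
  args.append(upstream)
  return args

print(rebase_args(onto_manifest=True))
# ['rebase', '--onto', 'refs/heads/master', 'refs/remotes/m/master']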
@@ -14,11 +14,15 @@
 # limitations under the License.

 from __future__ import print_function
+import os
 import sys

 from command import Command
 from git_config import IsId
 from git_command import git
+import gitc_utils
 from progress import Progress
+from project import SyncBuffer

 class Start(Command):
   common = True
@@ -50,19 +54,59 @@ revision specified in the manifest.
     if not opt.all:
       projects = args[1:]
       if len(projects) < 1:
-        print("error: at least one project must be specified", file=sys.stderr)
-        sys.exit(1)
+        projects = ['.',]  # start it in the local project by default

-    all_projects = self.GetProjects(projects)
+    all_projects = self.GetProjects(projects,
+                                    missing_ok=bool(self.gitc_manifest))
+
+    # This must happen after we find all_projects, since GetProjects may need
+    # the local directory, which will disappear once we save the GITC manifest.
+    if self.gitc_manifest:
+      gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest,
+                                       missing_ok=True)
+      for project in gitc_projects:
+        if project.old_revision:
+          project.already_synced = True
+        else:
+          project.already_synced = False
+          project.old_revision = project.revisionExpr
+        project.revisionExpr = None
+      # Save the GITC manifest.
+      gitc_utils.save_manifest(self.gitc_manifest)
+
+    # Make sure we have a valid CWD
+    if not os.path.exists(os.getcwd()):
+      os.chdir(self.manifest.topdir)

     pm = Progress('Starting %s' % nb, len(all_projects))
     for project in all_projects:
       pm.update()
+
+      if self.gitc_manifest:
+        gitc_project = self.gitc_manifest.paths[project.relpath]
+        # Sync projects that have not been opened.
+        if not gitc_project.already_synced:
+          proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir,
+                                       project.relpath)
+          project.worktree = proj_localdir
+          if not os.path.exists(proj_localdir):
+            os.makedirs(proj_localdir)
+          project.Sync_NetworkHalf()
+          sync_buf = SyncBuffer(self.manifest.manifestProject.config)
+          project.Sync_LocalHalf(sync_buf)
+          project.revisionId = gitc_project.old_revision
+
       # If the current revision is a specific SHA1 then we can't push back
-      # to it so substitute the manifest default revision instead.
+      # to it; so substitute with dest_branch if defined, or with manifest
+      # default revision instead.
+      branch_merge = ''
       if IsId(project.revisionExpr):
-        project.revisionExpr = self.manifest.default.revisionExpr
-      if not project.StartBranch(nb):
+        if project.dest_branch:
+          branch_merge = project.dest_branch
+        else:
+          branch_merge = self.manifest.default.revisionExpr
+
+      if not project.StartBranch(nb, branch_merge=branch_merge):
         err.append(project)
     pm.end()
@@ -22,15 +22,8 @@ except ImportError:

 import glob

-from pyversion import is_python3
-if is_python3():
-  import io
-else:
-  import StringIO as io
-
 import itertools
 import os
-import sys

 from color import Coloring

@@ -97,7 +90,7 @@ the following meanings:
                  dest='orphans', action='store_true',
                  help="include objects in working directory outside of repo projects")

-  def _StatusHelper(self, project, clean_counter, sem, output):
+  def _StatusHelper(self, project, clean_counter, sem):
     """Obtains the status for a specific project.

     Obtains the status for a project, redirecting the output to
@@ -111,9 +104,9 @@ the following meanings:
       output: Where to output the status.
     """
     try:
-      state = project.PrintWorkTreeStatus(output)
+      state = project.PrintWorkTreeStatus()
       if state == 'CLEAN':
-        clean_counter.next()
+        next(clean_counter)
     finally:
       sem.release()

@@ -122,16 +115,16 @@ the following meanings:
     status_header = ' --\t'
     for item in dirs:
       if not os.path.isdir(item):
-        outstring.write(''.join([status_header, item]))
+        outstring.append(''.join([status_header, item]))
         continue
       if item in proj_dirs:
         continue
       if item in proj_dirs_parents:
-        self._FindOrphans(glob.glob('%s/.*' % item) + \
-                          glob.glob('%s/*' % item), \
+        self._FindOrphans(glob.glob('%s/.*' % item) +
+                          glob.glob('%s/*' % item),
                           proj_dirs, proj_dirs_parents, outstring)
         continue
-      outstring.write(''.join([status_header, item, '/']))
+      outstring.append(''.join([status_header, item, '/']))

   def Execute(self, opt, args):
     all_projects = self.GetProjects(args)
@@ -141,30 +134,21 @@ the following meanings:
       for project in all_projects:
         state = project.PrintWorkTreeStatus()
         if state == 'CLEAN':
-          counter.next()
+          next(counter)
     else:
       sem = _threading.Semaphore(opt.jobs)
-      threads_and_output = []
+      threads = []
       for project in all_projects:
         sem.acquire()
-
-        class BufList(io.StringIO):
-          def dump(self, ostream):
-            for entry in self.buflist:
-              ostream.write(entry)
-
-        output = BufList()
-
         t = _threading.Thread(target=self._StatusHelper,
-                              args=(project, counter, sem, output))
-        threads_and_output.append((t, output))
+                              args=(project, counter, sem))
+        threads.append(t)
         t.daemon = True
         t.start()
-      for (t, output) in threads_and_output:
+      for t in threads:
         t.join()
-        output.dump(sys.stdout)
-        output.close()
-    if len(all_projects) == counter.next():
+    if len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')

     if opt.orphans:
@@ -188,23 +172,21 @@ the following meanings:
       try:
         os.chdir(self.manifest.topdir)

-        outstring = io.StringIO()
-        self._FindOrphans(glob.glob('.*') + \
-                          glob.glob('*'), \
+        outstring = []
+        self._FindOrphans(glob.glob('.*') +
+                          glob.glob('*'),
                           proj_dirs, proj_dirs_parents, outstring)

-        if outstring.buflist:
+        if outstring:
           output = StatusColoring(self.manifest.globalConfig)
           output.project('Objects not within a project (orphans)')
           output.nl()
-          for entry in outstring.buflist:
+          for entry in outstring:
             output.untracked(entry)
           output.nl()
         else:
           print('No orphan files or directories')

-        outstring.close()
-
       finally:
         # Restore CWD.
         os.chdir(orig_path)
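Several of the status changes replace counter.next() with next(counter): the .next() method only exists on Python 2 iterators, while the next() builtin works on both Python 2 and 3. With the itertools.count() counter used here, for example:

import itertools

clean_counter = itertools.count(1)

# Portable across Python 2 and 3:
print(next(clean_counter))   # 1
print(next(clean_counter))   # 2

# Python 2 only -- raises AttributeError on Python 3:
# clean_counter.next()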
449  subcmds/sync.py
@@ -14,27 +14,35 @@
 # limitations under the License.

 from __future__ import print_function
+import json
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import pickle
 import re
 import shutil
 import socket
 import subprocess
 import sys
+import tempfile
 import time

 from pyversion import is_python3
 if is_python3():
+  import http.cookiejar as cookielib
+  import urllib.error
   import urllib.parse
+  import urllib.request
   import xmlrpc.client
 else:
+  import cookielib
   import imp
+  import urllib2
   import urlparse
   import xmlrpclib
   urllib = imp.new_module('urllib')
+  urllib.error = urllib2
   urllib.parse = urlparse
+  urllib.request = urllib2
   xmlrpc = imp.new_module('xmlrpc')
   xmlrpc.client = xmlrpclib

@@ -57,14 +65,17 @@ except ImportError:
   multiprocessing = None

 from git_command import GIT, git_require
+from git_config import GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
-from main import WrapperModule
+import gitc_utils
 from project import Project
 from project import RemoteSpec
 from command import Command, MirrorSafeCommand
 from error import RepoChangedException, GitError, ManifestParseError
 from project import SyncBuffer
 from progress import Progress
+from wrapper import Wrapper
+from manifest_xml import GitcManifest

 _ONE_DAY_S = 24 * 60 * 60
@@ -119,6 +130,11 @@ credentials.
 The -f/--force-broken option can be used to proceed with syncing
 other projects if a project sync fails.

+The --force-sync option can be used to overwrite existing git
+directories if they have previously been linked to a different
+object direcotry. WARNING: This may cause data to be lost since
+refs may be removed when overwriting.
+
 The --no-clone-bundle option disables any attempt to use
 $URL/clone.bundle to bootstrap a new Git repository from a
 resumeable bundle file on a content delivery network. This
@@ -128,6 +144,16 @@ HTTP client or proxy configuration, but the Git binary works.
 The --fetch-submodules option enables fetching Git submodules
 of a project from server.

+The -c/--current-branch option can be used to only fetch objects that
+are on the branch specified by a project's revision.
+
+The --optimized-fetch option can be used to only fetch projects that
+are fixed to a sha1 revision if the sha1 revision does not already
+exist locally.
+
+The --prune option can be used to remove any refs that no longer
+exist on the remote.
+
 SSH Connections
 ---------------
@@ -167,6 +193,11 @@ later is required to fix a server side protocol bug.
     p.add_option('-f', '--force-broken',
                  dest='force_broken', action='store_true',
                  help="continue sync even if a project fails to sync")
+    p.add_option('--force-sync',
+                 dest='force_sync', action='store_true',
+                 help="overwrite an existing git directory if it needs to "
+                      "point to a different object directory. WARNING: this "
+                      "may cause loss of data")
     p.add_option('-l', '--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
@@ -203,10 +234,15 @@ later is required to fix a server side protocol bug.
     p.add_option('--no-tags',
                  dest='no_tags', action='store_true',
                  help="don't fetch tags")
+    p.add_option('--optimized-fetch',
+                 dest='optimized_fetch', action='store_true',
+                 help='only fetch projects fixed to sha1 if revision does not exist locally')
+    p.add_option('--prune', dest='prune', action='store_true',
+                 help='delete refs that no longer exist on the remote')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
-                   help='smart sync using manifest from a known good build')
+                   help='smart sync using manifest from the latest known good build')
       p.add_option('-t', '--smart-tag',
                    dest='smart_tag', action='store',
                    help='smart sync using manifest from a known tag')
@@ -219,7 +255,7 @@ later is required to fix a server side protocol bug.
                    dest='repo_upgraded', action='store_true',
                    help=SUPPRESS_HELP)

-  def _FetchProjectList(self, opt, projects, *args):
+  def _FetchProjectList(self, opt, projects, *args, **kwargs):
     """Main function of the fetch threads when jobs are > 1.

     Delegates most of the work to _FetchHelper.
@@ -227,11 +263,11 @@ later is required to fix a server side protocol bug.
     Args:
       opt: Program options returned from optparse. See _Options().
       projects: Projects to fetch.
-      *args: Remaining arguments to pass to _FetchHelper. See the
+      *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
           _FetchHelper docstring for details.
     """
     for project in projects:
-      success = self._FetchHelper(opt, project, *args)
+      success = self._FetchHelper(opt, project, *args, **kwargs)
       if not success and not opt.force_broken:
         break

@@ -271,8 +307,11 @@ later is required to fix a server side protocol bug.
       success = project.Sync_NetworkHalf(
         quiet=opt.quiet,
         current_branch_only=opt.current_branch_only,
+        force_sync=opt.force_sync,
         clone_bundle=not opt.no_clone_bundle,
-        no_tags=opt.no_tags, archive=self.manifest.IsArchive)
+        no_tags=opt.no_tags, archive=self.manifest.IsArchive,
+        optimized_fetch=opt.optimized_fetch,
+        prune=opt.prune)
       self._fetch_times.Set(project, time.time() - start)

       # Lock around all the rest of the code, since printing, updating a set
@@ -281,6 +320,7 @@ later is required to fix a server side protocol bug.
       did_lock = True

       if not success:
+        err_event.set()
         print('error: Cannot fetch %s' % project.name, file=sys.stderr)
         if opt.force_broken:
           print('warn: --force-broken, continuing to sync',
@@ -291,8 +331,10 @@ later is required to fix a server side protocol bug.
         fetched.add(project.gitdir)
       pm.update()
     except _FetchError:
-      err_event.set()
-    except:
+      pass
+    except Exception as e:
+      print('error: Cannot fetch %s (%s: %s)' \
+          % (project.name, type(e).__name__, str(e)), file=sys.stderr)
       err_event.set()
       raise
     finally:
@@ -304,62 +346,47 @@ later is required to fix a server side protocol bug.

   def _Fetch(self, projects, opt):
     fetched = set()
+    lock = _threading.Lock()
     pm = Progress('Fetching projects', len(projects))

-    if self.jobs == 1:
-      for project in projects:
-        pm.update()
-        if not opt.quiet:
-          print('Fetching project %s' % project.name)
-        if project.Sync_NetworkHalf(
-            quiet=opt.quiet,
-            current_branch_only=opt.current_branch_only,
-            clone_bundle=not opt.no_clone_bundle,
-            no_tags=opt.no_tags,
-            archive=self.manifest.IsArchive):
-          fetched.add(project.gitdir)
-        else:
-          print('error: Cannot fetch %s' % project.name, file=sys.stderr)
-          if opt.force_broken:
-            print('warn: --force-broken, continuing to sync', file=sys.stderr)
-          else:
-            sys.exit(1)
-    else:
-      objdir_project_map = dict()
-      for project in projects:
-        objdir_project_map.setdefault(project.objdir, []).append(project)
-
-      threads = set()
-      lock = _threading.Lock()
-      sem = _threading.Semaphore(self.jobs)
-      err_event = _threading.Event()
-      for project_list in objdir_project_map.values():
-        # Check for any errors before starting any new threads.
-        # ...we'll let existing threads finish, though.
-        if err_event.isSet():
-          break
-
-        sem.acquire()
-        t = _threading.Thread(target = self._FetchProjectList,
-                              args = (opt,
-                                      project_list,
-                                      lock,
-                                      fetched,
-                                      pm,
-                                      sem,
-                                      err_event))
-        # Ensure that Ctrl-C will not freeze the repo process.
-        t.daemon = True
-        threads.add(t)
-        t.start()
-
-      for t in threads:
-        t.join()
-
-      # If we saw an error, exit with code 1 so that other scripts can check.
-      if err_event.isSet():
-        print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
-        sys.exit(1)
+    objdir_project_map = dict()
+    for project in projects:
+      objdir_project_map.setdefault(project.objdir, []).append(project)
+
+    threads = set()
+    sem = _threading.Semaphore(self.jobs)
+    err_event = _threading.Event()
+    for project_list in objdir_project_map.values():
+      # Check for any errors before running any more tasks.
+      # ...we'll let existing threads finish, though.
+      if err_event.isSet() and not opt.force_broken:
+        break
+
+      sem.acquire()
+      kwargs = dict(opt=opt,
+                    projects=project_list,
+                    lock=lock,
+                    fetched=fetched,
+                    pm=pm,
+                    sem=sem,
+                    err_event=err_event)
+      if self.jobs > 1:
+        t = _threading.Thread(target = self._FetchProjectList,
+                              kwargs = kwargs)
+        # Ensure that Ctrl-C will not freeze the repo process.
+        t.daemon = True
+        threads.add(t)
+        t.start()
+      else:
+        self._FetchProjectList(**kwargs)
+
+    for t in threads:
+      t.join()
+
+    # If we saw an error, exit with code 1 so that other scripts can check.
+    if err_event.isSet():
+      print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
+      sys.exit(1)

     pm.end()
     self._fetch_times.Save()
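After the restructuring above there is a single fetch code path: the per-batch arguments are bundled into a kwargs dict and either handed to threading.Thread(kwargs=...) or invoked inline when only one job is allowed. A compact standalone sketch of that dispatch pattern (the worker function is a stand-in, not repo code):

import threading

def fetch_list(names, results, lock):
  # Stand-in worker: record each "fetched" name under a lock.
  for name in names:
    with lock:
      results.add(name)

def run(batches, jobs):
  results = set()
  lock = threading.Lock()
  threads = set()
  for batch in batches:
    kwargs = dict(names=batch, results=results, lock=lock)
    if jobs > 1:
      t = threading.Thread(target=fetch_list, kwargs=kwargs)
      t.daemon = True       # Ctrl-C should not hang the parent process.
      threads.add(t)
      t.start()
    else:
      fetch_list(**kwargs)  # Same code path, executed inline.
  for t in threads:
    t.join()
  return results

print(sorted(run([['a', 'b'], ['c']], jobs=2)))  # ['a', 'b', 'c']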
@@ -370,9 +397,12 @@ later is required to fix a server side protocol bug.
     return fetched

   def _GCProjects(self, projects):
-    gitdirs = {}
+    gc_gitdirs = {}
     for project in projects:
-      gitdirs[project.gitdir] = project.bare_git
+      if len(project.manifest.GetProjectsWithName(project.name)) > 1:
+        print('Shared project %s found, disabling pruning.' % project.name)
+        project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
+      gc_gitdirs[project.gitdir] = project.bare_git

     has_dash_c = git_require((1, 7, 2))
     if multiprocessing and has_dash_c:
@@ -382,7 +412,7 @@ later is required to fix a server side protocol bug.
       jobs = min(self.jobs, cpu_count)

       if jobs < 2:
-        for bare_git in gitdirs.values():
+        for bare_git in gc_gitdirs.values():
           bare_git.gc('--auto')
         return

@@ -404,7 +434,7 @@ later is required to fix a server side protocol bug.
       finally:
         sem.release()

-    for bare_git in gitdirs.values():
+    for bare_git in gc_gitdirs.values():
       if err_event.isSet():
         break
       sem.acquire()
@@ -427,6 +457,59 @@ later is required to fix a server side protocol bug.
     else:
       self.manifest._Unload()

+  def _DeleteProject(self, path):
+    print('Deleting obsolete path %s' % path, file=sys.stderr)
+
+    # Delete the .git directory first, so we're less likely to have a partially
+    # working git repository around. There shouldn't be any git projects here,
+    # so rmtree works.
+    try:
+      shutil.rmtree(os.path.join(path, '.git'))
+    except OSError:
+      print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr)
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Delete everything under the worktree, except for directories that contain
+    # another git project
+    dirs_to_remove = []
+    failed = False
+    for root, dirs, files in os.walk(path):
+      for f in files:
+        try:
+          os.remove(os.path.join(root, f))
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr)
+          failed = True
+      dirs[:] = [d for d in dirs
+                 if not os.path.lexists(os.path.join(root, d, '.git'))]
+      dirs_to_remove += [os.path.join(root, d) for d in dirs
+                         if os.path.join(root, d) not in dirs_to_remove]
+    for d in reversed(dirs_to_remove):
+      if len(os.listdir(d)) == 0:
+        try:
+          os.rmdir(d)
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+          failed = True
+          continue
+    if failed:
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Try deleting parent dirs if they are empty
+    project_dir = path
+    while project_dir != self.manifest.topdir:
+      if len(os.listdir(project_dir)) == 0:
+        os.rmdir(project_dir)
+      else:
+        break
+      project_dir = os.path.dirname(project_dir)
+
+    return 0
+
   def UpdateProjectList(self):
     new_project_paths = []
     for project in self.GetProjects(None, missing_ok=True):
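_DeleteProject walks the obsolete worktree but must not descend into nested checkouts, so it assigns to dirs[:] inside os.walk to prune those subtrees in place. A small self-contained demonstration of the idiom (the directory layout is fabricated for the example):

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
# Fabricated layout: one plain subdir and one that looks like a nested git project.
os.makedirs(os.path.join(root, 'plain'))
os.makedirs(os.path.join(root, 'nested', '.git'))

visited = []
for cur, dirs, files in os.walk(root):
  visited.append(os.path.relpath(cur, root))
  # Rebinding the *same* list object tells os.walk to skip pruned entries;
  # building a new list (dirs = [...]) would have no effect.
  dirs[:] = [d for d in dirs
             if not os.path.lexists(os.path.join(cur, d, '.git'))]

print(sorted(visited))  # ['.', 'plain'] -- 'nested' was pruned
shutil.rmtree(root)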
@@ -447,8 +530,8 @@ later is required to fix a server side protocol bug.
         continue
       if path not in new_project_paths:
         # If the path has already been deleted, we don't need to do it
-        if os.path.exists(self.manifest.topdir + '/' + path):
-          gitdir = os.path.join(self.manifest.topdir, path, '.git')
+        gitdir = os.path.join(self.manifest.topdir, path, '.git')
+        if os.path.exists(gitdir):
           project = Project(
                          manifest = self.manifest,
                          name = path,
@@ -467,18 +550,8 @@ later is required to fix a server side protocol bug.
             print('       commit changes, then run sync again',
                   file=sys.stderr)
             return -1
-          else:
-            print('Deleting obsolete path %s' % project.worktree,
-                  file=sys.stderr)
-            shutil.rmtree(project.worktree)
-            # Try deleting parent subdirs if they are empty
-            project_dir = os.path.dirname(project.worktree)
-            while project_dir != self.manifest.topdir:
-              try:
-                os.rmdir(project_dir)
-              except OSError:
-                break
-              project_dir = os.path.dirname(project_dir)
+          elif self._DeleteProject(project.worktree):
+            return -1

     new_project_paths.sort()
     fd = open(file_path, 'w')
@@ -521,6 +594,9 @@ later is required to fix a server side protocol bug.
       self.manifest.Override(opt.manifest_name)

     manifest_name = opt.manifest_name
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+        self.manifest.manifestProject.worktree, smart_sync_manifest_name)

     if opt.smart_sync or opt.smart_tag:
       if not self.manifest.manifest_server:
@@ -542,19 +618,18 @@ later is required to fix a server side protocol bug.
         try:
           info = netrc.netrc()
         except IOError:
-          print('.netrc file does not exist or could not be opened',
-                file=sys.stderr)
+          # .netrc file does not exist or could not be opened
+          pass
         else:
           try:
             parse_result = urllib.parse.urlparse(manifest_server)
             if parse_result.hostname:
-              username, _account, password = \
-                info.authenticators(parse_result.hostname)
-          except TypeError:
-            # TypeError is raised when the given hostname is not present
-            # in the .netrc file.
-            print('No credentials found for %s in .netrc'
-                  % parse_result.hostname, file=sys.stderr)
+              auth = info.authenticators(parse_result.hostname)
+              if auth:
+                username, _account, password = auth
+              else:
+                print('No credentials found for %s in .netrc'
+                      % parse_result.hostname, file=sys.stderr)
           except netrc.NetrcParseError as e:
             print('Error parsing .netrc file: %s' % e, file=sys.stderr)

@@ -563,8 +638,12 @@ later is required to fix a server side protocol bug.
             (username, password),
             1)

+      transport = PersistentTransport(manifest_server)
+      if manifest_server.startswith('persistent-'):
+        manifest_server = manifest_server[len('persistent-'):]
+
       try:
-        server = xmlrpc.client.Server(manifest_server)
+        server = xmlrpc.client.Server(manifest_server, transport=transport)
         if opt.smart_sync:
           p = self.manifest.manifestProject
           b = p.GetBranch(p.CurrentBranch)
@@ -573,7 +652,10 @@ later is required to fix a server side protocol bug.
           branch = branch[len(R_HEADS):]

         env = os.environ.copy()
-        if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
+        if 'SYNC_TARGET' in env:
+          target = env['SYNC_TARGET']
+          [success, manifest_str] = server.GetApprovedManifest(branch, target)
+        elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
           target = '%s-%s' % (env['TARGET_PRODUCT'],
                               env['TARGET_BUILD_VARIANT'])
           [success, manifest_str] = server.GetApprovedManifest(branch, target)
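netrc.netrc().authenticators(host) returns None when the host has no entry, so the old tuple unpacking raised TypeError; the new code checks the result before unpacking. The same pattern in isolation, with an in-memory stand-in for the parsed .netrc data:

def authenticators(entries, hostname):
  # Mimics netrc.netrc().authenticators(): a (login, account, password)
  # tuple when the host is known, None otherwise.
  return entries.get(hostname)

entries = {'review.example.com': ('alice', None, 's3cret')}

for host in ('review.example.com', 'unknown.example.com'):
  auth = authenticators(entries, host)
  if auth:
    username, _account, password = auth
    print('%s: using stored credentials for %s' % (host, username))
  else:
    print('No credentials found for %s in .netrc' % host)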
@@ -584,17 +666,16 @@ later is required to fix a server side protocol bug.
           [success, manifest_str] = server.GetManifest(opt.smart_tag)

         if success:
-          manifest_name = "smart_sync_override.xml"
-          manifest_path = os.path.join(self.manifest.manifestProject.worktree,
-                                       manifest_name)
+          manifest_name = smart_sync_manifest_name
           try:
-            f = open(manifest_path, 'w')
+            f = open(smart_sync_manifest_path, 'w')
             try:
               f.write(manifest_str)
             finally:
               f.close()
-          except IOError:
-            print('error: cannot write manifest to %s' % manifest_path,
+          except IOError as e:
+            print('error: cannot write manifest to %s:\n%s'
+                  % (smart_sync_manifest_path, e),
                   file=sys.stderr)
             sys.exit(1)
           self._ReloadManifest(manifest_name)
@@ -611,6 +692,13 @@ later is required to fix a server side protocol bug.
               % (self.manifest.manifest_server, e.errcode, e.errmsg),
               file=sys.stderr)
         sys.exit(1)
+    else:  # Not smart sync or smart tag mode
+      if os.path.isfile(smart_sync_manifest_path):
+        try:
+          os.remove(smart_sync_manifest_path)
+        except OSError as e:
+          print('error: failed to remove existing smart sync override manifest: %s' %
+                e, file=sys.stderr)

     rp = self.manifest.repoProject
     rp.PreSync()
@@ -624,7 +712,8 @@ later is required to fix a server side protocol bug.
     if not opt.local_only:
       mp.Sync_NetworkHalf(quiet=opt.quiet,
                           current_branch_only=opt.current_branch_only,
-                          no_tags=opt.no_tags)
+                          no_tags=opt.no_tags,
+                          optimized_fetch=opt.optimized_fetch)

     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
@@ -634,6 +723,42 @@ later is required to fix a server side protocol bug.
       self._ReloadManifest(manifest_name)
       if opt.jobs is None:
         self.jobs = self.manifest.default.sync_j
+
+    if self.gitc_manifest:
+      gitc_manifest_projects = self.GetProjects(args,
+                                                missing_ok=True)
+      gitc_projects = []
+      opened_projects = []
+      for project in gitc_manifest_projects:
+        if project.relpath in self.gitc_manifest.paths and \
+           self.gitc_manifest.paths[project.relpath].old_revision:
+          opened_projects.append(project.relpath)
+        else:
+          gitc_projects.append(project.relpath)
+
+      if not args:
+        gitc_projects = None
+
+      if gitc_projects != [] and not opt.local_only:
+        print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name)
+        manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name)
+        if manifest_name:
+          manifest.Override(manifest_name)
+        else:
+          manifest.Override(self.manifest.manifestFile)
+        gitc_utils.generate_gitc_manifest(self.gitc_manifest,
+                                          manifest,
+                                          gitc_projects)
+        print('GITC client successfully synced.')
+
+      # The opened projects need to be synced as normal, therefore we
+      # generate a new args list to represent the opened projects.
+      # TODO: make this more reliable -- if there's a project name/path overlap,
+      # this may choose the wrong project.
+      args = [os.path.relpath(self.manifest.paths[p].worktree, os.getcwd())
+              for p in opened_projects]
+      if not args:
+        return
     all_projects = self.GetProjects(args,
                                     missing_ok=True,
                                     submodules_ok=opt.fetch_submodules)
@@ -687,7 +812,7 @@ later is required to fix a server side protocol bug.
     for project in all_projects:
       pm.update()
       if project.worktree:
-        project.Sync_LocalHalf(syncbuf)
+        project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
     pm.end()
     print(file=sys.stderr)
     if not syncbuf.Finish():
@@ -699,7 +824,7 @@ later is required to fix a server side protocol bug.
       print(self.manifest.notice)

 def _PostRepoUpgrade(manifest, quiet=False):
-  wrapper = WrapperModule()
+  wrapper = Wrapper()
   if wrapper.NeedSetupGnuPG():
     wrapper.SetupGnuPG(quiet)
   for project in manifest.projects:
@@ -775,7 +900,7 @@ class _FetchTimes(object):
   _ALPHA = 0.5

   def __init__(self, manifest):
-    self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
     self._times = None
     self._seen = set()

@@ -794,22 +919,17 @@ class _FetchTimes(object):
   def _Load(self):
     if self._times is None:
       try:
-        f = open(self._path, 'rb')
-      except IOError:
-        self._times = {}
-        return self._times
-      try:
-        try:
-          self._times = pickle.load(f)
-        except IOError:
-          try:
-            os.remove(self._path)
-          except OSError:
-            pass
-          self._times = {}
-      finally:
-        f.close()
-    return self._times
+        f = open(self._path)
+        try:
+          self._times = json.load(f)
+        finally:
+          f.close()
+      except (IOError, ValueError):
+        try:
+          os.remove(self._path)
+        except OSError:
+          pass
+        self._times = {}
||||||
@ -823,13 +943,110 @@ class _FetchTimes(object):
|
|||||||
del self._times[name]
|
del self._times[name]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
f = open(self._path, 'wb')
|
f = open(self._path, 'w')
|
||||||
try:
|
try:
|
||||||
pickle.dump(self._times, f)
|
json.dump(self._times, f, indent=2)
|
||||||
except (IOError, OSError, pickle.PickleError):
|
finally:
|
||||||
|
f.close()
|
||||||
|
except (IOError, TypeError):
|
||||||
|
try:
|
||||||
|
os.remove(self._path)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
+
+
+# This is a replacement for xmlrpc.client.Transport using urllib2
+# and supporting persistent-http[s]. It cannot change hosts from
+# request to request like the normal transport, the real url
+# is passed during initialization.
+class PersistentTransport(xmlrpc.client.Transport):
+  def __init__(self, orig_host):
+    self.orig_host = orig_host
+
+  def request(self, host, handler, request_body, verbose=False):
+    with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
+      # Python doesn't understand cookies with the #HttpOnly_ prefix
+      # Since we're only using them for HTTP, copy the file temporarily,
+      # stripping those prefixes away.
+      if cookiefile:
+        tmpcookiefile = tempfile.NamedTemporaryFile()
+        tmpcookiefile.write("# HTTP Cookie File")
+        with open(cookiefile) as f:
+          for line in f:
+            if line.startswith("#HttpOnly_"):
+              line = line[len("#HttpOnly_"):]
+            tmpcookiefile.write(line)
+        tmpcookiefile.flush()
+
+        cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
+        try:
+          cookiejar.load()
+        except cookielib.LoadError:
+          cookiejar = cookielib.CookieJar()
+        finally:
+          tmpcookiefile.close()
+      else:
+        cookiejar = cookielib.CookieJar()
+
+      proxyhandler = urllib.request.ProxyHandler
+      if proxy:
+        proxyhandler = urllib.request.ProxyHandler({
+            "http": proxy,
+            "https": proxy })
+
+      opener = urllib.request.build_opener(
+          urllib.request.HTTPCookieProcessor(cookiejar),
+          proxyhandler)
+
+      url = urllib.parse.urljoin(self.orig_host, handler)
+      parse_results = urllib.parse.urlparse(url)
+
+      scheme = parse_results.scheme
+      if scheme == 'persistent-http':
+        scheme = 'http'
+      if scheme == 'persistent-https':
+        # If we're proxying through persistent-https, use http. The
+        # proxy itself will do the https.
+        if proxy:
+          scheme = 'http'
+        else:
+          scheme = 'https'
+
+      # Parse out any authentication information using the base class
+      host, extra_headers, _ = self.get_host_info(parse_results.netloc)
+
+      url = urllib.parse.urlunparse((
+          scheme,
+          host,
+          parse_results.path,
+          parse_results.params,
+          parse_results.query,
+          parse_results.fragment))
+
+      request = urllib.request.Request(url, request_body)
+      if extra_headers is not None:
+        for (name, header) in extra_headers:
+          request.add_header(name, header)
+      request.add_header('Content-Type', 'text/xml')
+      try:
+        response = opener.open(request)
+      except urllib.error.HTTPError as e:
+        if e.code == 501:
+          # We may have been redirected through a login process
+          # but our POST turned into a GET. Retry.
+          response = opener.open(request)
+        else:
+          raise
+
+      p, u = xmlrpc.client.getparser()
+      while 1:
+        data = response.read(1024)
+        if not data:
+          break
+        p.feed(data)
+      p.close()
+      return u.close()
+
+  def close(self):
+    pass
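A hedged sketch of how a transport like this would be wired into an XML-RPC client for smart sync. It assumes PersistentTransport from the hunk above is in scope; the manifest-server URL is a placeholder, and the GetApprovedManifest call is shown only as a typical smart-sync request.

import xmlrpc.client

manifest_server = 'persistent-https://example.googlesource.com/manifestserver'  # placeholder

transport = PersistentTransport(manifest_server)
# xmlrpc.client only accepts http/https URLs, so hand it a rewritten URL while
# the transport keeps the original persistent-* host for cookies and proxying.
proxy_url = manifest_server.replace('persistent-', '', 1)
server = xmlrpc.client.ServerProxy(proxy_url, transport=transport)

# Example smart-sync style call (method availability depends on the server):
# manifest_xml = server.GetApprovedManifest('git_branch', 'build_target')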
@@ -25,10 +25,12 @@ from git_command import GitCommand
 from project import RepoHook

 from pyversion import is_python3
+# pylint:disable=W0622
 if not is_python3():
-  # pylint:disable=W0622
   input = raw_input
-  # pylint:enable=W0622
+else:
+  unicode = str
+# pylint:enable=W0622

 UNUSUAL_COMMIT_THRESHOLD = 5
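The hunk above widens the compatibility shim so that both input() and unicode() can be used unconditionally later in upload.py. A standalone sketch of the same idea (using sys.version_info instead of the pyversion module, purely for illustration):

import sys

is_python3 = sys.version_info[0] >= 3

# pylint:disable=W0622
if not is_python3:
  input = raw_input      # Python 2: make input() behave like Python 3's input()
else:
  unicode = str          # Python 3: keep unicode() callable
# pylint:enable=W0622

reply = unicode('y')     # works identically on both interpreters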
@@ -89,6 +91,11 @@ to "true" then repo will assume you always answer "y" at the prompt,
 and will not prompt you further. If it is set to "false" then repo
 will assume you always answer "n", and will abort.

+review.URL.autoreviewer:
+
+To automatically append a user or mailing list to reviews, you can set
+a per-project or global Git option to do so.
+
 review.URL.autocopy:

 To automatically copy a user or mailing list to all uploaded reviews,
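For example, assuming a review host of review.example.com (a placeholder, as are the addresses), the new key could be set globally with `git config --global review.review.example.com.autoreviewer 'alice@example.com,devs@example.com'`, and the existing autocopy key per project with `git config review.review.example.com.autocopy 'qa-list@example.com'`.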
@@ -293,14 +300,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/

     self._UploadAndReport(opt, todo, people)

-  def _AppendAutoCcList(self, branch, people):
+  def _AppendAutoList(self, branch, people):
     """
+    Appends the list of reviewers in the git project's config.
     Appends the list of users in the CC list in the git project's config if a
     non-empty reviewer list was found.
     """
     name = branch.name
     project = branch.project
+
+    key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
+    raw_list = project.config.GetString(key)
+    if not raw_list is None:
+      people[0].extend([entry.strip() for entry in raw_list.split(',')])
+
     key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
     raw_list = project.config.GetString(key)
     if not raw_list is None and len(people[0]) > 0:
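A compact, runnable sketch of how these two config keys feed the (reviewers, cc) tuple. The key names come from the hunk above; the helper name and the dict-backed config lookup are purely illustrative.

def append_auto_lists(people, review_host, get_string):
  """people is ([reviewers], [cc]); get_string returns a config value or None."""
  raw = get_string('review.%s.autoreviewer' % review_host)
  if raw is not None:
    people[0].extend(entry.strip() for entry in raw.split(','))

  raw = get_string('review.%s.autocopy' % review_host)
  # autocopy is only honoured once at least one reviewer is present.
  if raw is not None and people[0]:
    people[1].extend(entry.strip() for entry in raw.split(','))

config = {
    'review.review.example.com.autoreviewer': 'alice@example.com',
    'review.review.example.com.autocopy': 'qa@example.com',
}
people = ([], [])
append_auto_lists(people, 'review.example.com', config.get)
# people is now (['alice@example.com'], ['qa@example.com'])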
@@ -323,16 +336,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     for branch in todo:
       try:
         people = copy.deepcopy(original_people)
-        self._AppendAutoCcList(branch, people)
+        self._AppendAutoList(branch, people)

         # Check if there are local changes that may have been forgotten
-        if branch.project.HasChanges():
+        changes = branch.project.UncommitedFiles()
+        if changes:
           key = 'review.%s.autoupload' % branch.project.remote.review
           answer = branch.project.config.GetBoolean(key)

           # if they want to auto upload, let's not ask because it could be automated
           if answer is None:
-            sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
+            sys.stdout.write('Uncommitted changes in ' + branch.project.name)
+            sys.stdout.write(' (did you forget to amend?):\n')
+            sys.stdout.write('\n'.join(changes) + '\n')
+            sys.stdout.write('Continue uploading? (y/N) ')
             a = sys.stdin.readline().strip().lower()
             if a not in ('y', 'yes', 't', 'true', 'on'):
               print("skipping upload", file=sys.stderr)
@@ -437,9 +454,15 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
       if avail:
         pending.append((project, avail))

-    if pending and (not opt.bypass_hooks):
+    if not pending:
+      print("no branches ready for upload", file=sys.stderr)
+      return
+
+    if not opt.bypass_hooks:
       hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
-                      self.manifest.topdir, abort_if_user_denies=True)
+                      self.manifest.topdir,
+                      self.manifest.manifestProject.GetRemote('origin').url,
+                      abort_if_user_denies=True)
       pending_proj_names = [project.name for (project, avail) in pending]
       pending_worktrees = [project.worktree for (project, avail) in pending]
       try:
@@ -455,9 +478,7 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     cc = _SplitEmails(opt.cc)
     people = (reviewers, cc)

-    if not pending:
-      print("no branches ready for upload", file=sys.stderr)
-    elif len(pending) == 1 and len(pending[0][1]) == 1:
+    if len(pending) == 1 and len(pending[0][1]) == 1:
       self._SingleBranch(opt, pending[0][1][0], people)
     else:
       self._MultipleBranches(opt, pending, people)
tests/fixtures/gitc_config (new vendored file, 1 line)
@@ -0,0 +1 @@
+gitc_dir=/test/usr/local/google/gitc
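The fixture above is a simple key=value file. A hedged sketch of what a get_gitc_manifest_dir helper in the repo launcher plausibly does with it (the real implementation lives in the launcher script, which is not shown in this compare view):

def get_gitc_manifest_dir(config_file):
  """Return the gitc_dir value from a gitc config file, or '' if unavailable."""
  ret = ''
  try:
    with open(config_file) as f:
      for line in f:
        if line.startswith('gitc_dir='):
          ret = line[len('gitc_dir='):].strip()
  except IOError:
    pass
  return ret

# With tests/fixtures/gitc_config this returns '/test/usr/local/google/gitc'.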
tests/test_wrapper.py (new file, 75 lines)
@@ -0,0 +1,75 @@
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import unittest
+
+import wrapper
+
+def fixture(*paths):
+  """Return a path relative to tests/fixtures.
+  """
+  return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
+
+class RepoWrapperUnitTest(unittest.TestCase):
+  """Tests helper functions in the repo wrapper
+  """
+  def setUp(self):
+    """Load the wrapper module every time
+    """
+    wrapper._wrapper_module = None
+    self.wrapper = wrapper.Wrapper()
+
+  def test_get_gitc_manifest_dir_no_gitc(self):
+    """
+    Test reading a missing gitc config file
+    """
+    self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
+    val = self.wrapper.get_gitc_manifest_dir()
+    self.assertEqual(val, '')
+
+  def test_get_gitc_manifest_dir(self):
+    """
+    Test reading the gitc config file and parsing the directory
+    """
+    self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
+    val = self.wrapper.get_gitc_manifest_dir()
+    self.assertEqual(val, '/test/usr/local/google/gitc')
+
+  def test_gitc_parse_clientdir_no_gitc(self):
+    """
+    Test parsing the gitc clientdir without gitc running
+    """
+    self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None)
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test')
+
+  def test_gitc_parse_clientdir(self):
+    """
+    Test parsing the gitc clientdir
+    """
+    self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None)
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/extra'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/extra'), 'test')
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/'), None)
+    self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/'), None)
+
+if __name__ == '__main__':
+  unittest.main()
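These tests can be run from the checkout root with the standard unittest runner; an equivalent programmatic invocation (paths assumed relative to the repo source tree):

# Equivalent to `python -m unittest tests.test_wrapper` from the checkout root.
import unittest

suite = unittest.defaultTestLoader.discover('tests', pattern='test_wrapper.py')
unittest.TextTestRunner(verbosity=2).run(suite)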
wrapper.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import imp
+import os
+
+
+def WrapperPath():
+  return os.path.join(os.path.dirname(__file__), 'repo')
+
+_wrapper_module = None
+def Wrapper():
+  global _wrapper_module
+  if not _wrapper_module:
+    _wrapper_module = imp.load_source('wrapper', WrapperPath())
+  return _wrapper_module
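A short sketch of how other repo code can use this module: Wrapper() loads the top-level `repo` launcher script once via imp.load_source and caches the module, so its helpers become ordinary attributes. The helper names below are the ones exercised by tests/test_wrapper.py.

import wrapper

repo_launcher = wrapper.Wrapper()          # loads and caches the `repo` script

gitc_dir = repo_launcher.get_gitc_manifest_dir()                       # '' when gitc is absent
client = repo_launcher.gitc_parse_clientdir('/gitc/manifest-rw/test')  # 'test'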