Mirror of https://gerrit.googlesource.com/git-repo
Synced 2025-06-26 20:17:52 +00:00

Compare commits
209 Commits

Author | SHA1 | Date
---|---|---
4ccad7554b | |||
403b64edf4 | |||
a38769cda8 | |||
44859d0267 | |||
6ad6dbefe7 | |||
33fe4e99f9 | |||
4214585073 | |||
b51f07cd06 | |||
04f2f0e186 | |||
cb07ba7e3d | |||
23ff7df6a7 | |||
cc1b1a703d | |||
bdf7ed2301 | |||
9c76f67f13 | |||
52b99aa91d | |||
9371979628 | |||
2086004261 | |||
2338788050 | |||
0402cd882a | |||
936183a492 | |||
85e8267031 | |||
e30f46b957 | |||
e4978cfbe3 | |||
126e298214 | |||
38e4387f8e | |||
24245e0094 | |||
db6f1b0884 | |||
f2fad61bde | |||
ee69084421 | |||
d37d43f036 | |||
7bdac71087 | |||
f97e8383a3 | |||
3000cdad22 | |||
b9d9efd394 | |||
497bde4de5 | |||
4abf8e6ef8 | |||
137d0131bf | |||
42e679b9f6 | |||
902665bce6 | |||
c8d882ae2a | |||
3eb87cec5c | |||
5fb8ed217c | |||
7e12e0a2fa | |||
7893b85509 | |||
b4e50e67e8 | |||
0936aeab2c | |||
14e134da02 | |||
04e52d6166 | |||
909d58b2e2 | |||
5cf16607d3 | |||
c190b98ed5 | |||
4863307299 | |||
f75870beac | |||
bf0b0cbc2f | |||
3a10968a70 | |||
c46de6932a | |||
303a82f33a | |||
7a91d51dcf | |||
a8d539189e | |||
588142dfcb | |||
a6d258b84d | |||
a769498568 | |||
884a387eca | |||
80b87fe6c1 | |||
e9f75b1782 | |||
a35e402161 | |||
dd7aea6c11 | |||
5196805fa2 | |||
85b24acd6a | |||
36ea2fb6ee | |||
2cd1f0452e | |||
65e3a78a9e | |||
d792f7928d | |||
6efdde9f6e | |||
7446c5954a | |||
d58bfe5a58 | |||
70f6890352 | |||
666d534636 | |||
f2af756425 | |||
544e7b0a97 | |||
e0df232da7 | |||
5a7c3afa73 | |||
9bc422f130 | |||
e81bc030bb | |||
eb5acc9ae9 | |||
26c45a7958 | |||
68425f4da8 | |||
53e902a19b | |||
4e4d40f7c0 | |||
093fdb6587 | |||
2fb6466f79 | |||
724aafb52d | |||
ccd218cd8f | |||
dd6542268a | |||
baca5f7e88 | |||
89ece429fb | |||
565480588d | |||
1829101e28 | |||
1966133f8e | |||
f1027e23b4 | |||
2cd38a0bf8 | |||
1b46cc9b6d | |||
1242e60bdd | |||
2d0f508648 | |||
143d8a7249 | |||
5db69f3f66 | |||
ff0a3c8f80 | |||
094cdbe090 | |||
148a84de0c | |||
1c5da49e6c | |||
b8433dfd2f | |||
f2fe2d9b86 | |||
c9877c7cf6 | |||
69e04d8953 | |||
f1f1137d61 | |||
f77ef2edb0 | |||
e695338e21 | |||
bd80f7eedd | |||
bf79c6618e | |||
f045d49a71 | |||
719757d6a8 | |||
011d4f426c | |||
53d6a7b895 | |||
335f5ef4ad | |||
672cc499b9 | |||
61df418c59 | |||
4534120628 | |||
cbc0798f67 | |||
d5a5b19efd | |||
5d6cb80b8f | |||
0eb35cbe50 | |||
ce201a5311 | |||
12fd10c201 | |||
a17d7af4d9 | |||
fbd3f2a10b | |||
37128b6f70 | |||
143b4cc992 | |||
8d20116038 | |||
53263d873d | |||
7487992bd3 | |||
b25ea555c3 | |||
3bfd72158c | |||
59b31cb6e0 | |||
1e7ab2a63f | |||
e76efdd7b3 | |||
730ce4c3c2 | |||
745a39ba3d | |||
efc986c508 | |||
edd0151a26 | |||
5e0ee14575 | |||
70df18944a | |||
0836a22d38 | |||
b6a16e6390 | |||
351fe2c793 | |||
fb99c71939 | |||
3a2a59eb87 | |||
bc0308478b | |||
610d3c4e46 | |||
033a7e91de | |||
854f2b6ef4 | |||
a892b1006b | |||
db2ad9dfce | |||
ef668c92c2 | |||
65b162b32f | |||
cd51f17c64 | |||
53a6c5d93a | |||
c2791e85f3 | |||
5bca9fcdd9 | |||
74c1f3d5e6 | |||
91f3ba5a3f | |||
691a75936d | |||
710d4b0391 | |||
a1f77d92c6 | |||
ecf8f2b7c8 | |||
f609f91b72 | |||
59bbb580e3 | |||
da45e5d884 | |||
0826c0749f | |||
de50d81c91 | |||
2b30e3aaba | |||
793f90cdc0 | |||
d503352b14 | |||
2f992cba32 | |||
b5267f9ad2 | |||
45401230cf | |||
56f4eea26c | |||
f385d0ca09 | |||
84c4d3c345 | |||
a8864fba9f | |||
275e4b727a | |||
c4c01f914c | |||
9d5bf60d3c | |||
217ea7d274 | |||
51813dfed1 | |||
fef4ae74e2 | |||
db83b1b5ab | |||
ede7f12d4a | |||
04d84a23fd | |||
0a1c6a1c16 | |||
33e0456737 | |||
07669002cb | |||
a0444584cb | |||
3cba0b8613 | |||
a27852d0e7 | |||
61ac9ae090 | |||
3ee6ffd078 | |||
28db6ffef4 | |||
2f9e7e40c4 | |||
4eb285cf90 |
1 .gitignore vendored
@ -1,2 +1,3 @@
*.pyc
.repopickle_*
/repoc
2 .project
@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>repo</name>
<name>git-repo</name>
<comment></comment>
<projects>
</projects>

@ -3,7 +3,7 @@

<pydev_project>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/repo</path>
<path>/git-repo</path>
</pydev_pathproperty>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
@ -53,7 +53,7 @@ load-plugins=
enable=RP0004

# Disable the message(s) with the given id(s).
disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801
disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801,F0401,E0611,R0801,I0011

[REPORTS]

@ -61,9 +61,6 @@ disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,
# (visual studio) and html
output-format=text

# Include message's id in output
include-ids=yes

# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
71 color.py
@ -18,41 +18,43 @@ import sys
|
||||
|
||||
import pager
|
||||
|
||||
COLORS = {None :-1,
|
||||
'normal' :-1,
|
||||
'black' : 0,
|
||||
'red' : 1,
|
||||
'green' : 2,
|
||||
'yellow' : 3,
|
||||
'blue' : 4,
|
||||
COLORS = {None: -1,
|
||||
'normal': -1,
|
||||
'black': 0,
|
||||
'red': 1,
|
||||
'green': 2,
|
||||
'yellow': 3,
|
||||
'blue': 4,
|
||||
'magenta': 5,
|
||||
'cyan' : 6,
|
||||
'white' : 7}
|
||||
'cyan': 6,
|
||||
'white': 7}
|
||||
|
||||
ATTRS = {None :-1,
|
||||
'bold' : 1,
|
||||
'dim' : 2,
|
||||
'ul' : 4,
|
||||
'blink' : 5,
|
||||
ATTRS = {None: -1,
|
||||
'bold': 1,
|
||||
'dim': 2,
|
||||
'ul': 4,
|
||||
'blink': 5,
|
||||
'reverse': 7}
|
||||
|
||||
RESET = "\033[m" # pylint: disable=W1401
|
||||
# backslash is not anomalous
|
||||
RESET = "\033[m"
|
||||
|
||||
|
||||
def is_color(s):
|
||||
return s in COLORS
|
||||
|
||||
|
||||
def is_attr(s):
|
||||
return s in ATTRS
|
||||
|
||||
def _Color(fg = None, bg = None, attr = None):
|
||||
|
||||
def _Color(fg=None, bg=None, attr=None):
|
||||
fg = COLORS[fg]
|
||||
bg = COLORS[bg]
|
||||
attr = ATTRS[attr]
|
||||
|
||||
if attr >= 0 or fg >= 0 or bg >= 0:
|
||||
need_sep = False
|
||||
code = "\033[" #pylint: disable=W1401
|
||||
code = "\033["
|
||||
|
||||
if attr >= 0:
|
||||
code += chr(ord('0') + attr)
|
||||
@ -71,7 +73,6 @@ def _Color(fg = None, bg = None, attr = None):
|
||||
if bg >= 0:
|
||||
if need_sep:
|
||||
code += ';'
|
||||
need_sep = True
|
||||
|
||||
if bg < 8:
|
||||
code += '4%c' % (ord('0') + bg)
|
||||
@ -82,6 +83,27 @@ def _Color(fg = None, bg = None, attr = None):
|
||||
code = ''
|
||||
return code
|
||||
|
||||
DEFAULT = None
|
||||
|
||||
|
||||
def SetDefaultColoring(state):
|
||||
"""Set coloring behavior to |state|.
|
||||
|
||||
This is useful for overriding config options via the command line.
|
||||
"""
|
||||
if state is None:
|
||||
# Leave it alone -- return quick!
|
||||
return
|
||||
|
||||
global DEFAULT
|
||||
state = state.lower()
|
||||
if state in ('auto',):
|
||||
DEFAULT = state
|
||||
elif state in ('always', 'yes', 'true', True):
|
||||
DEFAULT = 'always'
|
||||
elif state in ('never', 'no', 'false', False):
|
||||
DEFAULT = 'never'
|
||||
|
||||
|
||||
class Coloring(object):
|
||||
def __init__(self, config, section_type):
|
||||
@ -89,9 +111,11 @@ class Coloring(object):
|
||||
self._config = config
|
||||
self._out = sys.stdout
|
||||
|
||||
on = self._config.GetString(self._section)
|
||||
on = DEFAULT
|
||||
if on is None:
|
||||
on = self._config.GetString('color.ui')
|
||||
on = self._config.GetString(self._section)
|
||||
if on is None:
|
||||
on = self._config.GetString('color.ui')
|
||||
|
||||
if on == 'auto':
|
||||
if pager.active or os.isatty(1):
|
||||
@ -122,6 +146,7 @@ class Coloring(object):
|
||||
def printer(self, opt=None, fg=None, bg=None, attr=None):
|
||||
s = self
|
||||
c = self.colorer(opt, fg, bg, attr)
|
||||
|
||||
def f(fmt, *args):
|
||||
s._out.write(c(fmt, *args))
|
||||
return f
|
||||
@ -129,6 +154,7 @@ class Coloring(object):
|
||||
def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
|
||||
s = self
|
||||
c = self.nofmt_colorer(opt, fg, bg, attr)
|
||||
|
||||
def f(fmt):
|
||||
s._out.write(c(fmt))
|
||||
return f
|
||||
@ -136,11 +162,13 @@ class Coloring(object):
|
||||
def colorer(self, opt=None, fg=None, bg=None, attr=None):
|
||||
if self._on:
|
||||
c = self._parse(opt, fg, bg, attr)
|
||||
|
||||
def f(fmt, *args):
|
||||
output = fmt % args
|
||||
return ''.join([c, output, RESET])
|
||||
return f
|
||||
else:
|
||||
|
||||
def f(fmt, *args):
|
||||
return fmt % args
|
||||
return f
|
||||
@ -148,6 +176,7 @@ class Coloring(object):
|
||||
def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
|
||||
if self._on:
|
||||
c = self._parse(opt, fg, bg, attr)
|
||||
|
||||
def f(fmt):
|
||||
return ''.join([c, fmt, RESET])
|
||||
return f
|
||||
|
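The color.py hunks above mostly normalize dictionary spacing, and the new SetDefaultColoring() lets a command-line color choice override the git config value. As a minimal sketch of the underlying idea only (the helper name make_code and its simplifications are mine, not the module's API), this is how the COLORS/ATTRS tables turn into an ANSI escape sequence:

COLORS = {None: -1, 'normal': -1, 'black': 0, 'red': 1, 'green': 2,
          'yellow': 3, 'blue': 4, 'magenta': 5, 'cyan': 6, 'white': 7}
ATTRS = {None: -1, 'bold': 1, 'dim': 2, 'ul': 4, 'blink': 5, 'reverse': 7}
RESET = '\033[m'

def make_code(fg=None, bg=None, attr=None):
  """Build the ANSI escape sequence selecting the given colors/attribute."""
  fg, bg, attr = COLORS[fg], COLORS[bg], ATTRS[attr]
  parts = []
  if attr >= 0:
    parts.append(str(attr))
  if fg >= 0:
    parts.append('3%d' % fg)    # 30-37 select the foreground color
  if bg >= 0:
    parts.append('4%d' % bg)    # 40-47 select the background color
  return '\033[%sm' % ';'.join(parts) if parts else ''

print(make_code(fg='red', attr='bold') + 'error: example message' + RESET)

Foreground colors map to codes 30-37 and backgrounds to 40-47, which is why _Color() prefixes the table value with '3' or '4' before joining the parts with ';'.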
41 command.py
@ -129,18 +129,17 @@ class Command(object):
|
||||
def GetProjects(self, args, missing_ok=False, submodules_ok=False):
|
||||
"""A list of projects that match the arguments.
|
||||
"""
|
||||
all_projects = self.manifest.projects
|
||||
all_projects_list = self.manifest.projects
|
||||
result = []
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
groups = mp.config.GetString('manifest.groups')
|
||||
if not groups:
|
||||
groups = 'all,-notdefault,platform-' + platform.system().lower()
|
||||
groups = 'default,platform-' + platform.system().lower()
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
if not args:
|
||||
all_projects_list = all_projects.values()
|
||||
derived_projects = {}
|
||||
for project in all_projects_list:
|
||||
if submodules_ok or project.sync_s:
|
||||
@ -152,12 +151,12 @@ class Command(object):
|
||||
project.MatchesGroups(groups)):
|
||||
result.append(project)
|
||||
else:
|
||||
self._ResetPathToProjectMap(all_projects.values())
|
||||
self._ResetPathToProjectMap(all_projects_list)
|
||||
|
||||
for arg in args:
|
||||
project = all_projects.get(arg)
|
||||
projects = self.manifest.GetProjectsWithName(arg)
|
||||
|
||||
if not project:
|
||||
if not projects:
|
||||
path = os.path.abspath(arg).replace('\\', '/')
|
||||
project = self._GetProjectByPath(path)
|
||||
|
||||
@ -172,20 +171,36 @@ class Command(object):
|
||||
if search_again:
|
||||
project = self._GetProjectByPath(path) or project
|
||||
|
||||
if not project:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not missing_ok and not project.Exists:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not project.MatchesGroups(groups):
|
||||
raise InvalidProjectGroupsError(arg)
|
||||
if project:
|
||||
projects = [project]
|
||||
|
||||
result.append(project)
|
||||
if not projects:
|
||||
raise NoSuchProjectError(arg)
|
||||
|
||||
for project in projects:
|
||||
if not missing_ok and not project.Exists:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not project.MatchesGroups(groups):
|
||||
raise InvalidProjectGroupsError(arg)
|
||||
|
||||
result.extend(projects)
|
||||
|
||||
def _getpath(x):
|
||||
return x.relpath
|
||||
result.sort(key=_getpath)
|
||||
return result
|
||||
|
||||
def FindProjects(self, args):
|
||||
result = []
|
||||
patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args]
|
||||
for project in self.GetProjects(''):
|
||||
for pattern in patterns:
|
||||
if pattern.search(project.name) or pattern.search(project.relpath):
|
||||
result.append(project)
|
||||
break
|
||||
result.sort(key=lambda project: project.relpath)
|
||||
return result
|
||||
|
||||
# pylint: disable=W0223
|
||||
# Pylint warns that the `InteractiveCommand` and `PagedCommand` classes do not
|
||||
# override method `Execute` which is abstract in `Command`. Since that method
|
||||
|
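GetProjects() above now falls back to the group list 'default,platform-<os>' when manifest.groups is unset, and splits the configured value on commas or whitespace. A small standalone sketch of just that parsing step (the actual group-matching logic lives in Project.MatchesGroups and is omitted here):

import platform
import re

def parse_groups(groups_str):
  """Split a manifest.groups value on commas or whitespace, dropping empties."""
  return [x for x in re.split(r'[,\s]+', groups_str) if x]

# Fallback used by the new code path when manifest.groups is unset.
groups = None
if not groups:
  groups = 'default,platform-' + platform.system().lower()
print(parse_groups(groups))   # e.g. ['default', 'platform-linux']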
@ -26,42 +26,53 @@ following DTD:
|
||||
manifest-server?,
|
||||
remove-project*,
|
||||
project*,
|
||||
extend-project*,
|
||||
repo-hooks?)>
|
||||
|
||||
|
||||
<!ELEMENT notice (#PCDATA)>
|
||||
|
||||
|
||||
<!ELEMENT remote (EMPTY)>
|
||||
<!ATTLIST remote name ID #REQUIRED>
|
||||
<!ATTLIST remote alias CDATA #IMPLIED>
|
||||
<!ATTLIST remote fetch CDATA #REQUIRED>
|
||||
<!ATTLIST remote review CDATA #IMPLIED>
|
||||
|
||||
<!ATTLIST remote revision CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT default (EMPTY)>
|
||||
<!ATTLIST default remote IDREF #IMPLIED>
|
||||
<!ATTLIST default revision CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-j CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-s CDATA #IMPLIED>
|
||||
<!ATTLIST default remote IDREF #IMPLIED>
|
||||
<!ATTLIST default revision CDATA #IMPLIED>
|
||||
<!ATTLIST default dest-branch CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-j CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST default sync-s CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT manifest-server (EMPTY)>
|
||||
<!ATTLIST url CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT project (annotation?,
|
||||
|
||||
<!ELEMENT project (annotation*,
|
||||
project*)>
|
||||
<!ATTLIST project name CDATA #REQUIRED>
|
||||
<!ATTLIST project path CDATA #IMPLIED>
|
||||
<!ATTLIST project remote IDREF #IMPLIED>
|
||||
<!ATTLIST project revision CDATA #IMPLIED>
|
||||
<!ATTLIST project groups CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-s CDATA #IMPLIED>
|
||||
<!ATTLIST project name CDATA #REQUIRED>
|
||||
<!ATTLIST project path CDATA #IMPLIED>
|
||||
<!ATTLIST project remote IDREF #IMPLIED>
|
||||
<!ATTLIST project revision CDATA #IMPLIED>
|
||||
<!ATTLIST project dest-branch CDATA #IMPLIED>
|
||||
<!ATTLIST project groups CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-s CDATA #IMPLIED>
|
||||
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST project clone-depth CDATA #IMPLIED>
|
||||
<!ATTLIST project force-path CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT annotation (EMPTY)>
|
||||
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||
<!ATTLIST annotation value CDATA #REQUIRED>
|
||||
<!ATTLIST annotation keep CDATA "true">
|
||||
|
||||
|
||||
<!ELEMENT extend-project>
|
||||
<!ATTLIST extend-project name CDATA #REQUIRED>
|
||||
<!ATTLIST extend-project path CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project groups CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT remove-project (EMPTY)>
|
||||
<!ATTLIST remove-project name CDATA #REQUIRED>
|
||||
|
||||
@ -108,6 +119,10 @@ Attribute `review`: Hostname of the Gerrit server where reviews
|
||||
are uploaded to by `repo upload`. This attribute is optional;
|
||||
if not specified then `repo upload` will not function.
|
||||
|
||||
Attribute `revision`: Name of a Git branch (e.g. `master` or
|
||||
`refs/heads/master`). Remotes with their own revision will override
|
||||
the default revision.
|
||||
|
||||
Element default
|
||||
---------------
|
||||
|
||||
@ -123,14 +138,19 @@ Attribute `revision`: Name of a Git branch (e.g. `master` or
|
||||
`refs/heads/master`). Project elements lacking their own
|
||||
revision attribute will use this revision.
|
||||
|
||||
Attribute `sync_j`: Number of parallel jobs to use when synching.
|
||||
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
|
||||
Project elements not setting their own `dest-branch` will inherit
|
||||
this value. If this value is not set, projects will use `revision`
|
||||
by default instead.
|
||||
|
||||
Attribute `sync_c`: Set to true to only sync the given Git
|
||||
Attribute `sync-j`: Number of parallel jobs to use when synching.
|
||||
|
||||
Attribute `sync-c`: Set to true to only sync the given Git
|
||||
branch (specified in the `revision` attribute) rather than the
|
||||
whole ref space. Project elements lacking a sync_c element of
|
||||
whole ref space. Project elements lacking a sync-c element of
|
||||
their own will use this value.
|
||||
|
||||
Attribute `sync_s`: Set to true to also sync sub-projects.
|
||||
Attribute `sync-s`: Set to true to also sync sub-projects.
|
||||
|
||||
|
||||
Element manifest-server
|
||||
@ -199,7 +219,13 @@ to track for this project. Names can be relative to refs/heads
|
||||
(e.g. just "master") or absolute (e.g. "refs/heads/master").
|
||||
Tags and/or explicit SHA-1s should work in theory, but have not
|
||||
been extensively tested. If not supplied the revision given by
|
||||
the default element is used.
|
||||
the remote element is used if applicable, else the default
|
||||
element is used.
|
||||
|
||||
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
|
||||
When using `repo upload`, changes will be submitted for code
|
||||
review on this branch. If unspecified both here and in the
|
||||
default element, `revision` is used instead.
|
||||
|
||||
Attribute `groups`: List of groups to which this project belongs,
|
||||
whitespace or comma separated. All projects belong to the group
|
||||
@ -212,16 +238,42 @@ group "notdefault", it will not be automatically downloaded by repo.
|
||||
If the project has a parent element, the `name` and `path` here
|
||||
are the prefixed ones.
|
||||
|
||||
Attribute `sync_c`: Set to true to only sync the given Git
|
||||
Attribute `sync-c`: Set to true to only sync the given Git
|
||||
branch (specified in the `revision` attribute) rather than the
|
||||
whole ref space.
|
||||
|
||||
Attribute `sync_s`: Set to true to also sync sub-projects.
|
||||
Attribute `sync-s`: Set to true to also sync sub-projects.
|
||||
|
||||
Attribute `upstream`: Name of the Git branch in which a sha1
|
||||
Attribute `upstream`: Name of the Git ref in which a sha1
|
||||
can be found. Used when syncing a revision locked manifest in
|
||||
-c mode to avoid having to sync the entire ref space.
|
||||
|
||||
Attribute `clone-depth`: Set the depth to use when fetching this
|
||||
project. If specified, this value will override any value given
|
||||
to repo init with the --depth option on the command line.
|
||||
|
||||
Attribute `force-path`: Set to true to force this project to create the
|
||||
local mirror repository according to its `path` attribute (if supplied)
|
||||
rather than the `name` attribute. This attribute only applies to the
|
||||
local mirrors syncing, it will be ignored when syncing the projects in a
|
||||
client working directory.
|
||||
|
||||
Element extend-project
|
||||
----------------------
|
||||
|
||||
Modify the attributes of the named project.
|
||||
|
||||
This element is mostly useful in a local manifest file, to modify the
|
||||
attributes of an existing project without completely replacing the
|
||||
existing project definition. This makes the local manifest more robust
|
||||
against changes to the original manifest.
|
||||
|
||||
Attribute `path`: If specified, limit the change to projects checked out
|
||||
at the specified path, rather than all projects with the given name.
|
||||
|
||||
Attribute `groups`: List of additional groups to which this project
|
||||
belongs. Same syntax as the corresponding element of `project`.
|
||||
|
||||
Element annotation
|
||||
------------------
|
||||
|
||||
|
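The DTD and attribute changes above add a per-remote revision, a dest-branch default, and the extend-project element. As a rough illustration only, the following made-up manifest fragment exercises those attributes and is parsed with the same xml.dom.minidom API that manifest_xml.py uses; the remote URL and project names are hypothetical:

import xml.dom.minidom

MANIFEST = """\
<manifest>
  <remote name="aosp" fetch="https://android.googlesource.com" revision="master"/>
  <default remote="aosp" revision="refs/heads/master" dest-branch="master" sync-j="4"/>
  <project name="platform/build" path="build" clone-depth="1"/>
  <extend-project name="platform/build" groups="tools"/>
</manifest>
"""

doc = xml.dom.minidom.parseString(MANIFEST)
for node in doc.documentElement.childNodes:
  if node.nodeName == 'extend-project':
    # extend-project only modifies a project that was already declared.
    print('extend', node.getAttribute('name'), node.getAttribute('groups'))
  elif node.nodeName == 'project':
    print('project', node.getAttribute('name'), node.getAttribute('path'))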
7 error.py
@ -24,6 +24,13 @@ class ManifestInvalidRevisionError(Exception):
class NoManifestException(Exception):
  """The required manifest does not exist.
  """
  def __init__(self, path, reason):
    super(NoManifestException, self).__init__()
    self.path = path
    self.reason = reason

  def __str__(self):
    return self.reason

class EditorError(Exception):
  """Unspecified error from the user's text editor.
|
@ -14,13 +14,16 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import fcntl
|
||||
import os
|
||||
import select
|
||||
import sys
|
||||
import subprocess
|
||||
import tempfile
|
||||
from signal import SIGTERM
|
||||
from error import GitError
|
||||
from trace import REPO_TRACE, IsTrace, Trace
|
||||
from wrapper import Wrapper
|
||||
|
||||
GIT = 'git'
|
||||
MIN_GIT_VERSION = (1, 5, 4)
|
||||
@ -75,24 +78,29 @@ def terminate_ssh_clients():
|
||||
|
||||
_git_version = None
|
||||
|
||||
class _sfd(object):
|
||||
"""select file descriptor class"""
|
||||
def __init__(self, fd, dest, std_name):
|
||||
assert std_name in ('stdout', 'stderr')
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
self.std_name = std_name
|
||||
def fileno(self):
|
||||
return self.fd.fileno()
|
||||
|
||||
class _GitCall(object):
|
||||
def version(self):
|
||||
p = GitCommand(None, ['--version'], capture_stdout=True)
|
||||
if p.Wait() == 0:
|
||||
return p.stdout
|
||||
return p.stdout.decode('utf-8')
|
||||
return None
|
||||
|
||||
def version_tuple(self):
|
||||
global _git_version
|
||||
|
||||
if _git_version is None:
|
||||
ver_str = git.version()
|
||||
if ver_str.startswith('git version '):
|
||||
_git_version = tuple(
|
||||
map(int,
|
||||
ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
|
||||
))
|
||||
else:
|
||||
_git_version = Wrapper().ParseGitVersion(ver_str)
|
||||
if _git_version is None:
|
||||
print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return _git_version
|
||||
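version_tuple() above turns the output of git --version into an integer tuple, dropping any '-rc' style suffix before splitting on dots. A hedged standalone sketch of that parsing (the wrapper-module fallback path is omitted):

def parse_git_version(ver_str):
  """Turn 'git version 2.45.1' (or '1.9.0-rc1') into a tuple like (2, 45, 1)."""
  prefix = 'git version '
  if not ver_str.startswith(prefix):
    return None
  num = ver_str[len(prefix):].strip().split('-')[0]   # drop e.g. '-rc1'
  return tuple(map(int, num.split('.')[0:3]))

print(parse_git_version('git version 1.9.0-rc1'))  # (1, 9, 0)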
@ -143,6 +151,9 @@ class GitCommand(object):
|
||||
if key in env:
|
||||
del env[key]
|
||||
|
||||
# If we are not capturing std* then need to print it.
|
||||
self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
|
||||
|
||||
if disable_editor:
|
||||
_setenv(env, 'GIT_EDITOR', ':')
|
||||
if ssh_proxy:
|
||||
@ -166,22 +177,21 @@ class GitCommand(object):
|
||||
if gitdir:
|
||||
_setenv(env, GIT_DIR, gitdir)
|
||||
cwd = None
|
||||
command.extend(cmdv)
|
||||
command.append(cmdv[0])
|
||||
# Need to use the --progress flag for fetch/clone so output will be
|
||||
# displayed as by default git only does progress output if stderr is a TTY.
|
||||
if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
|
||||
if '--progress' not in cmdv and '--quiet' not in cmdv:
|
||||
command.append('--progress')
|
||||
command.extend(cmdv[1:])
|
||||
|
||||
if provide_stdin:
|
||||
stdin = subprocess.PIPE
|
||||
else:
|
||||
stdin = None
|
||||
|
||||
if capture_stdout:
|
||||
stdout = subprocess.PIPE
|
||||
else:
|
||||
stdout = None
|
||||
|
||||
if capture_stderr:
|
||||
stderr = subprocess.PIPE
|
||||
else:
|
||||
stderr = None
|
||||
stdout = subprocess.PIPE
|
||||
stderr = subprocess.PIPE
|
||||
|
||||
if IsTrace():
|
||||
global LAST_CWD
|
||||
@ -230,8 +240,34 @@ class GitCommand(object):
|
||||
def Wait(self):
|
||||
try:
|
||||
p = self.process
|
||||
(self.stdout, self.stderr) = p.communicate()
|
||||
rc = p.returncode
|
||||
rc = self._CaptureOutput()
|
||||
finally:
|
||||
_remove_ssh_client(p)
|
||||
return rc
|
||||
|
||||
def _CaptureOutput(self):
|
||||
p = self.process
|
||||
s_in = [_sfd(p.stdout, sys.stdout, 'stdout'),
|
||||
_sfd(p.stderr, sys.stderr, 'stderr')]
|
||||
self.stdout = ''
|
||||
self.stderr = ''
|
||||
|
||||
for s in s_in:
|
||||
flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
while s_in:
|
||||
in_ready, _, _ = select.select(s_in, [], [])
|
||||
for s in in_ready:
|
||||
buf = s.fd.read(4096)
|
||||
if not buf:
|
||||
s_in.remove(s)
|
||||
continue
|
||||
if s.std_name == 'stdout':
|
||||
self.stdout += buf
|
||||
else:
|
||||
self.stderr += buf
|
||||
if self.tee[s.std_name]:
|
||||
s.dest.write(buf)
|
||||
s.dest.flush()
|
||||
return p.wait()
|
||||
|
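The new _CaptureOutput() above multiplexes the child's stdout and stderr with select(), switching both pipes to non-blocking mode so it can accumulate the text while still echoing whichever stream is not being captured. A simplified sketch of the same tee pattern, using os.read() on the raw descriptors instead of fcntl'd file objects (POSIX only; names and simplifications are mine):

import os
import select
import subprocess
import sys

def run_and_tee(argv):
  """Run argv; echo its stdout/stderr live while also capturing both."""
  p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  dests = {p.stdout.fileno(): sys.stdout, p.stderr.fileno(): sys.stderr}
  captured = {fd: b'' for fd in dests}
  pending = list(dests)
  while pending:
    ready, _, _ = select.select(pending, [], [])
    for fd in ready:
      buf = os.read(fd, 4096)      # returns immediately once select() says readable
      if not buf:                  # EOF: stop watching this pipe
        pending.remove(fd)
        continue
      captured[fd] += buf
      dests[fd].write(buf.decode(errors='replace'))   # tee to the real stream
      dests[fd].flush()
  rc = p.wait()
  return rc, captured[p.stdout.fileno()], captured[p.stderr.fileno()]

# rc, out, err = run_and_tee(['git', '--version'])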
116 git_config.py
@ -14,7 +14,8 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import cPickle
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
@ -24,14 +25,13 @@ try:
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
import time
|
||||
try:
|
||||
import urllib2
|
||||
except ImportError:
|
||||
# For python3
|
||||
|
||||
from pyversion import is_python3
|
||||
if is_python3():
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
else:
|
||||
# For python2
|
||||
import urllib2
|
||||
import imp
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
@ -40,6 +40,10 @@ else:
|
||||
from signal import SIGTERM
|
||||
from error import GitError, UploadError
|
||||
from trace import Trace
|
||||
if is_python3():
|
||||
from http.client import HTTPException
|
||||
else:
|
||||
from httplib import HTTPException
|
||||
|
||||
from git_command import GitCommand
|
||||
from git_command import ssh_sock
|
||||
@ -76,7 +80,7 @@ class GitConfig(object):
|
||||
return cls(configfile = os.path.join(gitdir, 'config'),
|
||||
defaults = defaults)
|
||||
|
||||
def __init__(self, configfile, defaults=None, pickleFile=None):
|
||||
def __init__(self, configfile, defaults=None, jsonFile=None):
|
||||
self.file = configfile
|
||||
self.defaults = defaults
|
||||
self._cache_dict = None
|
||||
@ -84,12 +88,11 @@ class GitConfig(object):
|
||||
self._remotes = {}
|
||||
self._branches = {}
|
||||
|
||||
if pickleFile is None:
|
||||
self._pickle = os.path.join(
|
||||
self._json = jsonFile
|
||||
if self._json is None:
|
||||
self._json = os.path.join(
|
||||
os.path.dirname(self.file),
|
||||
'.repopickle_' + os.path.basename(self.file))
|
||||
else:
|
||||
self._pickle = pickleFile
|
||||
'.repo_' + os.path.basename(self.file) + '.json')
|
||||
|
||||
def Has(self, name, include_defaults = True):
|
||||
"""Return true if this configuration file has the key.
|
||||
@ -213,9 +216,9 @@ class GitConfig(object):
|
||||
"""Resolve any url.*.insteadof references.
|
||||
"""
|
||||
for new_url in self.GetSubSections('url'):
|
||||
old_url = self.GetString('url.%s.insteadof' % new_url)
|
||||
if old_url is not None and url.startswith(old_url):
|
||||
return new_url + url[len(old_url):]
|
||||
for old_url in self.GetString('url.%s.insteadof' % new_url, True):
|
||||
if old_url is not None and url.startswith(old_url):
|
||||
return new_url + url[len(old_url):]
|
||||
return url
|
||||
|
||||
@property
|
||||
@ -244,50 +247,41 @@ class GitConfig(object):
|
||||
return self._cache_dict
|
||||
|
||||
def _Read(self):
|
||||
d = self._ReadPickle()
|
||||
d = self._ReadJson()
|
||||
if d is None:
|
||||
d = self._ReadGit()
|
||||
self._SavePickle(d)
|
||||
self._SaveJson(d)
|
||||
return d
|
||||
|
||||
def _ReadPickle(self):
|
||||
def _ReadJson(self):
|
||||
try:
|
||||
if os.path.getmtime(self._pickle) \
|
||||
if os.path.getmtime(self._json) \
|
||||
<= os.path.getmtime(self.file):
|
||||
os.remove(self._pickle)
|
||||
os.remove(self._json)
|
||||
return None
|
||||
except OSError:
|
||||
return None
|
||||
try:
|
||||
Trace(': unpickle %s', self.file)
|
||||
fd = open(self._pickle, 'rb')
|
||||
Trace(': parsing %s', self.file)
|
||||
fd = open(self._json)
|
||||
try:
|
||||
return cPickle.load(fd)
|
||||
return json.load(fd)
|
||||
finally:
|
||||
fd.close()
|
||||
except EOFError:
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
except IOError:
|
||||
os.remove(self._pickle)
|
||||
return None
|
||||
except cPickle.PickleError:
|
||||
os.remove(self._pickle)
|
||||
except (IOError, ValueError):
|
||||
os.remove(self._json)
|
||||
return None
|
||||
|
||||
def _SavePickle(self, cache):
|
||||
def _SaveJson(self, cache):
|
||||
try:
|
||||
fd = open(self._pickle, 'wb')
|
||||
fd = open(self._json, 'w')
|
||||
try:
|
||||
cPickle.dump(cache, fd, cPickle.HIGHEST_PROTOCOL)
|
||||
json.dump(cache, fd, indent=2)
|
||||
finally:
|
||||
fd.close()
|
||||
except IOError:
|
||||
if os.path.exists(self._pickle):
|
||||
os.remove(self._pickle)
|
||||
except cPickle.PickleError:
|
||||
if os.path.exists(self._pickle):
|
||||
os.remove(self._pickle)
|
||||
except (IOError, TypeError):
|
||||
if os.path.exists(self.json):
|
||||
os.remove(self._json)
|
||||
|
||||
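The hunks above swap the cPickle cache for a JSON sidecar that is thrown away whenever the real config file has a newer mtime or the cache fails to parse. A generic, hedged sketch of that mtime-guarded cache (names are illustrative, not the GitConfig internals):

import json
import os

def load_cache(cache_path, source_path):
  """Return the cached dict, or None if the cache is missing or stale."""
  try:
    if os.path.getmtime(cache_path) <= os.path.getmtime(source_path):
      os.remove(cache_path)            # source changed after the cache was written
      return None
  except OSError:
    return None
  try:
    with open(cache_path) as fd:
      return json.load(fd)
  except (IOError, ValueError):        # unreadable or corrupt cache: drop it
    os.remove(cache_path)
    return None

def save_cache(cache_path, data):
  try:
    with open(cache_path, 'w') as fd:
      json.dump(data, fd, indent=2)
  except (IOError, TypeError):         # unwritable path or unserializable value
    if os.path.exists(cache_path):
      os.remove(cache_path)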
def _ReadGit(self):
|
||||
"""
|
||||
@ -300,8 +294,8 @@ class GitConfig(object):
|
||||
d = self._do('--null', '--list')
|
||||
if d is None:
|
||||
return c
|
||||
for line in d.rstrip('\0').split('\0'): # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
for line in d.decode('utf-8').rstrip('\0').split('\0'): # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
if '\n' in line:
|
||||
key, val = line.split('\n', 1)
|
||||
else:
|
||||
@ -537,8 +531,8 @@ class Remote(object):
|
||||
self.url = self._Get('url')
|
||||
self.review = self._Get('review')
|
||||
self.projectname = self._Get('projectname')
|
||||
self.fetch = map(RefSpec.FromString,
|
||||
self._Get('fetch', all_keys=True))
|
||||
self.fetch = list(map(RefSpec.FromString,
|
||||
self._Get('fetch', all_keys=True)))
|
||||
self._review_url = None
|
||||
|
||||
def _InsteadOf(self):
|
||||
@ -572,7 +566,9 @@ class Remote(object):
|
||||
return None
|
||||
|
||||
u = self.review
|
||||
if not u.startswith('http:') and not u.startswith('https:'):
|
||||
if u.startswith('persistent-'):
|
||||
u = u[len('persistent-'):]
|
||||
if u.split(':')[0] not in ('http', 'https', 'sso'):
|
||||
u = 'http://%s' % u
|
||||
if u.endswith('/Gerrit'):
|
||||
u = u[:len(u) - len('/Gerrit')]
|
||||
@ -588,19 +584,19 @@ class Remote(object):
|
||||
host, port = os.environ['REPO_HOST_PORT_INFO'].split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
elif u.startswith('sso:'):
|
||||
self._review_url = u # Assume it's right
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
else:
|
||||
try:
|
||||
info_url = u + 'ssh_info'
|
||||
info = urllib.request.urlopen(info_url).read()
|
||||
if '<' in info:
|
||||
# Assume the server gave us some sort of HTML
|
||||
# response back, like maybe a login page.
|
||||
if info == 'NOT_AVAILABLE' or '<' in info:
|
||||
# If `info` contains '<', we assume the server gave us some sort
|
||||
# of HTML response back, like maybe a login page.
|
||||
#
|
||||
raise UploadError('%s: Cannot parse response' % info_url)
|
||||
|
||||
if info == 'NOT_AVAILABLE':
|
||||
# Assume HTTP if SSH is not enabled.
|
||||
self._review_url = http_url + 'p/'
|
||||
# Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
|
||||
self._review_url = http_url
|
||||
else:
|
||||
host, port = info.split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
@ -608,6 +604,8 @@ class Remote(object):
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
except urllib.error.URLError as e:
|
||||
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||
except HTTPException as e:
|
||||
raise UploadError('%s: %s' % (self.review, e.__class__.__name__))
|
||||
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
return self._review_url + self.projectname
|
||||
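ReviewUrl resolution above now strips a persistent- prefix, accepts sso: URLs as-is, and falls back to plain HTTP when the server's /ssh_info endpoint answers NOT_AVAILABLE or returns HTML. A simplified, Python 3-only sketch of that probe (error handling, the user email, and the REVIEW_CACHE are omitted; the function name is mine):

import urllib.request

def resolve_review_url(review):
  """Best-effort sketch: turn a manifest 'review' value into a usable base URL."""
  u = review
  if u.startswith('persistent-'):
    u = u[len('persistent-'):]
  if u.split(':')[0] not in ('http', 'https', 'sso'):
    u = 'http://%s' % u
  if not u.endswith('/'):
    u += '/'
  if u.startswith('sso:'):
    return u                      # assume it is already correct
  info = urllib.request.urlopen(u + 'ssh_info').read().decode()
  if info == 'NOT_AVAILABLE' or '<' in info:
    # SSH is disabled, or we got an HTML page (e.g. a login form): stay on HTTP.
    return u
  host, port = info.split()
  return 'ssh://%s:%s/' % (host, port)

# resolve_review_url('android-review.googlesource.com')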
@ -621,9 +619,7 @@ class Remote(object):
|
||||
def ToLocal(self, rev):
|
||||
"""Convert a remote revision string to something we have locally.
|
||||
"""
|
||||
if IsId(rev):
|
||||
return rev
|
||||
if rev.startswith(R_TAGS):
|
||||
if self.name == '.' or IsId(rev):
|
||||
return rev
|
||||
|
||||
if not rev.startswith('refs/'):
|
||||
@ -632,6 +628,10 @@ class Remote(object):
|
||||
for spec in self.fetch:
|
||||
if spec.SourceMatches(rev):
|
||||
return spec.MapSource(rev)
|
||||
|
||||
if not rev.startswith(R_HEADS):
|
||||
return rev
|
||||
|
||||
raise GitError('remote %s does not have %s' % (self.name, rev))
|
||||
|
||||
def WritesTo(self, ref):
|
||||
@ -657,7 +657,7 @@ class Remote(object):
|
||||
self._Set('url', self.url)
|
||||
self._Set('review', self.review)
|
||||
self._Set('projectname', self.projectname)
|
||||
self._Set('fetch', map(str, self.fetch))
|
||||
self._Set('fetch', list(map(str, self.fetch)))
|
||||
|
||||
def _Set(self, key, value):
|
||||
key = 'remote.%s.%s' % (self.name, key)
|
||||
@ -701,7 +701,7 @@ class Branch(object):
|
||||
self._Set('merge', self.merge)
|
||||
|
||||
else:
|
||||
fd = open(self._config.file, 'ab')
|
||||
fd = open(self._config.file, 'a')
|
||||
try:
|
||||
fd.write('[branch "%s"]\n' % self.name)
|
||||
if self.remote:
|
||||
|
11 git_refs.py
@ -66,7 +66,7 @@ class GitRefs(object):
  def _NeedUpdate(self):
    Trace(': scan refs %s', self._gitdir)

    for name, mtime in self._mtime.iteritems():
    for name, mtime in self._mtime.items():
      try:
        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
          return True
@ -89,7 +89,7 @@ class GitRefs(object):
    attempts = 0
    while scan and attempts < 5:
      scan_next = {}
      for name, dest in scan.iteritems():
      for name, dest in scan.items():
        if dest in self._phyref:
          self._phyref[name] = self._phyref[dest]
        else:
@ -100,7 +100,7 @@ class GitRefs(object):
  def _ReadPackedRefs(self):
    path = os.path.join(self._gitdir, 'packed-refs')
    try:
      fd = open(path, 'rb')
      fd = open(path, 'r')
      mtime = os.path.getmtime(path)
    except IOError:
      return
@ -108,6 +108,7 @@ class GitRefs(object):
      return
    try:
      for line in fd:
        line = str(line)
        if line[0] == '#':
          continue
        if line[0] == '^':
@ -150,6 +151,10 @@ class GitRefs(object):
    finally:
      fd.close()

    try:
      ref_id = ref_id.decode()
    except AttributeError:
      pass
    if not ref_id:
      return
    ref_id = ref_id[:-1]
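_ReadPackedRefs() above walks .git/packed-refs, skipping the '#' header line and '^' peel lines and recording 'SHA refname' pairs. A standalone sketch of that file format, assuming the usual packed-refs layout:

def read_packed_refs(path):
  """Parse a git packed-refs file into {ref_name: sha}; peel lines are ignored."""
  refs = {}
  try:
    fd = open(path)
  except IOError:
    return refs
  try:
    for line in fd:
      line = line.rstrip('\n')
      if not line or line[0] == '#':   # header/comment line
        continue
      if line[0] == '^':               # peeled tag object, belongs to the previous ref
        continue
      sha, _, name = line.partition(' ')
      if name:
        refs[name] = sha
  finally:
    fd.close()
  return refs

# read_packed_refs('.git/packed-refs')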
@ -1,5 +1,4 @@
#!/bin/sh
# From Gerrit Code Review 2.5.2
#
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
#
@ -27,7 +26,7 @@ MSG="$1"
#
add_ChangeId() {
clean_message=`sed -e '
/^diff --git a\/.*/{
/^diff --git .*/{
s///
q
}
@ -39,6 +38,11 @@ add_ChangeId() {
return
fi

if test "false" = "`git config --bool --get gerrit.createChangeId`"
then
return
fi

# Does Change-Id: already exist? if so, exit (no change).
if grep -i '^Change-Id:' "$MSG" >/dev/null
then
@ -77,7 +81,7 @@ add_ChangeId() {
# Skip the line starting with the diff command and everything after it,
# up to the end of the file, assuming it is only patch data.
# If more than one line before the diff was empty, strip all but one.
/^diff --git a/ {
/^diff --git / {
blankLines = 0
while (getline) { }
next
@ -154,7 +158,7 @@ add_ChangeId() {
if (unprinted) {
print "Change-Id: I'"$id"'"
}
}' "$MSG" > $T && mv $T "$MSG" || rm -f $T
}' "$MSG" > "$T" && mv "$T" "$MSG" || rm -f "$T"
}
_gen_ChangeIdInput() {
echo "tree `git write-tree`"
@ -35,7 +35,7 @@ elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
then
exit 0
elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
grep -q "Currently drawing from 'AC Power'"
grep -q "drawing from 'AC Power'"
then
exit 0
elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
156 main.py
@ -22,16 +22,21 @@ import optparse
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
try:
|
||||
import urllib2
|
||||
except ImportError:
|
||||
# For python3
|
||||
|
||||
from pyversion import is_python3
|
||||
if is_python3():
|
||||
import urllib.request
|
||||
else:
|
||||
# For python2
|
||||
import urllib2
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
|
||||
try:
|
||||
import kerberos
|
||||
except ImportError:
|
||||
kerberos = None
|
||||
|
||||
from color import SetDefaultColoring
|
||||
from trace import SetTrace
|
||||
from git_command import git, GitCommand
|
||||
from git_config import init_ssh, close_ssh
|
||||
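The import block above (like the matching one in git_config.py) makes Python 2's urllib2 importable under the Python 3 urllib.request layout by synthesizing a module object. A minimal sketch of the shim, using sys.version_info in place of the repo-specific pyversion helper:

import sys

if sys.version_info[0] >= 3:
  import urllib.request
  import urllib.error
else:
  # Python 2: expose urllib2 under the Python 3 module layout.
  import imp
  import urllib2
  urllib = imp.new_module('urllib')
  urllib.request = urllib2
  urllib.error = urllib2

# Either way, later code can simply call urllib.request.urlopen(...).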
@ -47,9 +52,15 @@ from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
from manifest_xml import XmlManifest
|
||||
from pager import RunPager
|
||||
from wrapper import WrapperPath, Wrapper
|
||||
|
||||
from subcmds import all_commands
|
||||
|
||||
if not is_python3():
|
||||
# pylint:disable=W0622
|
||||
input = raw_input
|
||||
# pylint:enable=W0622
|
||||
|
||||
global_options = optparse.OptionParser(
|
||||
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
|
||||
)
|
||||
@ -59,6 +70,9 @@ global_options.add_option('-p', '--paginate',
|
||||
global_options.add_option('--no-pager',
|
||||
dest='no_pager', action='store_true',
|
||||
help='disable the pager')
|
||||
global_options.add_option('--color',
|
||||
choices=('auto', 'always', 'never'), default=None,
|
||||
help='control color usage: auto, always, never')
|
||||
global_options.add_option('--trace',
|
||||
dest='trace', action='store_true',
|
||||
help='trace git command execution')
|
||||
@ -103,6 +117,8 @@ class _Repo(object):
|
||||
print('fatal: invalid usage of --version', file=sys.stderr)
|
||||
return 1
|
||||
|
||||
SetDefaultColoring(gopts.color)
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
except KeyError:
|
||||
@ -119,8 +135,15 @@ class _Repo(object):
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
try:
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
except NoManifestException as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
@ -136,15 +159,13 @@ class _Repo(object):
|
||||
start = time.time()
|
||||
try:
|
||||
result = cmd.Execute(copts, cargs)
|
||||
except DownloadError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except ManifestInvalidRevisionError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except NoManifestException as e:
|
||||
print('error: manifest required for this command -- please run init',
|
||||
file=sys.stderr)
|
||||
except (DownloadError, ManifestInvalidRevisionError,
|
||||
NoManifestException) as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
if isinstance(e, NoManifestException):
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
result = 1
|
||||
except NoSuchProjectError as e:
|
||||
if e.name:
|
||||
@ -165,21 +186,10 @@ class _Repo(object):
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _MyRepoPath():
|
||||
return os.path.dirname(__file__)
|
||||
|
||||
def _MyWrapperPath():
|
||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
||||
|
||||
_wrapper_module = None
|
||||
def WrapperModule():
|
||||
global _wrapper_module
|
||||
if not _wrapper_module:
|
||||
_wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
|
||||
return _wrapper_module
|
||||
|
||||
def _CurrentWrapperVersion():
|
||||
return WrapperModule().VERSION
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
if not repo_path:
|
||||
@ -189,7 +199,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
print('no --wrapper-version argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
exp = _CurrentWrapperVersion()
|
||||
exp = Wrapper().VERSION
|
||||
ver = tuple(map(int, ver.split('.')))
|
||||
if len(ver) == 1:
|
||||
ver = (0, ver[0])
|
||||
@ -201,7 +211,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
!!! You must upgrade before you can continue: !!!
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if exp > ver:
|
||||
@ -210,7 +220,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
... You should upgrade soon:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
|
||||
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
@ -286,7 +296,7 @@ def _AddPasswordFromUserInput(handler, msg, req):
|
||||
if user is None:
|
||||
print(msg)
|
||||
try:
|
||||
user = raw_input('User: ')
|
||||
user = input('User: ')
|
||||
password = getpass.getpass()
|
||||
except KeyboardInterrupt:
|
||||
return
|
||||
@ -338,6 +348,86 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
def __init__(self):
|
||||
self.retried = 0
|
||||
self.context = None
|
||||
self.handler_order = urllib.request.BaseHandler.handler_order - 50
|
||||
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
host = req.get_host()
|
||||
retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
|
||||
return retry
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
spn = "HTTP@%s" % host
|
||||
authdata = self._negotiate_get_authdata(auth_header, headers)
|
||||
|
||||
if self.retried > 3:
|
||||
raise urllib.request.HTTPError(req.get_full_url(), 401,
|
||||
"Negotiate auth failed", headers, None)
|
||||
else:
|
||||
self.retried += 1
|
||||
|
||||
neghdr = self._negotiate_get_svctk(spn, authdata)
|
||||
if neghdr is None:
|
||||
return None
|
||||
|
||||
req.add_unredirected_header('Authorization', neghdr)
|
||||
response = self.parent.open(req)
|
||||
|
||||
srvauth = self._negotiate_get_authdata(auth_header, response.info())
|
||||
if self._validate_response(srvauth):
|
||||
return response
|
||||
except kerberos.GSSError:
|
||||
return None
|
||||
except:
|
||||
self.reset_retry_count()
|
||||
raise
|
||||
finally:
|
||||
self._clean_context()
|
||||
|
||||
def reset_retry_count(self):
|
||||
self.retried = 0
|
||||
|
||||
def _negotiate_get_authdata(self, auth_header, headers):
|
||||
authhdr = headers.get(auth_header, None)
|
||||
if authhdr is not None:
|
||||
for mech_tuple in authhdr.split(","):
|
||||
mech, __, authdata = mech_tuple.strip().partition(" ")
|
||||
if mech.lower() == "negotiate":
|
||||
return authdata.strip()
|
||||
return None
|
||||
|
||||
def _negotiate_get_svctk(self, spn, authdata):
|
||||
if authdata is None:
|
||||
return None
|
||||
|
||||
result, self.context = kerberos.authGSSClientInit(spn)
|
||||
if result < kerberos.AUTH_GSS_COMPLETE:
|
||||
return None
|
||||
|
||||
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||
if result < kerberos.AUTH_GSS_CONTINUE:
|
||||
return None
|
||||
|
||||
response = kerberos.authGSSClientResponse(self.context)
|
||||
return "Negotiate %s" % response
|
||||
|
||||
def _validate_response(self, authdata):
|
||||
if authdata is None:
|
||||
return None
|
||||
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||
if result == kerberos.AUTH_GSS_COMPLETE:
|
||||
return True
|
||||
return None
|
||||
|
||||
def _clean_context(self):
|
||||
if self.context is not None:
|
||||
kerberos.authGSSClientClean(self.context)
|
||||
self.context = None
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
@ -354,6 +444,8 @@ def init_http():
|
||||
pass
|
||||
handlers.append(_BasicAuthHandler(mgr))
|
||||
handlers.append(_DigestAuthHandler(mgr))
|
||||
if kerberos:
|
||||
handlers.append(_KerberosAuthHandler())
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
|
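_KerberosAuthHandler above answers HTTP 401 Negotiate challenges through the optional pykerberos module, and init_http() only registers it when that module imports. A rough sketch of wiring such a handler into a urllib opener; the handler class itself is elided here and passed in as a hypothetical kerberos_handler_cls parameter, so this only shows the registration:

import os
import urllib.request

try:
  import kerberos          # pykerberos; optional dependency
except ImportError:
  kerberos = None

def init_http(kerberos_handler_cls=None):
  """Sketch: install auth and proxy handlers into the global urllib opener."""
  handlers = []
  mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
  handlers.append(urllib.request.HTTPBasicAuthHandler(mgr))
  handlers.append(urllib.request.HTTPDigestAuthHandler(mgr))
  if kerberos and kerberos_handler_cls:
    handlers.append(kerberos_handler_cls())   # e.g. a Negotiate (SPNEGO) handler
  if 'http_proxy' in os.environ:
    handlers.append(urllib.request.ProxyHandler({'http': os.environ['http_proxy']}))
  urllib.request.install_opener(urllib.request.build_opener(*handlers))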
314 manifest_xml.py
@ -18,42 +18,60 @@ import itertools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urlparse
|
||||
import xml.dom.minidom
|
||||
|
||||
from pyversion import is_python3
|
||||
if is_python3():
|
||||
import urllib.parse
|
||||
else:
|
||||
import imp
|
||||
import urlparse
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.parse = urlparse
|
||||
|
||||
from git_config import GitConfig
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError
|
||||
from error import ManifestParseError, ManifestInvalidRevisionError
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
|
||||
|
||||
urlparse.uses_relative.extend(['ssh', 'git'])
|
||||
urlparse.uses_netloc.extend(['ssh', 'git'])
|
||||
# urljoin gets confused if the scheme is not known.
|
||||
urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
|
||||
urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
|
||||
|
||||
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
revisionExpr = None
|
||||
destBranchExpr = None
|
||||
remote = None
|
||||
sync_j = 1
|
||||
sync_c = False
|
||||
sync_s = False
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.__dict__ != other.__dict__
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
alias=None,
|
||||
fetch=None,
|
||||
manifestUrl=None,
|
||||
review=None):
|
||||
review=None,
|
||||
revision=None):
|
||||
self.name = name
|
||||
self.fetchUrl = fetch
|
||||
self.manifestUrl = manifestUrl
|
||||
self.remoteAlias = alias
|
||||
self.reviewUrl = review
|
||||
self.revision = revision
|
||||
self.resolvedFetchUrl = self._resolveFetchUrl()
|
||||
|
||||
def __eq__(self, other):
|
||||
@ -65,18 +83,17 @@ class _XmlRemote(object):
|
||||
def _resolveFetchUrl(self):
|
||||
url = self.fetchUrl.rstrip('/')
|
||||
manifestUrl = self.manifestUrl.rstrip('/')
|
||||
p = manifestUrl.startswith('persistent-http')
|
||||
if p:
|
||||
manifestUrl = manifestUrl[len('persistent-'):]
|
||||
# urljoin will gets confused over quite a few things. The ones we care
|
||||
# about here are:
|
||||
# * no scheme in the base url, like <hostname:port>
|
||||
# We handle no scheme by replacing it with an obscure protocol, gopher
|
||||
# and then replacing it with the original when we are done.
|
||||
|
||||
# urljoin will get confused if there is no scheme in the base url
|
||||
# ie, if manifestUrl is of the form <hostname:port>
|
||||
if manifestUrl.find(':') != manifestUrl.find('/') - 1:
|
||||
manifestUrl = 'gopher://' + manifestUrl
|
||||
url = urlparse.urljoin(manifestUrl, url)
|
||||
url = re.sub(r'^gopher://', '', url)
|
||||
if p:
|
||||
url = 'persistent-' + url
|
||||
url = urllib.parse.urljoin('gopher://' + manifestUrl, url)
|
||||
url = re.sub(r'^gopher://', '', url)
|
||||
else:
|
||||
url = urllib.parse.urljoin(manifestUrl, url)
|
||||
return url
|
||||
|
||||
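_resolveFetchUrl() above joins a remote's (possibly relative) fetch value onto the manifest URL; when the manifest URL has no scheme it temporarily borrows gopher:// (a scheme urljoin already treats as relative-capable) and strips it off again. A hedged sketch of that trick, with a made-up host in the usage line:

import re
try:
  import urllib.parse as urlparse_mod   # Python 3
except ImportError:
  import urlparse as urlparse_mod       # Python 2

def resolve_fetch_url(manifest_url, fetch):
  """Join a (possibly relative) fetch URL onto the manifest URL, scheme or not."""
  fetch = fetch.rstrip('/')
  manifest_url = manifest_url.rstrip('/')
  if manifest_url.find(':') != manifest_url.find('/') - 1:
    # No "scheme://": give urljoin a throwaway scheme, then strip it back off.
    url = urlparse_mod.urljoin('gopher://' + manifest_url, fetch)
    return re.sub(r'^gopher://', '', url)
  return urlparse_mod.urljoin(manifest_url, fetch)

# resolve_fetch_url('gerrit.example.com:29418/manifests', '..')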
def ToRemoteSpec(self, projectName):
|
||||
@ -94,6 +111,7 @@ class XmlManifest(object):
|
||||
self.topdir = os.path.dirname(self.repodir)
|
||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
||||
self.globalConfig = GitConfig.ForUser()
|
||||
self.localManifestWarning = False
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
@ -137,8 +155,15 @@ class XmlManifest(object):
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', r.name)
|
||||
e.setAttribute('fetch', r.fetchUrl)
|
||||
if r.remoteAlias is not None:
|
||||
e.setAttribute('alias', r.remoteAlias)
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
if r.revision is not None:
|
||||
e.setAttribute('revision', r.revision)
|
||||
|
||||
def _ParseGroups(self, groups):
|
||||
return [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
@ -147,7 +172,7 @@ class XmlManifest(object):
|
||||
|
||||
groups = mp.config.GetString('manifest.groups')
|
||||
if groups:
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
groups = self._ParseGroups(groups)
|
||||
|
||||
doc = xml.dom.minidom.Document()
|
||||
root = doc.createElement('manifest')
|
||||
@ -163,10 +188,8 @@ class XmlManifest(object):
|
||||
notice_element.appendChild(doc.createTextNode(indented_notice))
|
||||
|
||||
d = self.default
|
||||
sort_remotes = list(self.remotes.keys())
|
||||
sort_remotes.sort()
|
||||
|
||||
for r in sort_remotes:
|
||||
for r in sorted(self.remotes):
|
||||
self._RemoteToXml(self.remotes[r], doc, root)
|
||||
if self.remotes:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
@ -199,8 +222,9 @@ class XmlManifest(object):
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
def output_projects(parent, parent_node, projects):
|
||||
for p in projects:
|
||||
output_project(parent, parent_node, self.projects[p])
|
||||
for project_name in projects:
|
||||
for project in self._projects[project_name]:
|
||||
output_project(parent, parent_node, project)
|
||||
|
||||
def output_project(parent, parent_node, p):
|
||||
if not p.MatchesGroups(groups):
|
||||
@ -217,8 +241,12 @@ class XmlManifest(object):
|
||||
e.setAttribute('name', name)
|
||||
if relpath != name:
|
||||
e.setAttribute('path', relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
remoteName = None
|
||||
if d.remote:
|
||||
remoteName = d.remote.remoteAlias or d.remote.name
|
||||
if not d.remote or p.remote.name != remoteName:
|
||||
remoteName = p.remote.name
|
||||
e.setAttribute('remote', remoteName)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
|
||||
@ -230,8 +258,12 @@ class XmlManifest(object):
|
||||
# isn't our value, and the if the default doesn't already have that
|
||||
# covered.
|
||||
e.setAttribute('upstream', p.revisionExpr)
|
||||
elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
|
||||
e.setAttribute('revision', p.revisionExpr)
|
||||
else:
|
||||
revision = self.remotes[remoteName].revision or d.revisionExpr
|
||||
if not revision or revision != p.revisionExpr:
|
||||
e.setAttribute('revision', p.revisionExpr)
|
||||
if p.upstream and p.upstream != p.revisionExpr:
|
||||
e.setAttribute('upstream', p.upstream)
|
||||
|
||||
for c in p.copyfiles:
|
||||
ce = doc.createElement('copyfile')
|
||||
@ -239,6 +271,12 @@ class XmlManifest(object):
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
for l in p.linkfiles:
|
||||
le = doc.createElement('linkfile')
|
||||
le.setAttribute('src', l.src)
|
||||
le.setAttribute('dest', l.dest)
|
||||
e.appendChild(le)
|
||||
|
||||
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
|
||||
egroups = [g for g in p.groups if g not in default_groups]
|
||||
if egroups:
|
||||
@ -258,14 +296,11 @@ class XmlManifest(object):
|
||||
e.setAttribute('sync-s', 'true')
|
||||
|
||||
if p.subprojects:
|
||||
sort_projects = [subp.name for subp in p.subprojects]
|
||||
sort_projects.sort()
|
||||
output_projects(p, e, sort_projects)
|
||||
subprojects = set(subp.name for subp in p.subprojects)
|
||||
output_projects(p, e, list(sorted(subprojects)))
|
||||
|
||||
sort_projects = [key for key in self.projects.keys()
|
||||
if not self.projects[key].parent]
|
||||
sort_projects.sort()
|
||||
output_projects(None, root, sort_projects)
|
||||
projects = set(p.name for p in self._paths.values() if not p.parent)
|
||||
output_projects(None, root, list(sorted(projects)))
|
||||
|
||||
if self._repo_hooks_project:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
@ -277,10 +312,15 @@ class XmlManifest(object):
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
||||
@property
|
||||
def paths(self):
|
||||
self._Load()
|
||||
return self._paths
|
||||
|
||||
@property
|
||||
def projects(self):
|
||||
self._Load()
|
||||
return self._projects
|
||||
return list(self._paths.values())
|
||||
|
||||
@property
|
||||
def remotes(self):
|
||||
@ -311,9 +351,14 @@ class XmlManifest(object):
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
@property
|
||||
def IsArchive(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.archive')
|
||||
|
||||
def _Unload(self):
|
||||
self._loaded = False
|
||||
self._projects = {}
|
||||
self._paths = {}
|
||||
self._remotes = {}
|
||||
self._default = None
|
||||
self._repo_hooks_project = None
|
||||
@ -335,9 +380,11 @@ class XmlManifest(object):
|
||||
|
||||
local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
|
||||
if os.path.exists(local):
|
||||
print('warning: %s is deprecated; put local manifests in `%s` instead'
|
||||
% (LOCAL_MANIFEST_NAME, os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
file=sys.stderr)
|
||||
if not self.localManifestWarning:
|
||||
self.localManifestWarning = True
|
||||
print('warning: %s is deprecated; put local manifests in `%s` instead'
|
||||
% (LOCAL_MANIFEST_NAME, os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME)),
|
||||
file=sys.stderr)
|
||||
nodes.append(self._ParseManifestXml(local, self.repodir))
|
||||
|
||||
local_dir = os.path.abspath(os.path.join(self.repodir, LOCAL_MANIFESTS_DIR_NAME))
|
||||
@ -385,9 +432,8 @@ class XmlManifest(object):
|
||||
name = self._reqatt(node, 'name')
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
raise ManifestParseError, \
|
||||
"include %s doesn't exist or isn't a file" % \
|
||||
(name,)
|
||||
raise ManifestParseError("include %s doesn't exist or isn't a file"
|
||||
% (name,))
|
||||
try:
|
||||
nodes.extend(self._ParseManifestXml(fp, include_root))
|
||||
# should isolate this to the exact exception, but that's
|
||||
@ -416,11 +462,13 @@ class XmlManifest(object):
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'default':
|
||||
if self._default is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
self._default = self._ParseDefault(node)
|
||||
new_default = self._ParseDefault(node)
|
||||
if self._default is None:
|
||||
self._default = new_default
|
||||
elif new_default != self._default:
|
||||
raise ManifestParseError('duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
|
||||
if self._default is None:
|
||||
self._default = _Default()
|
||||
|
||||
@ -442,11 +490,17 @@ class XmlManifest(object):
|
||||
self._manifest_server = url
|
||||
|
||||
def recursively_add_projects(project):
|
||||
if self._projects.get(project.name):
|
||||
projects = self._projects.setdefault(project.name, [])
|
||||
if project.relpath is None:
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
'missing path for %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
if project.relpath in self._paths:
|
||||
raise ManifestParseError(
|
||||
'duplicate path %s in %s' %
|
||||
(project.relpath, self.manifestFile))
|
||||
self._paths[project.relpath] = project
|
||||
projects.append(project)
|
||||
for subproject in project.subprojects:
|
||||
recursively_add_projects(subproject)
|
||||
|
||||
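recursively_add_projects() above reworks the indexes so one project name may map to several checkouts (a list per name) while checkout paths stay unique. A tiny sketch of keeping those two indexes consistent, with made-up names and a plain dict standing in for Project objects:

class DuplicatePathError(Exception):
  pass

def add_project(projects_by_name, projects_by_path, name, relpath):
  """Index a project by name (many allowed) and by path (must be unique)."""
  if relpath is None:
    raise ValueError('missing path for %s' % name)
  if relpath in projects_by_path:
    raise DuplicatePathError('duplicate path %s' % relpath)
  entry = {'name': name, 'relpath': relpath}
  projects_by_name.setdefault(name, []).append(entry)
  projects_by_path[relpath] = entry
  return entry

by_name, by_path = {}, {}
add_project(by_name, by_path, 'platform/build', 'build')
add_project(by_name, by_path, 'platform/build', 'build-copy')   # same name, new path
print(len(by_name['platform/build']))   # 2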
@ -454,6 +508,23 @@ class XmlManifest(object):
|
||||
if node.nodeName == 'project':
|
||||
project = self._ParseProject(node)
|
||||
recursively_add_projects(project)
|
||||
if node.nodeName == 'extend-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
|
||||
if name not in self._projects:
|
||||
raise ManifestParseError('extend-project element specifies non-existent '
|
||||
'project: %s' % name)
|
||||
|
||||
path = node.getAttribute('path')
|
||||
groups = node.getAttribute('groups')
|
||||
if groups:
|
||||
groups = self._ParseGroups(groups)
|
||||
|
||||
for p in self._projects[name]:
|
||||
if path and p.relpath != path:
|
||||
continue
|
||||
if groups:
|
||||
p.groups.extend(groups)
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
@ -467,22 +538,31 @@ class XmlManifest(object):
|
||||
|
||||
# Store a reference to the Project.
|
||||
try:
|
||||
self._repo_hooks_project = self._projects[repo_hooks_project]
|
||||
repo_hooks_projects = self._projects[repo_hooks_project]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found for repo-hooks' %
|
||||
(repo_hooks_project))
|
||||
|
||||
if len(repo_hooks_projects) != 1:
|
||||
raise ManifestParseError(
|
||||
'internal error parsing repo-hooks in %s' %
|
||||
(self.manifestFile))
|
||||
self._repo_hooks_project = repo_hooks_projects[0]
|
||||
|
||||
# Store the enabled hooks in the Project object.
|
||||
self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
|
||||
if node.nodeName == 'remove-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
|
||||
if name not in self._projects:
|
||||
raise ManifestParseError('remove-project element specifies non-existent '
|
||||
'project: %s' % name)
|
||||
|
||||
for p in self._projects[name]:
|
||||
del self._paths[p.relpath]
|
||||
del self._projects[name]
|
||||
|
||||
# If the manifest removes the hooks project, treat it as if it deleted
|
||||
# the repo-hooks element too.
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
@ -493,7 +573,7 @@ class XmlManifest(object):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
if m_url.endswith('/.git'):
|
||||
raise ManifestParseError, 'refusing to mirror %s' % m_url
|
||||
raise ManifestParseError('refusing to mirror %s' % m_url)
|
||||
|
||||
if self._default and self._default.remote:
|
||||
url = self._default.remote.resolvedFetchUrl
|
||||
@ -519,11 +599,13 @@ class XmlManifest(object):
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = None,
|
||||
relpath = name or None,
|
||||
revisionExpr = m.revisionExpr,
|
||||
revisionId = None)
|
||||
self._projects[project.name] = project
|
||||
self._projects[project.name] = [project]
|
||||
self._paths[project.relpath] = project
|
||||
|
||||
def _ParseRemote(self, node):
|
||||
"""
|
||||
@ -537,8 +619,11 @@ class XmlManifest(object):
|
||||
review = node.getAttribute('review')
|
||||
if review == '':
|
||||
review = None
|
||||
revision = node.getAttribute('revision')
|
||||
if revision == '':
|
||||
revision = None
|
||||
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
|
||||
return _XmlRemote(name, alias, fetch, manifestUrl, review)
|
||||
return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
|
||||
|
||||
def _ParseDefault(self, node):
|
||||
"""
|
||||
@ -550,6 +635,8 @@ class XmlManifest(object):
|
||||
if d.revisionExpr == '':
|
||||
d.revisionExpr = None
|
||||
|
||||
d.destBranchExpr = node.getAttribute('dest-branch') or None
|
||||
|
||||
sync_j = node.getAttribute('sync-j')
|
||||
if sync_j == '' or sync_j is None:
|
||||
d.sync_j = 1
|
||||
@ -587,7 +674,7 @@ class XmlManifest(object):
|
||||
|
||||
# Figure out minimum indentation, skipping the first line (the same line
|
||||
# as the <notice> tag)...
|
||||
minIndent = sys.maxint
|
||||
minIndent = sys.maxsize
|
||||
lines = notice.splitlines()
|
||||
for line in lines[1:]:
|
||||
lstrippedLine = line.lstrip()
|
||||
@ -626,25 +713,22 @@ class XmlManifest(object):
|
||||
if remote is None:
|
||||
remote = self._default.remote
|
||||
if remote is None:
|
||||
raise ManifestParseError, \
|
||||
"no remote for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
raise ManifestParseError("no remote for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
|
||||
revisionExpr = node.getAttribute('revision')
|
||||
revisionExpr = node.getAttribute('revision') or remote.revision
|
||||
if not revisionExpr:
|
||||
revisionExpr = self._default.revisionExpr
|
||||
if not revisionExpr:
|
||||
raise ManifestParseError, \
|
||||
"no revision for project %s within %s" % \
|
||||
(name, self.manifestFile)
|
||||
raise ManifestParseError("no revision for project %s within %s" %
|
||||
(name, self.manifestFile))
|
||||
|
||||
path = node.getAttribute('path')
|
||||
if not path:
|
||||
path = name
|
||||
if path.startswith('/'):
|
||||
raise ManifestParseError, \
|
||||
"project %s path cannot be absolute in %s" % \
|
||||
(name, self.manifestFile)
|
||||
raise ManifestParseError("project %s path cannot be absolute in %s" %
|
||||
(name, self.manifestFile))
|
||||
|
||||
rebase = node.getAttribute('rebase')
|
||||
if not rebase:
|
||||
@ -664,25 +748,43 @@ class XmlManifest(object):
|
||||
else:
|
||||
sync_s = sync_s.lower() in ("yes", "true", "1")
|
||||
|
||||
clone_depth = node.getAttribute('clone-depth')
|
||||
if clone_depth:
|
||||
try:
|
||||
clone_depth = int(clone_depth)
|
||||
if clone_depth <= 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ManifestParseError('invalid clone-depth %s in %s' %
|
||||
(clone_depth, self.manifestFile))
|
||||
|
||||
dest_branch = node.getAttribute('dest-branch') or self._default.destBranchExpr
|
||||
|
||||
upstream = node.getAttribute('upstream')
|
||||
|
||||
groups = ''
|
||||
if node.hasAttribute('groups'):
|
||||
groups = node.getAttribute('groups')
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
groups = self._ParseGroups(groups)
|
||||
|
||||
if parent is None:
|
||||
relpath, worktree, gitdir = self.GetProjectPaths(name, path)
|
||||
relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
|
||||
else:
|
||||
relpath, worktree, gitdir = self.GetSubprojectPaths(parent, path)
|
||||
relpath, worktree, gitdir, objdir = \
|
||||
self.GetSubprojectPaths(parent, name, path)
|
||||
|
||||
default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
|
||||
if self.IsMirror and node.hasAttribute('force-path'):
|
||||
if node.getAttribute('force-path').lower() in ("yes", "true", "1"):
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = objdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
@ -691,12 +793,16 @@ class XmlManifest(object):
|
||||
groups = groups,
|
||||
sync_c = sync_c,
|
||||
sync_s = sync_s,
|
||||
clone_depth = clone_depth,
|
||||
upstream = upstream,
|
||||
parent = parent)
|
||||
parent = parent,
|
||||
dest_branch = dest_branch)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
if n.nodeName == 'linkfile':
|
||||
self._ParseLinkFile(project, n)
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
@ -709,10 +815,15 @@ class XmlManifest(object):
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
objdir = gitdir
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects', '%s.git' % path)
|
||||
return relpath, worktree, gitdir
|
||||
objdir = os.path.join(self.repodir, 'project-objects', '%s.git' % name)
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
def GetProjectsWithName(self, name):
|
||||
return self._projects.get(name, [])
|
||||
|
||||
def GetSubprojectName(self, parent, submodule_path):
|
||||
return os.path.join(parent.name, submodule_path)
|
||||
@ -723,14 +834,15 @@ class XmlManifest(object):
|
||||
def _UnjoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.relpath(relpath, parent_relpath)
|
||||
|
||||
def GetSubprojectPaths(self, parent, path):
|
||||
def GetSubprojectPaths(self, parent, name, path):
|
||||
relpath = self._JoinRelpath(parent.relpath, path)
|
||||
gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
|
||||
objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
else:
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
@ -740,6 +852,14 @@ class XmlManifest(object):
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _ParseLinkFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _ParseAnnotation(self, project, node):
|
||||
name = self._reqatt(node, 'name')
|
||||
value = self._reqatt(node, 'value')
|
||||
@ -748,7 +868,8 @@ class XmlManifest(object):
|
||||
except ManifestParseError:
|
||||
keep = "true"
|
||||
if keep != "true" and keep != "false":
|
||||
raise ManifestParseError, "optional \"keep\" attribute must be \"true\" or \"false\""
|
||||
raise ManifestParseError('optional "keep" attribute must be '
|
||||
'"true" or "false"')
|
||||
project.AddAnnotation(name, value, keep)
|
||||
|
||||
def _get_remote(self, node):
|
||||
@ -758,9 +879,8 @@ class XmlManifest(object):
|
||||
|
||||
v = self._remotes.get(name)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"remote %s not defined in %s" % \
|
||||
(name, self.manifestFile)
|
||||
raise ManifestParseError("remote %s not defined in %s" %
|
||||
(name, self.manifestFile))
|
||||
return v
|
||||
|
||||
def _reqatt(self, node, attname):
|
||||
@ -769,7 +889,41 @@ class XmlManifest(object):
|
||||
"""
|
||||
v = node.getAttribute(attname)
|
||||
if not v:
|
||||
raise ManifestParseError, \
|
||||
"no %s in <%s> within %s" % \
|
||||
(attname, node.nodeName, self.manifestFile)
|
||||
raise ManifestParseError("no %s in <%s> within %s" %
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
return v

  def projectsDiff(self, manifest):
    """return the projects differences between two manifests.

    The diff will be from self to given manifest.

    """
    fromProjects = self.paths
    toProjects = manifest.paths

    fromKeys = sorted(fromProjects.keys())
    toKeys = sorted(toProjects.keys())

    diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}

    for proj in fromKeys:
      if not proj in toKeys:
        diff['removed'].append(fromProjects[proj])
      else:
        fromProj = fromProjects[proj]
        toProj = toProjects[proj]
        try:
          fromRevId = fromProj.GetCommitRevisionId()
          toRevId = toProj.GetCommitRevisionId()
        except ManifestInvalidRevisionError:
          diff['unreachable'].append((fromProj, toProj))
        else:
          if fromRevId != toRevId:
            diff['changed'].append((fromProj, toProj))
        toKeys.remove(proj)

    for proj in toKeys:
      diff['added'].append(toProjects[proj])

    return diff
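
For illustration only, a minimal sketch of how the projectsDiff method added above can be driven; it uses only APIs visible in this diff (XmlManifest, Override, projectsDiff), and the repodir and manifest file names are invented examples, not values from this change.

from manifest_xml import XmlManifest

# Hypothetical repodir and manifest names; any two manifests under .repo/manifests work.
old = XmlManifest('.repo')
old.Override('old-release.xml')
new = XmlManifest('.repo')
new.Override('default.xml')

diff = old.projectsDiff(new)
for project in diff['added']:
  print('A %s %s' % (project.relpath, project.revisionExpr))
for fromProj, toProj in diff['changed']:
  print('C %s %s..%s' % (fromProj.relpath, fromProj.revisionExpr, toProj.revisionExpr))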

project.py: 879 lines changed (file diff suppressed because it is too large)

pyversion.py: new file (19 lines)
@@ -0,0 +1,19 @@
#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

def is_python3():
  return sys.version_info[0] == 3
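
The helper above is the whole of the new pyversion.py module. As a rough usage sketch, assuming the same import-shim pattern that appears later in this change set (for example in subcmds/init.py), a caller can branch its imports on is_python3():

from pyversion import is_python3

if is_python3():
  import urllib.parse
else:
  # Python 2 fallback mirroring the shim used later in this change set.
  import imp
  import urlparse
  urllib = imp.new_module('urllib')
  urllib.parse = urlparse.urlparse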
repo: 263 lines changed
@@ -2,7 +2,6 @@
|
||||
|
||||
## repo default configuration
|
||||
##
|
||||
from __future__ import print_function
|
||||
REPO_URL = 'https://gerrit.googlesource.com/git-repo'
|
||||
REPO_REV = 'stable'
|
||||
|
||||
@ -21,10 +20,10 @@ REPO_REV = 'stable'
|
||||
# limitations under the License.
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 19)
|
||||
VERSION = (1, 21)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1, 1)
|
||||
KEYRING_VERSION = (1, 2)
|
||||
MAINTAINER_KEYS = """
|
||||
|
||||
Repo Maintainer <repo@android.kernel.org>
|
||||
@ -73,32 +72,32 @@ TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: GnuPG v1.4.11 (GNU/Linux)
|
||||
|
||||
mQENBFBiLPwBCACvISTASOgFXwADw2GYRH2I2z9RvYkYoZ6ThTTNlMXbbYYKO2Wo
|
||||
a9LQDNW0TbCEekg5UKk0FD13XOdWaqUt4Gtuvq9c43GRSjMO6NXH+0BjcQ8vUtY2
|
||||
/W4CYUevwdo4nQ1+1zsOCu1XYe/CReXq0fdugv3hgmRmh3sz1soo37Q44W2frxxg
|
||||
U7Rz3Da4FjgAL0RQ8qndD+LwRHXTY7H7wYM8V/3cYFZV7pSodd75q3MAXYQLf0ZV
|
||||
QR1XATu5l1QnXrxgHvz7MmDwb1D+jX3YPKnZveaukigQ6hDHdiVcePBiGXmk8LZC
|
||||
2jQkdXeF7Su1ZYpr2nnEHLJ6vOLcCpPGb8gDABEBAAG0H0NvbmxleSBPd2VucyA8
|
||||
Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlBiLPwCGwMGCwkIBwMCBhUIAgkK
|
||||
CwQWAgMBAh4BAheAAAoJEBkmlFUziHGkHVkH/2Hks2Cif5i2xPtv2IFZcjL42joU
|
||||
T7lO5XFqUYS9ZNHpGa/V0eiPt7rHoO16glR83NZtwlrq2cSN89i9HfOhMYV/qLu8
|
||||
fLCHcV2muw+yCB5s5bxnI5UkToiNZyBNqFkcOt/Kbj9Hpy68A1kmc6myVEaUYebq
|
||||
2Chx/f3xuEthan099t746v1K+/6SvQGDNctHuaMr9cWdxZtHjdRf31SQRc99Phe5
|
||||
w+ZGR/ebxNDKRK9mKgZT8wVFHlXerJsRqWIqtx1fsW1UgLgbpcpe2MChm6B5wTu0
|
||||
s1ltzox3l4q71FyRRPUJxXyvGkDLZWpK7EpiHSCOYq/KP3HkKeXU3xqHpcG5AQ0E
|
||||
UGIs/AEIAKzO/7lO9cB6dshmZYo8Vy/b7aGicThE+ChcDSfhvyOXVdEM2GKAjsR+
|
||||
rlBWbTFX3It301p2HwZPFEi9nEvJxVlqqBiW0bPmNMkDRR55l2vbWg35wwkg6RyE
|
||||
Bc5/TQjhXI2w8IvlimoGoUff4t3JmMOnWrnKSvL+5iuRj12p9WmanCHzw3Ee7ztf
|
||||
/aU/q+FTpr3DLerb6S8xbv86ySgnJT6o5CyL2DCWRtnYQyGVi0ZmLzEouAYiO0hs
|
||||
z0AAu28Mj+12g2WwePRz6gfM9rHtI37ylYW3oT/9M9mO9ei/Bc/1D7Dz6qNV+0vg
|
||||
uSVJxM2Bl6GalHPZLhHntFEdIA6EdoUAEQEAAYkBHwQYAQIACQUCUGIs/AIbDAAK
|
||||
CRAZJpRVM4hxpNfkB/0W/hP5WK/NETXBlWXXW7JPaWO2c5kGwD0lnj5RRmridyo1
|
||||
vbm5PdM91jOsDQYqRu6YOoYBnDnEhB2wL2bPh34HWwwrA+LwB8hlcAV2z1bdwyfl
|
||||
3R823fReKN3QcvLHzmvZPrF4Rk97M9UIyKS0RtnfTWykRgDWHIsrtQPoNwsXrWoT
|
||||
9LrM2v+1+9mp3vuXnE473/NHxmiWEQH9Ez+O/mOxQ7rSOlqGRiKq/IBZCfioJOtV
|
||||
fTQeIu/yASZnsLBqr6SJEGwYBoWcyjG++k4fyw8ocOAo4uGDYbxgN7yYfNQ0OH7o
|
||||
V6pfUgqKLWa/aK7/N1ZHnPdFLD8Xt0Dmy4BPwrKC
|
||||
=O7am
|
||||
mQENBFHRvc8BCADFg45Xx/y6QDC+T7Y/gGc7vx0ww7qfOwIKlAZ9xG3qKunMxo+S
|
||||
hPCnzEl3cq+6I1Ww/ndop/HB3N3toPXRCoN8Vs4/Hc7by+SnaLFnacrm+tV5/OgT
|
||||
V37Lzt8lhay1Kl+YfpFwHYYpIEBLFV9knyfRXS/428W2qhdzYfvB15/AasRmwmor
|
||||
py4NIzSs8UD/SPr1ihqNCdZM76+MQyN5HMYXW/ALZXUFG0pwluHFA7hrfPG74i8C
|
||||
zMiP7qvMWIl/r/jtzHioH1dRKgbod+LZsrDJ8mBaqsZaDmNJMhss9g76XvfMyLra
|
||||
9DI9/iFuBpGzeqBv0hwOGQspLRrEoyTeR6n1ABEBAAG0H0NvbmxleSBPd2VucyA8
|
||||
Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlHRvc8CGwMGCwkIBwMCBhUIAgkK
|
||||
CwQWAgMBAh4BAheAAAoJEGe35EhpKzgsP6AIAJKJmNtn4l7hkYHKHFSo3egb6RjQ
|
||||
zEIP3MFTcu8HFX1kF1ZFbrp7xqurLaE53kEkKuAAvjJDAgI8mcZHP1JyplubqjQA
|
||||
xvv84gK+OGP3Xk+QK1ZjUQSbjOpjEiSZpRhWcHci3dgOUH4blJfByHw25hlgHowd
|
||||
a/2PrNKZVcJ92YienaxxGjcXEUcd0uYEG2+rwllQigFcnMFDhr9B71MfalRHjFKE
|
||||
fmdoypqLrri61YBc59P88Rw2/WUpTQjgNubSqa3A2+CKdaRyaRw+2fdF4TdR0h8W
|
||||
zbg+lbaPtJHsV+3mJC7fq26MiJDRJa5ZztpMn8su20gbLgi2ShBOaHAYDDi5AQ0E
|
||||
UdG9zwEIAMoOBq+QLNozAhxOOl5GL3StTStGRgPRXINfmViTsihrqGCWBBUfXlUE
|
||||
OytC0mYcrDUQev/8ToVoyqw+iGSwDkcSXkrEUCKFtHV/GECWtk1keyHgR10YKI1R
|
||||
mquSXoubWGqPeG1PAI74XWaRx8UrL8uCXUtmD8Q5J7mDjKR5NpxaXrwlA0bKsf2E
|
||||
Gp9tu1kKauuToZhWHMRMqYSOGikQJwWSFYKT1KdNcOXLQF6+bfoJ6sjVYdwfmNQL
|
||||
Ixn8QVhoTDedcqClSWB17VDEFDFa7MmqXZz2qtM3X1R/MUMHqPtegQzBGNhRdnI2
|
||||
V45+1Nnx/uuCxDbeI4RbHzujnxDiq70AEQEAAYkBHwQYAQIACQUCUdG9zwIbDAAK
|
||||
CRBnt+RIaSs4LNVeB/0Y2pZ8I7gAAcEM0Xw8drr4omg2fUoK1J33ozlA/RxeA/lJ
|
||||
I3KnyCDTpXuIeBKPGkdL8uMATC9Z8DnBBajRlftNDVZS3Hz4G09G9QpMojvJkFJV
|
||||
By+01Flw/X+eeN8NpqSuLV4W+AjEO8at/VvgKr1AFvBRdZ7GkpI1o6DgPe7ZqX+1
|
||||
dzQZt3e13W0rVBb/bUgx9iSLoeWP3aq/k+/GRGOR+S6F6BBSl0SQ2EF2+dIywb1x
|
||||
JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv
|
||||
+H/g3HsL2LOB9uoIm8/5p2TTU5ttYCXMHhQZ81AY
|
||||
=AUp4
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
"""
|
||||
|
||||
@ -108,27 +107,44 @@ repodir = '.repo' # name of repo's private directory
|
||||
S_repo = 'repo' # special repo repository
|
||||
S_manifests = 'manifests' # special manifest repository
|
||||
REPO_MAIN = S_repo + '/main.py' # main script
|
||||
MIN_PYTHON_VERSION = (2, 6) # minimum supported python version
|
||||
|
||||
|
||||
import errno
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
try:
|
||||
import urllib2
|
||||
except ImportError:
|
||||
# For python3
|
||||
|
||||
if sys.version_info[0] == 3:
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
else:
|
||||
# For python2
|
||||
import imp
|
||||
import urllib2
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
urllib.error = urllib2
|
||||
|
||||
|
||||
def _print(*objects, **kwargs):
|
||||
sep = kwargs.get('sep', ' ')
|
||||
end = kwargs.get('end', '\n')
|
||||
out = kwargs.get('file', sys.stdout)
|
||||
out.write(sep.join(objects) + end)
|
||||
|
||||
|
||||
# Python version check
|
||||
ver = sys.version_info
|
||||
if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
|
||||
_print('error: Python version %s unsupported.\n'
|
||||
'Please use Python 2.6 - 2.7 instead.'
|
||||
% sys.version.split(' ')[0], file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
home_dot_repo = os.path.expanduser('~/.repoconfig')
|
||||
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
|
||||
@ -162,9 +178,14 @@ group.add_option('--reference',
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
group.add_option('--archive',
|
||||
dest='archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
'each project. See git archive.')
|
||||
group.add_option('-g', '--groups',
|
||||
dest='groups', default='default',
|
||||
help='restrict manifest projects to ones with a specified group',
|
||||
help='restrict manifest projects to ones with specified '
|
||||
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
|
||||
metavar='GROUP')
|
||||
group.add_option('-p', '--platform',
|
||||
dest='platform', default="auto",
|
||||
@ -217,15 +238,15 @@ def _Init(args):
|
||||
if branch.startswith('refs/heads/'):
|
||||
branch = branch[len('refs/heads/'):]
|
||||
if branch.startswith('refs/'):
|
||||
print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
|
||||
_print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
if not os.path.isdir(repodir):
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s'
|
||||
% (repodir, e.strerror), file=sys.stderr)
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s'
|
||||
% (repodir, e.strerror), file=sys.stderr)
|
||||
# Don't raise CloneFailure; that would delete the
|
||||
# name. Instead exit immediately.
|
||||
#
|
||||
@ -249,37 +270,50 @@ def _Init(args):
|
||||
_Checkout(dst, branch, rev, opt.quiet)
|
||||
except CloneFailure:
|
||||
if opt.quiet:
|
||||
print('fatal: repo init failed; run without --quiet to see why',
|
||||
file=sys.stderr)
|
||||
_print('fatal: repo init failed; run without --quiet to see why',
|
||||
file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def ParseGitVersion(ver_str):
|
||||
if not ver_str.startswith('git version '):
|
||||
return None
|
||||
|
||||
num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
|
||||
to_tuple = []
|
||||
for num_str in num_ver_str.split('.')[:3]:
|
||||
if num_str.isdigit():
|
||||
to_tuple.append(int(num_str))
|
||||
else:
|
||||
to_tuple.append(0)
|
||||
return tuple(to_tuple)
|
||||
|
||||
|
||||
def _CheckGitVersion():
|
||||
cmd = [GIT, '--version']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
except OSError as e:
|
||||
print(file=sys.stderr)
|
||||
print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
print('fatal: %s' % e, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print('Please make sure %s is installed and in your path.' % GIT,
|
||||
file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
_print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
_print('fatal: %s' % e, file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
_print('Please make sure %s is installed and in your path.' % GIT,
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = proc.stdout.read().strip()
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
|
||||
if not ver_str.startswith('git version '):
|
||||
print('error: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
ver_act = ParseGitVersion(ver_str)
|
||||
if ver_act is None:
|
||||
_print('error: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = ver_str[len('git version '):].strip()
|
||||
ver_act = tuple(map(int, ver_str.split('.')[0:3]))
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
need = '.'.join(map(str, MIN_GIT_VERSION))
|
||||
print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
_print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
@ -302,20 +336,20 @@ def NeedSetupGnuPG():
|
||||
|
||||
|
||||
def SetupGnuPG(quiet):
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s'
|
||||
% (home_dot_repo, e.strerror), file=sys.stderr)
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s'
|
||||
% (home_dot_repo, e.strerror), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isdir(gpg_dir):
|
||||
try:
|
||||
os.mkdir(gpg_dir, stat.S_IRWXU)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
|
||||
file=sys.stderr)
|
||||
try:
|
||||
os.mkdir(gpg_dir, stat.S_IRWXU)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
env = os.environ.copy()
|
||||
@ -328,18 +362,18 @@ def SetupGnuPG(quiet):
|
||||
stdin = subprocess.PIPE)
|
||||
except OSError as e:
|
||||
if not quiet:
|
||||
print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
|
||||
print('warning: Installing it is strongly encouraged.', file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
_print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
|
||||
_print('warning: Installing it is strongly encouraged.', file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
return False
|
||||
|
||||
proc.stdin.write(MAINTAINER_KEYS)
|
||||
proc.stdin.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print('fatal: registering repo maintainer keys failed', file=sys.stderr)
|
||||
_print('fatal: registering repo maintainer keys failed', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
print()
|
||||
_print()
|
||||
|
||||
fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
|
||||
fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
|
||||
@ -381,7 +415,7 @@ def _InitHttp():
|
||||
|
||||
def _Fetch(url, local, src, quiet):
|
||||
if not quiet:
|
||||
print('Get %s' % url, file=sys.stderr)
|
||||
_print('Get %s' % url, file=sys.stderr)
|
||||
|
||||
cmd = [GIT, 'fetch']
|
||||
if quiet:
|
||||
@ -428,18 +462,18 @@ def _DownloadBundle(url, local, quiet):
|
||||
try:
|
||||
r = urllib.request.urlopen(url)
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.code in [403, 404]:
|
||||
if e.code in [401, 403, 404]:
|
||||
return False
|
||||
print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
print('fatal: HTTP error %s' % e.code, file=sys.stderr)
|
||||
_print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
_print('fatal: HTTP error %s' % e.code, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
except urllib.error.URLError as e:
|
||||
print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
print('fatal: error %s' % e.reason, file=sys.stderr)
|
||||
_print('fatal: Cannot get %s' % url, file=sys.stderr)
|
||||
_print('fatal: error %s' % e.reason, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
try:
|
||||
if not quiet:
|
||||
print('Get %s' % url, file=sys.stderr)
|
||||
_print('Get %s' % url, file=sys.stderr)
|
||||
while True:
|
||||
buf = r.read(8192)
|
||||
if buf == '':
|
||||
@ -463,23 +497,23 @@ def _Clone(url, local, quiet):
|
||||
try:
|
||||
os.mkdir(local)
|
||||
except OSError as e:
|
||||
print('fatal: cannot make %s directory: %s' % (local, e.strerror),
|
||||
file=sys.stderr)
|
||||
_print('fatal: cannot make %s directory: %s' % (local, e.strerror),
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
cmd = [GIT, 'init', '--quiet']
|
||||
try:
|
||||
proc = subprocess.Popen(cmd, cwd = local)
|
||||
except OSError as e:
|
||||
print(file=sys.stderr)
|
||||
print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
print('fatal: %s' % e, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print('Please make sure %s is installed and in your path.' % GIT,
|
||||
_print(file=sys.stderr)
|
||||
_print("fatal: '%s' is not available" % GIT, file=sys.stderr)
|
||||
_print('fatal: %s' % e, file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
_print('Please make sure %s is installed and in your path.' % GIT,
|
||||
file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
if proc.wait() != 0:
|
||||
print('fatal: could not create %s' % local, file=sys.stderr)
|
||||
_print('fatal: could not create %s' % local, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
_InitHttp()
|
||||
@ -507,18 +541,18 @@ def _Verify(cwd, branch, quiet):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0 or not cur:
|
||||
print(file=sys.stderr)
|
||||
print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
_print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
|
||||
if m:
|
||||
cur = m.group(1)
|
||||
if not quiet:
|
||||
print(file=sys.stderr)
|
||||
print("info: Ignoring branch '%s'; using tagged release '%s'"
|
||||
_print(file=sys.stderr)
|
||||
_print("info: Ignoring branch '%s'; using tagged release '%s'"
|
||||
% (branch, cur), file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
@ -536,10 +570,10 @@ def _Verify(cwd, branch, quiet):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print(file=sys.stderr)
|
||||
print(out, file=sys.stderr)
|
||||
print(err, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
_print(out, file=sys.stderr)
|
||||
_print(err, file=sys.stderr)
|
||||
_print(file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
return '%s^0' % cur
|
||||
|
||||
@ -606,7 +640,7 @@ def _ParseArguments(args):
|
||||
|
||||
|
||||
def _Usage():
|
||||
print(
|
||||
_print(
|
||||
"""usage: repo COMMAND [ARGS]
|
||||
|
||||
repo is not yet installed. Use "repo init" to install it here.
|
||||
@ -627,23 +661,23 @@ def _Help(args):
|
||||
init_optparse.print_help()
|
||||
sys.exit(0)
|
||||
else:
|
||||
print("error: '%s' is not a bootstrap command.\n"
|
||||
' For access to online help, install repo ("repo init").'
|
||||
% args[0], file=sys.stderr)
|
||||
_print("error: '%s' is not a bootstrap command.\n"
|
||||
' For access to online help, install repo ("repo init").'
|
||||
% args[0], file=sys.stderr)
|
||||
else:
|
||||
_Usage()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _NotInstalled():
|
||||
print('error: repo is not installed. Use "repo init" to install it here.',
|
||||
file=sys.stderr)
|
||||
_print('error: repo is not installed. Use "repo init" to install it here.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _NoCommands(cmd):
|
||||
print("""error: command '%s' requires repo to be installed first.
|
||||
Use "repo init" to install it here.""" % cmd, file=sys.stderr)
|
||||
_print("""error: command '%s' requires repo to be installed first.
|
||||
Use "repo init" to install it here.""" % cmd, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -680,7 +714,7 @@ def _SetDefaultsTo(gitdir):
|
||||
proc.stderr.close()
|
||||
|
||||
if proc.wait() != 0:
|
||||
print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
|
||||
_print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -704,12 +738,7 @@ def main(orig_args):
|
||||
try:
|
||||
_Init(args)
|
||||
except CloneFailure:
|
||||
for root, dirs, files in os.walk(repodir, topdown=False):
|
||||
for name in files:
|
||||
os.remove(os.path.join(root, name))
|
||||
for name in dirs:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(repodir)
|
||||
shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
|
||||
sys.exit(1)
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
else:
|
||||
@ -719,7 +748,7 @@ def main(orig_args):
|
||||
repo_main = my_main
|
||||
|
||||
ver_str = '.'.join(map(str, VERSION))
|
||||
me = [repo_main,
|
||||
me = [sys.executable, repo_main,
|
||||
'--repo-dir=%s' % rel_repo_dir,
|
||||
'--wrapper-version=%s' % ver_str,
|
||||
'--wrapper-path=%s' % wrapper_path,
|
||||
@ -727,12 +756,16 @@ def main(orig_args):
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
try:
|
||||
os.execv(repo_main, me)
|
||||
os.execv(sys.executable, me)
|
||||
except OSError as e:
|
||||
print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
print("fatal: %s" % e, file=sys.stderr)
|
||||
_print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
_print("fatal: %s" % e, file=sys.stderr)
|
||||
sys.exit(148)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if ver[0] == 3:
|
||||
_print('warning: Python 3 support is currently experimental. YMMV.\n'
|
||||
'Please use Python 2.6 - 2.7 instead.',
|
||||
file=sys.stderr)
|
||||
main(sys.argv[1:])
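
To make the new ParseGitVersion helper in the launcher above concrete, a few example calls follow; the version strings are invented, and the expected results are derived only from the parsing logic shown in this diff.

ParseGitVersion('git version 1.8.2.1')    # -> (1, 8, 2): only the first three numeric fields are kept
ParseGitVersion('git version 2.1.0-rc1')  # -> (2, 1, 0): the '-rc1' suffix is split off
ParseGitVersion('not a git banner')       # -> None: the 'git version ' prefix is required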

@@ -38,8 +38,8 @@ for py in os.listdir(my_dir):
|
||||
try:
|
||||
cmd = getattr(mod, clsn)()
|
||||
except AttributeError:
|
||||
raise SyntaxError, '%s/%s does not define class %s' % (
|
||||
__name__, py, clsn)
|
||||
raise SyntaxError('%s/%s does not define class %s' % (
|
||||
__name__, py, clsn))
|
||||
|
||||
name = name.replace('_', '-')
|
||||
cmd.NAME = name
|
||||
|
@ -46,6 +46,10 @@ class BranchInfo(object):
|
||||
def IsCurrent(self):
|
||||
return self.current > 0
|
||||
|
||||
@property
|
||||
def IsSplitCurrent(self):
|
||||
return self.current != 0 and self.current != len(self.projects)
|
||||
|
||||
@property
|
||||
def IsPublished(self):
|
||||
return self.published > 0
|
||||
@ -98,14 +102,13 @@ is shown, then the branch appears in all projects.
|
||||
project_cnt = len(projects)
|
||||
|
||||
for project in projects:
|
||||
for name, b in project.GetBranches().iteritems():
|
||||
for name, b in project.GetBranches().items():
|
||||
b.project = project
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
|
||||
names = all_branches.keys()
|
||||
names.sort()
|
||||
names = list(sorted(all_branches))
|
||||
|
||||
if not names:
|
||||
print(' (no branches)', file=sys.stderr)
|
||||
@ -140,10 +143,14 @@ is shown, then the branch appears in all projects.
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
non_cur_paths = []
|
||||
if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
|
||||
in_type = 'in'
|
||||
for b in i.projects:
|
||||
paths.append(b.project.relpath)
|
||||
if not i.IsSplitCurrent or b.current:
|
||||
paths.append(b.project.relpath)
|
||||
else:
|
||||
non_cur_paths.append(b.project.relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
in_type = 'not in'
|
||||
@ -155,13 +162,19 @@ is shown, then the branch appears in all projects.
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
if width + 7 + len(s) < 80:
|
||||
if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
|
||||
fmt = out.current if i.IsCurrent else fmt
|
||||
fmt(s)
|
||||
else:
|
||||
fmt(' %s:' % in_type)
|
||||
fmt = out.current if i.IsCurrent else out.write
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
fmt = out.write
|
||||
for p in non_cur_paths:
|
||||
out.nl()
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
@ -81,7 +81,7 @@ change id will be added.
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
print('NOTE: When committing (please see above) and editing the commit'
|
||||
print('NOTE: When committing (please see above) and editing the commit '
|
||||
'message, please remove the old Change-Id-line and add:')
|
||||
print(self._GetReference(sha1), file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||

subcmds/diffmanifests.py: new file (195 lines)
@@ -0,0 +1,195 @@
|
||||
#
|
||||
# Copyright (C) 2014 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from color import Coloring
|
||||
from command import PagedCommand
|
||||
from manifest_xml import XmlManifest
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
class Diffmanifests(PagedCommand):
|
||||
""" A command to see logs in projects represented by manifests
|
||||
|
||||
This is used to see deeper differences between manifests. Where a simple
|
||||
diff would only show a diff of sha1s for example, this command will display
|
||||
the logs of the project between both sha1s, allowing user to see diff at a
|
||||
deeper level.
|
||||
"""
|
||||
|
||||
common = True
|
||||
helpSummary = "Manifest diff utility"
|
||||
helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
|
||||
|
||||
helpDescription = """
|
||||
The %prog command shows differences between project revisions of manifest1 and
|
||||
manifest2. if manifest2 is not specified, current manifest.xml will be used
|
||||
instead. Both absolute and relative paths may be used for manifests. Relative
|
||||
paths start from project's ".repo/manifests" folder.
|
||||
|
||||
The --raw option Displays the diff in a way that facilitates parsing, the
|
||||
project pattern will be <status> <path> <revision from> [<revision to>] and the
|
||||
commit pattern will be <status> <onelined log> with status values respectively :
|
||||
|
||||
A = Added project
|
||||
R = Removed project
|
||||
C = Changed project
|
||||
U = Project with unreachable revision(s) (revision(s) not found)
|
||||
|
||||
for project, and
|
||||
|
||||
A = Added commit
|
||||
R = Removed commit
|
||||
|
||||
for a commit.
|
||||
|
||||
Only changed projects may contain commits, and commit status always starts with
|
||||
a space, and are part of last printed project.
|
||||
Unreachable revisions may occur if project is not up to date or if repo has not
|
||||
been initialized with all the groups, in which case some projects won't be
|
||||
synced and their revisions won't be found.
|
||||
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--raw',
|
||||
dest='raw', action='store_true',
|
||||
help='Display raw diff.')
|
||||
p.add_option('--no-color',
|
||||
dest='color', action='store_false', default=True,
|
||||
help='does not display the diff in color.')
|
||||
|
||||
def _printRawDiff(self, diff):
|
||||
for project in diff['added']:
|
||||
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project in diff['removed']:
|
||||
self.printText("R %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=True, color=False)
|
||||
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
def _printDiff(self, diff, color=True):
|
||||
if diff['added']:
|
||||
self.out.nl()
|
||||
self.printText('added projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['added']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['removed']:
|
||||
self.out.nl()
|
||||
self.printText('removed projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['removed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['changed']:
|
||||
self.out.nl()
|
||||
self.printText('changed projects : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' changed from ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' to ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=False, color=color)
|
||||
self.out.nl()
|
||||
|
||||
if diff['unreachable']:
|
||||
self.out.nl()
|
||||
self.printText('projects with unreachable revisions : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printProject('\t%s ' % (project.relpath))
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' or ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.printText(' not found')
|
||||
self.out.nl()
|
||||
|
||||
def _printLogs(self, project, otherProject, raw=False, color=True):
|
||||
logs = project.getAddedAndRemovedLogs(otherProject, oneline=True,
|
||||
color=color)
|
||||
if logs['removed']:
|
||||
removedLogs = logs['removed'].split('\n')
|
||||
for log in removedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' R ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printRemoved('\t\t[-] ')
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
if logs['added']:
|
||||
addedLogs = logs['added'].split('\n')
|
||||
for log in addedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' A ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printAdded('\t\t[+] ')
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not args or len(args) > 2:
|
||||
self.Usage()
|
||||
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.printText = self.out.nofmt_printer('text')
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer('project', attr = 'bold')
|
||||
self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
|
||||
self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
|
||||
self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
|
||||
else:
|
||||
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||
|
||||
manifest1 = XmlManifest(self.manifest.repodir)
|
||||
manifest1.Override(args[0])
|
||||
if len(args) == 1:
|
||||
manifest2 = self.manifest
|
||||
else:
|
||||
manifest2 = XmlManifest(self.manifest.repodir)
|
||||
manifest2.Override(args[1])
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(diff)
|
||||
else:
|
||||
self._printDiff(diff, color=opt.color)
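
The --raw format documented in the help text above is line-oriented. As a hedged sketch (the sample lines are invented), a consumer could split project records of the form <status> <path> <revision from> [<revision to>] like this:

sample = [
    'A external/new-project 1a2b3c4',
    'C build/make 1111111 2222222',
    'R vendor/dropped 3333333',
]
for line in sample:
  fields = line.split()
  status, path, rev_from = fields[0], fields[1], fields[2]
  rev_to = fields[3] if len(fields) > 3 else None
  print(status, path, rev_from, rev_to)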

@@ -18,6 +18,7 @@ import re
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from error import GitError
|
||||
|
||||
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
|
||||
|
||||
@ -87,7 +88,13 @@ makes it available in your project's local working directory.
|
||||
for c in dl.commits:
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit)
|
||||
try:
|
||||
project._CherryPick(dl.commit)
|
||||
except GitError:
|
||||
print('[%s] Could not complete the cherry-pick of %s' \
|
||||
% (project.name, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
|
@ -14,7 +14,9 @@
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import errno
|
||||
import fcntl
|
||||
import multiprocessing
|
||||
import re
|
||||
import os
|
||||
import select
|
||||
@ -31,6 +33,7 @@ _CAN_COLOR = [
|
||||
'log',
|
||||
]
|
||||
|
||||
|
||||
class ForallColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'forall')
|
||||
@ -42,10 +45,14 @@ class Forall(Command, MirrorSafeCommand):
|
||||
helpSummary = "Run a shell command in each project"
|
||||
helpUsage = """
|
||||
%prog [<project>...] -c <command> [<arg>...]
|
||||
%prog -r str1 [str2] ... -c <command> [<arg>...]"
|
||||
"""
|
||||
helpDescription = """
|
||||
Executes the same shell command in each project.
|
||||
|
||||
The -r option allows running the command only on projects matching
|
||||
regex or wildcard expression.
|
||||
|
||||
Output Formatting
|
||||
-----------------
|
||||
|
||||
@ -83,6 +90,12 @@ revision to a locally executed git command, use REPO_LREV.
|
||||
REPO_RREV is the name of the revision from the manifest, exactly
|
||||
as written in the manifest.
|
||||
|
||||
REPO_COUNT is the total number of projects being iterated.
|
||||
|
||||
REPO_I is the current (1-based) iteration count. Can be used in
|
||||
conjunction with REPO_COUNT to add a simple progress indicator to your
|
||||
command.
|
||||
|
||||
REPO__* are any extra environment variables, specified by the
|
||||
"annotation" element under any project element. This can be useful
|
||||
for differentiating trees based on user-specific criteria, or simply
|
||||
@ -103,6 +116,9 @@ without iterating through the remaining projects.
|
||||
setattr(parser.values, option.dest, list(parser.rargs))
|
||||
while parser.rargs:
|
||||
del parser.rargs[0]
|
||||
p.add_option('-r', '--regex',
|
||||
dest='regex', action='store_true',
|
||||
help="Execute the command only on projects matching regex or wildcard expression")
|
||||
p.add_option('-c', '--command',
|
||||
help='Command (and arguments) to execute',
|
||||
dest='command',
|
||||
@ -119,9 +135,31 @@ without iterating through the remaining projects.
|
||||
g.add_option('-v', '--verbose',
|
||||
dest='verbose', action='store_true',
|
||||
help='Show command error messages')
|
||||
g.add_option('-j', '--jobs',
|
||||
dest='jobs', action='store', type='int', default=1,
|
||||
help='number of commands to execute simultaneously')
|
||||
|
||||
def WantPager(self, opt):
|
||||
return opt.project_header
|
||||
return opt.project_header and opt.jobs == 1
|
||||
|
||||
def _SerializeProject(self, project):
|
||||
""" Serialize a project._GitGetByExec instance.
|
||||
|
||||
project._GitGetByExec is not pickle-able. Instead of trying to pass it
|
||||
around between processes, make a dict ourselves containing only the
|
||||
attributes that we need.
|
||||
|
||||
"""
|
||||
return {
|
||||
'name': project.name,
|
||||
'relpath': project.relpath,
|
||||
'remote_name': project.remote.name,
|
||||
'lrev': project.GetRevisionId(),
|
||||
'rrev': project.revisionExpr,
|
||||
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||
'gitdir': project.gitdir,
|
||||
'worktree': project.worktree,
|
||||
}
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not opt.command:
|
||||
@ -160,113 +198,165 @@ without iterating through the remaining projects.
|
||||
# pylint: enable=W0631
|
||||
|
||||
mirror = self.manifest.IsMirror
|
||||
out = ForallColoring(self.manifest.manifestProject.config)
|
||||
out.redirect(sys.stdout)
|
||||
|
||||
rc = 0
|
||||
first = True
|
||||
|
||||
for project in self.GetProjects(args):
|
||||
env = os.environ.copy()
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
env[name] = val.encode()
|
||||
if not opt.regex:
|
||||
projects = self.GetProjects(args)
|
||||
else:
|
||||
projects = self.FindProjects(args)
|
||||
|
||||
setenv('REPO_PROJECT', project.name)
|
||||
setenv('REPO_PATH', project.relpath)
|
||||
setenv('REPO_REMOTE', project.remote.name)
|
||||
setenv('REPO_LREV', project.GetRevisionId())
|
||||
setenv('REPO_RREV', project.revisionExpr)
|
||||
for a in project.annotations:
|
||||
setenv("REPO__%s" % (a.name), a.value)
|
||||
os.environ['REPO_COUNT'] = str(len(projects))
|
||||
|
||||
if mirror:
|
||||
setenv('GIT_DIR', project.gitdir)
|
||||
cwd = project.gitdir
|
||||
else:
|
||||
cwd = project.worktree
|
||||
|
||||
if not os.path.exists(cwd):
|
||||
if (opt.project_header and opt.verbose) \
|
||||
or not opt.project_header:
|
||||
print('skipping %s/' % project.relpath, file=sys.stderr)
|
||||
continue
|
||||
|
||||
if opt.project_header:
|
||||
stdin = subprocess.PIPE
|
||||
stdout = subprocess.PIPE
|
||||
stderr = subprocess.PIPE
|
||||
else:
|
||||
stdin = None
|
||||
stdout = None
|
||||
stderr = None
|
||||
|
||||
p = subprocess.Popen(cmd,
|
||||
cwd = cwd,
|
||||
shell = shell,
|
||||
env = env,
|
||||
stdin = stdin,
|
||||
stdout = stdout,
|
||||
stderr = stderr)
|
||||
|
||||
if opt.project_header:
|
||||
class sfd(object):
|
||||
def __init__(self, fd, dest):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
def fileno(self):
|
||||
return self.fd.fileno()
|
||||
|
||||
empty = True
|
||||
errbuf = ''
|
||||
|
||||
p.stdin.close()
|
||||
s_in = [sfd(p.stdout, sys.stdout),
|
||||
sfd(p.stderr, sys.stderr)]
|
||||
|
||||
for s in s_in:
|
||||
flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
while s_in:
|
||||
in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
|
||||
for s in in_ready:
|
||||
buf = s.fd.read(4096)
|
||||
if not buf:
|
||||
s.fd.close()
|
||||
s_in.remove(s)
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
if s.fd != p.stdout:
|
||||
errbuf += buf
|
||||
continue
|
||||
|
||||
if empty:
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
out.nl()
|
||||
out.project('project %s/', project.relpath)
|
||||
out.nl()
|
||||
out.flush()
|
||||
if errbuf:
|
||||
sys.stderr.write(errbuf)
|
||||
sys.stderr.flush()
|
||||
errbuf = ''
|
||||
empty = False
|
||||
|
||||
s.dest.write(buf)
|
||||
s.dest.flush()
|
||||
|
||||
r = p.wait()
|
||||
if r != 0:
|
||||
if r != rc:
|
||||
rc = r
|
||||
if opt.abort_on_errors:
|
||||
print("error: %s: Aborting due to previous error" % project.relpath,
|
||||
file=sys.stderr)
|
||||
sys.exit(r)
|
||||
pool = multiprocessing.Pool(opt.jobs)
|
||||
try:
|
||||
config = self.manifest.manifestProject.config
|
||||
results_it = pool.imap(
|
||||
DoWorkWrapper,
|
||||
([mirror, opt, cmd, shell, cnt, config, self._SerializeProject(p)]
|
||||
for cnt, p in enumerate(projects))
|
||||
)
|
||||
pool.close()
|
||||
for r in results_it:
|
||||
rc = rc or r
|
||||
if r != 0 and opt.abort_on_errors:
|
||||
raise Exception('Aborting due to previous error')
|
||||
except (KeyboardInterrupt, WorkerKeyboardInterrupt):
|
||||
# Catch KeyboardInterrupt raised inside and outside of workers
|
||||
print('Interrupted - terminating the pool')
|
||||
pool.terminate()
|
||||
rc = rc or errno.EINTR
|
||||
except Exception as e:
|
||||
# Catch any other exceptions raised
|
||||
print('Got an error, terminating the pool: %r' % e,
|
||||
file=sys.stderr)
|
||||
pool.terminate()
|
||||
rc = rc or getattr(e, 'errno', 1)
|
||||
finally:
|
||||
pool.join()
|
||||
if rc != 0:
|
||||
sys.exit(rc)
|
||||
|
||||
|
||||
class WorkerKeyboardInterrupt(Exception):
|
||||
""" Keyboard interrupt exception for worker processes. """
|
||||
pass
|
||||
|
||||
|
||||
def DoWorkWrapper(args):
|
||||
""" A wrapper around the DoWork() method.
|
||||
|
||||
Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
|
||||
``Exception``-based exception to stop it flooding the console with stacktraces
|
||||
and making the parent hang indefinitely.
|
||||
|
||||
"""
|
||||
project = args.pop()
|
||||
try:
|
||||
return DoWork(project, *args)
|
||||
except KeyboardInterrupt:
|
||||
print('%s: Worker interrupted' % project['name'])
|
||||
raise WorkerKeyboardInterrupt()
|
||||
|
||||
|
||||
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
env = os.environ.copy()
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
env[name] = val.encode()
|
||||
|
||||
setenv('REPO_PROJECT', project['name'])
|
||||
setenv('REPO_PATH', project['relpath'])
|
||||
setenv('REPO_REMOTE', project['remote_name'])
|
||||
setenv('REPO_LREV', project['lrev'])
|
||||
setenv('REPO_RREV', project['rrev'])
|
||||
setenv('REPO_I', str(cnt + 1))
|
||||
for name in project['annotations']:
|
||||
setenv("REPO__%s" % (name), project['annotations'][name])
|
||||
|
||||
if mirror:
|
||||
setenv('GIT_DIR', project['gitdir'])
|
||||
cwd = project['gitdir']
|
||||
else:
|
||||
cwd = project['worktree']
|
||||
|
||||
if not os.path.exists(cwd):
|
||||
if (opt.project_header and opt.verbose) \
|
||||
or not opt.project_header:
|
||||
print('skipping %s/' % project['relpath'], file=sys.stderr)
|
||||
return
|
||||
|
||||
if opt.project_header:
|
||||
stdin = subprocess.PIPE
|
||||
stdout = subprocess.PIPE
|
||||
stderr = subprocess.PIPE
|
||||
else:
|
||||
stdin = None
|
||||
stdout = None
|
||||
stderr = None
|
||||
|
||||
p = subprocess.Popen(cmd,
|
||||
cwd=cwd,
|
||||
shell=shell,
|
||||
env=env,
|
||||
stdin=stdin,
|
||||
stdout=stdout,
|
||||
stderr=stderr)
|
||||
|
||||
if opt.project_header:
|
||||
out = ForallColoring(config)
|
||||
out.redirect(sys.stdout)
|
||||
class sfd(object):
|
||||
def __init__(self, fd, dest):
|
||||
self.fd = fd
|
||||
self.dest = dest
|
||||
def fileno(self):
|
||||
return self.fd.fileno()
|
||||
|
||||
empty = True
|
||||
errbuf = ''
|
||||
|
||||
p.stdin.close()
|
||||
s_in = [sfd(p.stdout, sys.stdout),
|
||||
sfd(p.stderr, sys.stderr)]
|
||||
|
||||
for s in s_in:
|
||||
flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
while s_in:
|
||||
in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
|
||||
for s in in_ready:
|
||||
buf = s.fd.read(4096)
|
||||
if not buf:
|
||||
s.fd.close()
|
||||
s_in.remove(s)
|
||||
continue
|
||||
|
||||
if not opt.verbose:
|
||||
if s.fd != p.stdout:
|
||||
errbuf += buf
|
||||
continue
|
||||
|
||||
if empty and out:
|
||||
if not cnt == 0:
|
||||
out.nl()
|
||||
|
||||
if mirror:
|
||||
project_header_path = project['name']
|
||||
else:
|
||||
project_header_path = project['relpath']
|
||||
out.project('project %s/', project_header_path)
|
||||
out.nl()
|
||||
out.flush()
|
||||
if errbuf:
|
||||
sys.stderr.write(errbuf)
|
||||
sys.stderr.flush()
|
||||
errbuf = ''
|
||||
empty = False
|
||||
|
||||
s.dest.write(buf)
|
||||
s.dest.flush()
|
||||
|
||||
r = p.wait()
|
||||
return r
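
The REPO_COUNT and REPO_I variables introduced above are intended for simple progress output inside the per-project command. A small, hypothetical script run via `repo forall -c` might read them like this (the variable names are the ones exported by the code above; the formatting is an example only):

import os

i = os.environ.get('REPO_I', '?')
count = os.environ.get('REPO_COUNT', '?')
name = os.environ.get('REPO_PROJECT', '<unknown>')
print('[%s/%s] %s' % (i, count, name))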

@@ -34,8 +34,7 @@ Displays detailed usage information about a command.
|
||||
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The complete list of recognized repo commands are:')
|
||||
commandNames = self.commands.keys()
|
||||
commandNames.sort()
|
||||
commandNames = list(sorted(self.commands))
|
||||
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
@ -49,16 +48,15 @@ Displays detailed usage information about a command.
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print(fmt % (name, summary))
|
||||
print("See 'repo help <command>' for more information on a"
|
||||
print("See 'repo help <command>' for more information on a "
|
||||
'specific command.')
|
||||
|
||||
def _PrintCommonCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
print('The most commonly used repo commands are:')
|
||||
commandNames = [name
|
||||
for name in self.commands.keys()
|
||||
if self.commands[name].common]
|
||||
commandNames.sort()
|
||||
commandNames = list(sorted([name
|
||||
for name, command in self.commands.items()
|
||||
if command.common]))
|
||||
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
|
@ -27,7 +27,7 @@ class Info(PagedCommand):
|
||||
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||
helpUsage = "%prog [-dl] [-o [-b]] [<project>...]"
|
||||
|
||||
def _Options(self, p, show_smart=True):
|
||||
def _Options(self, p):
|
||||
p.add_option('-d', '--diff',
|
||||
dest='all', action='store_true',
|
||||
help="show full info and commit diff including remote branches")
|
||||
@ -53,7 +53,10 @@ class Info(PagedCommand):
|
||||
|
||||
self.opt = opt
|
||||
|
||||
mergeBranch = self.manifest.manifestProject.config.GetBranch("default").merge
|
||||
manifestConfig = self.manifest.manifestProject.config
|
||||
mergeBranch = manifestConfig.GetBranch("default").merge
|
||||
manifestGroups = (manifestConfig.GetString('manifest.groups')
|
||||
or 'all,-notdefault')
|
||||
|
||||
self.heading("Manifest branch: ")
|
||||
self.headtext(self.manifest.default.revisionExpr)
|
||||
@ -61,6 +64,9 @@ class Info(PagedCommand):
|
||||
self.heading("Manifest merge branch: ")
|
||||
self.headtext(mergeBranch)
|
||||
self.out.nl()
|
||||
self.heading("Manifest groups: ")
|
||||
self.headtext(manifestGroups)
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
@ -157,7 +163,7 @@ class Info(PagedCommand):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches().keys()]
|
||||
for x in project.GetBranches()]
|
||||
br = [x for x in br if x]
|
||||
if self.opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
|
subcmds/init.py
@@ -20,6 +20,15 @@ import re
import shutil
import sys

from pyversion import is_python3
if is_python3():
import urllib.parse
else:
import imp
import urlparse
urllib = imp.new_module('urllib')
urllib.parse = urlparse.urlparse

from color import Coloring
from command import InteractiveCommand, MirrorSafeCommand
from error import ManifestParseError
@@ -90,9 +99,14 @@ to update the working directory files.
g.add_option('--depth', type='int', default=None,
dest='depth',
help='create a shallow clone with given depth; see git clone')
g.add_option('--archive',
dest='archive', action='store_true',
help='checkout an archive instead of a git repository for '
'each project. See git archive.')
g.add_option('-g', '--groups',
dest='groups', default='all,-notdefault',
help='restrict manifest projects to ones with a specified group',
dest='groups', default='default',
help='restrict manifest projects to ones with specified '
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
metavar='GROUP')
g.add_option('-p', '--platform',
dest='platform', default='auto',
@@ -134,7 +148,19 @@ to update the working directory files.
if not opt.quiet:
print('Get %s' % GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),
file=sys.stderr)
m._InitGitDir()

# The manifest project object doesn't keep track of the path on the
# server where this git is located, so let's save that here.
mirrored_manifest_git = None
if opt.reference:
manifest_git_path = urllib.parse(opt.manifest_url).path[1:]
mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
if not mirrored_manifest_git.endswith(".git"):
mirrored_manifest_git += ".git"
if not os.path.exists(mirrored_manifest_git):
mirrored_manifest_git = os.path.join(opt.reference + '/.repo/manifests.git')

m._InitGitDir(mirror_git=mirrored_manifest_git)

if opt.manifest_branch:
m.revisionExpr = opt.manifest_branch
@@ -169,13 +195,23 @@ to update the working directory files.

groups = [x for x in groups if x]
groupstr = ','.join(groups)
if opt.platform == 'auto' and groupstr == 'all,-notdefault,platform-' + platform.system().lower():
if opt.platform == 'auto' and groupstr == 'default,platform-' + platform.system().lower():
groupstr = None
m.config.SetString('manifest.groups', groupstr)

if opt.reference:
m.config.SetString('repo.reference', opt.reference)

if opt.archive:
if is_new:
m.config.SetString('repo.archive', 'true')
else:
print('fatal: --archive is only supported when initializing a new '
'workspace.', file=sys.stderr)
print('Either delete the .repo folder in this workspace, or initialize '
'in another location.', file=sys.stderr)
sys.exit(1)

if opt.mirror:
if is_new:
m.config.SetString('repo.mirror', 'true')
@@ -197,7 +233,7 @@ to update the working directory files.
sys.exit(1)

if opt.manifest_branch:
m.MetaBranchSwitch(opt.manifest_branch)
m.MetaBranchSwitch()

syncbuf = SyncBuffer(m.config)
m.Sync_LocalHalf(syncbuf)
@@ -344,6 +380,13 @@ to update the working directory files.
if opt.reference:
opt.reference = os.path.expanduser(opt.reference)

# Check this here, else manifest will be tagged "not new" and init won't be
# possible anymore without removing the .repo/manifests directory.
if opt.archive and opt.mirror:
print('fatal: --mirror and --archive cannot be used together.',
file=sys.stderr)
sys.exit(1)

self._SyncManifest(opt)
self._LinkManifest(opt.manifest_name)
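Note: as the hunks above show, `repo init` now records only a `default` group string instead of `all,-notdefault`. If the old behaviour is wanted, it can still be requested explicitly, for example `repo init -u <manifest-url> -g all,-notdefault` (illustrative invocation; the manifest URL placeholder is whatever is normally passed to `-u`).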
subcmds/list.py
@@ -14,7 +14,7 @@
# limitations under the License.

from __future__ import print_function
import re
import sys

from command import Command, MirrorSafeCommand

@@ -31,13 +31,19 @@ List all projects; pass '.' to list the project for the cwd.
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
"""

def _Options(self, p, show_smart=True):
def _Options(self, p):
p.add_option('-r', '--regex',
dest='regex', action='store_true',
help="Filter the project list based on regex or wildcard matching of strings")
p.add_option('-f', '--fullpath',
dest='fullpath', action='store_true',
help="Display the full work tree path instead of the relative path")
p.add_option('-n', '--name-only',
dest='name_only', action='store_true',
help="Display only the name of the repository")
p.add_option('-p', '--path-only',
dest='path_only', action='store_true',
help="Display only the path of the repository")

def Execute(self, opt, args):
"""List all projects and the associated directories.
@@ -50,6 +56,11 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
opt: The options.
args: Positional args. Can be a list of projects to list, or empty.
"""

if opt.fullpath and opt.name_only:
print('error: cannot combine -f and -n', file=sys.stderr)
sys.exit(1)

if not opt.regex:
projects = self.GetProjects(args)
else:
@@ -62,18 +73,12 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.

lines = []
for project in projects:
lines.append("%s : %s" % (_getpath(project), project.name))
if opt.name_only and not opt.path_only:
lines.append("%s" % ( project.name))
elif opt.path_only and not opt.name_only:
lines.append("%s" % (_getpath(project)))
else:
lines.append("%s : %s" % (_getpath(project), project.name))

lines.sort()
print('\n'.join(lines))

def FindProjects(self, args):
result = []
for project in self.GetProjects(''):
for arg in args:
pattern = re.compile(r'%s' % arg, re.IGNORECASE)
if pattern.search(project.name) or pattern.search(project.relpath):
result.append(project)
break
result.sort(key=lambda project: project.relpath)
return result
subcmds/overview.py
@@ -42,7 +42,7 @@ are displayed.
all_branches = []
for project in self.GetProjects(args):
br = [project.GetUploadableBranch(x)
for x in project.GetBranches().keys()]
for x in project.GetBranches()]
br = [x for x in br if x]
if opt.current_branch:
br = [x for x in br if x.name == project.CurrentBranch]
subcmds/rebase.py
@@ -62,13 +62,16 @@ branch but need to incorporate new upstream changes "underneath" them.
if opt.interactive and not one_project:
print('error: interactive rebase not supported with multiple projects',
file=sys.stderr)
if len(args) == 1:
print('note: project %s is mapped to more than one path' % (args[0],),
file=sys.stderr)
return -1

for project in all_projects:
cb = project.CurrentBranch
if not cb:
if one_project:
print("error: project %s has a detatched HEAD" % project.relpath,
print("error: project %s has a detached HEAD" % project.relpath,
file=sys.stderr)
return -1
# ignore branches with detatched HEADs
subcmds/stage.py
@@ -49,7 +49,7 @@ The '%prog' command stages files to prepare the next commit.
self.Usage()

def _Interactive(self, opt, args):
all_projects = filter(lambda x: x.IsDirty(), self.GetProjects(args))
all_projects = [p for p in self.GetProjects(args) if p.IsDirty()]
if not all_projects:
print('no projects have uncommitted modifications', file=sys.stderr)
return
@@ -98,9 +98,9 @@ The '%prog' command stages files to prepare the next commit.
_AddI(all_projects[a_index - 1])
continue

p = filter(lambda x: x.name == a or x.relpath == a, all_projects)
if len(p) == 1:
_AddI(p[0])
projects = [p for p in all_projects if a in [p.name, p.relpath]]
if len(projects) == 1:
_AddI(projects[0])
continue
print('Bye.')
subcmds/start.py
@@ -59,9 +59,13 @@ revision specified in the manifest.
for project in all_projects:
pm.update()
# If the current revision is a specific SHA1 then we can't push back
# to it so substitute the manifest default revision instead.
# to it; so substitute with dest_branch if defined, or with manifest
# default revision instead.
if IsId(project.revisionExpr):
project.revisionExpr = self.manifest.default.revisionExpr
if project.dest_branch:
project.revisionExpr = project.dest_branch
else:
project.revisionExpr = self.manifest.default.revisionExpr
if not project.StartBranch(nb):
err.append(project)
pm.end()
subcmds/status.py
@@ -21,10 +21,9 @@ except ImportError:
import dummy_threading as _threading

import glob
import itertools
import os
import sys
import StringIO

from color import Coloring

@@ -91,7 +90,7 @@ the following meanings:
dest='orphans', action='store_true',
help="include objects in working directory outside of repo projects")

def _StatusHelper(self, project, clean_counter, sem, output):
def _StatusHelper(self, project, clean_counter, sem):
"""Obtains the status for a specific project.

Obtains the status for a project, redirecting the output to
@@ -105,9 +104,9 @@ the following meanings:
output: Where to output the status.
"""
try:
state = project.PrintWorkTreeStatus(output)
state = project.PrintWorkTreeStatus()
if state == 'CLEAN':
clean_counter.next()
next(clean_counter)
finally:
sem.release()

@@ -116,16 +115,16 @@ the following meanings:
status_header = ' --\t'
for item in dirs:
if not os.path.isdir(item):
outstring.write(''.join([status_header, item]))
outstring.append(''.join([status_header, item]))
continue
if item in proj_dirs:
continue
if item in proj_dirs_parents:
self._FindOrphans(glob.glob('%s/.*' % item) + \
glob.glob('%s/*' % item), \
self._FindOrphans(glob.glob('%s/.*' % item) +
glob.glob('%s/*' % item),
proj_dirs, proj_dirs_parents, outstring)
continue
outstring.write(''.join([status_header, item, '/']))
outstring.append(''.join([status_header, item, '/']))

def Execute(self, opt, args):
all_projects = self.GetProjects(args)
@@ -135,30 +134,21 @@ the following meanings:
for project in all_projects:
state = project.PrintWorkTreeStatus()
if state == 'CLEAN':
counter.next()
next(counter)
else:
sem = _threading.Semaphore(opt.jobs)
threads_and_output = []
threads = []
for project in all_projects:
sem.acquire()

class BufList(StringIO.StringIO):
def dump(self, ostream):
for entry in self.buflist:
ostream.write(entry)

output = BufList()

t = _threading.Thread(target=self._StatusHelper,
args=(project, counter, sem, output))
threads_and_output.append((t, output))
args=(project, counter, sem))
threads.append(t)
t.daemon = True
t.start()
for (t, output) in threads_and_output:
for t in threads:
t.join()
output.dump(sys.stdout)
output.close()
if len(all_projects) == counter.next():
if len(all_projects) == next(counter):
print('nothing to commit (working directory clean)')

if opt.orphans:
@@ -182,23 +172,21 @@ the following meanings:
try:
os.chdir(self.manifest.topdir)

outstring = StringIO.StringIO()
self._FindOrphans(glob.glob('.*') + \
glob.glob('*'), \
outstring = []
self._FindOrphans(glob.glob('.*') +
glob.glob('*'),
proj_dirs, proj_dirs_parents, outstring)

if outstring.buflist:
if outstring:
output = StatusColoring(self.manifest.globalConfig)
output.project('Objects not within a project (orphans)')
output.nl()
for entry in outstring.buflist:
for entry in outstring:
output.untracked(entry)
output.nl()
else:
print('No orphan files or directories')

outstring.close()

finally:
# Restore CWD.
os.chdir(orig_path)
subcmds/sync.py (234 lines changed)
@@ -14,18 +14,29 @@
# limitations under the License.

from __future__ import print_function
import json
import netrc
from optparse import SUPPRESS_HELP
import os
import pickle
import re
import shutil
import socket
import subprocess
import sys
import time
import urlparse
import xmlrpclib

from pyversion import is_python3
if is_python3():
import urllib.parse
import xmlrpc.client
else:
import imp
import urlparse
import xmlrpclib
urllib = imp.new_module('urllib')
urllib.parse = urlparse
xmlrpc = imp.new_module('xmlrpc')
xmlrpc.client = xmlrpclib

try:
import threading as _threading
@@ -47,13 +58,13 @@ except ImportError:

from git_command import GIT, git_require
from git_refs import R_HEADS, HEAD
from main import WrapperModule
from project import Project
from project import RemoteSpec
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError, ManifestParseError
from project import SyncBuffer
from progress import Progress
from wrapper import Wrapper

_ONE_DAY_S = 24 * 60 * 60

@@ -117,6 +128,9 @@ HTTP client or proxy configuration, but the Git binary works.
The --fetch-submodules option enables fetching Git submodules
of a project from server.

The -c/--current-branch option can be used to only fetch objects that
are on the branch specified by a project's revision.

SSH Connections
---------------
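Note on the -c/--current-branch text added in the hunk above: in practice an invocation such as `repo sync -c` fetches only each project's manifest revision rather than all remote heads, which can reduce download size (illustrative usage note, not part of the diff itself).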
@@ -208,9 +222,25 @@ later is required to fix a server side protocol bug.
dest='repo_upgraded', action='store_true',
help=SUPPRESS_HELP)

def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
def _FetchProjectList(self, opt, projects, *args, **kwargs):
"""Main function of the fetch threads when jobs are > 1.

Delegates most of the work to _FetchHelper.

Args:
opt: Program options returned from optparse. See _Options().
projects: Projects to fetch.
*args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
_FetchHelper docstring for details.
"""
for project in projects:
success = self._FetchHelper(opt, project, *args, **kwargs)
if not success and not opt.force_broken:
break

def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
"""Fetch git objects for a single project.

Args:
opt: Program options returned from optparse. See _Options().
project: Project object for the project to fetch.
@@ -224,10 +254,16 @@ later is required to fix a server side protocol bug.
can be started up.
err_event: We'll set this event in the case of an error (after printing
out info about the error).

Returns:
Whether the fetch was successful.
"""
# We'll set to true once we've locked the lock.
did_lock = False

if not opt.quiet:
print('Fetching project %s' % project.name)

# Encapsulate everything in a try/except/finally so that:
# - We always set err_event in the case of an exception.
# - We always make sure we call sem.release().
@@ -239,7 +275,7 @@ later is required to fix a server side protocol bug.
quiet=opt.quiet,
current_branch_only=opt.current_branch_only,
clone_bundle=not opt.no_clone_bundle,
no_tags=opt.no_tags)
no_tags=opt.no_tags, archive=self.manifest.IsArchive)
self._fetch_times.Set(project, time.time() - start)

# Lock around all the rest of the code, since printing, updating a set
@@ -267,65 +303,65 @@ later is required to fix a server side protocol bug.
lock.release()
sem.release()

return success

def _Fetch(self, projects, opt):
fetched = set()
lock = _threading.Lock()
pm = Progress('Fetching projects', len(projects))

if self.jobs == 1:
for project in projects:
pm.update()
if project.Sync_NetworkHalf(
quiet=opt.quiet,
current_branch_only=opt.current_branch_only,
clone_bundle=not opt.no_clone_bundle,
no_tags=opt.no_tags):
fetched.add(project.gitdir)
else:
print('error: Cannot fetch %s' % project.name, file=sys.stderr)
if opt.force_broken:
print('warn: --force-broken, continuing to sync', file=sys.stderr)
else:
sys.exit(1)
else:
threads = set()
lock = _threading.Lock()
sem = _threading.Semaphore(self.jobs)
err_event = _threading.Event()
for project in projects:
# Check for any errors before starting any new threads.
# ...we'll let existing threads finish, though.
if err_event.isSet():
break
objdir_project_map = dict()
for project in projects:
objdir_project_map.setdefault(project.objdir, []).append(project)

sem.acquire()
t = _threading.Thread(target = self._FetchHelper,
args = (opt,
project,
lock,
fetched,
pm,
sem,
err_event))
threads = set()
sem = _threading.Semaphore(self.jobs)
err_event = _threading.Event()
for project_list in objdir_project_map.values():
# Check for any errors before running any more tasks.
# ...we'll let existing threads finish, though.
if err_event.isSet() and not opt.force_broken:
break

sem.acquire()
kwargs = dict(opt=opt,
projects=project_list,
lock=lock,
fetched=fetched,
pm=pm,
sem=sem,
err_event=err_event)
if self.jobs > 1:
t = _threading.Thread(target = self._FetchProjectList,
kwargs = kwargs)
# Ensure that Ctrl-C will not freeze the repo process.
t.daemon = True
threads.add(t)
t.start()
else:
self._FetchProjectList(**kwargs)

for t in threads:
t.join()
for t in threads:
t.join()

# If we saw an error, exit with code 1 so that other scripts can check.
if err_event.isSet():
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
sys.exit(1)
# If we saw an error, exit with code 1 so that other scripts can check.
if err_event.isSet():
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
sys.exit(1)

pm.end()
self._fetch_times.Save()

self._GCProjects(projects)
if not self.manifest.IsArchive:
self._GCProjects(projects)

return fetched

def _GCProjects(self, projects):
gitdirs = {}
for project in projects:
gitdirs[project.gitdir] = project.bare_git

has_dash_c = git_require((1, 7, 2))
if multiprocessing and has_dash_c:
cpu_count = multiprocessing.cpu_count()
@@ -334,8 +370,8 @@ later is required to fix a server side protocol bug.
jobs = min(self.jobs, cpu_count)

if jobs < 2:
for project in projects:
project.bare_git.gc('--auto')
for bare_git in gitdirs.values():
bare_git.gc('--auto')
return

config = {'pack.threads': cpu_count / jobs if cpu_count > jobs else 1}
@@ -344,10 +380,10 @@ later is required to fix a server side protocol bug.
sem = _threading.Semaphore(jobs)
err_event = _threading.Event()

def GC(project):
def GC(bare_git):
try:
try:
project.bare_git.gc('--auto', config=config)
bare_git.gc('--auto', config=config)
except GitError:
err_event.set()
except:
@@ -356,11 +392,11 @@ later is required to fix a server side protocol bug.
finally:
sem.release()

for project in projects:
for bare_git in gitdirs.values():
if err_event.isSet():
break
sem.acquire()
t = _threading.Thread(target=GC, args=(project,))
t = _threading.Thread(target=GC, args=(bare_git,))
t.daemon = True
threads.add(t)
t.start()
@@ -372,6 +408,13 @@ later is required to fix a server side protocol bug.
print('\nerror: Exited sync due to gc errors', file=sys.stderr)
sys.exit(1)

def _ReloadManifest(self, manifest_name=None):
if manifest_name:
# Override calls _Unload already
self.manifest.Override(manifest_name)
else:
self.manifest._Unload()

def UpdateProjectList(self):
new_project_paths = []
for project in self.GetProjects(None, missing_ok=True):
@@ -393,12 +436,13 @@ later is required to fix a server side protocol bug.
if path not in new_project_paths:
# If the path has already been deleted, we don't need to do it
if os.path.exists(self.manifest.topdir + '/' + path):
gitdir = os.path.join(self.manifest.topdir, path, '.git')
project = Project(
manifest = self.manifest,
name = path,
remote = RemoteSpec('origin'),
gitdir = os.path.join(self.manifest.topdir,
path, '.git'),
gitdir = gitdir,
objdir = gitdir,
worktree = os.path.join(self.manifest.topdir, path),
relpath = path,
revisionExpr = 'HEAD',
@@ -406,7 +450,7 @@ later is required to fix a server side protocol bug.
groups = None)
if project.IsDirty():
print('error: Cannot remove project "%s": uncommitted changes'
print('error: Cannot remove project "%s": uncommitted changes '
'are present' % project.relpath, file=sys.stderr)
print(' commit changes, then run sync again',
file=sys.stderr)
@@ -464,13 +508,17 @@ later is required to fix a server side protocol bug.
if opt.manifest_name:
self.manifest.Override(opt.manifest_name)

manifest_name = opt.manifest_name

if opt.smart_sync or opt.smart_tag:
if not self.manifest.manifest_server:
print('error: cannot smart sync: no manifest server defined in'
print('error: cannot smart sync: no manifest server defined in '
'manifest', file=sys.stderr)
sys.exit(1)

manifest_server = self.manifest.manifest_server
if not opt.quiet:
print('Using manifest server %s' % manifest_server)

if not '@' in manifest_server:
username = None
@@ -486,7 +534,7 @@ later is required to fix a server side protocol bug.
file=sys.stderr)
else:
try:
parse_result = urlparse.urlparse(manifest_server)
parse_result = urllib.parse.urlparse(manifest_server)
if parse_result.hostname:
username, _account, password = \
info.authenticators(parse_result.hostname)
@@ -504,7 +552,7 @@ later is required to fix a server side protocol bug.
1)

try:
server = xmlrpclib.Server(manifest_server)
server = xmlrpc.client.Server(manifest_server)
if opt.smart_sync:
p = self.manifest.manifestProject
b = p.GetBranch(p.CurrentBranch)
@@ -513,8 +561,10 @@ later is required to fix a server side protocol bug.
branch = branch[len(R_HEADS):]

env = os.environ.copy()
if (env.has_key('TARGET_PRODUCT') and
env.has_key('TARGET_BUILD_VARIANT')):
if 'SYNC_TARGET' in env:
target = env['SYNC_TARGET']
[success, manifest_str] = server.GetApprovedManifest(branch, target)
elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
target = '%s-%s' % (env['TARGET_PRODUCT'],
env['TARGET_BUILD_VARIANT'])
[success, manifest_str] = server.GetApprovedManifest(branch, target)
@@ -538,15 +588,16 @@ later is required to fix a server side protocol bug.
print('error: cannot write manifest to %s' % manifest_path,
file=sys.stderr)
sys.exit(1)
self.manifest.Override(manifest_name)
self._ReloadManifest(manifest_name)
else:
print('error: %s' % manifest_str, file=sys.stderr)
print('error: manifest server RPC call failed: %s' %
manifest_str, file=sys.stderr)
sys.exit(1)
except (socket.error, IOError, xmlrpclib.Fault) as e:
except (socket.error, IOError, xmlrpc.client.Fault) as e:
print('error: cannot connect to manifest server %s:\n%s'
% (self.manifest.manifest_server, e), file=sys.stderr)
sys.exit(1)
except xmlrpclib.ProtocolError as e:
except xmlrpc.client.ProtocolError as e:
print('error: cannot connect to manifest server %s:\n%d %s'
% (self.manifest.manifest_server, e.errcode, e.errmsg),
file=sys.stderr)
@@ -571,7 +622,7 @@ later is required to fix a server side protocol bug.
mp.Sync_LocalHalf(syncbuf)
if not syncbuf.Finish():
sys.exit(1)
self.manifest._Unload()
self._ReloadManifest(manifest_name)
if opt.jobs is None:
self.jobs = self.manifest.default.sync_j
all_projects = self.GetProjects(args,
@@ -596,7 +647,7 @@ later is required to fix a server side protocol bug.
# Iteratively fetch missing and/or nested unregistered submodules
previously_missing_set = set()
while True:
self.manifest._Unload()
self._ReloadManifest(manifest_name)
all_projects = self.GetProjects(args,
missing_ok=True,
submodules_ok=opt.fetch_submodules)
@@ -614,7 +665,7 @@ later is required to fix a server side protocol bug.
previously_missing_set = missing_set
fetched.update(self._Fetch(missing, opt))

if self.manifest.IsMirror:
if self.manifest.IsMirror or self.manifest.IsArchive:
# bail out now, we have no working tree
return

@@ -639,10 +690,10 @@ later is required to fix a server side protocol bug.
print(self.manifest.notice)

def _PostRepoUpgrade(manifest, quiet=False):
wrapper = WrapperModule()
wrapper = Wrapper()
if wrapper.NeedSetupGnuPG():
wrapper.SetupGnuPG(quiet)
for project in manifest.projects.values():
for project in manifest.projects:
if project.Exists:
project.PostRepoUpgrade()
@@ -715,7 +766,7 @@ class _FetchTimes(object):
_ALPHA = 0.5

def __init__(self, manifest):
self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
self._times = None
self._seen = set()

@@ -735,21 +786,16 @@ class _FetchTimes(object):
if self._times is None:
try:
f = open(self._path)
except IOError:
self._times = {}
return self._times
try:
try:
self._times = pickle.load(f)
except IOError:
try:
os.remove(self._path)
except OSError:
pass
self._times = {}
finally:
f.close()
return self._times
self._times = json.load(f)
finally:
f.close()
except (IOError, ValueError):
try:
os.remove(self._path)
except OSError:
pass
self._times = {}

def Save(self):
if self._times is None:
@@ -763,13 +809,13 @@ class _FetchTimes(object):
del self._times[name]

try:
f = open(self._path, 'wb')
f = open(self._path, 'w')
try:
pickle.dump(self._times, f)
except (IOError, OSError, pickle.PickleError):
try:
os.remove(self._path)
except OSError:
pass
finally:
f.close()
json.dump(self._times, f, indent=2)
finally:
f.close()
except (IOError, TypeError):
try:
os.remove(self._path)
except OSError:
pass
subcmds/upload.py
@@ -21,19 +21,28 @@ import sys
from command import InteractiveCommand
from editor import Editor
from error import HookError, UploadError
from git_command import GitCommand
from project import RepoHook

from pyversion import is_python3
# pylint:disable=W0622
if not is_python3():
input = raw_input
else:
unicode = str
# pylint:enable=W0622

UNUSUAL_COMMIT_THRESHOLD = 5

def _ConfirmManyUploads(multiple_branches=False):
if multiple_branches:
print('ATTENTION: One or more branches has an unusually high number'
print('ATTENTION: One or more branches has an unusually high number '
'of commits.')
else:
print('ATTENTION: You are uploading an unusually high number of commits.')
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across'
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
'branches?)')
answer = raw_input("If you are sure you intend to do this, type 'yes': ").strip()
answer = input("If you are sure you intend to do this, type 'yes': ").strip()
return answer == "yes"

def _die(fmt, *args):
@@ -82,6 +91,11 @@ to "true" then repo will assume you always answer "y" at the prompt,
and will not prompt you further. If it is set to "false" then repo
will assume you always answer "n", and will abort.

review.URL.autoreviewer:

To automatically append a user or mailing list to reviews, you can set
a per-project or global Git option to do so.

review.URL.autocopy:

To automatically copy a user or mailing list to all uploaded reviews,
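Note on the autoreviewer/autocopy keys documented above: the URL component of the key is the project's review URL (see the `'review.%s.autoreviewer' % ...remote.review` code later in this diff), so an illustrative setting would be `git config --global review.https://gerrit.example.com/.autoreviewer "alice@example.com,bob@example.com"` (hypothetical server and addresses, shown only as an example).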
@@ -140,6 +154,10 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
p.add_option('-d', '--draft',
action='store_true', dest='draft', default=False,
help='If specified, upload as a draft.')
p.add_option('-D', '--destination', '--dest',
type='string', action='store', dest='dest_branch',
metavar='BRANCH',
help='Submit for review on this target branch.')

# Options relating to upload hook. Note that verify and no-verify are NOT
# opposites of each other, which is why they store to different locations.
@@ -179,7 +197,8 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
date = branch.date
commit_list = branch.commits

print('Upload project %s/ to remote branch %s:' % (project.relpath, project.revisionExpr))
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
print('Upload project %s/ to remote branch %s:' % (project.relpath, destination))
print(' branch %s (%2d commit%s, %s):' % (
name,
len(commit_list),
@@ -213,18 +232,21 @@ Gerrit Code Review: http://code.google.com/p/gerrit/

b = {}
for branch in avail:
if branch is None:
continue
name = branch.name
date = branch.date
commit_list = branch.commits

if b:
script.append('#')
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % (
name,
len(commit_list),
len(commit_list) != 1 and 's' or '',
date,
project.revisionExpr))
destination))
for commit in commit_list:
script.append('# %s' % commit)
b[name] = branch
@@ -278,14 +300,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/

self._UploadAndReport(opt, todo, people)

def _AppendAutoCcList(self, branch, people):
def _AppendAutoList(self, branch, people):
"""
Appends the list of reviewers in the git project's config.
Appends the list of users in the CC list in the git project's config if a
non-empty reviewer list was found.
"""

name = branch.name
project = branch.project

key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
raw_list = project.config.GetString(key)
if not raw_list is None:
people[0].extend([entry.strip() for entry in raw_list.split(',')])

key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
raw_list = project.config.GetString(key)
if not raw_list is None and len(people[0]) > 0:
@@ -308,16 +336,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
for branch in todo:
try:
people = copy.deepcopy(original_people)
self._AppendAutoCcList(branch, people)
self._AppendAutoList(branch, people)

# Check if there are local changes that may have been forgotten
if branch.project.HasChanges():
changes = branch.project.UncommitedFiles()
if changes:
key = 'review.%s.autoupload' % branch.project.remote.review
answer = branch.project.config.GetBoolean(key)

# if they want to auto upload, let's not ask because it could be automated
if answer is None:
sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
sys.stdout.write('Uncommitted changes in ' + branch.project.name)
sys.stdout.write(' (did you forget to amend?):\n')
sys.stdout.write('\n'.join(changes) + '\n')
sys.stdout.write('Continue uploading? (y/N) ')
a = sys.stdin.readline().strip().lower()
if a not in ('y', 'yes', 't', 'true', 'on'):
print("skipping upload", file=sys.stderr)
@@ -330,7 +362,22 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
key = 'review.%s.uploadtopic' % branch.project.remote.review
opt.auto_topic = branch.project.config.GetBoolean(key)

branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft)
destination = opt.dest_branch or branch.project.dest_branch

# Make sure our local branch is not setup to track a different remote branch
merge_branch = self._GetMergeBranch(branch.project)
if destination:
full_dest = 'refs/heads/%s' % destination
if not opt.dest_branch and merge_branch and merge_branch != full_dest:
print('merge branch %s does not match destination branch %s'
% (merge_branch, full_dest))
print('skipping upload.')
print('Please use `--destination %s` if this is intentional'
% destination)
branch.uploaded = False
continue

branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft, dest_branch=destination)
branch.uploaded = True
except UploadError as e:
branch.error = e
@@ -364,6 +411,21 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
if have_errors:
sys.exit(1)

def _GetMergeBranch(self, project):
p = GitCommand(project,
['rev-parse', '--abbrev-ref', 'HEAD'],
capture_stdout = True,
capture_stderr = True)
p.Wait()
local_branch = p.stdout.strip()
p = GitCommand(project,
['config', '--get', 'branch.%s.merge' % local_branch],
capture_stdout = True,
capture_stderr = True)
p.Wait()
merge_branch = p.stdout.strip()
return merge_branch

def Execute(self, opt, args):
project_list = self.GetProjects(args)
pending = []
@@ -377,7 +439,16 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
for project in project_list:
if opt.current_branch:
cbr = project.CurrentBranch
avail = [project.GetUploadableBranch(cbr)] if cbr else None
up_branch = project.GetUploadableBranch(cbr)
if up_branch:
avail = [up_branch]
else:
avail = None
print('ERROR: Current branch (%s) not uploadable. '
'You may be able to type '
'"git branch --set-upstream-to m/master" to fix '
'your branch.' % str(cbr),
file=sys.stderr)
else:
avail = project.GetUploadableBranches(branch)
if avail:
@@ -387,8 +458,10 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
self.manifest.topdir, abort_if_user_denies=True)
pending_proj_names = [project.name for (project, avail) in pending]
pending_worktrees = [project.worktree for (project, avail) in pending]
try:
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
worktree_list=pending_worktrees)
except HookError as e:
print("ERROR: %s" % str(e), file=sys.stderr)
return
wrapper.py (new file, 30 lines)
@@ -0,0 +1,30 @@
#!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import imp
import os


def WrapperPath():
return os.path.join(os.path.dirname(__file__), 'repo')

_wrapper_module = None
def Wrapper():
global _wrapper_module
if not _wrapper_module:
_wrapper_module = imp.load_source('wrapper', WrapperPath())
return _wrapper_module
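A minimal usage sketch of the new module, based only on the calls visible in the sync.py hunks above (it is not part of the diff): callers import Wrapper() instead of main.WrapperModule and use the lazily loaded launcher module the same way.

    # sketch: mirrors what _PostRepoUpgrade in subcmds/sync.py does
    from wrapper import Wrapper

    wrapper = Wrapper()            # loads the top-level 'repo' launcher once, then caches it
    if wrapper.NeedSetupGnuPG():   # same helper calls used by sync.py
        wrapper.SetupGnuPG(False)  # False == not quiet; the diff passes its 'quiet' flag here

Caching the loaded module in _wrapper_module means repeated Wrapper() calls reuse the same module object instead of re-reading the launcher script.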