Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-07-02 20:17:19 +00:00)
Compare commits
93 Commits
Commits in this comparison (SHA1 only; author and date columns were not captured):

936183a492, 85e8267031, e30f46b957, e4978cfbe3, 126e298214, 38e4387f8e,
24245e0094, db6f1b0884, f2fad61bde, ee69084421, d37d43f036, 7bdac71087,
f97e8383a3, 3000cdad22, b9d9efd394, 497bde4de5, 4abf8e6ef8, 137d0131bf,
42e679b9f6, 902665bce6, c8d882ae2a, 3eb87cec5c, 5fb8ed217c, 7e12e0a2fa,
7893b85509, b4e50e67e8, 0936aeab2c, 14e134da02, 04e52d6166, 909d58b2e2,
5cf16607d3, c190b98ed5, 4863307299, f75870beac, bf0b0cbc2f, 3a10968a70,
c46de6932a, 303a82f33a, 7a91d51dcf, a8d539189e, 588142dfcb, a6d258b84d,
a769498568, 884a387eca, 80b87fe6c1, e9f75b1782, a35e402161, dd7aea6c11,
5196805fa2, 85b24acd6a, 36ea2fb6ee, 2cd1f0452e, 65e3a78a9e, d792f7928d,
6efdde9f6e, 7446c5954a, d58bfe5a58, 70f6890352, 666d534636, f2af756425,
544e7b0a97, e0df232da7, 5a7c3afa73, 9bc422f130, e81bc030bb, eb5acc9ae9,
26c45a7958, 68425f4da8, 53e902a19b, 4e4d40f7c0, 093fdb6587, 2fb6466f79,
724aafb52d, ccd218cd8f, dd6542268a, baca5f7e88, 89ece429fb, 565480588d,
1829101e28, 1966133f8e, f1027e23b4, 2cd38a0bf8, 1b46cc9b6d, 1242e60bdd,
2d0f508648, 143d8a7249, 5db69f3f66, ff0a3c8f80, 094cdbe090, 148a84de0c,
1c5da49e6c, b8433dfd2f, f2fe2d9b86
color.py (23 changes)

@@ -83,12 +83,35 @@ def _Color(fg = None, bg = None, attr = None):
   return code
 
 
+DEFAULT = None
+
+def SetDefaultColoring(state):
+  """Set coloring behavior to |state|.
+
+  This is useful for overriding config options via the command line.
+  """
+  if state is None:
+    # Leave it alone -- return quick!
+    return
+
+  global DEFAULT
+  state = state.lower()
+  if state in ('auto',):
+    DEFAULT = state
+  elif state in ('always', 'yes', 'true', True):
+    DEFAULT = 'always'
+  elif state in ('never', 'no', 'false', False):
+    DEFAULT = 'never'
+
+
 class Coloring(object):
   def __init__(self, config, section_type):
     self._section = 'color.%s' % section_type
     self._config = config
     self._out = sys.stdout
 
-    on = self._config.GetString(self._section)
-    if on is None:
-      on = self._config.GetString('color.ui')
+    on = DEFAULT
+    if on is None:
+      on = self._config.GetString(self._section)
+      if on is None:
+        on = self._config.GetString('color.ui')
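A minimal usage sketch (not part of the diff): the `--color` flag added to main.py later in this comparison forwards its value to `SetDefaultColoring()`, and that module-level default then wins over any `color.*` git configuration. The config path and section name below are illustrative.

```python
from color import Coloring, SetDefaultColoring
from git_config import GitConfig

SetDefaultColoring('never')                    # what `repo --color=never` now does
cfg = GitConfig(configfile='.git/config')      # illustrative config path
out = Coloring(cfg, 'status')                  # ignores color.status / color.ui
header = out.printer('header', attr='bold')    # emits plain text, no ANSI codes
```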
Manifest format documentation:

@@ -26,15 +26,17 @@ following DTD:
                         manifest-server?,
                         remove-project*,
                         project*,
+                        extend-project*,
                         repo-hooks?)>
 
     <!ELEMENT notice (#PCDATA)>
 
-    <!ELEMENT remote (EMPTY)>
+    <!ELEMENT remote (projecthook?)>
     <!ATTLIST remote name ID #REQUIRED>
     <!ATTLIST remote alias CDATA #IMPLIED>
     <!ATTLIST remote fetch CDATA #REQUIRED>
     <!ATTLIST remote review CDATA #IMPLIED>
+    <!ATTLIST remote revision CDATA #IMPLIED>
 
     <!ELEMENT default (EMPTY)>
     <!ATTLIST default remote IDREF #IMPLIED>

@@ -66,6 +68,15 @@ following DTD:
     <!ATTLIST annotation value CDATA #REQUIRED>
     <!ATTLIST annotation keep CDATA "true">
 
+    <!ELEMENT extend-project>
+    <!ATTLIST extend-project name CDATA #REQUIRED>
+    <!ATTLIST extend-project path CDATA #IMPLIED>
+    <!ATTLIST extend-project groups CDATA #IMPLIED>
+
+    <!ELEMENT projecthook (EMPTY)>
+    <!ATTLIST projecthook name CDATA #REQUIRED>
+    <!ATTLIST projecthook revision CDATA #REQUIRED>
+
     <!ELEMENT remove-project (EMPTY)>
     <!ATTLIST remove-project name CDATA #REQUIRED>
 

@@ -112,6 +123,10 @@ Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`. This attribute is optional;
 if not specified then `repo upload` will not function.
 
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
 Element default
 ---------------
 

@@ -132,14 +147,14 @@ Project elements not setting their own `dest-branch` will inherit
 this value. If this value is not set, projects will use `revision`
 by default instead.
 
-Attribute `sync_j`: Number of parallel jobs to use when synching.
+Attribute `sync-j`: Number of parallel jobs to use when synching.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
-whole ref space. Project elements lacking a sync_c element of
+whole ref space. Project elements lacking a sync-c element of
 their own will use this value.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
 
 Element manifest-server

@@ -208,7 +223,8 @@ to track for this project. Names can be relative to refs/heads
 (e.g. just "master") or absolute (e.g. "refs/heads/master").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
 
 Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
 When using `repo upload`, changes will be submitted for code

@@ -226,13 +242,13 @@ group "notdefault", it will not be automatically downloaded by repo.
 If the project has a parent element, the `name` and `path` here
 are the prefixed ones.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
 whole ref space.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
-Attribute `upstream`: Name of the Git branch in which a sha1
+Attribute `upstream`: Name of the Git ref in which a sha1
 can be found. Used when syncing a revision locked manifest in
 -c mode to avoid having to sync the entire ref space.
 

@@ -246,6 +262,22 @@ rather than the `name` attribute. This attribute only applies to the
 local mirrors syncing, it will be ignored when syncing the projects in a
 client working directory.
 
+Element extend-project
+----------------------
+
+Modify the attributes of the named project.
+
+This element is mostly useful in a local manifest file, to modify the
+attributes of an existing project without completely replacing the
+existing project definition. This makes the local manifest more robust
+against changes to the original manifest.
+
+Attribute `path`: If specified, limit the change to projects checked out
+at the specified path, rather than all projects with the given name.
+
+Attribute `groups`: List of additional groups to which this project
+belongs. Same syntax as the corresponding element of `project`.
+
 Element annotation
 ------------------
 

@@ -278,6 +310,15 @@ target manifest to include - it must be a usable manifest on its own.
 Attribute `name`: the manifest to include, specified relative to
 the manifest repository's root.
 
+Element projecthook
+-------------------
+
+This element is used to define a per-remote hook git that is
+fetched and applied to all projects using the remote. The project-
+hook functionality allows for company/team .git/hooks to be used.
+The hooks in the supplied project and revision are supplemented to
+the current repo stock hooks for each project. Supplemented hooks
+overrule any stock hooks.
+
 Local Manifests
 ===============
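The revision-selection order these documentation hunks describe (the project's own `revision`, then the remote's `revision`, then the default element) matches the parsing change to manifest_xml.py later in this comparison. A small illustrative sketch of that precedence, with made-up values:

```python
# Illustrative only; repo resolves this inside XmlManifest._ParseProject.
def pick_revision(project_rev, remote_rev, default_rev):
  """Return the revision a project syncs to, per the documented precedence."""
  return project_rev or remote_rev or default_rev

assert pick_revision('v1.0', 'refs/heads/stable', 'master') == 'v1.0'
assert pick_revision(None, 'refs/heads/stable', 'master') == 'refs/heads/stable'
assert pick_revision(None, None, 'master') == 'master'
```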
error.py (7 changes)

@@ -24,6 +24,13 @@ class ManifestInvalidRevisionError(Exception):
 class NoManifestException(Exception):
   """The required manifest does not exist.
   """
+  def __init__(self, path, reason):
+    super(NoManifestException, self).__init__()
+    self.path = path
+    self.reason = reason
+
+  def __str__(self):
+    return self.reason
 
 class EditorError(Exception):
   """Unspecified error from the user's text editor.
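A short sketch of what the new constructor and `__str__` enable: callers can now print the exception directly and get the underlying reason (the error handling added to main.py further down relies on this). The path and message below are made up.

```python
from error import NoManifestException

try:
  raise NoManifestException('.repo/manifest.xml', 'manifest required for this command')
except NoManifestException as e:
  print('error: %s' % e)             # prints the reason, via __str__
  print('error: path: %s' % e.path)  # the offending manifest path
```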
Git version handling (`_GitCall`):

@@ -21,6 +21,7 @@ import tempfile
 from signal import SIGTERM
 from error import GitError
 from trace import REPO_TRACE, IsTrace, Trace
+from wrapper import Wrapper
 
 GIT = 'git'
 MIN_GIT_VERSION = (1, 5, 4)

@@ -79,20 +80,15 @@ class _GitCall(object):
   def version(self):
     p = GitCommand(None, ['--version'], capture_stdout=True)
     if p.Wait() == 0:
-      return p.stdout
+      return p.stdout.decode('utf-8')
     return None
 
   def version_tuple(self):
     global _git_version
 
     if _git_version is None:
-      ver_str = git.version().decode('utf-8')
-      if ver_str.startswith('git version '):
-        _git_version = tuple(
-            map(int,
-              ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
-            ))
-      else:
+      ver_str = git.version()
+      _git_version = Wrapper().ParseGitVersion(ver_str)
+      if _git_version is None:
         print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
         sys.exit(1)
     return _git_version
Git configuration handling (`GitConfig`, `Remote`, `Branch`):

@@ -15,8 +15,8 @@
 
 from __future__ import print_function
 
+import json
 import os
-import pickle
 import re
 import subprocess
 import sys

@@ -80,7 +80,7 @@ class GitConfig(object):
     return cls(configfile = os.path.join(gitdir, 'config'),
                defaults = defaults)
 
-  def __init__(self, configfile, defaults=None, pickleFile=None):
+  def __init__(self, configfile, defaults=None, jsonFile=None):
     self.file = configfile
     self.defaults = defaults
     self._cache_dict = None

@@ -88,12 +88,11 @@ class GitConfig(object):
     self._remotes = {}
     self._branches = {}
 
-    if pickleFile is None:
-      self._pickle = os.path.join(
+    self._json = jsonFile
+    if self._json is None:
+      self._json = os.path.join(
         os.path.dirname(self.file),
-        '.repopickle_' + os.path.basename(self.file))
-    else:
-      self._pickle = pickleFile
+        '.repo_' + os.path.basename(self.file) + '.json')
 
   def Has(self, name, include_defaults = True):
     """Return true if this configuration file has the key.

@@ -217,7 +216,7 @@ class GitConfig(object):
     """Resolve any url.*.insteadof references.
     """
     for new_url in self.GetSubSections('url'):
-      old_url = self.GetString('url.%s.insteadof' % new_url)
+      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
         if old_url is not None and url.startswith(old_url):
           return new_url + url[len(old_url):]
     return url

@@ -248,50 +247,41 @@ class GitConfig(object):
     return self._cache_dict
 
   def _Read(self):
-    d = self._ReadPickle()
+    d = self._ReadJson()
     if d is None:
       d = self._ReadGit()
-      self._SavePickle(d)
+      self._SaveJson(d)
     return d
 
-  def _ReadPickle(self):
+  def _ReadJson(self):
     try:
-      if os.path.getmtime(self._pickle) \
+      if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._pickle)
+        os.remove(self._json)
         return None
     except OSError:
       return None
     try:
-      Trace(': unpickle %s', self.file)
-      fd = open(self._pickle, 'rb')
+      Trace(': parsing %s', self.file)
+      fd = open(self._json)
       try:
-        return pickle.load(fd)
+        return json.load(fd)
       finally:
         fd.close()
-    except EOFError:
-      os.remove(self._pickle)
-      return None
-    except IOError:
-      os.remove(self._pickle)
-      return None
-    except pickle.PickleError:
-      os.remove(self._pickle)
+    except (IOError, ValueError):
+      os.remove(self._json)
       return None
 
-  def _SavePickle(self, cache):
+  def _SaveJson(self, cache):
     try:
-      fd = open(self._pickle, 'wb')
+      fd = open(self._json, 'w')
       try:
-        pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+        json.dump(cache, fd, indent=2)
       finally:
         fd.close()
-    except IOError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
-    except pickle.PickleError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
+    except (IOError, TypeError):
+      if os.path.exists(self.json):
+        os.remove(self._json)
 
   def _ReadGit(self):
     """

@@ -576,7 +566,9 @@ class Remote(object):
       return None
 
     u = self.review
-    if not u.startswith('http:') and not u.startswith('https:'):
+    if u.startswith('persistent-'):
+      u = u[len('persistent-'):]
+    if u.split(':')[0] not in ('http', 'https', 'sso'):
       u = 'http://%s' % u
     if u.endswith('/Gerrit'):
       u = u[:len(u) - len('/Gerrit')]

@@ -592,6 +584,9 @@ class Remote(object):
       host, port = os.environ['REPO_HOST_PORT_INFO'].split()
       self._review_url = self._SshReviewUrl(userEmail, host, port)
       REVIEW_CACHE[u] = self._review_url
+    elif u.startswith('sso:'):
+      self._review_url = u  # Assume it's right
+      REVIEW_CACHE[u] = self._review_url
     else:
       try:
         info_url = u + 'ssh_info'

@@ -601,7 +596,7 @@ class Remote(object):
           # of HTML response back, like maybe a login page.
           #
           # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
-          self._review_url = http_url + 'p/'
+          self._review_url = http_url
         else:
           host, port = info.split()
           self._review_url = self._SshReviewUrl(userEmail, host, port)

@@ -624,9 +619,7 @@ class Remote(object):
   def ToLocal(self, rev):
     """Convert a remote revision string to something we have locally.
     """
-    if IsId(rev):
-      return rev
-    if rev.startswith(R_TAGS):
+    if self.name == '.' or IsId(rev):
       return rev
 
     if not rev.startswith('refs/'):

@@ -635,6 +628,10 @@ class Remote(object):
     for spec in self.fetch:
       if spec.SourceMatches(rev):
         return spec.MapSource(rev)
+
+    if not rev.startswith(R_HEADS):
+      return rev
+
     raise GitError('remote %s does not have %s' % (self.name, rev))
 
   def WritesTo(self, ref):

@@ -704,7 +701,7 @@ class Branch(object):
       self._Set('merge', self.merge)
 
     else:
-      fd = open(self._config.file, 'ab')
+      fd = open(self._config.file, 'a')
       try:
         fd.write('[branch "%s"]\n' % self.name)
         if self.remote:
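The pickle-to-JSON switch keeps the same caching discipline: the sidecar cache file is only trusted while it is newer than the git config it mirrors, and any unreadable or unserializable cache is silently discarded and rebuilt from `git config`. A standalone sketch of that pattern (function and file names are illustrative, not repo's):

```python
import json
import os

def read_cache(cache_path, source_path):
  """Return the cached dict, or None if the cache is stale or unreadable."""
  try:
    if os.path.getmtime(cache_path) <= os.path.getmtime(source_path):
      os.remove(cache_path)        # stale: the source changed after the cache was written
      return None
  except OSError:
    return None                    # cache (or source) missing
  try:
    with open(cache_path) as fd:
      return json.load(fd)
  except (IOError, ValueError):    # unreadable file or corrupt JSON
    os.remove(cache_path)
    return None

def save_cache(cache_path, data):
  try:
    with open(cache_path, 'w') as fd:
      json.dump(data, fd, indent=2)
  except (IOError, TypeError):     # unwritable, or data not JSON-serializable
    if os.path.exists(cache_path):
      os.remove(cache_path)
```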
Gerrit commit-msg hook:

@@ -1,5 +1,4 @@
 #!/bin/sh
-# From Gerrit Code Review 2.6
 #
 # Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
 #

@@ -27,7 +26,7 @@ MSG="$1"
 #
 add_ChangeId() {
 	clean_message=`sed -e '
-		/^diff --git a\/.*/{
+		/^diff --git .*/{
 			s///
 			q
 		}

@@ -39,6 +38,11 @@ add_ChangeId() {
 		return
 	fi
 
+	if test "false" = "`git config --bool --get gerrit.createChangeId`"
+	then
+		return
+	fi
+
 	# Does Change-Id: already exist? if so, exit (no change).
 	if grep -i '^Change-Id:' "$MSG" >/dev/null
 	then

@@ -77,7 +81,7 @@ add_ChangeId() {
 		# Skip the line starting with the diff command and everything after it,
 		# up to the end of the file, assuming it is only patch data.
 		# If more than one line before the diff was empty, strip all but one.
-		/^diff --git a/ {
+		/^diff --git / {
 			blankLines = 0
 			while (getline) { }
 			next
main.py (134 changes)

@@ -31,6 +31,12 @@ else:
   urllib = imp.new_module('urllib')
   urllib.request = urllib2
 
+try:
+  import kerberos
+except ImportError:
+  kerberos = None
+
+from color import SetDefaultColoring
 from trace import SetTrace
 from git_command import git, GitCommand
 from git_config import init_ssh, close_ssh

@@ -46,6 +52,7 @@ from error import NoSuchProjectError
 from error import RepoChangedException
 from manifest_xml import XmlManifest
 from pager import RunPager
+from wrapper import WrapperPath, Wrapper
 
 from subcmds import all_commands
 

@@ -63,6 +70,9 @@ global_options.add_option('-p', '--paginate',
 global_options.add_option('--no-pager',
                           dest='no_pager', action='store_true',
                           help='disable the pager')
+global_options.add_option('--color',
+                          choices=('auto', 'always', 'never'), default=None,
+                          help='control color usage: auto, always, never')
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution')

@@ -107,6 +117,8 @@ class _Repo(object):
       print('fatal: invalid usage of --version', file=sys.stderr)
       return 1
 
+    SetDefaultColoring(gopts.color)
+
     try:
       cmd = self.commands[name]
     except KeyError:

@@ -123,8 +135,15 @@ class _Repo(object):
             file=sys.stderr)
       return 1
 
-    copts, cargs = cmd.OptionParser.parse_args(argv)
-    copts = cmd.ReadEnvironmentOptions(copts)
+    try:
+      copts, cargs = cmd.OptionParser.parse_args(argv)
+      copts = cmd.ReadEnvironmentOptions(copts)
+    except NoManifestException as e:
+      print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+        file=sys.stderr)
+      print('error: manifest missing or unreadable -- please run init',
+        file=sys.stderr)
+      return 1
 
     if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
       config = cmd.manifest.globalConfig

@@ -140,14 +159,12 @@ class _Repo(object):
       start = time.time()
       try:
         result = cmd.Execute(copts, cargs)
-      except DownloadError as e:
-        print('error: %s' % str(e), file=sys.stderr)
-        result = 1
-      except ManifestInvalidRevisionError as e:
-        print('error: %s' % str(e), file=sys.stderr)
-        result = 1
-      except NoManifestException as e:
-        print('error: manifest required for this command -- please run init',
+      except (DownloadError, ManifestInvalidRevisionError,
+          NoManifestException) as e:
+        print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+          file=sys.stderr)
+        if isinstance(e, NoManifestException):
+          print('error: manifest missing or unreadable -- please run init',
               file=sys.stderr)
         result = 1
       except NoSuchProjectError as e:

@@ -169,21 +186,10 @@ class _Repo(object):
 
     return result
 
 
 def _MyRepoPath():
   return os.path.dirname(__file__)
 
-def _MyWrapperPath():
-  return os.path.join(os.path.dirname(__file__), 'repo')
-
-_wrapper_module = None
-def WrapperModule():
-  global _wrapper_module
-  if not _wrapper_module:
-    _wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
-  return _wrapper_module
-
-def _CurrentWrapperVersion():
-  return WrapperModule().VERSION
 
 def _CheckWrapperVersion(ver, repo_path):
   if not repo_path:

@@ -193,7 +199,7 @@ def _CheckWrapperVersion(ver, repo_path):
     print('no --wrapper-version argument', file=sys.stderr)
     sys.exit(1)
 
-  exp = _CurrentWrapperVersion()
+  exp = Wrapper().VERSION
   ver = tuple(map(int, ver.split('.')))
   if len(ver) == 1:
     ver = (0, ver[0])

@@ -205,7 +211,7 @@ def _CheckWrapperVersion(ver, repo_path):
 !!! You must upgrade before you can continue:   !!!
 
         cp %s %s
-""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
+""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
     sys.exit(1)
 
   if exp > ver:

@@ -214,7 +220,7 @@ def _CheckWrapperVersion(ver, repo_path):
 ... You should upgrade soon:
 
         cp %s %s
-""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
+""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
 
 def _CheckRepoDir(repo_dir):
   if not repo_dir:

@@ -342,6 +348,86 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
       self.retried = 0
       raise
 
+class _KerberosAuthHandler(urllib.request.BaseHandler):
+  def __init__(self):
+    self.retried = 0
+    self.context = None
+    self.handler_order = urllib.request.BaseHandler.handler_order - 50
+
+  def http_error_401(self, req, fp, code, msg, headers):
+    host = req.get_host()
+    retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
+    return retry
+
+  def http_error_auth_reqed(self, auth_header, host, req, headers):
+    try:
+      spn = "HTTP@%s" % host
+      authdata = self._negotiate_get_authdata(auth_header, headers)
+
+      if self.retried > 3:
+        raise urllib.request.HTTPError(req.get_full_url(), 401,
+          "Negotiate auth failed", headers, None)
+      else:
+        self.retried += 1
+
+      neghdr = self._negotiate_get_svctk(spn, authdata)
+      if neghdr is None:
+        return None
+
+      req.add_unredirected_header('Authorization', neghdr)
+      response = self.parent.open(req)
+
+      srvauth = self._negotiate_get_authdata(auth_header, response.info())
+      if self._validate_response(srvauth):
+        return response
+    except kerberos.GSSError:
+      return None
+    except:
+      self.reset_retry_count()
+      raise
+    finally:
+      self._clean_context()
+
+  def reset_retry_count(self):
+    self.retried = 0
+
+  def _negotiate_get_authdata(self, auth_header, headers):
+    authhdr = headers.get(auth_header, None)
+    if authhdr is not None:
+      for mech_tuple in authhdr.split(","):
+        mech, __, authdata = mech_tuple.strip().partition(" ")
+        if mech.lower() == "negotiate":
+          return authdata.strip()
+    return None
+
+  def _negotiate_get_svctk(self, spn, authdata):
+    if authdata is None:
+      return None
+
+    result, self.context = kerberos.authGSSClientInit(spn)
+    if result < kerberos.AUTH_GSS_COMPLETE:
+      return None
+
+    result = kerberos.authGSSClientStep(self.context, authdata)
+    if result < kerberos.AUTH_GSS_CONTINUE:
+      return None
+
+    response = kerberos.authGSSClientResponse(self.context)
+    return "Negotiate %s" % response
+
+  def _validate_response(self, authdata):
+    if authdata is None:
+      return None
+    result = kerberos.authGSSClientStep(self.context, authdata)
+    if result == kerberos.AUTH_GSS_COMPLETE:
+      return True
+    return None
+
+  def _clean_context(self):
+    if self.context is not None:
+      kerberos.authGSSClientClean(self.context)
+      self.context = None
+
 def init_http():
   handlers = [_UserAgentHandler()]
 

@@ -358,6 +444,8 @@ def init_http():
       pass
     handlers.append(_BasicAuthHandler(mgr))
     handlers.append(_DigestAuthHandler(mgr))
+  if kerberos:
+    handlers.append(_KerberosAuthHandler())
 
   if 'http_proxy' in os.environ:
     url = os.environ['http_proxy']
manifest_xml.py (155 changes)

@@ -32,7 +32,7 @@ else:
 from git_config import GitConfig
 from git_refs import R_HEADS, HEAD
 from project import RemoteSpec, Project, MetaProject
-from error import ManifestParseError
+from error import ManifestParseError, ManifestInvalidRevisionError
 
 MANIFEST_FILE_NAME = 'manifest.xml'
 LOCAL_MANIFEST_NAME = 'local_manifest.xml'

@@ -63,13 +63,19 @@ class _XmlRemote(object):
                alias=None,
                fetch=None,
                manifestUrl=None,
-               review=None):
+               review=None,
+               revision=None,
+               projecthookName=None,
+               projecthookRevision=None):
     self.name = name
     self.fetchUrl = fetch
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
+    self.revision = revision
     self.resolvedFetchUrl = self._resolveFetchUrl()
+    self.projecthookName = projecthookName
+    self.projecthookRevision = projecthookRevision
 
   def __eq__(self, other):
     return self.__dict__ == other.__dict__

@@ -80,18 +86,24 @@ class _XmlRemote(object):
   def _resolveFetchUrl(self):
     url = self.fetchUrl.rstrip('/')
     manifestUrl = self.manifestUrl.rstrip('/')
-    p = manifestUrl.startswith('persistent-http')
-    if p:
-      manifestUrl = manifestUrl[len('persistent-'):]
-
-    # urljoin will get confused if there is no scheme in the base url
-    # ie, if manifestUrl is of the form <hostname:port>
+    # urljoin will gets confused over quite a few things.  The ones we care
+    # about here are:
+    # * no scheme in the base url, like <hostname:port>
+    # * persistent-https://
+    # * rpc://
+    # We handle this by replacing these with obscure protocols
+    # and then replacing them with the original when we are done.
+    #   gopher -> <none>
+    #   wais -> persistent-https
+    #   nntp -> rpc
     if manifestUrl.find(':') != manifestUrl.find('/') - 1:
       manifestUrl = 'gopher://' + manifestUrl
+    manifestUrl = re.sub(r'^persistent-https://', 'wais://', manifestUrl)
+    manifestUrl = re.sub(r'^rpc://', 'nntp://', manifestUrl)
     url = urllib.parse.urljoin(manifestUrl, url)
     url = re.sub(r'^gopher://', '', url)
-    if p:
-      url = 'persistent-' + url
+    url = re.sub(r'^wais://', 'persistent-https://', url)
+    url = re.sub(r'^nntp://', 'rpc://', url)
     return url
 
   def ToRemoteSpec(self, projectName):
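The comment block added to `_resolveFetchUrl` is the whole trick: `urllib.parse.urljoin` only resolves relative references against schemes it knows, so the unknown `persistent-https://` and `rpc://` prefixes are temporarily rewritten to well-known schemes and restored afterwards. A small demonstration, with a made-up host:

```python
import re
from urllib.parse import urljoin

base = 'persistent-https://example.googlesource.com/manifests'  # made-up URL
fetch = '..'   # a typical relative fetch= value

print(urljoin(base, fetch))   # -> '..'  (unknown scheme: nothing is resolved)

tmp = re.sub(r'^persistent-https://', 'wais://', base)          # scheme urljoin understands
url = re.sub(r'^wais://', 'persistent-https://', urljoin(tmp, fetch))
print(url)                    # -> 'persistent-https://example.googlesource.com/'
```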
Remaining manifest_xml.py hunks:

@@ -157,6 +169,16 @@ class XmlManifest(object):
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
       e.setAttribute('review', r.reviewUrl)
+    if r.revision is not None:
+      e.setAttribute('revision', r.revision)
+    if r.projecthookName is not None:
+      ph = doc.createElement('projecthook')
+      ph.setAttribute('name', r.projecthookName)
+      ph.setAttribute('revision', r.projecthookRevision)
+      e.appendChild(ph)
+
+  def _ParseGroups(self, groups):
+    return [x for x in re.split(r'[,\s]+', groups) if x]
 
   def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
     """Write the current manifest out to the given file descriptor.

@@ -165,7 +187,7 @@ class XmlManifest(object):
 
     groups = mp.config.GetString('manifest.groups')
     if groups:
-      groups = [x for x in re.split(r'[,\s]+', groups) if x]
+      groups = self._ParseGroups(groups)
 
     doc = xml.dom.minidom.Document()
     root = doc.createElement('manifest')

@@ -238,7 +260,8 @@ class XmlManifest(object):
       if d.remote:
         remoteName = d.remote.remoteAlias or d.remote.name
       if not d.remote or p.remote.name != remoteName:
-        e.setAttribute('remote', p.remote.name)
+        remoteName = p.remote.name
+        e.setAttribute('remote', remoteName)
       if peg_rev:
         if self.IsMirror:
           value = p.bare_git.rev_parse(p.revisionExpr + '^0')

@@ -250,8 +273,12 @@ class XmlManifest(object):
         # isn't our value, and the if the default doesn't already have that
         # covered.
           e.setAttribute('upstream', p.revisionExpr)
-      elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
-        e.setAttribute('revision', p.revisionExpr)
+      else:
+        revision = self.remotes[remoteName].revision or d.revisionExpr
+        if not revision or revision != p.revisionExpr:
+          e.setAttribute('revision', p.revisionExpr)
+        if p.upstream and p.upstream != p.revisionExpr:
+          e.setAttribute('upstream', p.upstream)
 
       for c in p.copyfiles:
         ce = doc.createElement('copyfile')

@@ -259,6 +286,12 @@ class XmlManifest(object):
         ce.setAttribute('dest', c.dest)
         e.appendChild(ce)
 
+      for l in p.linkfiles:
+        le = doc.createElement('linkfile')
+        le.setAttribute('src', l.src)
+        le.setAttribute('dest', l.dest)
+        e.appendChild(le)
+
       default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
       egroups = [g for g in p.groups if g not in default_groups]
       if egroups:

@@ -302,7 +335,7 @@ class XmlManifest(object):
   @property
   def projects(self):
     self._Load()
-    return self._paths.values()
+    return list(self._paths.values())
 
   @property
   def remotes(self):

@@ -490,6 +523,23 @@ class XmlManifest(object):
       if node.nodeName == 'project':
         project = self._ParseProject(node)
         recursively_add_projects(project)
+      if node.nodeName == 'extend-project':
+        name = self._reqatt(node, 'name')
+
+        if name not in self._projects:
+          raise ManifestParseError('extend-project element specifies non-existent '
+                                   'project: %s' % name)
+
+        path = node.getAttribute('path')
+        groups = node.getAttribute('groups')
+        if groups:
+          groups = self._ParseGroups(groups)
+
+        for p in self._projects[name]:
+          if path and p.relpath != path:
+            continue
+          if groups:
+            p.groups.extend(groups)
       if node.nodeName == 'repo-hooks':
         # Get the name of the project and the (space-separated) list of enabled.
         repo_hooks_project = self._reqatt(node, 'in-project')

@@ -519,12 +569,15 @@ class XmlManifest(object):
           self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
       if node.nodeName == 'remove-project':
         name = self._reqatt(node, 'name')
-        try:
-          del self._projects[name]
-        except KeyError:
+
+        if name not in self._projects:
           raise ManifestParseError('remove-project element specifies non-existent '
                                    'project: %s' % name)
 
+        for p in self._projects[name]:
+          del self._paths[p.relpath]
+        del self._projects[name]
+
         # If the manifest removes the hooks project, treat it as if it deleted
         # the repo-hooks element too.
         if self._repo_hooks_project and (self._repo_hooks_project.name == name):

@@ -563,10 +616,11 @@ class XmlManifest(object):
                         gitdir = gitdir,
                         objdir = gitdir,
                         worktree = None,
-                        relpath = None,
+                        relpath = name or None,
                         revisionExpr = m.revisionExpr,
                         revisionId = None)
       self._projects[project.name] = [project]
+      self._paths[project.relpath] = project
 
   def _ParseRemote(self, node):
     """

@@ -580,8 +634,17 @@ class XmlManifest(object):
     review = node.getAttribute('review')
     if review == '':
       review = None
+    revision = node.getAttribute('revision')
+    if revision == '':
+      revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review)
+    projecthookName = None
+    projecthookRevision = None
+    for n in node.childNodes:
+      if n.nodeName == 'projecthook':
+        projecthookName, projecthookRevision = self._ParseProjectHooks(n)
+        break
+    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision, projecthookName, projecthookRevision)
 
   def _ParseDefault(self, node):
     """

@@ -674,7 +737,7 @@ class XmlManifest(object):
       raise ManifestParseError("no remote for project %s within %s" %
                                (name, self.manifestFile))
 
-    revisionExpr = node.getAttribute('revision')
+    revisionExpr = node.getAttribute('revision') or remote.revision
     if not revisionExpr:
       revisionExpr = self._default.revisionExpr
     if not revisionExpr:

@@ -723,7 +786,7 @@ class XmlManifest(object):
     groups = ''
     if node.hasAttribute('groups'):
       groups = node.getAttribute('groups')
-    groups = [x for x in re.split(r'[,\s]+', groups) if x]
+    groups = self._ParseGroups(groups)
 
     if parent is None:
       relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)

@@ -759,6 +822,8 @@ class XmlManifest(object):
     for n in node.childNodes:
       if n.nodeName == 'copyfile':
         self._ParseCopyFile(project, n)
+      if n.nodeName == 'linkfile':
+        self._ParseLinkFile(project, n)
       if n.nodeName == 'annotation':
         self._ParseAnnotation(project, n)
       if n.nodeName == 'project':

@@ -808,6 +873,14 @@ class XmlManifest(object):
       # dest is relative to the top of the tree
       project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
 
+  def _ParseLinkFile(self, project, node):
+    src = self._reqatt(node, 'src')
+    dest = self._reqatt(node, 'dest')
+    if not self.IsMirror:
+      # src is project relative;
+      # dest is relative to the top of the tree
+      project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
   def _ParseAnnotation(self, project, node):
     name = self._reqatt(node, 'name')
     value = self._reqatt(node, 'value')

@@ -840,3 +913,43 @@ class XmlManifest(object):
       raise ManifestParseError("no %s in <%s> within %s" %
                                (attname, node.nodeName, self.manifestFile))
     return v
+
+  def projectsDiff(self, manifest):
+    """return the projects differences between two manifests.
+
+    The diff will be from self to given manifest.
+
+    """
+    fromProjects = self.paths
+    toProjects = manifest.paths
+
+    fromKeys = sorted(fromProjects.keys())
+    toKeys = sorted(toProjects.keys())
+
+    diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
+
+    for proj in fromKeys:
+      if not proj in toKeys:
+        diff['removed'].append(fromProjects[proj])
+      else:
+        fromProj = fromProjects[proj]
+        toProj = toProjects[proj]
+        try:
+          fromRevId = fromProj.GetCommitRevisionId()
+          toRevId = toProj.GetCommitRevisionId()
+        except ManifestInvalidRevisionError:
+          diff['unreachable'].append((fromProj, toProj))
+        else:
+          if fromRevId != toRevId:
+            diff['changed'].append((fromProj, toProj))
+        toKeys.remove(proj)
+
+    for proj in toKeys:
+      diff['added'].append(toProjects[proj])
+
+    return diff
+
+  def _ParseProjectHooks(self, node):
+    name = self._reqatt(node, 'name')
+    revision = self._reqatt(node, 'revision')
+    return name, revision
395
project.py
395
project.py
@ -13,7 +13,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
import traceback
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import filecmp
|
import filecmp
|
||||||
import os
|
import os
|
||||||
@ -26,6 +26,7 @@ import sys
|
|||||||
import tarfile
|
import tarfile
|
||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
from color import Coloring
|
from color import Coloring
|
||||||
from git_command import GitCommand, git_require
|
from git_command import GitCommand, git_require
|
||||||
@ -46,7 +47,7 @@ if not is_python3():
|
|||||||
def _lwrite(path, content):
|
def _lwrite(path, content):
|
||||||
lock = '%s.lock' % path
|
lock = '%s.lock' % path
|
||||||
|
|
||||||
fd = open(lock, 'wb')
|
fd = open(lock, 'w')
|
||||||
try:
|
try:
|
||||||
fd.write(content)
|
fd.write(content)
|
||||||
finally:
|
finally:
|
||||||
@ -68,27 +69,6 @@ def not_rev(r):
|
|||||||
def sq(r):
|
def sq(r):
|
||||||
return "'" + r.replace("'", "'\''") + "'"
|
return "'" + r.replace("'", "'\''") + "'"
|
||||||
|
|
||||||
_project_hook_list = None
|
|
||||||
def _ProjectHooks():
|
|
||||||
"""List the hooks present in the 'hooks' directory.
|
|
||||||
|
|
||||||
These hooks are project hooks and are copied to the '.git/hooks' directory
|
|
||||||
of all subprojects.
|
|
||||||
|
|
||||||
This function caches the list of hooks (based on the contents of the
|
|
||||||
'repo/hooks' directory) on the first call.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A list of absolute paths to all of the files in the hooks directory.
|
|
||||||
"""
|
|
||||||
global _project_hook_list
|
|
||||||
if _project_hook_list is None:
|
|
||||||
d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
|
|
||||||
d = os.path.join(d , 'hooks')
|
|
||||||
_project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
|
|
||||||
return _project_hook_list
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadedChange(object):
|
class DownloadedChange(object):
|
||||||
_commit_cache = None
|
_commit_cache = None
|
||||||
|
|
||||||
@ -197,13 +177,13 @@ class DiffColoring(Coloring):
|
|||||||
Coloring.__init__(self, config, 'diff')
|
Coloring.__init__(self, config, 'diff')
|
||||||
self.project = self.printer('header', attr='bold')
|
self.project = self.printer('header', attr='bold')
|
||||||
|
|
||||||
class _Annotation:
|
class _Annotation(object):
|
||||||
def __init__(self, name, value, keep):
|
def __init__(self, name, value, keep):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.value = value
|
self.value = value
|
||||||
self.keep = keep
|
self.keep = keep
|
||||||
|
|
||||||
class _CopyFile:
|
class _CopyFile(object):
|
||||||
def __init__(self, src, dest, abssrc, absdest):
|
def __init__(self, src, dest, abssrc, absdest):
|
||||||
self.src = src
|
self.src = src
|
||||||
self.dest = dest
|
self.dest = dest
|
||||||
@ -231,14 +211,40 @@ class _CopyFile:
|
|||||||
except IOError:
|
except IOError:
|
||||||
_error('Cannot copy file %s to %s', src, dest)
|
_error('Cannot copy file %s to %s', src, dest)
|
||||||
|
|
||||||
|
class _LinkFile(object):
|
||||||
|
def __init__(self, src, dest, abssrc, absdest):
|
||||||
|
self.src = src
|
||||||
|
self.dest = dest
|
||||||
|
self.abs_src = abssrc
|
||||||
|
self.abs_dest = absdest
|
||||||
|
|
||||||
|
def _Link(self):
|
||||||
|
src = self.abs_src
|
||||||
|
dest = self.abs_dest
|
||||||
|
# link file if it does not exist or is out of date
|
||||||
|
if not os.path.islink(dest) or os.readlink(dest) != src:
|
||||||
|
try:
|
||||||
|
# remove existing file first, since it might be read-only
|
||||||
|
if os.path.exists(dest):
|
||||||
|
os.remove(dest)
|
||||||
|
else:
|
||||||
|
dest_dir = os.path.dirname(dest)
|
||||||
|
if not os.path.isdir(dest_dir):
|
||||||
|
os.makedirs(dest_dir)
|
||||||
|
os.symlink(src, dest)
|
||||||
|
except IOError:
|
||||||
|
_error('Cannot link file %s to %s', src, dest)
|
||||||
|
|
||||||
class RemoteSpec(object):
|
class RemoteSpec(object):
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
name,
|
name,
|
||||||
url=None,
|
url=None,
|
||||||
review = None):
|
review=None,
|
||||||
|
revision=None):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.url = url
|
self.url = url
|
||||||
self.review = review
|
self.review = review
|
||||||
|
self.revision = revision
|
||||||
|
|
||||||
class RepoHook(object):
|
class RepoHook(object):
|
||||||
"""A RepoHook contains information about a script to run as a hook.
|
"""A RepoHook contains information about a script to run as a hook.
|
||||||
@ -414,7 +420,8 @@ class RepoHook(object):
|
|||||||
# and convert to a HookError w/ just the failing traceback.
|
# and convert to a HookError w/ just the failing traceback.
|
||||||
context = {}
|
context = {}
|
||||||
try:
|
try:
|
||||||
execfile(self._script_fullpath, context)
|
exec(compile(open(self._script_fullpath).read(),
|
||||||
|
self._script_fullpath, 'exec'), context)
|
||||||
except Exception:
|
except Exception:
|
||||||
raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
|
raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
|
||||||
traceback.format_exc(), self._hook_type))
|
traceback.format_exc(), self._hook_type))
|
||||||
@ -555,6 +562,7 @@ class Project(object):
|
|||||||
|
|
||||||
self.snapshots = {}
|
self.snapshots = {}
|
||||||
self.copyfiles = []
|
self.copyfiles = []
|
||||||
|
self.linkfiles = []
|
||||||
self.annotations = []
|
self.annotations = []
|
||||||
self.config = GitConfig.ForRepository(
|
self.config = GitConfig.ForRepository(
|
||||||
gitdir=self.gitdir,
|
gitdir=self.gitdir,
|
||||||
@ -708,26 +716,48 @@ class Project(object):
|
|||||||
return matched
|
return matched
|
||||||
|
|
||||||
## Status Display ##
|
## Status Display ##
|
||||||
|
def UncommitedFiles(self, get_all=True):
|
||||||
|
"""Returns a list of strings, uncommitted files in the git tree.
|
||||||
|
|
||||||
def HasChanges(self):
|
Args:
|
||||||
"""Returns true if there are uncommitted changes.
|
get_all: a boolean, if True - get information about all different
|
||||||
|
uncommitted files. If False - return as soon as any kind of
|
||||||
|
uncommitted files is detected.
|
||||||
"""
|
"""
|
||||||
|
details = []
|
||||||
self.work_git.update_index('-q',
|
self.work_git.update_index('-q',
|
||||||
'--unmerged',
|
'--unmerged',
|
||||||
'--ignore-missing',
|
'--ignore-missing',
|
||||||
'--refresh')
|
'--refresh')
|
||||||
if self.IsRebaseInProgress():
|
if self.IsRebaseInProgress():
|
||||||
return True
|
details.append("rebase in progress")
|
||||||
|
if not get_all:
|
||||||
|
return details
|
||||||
|
|
||||||
if self.work_git.DiffZ('diff-index', '--cached', HEAD):
|
changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys()
|
||||||
return True
|
if changes:
|
||||||
|
details.extend(changes)
|
||||||
|
if not get_all:
|
||||||
|
return details
|
||||||
|
|
||||||
if self.work_git.DiffZ('diff-files'):
|
changes = self.work_git.DiffZ('diff-files').keys()
|
||||||
return True
|
if changes:
|
||||||
|
details.extend(changes)
|
||||||
|
if not get_all:
|
||||||
|
return details
|
||||||
|
|
||||||
if self.work_git.LsOthers():
|
changes = self.work_git.LsOthers()
|
||||||
return True
|
if changes:
|
||||||
|
details.extend(changes)
|
||||||
|
|
||||||
|
return details
|
||||||
|
|
||||||
|
def HasChanges(self):
|
||||||
|
"""Returns true if there are uncommitted changes.
|
||||||
|
"""
|
||||||
|
if self.UncommitedFiles(get_all=False):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def PrintWorkTreeStatus(self, output_redir=None):
|
def PrintWorkTreeStatus(self, output_redir=None):
|
||||||
@@ -1040,7 +1070,7 @@ class Project(object):
       except OSError as e:
         print("warn: Cannot remove archive %s: "
               "%s" % (tarpath, str(e)), file=sys.stderr)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return True

     if is_new is None:

@@ -1078,9 +1108,11 @@ class Project(object):
     elif self.manifest.default.sync_c:
       current_branch_only = True

-    if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
+    has_sha1 = ID_RE.match(self.revisionExpr) and self._CheckForSha1()
+    if (not has_sha1  #Need to fetch since we don't already have this revision
+        and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
                              current_branch_only=current_branch_only,
-                             no_tags=no_tags):
+                             no_tags=no_tags)):
       return False

     if self.worktree:
@@ -1096,9 +1128,28 @@ class Project(object):
   def PostRepoUpgrade(self):
     self._InitHooks()

-  def _CopyFiles(self):
+  def _CopyAndLinkFiles(self):
     for copyfile in self.copyfiles:
       copyfile._Copy()
+    for linkfile in self.linkfiles:
+      linkfile._Link()
+
+  def GetCommitRevisionId(self):
+    """Get revisionId of a commit.
+
+    Use this method instead of GetRevisionId to get the id of the commit rather
+    than the id of the current git object (for example, a tag)
+
+    """
+    if not self.revisionExpr.startswith(R_TAGS):
+      return self.GetRevisionId(self._allrefs)
+
+    try:
+      return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
+    except GitError:
+      raise ManifestInvalidRevisionError(
+          'revision %s in %s not found' % (self.revisionExpr,
+                                           self.name))

   def GetRevisionId(self, all_refs=None):
     if self.revisionId:
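The new GetCommitRevisionId boils down to `git rev-list -1 <rev>`, which resolves an annotated tag to the commit it points at. A minimal stand-alone sketch of the same lookup, assuming only that `git` is on PATH; the helper name below is illustrative and is not repo code:

import subprocess

def commit_of(gitdir, rev):
  # `git rev-list -1 <rev>` prints the single commit id a tag or branch
  # ultimately resolves to.
  out = subprocess.check_output(
      ['git', '--git-dir', gitdir, 'rev-list', '-1', rev])
  return out.strip()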
@@ -1128,7 +1179,7 @@ class Project(object):

     def _doff():
       self._FastForward(revid)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()

     head = self.work_git.GetHead()
     if head.startswith(R_HEADS):

@@ -1164,7 +1215,7 @@ class Project(object):
       except GitError as e:
         syncbuf.fail(self, e)
         return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return

     if head == revid:

@@ -1186,7 +1237,7 @@ class Project(object):
       except GitError as e:
         syncbuf.fail(self, e)
         return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return

     upstream_gain = self._revlist(not_rev(HEAD), revid)

@@ -1259,12 +1310,12 @@ class Project(object):
     if cnt_mine > 0 and self.rebase:
       def _dorebase():
         self._Rebase(upstream='%s^1' % last_mine, onto=revid)
-        self._CopyFiles()
+        self._CopyAndLinkFiles()
       syncbuf.later2(self, _dorebase)
     elif local_changes:
       try:
         self._ResetHard(revid)
-        self._CopyFiles()
+        self._CopyAndLinkFiles()
       except GitError as e:
         syncbuf.fail(self, e)
         return

@@ -1277,6 +1328,12 @@ class Project(object):
     abssrc = os.path.join(self.worktree, src)
     self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))

+  def AddLinkFile(self, src, dest, absdest):
+    # dest should already be an absolute path, but src is project relative
+    # make src an absolute path
+    abssrc = os.path.join(self.worktree, src)
+    self.linkfiles.append(_LinkFile(src, dest, abssrc, absdest))
+
   def AddAnnotation(self, name, value, keep):
     self.annotations.append(_Annotation(name, value, keep))

@@ -1307,7 +1364,7 @@ class Project(object):
       return True

     all_refs = self.bare_ref.all
-    if (R_HEADS + name) in all_refs:
+    if R_HEADS + name in all_refs:
       return GitCommand(self,
                         ['checkout', name, '--'],
                         capture_stdout=True,

@@ -1471,7 +1528,7 @@ class Project(object):

     kept = []
     for branch in kill:
-      if (R_HEADS + branch) in left:
+      if R_HEADS + branch in left:
         branch = self.GetBranch(branch)
         base = branch.LocalMerge
         if not base:

@@ -1605,7 +1662,8 @@ class Project(object):

     remote = RemoteSpec(self.remote.name,
                         url=url,
-                        review = self.remote.review)
+                        review=self.remote.review,
+                        revision=self.remote.revision)
     subproject = Project(manifest=self.manifest,
                          name=name,
                          remote=remote,
@@ -1627,6 +1685,15 @@ class Project(object):


 ## Direct Git Commands ##
+  def _CheckForSha1(self):
+    try:
+      # if revision (sha or tag) is not present then following function
+      # throws an error.
+      self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
+      return True
+    except GitError:
+      # There is no such persistent revision. We have to fetch it.
+      return False
+
   def _FetchArchive(self, tarpath, cwd=None):
     cmd = ['archive', '-v', '-o', tarpath]

@@ -1641,6 +1708,7 @@ class Project(object):
     if command.Wait() != 0:
       raise GitError('git archive %s: %s' % (self.name, command.stderr))

+
   def _RemoteFetch(self, name=None,
                    current_branch_only=False,
                    initial=False,
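_CheckForSha1 relies on `git rev-parse --verify <rev>^0`, which fails unless the named revision already resolves to a commit in the local object store. A self-contained sketch of the same test outside of repo's GitCommand wrapper; the function name and the extra --quiet flag are illustrative additions, not part of the change:

import os
import subprocess

def have_revision(gitdir, rev):
  # Exit code 0 only when <rev> is already present locally; anything else
  # means the revision still has to be fetched.
  with open(os.devnull, 'w') as devnull:
    return subprocess.call(
        ['git', '--git-dir', gitdir, 'rev-parse', '--verify', '--quiet',
         '%s^0' % rev],
        stdout=devnull, stderr=devnull) == 0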
@@ -1650,36 +1718,43 @@ class Project(object):

     is_sha1 = False
     tag_name = None
+    depth = None
+
-    def CheckForSha1():
-      try:
-        # if revision (sha or tag) is not present then following function
-        # throws an error.
-        self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
-        return True
-      except GitError:
-        # There is no such persistent revision. We have to fetch it.
-        return False
+    # The depth should not be used when fetching to a mirror because
+    # it will result in a shallow repository that cannot be cloned or
+    # fetched from.
+    if not self.manifest.IsMirror:

       if self.clone_depth:
         depth = self.clone_depth
       else:
         depth = self.manifest.manifestProject.config.GetString('repo.depth')
+      # The repo project should never be synced with partial depth
+      if self.relpath == '.repo/repo':
+        depth = None
+
     if depth:
       current_branch_only = True

-    if current_branch_only:
     if ID_RE.match(self.revisionExpr) is not None:
       is_sha1 = True
-    elif self.revisionExpr.startswith(R_TAGS):
+
+    if current_branch_only:
+      if self.revisionExpr.startswith(R_TAGS):
         # this is a tag and its sha1 value should never change
         tag_name = self.revisionExpr[len(R_TAGS):]

     if is_sha1 or tag_name is not None:
-      if CheckForSha1():
+      if self._CheckForSha1():
         return True
-    if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)):
-      current_branch_only = False
+    if is_sha1 and not depth:
+      # When syncing a specific commit and --depth is not set:
+      # * if upstream is explicitly specified and is not a sha1, fetch only
+      #   upstream as users expect only upstream to be fetch.
+      #   Note: The commit might not be in upstream in which case the sync
+      #   will fail.
+      # * otherwise, fetch all branches to make sure we end up with the
+      #   specific commit.
+      current_branch_only = self.upstream and not ID_RE.match(self.upstream)

     if not name:
       name = self.remote.name
@@ -1729,9 +1804,7 @@ class Project(object):

     cmd = ['fetch']

-    # The --depth option only affects the initial fetch; after that we'll do
-    # full fetches of changes.
-    if depth and initial:
+    if depth:
       cmd.append('--depth=%s' % depth)

     if quiet:
@@ -1740,32 +1813,64 @@ class Project(object):
       cmd.append('--update-head-ok')
     cmd.append(name)

-    if not current_branch_only:
-      # Fetch whole repo
     # If using depth then we should not get all the tags since they may
     # be outside of the depth.
     if no_tags or depth:
       cmd.append('--no-tags')
     else:
       cmd.append('--tags')
-      cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
+
+    spec = []
+    if not current_branch_only:
+      # Fetch whole repo
+      spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
     elif tag_name is not None:
-      cmd.append('tag')
-      cmd.append(tag_name)
-    else:
+      spec.append('tag')
+      spec.append(tag_name)
+
       branch = self.revisionExpr
+    if is_sha1 and depth:
+      # Shallow checkout of a specific commit, fetch from that commit and not
+      # the heads only as the commit might be deeper in the history.
+      spec.append(branch)
+    else:
       if is_sha1:
         branch = self.upstream
-      if branch.startswith(R_HEADS):
-        branch = branch[len(R_HEADS):]
-      cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+      if branch is not None and branch.strip():
+        if not branch.startswith('refs/'):
+          branch = R_HEADS + branch
+        spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
+    cmd.extend(spec)
+
+    shallowfetch = self.config.GetString('repo.shallowfetch')
+    if shallowfetch and shallowfetch != ' '.join(spec):
+      GitCommand(self, ['fetch', '--unshallow', name] + shallowfetch.split(),
+                 bare=True, ssh_proxy=ssh_proxy).Wait()
+    if depth:
+      self.config.SetString('repo.shallowfetch', ' '.join(spec))
+    else:
+      self.config.SetString('repo.shallowfetch', None)

     ok = False
     for _i in range(2):
-      ret = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait()
+      gitcmd = GitCommand(self, cmd, bare=True, capture_stderr=True,
+                          ssh_proxy=ssh_proxy)
+      ret = gitcmd.Wait()
+      print(gitcmd.stderr, file=sys.stderr, end='')
       if ret == 0:
         ok = True
         break
+      # If needed, run the 'git remote prune' the first time through the loop
+      elif (not _i and
+            "error:" in gitcmd.stderr and
+            "git remote prune" in gitcmd.stderr):
+        prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
+                              capture_stderr=True, ssh_proxy=ssh_proxy)
+        ret = prunecmd.Wait()
+        print(prunecmd.stderr, file=sys.stderr, end='')
+        if ret:
+          break
+        continue
       elif current_branch_only and is_sha1 and ret == 128:
         # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
         # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
@@ -1785,7 +1890,7 @@ class Project(object):
         # We just synced the upstream given branch; verify we
         # got what we wanted, else trigger a second run of all
         # refs.
-        if not CheckForSha1():
+        if not self._CheckForSha1():
          return self._RemoteFetch(name=name, current_branch_only=False,
                                   initial=False, quiet=quiet, alt_dir=alt_dir)

@@ -1848,9 +1953,9 @@ class Project(object):
       os.remove(tmpPath)
     if 'http_proxy' in os.environ and 'darwin' == sys.platform:
       cmd += ['--proxy', os.environ['http_proxy']]
-    cookiefile = self._GetBundleCookieFile(srcUrl)
+    with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile:
       if cookiefile:
-        cmd += ['--cookie', cookiefile]
+        cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
       if srcUrl.startswith('persistent-'):
         srcUrl = srcUrl[len('persistent-'):]
       cmd += [srcUrl]
@@ -1875,7 +1980,7 @@ class Project(object):
       return False

     if os.path.exists(tmpPath):
-      if curlret == 0 and self._IsValidBundle(tmpPath):
+      if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
         os.rename(tmpPath, dstPath)
         return True
       else:
@@ -1884,25 +1989,27 @@ class Project(object):
       else:
         return False

-  def _IsValidBundle(self, path):
+  def _IsValidBundle(self, path, quiet):
     try:
       with open(path) as f:
         if f.read(16) == '# v2 git bundle\n':
           return True
         else:
+          if not quiet:
             print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
           return False
     except OSError:
       return False

-  def _GetBundleCookieFile(self, url):
+  @contextlib.contextmanager
+  def _GetBundleCookieFile(self, url, quiet):
     if url.startswith('persistent-'):
       try:
         p = subprocess.Popen(
             ['git-remote-persistent-https', '-print_config', url],
             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
             stderr=subprocess.PIPE)
-        p.stdin.close() # Tell subprocess it's ok to close.
+        try:
           prefix = 'http.cookiefile='
           cookiefile = None
           for line in p.stdout:
@@ -1910,19 +2017,23 @@ class Project(object):
             if line.startswith(prefix):
               cookiefile = line[len(prefix):]
               break
+          # Leave subprocess open, as cookie file may be transient.
+          if cookiefile:
+            yield cookiefile
+            return
+        finally:
+          p.stdin.close()
           if p.wait():
             err_msg = p.stderr.read()
             if ' -print_config' in err_msg:
               pass # Persistent proxy doesn't support -print_config.
-            else:
+            elif not quiet:
               print(err_msg, file=sys.stderr)
-        if cookiefile:
-          return cookiefile
       except OSError as e:
         if e.errno == errno.ENOENT:
           pass # No persistent proxy.
         raise
-    return GitConfig.ForUser().GetString('http.cookiefile')
+    yield GitConfig.ForUser().GetString('http.cookiefile')

   def _Checkout(self, rev, quiet=False):
     cmd = ['checkout']
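_GetBundleCookieFile now yields instead of returning, so callers can hold the cookie file inside a `with` block while the helper subprocess stays alive. A minimal, generic sketch of that @contextlib.contextmanager pattern; the resource below is a stand-in, not repo code:

import contextlib

@contextlib.contextmanager
def transient_cookiefile():
  path = '/tmp/example-cookies.txt'   # stand-in for the transient cookie file
  try:
    yield path                        # caller's `with` block runs here
  finally:
    pass                              # cleanup runs once the block exits

with transient_cookiefile() as cookiefile:
  print(cookiefile)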
@@ -1934,7 +2045,7 @@ class Project(object):
       if self._allrefs:
         raise GitError('%s checkout %s ' % (self.name, rev))

-  def _CherryPick(self, rev, quiet=False):
+  def _CherryPick(self, rev):
     cmd = ['cherry-pick']
     cmd.append(rev)
     cmd.append('--')

@@ -1942,7 +2053,7 @@ class Project(object):
       if self._allrefs:
         raise GitError('%s cherry-pick %s ' % (self.name, rev))

-  def _Revert(self, rev, quiet=False):
+  def _Revert(self, rev):
     cmd = ['revert']
     cmd.append('--no-edit')
     cmd.append(rev)

@@ -1974,7 +2085,7 @@ class Project(object):
     if GitCommand(self, cmd).Wait() != 0:
       raise GitError('%s merge %s ' % (self.name, head))

-  def _InitGitDir(self, mirror_git=None):
+  def _InitGitDir(self, mirror_git=None, MirrorOverride=False):
     if not os.path.exists(self.gitdir):

       # Initialize the bare repository, which contains all of the objects.
@@ -2016,29 +2127,50 @@ class Project(object):
       for key in ['user.name', 'user.email']:
         if m.Has(key, include_defaults=False):
           self.config.SetString(key, m.GetString(key))
-    if self.manifest.IsMirror:
+    if self.manifest.IsMirror and not MirrorOverride:
       self.config.SetString('core.bare', 'true')
     else:
       self.config.SetString('core.bare', None)

+  def _ProjectHooks(self, remote, repodir):
+    """List the hooks present in the 'hooks' directory.
+
+    These hooks are project hooks and are copied to the '.git/hooks' directory
+    of all subprojects.
+
+    The remote projecthooks supplement/overrule any stockhook making it possible to
+    have a combination of hooks both from the remote projecthook and
+    .repo/hooks directories.
+
+    Returns:
+      A list of absolute paths to all of the files in the hooks directory and
+      projecthooks files, excluding the .git folder.
+    """
+    hooks = {}
+    d = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'hooks')
+    hooks = dict([(x, os.path.join(d, x)) for x in os.listdir(d)])
+    if remote is not None:
+      if remote.projecthookName is not None:
+        d = os.path.abspath('%s/projecthooks/%s/%s' % (repodir, remote.name, remote.projecthookName))
+        if os.path.isdir(d):
+          hooks.update(dict([(x, os.path.join(d, x)) for x in os.listdir(d)]))
+
+    if hooks.has_key('.git'):
+      del hooks['.git']
+    return hooks.values()
+
   def _UpdateHooks(self):
     if os.path.exists(self.gitdir):
-      # Always recreate hooks since they can have been changed
-      # since the latest update.
-      hooks = self._gitdir_path('hooks')
-      try:
-        to_rm = os.listdir(hooks)
-      except OSError:
-        to_rm = []
-      for old_hook in to_rm:
-        os.remove(os.path.join(hooks, old_hook))
       self._InitHooks()

   def _InitHooks(self):
     hooks = os.path.realpath(self._gitdir_path('hooks'))
     if not os.path.exists(hooks):
       os.makedirs(hooks)
-    for stock_hook in _ProjectHooks():
+    pr = None
+    if self is not self.manifest.manifestProject:
+      pr = self.manifest.remotes.get(self.remote.name)
+    for stock_hook in self._ProjectHooks(pr, self.manifest.repodir):
       name = os.path.basename(stock_hook)

       if name in ('commit-msg',) and not self.remote.review \
@@ -2118,7 +2250,7 @@ class Project(object):
     symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
     if share_refs:
       # These objects can only be used by a single working tree.
-      symlink_files += ['config', 'packed-refs']
+      symlink_files += ['config', 'packed-refs', 'shallow']
       symlink_dirs += ['logs', 'refs']
     to_symlink = symlink_files + symlink_dirs

@@ -2138,6 +2270,14 @@ class Project(object):
         if name in symlink_dirs and not os.path.lexists(src):
           os.makedirs(src)

+        # If the source file doesn't exist, ensure the destination
+        # file doesn't either.
+        if name in symlink_files and not os.path.lexists(src):
+          try:
+            os.remove(dst)
+          except OSError:
+            pass
+
         if name in to_symlink:
           os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
         elif copy_all and not os.path.islink(dst):

@@ -2166,7 +2306,7 @@ class Project(object):
     if GitCommand(self, cmd).Wait() != 0:
       raise GitError("cannot initialize work tree")

-    self._CopyFiles()
+    self._CopyAndLinkFiles()

   def _gitdir_path(self, path):
     return os.path.realpath(os.path.join(self.gitdir, path))
@@ -2181,6 +2321,43 @@ class Project(object):
   def _allrefs(self):
     return self.bare_ref.all

+  def _getLogs(self, rev1, rev2, oneline=False, color=True):
+    """Get logs between two revisions of this project."""
+    comp = '..'
+    if rev1:
+      revs = [rev1]
+      if rev2:
+        revs.extend([comp, rev2])
+      cmd = ['log', ''.join(revs)]
+    out = DiffColoring(self.config)
+    if out.is_on and color:
+      cmd.append('--color')
+    if oneline:
+      cmd.append('--oneline')
+
+    try:
+      log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
+      if log.Wait() == 0:
+        return log.stdout
+    except GitError:
+      # worktree may not exist if groups changed for example. In that case,
+      # try in gitdir instead.
+      if not os.path.exists(self.worktree):
+        return self.bare_git.log(*cmd[1:])
+      else:
+        raise
+    return None
+
+  def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True):
+    """Get the list of logs from this revision to given revisionId"""
+    logs = {}
+    selfId = self.GetRevisionId(self._allrefs)
+    toId = toProject.GetRevisionId(toProject._allrefs)
+
+    logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color)
+    logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color)
+    return logs
+
 class _GitGetByExec(object):
   def __init__(self, project, bare, gitdir):
     self._project = project
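The `rev1..rev2` range that _getLogs builds is plain `git log` range syntax: commits reachable from rev2 but not from rev1. Running it in both directions is what produces the "added" and "removed" halves used by getAddedAndRemovedLogs. A tiny illustrative sketch of just the range construction, with made-up revision strings:

def rev_range(rev1, rev2):
  # "A..B" lists commits contained in B but not in A.
  revs = [rev1]
  if rev2:
    revs.extend(['..', rev2])
  return ''.join(revs)

assert rev_range('1a2b3c', '4d5e6f') == '1a2b3c..4d5e6f'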
@@ -2221,8 +2398,8 @@ class Project(object):
       out = iter(out[:-1].split('\0'))  # pylint: disable=W1401
       while out:
         try:
-          info = out.next()
-          path = out.next()
+          info = next(out)
+          path = next(out)
         except StopIteration:
           break

@@ -2248,7 +2425,7 @@ class Project(object):
         info = _Info(path, *info)
         if info.status in ('R', 'C'):
           info.src_path = info.path
-          info.path = out.next()
+          info.path = next(out)
         r[info.path] = info
       return r
     finally:

@@ -2261,8 +2438,8 @@ class Project(object):
     path = os.path.join(self._project.worktree, '.git', HEAD)
     try:
       fd = open(path, 'rb')
-    except IOError:
-      raise NoManifestException(path)
+    except IOError as e:
+      raise NoManifestException(path, str(e))
     try:
       line = fd.read()
     finally:

@@ -2524,7 +2701,7 @@ class MetaProject(Project):
     self.revisionExpr = base
     self.revisionId = None

-  def MetaBranchSwitch(self, target):
+  def MetaBranchSwitch(self):
     """ Prepare MetaProject for manifest branch switch
     """

repo (37 lines changed)

@@ -20,7 +20,7 @@ REPO_REV = 'stable'
 # limitations under the License.

 # increment this whenever we make important changes to this script
-VERSION = (1, 20)
+VERSION = (1, 21)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1, 2)

@@ -114,6 +114,7 @@ import errno
 import optparse
 import os
 import re
+import shutil
 import stat
 import subprocess
 import sys

@@ -138,10 +139,6 @@ def _print(*objects, **kwargs):

 # Python version check
 ver = sys.version_info
-if ver[0] == 3:
-  _print('warning: Python 3 support is currently experimental. YMMV.\n'
-         'Please use Python 2.6 - 2.7 instead.',
-         file=sys.stderr)
 if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
   _print('error: Python version %s unsupported.\n'
          'Please use Python 2.6 - 2.7 instead.'

@@ -278,6 +275,20 @@ def _Init(args):
     raise


+def ParseGitVersion(ver_str):
+  if not ver_str.startswith('git version '):
+    return None
+
+  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
+  to_tuple = []
+  for num_str in num_ver_str.split('.')[:3]:
+    if num_str.isdigit():
+      to_tuple.append(int(num_str))
+    else:
+      to_tuple.append(0)
+  return tuple(to_tuple)
+
+
 def _CheckGitVersion():
   cmd = [GIT, '--version']
   try:
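As a quick illustration of what the new ParseGitVersion helper returns (the function body below repeats the logic added above; the sample version strings are made up):

def ParseGitVersion(ver_str):
  # same logic as the helper added to the `repo` launcher above
  if not ver_str.startswith('git version '):
    return None
  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
  to_tuple = []
  for num_str in num_ver_str.split('.')[:3]:
    if num_str.isdigit():
      to_tuple.append(int(num_str))
    else:
      to_tuple.append(0)
  return tuple(to_tuple)

assert ParseGitVersion('git version 1.9.1') == (1, 9, 1)
assert ParseGitVersion('git version 2.1.0-rc1') == (2, 1, 0)
assert ParseGitVersion('no git here') is None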
@@ -295,12 +306,11 @@ def _CheckGitVersion():
   proc.stdout.close()
   proc.wait()

-  if not ver_str.startswith('git version '):
+  ver_act = ParseGitVersion(ver_str)
+  if ver_act is None:
     _print('error: "%s" unsupported' % ver_str, file=sys.stderr)
     raise CloneFailure()

-  ver_str = ver_str[len('git version '):].strip()
-  ver_act = tuple(map(int, ver_str.split('.')[0:3]))
   if ver_act < MIN_GIT_VERSION:
     need = '.'.join(map(str, MIN_GIT_VERSION))
     _print('fatal: git %s or later required' % need, file=sys.stderr)

@@ -728,12 +738,7 @@ def main(orig_args):
     try:
       _Init(args)
     except CloneFailure:
-      for root, dirs, files in os.walk(repodir, topdown=False):
-        for name in files:
-          os.remove(os.path.join(root, name))
-        for name in dirs:
-          os.rmdir(os.path.join(root, name))
-      os.rmdir(repodir)
+      shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
       sys.exit(1)
     repo_main, rel_repo_dir = _FindRepo()
   else:

@@ -759,4 +764,8 @@ def main(orig_args):


 if __name__ == '__main__':
+  if ver[0] == 3:
+    _print('warning: Python 3 support is currently experimental. YMMV.\n'
+           'Please use Python 2.6 - 2.7 instead.',
+           file=sys.stderr)
   main(sys.argv[1:])
@@ -46,6 +46,10 @@ class BranchInfo(object):
   def IsCurrent(self):
     return self.current > 0

+  @property
+  def IsSplitCurrent(self):
+    return self.current != 0 and self.current != len(self.projects)
+
   @property
   def IsPublished(self):
     return self.published > 0

@@ -139,10 +143,14 @@ is shown, then the branch appears in all projects.
       if in_cnt < project_cnt:
         fmt = out.write
         paths = []
-        if in_cnt < project_cnt - in_cnt:
+        non_cur_paths = []
+        if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
           in_type = 'in'
           for b in i.projects:
+            if not i.IsSplitCurrent or b.current:
               paths.append(b.project.relpath)
+            else:
+              non_cur_paths.append(b.project.relpath)
         else:
           fmt = out.notinproject
           in_type = 'not in'

@@ -154,13 +162,19 @@ is shown, then the branch appears in all projects.
             paths.append(p.relpath)

         s = ' %s %s' % (in_type, ', '.join(paths))
-        if width + 7 + len(s) < 80:
+        if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
+          fmt = out.current if i.IsCurrent else fmt
           fmt(s)
         else:
           fmt(' %s:' % in_type)
+          fmt = out.current if i.IsCurrent else out.write
           for p in paths:
             out.nl()
             fmt(width*' ' + ' %s' % p)
+          fmt = out.write
+          for p in non_cur_paths:
+            out.nl()
+            fmt(width*' ' + ' %s' % p)
       else:
         out.write(' in all projects')
       out.nl()
subcmds/diffmanifests.py (new file, 195 lines)

@@ -0,0 +1,195 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from color import Coloring
+from command import PagedCommand
+from manifest_xml import XmlManifest
+
+class _Coloring(Coloring):
+  def __init__(self, config):
+    Coloring.__init__(self, config, "status")
+
+class Diffmanifests(PagedCommand):
+  """ A command to see logs in projects represented by manifests
+
+  This is used to see deeper differences between manifests. Where a simple
+  diff would only show a diff of sha1s for example, this command will display
+  the logs of the project between both sha1s, allowing user to see diff at a
+  deeper level.
+  """
+
+  common = True
+  helpSummary = "Manifest diff utility"
+  helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
+
+  helpDescription = """
+The %prog command shows differences between project revisions of manifest1 and
+manifest2. if manifest2 is not specified, current manifest.xml will be used
+instead. Both absolute and relative paths may be used for manifests. Relative
+paths start from project's ".repo/manifests" folder.
+
+The --raw option Displays the diff in a way that facilitates parsing, the
+project pattern will be <status> <path> <revision from> [<revision to>] and the
+commit pattern will be <status> <onelined log> with status values respectively :
+
+  A = Added project
+  R = Removed project
+  C = Changed project
+  U = Project with unreachable revision(s) (revision(s) not found)
+
+for project, and
+
+  A = Added commit
+  R = Removed commit
+
+for a commit.
+
+Only changed projects may contain commits, and commit status always starts with
+a space, and are part of last printed project.
+Unreachable revisions may occur if project is not up to date or if repo has not
+been initialized with all the groups, in which case some projects won't be
+synced and their revisions won't be found.
+
+"""
+
+  def _Options(self, p):
+    p.add_option('--raw',
+                 dest='raw', action='store_true',
+                 help='Display raw diff.')
+    p.add_option('--no-color',
+                 dest='color', action='store_false', default=True,
+                 help='does not display the diff in color.')
+
+  def _printRawDiff(self, diff):
+    for project in diff['added']:
+      self.printText("A %s %s" % (project.relpath, project.revisionExpr))
+      self.out.nl()
+
+    for project in diff['removed']:
+      self.printText("R %s %s" % (project.relpath, project.revisionExpr))
+      self.out.nl()
+
+    for project, otherProject in diff['changed']:
+      self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
+                                     otherProject.revisionExpr))
+      self.out.nl()
+      self._printLogs(project, otherProject, raw=True, color=False)
+
+    for project, otherProject in diff['unreachable']:
+      self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
+                                     otherProject.revisionExpr))
+      self.out.nl()
+
+  def _printDiff(self, diff, color=True):
+    if diff['added']:
+      self.out.nl()
+      self.printText('added projects : \n')
+      self.out.nl()
+      for project in diff['added']:
+        self.printProject('\t%s' % (project.relpath))
+        self.printText(' at revision ')
+        self.printRevision(project.revisionExpr)
+        self.out.nl()
+
+    if diff['removed']:
+      self.out.nl()
+      self.printText('removed projects : \n')
+      self.out.nl()
+      for project in diff['removed']:
+        self.printProject('\t%s' % (project.relpath))
+        self.printText(' at revision ')
+        self.printRevision(project.revisionExpr)
+        self.out.nl()
+
+    if diff['changed']:
+      self.out.nl()
+      self.printText('changed projects : \n')
+      self.out.nl()
+      for project, otherProject in diff['changed']:
+        self.printProject('\t%s' % (project.relpath))
+        self.printText(' changed from ')
+        self.printRevision(project.revisionExpr)
+        self.printText(' to ')
+        self.printRevision(otherProject.revisionExpr)
+        self.out.nl()
+        self._printLogs(project, otherProject, raw=False, color=color)
+        self.out.nl()
+
+    if diff['unreachable']:
+      self.out.nl()
+      self.printText('projects with unreachable revisions : \n')
+      self.out.nl()
+      for project, otherProject in diff['unreachable']:
+        self.printProject('\t%s ' % (project.relpath))
+        self.printRevision(project.revisionExpr)
+        self.printText(' or ')
+        self.printRevision(otherProject.revisionExpr)
+        self.printText(' not found')
+        self.out.nl()
+
+  def _printLogs(self, project, otherProject, raw=False, color=True):
+    logs = project.getAddedAndRemovedLogs(otherProject, oneline=True,
+                                          color=color)
+    if logs['removed']:
+      removedLogs = logs['removed'].split('\n')
+      for log in removedLogs:
+        if log.strip():
+          if raw:
+            self.printText(' R ' + log)
+            self.out.nl()
+          else:
+            self.printRemoved('\t\t[-] ')
+            self.printText(log)
+            self.out.nl()
+
+    if logs['added']:
+      addedLogs = logs['added'].split('\n')
+      for log in addedLogs:
+        if log.strip():
+          if raw:
+            self.printText(' A ' + log)
+            self.out.nl()
+          else:
+            self.printAdded('\t\t[+] ')
+            self.printText(log)
+            self.out.nl()
+
+  def Execute(self, opt, args):
+    if not args or len(args) > 2:
+      self.Usage()
+
+    self.out = _Coloring(self.manifest.globalConfig)
+    self.printText = self.out.nofmt_printer('text')
+    if opt.color:
+      self.printProject = self.out.nofmt_printer('project', attr = 'bold')
+      self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
+      self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
+      self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
+    else:
+      self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
+
+    manifest1 = XmlManifest(self.manifest.repodir)
+    manifest1.Override(args[0])
+    if len(args) == 1:
+      manifest2 = self.manifest
+    else:
+      manifest2 = XmlManifest(self.manifest.repodir)
+      manifest2.Override(args[1])
+
+    diff = manifest1.projectsDiff(manifest2)
+    if opt.raw:
+      self._printRawDiff(diff)
+    else:
+      self._printDiff(diff, color=opt.color)
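As a hedged illustration of the --raw format described in the help text above (project lines begin with A/R/C/U at column 0, commit lines begin with a space and belong to the last printed project), a minimal parser sketch; the sample data is made up:

def parse_raw_diff(lines):
  projects = []
  for line in lines:
    if line.startswith(' '):
      # commit line: "<status> <onelined log>", attached to the last project
      status, log = line.strip().split(' ', 1)
      projects[-1]['commits'].append((status, log))
    else:
      # project line: "<status> <path> <revision from> [<revision to>]"
      fields = line.split()
      projects.append({'status': fields[0], 'path': fields[1],
                       'revisions': fields[2:], 'commits': []})
  return projects

sample = [
    'C platform/build 1a2b3c 4d5e6f',
    ' A 4d5e6f Add a new build flag',
    ' R 1a2b3c Remove the old build flag',
    'A platform/new-project 7a8b9c',
]
print(parse_raw_diff(sample))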
@@ -18,6 +18,7 @@ import re
 import sys

 from command import Command
+from error import GitError

 CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')

@@ -87,7 +88,13 @@ makes it available in your project's local working directory.
         for c in dl.commits:
           print(' %s' % (c), file=sys.stderr)
       if opt.cherrypick:
+        try:
           project._CherryPick(dl.commit)
+        except GitError:
+          print('[%s] Could not complete the cherry-pick of %s' \
+                % (project.name, dl.commit), file=sys.stderr)
+          sys.exit(1)
+
       elif opt.revert:
         project._Revert(dl.commit)
       elif opt.ffonly:
@@ -14,7 +14,9 @@
 # limitations under the License.

 from __future__ import print_function
+import errno
 import fcntl
+import multiprocessing
 import re
 import os
 import select

@@ -31,6 +33,7 @@ _CAN_COLOR = [
   'log',
 ]

+
 class ForallColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'forall')

@@ -87,6 +90,12 @@ revision to a locally executed git command, use REPO_LREV.
 REPO_RREV is the name of the revision from the manifest, exactly
 as written in the manifest.

+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
+
 REPO__* are any extra environment variables, specified by the
 "annotation" element under any project element. This can be useful
 for differentiating trees based on user-specific criteria, or simply
@@ -126,9 +135,31 @@ without iterating through the remaining projects.
     g.add_option('-v', '--verbose',
                  dest='verbose', action='store_true',
                  help='Show command error messages')
+    g.add_option('-j', '--jobs',
+                 dest='jobs', action='store', type='int', default=1,
+                 help='number of commands to execute simultaneously')

   def WantPager(self, opt):
-    return opt.project_header
+    return opt.project_header and opt.jobs == 1
+
+  def _SerializeProject(self, project):
+    """ Serialize a project._GitGetByExec instance.
+
+    project._GitGetByExec is not pickle-able. Instead of trying to pass it
+    around between processes, make a dict ourselves containing only the
+    attributes that we need.
+
+    """
+    return {
+      'name': project.name,
+      'relpath': project.relpath,
+      'remote_name': project.remote.name,
+      'lrev': project.GetRevisionId(),
+      'rrev': project.revisionExpr,
+      'annotations': dict((a.name, a.value) for a in project.annotations),
+      'gitdir': project.gitdir,
+      'worktree': project.worktree,
+    }

   def Execute(self, opt, args):
     if not opt.command:
@@ -167,43 +198,93 @@ without iterating through the remaining projects.
     # pylint: enable=W0631

     mirror = self.manifest.IsMirror
-    out = ForallColoring(self.manifest.manifestProject.config)
-    out.redirect(sys.stdout)

     rc = 0
-    first = True

     if not opt.regex:
       projects = self.GetProjects(args)
     else:
       projects = self.FindProjects(args)

-    for project in projects:
+    os.environ['REPO_COUNT'] = str(len(projects))
+
+    pool = multiprocessing.Pool(opt.jobs)
+    try:
+      config = self.manifest.manifestProject.config
+      results_it = pool.imap(
+         DoWorkWrapper,
+         [[mirror, opt, cmd, shell, cnt, config, self._SerializeProject(p)]
+          for cnt, p in enumerate(projects)]
+      )
+      pool.close()
+      for r in results_it:
+        rc = rc or r
+        if r != 0 and opt.abort_on_errors:
+          raise Exception('Aborting due to previous error')
+    except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+      # Catch KeyboardInterrupt raised inside and outside of workers
+      print('Interrupted - terminating the pool')
+      pool.terminate()
+      rc = rc or errno.EINTR
+    except Exception as e:
+      # Catch any other exceptions raised
+      print('Got an error, terminating the pool: %r' % e,
+            file=sys.stderr)
+      pool.terminate()
+      rc = rc or getattr(e, 'errno', 1)
+    finally:
+      pool.join()
+    if rc != 0:
+      sys.exit(rc)
+
+
+class WorkerKeyboardInterrupt(Exception):
+  """ Keyboard interrupt exception for worker processes. """
+  pass
+
+
+def DoWorkWrapper(args):
+  """ A wrapper around the DoWork() method.
+
+  Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+  ``Exception``-based exception to stop it flooding the console with stacktraces
+  and making the parent hang indefinitely.
+
+  """
+  project = args.pop()
+  try:
+    return DoWork(project, *args)
+  except KeyboardInterrupt:
+    print('%s: Worker interrupted' % project['name'])
+    raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
   env = os.environ.copy()
   def setenv(name, val):
     if val is None:
       val = ''
     env[name] = val.encode()

-  setenv('REPO_PROJECT', project.name)
-  setenv('REPO_PATH', project.relpath)
-  setenv('REPO_REMOTE', project.remote.name)
-  setenv('REPO_LREV', project.GetRevisionId())
-  setenv('REPO_RREV', project.revisionExpr)
-  for a in project.annotations:
-    setenv("REPO__%s" % (a.name), a.value)
+  setenv('REPO_PROJECT', project['name'])
+  setenv('REPO_PATH', project['relpath'])
+  setenv('REPO_REMOTE', project['remote_name'])
+  setenv('REPO_LREV', project['lrev'])
+  setenv('REPO_RREV', project['rrev'])
+  setenv('REPO_I', str(cnt + 1))
+  for name in project['annotations']:
+    setenv("REPO__%s" % (name), project['annotations'][name])

   if mirror:
-    setenv('GIT_DIR', project.gitdir)
-    cwd = project.gitdir
+    setenv('GIT_DIR', project['gitdir'])
+    cwd = project['gitdir']
   else:
-    cwd = project.worktree
+    cwd = project['worktree']

   if not os.path.exists(cwd):
     if (opt.project_header and opt.verbose) \
     or not opt.project_header:
-      print('skipping %s/' % project.relpath, file=sys.stderr)
-      continue
+      print('skipping %s/' % project['relpath'], file=sys.stderr)
+      return

   if opt.project_header:
     stdin = subprocess.PIPE
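A minimal, self-contained sketch of the worker pattern introduced above: run jobs through a multiprocessing.Pool with imap, and re-raise KeyboardInterrupt from workers as an ordinary Exception so the parent does not hang. The names and the squared-number "work" below are stand-ins, not forall code:

import multiprocessing

class WorkerInterrupt(Exception):
  """Stand-in for WorkerKeyboardInterrupt."""
  pass

def work_wrapper(item):
  try:
    return item * item          # stand-in for the real per-project command
  except KeyboardInterrupt:
    raise WorkerInterrupt()

if __name__ == '__main__':
  pool = multiprocessing.Pool(2)
  try:
    results = pool.imap(work_wrapper, range(10))
    pool.close()
    for r in results:
      print(r)
  except (KeyboardInterrupt, WorkerInterrupt):
    pool.terminate()
  finally:
    pool.join()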
@@ -223,6 +304,8 @@ without iterating through the remaining projects.
                        stderr=stderr)

   if opt.project_header:
+    out = ForallColoring(config)
+    out.redirect(sys.stdout)
     class sfd(object):
       def __init__(self, fd, dest):
         self.fd = fd
@@ -255,16 +338,14 @@ without iterating through the remaining projects.
         errbuf += buf
         continue

-      if empty:
-        if first:
-          first = False
-        else:
+      if empty and out:
+        if not cnt == 0:
           out.nl()

         if mirror:
-          project_header_path = project.name
+          project_header_path = project['name']
         else:
-          project_header_path = project.relpath
+          project_header_path = project['relpath']
         out.project('project %s/', project_header_path)
         out.nl()
         out.flush()
@@ -278,12 +359,4 @@ without iterating through the remaining projects.
     s.dest.flush()

   r = p.wait()
-  if r != 0:
-    if r != rc:
-      rc = r
-    if opt.abort_on_errors:
-      print("error: %s: Aborting due to previous error" % project.relpath,
-            file=sys.stderr)
-      sys.exit(r)
-  if rc != 0:
-    sys.exit(rc)
+  return r
@@ -32,7 +32,7 @@ else:
 from color import Coloring
 from command import InteractiveCommand, MirrorSafeCommand
 from error import ManifestParseError
-from project import SyncBuffer
+from project import SyncBuffer, MetaProject
 from git_config import GitConfig
 from git_command import git_require, MIN_GIT_VERSION

@@ -233,7 +233,7 @@ to update the working directory files.
       sys.exit(1)

     if opt.manifest_branch:
-      m.MetaBranchSwitch(opt.manifest_branch)
+      m.MetaBranchSwitch()

     syncbuf = SyncBuffer(m.config)
     m.Sync_LocalHalf(syncbuf)

@@ -374,6 +374,52 @@ to update the working directory files.
       print(' rm -r %s/.repo' % self.manifest.topdir)
       print('and try again.')

+  def _SyncProjectHooks(self, opt, repodir):
+    """Downloads the defined hooks supplied in the projecthooks element
+
+    """
+    # Always delete projecthooks and re-download for every new init.
+    projecthooksdir = os.path.join(repodir, 'projecthooks')
+    if os.path.exists(projecthooksdir):
+      shutil.rmtree(projecthooksdir)
+    for remotename in self.manifest.remotes:
+      r = self.manifest.remotes.get(remotename)
+      if r.projecthookName is not None and r.projecthookRevision is not None:
+        projecthookurl = r.resolvedFetchUrl.rstrip('/') + '/' + r.projecthookName
+
+        ph = MetaProject(manifest = self.manifest,
+                         name = r.projecthookName,
+                         gitdir = os.path.join(projecthooksdir,'%s/%s.git' % (remotename, r.projecthookName)),
+                         worktree = os.path.join(projecthooksdir,'%s/%s' % (remotename, r.projecthookName)))
+
+        ph.revisionExpr = r.projecthookRevision
+        is_new = not ph.Exists
+
+        if is_new:
+          if not opt.quiet:
+            print('Get projecthook %s' % \
+                  GitConfig.ForUser().UrlInsteadOf(projecthookurl), file=sys.stderr)
+          ph._InitGitDir(MirrorOverride=True)
+
+        phr = ph.GetRemote(remotename)
+        phr.name = 'origin'
+        phr.url = projecthookurl
+        phr.ResetFetch()
+        phr.Save()
+
+        if not ph.Sync_NetworkHalf(quiet=opt.quiet, is_new=is_new, clone_bundle=False):
+          print('fatal: cannot obtain projecthook %s' % phr.url, file=sys.stderr)
+
+          # Better delete the git dir if we created it; otherwise next
+          # time (when user fixes problems) we won't go through the "is_new" logic.
+          if is_new:
+            shutil.rmtree(ph.gitdir)
+          sys.exit(1)
+
+        syncbuf = SyncBuffer(ph.config)
+        ph.Sync_LocalHalf(syncbuf)
+        syncbuf.Finish()
+
   def Execute(self, opt, args):
     git_require(MIN_GIT_VERSION, fail=True)

@@ -389,6 +435,7 @@ to update the working directory files.

     self._SyncManifest(opt)
     self._LinkManifest(opt.manifest_name)
+    self._SyncProjectHooks(opt, self.manifest.repodir)

     if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
       if opt.config_name or self._ShouldConfigureUser():
@@ -59,8 +59,12 @@ revision specified in the manifest.
     for project in all_projects:
       pm.update()
       # If the current revision is a specific SHA1 then we can't push back
-      # to it so substitute the manifest default revision instead.
+      # to it; so substitute with dest_branch if defined, or with manifest
+      # default revision instead.
       if IsId(project.revisionExpr):
+        if project.dest_branch:
+          project.revisionExpr = project.dest_branch
+        else:
           project.revisionExpr = self.manifest.default.revisionExpr
       if not project.StartBranch(nb):
         err.append(project)
@@ -113,7 +113,7 @@ the following meanings:
     try:
       state = project.PrintWorkTreeStatus(output)
       if state == 'CLEAN':
-        clean_counter.next()
+        next(clean_counter)
     finally:
       sem.release()

@@ -141,7 +141,7 @@ the following meanings:
       for project in all_projects:
         state = project.PrintWorkTreeStatus()
         if state == 'CLEAN':
-          counter.next()
+          next(counter)
     else:
       sem = _threading.Semaphore(opt.jobs)
       threads_and_output = []
@@ -164,7 +164,7 @@ the following meanings:
         t.join()
         output.dump(sys.stdout)
         output.close()
-    if len(all_projects) == counter.next():
+    if len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')

     if opt.orphans:
@@ -14,10 +14,10 @@
 # limitations under the License.

 from __future__ import print_function
+import json
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import pickle
 import re
 import shutil
 import socket
@@ -58,13 +58,13 @@ except ImportError:

 from git_command import GIT, git_require
 from git_refs import R_HEADS, HEAD
-from main import WrapperModule
 from project import Project
 from project import RemoteSpec
 from command import Command, MirrorSafeCommand
 from error import RepoChangedException, GitError, ManifestParseError
 from project import SyncBuffer
 from progress import Progress
+from wrapper import Wrapper

 _ONE_DAY_S = 24 * 60 * 60

@@ -128,6 +128,9 @@ HTTP client or proxy configuration, but the Git binary works.
 The --fetch-submodules option enables fetching Git submodules
 of a project from server.

+The -c/--current-branch option can be used to only fetch objects that
+are on the branch specified by a project's revision.
+
 SSH Connections
 ---------------

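Outside the diff, a minimal sketch of the optparse wiring the new help text presumably documents. The exact add_option call is an assumption (the real definition lives in the command's _Options() and is not part of this hunk), but the dest name matches the opt.current_branch_only references visible later in these hunks.

from optparse import OptionParser

# Assumed wiring for the documented -c/--current-branch flag.
p = OptionParser()
p.add_option('-c', '--current-branch',
             dest='current_branch_only', action='store_true',
             help='fetch only current branch from server')
opts, _ = p.parse_args(['-c'])
print(opts.current_branch_only)  # True when -c is passed
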
@@ -219,7 +222,7 @@ later is required to fix a server side protocol bug.
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)

-  def _FetchProjectList(self, opt, projects, *args):
+  def _FetchProjectList(self, opt, projects, *args, **kwargs):
     """Main function of the fetch threads when jobs are > 1.

     Delegates most of the work to _FetchHelper.
@@ -227,11 +230,11 @@ later is required to fix a server side protocol bug.
     Args:
       opt: Program options returned from optparse. See _Options().
       projects: Projects to fetch.
-      *args: Remaining arguments to pass to _FetchHelper. See the
+      *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
           _FetchHelper docstring for details.
     """
     for project in projects:
-      success = self._FetchHelper(opt, project, *args)
+      success = self._FetchHelper(opt, project, *args, **kwargs)
       if not success and not opt.force_broken:
         break

@@ -304,54 +307,39 @@ later is required to fix a server side protocol bug.

   def _Fetch(self, projects, opt):
     fetched = set()
+    lock = _threading.Lock()
     pm = Progress('Fetching projects', len(projects))

-    if self.jobs == 1:
-      for project in projects:
-        pm.update()
-        if not opt.quiet:
-          print('Fetching project %s' % project.name)
-        if project.Sync_NetworkHalf(
-            quiet=opt.quiet,
-            current_branch_only=opt.current_branch_only,
-            clone_bundle=not opt.no_clone_bundle,
-            no_tags=opt.no_tags,
-            archive=self.manifest.IsArchive):
-          fetched.add(project.gitdir)
-        else:
-          print('error: Cannot fetch %s' % project.name, file=sys.stderr)
-          if opt.force_broken:
-            print('warn: --force-broken, continuing to sync', file=sys.stderr)
-          else:
-            sys.exit(1)
-    else:
     objdir_project_map = dict()
     for project in projects:
       objdir_project_map.setdefault(project.objdir, []).append(project)

     threads = set()
-    lock = _threading.Lock()
     sem = _threading.Semaphore(self.jobs)
     err_event = _threading.Event()
     for project_list in objdir_project_map.values():
-      # Check for any errors before starting any new threads.
+      # Check for any errors before running any more tasks.
       # ...we'll let existing threads finish, though.
-      if err_event.isSet():
+      if err_event.isSet() and not opt.force_broken:
         break

       sem.acquire()
+      kwargs = dict(opt=opt,
+                    projects=project_list,
+                    lock=lock,
+                    fetched=fetched,
+                    pm=pm,
+                    sem=sem,
+                    err_event=err_event)
+      if self.jobs > 1:
         t = _threading.Thread(target = self._FetchProjectList,
-                              args = (opt,
-                                      project_list,
-                                      lock,
-                                      fetched,
-                                      pm,
-                                      sem,
-                                      err_event))
+                              kwargs = kwargs)
         # Ensure that Ctrl-C will not freeze the repo process.
         t.daemon = True
         threads.add(t)
         t.start()
+      else:
+        self._FetchProjectList(**kwargs)

     for t in threads:
       t.join()
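Outside the diff, a minimal sketch of the pattern this rewrite adopts: build one kwargs dict and hand it either to threading.Thread (jobs > 1) or to a direct call (jobs == 1), so both paths stay in sync. The worker, project names, and values below are illustrative only.

import threading

def fetch(projects=None, results=None, lock=None):
  # Stand-in worker: record which projects it was handed.
  with lock:
    results.extend(projects)

kwargs = dict(projects=['platform/build', 'platform/bionic'],
              results=[], lock=threading.Lock())

jobs = 2
if jobs > 1:
  t = threading.Thread(target=fetch, kwargs=kwargs)  # same dict drives the thread...
  t.daemon = True
  t.start()
  t.join()
else:
  fetch(**kwargs)                                     # ...or the inline call
print(kwargs['results'])
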
@@ -573,7 +561,10 @@ later is required to fix a server side protocol bug.
       branch = branch[len(R_HEADS):]

     env = os.environ.copy()
-    if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
+    if 'SYNC_TARGET' in env:
+      target = env['SYNC_TARGET']
+      [success, manifest_str] = server.GetApprovedManifest(branch, target)
+    elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
       target = '%s-%s' % (env['TARGET_PRODUCT'],
                           env['TARGET_BUILD_VARIANT'])
       [success, manifest_str] = server.GetApprovedManifest(branch, target)
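Outside the diff, a small sketch of the target-selection order the new code implements: an explicit SYNC_TARGET in the environment wins, otherwise the Android TARGET_PRODUCT/TARGET_BUILD_VARIANT pair is combined as before. The example values are illustrative.

def sync_target(env):
  # Mirrors the branching above for choosing the manifest-server target name.
  if 'SYNC_TARGET' in env:
    return env['SYNC_TARGET']
  if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
    return '%s-%s' % (env['TARGET_PRODUCT'], env['TARGET_BUILD_VARIANT'])
  return None

print(sync_target({'SYNC_TARGET': 'aosp_arm-eng'}))        # -> aosp_arm-eng
print(sync_target({'TARGET_PRODUCT': 'aosp_arm',
                   'TARGET_BUILD_VARIANT': 'eng'}))        # -> aosp_arm-eng
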
@@ -699,10 +690,10 @@ later is required to fix a server side protocol bug.
       print(self.manifest.notice)

 def _PostRepoUpgrade(manifest, quiet=False):
-  wrapper = WrapperModule()
+  wrapper = Wrapper()
   if wrapper.NeedSetupGnuPG():
     wrapper.SetupGnuPG(quiet)
-  for project in manifest.projects.values():
+  for project in manifest.projects:
     if project.Exists:
       project.PostRepoUpgrade()

@@ -775,7 +766,7 @@ class _FetchTimes(object):
   _ALPHA = 0.5

   def __init__(self, manifest):
-    self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
     self._times = None
     self._seen = set()

@@ -794,22 +785,17 @@ class _FetchTimes(object):
   def _Load(self):
     if self._times is None:
       try:
-        f = open(self._path, 'rb')
-      except IOError:
-        self._times = {}
-        return self._times
-      try:
+        f = open(self._path)
         try:
-          self._times = pickle.load(f)
-        except IOError:
-          try:
-            os.remove(self._path)
-          except OSError:
-            pass
-          self._times = {}
-      finally:
-        f.close()
-    return self._times
+          self._times = json.load(f)
+        finally:
+          f.close()
+      except (IOError, ValueError):
+        try:
+          os.remove(self._path)
+        except OSError:
+          pass
+        self._times = {}

   def Save(self):
     if self._times is None:
@@ -823,13 +809,13 @@ class _FetchTimes(object):
         del self._times[name]

     try:
-      f = open(self._path, 'wb')
+      f = open(self._path, 'w')
       try:
-        pickle.dump(self._times, f)
-      except (IOError, OSError, pickle.PickleError):
+        json.dump(self._times, f, indent=2)
+      finally:
+        f.close()
+    except (IOError, TypeError):
       try:
         os.remove(self._path)
       except OSError:
         pass
-    finally:
-      f.close()
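Outside the diff, a small sketch of what the switch from pickle to JSON means for the fetch-time cache: a dict of smoothed per-project fetch times round-trips through a plain, human-readable text file. The path and values below are illustrative only.

import json

# Illustrative contents: project name -> smoothed fetch time in seconds.
times = {'platform/build': 3.2, 'platform/frameworks/base': 41.7}

path = '/tmp/.repo_fetchtimes.json'  # stand-in for <repodir>/.repo_fetchtimes.json
with open(path, 'w') as f:
  json.dump(times, f, indent=2)      # readable on disk, unlike the old pickle file

with open(path) as f:
  print(json.load(f))
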
@@ -25,9 +25,11 @@ from git_command import GitCommand
 from project import RepoHook

 from pyversion import is_python3
-if not is_python3():
 # pylint:disable=W0622
+if not is_python3():
   input = raw_input
+else:
+  unicode = str
 # pylint:enable=W0622

 UNUSUAL_COMMIT_THRESHOLD = 5
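Outside the diff, a minimal sketch of what the shim above buys: aliasing the missing builtins keeps the module's Python 2 spellings working under Python 3. The version check here stands in for repo's is_python3() helper.

from __future__ import print_function
import sys

# pylint:disable=W0622
if sys.version_info[0] >= 3:
  unicode = str          # so unicode(x) / isinstance(x, unicode) still work
else:
  input = raw_input      # so input() reads a raw line instead of eval()ing it
# pylint:enable=W0622

print(unicode(42), isinstance(u'x', unicode))
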
@@ -89,6 +91,11 @@ to "true" then repo will assume you always answer "y" at the prompt,
 and will not prompt you further. If it is set to "false" then repo
 will assume you always answer "n", and will abort.

+review.URL.autoreviewer:
+
+To automatically append a user or mailing list to reviews, you can set
+a per-project or global Git option to do so.
+
 review.URL.autocopy:

 To automatically copy a user or mailing list to all uploaded reviews,
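Outside the diff, a small sketch of how such a value would be consumed, following the split-and-strip parsing visible in the next hunk. The config key spelling (review.<review-URL>.autoreviewer, mirroring the existing autocopy option) and the addresses are assumptions for illustration.

# Illustrative value, e.g. set with:
#   git config review.https://gerrit.example.com/.autoreviewer 'alice@example.com, soc-review@example.com'
raw_list = 'alice@example.com, soc-review@example.com'

reviewers = [entry.strip() for entry in raw_list.split(',')]
print(reviewers)  # ['alice@example.com', 'soc-review@example.com']
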
@@ -293,14 +300,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/

     self._UploadAndReport(opt, todo, people)

-  def _AppendAutoCcList(self, branch, people):
+  def _AppendAutoList(self, branch, people):
     """
+    Appends the list of reviewers in the git project's config.
     Appends the list of users in the CC list in the git project's config if a
     non-empty reviewer list was found.
     """

     name = branch.name
     project = branch.project

+    key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
+    raw_list = project.config.GetString(key)
+    if not raw_list is None:
+      people[0].extend([entry.strip() for entry in raw_list.split(',')])
+
     key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
     raw_list = project.config.GetString(key)
     if not raw_list is None and len(people[0]) > 0:
@@ -323,16 +336,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     for branch in todo:
       try:
         people = copy.deepcopy(original_people)
-        self._AppendAutoCcList(branch, people)
+        self._AppendAutoList(branch, people)

         # Check if there are local changes that may have been forgotten
-        if branch.project.HasChanges():
+        changes = branch.project.UncommitedFiles()
+        if changes:
           key = 'review.%s.autoupload' % branch.project.remote.review
           answer = branch.project.config.GetBoolean(key)

           # if they want to auto upload, let's not ask because it could be automated
           if answer is None:
-            sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
+            sys.stdout.write('Uncommitted changes in ' + branch.project.name)
+            sys.stdout.write(' (did you forget to amend?):\n')
+            sys.stdout.write('\n'.join(changes) + '\n')
+            sys.stdout.write('Continue uploading? (y/N) ')
             a = sys.stdin.readline().strip().lower()
             if a not in ('y', 'yes', 't', 'true', 'on'):
               print("skipping upload", file=sys.stderr)
30 wrapper.py (Normal file)

@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import imp
+import os
+
+
+def WrapperPath():
+  return os.path.join(os.path.dirname(__file__), 'repo')
+
+_wrapper_module = None
+def Wrapper():
+  global _wrapper_module
+  if not _wrapper_module:
+    _wrapper_module = imp.load_source('wrapper', WrapperPath())
+  return _wrapper_module
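Outside the diff, a minimal usage sketch of the new module, assuming it is importable as wrapper from a repo checkout; the caching behaviour follows directly from the code above.

from wrapper import Wrapper, WrapperPath

print(WrapperPath())                 # path of the bundled 'repo' launcher next to wrapper.py
w = Wrapper()                        # first call loads the script via imp.load_source()
print(w is Wrapper())                # True: later calls return the cached module object
print(hasattr(w, 'NeedSetupGnuPG'))  # the loaded launcher exposes e.g. NeedSetupGnuPG()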