mirror of
https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00
Compare commits
66 Commits
Author | SHA1 | Date | |
---|---|---|---|
f2af756425 | |||
544e7b0a97 | |||
e0df232da7 | |||
5a7c3afa73 | |||
9bc422f130 | |||
e81bc030bb | |||
eb5acc9ae9 | |||
26c45a7958 | |||
68425f4da8 | |||
53e902a19b | |||
093fdb6587 | |||
2fb6466f79 | |||
724aafb52d | |||
ccd218cd8f | |||
dd6542268a | |||
baca5f7e88 | |||
89ece429fb | |||
565480588d | |||
1829101e28 | |||
1966133f8e | |||
f1027e23b4 | |||
2cd38a0bf8 | |||
1b46cc9b6d | |||
1242e60bdd | |||
2d0f508648 | |||
143d8a7249 | |||
5db69f3f66 | |||
ff0a3c8f80 | |||
094cdbe090 | |||
148a84de0c | |||
1c5da49e6c | |||
b8433dfd2f | |||
f2fe2d9b86 | |||
c9877c7cf6 | |||
69e04d8953 | |||
f1f1137d61 | |||
f77ef2edb0 | |||
e695338e21 | |||
bd80f7eedd | |||
bf79c6618e | |||
f045d49a71 | |||
719757d6a8 | |||
011d4f426c | |||
53d6a7b895 | |||
335f5ef4ad | |||
672cc499b9 | |||
61df418c59 | |||
4534120628 | |||
cbc0798f67 | |||
d5a5b19efd | |||
5d6cb80b8f | |||
0eb35cbe50 | |||
ce201a5311 | |||
12fd10c201 | |||
a17d7af4d9 | |||
fbd3f2a10b | |||
37128b6f70 | |||
143b4cc992 | |||
8d20116038 | |||
53263d873d | |||
7487992bd3 | |||
b25ea555c3 | |||
3bfd72158c | |||
59b31cb6e0 | |||
1e7ab2a63f | |||
3a2a59eb87 |
28
command.py
28
command.py
@ -129,7 +129,7 @@ class Command(object):
|
||||
def GetProjects(self, args, missing_ok=False, submodules_ok=False):
|
||||
"""A list of projects that match the arguments.
|
||||
"""
|
||||
all_projects = self.manifest.projects
|
||||
all_projects_list = self.manifest.projects
|
||||
result = []
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
@ -140,7 +140,6 @@ class Command(object):
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
if not args:
|
||||
all_projects_list = list(all_projects.values())
|
||||
derived_projects = {}
|
||||
for project in all_projects_list:
|
||||
if submodules_ok or project.sync_s:
|
||||
@ -152,12 +151,12 @@ class Command(object):
|
||||
project.MatchesGroups(groups)):
|
||||
result.append(project)
|
||||
else:
|
||||
self._ResetPathToProjectMap(all_projects.values())
|
||||
self._ResetPathToProjectMap(all_projects_list)
|
||||
|
||||
for arg in args:
|
||||
project = all_projects.get(arg)
|
||||
projects = self.manifest.GetProjectsWithName(arg)
|
||||
|
||||
if not project:
|
||||
if not projects:
|
||||
path = os.path.abspath(arg).replace('\\', '/')
|
||||
project = self._GetProjectByPath(path)
|
||||
|
||||
@ -172,14 +171,19 @@ class Command(object):
|
||||
if search_again:
|
||||
project = self._GetProjectByPath(path) or project
|
||||
|
||||
if not project:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not missing_ok and not project.Exists:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not project.MatchesGroups(groups):
|
||||
raise InvalidProjectGroupsError(arg)
|
||||
if project:
|
||||
projects = [project]
|
||||
|
||||
result.append(project)
|
||||
if not projects:
|
||||
raise NoSuchProjectError(arg)
|
||||
|
||||
for project in projects:
|
||||
if not missing_ok and not project.Exists:
|
||||
raise NoSuchProjectError(arg)
|
||||
if not project.MatchesGroups(groups):
|
||||
raise InvalidProjectGroupsError(arg)
|
||||
|
||||
result.extend(projects)
|
||||
|
||||
def _getpath(x):
|
||||
return x.relpath
|
||||
|
@ -27,15 +27,15 @@ following DTD:
|
||||
remove-project*,
|
||||
project*,
|
||||
repo-hooks?)>
|
||||
|
||||
|
||||
<!ELEMENT notice (#PCDATA)>
|
||||
|
||||
|
||||
<!ELEMENT remote (EMPTY)>
|
||||
<!ATTLIST remote name ID #REQUIRED>
|
||||
<!ATTLIST remote alias CDATA #IMPLIED>
|
||||
<!ATTLIST remote fetch CDATA #REQUIRED>
|
||||
<!ATTLIST remote review CDATA #IMPLIED>
|
||||
|
||||
|
||||
<!ELEMENT default (EMPTY)>
|
||||
<!ATTLIST default remote IDREF #IMPLIED>
|
||||
<!ATTLIST default revision CDATA #IMPLIED>
|
||||
@ -46,8 +46,8 @@ following DTD:
|
||||
|
||||
<!ELEMENT manifest-server (EMPTY)>
|
||||
<!ATTLIST url CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT project (annotation?,
|
||||
|
||||
<!ELEMENT project (annotation*,
|
||||
project*)>
|
||||
<!ATTLIST project name CDATA #REQUIRED>
|
||||
<!ATTLIST project path CDATA #IMPLIED>
|
||||
@ -65,7 +65,7 @@ following DTD:
|
||||
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||
<!ATTLIST annotation value CDATA #REQUIRED>
|
||||
<!ATTLIST annotation keep CDATA "true">
|
||||
|
||||
|
||||
<!ELEMENT remove-project (EMPTY)>
|
||||
<!ATTLIST remove-project name CDATA #REQUIRED>
|
||||
|
||||
|
7
error.py
7
error.py
@ -24,6 +24,13 @@ class ManifestInvalidRevisionError(Exception):
|
||||
class NoManifestException(Exception):
|
||||
"""The required manifest does not exist.
|
||||
"""
|
||||
def __init__(self, path, reason):
|
||||
super(NoManifestException, self).__init__()
|
||||
self.path = path
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
class EditorError(Exception):
|
||||
"""Unspecified error from the user's text editor.
|
||||
|
@ -21,6 +21,7 @@ import tempfile
|
||||
from signal import SIGTERM
|
||||
from error import GitError
|
||||
from trace import REPO_TRACE, IsTrace, Trace
|
||||
from wrapper import Wrapper
|
||||
|
||||
GIT = 'git'
|
||||
MIN_GIT_VERSION = (1, 5, 4)
|
||||
@ -84,15 +85,10 @@ class _GitCall(object):
|
||||
|
||||
def version_tuple(self):
|
||||
global _git_version
|
||||
|
||||
if _git_version is None:
|
||||
ver_str = git.version()
|
||||
if ver_str.startswith('git version '):
|
||||
_git_version = tuple(
|
||||
map(int,
|
||||
ver_str[len('git version '):].strip().split('-')[0].split('.')[0:3]
|
||||
))
|
||||
else:
|
||||
ver_str = git.version().decode('utf-8')
|
||||
_git_version = Wrapper().ParseGitVersion(ver_str)
|
||||
if _git_version is None:
|
||||
print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return _git_version
|
||||
|
@ -304,8 +304,8 @@ class GitConfig(object):
|
||||
d = self._do('--null', '--list')
|
||||
if d is None:
|
||||
return c
|
||||
for line in d.rstrip('\0').split('\0'): # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
for line in d.decode('utf-8').rstrip('\0').split('\0'): # pylint: disable=W1401
|
||||
# Backslash is not anomalous
|
||||
if '\n' in line:
|
||||
key, val = line.split('\n', 1)
|
||||
else:
|
||||
@ -576,7 +576,7 @@ class Remote(object):
|
||||
return None
|
||||
|
||||
u = self.review
|
||||
if not u.startswith('http:') and not u.startswith('https:'):
|
||||
if u.split(':')[0] not in ('http', 'https', 'sso'):
|
||||
u = 'http://%s' % u
|
||||
if u.endswith('/Gerrit'):
|
||||
u = u[:len(u) - len('/Gerrit')]
|
||||
@ -592,6 +592,9 @@ class Remote(object):
|
||||
host, port = os.environ['REPO_HOST_PORT_INFO'].split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
elif u.startswith('sso:'):
|
||||
self._review_url = u # Assume it's right
|
||||
REVIEW_CACHE[u] = self._review_url
|
||||
else:
|
||||
try:
|
||||
info_url = u + 'ssh_info'
|
||||
@ -601,7 +604,7 @@ class Remote(object):
|
||||
# of HTML response back, like maybe a login page.
|
||||
#
|
||||
# Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
|
||||
self._review_url = http_url + 'p/'
|
||||
self._review_url = http_url
|
||||
else:
|
||||
host, port = info.split()
|
||||
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||
|
@ -100,7 +100,7 @@ class GitRefs(object):
|
||||
def _ReadPackedRefs(self):
|
||||
path = os.path.join(self._gitdir, 'packed-refs')
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
fd = open(path, 'r')
|
||||
mtime = os.path.getmtime(path)
|
||||
except IOError:
|
||||
return
|
||||
|
@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 2.5.2
|
||||
# From Gerrit Code Review 2.6
|
||||
#
|
||||
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
|
||||
#
|
||||
@ -154,7 +154,7 @@ add_ChangeId() {
|
||||
if (unprinted) {
|
||||
print "Change-Id: I'"$id"'"
|
||||
}
|
||||
}' "$MSG" > $T && mv $T "$MSG" || rm -f $T
|
||||
}' "$MSG" > "$T" && mv "$T" "$MSG" || rm -f "$T"
|
||||
}
|
||||
_gen_ChangeIdInput() {
|
||||
echo "tree `git write-tree`"
|
||||
|
@ -35,7 +35,7 @@ elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
|
||||
then
|
||||
exit 0
|
||||
elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
|
||||
grep -q "Currently drawing from 'AC Power'"
|
||||
grep -q "drawing from 'AC Power'"
|
||||
then
|
||||
exit 0
|
||||
elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
|
||||
|
134
main.py
134
main.py
@ -31,6 +31,11 @@ else:
|
||||
urllib = imp.new_module('urllib')
|
||||
urllib.request = urllib2
|
||||
|
||||
try:
|
||||
import kerberos
|
||||
except ImportError:
|
||||
kerberos = None
|
||||
|
||||
from trace import SetTrace
|
||||
from git_command import git, GitCommand
|
||||
from git_config import init_ssh, close_ssh
|
||||
@ -46,6 +51,7 @@ from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
from manifest_xml import XmlManifest
|
||||
from pager import RunPager
|
||||
from wrapper import WrapperPath, Wrapper
|
||||
|
||||
from subcmds import all_commands
|
||||
|
||||
@ -123,8 +129,15 @@ class _Repo(object):
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
try:
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
copts = cmd.ReadEnvironmentOptions(copts)
|
||||
except NoManifestException as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
@ -140,15 +153,13 @@ class _Repo(object):
|
||||
start = time.time()
|
||||
try:
|
||||
result = cmd.Execute(copts, cargs)
|
||||
except DownloadError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except ManifestInvalidRevisionError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
result = 1
|
||||
except NoManifestException as e:
|
||||
print('error: manifest required for this command -- please run init',
|
||||
file=sys.stderr)
|
||||
except (DownloadError, ManifestInvalidRevisionError,
|
||||
NoManifestException) as e:
|
||||
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||
file=sys.stderr)
|
||||
if isinstance(e, NoManifestException):
|
||||
print('error: manifest missing or unreadable -- please run init',
|
||||
file=sys.stderr)
|
||||
result = 1
|
||||
except NoSuchProjectError as e:
|
||||
if e.name:
|
||||
@ -169,21 +180,10 @@ class _Repo(object):
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _MyRepoPath():
|
||||
return os.path.dirname(__file__)
|
||||
|
||||
def _MyWrapperPath():
|
||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
||||
|
||||
_wrapper_module = None
|
||||
def WrapperModule():
|
||||
global _wrapper_module
|
||||
if not _wrapper_module:
|
||||
_wrapper_module = imp.load_source('wrapper', _MyWrapperPath())
|
||||
return _wrapper_module
|
||||
|
||||
def _CurrentWrapperVersion():
|
||||
return WrapperModule().VERSION
|
||||
|
||||
def _CheckWrapperVersion(ver, repo_path):
|
||||
if not repo_path:
|
||||
@ -193,7 +193,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
print('no --wrapper-version argument', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
exp = _CurrentWrapperVersion()
|
||||
exp = Wrapper().VERSION
|
||||
ver = tuple(map(int, ver.split('.')))
|
||||
if len(ver) == 1:
|
||||
ver = (0, ver[0])
|
||||
@ -205,7 +205,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
!!! You must upgrade before you can continue: !!!
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if exp > ver:
|
||||
@ -214,7 +214,7 @@ def _CheckWrapperVersion(ver, repo_path):
|
||||
... You should upgrade soon:
|
||||
|
||||
cp %s %s
|
||||
""" % (exp_str, _MyWrapperPath(), repo_path), file=sys.stderr)
|
||||
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||
|
||||
def _CheckRepoDir(repo_dir):
|
||||
if not repo_dir:
|
||||
@ -342,6 +342,86 @@ class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||
self.retried = 0
|
||||
raise
|
||||
|
||||
class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||
def __init__(self):
|
||||
self.retried = 0
|
||||
self.context = None
|
||||
self.handler_order = urllib.request.BaseHandler.handler_order - 50
|
||||
|
||||
def http_error_401(self, req, fp, code, msg, headers):
|
||||
host = req.get_host()
|
||||
retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
|
||||
return retry
|
||||
|
||||
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||
try:
|
||||
spn = "HTTP@%s" % host
|
||||
authdata = self._negotiate_get_authdata(auth_header, headers)
|
||||
|
||||
if self.retried > 3:
|
||||
raise urllib.request.HTTPError(req.get_full_url(), 401,
|
||||
"Negotiate auth failed", headers, None)
|
||||
else:
|
||||
self.retried += 1
|
||||
|
||||
neghdr = self._negotiate_get_svctk(spn, authdata)
|
||||
if neghdr is None:
|
||||
return None
|
||||
|
||||
req.add_unredirected_header('Authorization', neghdr)
|
||||
response = self.parent.open(req)
|
||||
|
||||
srvauth = self._negotiate_get_authdata(auth_header, response.info())
|
||||
if self._validate_response(srvauth):
|
||||
return response
|
||||
except kerberos.GSSError:
|
||||
return None
|
||||
except:
|
||||
self.reset_retry_count()
|
||||
raise
|
||||
finally:
|
||||
self._clean_context()
|
||||
|
||||
def reset_retry_count(self):
|
||||
self.retried = 0
|
||||
|
||||
def _negotiate_get_authdata(self, auth_header, headers):
|
||||
authhdr = headers.get(auth_header, None)
|
||||
if authhdr is not None:
|
||||
for mech_tuple in authhdr.split(","):
|
||||
mech, __, authdata = mech_tuple.strip().partition(" ")
|
||||
if mech.lower() == "negotiate":
|
||||
return authdata.strip()
|
||||
return None
|
||||
|
||||
def _negotiate_get_svctk(self, spn, authdata):
|
||||
if authdata is None:
|
||||
return None
|
||||
|
||||
result, self.context = kerberos.authGSSClientInit(spn)
|
||||
if result < kerberos.AUTH_GSS_COMPLETE:
|
||||
return None
|
||||
|
||||
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||
if result < kerberos.AUTH_GSS_CONTINUE:
|
||||
return None
|
||||
|
||||
response = kerberos.authGSSClientResponse(self.context)
|
||||
return "Negotiate %s" % response
|
||||
|
||||
def _validate_response(self, authdata):
|
||||
if authdata is None:
|
||||
return None
|
||||
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||
if result == kerberos.AUTH_GSS_COMPLETE:
|
||||
return True
|
||||
return None
|
||||
|
||||
def _clean_context(self):
|
||||
if self.context is not None:
|
||||
kerberos.authGSSClientClean(self.context)
|
||||
self.context = None
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
@ -358,6 +438,8 @@ def init_http():
|
||||
pass
|
||||
handlers.append(_BasicAuthHandler(mgr))
|
||||
handlers.append(_DigestAuthHandler(mgr))
|
||||
if kerberos:
|
||||
handlers.append(_KerberosAuthHandler())
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
|
180
manifest_xml.py
180
manifest_xml.py
@ -32,7 +32,7 @@ else:
|
||||
from git_config import GitConfig
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from project import RemoteSpec, Project, MetaProject
|
||||
from error import ManifestParseError
|
||||
from error import ManifestParseError, ManifestInvalidRevisionError
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
@ -51,6 +51,12 @@ class _Default(object):
|
||||
sync_c = False
|
||||
sync_s = False
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.__dict__ != other.__dict__
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
@ -74,23 +80,27 @@ class _XmlRemote(object):
|
||||
def _resolveFetchUrl(self):
|
||||
url = self.fetchUrl.rstrip('/')
|
||||
manifestUrl = self.manifestUrl.rstrip('/')
|
||||
p = manifestUrl.startswith('persistent-http')
|
||||
if p:
|
||||
manifestUrl = manifestUrl[len('persistent-'):]
|
||||
|
||||
# urljoin will get confused if there is no scheme in the base url
|
||||
# ie, if manifestUrl is of the form <hostname:port>
|
||||
# urljoin will gets confused over quite a few things. The ones we care
|
||||
# about here are:
|
||||
# * no scheme in the base url, like <hostname:port>
|
||||
# * persistent-https://
|
||||
# We handle this by replacing these with obscure protocols
|
||||
# and then replacing them with the original when we are done.
|
||||
# gopher -> <none>
|
||||
# wais -> persistent-https
|
||||
if manifestUrl.find(':') != manifestUrl.find('/') - 1:
|
||||
manifestUrl = 'gopher://' + manifestUrl
|
||||
manifestUrl = re.sub(r'^persistent-https://', 'wais://', manifestUrl)
|
||||
url = urllib.parse.urljoin(manifestUrl, url)
|
||||
url = re.sub(r'^gopher://', '', url)
|
||||
if p:
|
||||
url = 'persistent-' + url
|
||||
url = re.sub(r'^wais://', 'persistent-https://', url)
|
||||
return url
|
||||
|
||||
def ToRemoteSpec(self, projectName):
|
||||
url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
|
||||
remoteName = self.name
|
||||
if self.remoteAlias:
|
||||
remoteName = self.remoteAlias
|
||||
return RemoteSpec(remoteName, url, self.reviewUrl)
|
||||
|
||||
class XmlManifest(object):
|
||||
@ -145,6 +155,8 @@ class XmlManifest(object):
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', r.name)
|
||||
e.setAttribute('fetch', r.fetchUrl)
|
||||
if r.remoteAlias is not None:
|
||||
e.setAttribute('alias', r.remoteAlias)
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
|
||||
@ -205,8 +217,9 @@ class XmlManifest(object):
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
def output_projects(parent, parent_node, projects):
|
||||
for p in projects:
|
||||
output_project(parent, parent_node, self.projects[p])
|
||||
for project_name in projects:
|
||||
for project in self._projects[project_name]:
|
||||
output_project(parent, parent_node, project)
|
||||
|
||||
def output_project(parent, parent_node, p):
|
||||
if not p.MatchesGroups(groups):
|
||||
@ -223,7 +236,10 @@ class XmlManifest(object):
|
||||
e.setAttribute('name', name)
|
||||
if relpath != name:
|
||||
e.setAttribute('path', relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
remoteName = None
|
||||
if d.remote:
|
||||
remoteName = d.remote.remoteAlias or d.remote.name
|
||||
if not d.remote or p.remote.name != remoteName:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
@ -245,6 +261,12 @@ class XmlManifest(object):
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
for l in p.linkfiles:
|
||||
le = doc.createElement('linkfile')
|
||||
le.setAttribute('src', l.src)
|
||||
le.setAttribute('dest', l.dest)
|
||||
e.appendChild(le)
|
||||
|
||||
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
|
||||
egroups = [g for g in p.groups if g not in default_groups]
|
||||
if egroups:
|
||||
@ -264,13 +286,11 @@ class XmlManifest(object):
|
||||
e.setAttribute('sync-s', 'true')
|
||||
|
||||
if p.subprojects:
|
||||
sort_projects = list(sorted([subp.name for subp in p.subprojects]))
|
||||
output_projects(p, e, sort_projects)
|
||||
subprojects = set(subp.name for subp in p.subprojects)
|
||||
output_projects(p, e, list(sorted(subprojects)))
|
||||
|
||||
sort_projects = list(sorted([key for key, value in self.projects.items()
|
||||
if not value.parent]))
|
||||
sort_projects.sort()
|
||||
output_projects(None, root, sort_projects)
|
||||
projects = set(p.name for p in self._paths.values() if not p.parent)
|
||||
output_projects(None, root, list(sorted(projects)))
|
||||
|
||||
if self._repo_hooks_project:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
@ -282,10 +302,15 @@ class XmlManifest(object):
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
||||
@property
|
||||
def paths(self):
|
||||
self._Load()
|
||||
return self._paths
|
||||
|
||||
@property
|
||||
def projects(self):
|
||||
self._Load()
|
||||
return self._projects
|
||||
return self._paths.values()
|
||||
|
||||
@property
|
||||
def remotes(self):
|
||||
@ -316,9 +341,14 @@ class XmlManifest(object):
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
@property
|
||||
def IsArchive(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.archive')
|
||||
|
||||
def _Unload(self):
|
||||
self._loaded = False
|
||||
self._projects = {}
|
||||
self._paths = {}
|
||||
self._remotes = {}
|
||||
self._default = None
|
||||
self._repo_hooks_project = None
|
||||
@ -422,11 +452,13 @@ class XmlManifest(object):
|
||||
|
||||
for node in itertools.chain(*node_list):
|
||||
if node.nodeName == 'default':
|
||||
if self._default is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
self._default = self._ParseDefault(node)
|
||||
new_default = self._ParseDefault(node)
|
||||
if self._default is None:
|
||||
self._default = new_default
|
||||
elif new_default != self._default:
|
||||
raise ManifestParseError('duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
|
||||
if self._default is None:
|
||||
self._default = _Default()
|
||||
|
||||
@ -448,11 +480,17 @@ class XmlManifest(object):
|
||||
self._manifest_server = url
|
||||
|
||||
def recursively_add_projects(project):
|
||||
if self._projects.get(project.name):
|
||||
projects = self._projects.setdefault(project.name, [])
|
||||
if project.relpath is None:
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
'missing path for %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
if project.relpath in self._paths:
|
||||
raise ManifestParseError(
|
||||
'duplicate path %s in %s' %
|
||||
(project.relpath, self.manifestFile))
|
||||
self._paths[project.relpath] = project
|
||||
projects.append(project)
|
||||
for subproject in project.subprojects:
|
||||
recursively_add_projects(subproject)
|
||||
|
||||
@ -473,22 +511,31 @@ class XmlManifest(object):
|
||||
|
||||
# Store a reference to the Project.
|
||||
try:
|
||||
self._repo_hooks_project = self._projects[repo_hooks_project]
|
||||
repo_hooks_projects = self._projects[repo_hooks_project]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found for repo-hooks' %
|
||||
(repo_hooks_project))
|
||||
|
||||
if len(repo_hooks_projects) != 1:
|
||||
raise ManifestParseError(
|
||||
'internal error parsing repo-hooks in %s' %
|
||||
(self.manifestFile))
|
||||
self._repo_hooks_project = repo_hooks_projects[0]
|
||||
|
||||
# Store the enabled hooks in the Project object.
|
||||
self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
|
||||
if node.nodeName == 'remove-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
|
||||
if name not in self._projects:
|
||||
raise ManifestParseError('remove-project element specifies non-existent '
|
||||
'project: %s' % name)
|
||||
|
||||
for p in self._projects[name]:
|
||||
del self._paths[p.relpath]
|
||||
del self._projects[name]
|
||||
|
||||
# If the manifest removes the hooks project, treat it as if it deleted
|
||||
# the repo-hooks element too.
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
@ -525,11 +572,13 @@ class XmlManifest(object):
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = None,
|
||||
relpath = name or None,
|
||||
revisionExpr = m.revisionExpr,
|
||||
revisionId = None)
|
||||
self._projects[project.name] = project
|
||||
self._projects[project.name] = [project]
|
||||
self._paths[project.relpath] = project
|
||||
|
||||
def _ParseRemote(self, node):
|
||||
"""
|
||||
@ -689,9 +738,10 @@ class XmlManifest(object):
|
||||
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||
|
||||
if parent is None:
|
||||
relpath, worktree, gitdir = self.GetProjectPaths(name, path)
|
||||
relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
|
||||
else:
|
||||
relpath, worktree, gitdir = self.GetSubprojectPaths(parent, path)
|
||||
relpath, worktree, gitdir, objdir = \
|
||||
self.GetSubprojectPaths(parent, name, path)
|
||||
|
||||
default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
@ -704,6 +754,7 @@ class XmlManifest(object):
|
||||
name = name,
|
||||
remote = remote.ToRemoteSpec(name),
|
||||
gitdir = gitdir,
|
||||
objdir = objdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = revisionExpr,
|
||||
@ -720,6 +771,8 @@ class XmlManifest(object):
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
if n.nodeName == 'linkfile':
|
||||
self._ParseLinkFile(project, n)
|
||||
if n.nodeName == 'annotation':
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == 'project':
|
||||
@ -732,10 +785,15 @@ class XmlManifest(object):
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
objdir = gitdir
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects', '%s.git' % path)
|
||||
return relpath, worktree, gitdir
|
||||
objdir = os.path.join(self.repodir, 'project-objects', '%s.git' % name)
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
def GetProjectsWithName(self, name):
|
||||
return self._projects.get(name, [])
|
||||
|
||||
def GetSubprojectName(self, parent, submodule_path):
|
||||
return os.path.join(parent.name, submodule_path)
|
||||
@ -746,14 +804,15 @@ class XmlManifest(object):
|
||||
def _UnjoinRelpath(self, parent_relpath, relpath):
|
||||
return os.path.relpath(relpath, parent_relpath)
|
||||
|
||||
def GetSubprojectPaths(self, parent, path):
|
||||
def GetSubprojectPaths(self, parent, name, path):
|
||||
relpath = self._JoinRelpath(parent.relpath, path)
|
||||
gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
|
||||
objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
|
||||
if self.IsMirror:
|
||||
worktree = None
|
||||
else:
|
||||
worktree = os.path.join(parent.worktree, path).replace('\\', '/')
|
||||
return relpath, worktree, gitdir
|
||||
return relpath, worktree, gitdir, objdir
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
@ -763,6 +822,14 @@ class XmlManifest(object):
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _ParseLinkFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _ParseAnnotation(self, project, node):
|
||||
name = self._reqatt(node, 'name')
|
||||
value = self._reqatt(node, 'value')
|
||||
@ -795,3 +862,40 @@ class XmlManifest(object):
|
||||
raise ManifestParseError("no %s in <%s> within %s" %
|
||||
(attname, node.nodeName, self.manifestFile))
|
||||
return v
|
||||
|
||||
def projectsDiff(self, manifest):
|
||||
"""return the projects differences between two manifests.
|
||||
|
||||
The diff will be from self to given manifest.
|
||||
|
||||
"""
|
||||
fromProjects = self.paths
|
||||
toProjects = manifest.paths
|
||||
|
||||
fromKeys = fromProjects.keys()
|
||||
fromKeys.sort()
|
||||
toKeys = toProjects.keys()
|
||||
toKeys.sort()
|
||||
|
||||
diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
|
||||
|
||||
for proj in fromKeys:
|
||||
if not proj in toKeys:
|
||||
diff['removed'].append(fromProjects[proj])
|
||||
else:
|
||||
fromProj = fromProjects[proj]
|
||||
toProj = toProjects[proj]
|
||||
try:
|
||||
fromRevId = fromProj.GetCommitRevisionId()
|
||||
toRevId = toProj.GetCommitRevisionId()
|
||||
except ManifestInvalidRevisionError:
|
||||
diff['unreachable'].append((fromProj, toProj))
|
||||
else:
|
||||
if fromRevId != toRevId:
|
||||
diff['changed'].append((fromProj, toProj))
|
||||
toKeys.remove(proj)
|
||||
|
||||
for proj in toKeys:
|
||||
diff['added'].append(toProjects[proj])
|
||||
|
||||
return diff
|
||||
|
361
project.py
361
project.py
@ -23,6 +23,7 @@ import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
@ -82,7 +83,7 @@ def _ProjectHooks():
|
||||
"""
|
||||
global _project_hook_list
|
||||
if _project_hook_list is None:
|
||||
d = os.path.abspath(os.path.dirname(__file__))
|
||||
d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
|
||||
d = os.path.join(d , 'hooks')
|
||||
_project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
|
||||
return _project_hook_list
|
||||
@ -230,6 +231,30 @@ class _CopyFile:
|
||||
except IOError:
|
||||
_error('Cannot copy file %s to %s', src, dest)
|
||||
|
||||
class _LinkFile:
|
||||
def __init__(self, src, dest, abssrc, absdest):
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
self.abs_src = abssrc
|
||||
self.abs_dest = absdest
|
||||
|
||||
def _Link(self):
|
||||
src = self.abs_src
|
||||
dest = self.abs_dest
|
||||
# link file if it does not exist or is out of date
|
||||
if not os.path.islink(dest) or os.readlink(dest) != src:
|
||||
try:
|
||||
# remove existing file first, since it might be read-only
|
||||
if os.path.exists(dest):
|
||||
os.remove(dest)
|
||||
else:
|
||||
dest_dir = os.path.dirname(dest)
|
||||
if not os.path.isdir(dest_dir):
|
||||
os.makedirs(dest_dir)
|
||||
os.symlink(src, dest)
|
||||
except IOError:
|
||||
_error('Cannot link file %s to %s', src, dest)
|
||||
|
||||
class RemoteSpec(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
@ -487,6 +512,7 @@ class Project(object):
|
||||
name,
|
||||
remote,
|
||||
gitdir,
|
||||
objdir,
|
||||
worktree,
|
||||
relpath,
|
||||
revisionExpr,
|
||||
@ -507,6 +533,7 @@ class Project(object):
|
||||
name: The `name` attribute of manifest.xml's project element.
|
||||
remote: RemoteSpec object specifying its remote's properties.
|
||||
gitdir: Absolute path of git directory.
|
||||
objdir: Absolute path of directory to store git objects.
|
||||
worktree: Absolute path of git working tree.
|
||||
relpath: Relative path of git working tree to repo's top directory.
|
||||
revisionExpr: The `revision` attribute of manifest.xml's project element.
|
||||
@ -525,6 +552,7 @@ class Project(object):
|
||||
self.name = name
|
||||
self.remote = remote
|
||||
self.gitdir = gitdir.replace('\\', '/')
|
||||
self.objdir = objdir.replace('\\', '/')
|
||||
if worktree:
|
||||
self.worktree = worktree.replace('\\', '/')
|
||||
else:
|
||||
@ -551,17 +579,19 @@ class Project(object):
|
||||
|
||||
self.snapshots = {}
|
||||
self.copyfiles = []
|
||||
self.linkfiles = []
|
||||
self.annotations = []
|
||||
self.config = GitConfig.ForRepository(
|
||||
gitdir = self.gitdir,
|
||||
defaults = self.manifest.globalConfig)
|
||||
|
||||
if self.worktree:
|
||||
self.work_git = self._GitGetByExec(self, bare=False)
|
||||
self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
|
||||
else:
|
||||
self.work_git = None
|
||||
self.bare_git = self._GitGetByExec(self, bare=True)
|
||||
self.bare_git = self._GitGetByExec(self, bare=True, gitdir=gitdir)
|
||||
self.bare_ref = GitRefs(gitdir)
|
||||
self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=objdir)
|
||||
self.dest_branch = dest_branch
|
||||
|
||||
# This will be filled in if a project is later identified to be the
|
||||
@ -982,15 +1012,62 @@ class Project(object):
|
||||
|
||||
## Sync ##
|
||||
|
||||
def _ExtractArchive(self, tarpath, path=None):
|
||||
"""Extract the given tar on its current location
|
||||
|
||||
Args:
|
||||
- tarpath: The path to the actual tar file
|
||||
|
||||
"""
|
||||
try:
|
||||
with tarfile.open(tarpath, 'r') as tar:
|
||||
tar.extractall(path=path)
|
||||
return True
|
||||
except (IOError, tarfile.TarError) as e:
|
||||
print("error: Cannot extract archive %s: "
|
||||
"%s" % (tarpath, str(e)), file=sys.stderr)
|
||||
return False
|
||||
|
||||
def Sync_NetworkHalf(self,
|
||||
quiet=False,
|
||||
is_new=None,
|
||||
current_branch_only=False,
|
||||
clone_bundle=True,
|
||||
no_tags=False):
|
||||
no_tags=False,
|
||||
archive=False):
|
||||
"""Perform only the network IO portion of the sync process.
|
||||
Local working directory/branch state is not affected.
|
||||
"""
|
||||
if archive and not isinstance(self, MetaProject):
|
||||
if self.remote.url.startswith(('http://', 'https://')):
|
||||
print("error: %s: Cannot fetch archives from http/https "
|
||||
"remotes." % self.name, file=sys.stderr)
|
||||
return False
|
||||
|
||||
name = self.relpath.replace('\\', '/')
|
||||
name = name.replace('/', '_')
|
||||
tarpath = '%s.tar' % name
|
||||
topdir = self.manifest.topdir
|
||||
|
||||
try:
|
||||
self._FetchArchive(tarpath, cwd=topdir)
|
||||
except GitError as e:
|
||||
print('error: %s' % str(e), file=sys.stderr)
|
||||
return False
|
||||
|
||||
# From now on, we only need absolute tarpath
|
||||
tarpath = os.path.join(topdir, tarpath)
|
||||
|
||||
if not self._ExtractArchive(tarpath, path=topdir):
|
||||
return False
|
||||
try:
|
||||
os.remove(tarpath)
|
||||
except OSError as e:
|
||||
print("warn: Cannot remove archive %s: "
|
||||
"%s" % (tarpath, str(e)), file=sys.stderr)
|
||||
self._CopyAndLinkFiles()
|
||||
return True
|
||||
|
||||
if is_new is None:
|
||||
is_new = not self.Exists
|
||||
if is_new:
|
||||
@ -1026,6 +1103,13 @@ class Project(object):
|
||||
elif self.manifest.default.sync_c:
|
||||
current_branch_only = True
|
||||
|
||||
is_sha1 = False
|
||||
if ID_RE.match(self.revisionExpr) is not None:
|
||||
is_sha1 = True
|
||||
if is_sha1 and self._CheckForSha1():
|
||||
# Don't need to fetch since we already have this revision
|
||||
return True
|
||||
|
||||
if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
|
||||
current_branch_only=current_branch_only,
|
||||
no_tags=no_tags):
|
||||
@ -1044,9 +1128,28 @@ class Project(object):
|
||||
def PostRepoUpgrade(self):
|
||||
self._InitHooks()
|
||||
|
||||
def _CopyFiles(self):
|
||||
def _CopyAndLinkFiles(self):
|
||||
for copyfile in self.copyfiles:
|
||||
copyfile._Copy()
|
||||
for linkfile in self.linkfiles:
|
||||
linkfile._Link()
|
||||
|
||||
def GetCommitRevisionId(self):
|
||||
"""Get revisionId of a commit.
|
||||
|
||||
Use this method instead of GetRevisionId to get the id of the commit rather
|
||||
than the id of the current git object (for example, a tag)
|
||||
|
||||
"""
|
||||
if not self.revisionExpr.startswith(R_TAGS):
|
||||
return self.GetRevisionId(self._allrefs)
|
||||
|
||||
try:
|
||||
return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
|
||||
except GitError:
|
||||
raise ManifestInvalidRevisionError(
|
||||
'revision %s in %s not found' % (self.revisionExpr,
|
||||
self.name))
|
||||
|
||||
def GetRevisionId(self, all_refs=None):
|
||||
if self.revisionId:
|
||||
@ -1069,15 +1172,15 @@ class Project(object):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
Network access is not required.
|
||||
"""
|
||||
self._InitWorkTree()
|
||||
all_refs = self.bare_ref.all
|
||||
self.CleanPublishedCache(all_refs)
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
|
||||
def _doff():
|
||||
self._FastForward(revid)
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
|
||||
self._InitWorkTree()
|
||||
head = self.work_git.GetHead()
|
||||
if head.startswith(R_HEADS):
|
||||
branch = head[len(R_HEADS):]
|
||||
@ -1112,7 +1215,7 @@ class Project(object):
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
return
|
||||
|
||||
if head == revid:
|
||||
@ -1134,7 +1237,7 @@ class Project(object):
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
return
|
||||
|
||||
upstream_gain = self._revlist(not_rev(HEAD), revid)
|
||||
@ -1165,7 +1268,7 @@ class Project(object):
|
||||
last_mine = None
|
||||
cnt_mine = 0
|
||||
for commit in local_changes:
|
||||
commit_id, committer_email = commit.split(' ', 1)
|
||||
commit_id, committer_email = commit.decode('utf-8').split(' ', 1)
|
||||
if committer_email == self.UserEmail:
|
||||
last_mine = commit_id
|
||||
cnt_mine += 1
|
||||
@ -1207,12 +1310,12 @@ class Project(object):
|
||||
if cnt_mine > 0 and self.rebase:
|
||||
def _dorebase():
|
||||
self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
syncbuf.later2(self, _dorebase)
|
||||
elif local_changes:
|
||||
try:
|
||||
self._ResetHard(revid)
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
except GitError as e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
@ -1225,6 +1328,12 @@ class Project(object):
|
||||
abssrc = os.path.join(self.worktree, src)
|
||||
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
|
||||
|
||||
def AddLinkFile(self, src, dest, absdest):
|
||||
# dest should already be an absolute path, but src is project relative
|
||||
# make src an absolute path
|
||||
abssrc = os.path.join(self.worktree, src)
|
||||
self.linkfiles.append(_LinkFile(src, dest, abssrc, absdest))
|
||||
|
||||
def AddAnnotation(self, name, value, keep):
|
||||
self.annotations.append(_Annotation(name, value, keep))
|
||||
|
||||
@ -1544,11 +1653,13 @@ class Project(object):
|
||||
return result
|
||||
for rev, path, url in self._GetSubmodules():
|
||||
name = self.manifest.GetSubprojectName(self, path)
|
||||
project = self.manifest.projects.get(name)
|
||||
relpath, worktree, gitdir, objdir = \
|
||||
self.manifest.GetSubprojectPaths(self, name, path)
|
||||
project = self.manifest.paths.get(relpath)
|
||||
if project:
|
||||
result.extend(project.GetDerivedSubprojects())
|
||||
continue
|
||||
relpath, worktree, gitdir = self.manifest.GetSubprojectPaths(self, path)
|
||||
|
||||
remote = RemoteSpec(self.remote.name,
|
||||
url = url,
|
||||
review = self.remote.review)
|
||||
@ -1556,6 +1667,7 @@ class Project(object):
|
||||
name = name,
|
||||
remote = remote,
|
||||
gitdir = gitdir,
|
||||
objdir = objdir,
|
||||
worktree = worktree,
|
||||
relpath = relpath,
|
||||
revisionExpr = self.revisionExpr,
|
||||
@ -1572,6 +1684,28 @@ class Project(object):
|
||||
|
||||
|
||||
## Direct Git Commands ##
|
||||
def _CheckForSha1(self):
|
||||
try:
|
||||
# if revision (sha or tag) is not present then following function
|
||||
# throws an error.
|
||||
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
|
||||
return True
|
||||
except GitError:
|
||||
# There is no such persistent revision. We have to fetch it.
|
||||
return False
|
||||
|
||||
def _FetchArchive(self, tarpath, cwd=None):
|
||||
cmd = ['archive', '-v', '-o', tarpath]
|
||||
cmd.append('--remote=%s' % self.remote.url)
|
||||
cmd.append('--prefix=%s/' % self.relpath)
|
||||
cmd.append(self.revisionExpr)
|
||||
|
||||
command = GitCommand(self, cmd, cwd=cwd,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
|
||||
if command.Wait() != 0:
|
||||
raise GitError('git archive %s: %s' % (self.name, command.stderr))
|
||||
|
||||
def _RemoteFetch(self, name=None,
|
||||
current_branch_only=False,
|
||||
@ -1582,16 +1716,19 @@ class Project(object):
|
||||
|
||||
is_sha1 = False
|
||||
tag_name = None
|
||||
depth = None
|
||||
|
||||
def CheckForSha1():
|
||||
try:
|
||||
# if revision (sha or tag) is not present then following function
|
||||
# throws an error.
|
||||
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
|
||||
return True
|
||||
except GitError:
|
||||
# There is no such persistent revision. We have to fetch it.
|
||||
return False
|
||||
# The depth should not be used when fetching to a mirror because
|
||||
# it will result in a shallow repository that cannot be cloned or
|
||||
# fetched from.
|
||||
if not self.manifest.IsMirror:
|
||||
if self.clone_depth:
|
||||
depth = self.clone_depth
|
||||
else:
|
||||
depth = self.manifest.manifestProject.config.GetString('repo.depth')
|
||||
|
||||
if depth:
|
||||
current_branch_only = True
|
||||
|
||||
if current_branch_only:
|
||||
if ID_RE.match(self.revisionExpr) is not None:
|
||||
@ -1601,7 +1738,7 @@ class Project(object):
|
||||
tag_name = self.revisionExpr[len(R_TAGS):]
|
||||
|
||||
if is_sha1 or tag_name is not None:
|
||||
if CheckForSha1():
|
||||
if self._CheckForSha1():
|
||||
return True
|
||||
if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)):
|
||||
current_branch_only = False
|
||||
@ -1656,10 +1793,6 @@ class Project(object):
|
||||
|
||||
# The --depth option only affects the initial fetch; after that we'll do
|
||||
# full fetches of changes.
|
||||
if self.clone_depth:
|
||||
depth = self.clone_depth
|
||||
else:
|
||||
depth = self.manifest.manifestProject.config.GetString('repo.depth')
|
||||
if depth and initial:
|
||||
cmd.append('--depth=%s' % depth)
|
||||
|
||||
@ -1669,14 +1802,15 @@ class Project(object):
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(name)
|
||||
|
||||
# If using depth then we should not get all the tags since they may
|
||||
# be outside of the depth.
|
||||
if no_tags or depth:
|
||||
cmd.append('--no-tags')
|
||||
else:
|
||||
cmd.append('--tags')
|
||||
|
||||
if not current_branch_only:
|
||||
# Fetch whole repo
|
||||
# If using depth then we should not get all the tags since they may
|
||||
# be outside of the depth.
|
||||
if no_tags or depth:
|
||||
cmd.append('--no-tags')
|
||||
else:
|
||||
cmd.append('--tags')
|
||||
cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
|
||||
elif tag_name is not None:
|
||||
cmd.append('tag')
|
||||
@ -1703,6 +1837,11 @@ class Project(object):
|
||||
time.sleep(random.randint(30, 45))
|
||||
|
||||
if initial:
|
||||
# Ensure that some refs exist. Otherwise, we probably aren't looking
|
||||
# at a real git repository and may have a bad url.
|
||||
if not self.bare_ref.all:
|
||||
ok = False
|
||||
|
||||
if alt_dir:
|
||||
if old_packed != '':
|
||||
_lwrite(packed_refs, old_packed)
|
||||
@ -1714,7 +1853,7 @@ class Project(object):
|
||||
# We just synced the upstream given branch; verify we
|
||||
# got what we wanted, else trigger a second run of all
|
||||
# refs.
|
||||
if not CheckForSha1():
|
||||
if not self._CheckForSha1():
|
||||
return self._RemoteFetch(name=name, current_branch_only=False,
|
||||
initial=False, quiet=quiet, alt_dir=alt_dir)
|
||||
|
||||
@ -1840,11 +1979,11 @@ class Project(object):
|
||||
cookiefile = line[len(prefix):]
|
||||
break
|
||||
if p.wait():
|
||||
line = iter(p.stderr).next()
|
||||
if ' -print_config' in line:
|
||||
err_msg = p.stderr.read()
|
||||
if ' -print_config' in err_msg:
|
||||
pass # Persistent proxy doesn't support -print_config.
|
||||
else:
|
||||
print(line + p.stderr.read(), file=sys.stderr)
|
||||
print(err_msg, file=sys.stderr)
|
||||
if cookiefile:
|
||||
return cookiefile
|
||||
except OSError as e:
|
||||
@ -1905,8 +2044,17 @@ class Project(object):
|
||||
|
||||
def _InitGitDir(self, mirror_git=None):
|
||||
if not os.path.exists(self.gitdir):
|
||||
os.makedirs(self.gitdir)
|
||||
self.bare_git.init()
|
||||
|
||||
# Initialize the bare repository, which contains all of the objects.
|
||||
if not os.path.exists(self.objdir):
|
||||
os.makedirs(self.objdir)
|
||||
self.bare_objdir.init()
|
||||
|
||||
# If we have a separate directory to hold refs, initialize it as well.
|
||||
if self.objdir != self.gitdir:
|
||||
os.makedirs(self.gitdir)
|
||||
self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False,
|
||||
copy_all=True)
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
ref_dir = mp.config.GetString('repo.reference') or ''
|
||||
@ -1955,7 +2103,7 @@ class Project(object):
|
||||
self._InitHooks()
|
||||
|
||||
def _InitHooks(self):
|
||||
hooks = self._gitdir_path('hooks')
|
||||
hooks = os.path.realpath(self._gitdir_path('hooks'))
|
||||
if not os.path.exists(hooks):
|
||||
os.makedirs(hooks)
|
||||
for stock_hook in _ProjectHooks():
|
||||
@ -2022,33 +2170,61 @@ class Project(object):
|
||||
msg = 'manifest set to %s' % self.revisionExpr
|
||||
self.bare_git.symbolic_ref('-m', msg, ref, dst)
|
||||
|
||||
def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all):
|
||||
"""Update |dotgit| to reference |gitdir|, using symlinks where possible.
|
||||
|
||||
Args:
|
||||
gitdir: The bare git repository. Must already be initialized.
|
||||
dotgit: The repository you would like to initialize.
|
||||
share_refs: If true, |dotgit| will store its refs under |gitdir|.
|
||||
Only one work tree can store refs under a given |gitdir|.
|
||||
copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
|
||||
This saves you the effort of initializing |dotgit| yourself.
|
||||
"""
|
||||
# These objects can be shared between several working trees.
|
||||
symlink_files = ['description', 'info']
|
||||
symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
|
||||
if share_refs:
|
||||
# These objects can only be used by a single working tree.
|
||||
symlink_files += ['config', 'packed-refs', 'shallow']
|
||||
symlink_dirs += ['logs', 'refs']
|
||||
to_symlink = symlink_files + symlink_dirs
|
||||
|
||||
to_copy = []
|
||||
if copy_all:
|
||||
to_copy = os.listdir(gitdir)
|
||||
|
||||
for name in set(to_copy).union(to_symlink):
|
||||
try:
|
||||
src = os.path.realpath(os.path.join(gitdir, name))
|
||||
dst = os.path.realpath(os.path.join(dotgit, name))
|
||||
|
||||
if os.path.lexists(dst) and not os.path.islink(dst):
|
||||
raise GitError('cannot overwrite a local work tree')
|
||||
|
||||
# If the source dir doesn't exist, create an empty dir.
|
||||
if name in symlink_dirs and not os.path.lexists(src):
|
||||
os.makedirs(src)
|
||||
|
||||
if name in to_symlink:
|
||||
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
|
||||
elif copy_all and not os.path.islink(dst):
|
||||
if os.path.isdir(src):
|
||||
shutil.copytree(src, dst)
|
||||
elif os.path.isfile(src):
|
||||
shutil.copy(src, dst)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
raise
|
||||
|
||||
def _InitWorkTree(self):
|
||||
dotgit = os.path.join(self.worktree, '.git')
|
||||
if not os.path.exists(dotgit):
|
||||
os.makedirs(dotgit)
|
||||
|
||||
for name in ['config',
|
||||
'description',
|
||||
'hooks',
|
||||
'info',
|
||||
'logs',
|
||||
'objects',
|
||||
'packed-refs',
|
||||
'refs',
|
||||
'rr-cache',
|
||||
'svn']:
|
||||
try:
|
||||
src = os.path.join(self.gitdir, name)
|
||||
dst = os.path.join(dotgit, name)
|
||||
if os.path.islink(dst) or not os.path.exists(dst):
|
||||
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
|
||||
else:
|
||||
raise GitError('cannot overwrite a local work tree')
|
||||
except OSError as e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
raise
|
||||
self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
|
||||
copy_all=False)
|
||||
|
||||
_lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
|
||||
|
||||
@ -2058,14 +2234,10 @@ class Project(object):
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError("cannot initialize work tree")
|
||||
|
||||
rr_cache = os.path.join(self.gitdir, 'rr-cache')
|
||||
if not os.path.exists(rr_cache):
|
||||
os.makedirs(rr_cache)
|
||||
|
||||
self._CopyFiles()
|
||||
self._CopyAndLinkFiles()
|
||||
|
||||
def _gitdir_path(self, path):
|
||||
return os.path.join(self.gitdir, path)
|
||||
return os.path.realpath(os.path.join(self.gitdir, path))
|
||||
|
||||
def _revlist(self, *args, **kw):
|
||||
a = []
|
||||
@ -2077,10 +2249,48 @@ class Project(object):
|
||||
def _allrefs(self):
|
||||
return self.bare_ref.all
|
||||
|
||||
def _getLogs(self, rev1, rev2, oneline=False, color=True):
|
||||
"""Get logs between two revisions of this project."""
|
||||
comp = '..'
|
||||
if rev1:
|
||||
revs = [rev1]
|
||||
if rev2:
|
||||
revs.extend([comp, rev2])
|
||||
cmd = ['log', ''.join(revs)]
|
||||
out = DiffColoring(self.config)
|
||||
if out.is_on and color:
|
||||
cmd.append('--color')
|
||||
if oneline:
|
||||
cmd.append('--oneline')
|
||||
|
||||
try:
|
||||
log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
|
||||
if log.Wait() == 0:
|
||||
return log.stdout
|
||||
except GitError:
|
||||
# worktree may not exist if groups changed for example. In that case,
|
||||
# try in gitdir instead.
|
||||
if not os.path.exists(self.worktree):
|
||||
return self.bare_git.log(*cmd[1:])
|
||||
else:
|
||||
raise
|
||||
return None
|
||||
|
||||
def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True):
|
||||
"""Get the list of logs from this revision to given revisionId"""
|
||||
logs = {}
|
||||
selfId = self.GetRevisionId(self._allrefs)
|
||||
toId = toProject.GetRevisionId(toProject._allrefs)
|
||||
|
||||
logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color)
|
||||
logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color)
|
||||
return logs
|
||||
|
||||
class _GitGetByExec(object):
|
||||
def __init__(self, project, bare):
|
||||
def __init__(self, project, bare, gitdir):
|
||||
self._project = project
|
||||
self._bare = bare
|
||||
self._gitdir = gitdir
|
||||
|
||||
def LsOthers(self):
|
||||
p = GitCommand(self._project,
|
||||
@ -2089,6 +2299,7 @@ class Project(object):
|
||||
'--others',
|
||||
'--exclude-standard'],
|
||||
bare = False,
|
||||
gitdir=self._gitdir,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
if p.Wait() == 0:
|
||||
@ -2104,6 +2315,7 @@ class Project(object):
|
||||
cmd.extend(args)
|
||||
p = GitCommand(self._project,
|
||||
cmd,
|
||||
gitdir=self._gitdir,
|
||||
bare = False,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
@ -2154,8 +2366,8 @@ class Project(object):
|
||||
path = os.path.join(self._project.worktree, '.git', HEAD)
|
||||
try:
|
||||
fd = open(path, 'rb')
|
||||
except IOError:
|
||||
raise NoManifestException(path)
|
||||
except IOError as e:
|
||||
raise NoManifestException(path, str(e))
|
||||
try:
|
||||
line = fd.read()
|
||||
finally:
|
||||
@ -2213,6 +2425,7 @@ class Project(object):
|
||||
p = GitCommand(self._project,
|
||||
cmdv,
|
||||
bare = self._bare,
|
||||
gitdir=self._gitdir,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
r = []
|
||||
@ -2265,6 +2478,7 @@ class Project(object):
|
||||
p = GitCommand(self._project,
|
||||
cmdv,
|
||||
bare = self._bare,
|
||||
gitdir=self._gitdir,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
if p.Wait() != 0:
|
||||
@ -2398,6 +2612,7 @@ class MetaProject(Project):
|
||||
manifest = manifest,
|
||||
name = name,
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = worktree,
|
||||
remote = RemoteSpec('origin'),
|
||||
relpath = '.repo/%s' % name,
|
||||
|
65
repo
65
repo
@ -20,7 +20,7 @@ REPO_REV = 'stable'
|
||||
# limitations under the License.
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 20)
|
||||
VERSION = (1, 21)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1, 2)
|
||||
@ -110,9 +110,11 @@ REPO_MAIN = S_repo + '/main.py' # main script
|
||||
MIN_PYTHON_VERSION = (2, 6) # minimum supported python version
|
||||
|
||||
|
||||
import errno
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
@ -138,10 +140,9 @@ def _print(*objects, **kwargs):
|
||||
# Python version check
|
||||
ver = sys.version_info
|
||||
if ver[0] == 3:
|
||||
_print('error: Python 3 support is not fully implemented in repo yet.\n'
|
||||
_print('warning: Python 3 support is currently experimental. YMMV.\n'
|
||||
'Please use Python 2.6 - 2.7 instead.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
|
||||
_print('error: Python version %s unsupported.\n'
|
||||
'Please use Python 2.6 - 2.7 instead.'
|
||||
@ -181,6 +182,10 @@ group.add_option('--reference',
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
group.add_option('--archive',
|
||||
dest='archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
'each project. See git archive.')
|
||||
group.add_option('-g', '--groups',
|
||||
dest='groups', default='default',
|
||||
help='restrict manifest projects to ones with specified '
|
||||
@ -240,10 +245,10 @@ def _Init(args):
|
||||
_print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
if not os.path.isdir(repodir):
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError as e:
|
||||
try:
|
||||
os.mkdir(repodir)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s'
|
||||
% (repodir, e.strerror), file=sys.stderr)
|
||||
# Don't raise CloneFailure; that would delete the
|
||||
@ -274,6 +279,20 @@ def _Init(args):
|
||||
raise
|
||||
|
||||
|
||||
def ParseGitVersion(ver_str):
|
||||
if not ver_str.startswith('git version '):
|
||||
return None
|
||||
|
||||
num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
|
||||
to_tuple = []
|
||||
for num_str in num_ver_str.split('.')[:3]:
|
||||
if num_str.isdigit():
|
||||
to_tuple.append(int(num_str))
|
||||
else:
|
||||
to_tuple.append(0)
|
||||
return tuple(to_tuple)
|
||||
|
||||
|
||||
def _CheckGitVersion():
|
||||
cmd = [GIT, '--version']
|
||||
try:
|
||||
@ -291,12 +310,11 @@ def _CheckGitVersion():
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
|
||||
if not ver_str.startswith('git version '):
|
||||
ver_act = ParseGitVersion(ver_str)
|
||||
if ver_act is None:
|
||||
_print('error: "%s" unsupported' % ver_str, file=sys.stderr)
|
||||
raise CloneFailure()
|
||||
|
||||
ver_str = ver_str[len('git version '):].strip()
|
||||
ver_act = tuple(map(int, ver_str.split('.')[0:3]))
|
||||
if ver_act < MIN_GIT_VERSION:
|
||||
need = '.'.join(map(str, MIN_GIT_VERSION))
|
||||
_print('fatal: git %s or later required' % need, file=sys.stderr)
|
||||
@ -322,18 +340,18 @@ def NeedSetupGnuPG():
|
||||
|
||||
|
||||
def SetupGnuPG(quiet):
|
||||
if not os.path.isdir(home_dot_repo):
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError as e:
|
||||
try:
|
||||
os.mkdir(home_dot_repo)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s'
|
||||
% (home_dot_repo, e.strerror), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isdir(gpg_dir):
|
||||
try:
|
||||
os.mkdir(gpg_dir, stat.S_IRWXU)
|
||||
except OSError as e:
|
||||
try:
|
||||
os.mkdir(gpg_dir, stat.S_IRWXU)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
_print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
@ -724,12 +742,7 @@ def main(orig_args):
|
||||
try:
|
||||
_Init(args)
|
||||
except CloneFailure:
|
||||
for root, dirs, files in os.walk(repodir, topdown=False):
|
||||
for name in files:
|
||||
os.remove(os.path.join(root, name))
|
||||
for name in dirs:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(repodir)
|
||||
shutil.rmtree(repodir, ignore_errors=True)
|
||||
sys.exit(1)
|
||||
repo_main, rel_repo_dir = _FindRepo()
|
||||
else:
|
||||
@ -739,7 +752,7 @@ def main(orig_args):
|
||||
repo_main = my_main
|
||||
|
||||
ver_str = '.'.join(map(str, VERSION))
|
||||
me = [repo_main,
|
||||
me = [sys.executable, repo_main,
|
||||
'--repo-dir=%s' % rel_repo_dir,
|
||||
'--wrapper-version=%s' % ver_str,
|
||||
'--wrapper-path=%s' % wrapper_path,
|
||||
@ -747,7 +760,7 @@ def main(orig_args):
|
||||
me.extend(orig_args)
|
||||
me.extend(extra_args)
|
||||
try:
|
||||
os.execv(repo_main, me)
|
||||
os.execv(sys.executable, me)
|
||||
except OSError as e:
|
||||
_print("fatal: unable to start %s" % repo_main, file=sys.stderr)
|
||||
_print("fatal: %s" % e, file=sys.stderr)
|
||||
|
@ -139,7 +139,7 @@ is shown, then the branch appears in all projects.
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
in_type = 'in'
|
||||
for b in i.projects:
|
||||
paths.append(b.project.relpath)
|
||||
|
195 subcmds/diffmanifests.py Normal file
@@ -0,0 +1,195 @@
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from color import Coloring
from command import PagedCommand
from manifest_xml import XmlManifest

class _Coloring(Coloring):
  def __init__(self, config):
    Coloring.__init__(self, config, "status")

class Diffmanifests(PagedCommand):
  """A command to see logs in projects represented by manifests

  This is used to see deeper differences between manifests. Where a simple
  diff would only show a diff of sha1s for example, this command will display
  the logs of the project between both sha1s, allowing the user to see the
  diff at a deeper level.
  """

  common = True
  helpSummary = "Manifest diff utility"
  helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""

  helpDescription = """
The %prog command shows differences between project revisions of manifest1 and
manifest2. If manifest2 is not specified, the current manifest.xml is used
instead. Both absolute and relative paths may be used for manifests. Relative
paths start from the project's ".repo/manifests" folder.

The --raw option displays the diff in a way that facilitates parsing: the
project pattern is <status> <path> <revision from> [<revision to>] and the
commit pattern is <status> <onelined log>, with the following status values:

  A = Added project
  R = Removed project
  C = Changed project
  U = Project with unreachable revision(s) (revision(s) not found)

for a project, and

  A = Added commit
  R = Removed commit

for a commit.

Only changed projects may contain commits. A commit status always starts with
a space and belongs to the last printed project.
Unreachable revisions may occur if a project is not up to date or if repo has
not been initialized with all the groups, in which case some projects won't be
synced and their revisions won't be found.

"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--raw',
|
||||
dest='raw', action='store_true',
|
||||
help='Display raw diff.')
|
||||
p.add_option('--no-color',
|
||||
dest='color', action='store_false', default=True,
|
||||
help='does not display the diff in color.')
|
||||
|
||||
def _printRawDiff(self, diff):
|
||||
for project in diff['added']:
|
||||
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project in diff['removed']:
|
||||
self.printText("R %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=True, color=False)
|
||||
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
def _printDiff(self, diff, color=True):
|
||||
if diff['added']:
|
||||
self.out.nl()
|
||||
self.printText('added projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['added']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['removed']:
|
||||
self.out.nl()
|
||||
self.printText('removed projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['removed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['changed']:
|
||||
self.out.nl()
|
||||
self.printText('changed projects : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' changed from ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' to ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=False, color=color)
|
||||
self.out.nl()
|
||||
|
||||
if diff['unreachable']:
|
||||
self.out.nl()
|
||||
self.printText('projects with unreachable revisions : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printProject('\t%s ' % (project.relpath))
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' or ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.printText(' not found')
|
||||
self.out.nl()
|
||||
|
||||
def _printLogs(self, project, otherProject, raw=False, color=True):
|
||||
logs = project.getAddedAndRemovedLogs(otherProject, oneline=True,
|
||||
color=color)
|
||||
if logs['removed']:
|
||||
removedLogs = logs['removed'].split('\n')
|
||||
for log in removedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' R ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printRemoved('\t\t[-] ')
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
if logs['added']:
|
||||
addedLogs = logs['added'].split('\n')
|
||||
for log in addedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' A ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printAdded('\t\t[+] ')
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not args or len(args) > 2:
|
||||
self.Usage()
|
||||
|
||||
self.out = _Coloring(self.manifest.globalConfig)
|
||||
self.printText = self.out.nofmt_printer('text')
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer('project', attr = 'bold')
|
||||
self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
|
||||
self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
|
||||
self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
|
||||
else:
|
||||
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||
|
||||
manifest1 = XmlManifest(self.manifest.repodir)
|
||||
manifest1.Override(args[0])
|
||||
if len(args) == 1:
|
||||
manifest2 = self.manifest
|
||||
else:
|
||||
manifest2 = XmlManifest(self.manifest.repodir)
|
||||
manifest2.Override(args[1])
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(diff)
|
||||
else:
|
||||
self._printDiff(diff, color=opt.color)
|
@ -18,6 +18,7 @@ import re
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from error import GitError
|
||||
|
||||
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
|
||||
|
||||
@ -87,7 +88,12 @@ makes it available in your project's local working directory.
|
||||
for c in dl.commits:
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit)
|
||||
try:
|
||||
project._CherryPick(dl.commit)
|
||||
except GitError:
|
||||
print('[%s] Could not complete the cherry-pick of %s' \
|
||||
% (project.name, dl.commit), file=sys.stderr)
|
||||
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
|
@ -87,6 +87,12 @@ revision to a locally executed git command, use REPO_LREV.
|
||||
REPO_RREV is the name of the revision from the manifest, exactly
|
||||
as written in the manifest.
|
||||
|
||||
REPO_COUNT is the total number of projects being iterated.
|
||||
|
||||
REPO_I is the current (1-based) iteration count. Can be used in
|
||||
conjunction with REPO_COUNT to add a simple progress indicator to your
|
||||
command.
|
||||
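As an illustrative one-liner (not from the source), REPO_I and REPO_COUNT can drive a simple progress prefix; REPO_PROJECT is assumed here to be the per-project name variable documented earlier in this help text:

repo forall -c 'echo "[$REPO_I/$REPO_COUNT] $REPO_PROJECT"'
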
|
||||
REPO__* are any extra environment variables, specified by the
|
||||
"annotation" element under any project element. This can be useful
|
||||
for differentiating trees based on user-specific criteria, or simply
|
||||
@ -178,7 +184,9 @@ without iterating through the remaining projects.
|
||||
else:
|
||||
projects = self.FindProjects(args)
|
||||
|
||||
for project in projects:
|
||||
os.environ['REPO_COUNT'] = str(len(projects))
|
||||
|
||||
for (cnt, project) in enumerate(projects):
|
||||
env = os.environ.copy()
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
@ -190,6 +198,7 @@ without iterating through the remaining projects.
|
||||
setenv('REPO_REMOTE', project.remote.name)
|
||||
setenv('REPO_LREV', project.GetRevisionId())
|
||||
setenv('REPO_RREV', project.revisionExpr)
|
||||
setenv('REPO_I', str(cnt + 1))
|
||||
for a in project.annotations:
|
||||
setenv("REPO__%s" % (a.name), a.value)
|
||||
|
||||
|
@ -99,6 +99,10 @@ to update the working directory files.
|
||||
g.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
g.add_option('--archive',
|
||||
dest='archive', action='store_true',
|
||||
help='checkout an archive instead of a git repository for '
|
||||
'each project. See git archive.')
|
||||
g.add_option('-g', '--groups',
|
||||
dest='groups', default='default',
|
||||
help='restrict manifest projects to ones with specified '
|
||||
@ -198,6 +202,16 @@ to update the working directory files.
|
||||
if opt.reference:
|
||||
m.config.SetString('repo.reference', opt.reference)
|
||||
|
||||
if opt.archive:
|
||||
if is_new:
|
||||
m.config.SetString('repo.archive', 'true')
|
||||
else:
|
||||
print('fatal: --archive is only supported when initializing a new '
|
||||
'workspace.', file=sys.stderr)
|
||||
print('Either delete the .repo folder in this workspace, or initialize '
|
||||
'in another location.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.mirror:
|
||||
if is_new:
|
||||
m.config.SetString('repo.mirror', 'true')
|
||||
@ -366,6 +380,13 @@ to update the working directory files.
|
||||
if opt.reference:
|
||||
opt.reference = os.path.expanduser(opt.reference)
|
||||
|
||||
# Check this here, else manifest will be tagged "not new" and init won't be
|
||||
# possible anymore without removing the .repo/manifests directory.
|
||||
if opt.archive and opt.mirror:
|
||||
print('fatal: --mirror and --archive cannot be used together.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
self._SyncManifest(opt)
|
||||
self._LinkManifest(opt.manifest_name)
|
||||
|
||||
|
@ -62,6 +62,9 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
if opt.interactive and not one_project:
|
||||
print('error: interactive rebase not supported with multiple projects',
|
||||
file=sys.stderr)
|
||||
if len(args) == 1:
|
||||
print('note: project %s is mapped to more than one path' % (args[0],),
|
||||
file=sys.stderr)
|
||||
return -1
|
||||
|
||||
for project in all_projects:
|
||||
|
134 subcmds/sync.py
@@ -58,13 +58,13 @@ except ImportError:
|
||||
|
||||
from git_command import GIT, git_require
|
||||
from git_refs import R_HEADS, HEAD
|
||||
from main import WrapperModule
|
||||
from project import Project
|
||||
from project import RemoteSpec
|
||||
from command import Command, MirrorSafeCommand
|
||||
from error import RepoChangedException, GitError, ManifestParseError
|
||||
from project import SyncBuffer
|
||||
from progress import Progress
|
||||
from wrapper import Wrapper
|
||||
|
||||
_ONE_DAY_S = 24 * 60 * 60
|
||||
|
||||
@ -219,9 +219,25 @@ later is required to fix a server side protocol bug.
|
||||
dest='repo_upgraded', action='store_true',
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
|
||||
def _FetchProjectList(self, opt, projects, *args, **kwargs):
|
||||
"""Main function of the fetch threads when jobs are > 1.
|
||||
|
||||
Delegates most of the work to _FetchHelper.
|
||||
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
projects: Projects to fetch.
|
||||
*args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
|
||||
_FetchHelper docstring for details.
|
||||
"""
|
||||
for project in projects:
|
||||
success = self._FetchHelper(opt, project, *args, **kwargs)
|
||||
if not success and not opt.force_broken:
|
||||
break
|
||||
|
||||
def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
|
||||
"""Fetch git objects for a single project.
|
||||
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
project: Project object for the project to fetch.
|
||||
@ -235,6 +251,9 @@ later is required to fix a server side protocol bug.
|
||||
can be started up.
|
||||
err_event: We'll set this event in the case of an error (after printing
|
||||
out info about the error).
|
||||
|
||||
Returns:
|
||||
Whether the fetch was successful.
|
||||
"""
|
||||
# We'll set to true once we've locked the lock.
|
||||
did_lock = False
|
||||
@ -253,7 +272,7 @@ later is required to fix a server side protocol bug.
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
no_tags=opt.no_tags)
|
||||
no_tags=opt.no_tags, archive=self.manifest.IsArchive)
|
||||
self._fetch_times.Set(project, time.time() - start)
|
||||
|
||||
# Lock around all the rest of the code, since printing, updating a set
|
||||
@ -281,67 +300,65 @@ later is required to fix a server side protocol bug.
|
||||
lock.release()
|
||||
sem.release()
|
||||
|
||||
return success
|
||||
|
||||
def _Fetch(self, projects, opt):
|
||||
fetched = set()
|
||||
lock = _threading.Lock()
|
||||
pm = Progress('Fetching projects', len(projects))
|
||||
|
||||
if self.jobs == 1:
|
||||
for project in projects:
|
||||
pm.update()
|
||||
if not opt.quiet:
|
||||
print('Fetching project %s' % project.name)
|
||||
if project.Sync_NetworkHalf(
|
||||
quiet=opt.quiet,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
clone_bundle=not opt.no_clone_bundle,
|
||||
no_tags=opt.no_tags):
|
||||
fetched.add(project.gitdir)
|
||||
else:
|
||||
print('error: Cannot fetch %s' % project.name, file=sys.stderr)
|
||||
if opt.force_broken:
|
||||
print('warn: --force-broken, continuing to sync', file=sys.stderr)
|
||||
else:
|
||||
sys.exit(1)
|
||||
else:
|
||||
threads = set()
|
||||
lock = _threading.Lock()
|
||||
sem = _threading.Semaphore(self.jobs)
|
||||
err_event = _threading.Event()
|
||||
for project in projects:
|
||||
# Check for any errors before starting any new threads.
|
||||
# ...we'll let existing threads finish, though.
|
||||
if err_event.isSet():
|
||||
break
|
||||
objdir_project_map = dict()
|
||||
for project in projects:
|
||||
objdir_project_map.setdefault(project.objdir, []).append(project)
|
||||
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target = self._FetchHelper,
|
||||
args = (opt,
|
||||
project,
|
||||
lock,
|
||||
fetched,
|
||||
pm,
|
||||
sem,
|
||||
err_event))
|
||||
threads = set()
|
||||
sem = _threading.Semaphore(self.jobs)
|
||||
err_event = _threading.Event()
|
||||
for project_list in objdir_project_map.values():
|
||||
# Check for any errors before running any more tasks.
|
||||
# ...we'll let existing threads finish, though.
|
||||
if err_event.isSet() and not opt.force_broken:
|
||||
break
|
||||
|
||||
sem.acquire()
|
||||
kwargs = dict(opt=opt,
|
||||
projects=project_list,
|
||||
lock=lock,
|
||||
fetched=fetched,
|
||||
pm=pm,
|
||||
sem=sem,
|
||||
err_event=err_event)
|
||||
if self.jobs > 1:
|
||||
t = _threading.Thread(target = self._FetchProjectList,
|
||||
kwargs = kwargs)
|
||||
# Ensure that Ctrl-C will not freeze the repo process.
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
t.start()
|
||||
else:
|
||||
self._FetchProjectList(**kwargs)
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
# If we saw an error, exit with code 1 so that other scripts can check.
|
||||
if err_event.isSet():
|
||||
print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
pm.end()
|
||||
self._fetch_times.Save()
|
||||
|
||||
self._GCProjects(projects)
|
||||
if not self.manifest.IsArchive:
|
||||
self._GCProjects(projects)
|
||||
|
||||
return fetched
|
||||
|
||||
def _GCProjects(self, projects):
|
||||
gitdirs = {}
|
||||
for project in projects:
|
||||
gitdirs[project.gitdir] = project.bare_git
|
||||
|
||||
has_dash_c = git_require((1, 7, 2))
|
||||
if multiprocessing and has_dash_c:
|
||||
cpu_count = multiprocessing.cpu_count()
|
||||
@ -350,8 +367,8 @@ later is required to fix a server side protocol bug.
|
||||
jobs = min(self.jobs, cpu_count)
|
||||
|
||||
if jobs < 2:
|
||||
for project in projects:
|
||||
project.bare_git.gc('--auto')
|
||||
for bare_git in gitdirs.values():
|
||||
bare_git.gc('--auto')
|
||||
return
|
||||
|
||||
config = {'pack.threads': cpu_count / jobs if cpu_count > jobs else 1}
|
||||
@ -360,10 +377,10 @@ later is required to fix a server side protocol bug.
|
||||
sem = _threading.Semaphore(jobs)
|
||||
err_event = _threading.Event()
|
||||
|
||||
def GC(project):
|
||||
def GC(bare_git):
|
||||
try:
|
||||
try:
|
||||
project.bare_git.gc('--auto', config=config)
|
||||
bare_git.gc('--auto', config=config)
|
||||
except GitError:
|
||||
err_event.set()
|
||||
except:
|
||||
@ -372,11 +389,11 @@ later is required to fix a server side protocol bug.
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
for project in projects:
|
||||
for bare_git in gitdirs.values():
|
||||
if err_event.isSet():
|
||||
break
|
||||
sem.acquire()
|
||||
t = _threading.Thread(target=GC, args=(project,))
|
||||
t = _threading.Thread(target=GC, args=(bare_git,))
|
||||
t.daemon = True
|
||||
threads.add(t)
|
||||
t.start()
|
||||
@ -416,12 +433,13 @@ later is required to fix a server side protocol bug.
|
||||
if path not in new_project_paths:
|
||||
# If the path has already been deleted, we don't need to do it
|
||||
if os.path.exists(self.manifest.topdir + '/' + path):
|
||||
gitdir = os.path.join(self.manifest.topdir, path, '.git')
|
||||
project = Project(
|
||||
manifest = self.manifest,
|
||||
name = path,
|
||||
remote = RemoteSpec('origin'),
|
||||
gitdir = os.path.join(self.manifest.topdir,
|
||||
path, '.git'),
|
||||
gitdir = gitdir,
|
||||
objdir = gitdir,
|
||||
worktree = os.path.join(self.manifest.topdir, path),
|
||||
relpath = path,
|
||||
revisionExpr = 'HEAD',
|
||||
@ -641,7 +659,7 @@ later is required to fix a server side protocol bug.
|
||||
previously_missing_set = missing_set
|
||||
fetched.update(self._Fetch(missing, opt))
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
if self.manifest.IsMirror or self.manifest.IsArchive:
|
||||
# bail out now, we have no working tree
|
||||
return
|
||||
|
||||
@ -666,10 +684,10 @@ later is required to fix a server side protocol bug.
|
||||
print(self.manifest.notice)
|
||||
|
||||
def _PostRepoUpgrade(manifest, quiet=False):
|
||||
wrapper = WrapperModule()
|
||||
wrapper = Wrapper()
|
||||
if wrapper.NeedSetupGnuPG():
|
||||
wrapper.SetupGnuPG(quiet)
|
||||
for project in manifest.projects.values():
|
||||
for project in manifest.projects:
|
||||
if project.Exists:
|
||||
project.PostRepoUpgrade()
|
||||
|
||||
@ -761,7 +779,7 @@ class _FetchTimes(object):
|
||||
def _Load(self):
|
||||
if self._times is None:
|
||||
try:
|
||||
f = open(self._path)
|
||||
f = open(self._path, 'rb')
|
||||
except IOError:
|
||||
self._times = {}
|
||||
return self._times
|
||||
|
@ -21,6 +21,7 @@ import sys
|
||||
from command import InteractiveCommand
|
||||
from editor import Editor
|
||||
from error import HookError, UploadError
|
||||
from git_command import GitCommand
|
||||
from project import RepoHook
|
||||
|
||||
from pyversion import is_python3
|
||||
@ -88,6 +89,11 @@ to "true" then repo will assume you always answer "y" at the prompt,
|
||||
and will not prompt you further. If it is set to "false" then repo
|
||||
will assume you always answer "n", and will abort.
|
||||
|
||||
review.URL.autoreviewer:
|
||||
|
||||
To automatically append a user or mailing list to reviews, you can set
|
||||
a per-project or global Git option to do so.
|
||||
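A hedged illustration of the new autoreviewer key (the host and addresses are invented; only the review.URL.autoreviewer key name comes from the hunk below):

git config review.https://gerrit.example.com/.autoreviewer 'jane@example.com, soc-reviewers@example.com'

repo would then append those entries to the reviewer list of every upload for that review host.
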
|
||||
review.URL.autocopy:
|
||||
|
||||
To automatically copy a user or mailing list to all uploaded reviews,
|
||||
@ -292,14 +298,20 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
|
||||
self._UploadAndReport(opt, todo, people)
|
||||
|
||||
def _AppendAutoCcList(self, branch, people):
|
||||
def _AppendAutoList(self, branch, people):
|
||||
"""
|
||||
Appends the list of reviewers in the git project's config.
|
||||
Appends the list of users in the CC list in the git project's config if a
|
||||
non-empty reviewer list was found.
|
||||
"""
|
||||
|
||||
name = branch.name
|
||||
project = branch.project
|
||||
|
||||
key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None:
|
||||
people[0].extend([entry.strip() for entry in raw_list.split(',')])
|
||||
|
||||
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
|
||||
raw_list = project.config.GetString(key)
|
||||
if not raw_list is None and len(people[0]) > 0:
|
||||
@ -322,7 +334,7 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
for branch in todo:
|
||||
try:
|
||||
people = copy.deepcopy(original_people)
|
||||
self._AppendAutoCcList(branch, people)
|
||||
self._AppendAutoList(branch, people)
|
||||
|
||||
# Check if there are local changes that may have been forgotten
|
||||
if branch.project.HasChanges():
|
||||
@ -344,7 +356,21 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||
|
||||
destination = opt.dest_branch or branch.project.dest_branch or branch.project.revisionExpr
|
||||
destination = opt.dest_branch or branch.project.dest_branch
|
||||
|
||||
# Make sure our local branch is not setup to track a different remote branch
|
||||
merge_branch = self._GetMergeBranch(branch.project)
|
||||
if destination:
|
||||
full_dest = 'refs/heads/%s' % destination
|
||||
if not opt.dest_branch and merge_branch and merge_branch != full_dest:
|
||||
print('merge branch %s does not match destination branch %s'
|
||||
% (merge_branch, full_dest))
|
||||
print('skipping upload.')
|
||||
print('Please use `--destination %s` if this is intentional'
|
||||
% destination)
|
||||
branch.uploaded = False
|
||||
continue
|
||||
|
||||
branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft, dest_branch=destination)
|
||||
branch.uploaded = True
|
||||
except UploadError as e:
|
||||
@ -379,6 +405,21 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
if have_errors:
|
||||
sys.exit(1)
|
||||
|
||||
def _GetMergeBranch(self, project):
|
||||
p = GitCommand(project,
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
p.Wait()
|
||||
local_branch = p.stdout.strip()
|
||||
p = GitCommand(project,
|
||||
['config', '--get', 'branch.%s.merge' % local_branch],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
p.Wait()
|
||||
merge_branch = p.stdout.strip()
|
||||
return merge_branch
|
||||
|
||||
def Execute(self, opt, args):
|
||||
project_list = self.GetProjects(args)
|
||||
pending = []
|
||||
@ -392,7 +433,16 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
for project in project_list:
|
||||
if opt.current_branch:
|
||||
cbr = project.CurrentBranch
|
||||
avail = [project.GetUploadableBranch(cbr)] if cbr else None
|
||||
up_branch = project.GetUploadableBranch(cbr)
|
||||
if up_branch:
|
||||
avail = [up_branch]
|
||||
else:
|
||||
avail = None
|
||||
print('ERROR: Current branch (%s) not uploadable. '
|
||||
'You may be able to type '
|
||||
'"git branch --set-upstream-to m/master" to fix '
|
||||
'your branch.' % str(cbr),
|
||||
file=sys.stderr)
|
||||
else:
|
||||
avail = project.GetUploadableBranches(branch)
|
||||
if avail:
|
||||
@ -402,8 +452,10 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
|
||||
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
|
||||
self.manifest.topdir, abort_if_user_denies=True)
|
||||
pending_proj_names = [project.name for (project, avail) in pending]
|
||||
pending_worktrees = [project.worktree for (project, avail) in pending]
|
||||
try:
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
|
||||
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
|
||||
worktree_list=pending_worktrees)
|
||||
except HookError as e:
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
return
|
||||
|
30 wrapper.py Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import imp
import os


def WrapperPath():
  return os.path.join(os.path.dirname(__file__), 'repo')

_wrapper_module = None
def Wrapper():
  global _wrapper_module
  if not _wrapper_module:
    _wrapper_module = imp.load_source('wrapper', WrapperPath())
  return _wrapper_module
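For context, a minimal usage sketch of the new module (assumptions: it runs from the repo source tree, and NeedSetupGnuPG, SetupGnuPG and ParseGitVersion are the launcher-script helpers shown earlier in this diff):

from wrapper import Wrapper

wrapper = Wrapper()               # loads the 'repo' launcher script once and caches it
if wrapper.NeedSetupGnuPG():      # helpers defined inside the launcher script
  wrapper.SetupGnuPG(quiet=True)
print(wrapper.ParseGitVersion('git version 1.9.1'))  # -> (1, 9, 1)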