Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00)

Compare commits (26 commits)
Commits in this range (SHA1):

37282b4b9c, 835cd6888f, 8ced8641c8, 2536f80625, 0ce6ca9c7b, 0fc3a39829,
c7c57e34db, 0d2b61f11d, 2bf9db0d3b, f00e0ce556, 1b5a4a0c5d, de8b2c4276,
727ee98a40, df14a70c45, f18cb76173, d3fd537ea5, 0048b69c03, 2b8db3ce3e,
5df6de075e, a0de6e8eab, 16614f86b3, 88443387b1, 99482ae58a, ec1df9b7f6,
06d029c1c8, b715b14807
@@ -74,7 +74,7 @@ class Command(object):
         project = all.get(arg)

         if not project:
-          path = os.path.abspath(arg)
+          path = os.path.abspath(arg).replace('\\', '/')

           if not by_path:
             by_path = dict()
@@ -82,13 +82,15 @@ class Command(object):
               by_path[p.worktree] = p

           if os.path.exists(path):
+            oldpath = None
             while path \
-              and path != '/' \
+              and path != oldpath \
               and path != self.manifest.topdir:
               try:
                 project = by_path[path]
                 break
               except KeyError:
+                oldpath = path
                 path = os.path.dirname(path)
           else:
             try:
@@ -20,11 +20,15 @@ A manifest XML file (e.g. 'default.xml') roughly conforms to the
 following DTD:

   <!DOCTYPE manifest [
-    <!ELEMENT manifest (remote*,
+    <!ELEMENT manifest (notice?,
+                        remote*,
                         default?,
                         manifest-server?,
                         remove-project*,
-                        project*)>
+                        project*,
+                        repo-hooks?)>

+    <!ELEMENT notice (#PCDATA)>
+
     <!ELEMENT remote (EMPTY)>
     <!ATTLIST remote name ID #REQUIRED>
@@ -46,6 +50,10 @@ following DTD:

     <!ELEMENT remove-project (EMPTY)>
     <!ATTLIST remove-project name CDATA #REQUIRED>
+
+    <!ELEMENT repo-hooks (EMPTY)>
+    <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
+    <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
   ]>

 A description of the elements and their attributes follows.
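Taken together, the two new elements let a manifest carry a free-form notice and name the project that provides repo-level hooks. A minimal sketch of a manifest using both (the remote, project, and hook names here are hypothetical, not part of the change above):

    <manifest>
      <notice>
        Please run the enabled presubmit hooks before uploading changes.
      </notice>
      <remote name="origin" fetch="ssh://git.example.com/" />
      <default remote="origin" revision="master" />
      <project name="tools/repohooks" path="tools/repohooks" />
      <project name="platform/app" path="app" />
      <!-- in-project names the project whose worktree holds the hook scripts;
           enabled-list is a space-separated list of hook types, e.g. pre-upload. -->
      <repo-hooks in-project="tools/repohooks" enabled-list="pre-upload" />
    </manifest>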
@@ -82,7 +82,7 @@ least one of these before using this command."""
       fd = None

     if re.compile("^.*[$ \t'].*$").match(editor):
-      args = [editor + ' "$@"']
+      args = [editor + ' "$@"', 'sh']
       shell = True
     else:
       args = [editor]
error.py (7 lines changed)

@@ -75,3 +75,10 @@ class RepoChangedException(Exception):
   """
   def __init__(self, extra_args=[]):
     self.extra_args = extra_args
+
+class HookError(Exception):
+  """Thrown if a 'repo-hook' could not be run.
+
+  The common case is that the file wasn't present when we tried to run it.
+  """
+  pass
@@ -112,6 +112,9 @@ def git_require(min_version, fail=False):
     sys.exit(1)
   return False

+def _setenv(env, name, value):
+  env[name] = value.encode()
+
 class GitCommand(object):
   def __init__(self,
                project,
@@ -124,7 +127,7 @@ class GitCommand(object):
                ssh_proxy = False,
                cwd = None,
                gitdir = None):
-    env = dict(os.environ)
+    env = os.environ.copy()

     for e in [REPO_TRACE,
               GIT_DIR,
@@ -137,10 +140,10 @@ class GitCommand(object):
         del env[e]

     if disable_editor:
-      env['GIT_EDITOR'] = ':'
+      _setenv(env, 'GIT_EDITOR', ':')
     if ssh_proxy:
-      env['REPO_SSH_SOCK'] = ssh_sock()
-      env['GIT_SSH'] = _ssh_proxy()
+      _setenv(env, 'REPO_SSH_SOCK', ssh_sock())
+      _setenv(env, 'GIT_SSH', _ssh_proxy())

     if project:
       if not cwd:
@@ -151,7 +154,7 @@ class GitCommand(object):
     command = [GIT]
     if bare:
       if gitdir:
-        env[GIT_DIR] = gitdir
+        _setenv(env, GIT_DIR, gitdir)
       cwd = None
     command.extend(cmdv)
git_config.py (149 lines changed)

@@ -18,7 +18,13 @@ import os
 import re
 import subprocess
 import sys
+try:
+  import threading as _threading
+except ImportError:
+  import dummy_threading as _threading
 import time
+import urllib2

 from signal import SIGTERM
 from urllib2 import urlopen, HTTPError
 from error import GitError, UploadError
@@ -257,9 +263,11 @@ class GitConfig(object):
       finally:
         fd.close()
     except IOError:
-      os.remove(self._pickle)
+      if os.path.exists(self._pickle):
+        os.remove(self._pickle)
     except cPickle.PickleError:
-      os.remove(self._pickle)
+      if os.path.exists(self._pickle):
+        os.remove(self._pickle)

   def _ReadGit(self):
     """
@@ -356,60 +364,110 @@ class RefSpec(object):
     return s


-_ssh_cache = {}
+_master_processes = []
+_master_keys = set()
 _ssh_master = True
+_master_keys_lock = None

+def init_ssh():
+  """Should be called once at the start of repo to init ssh master handling.
+
+  At the moment, all we do is to create our lock.
+  """
+  global _master_keys_lock
+  assert _master_keys_lock is None, "Should only call init_ssh once"
+  _master_keys_lock = _threading.Lock()
+
 def _open_ssh(host, port=None):
   global _ssh_master

-  if port is not None:
-    key = '%s:%s' % (host, port)
-  else:
-    key = host
-
-  if key in _ssh_cache:
-    return True
-
-  if not _ssh_master \
-  or 'GIT_SSH' in os.environ \
-  or sys.platform in ('win32', 'cygwin'):
-    # failed earlier, or cygwin ssh can't do this
-    #
-    return False
-
-  command = ['ssh',
-             '-o','ControlPath %s' % ssh_sock(),
-             '-M',
-             '-N',
-             host]
-
-  if port is not None:
-    command[3:3] = ['-p',str(port)]
-
+  # Acquire the lock.  This is needed to prevent opening multiple masters for
+  # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
+  # manifest <remote fetch="ssh://xyz"> specifies a different host from the
+  # one that was passed to repo init.
+  _master_keys_lock.acquire()
   try:
-    Trace(': %s', ' '.join(command))
-    p = subprocess.Popen(command)
-  except Exception, e:
-    _ssh_master = False
-    print >>sys.stderr, \
-      '\nwarn: cannot enable ssh control master for %s:%s\n%s' \
-      % (host,port, str(e))
-    return False
-
-  _ssh_cache[key] = p
-  time.sleep(1)
-  return True
+
+    # Check to see whether we already think that the master is running; if we
+    # think it's already running, return right away.
+    if port is not None:
+      key = '%s:%s' % (host, port)
+    else:
+      key = host
+
+    if key in _master_keys:
+      return True
+
+    if not _ssh_master \
+    or 'GIT_SSH' in os.environ \
+    or sys.platform in ('win32', 'cygwin'):
+      # failed earlier, or cygwin ssh can't do this
+      #
+      return False
+
+    # We will make two calls to ssh; this is the common part of both calls.
+    command_base = ['ssh',
+                    '-o','ControlPath %s' % ssh_sock(),
+                    host]
+    if port is not None:
+      command_base[1:1] = ['-p',str(port)]
+
+    # Since the key wasn't in _master_keys, we think that master isn't running.
+    # ...but before actually starting a master, we'll double-check.  This can
+    # be important because we can't tell that that 'git@myhost.com' is the same
+    # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
+    check_command = command_base + ['-O','check']
+    try:
+      Trace(': %s', ' '.join(check_command))
+      check_process = subprocess.Popen(check_command,
+                                       stdout=subprocess.PIPE,
+                                       stderr=subprocess.PIPE)
+      check_process.communicate() # read output, but ignore it...
+      isnt_running = check_process.wait()
+
+      if not isnt_running:
+        # Our double-check found that the master _was_ infact running.  Add to
+        # the list of keys.
+        _master_keys.add(key)
+        return True
+    except Exception:
+      # Ignore excpetions.  We we will fall back to the normal command and print
+      # to the log there.
+      pass
+
+    command = command_base[:1] + \
+              ['-M', '-N'] + \
+              command_base[1:]
+    try:
+      Trace(': %s', ' '.join(command))
+      p = subprocess.Popen(command)
+    except Exception, e:
+      _ssh_master = False
+      print >>sys.stderr, \
+        '\nwarn: cannot enable ssh control master for %s:%s\n%s' \
+        % (host,port, str(e))
+      return False
+
+    _master_processes.append(p)
+    _master_keys.add(key)
+    time.sleep(1)
+    return True
+  finally:
+    _master_keys_lock.release()

 def close_ssh():
+  global _master_keys_lock
+
   terminate_ssh_clients()

-  for key,p in _ssh_cache.iteritems():
+  for p in _master_processes:
     try:
       os.kill(p.pid, SIGTERM)
       p.wait()
     except OSError:
       pass
-  _ssh_cache.clear()
+  del _master_processes[:]
+  _master_keys.clear()

   d = ssh_sock(create=False)
   if d:
@@ -418,6 +476,9 @@ def close_ssh():
     except OSError:
       pass

+  # We're done with the lock, so we can delete it.
+  _master_keys_lock = None
+
 URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
 URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/')
@@ -504,23 +565,25 @@ class Remote(object):
        try:
          info = urlopen(u).read()
          if info == 'NOT_AVAILABLE':
-            raise UploadError('Upload over ssh unavailable')
+            raise UploadError('%s: SSH disabled' % self.review)
          if '<' in info:
            # Assume the server gave us some sort of HTML
            # response back, like maybe a login page.
            #
-            raise UploadError('Cannot read %s:\n%s' % (u, info))
+            raise UploadError('%s: Cannot parse response' % u)

          self._review_protocol = 'ssh'
          self._review_host = info.split(" ")[0]
          self._review_port = info.split(" ")[1]
+        except urllib2.URLError, e:
+          raise UploadError('%s: %s' % (self.review, e.reason[1]))
        except HTTPError, e:
          if e.code == 404:
            self._review_protocol = 'http-post'
            self._review_host = None
            self._review_port = None
          else:
-            raise UploadError('Cannot guess Gerrit version')
+            raise UploadError('Upload over ssh unavailable')

        REVIEW_CACHE[u] = (
          self._review_protocol,
2
git_ssh
2
git_ssh
@ -1,2 +1,2 @@
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
exec ssh -o "ControlPath $REPO_SSH_SOCK" "$@"
|
exec ssh -o "ControlMaster no" -o "ControlPath $REPO_SSH_SOCK" "$@"
|
||||||
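This appears intended to complement the master handling above: with ControlMaster disabled, each git-spawned ssh only reuses the control master that _open_ssh() already started on $REPO_SSH_SOCK and never tries to become a master itself.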
main.py (5 lines changed)

@@ -28,7 +28,7 @@ import re
 import sys

 from trace import SetTrace
-from git_config import close_ssh
+from git_config import init_ssh, close_ssh
 from command import InteractiveCommand
 from command import MirrorSafeCommand
 from command import PagedCommand
@@ -61,6 +61,8 @@ class _Repo(object):
   def __init__(self, repodir):
     self.repodir = repodir
     self.commands = all_commands
+    # add 'branch' as an alias for 'branches'
+    all_commands['branch'] = all_commands['branches']

   def _Run(self, argv):
     name = None
@@ -214,6 +216,7 @@ def _Main(argv):
   repo = _Repo(opt.repodir)
   try:
     try:
+      init_ssh()
       repo._Run(argv)
     finally:
       close_ssh()
manifest_xml.py (148 lines changed)

@@ -107,6 +107,15 @@ class XmlManifest(object):
     root = doc.createElement('manifest')
     doc.appendChild(root)

+    # Save out the notice.  There's a little bit of work here to give it the
+    # right whitespace, which assumes that the notice is automatically indented
+    # by 4 by minidom.
+    if self.notice:
+      notice_element = root.appendChild(doc.createElement('notice'))
+      notice_lines = self.notice.splitlines()
+      indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
+      notice_element.appendChild(doc.createTextNode(indented_notice))
+
     d = self.default
     sort_remotes = list(self.remotes.keys())
     sort_remotes.sort()
@@ -162,6 +171,14 @@ class XmlManifest(object):
           ce.setAttribute('dest', c.dest)
           e.appendChild(ce)

+    if self._repo_hooks_project:
+      root.appendChild(doc.createTextNode(''))
+      e = doc.createElement('repo-hooks')
+      e.setAttribute('in-project', self._repo_hooks_project.name)
+      e.setAttribute('enabled-list',
+                     ' '.join(self._repo_hooks_project.enabled_repo_hooks))
+      root.appendChild(e)
+
     doc.writexml(fd, '', '  ', '\n', 'UTF-8')

   @property
@@ -179,6 +196,16 @@ class XmlManifest(object):
     self._Load()
     return self._default

+  @property
+  def repo_hooks_project(self):
+    self._Load()
+    return self._repo_hooks_project
+
+  @property
+  def notice(self):
+    self._Load()
+    return self._notice
+
   @property
   def manifest_server(self):
     self._Load()
@@ -193,6 +220,8 @@ class XmlManifest(object):
     self._projects = {}
     self._remotes = {}
     self._default = None
+    self._repo_hooks_project = None
+    self._notice = None
     self.branch = None
     self._manifest_server = None

@@ -224,15 +253,15 @@ class XmlManifest(object):
   def _ParseManifest(self, is_root_file):
     root = xml.dom.minidom.parse(self.manifestFile)
     if not root or not root.childNodes:
-      raise ManifestParseError, \
-            "no root node in %s" % \
-            self.manifestFile
+      raise ManifestParseError(
+          "no root node in %s" %
+          self.manifestFile)

     config = root.childNodes[0]
     if config.nodeName != 'manifest':
-      raise ManifestParseError, \
-            "no <manifest> in %s" % \
-            self.manifestFile
+      raise ManifestParseError(
+          "no <manifest> in %s" %
+          self.manifestFile)

     for node in config.childNodes:
       if node.nodeName == 'remove-project':
@@ -240,47 +269,83 @@ class XmlManifest(object):
         try:
           del self._projects[name]
         except KeyError:
-          raise ManifestParseError, \
-                'project %s not found' % \
-                (name)
+          raise ManifestParseError(
+              'project %s not found' %
+              (name))
+
+        # If the manifest removes the hooks project, treat it as if it deleted
+        # the repo-hooks element too.
+        if self._repo_hooks_project and (self._repo_hooks_project.name == name):
+          self._repo_hooks_project = None

     for node in config.childNodes:
       if node.nodeName == 'remote':
         remote = self._ParseRemote(node)
         if self._remotes.get(remote.name):
-          raise ManifestParseError, \
-                'duplicate remote %s in %s' % \
-                (remote.name, self.manifestFile)
+          raise ManifestParseError(
+              'duplicate remote %s in %s' %
+              (remote.name, self.manifestFile))
         self._remotes[remote.name] = remote

     for node in config.childNodes:
       if node.nodeName == 'default':
         if self._default is not None:
-          raise ManifestParseError, \
-                'duplicate default in %s' % \
-                (self.manifestFile)
+          raise ManifestParseError(
+              'duplicate default in %s' %
+              (self.manifestFile))
         self._default = self._ParseDefault(node)
     if self._default is None:
       self._default = _Default()

+    for node in config.childNodes:
+      if node.nodeName == 'notice':
+        if self._notice is not None:
+          raise ManifestParseError(
+              'duplicate notice in %s' %
+              (self.manifestFile))
+        self._notice = self._ParseNotice(node)
+
     for node in config.childNodes:
       if node.nodeName == 'manifest-server':
         url = self._reqatt(node, 'url')
         if self._manifest_server is not None:
-          raise ManifestParseError, \
-                'duplicate manifest-server in %s' % \
-                (self.manifestFile)
+          raise ManifestParseError(
+              'duplicate manifest-server in %s' %
+              (self.manifestFile))
         self._manifest_server = url

     for node in config.childNodes:
       if node.nodeName == 'project':
         project = self._ParseProject(node)
         if self._projects.get(project.name):
-          raise ManifestParseError, \
-                'duplicate project %s in %s' % \
-                (project.name, self.manifestFile)
+          raise ManifestParseError(
+              'duplicate project %s in %s' %
+              (project.name, self.manifestFile))
         self._projects[project.name] = project

+    for node in config.childNodes:
+      if node.nodeName == 'repo-hooks':
+        # Get the name of the project and the (space-separated) list of enabled.
+        repo_hooks_project = self._reqatt(node, 'in-project')
+        enabled_repo_hooks = self._reqatt(node, 'enabled-list').split()
+
+        # Only one project can be the hooks project
+        if self._repo_hooks_project is not None:
+          raise ManifestParseError(
+              'duplicate repo-hooks in %s' %
+              (self.manifestFile))
+
+        # Store a reference to the Project.
+        try:
+          self._repo_hooks_project = self._projects[repo_hooks_project]
+        except KeyError:
+          raise ManifestParseError(
+              'project %s not found for repo-hooks' %
+              (repo_hooks_project))
+
+        # Store the enabled hooks in the Project object.
+        self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
+
   def _AddMetaProjectMirror(self, m):
     name = None
     m_url = m.GetRemote(m.remote.name).url
@@ -338,6 +403,45 @@ class XmlManifest(object):
       d.revisionExpr = None
     return d

+  def _ParseNotice(self, node):
+    """
+    reads a <notice> element from the manifest file
+
+    The <notice> element is distinct from other tags in the XML in that the
+    data is conveyed between the start and end tag (it's not an empty-element
+    tag).
+
+    The white space (carriage returns, indentation) for the notice element is
+    relevant and is parsed in a way that is based on how python docstrings work.
+    In fact, the code is remarkably similar to here:
+        http://www.python.org/dev/peps/pep-0257/
+    """
+    # Get the data out of the node...
+    notice = node.childNodes[0].data
+
+    # Figure out minimum indentation, skipping the first line (the same line
+    # as the <notice> tag)...
+    minIndent = sys.maxint
+    lines = notice.splitlines()
+    for line in lines[1:]:
+      lstrippedLine = line.lstrip()
+      if lstrippedLine:
+        indent = len(line) - len(lstrippedLine)
+        minIndent = min(indent, minIndent)
+
+    # Strip leading / trailing blank lines and also indentation.
+    cleanLines = [lines[0].strip()]
+    for line in lines[1:]:
+      cleanLines.append(line[minIndent:].rstrip())
+
+    # Clear completely blank lines from front and back...
+    while cleanLines and not cleanLines[0]:
+      del cleanLines[0]
+    while cleanLines and not cleanLines[-1]:
+      del cleanLines[-1]
+
+    return '\n'.join(cleanLines)
+
   def _ParseProject(self, node):
     """
     reads a <project> element from the manifest file
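The trimming above behaves like PEP 257 docstring processing: the first line is taken as-is, later lines lose their common indentation, and blank lines at either end are dropped. A small standalone sketch of the same idea (not the repo code itself; the sample notice text is made up):

    import sys

    def trim_notice(notice):
      # Mirrors the dedent logic shown above (Python 2 style).
      lines = notice.splitlines()
      minIndent = sys.maxint
      for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
          minIndent = min(minIndent, len(line) - len(stripped))
      clean = [lines[0].strip()] + [l[minIndent:].rstrip() for l in lines[1:]]
      while clean and not clean[0]:
        del clean[0]
      while clean and not clean[-1]:
        del clean[-1]
      return '\n'.join(clean)

    print trim_notice('\n   Please review the rules.\n     Indentation inside is kept.\n')
    # -> 'Please review the rules.\n  Indentation inside is kept.'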
@@ -373,7 +477,7 @@ class XmlManifest(object):
       worktree = None
       gitdir = os.path.join(self.topdir, '%s.git' % name)
     else:
-      worktree = os.path.join(self.topdir, path)
+      worktree = os.path.join(self.topdir, path).replace('\\', '/')
       gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)

     project = Project(manifest = self,
project.py (424 lines changed)

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import traceback
 import errno
 import filecmp
 import os
@@ -24,7 +25,7 @@ import urllib2
 from color import Coloring
 from git_command import GitCommand
 from git_config import GitConfig, IsId
-from error import GitError, ImportError, UploadError
+from error import GitError, HookError, ImportError, UploadError
 from error import ManifestInvalidRevisionError

 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
@@ -54,14 +55,25 @@ def not_rev(r):
 def sq(r):
   return "'" + r.replace("'", "'\''") + "'"

-hook_list = None
-def repo_hooks():
-  global hook_list
-  if hook_list is None:
+_project_hook_list = None
+def _ProjectHooks():
+  """List the hooks present in the 'hooks' directory.
+
+  These hooks are project hooks and are copied to the '.git/hooks' directory
+  of all subprojects.
+
+  This function caches the list of hooks (based on the contents of the
+  'repo/hooks' directory) on the first call.
+
+  Returns:
+    A list of absolute paths to all of the files in the hooks directory.
+  """
+  global _project_hook_list
+  if _project_hook_list is None:
     d = os.path.abspath(os.path.dirname(__file__))
     d = os.path.join(d , 'hooks')
-    hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
-  return hook_list
+    _project_hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
+  return _project_hook_list

 def relpath(dst, src):
   src = os.path.dirname(src)
@@ -111,7 +123,6 @@ class ReviewableBranch(object):
     self.project = project
     self.branch = branch
     self.base = base
-    self.replace_changes = None

   @property
   def name(self):
@@ -151,7 +162,6 @@ class ReviewableBranch(object):

   def UploadForReview(self, people, auto_topic=False):
     self.project.UploadForReview(self.name,
-                                 self.replace_changes,
                                  people,
                                  auto_topic=auto_topic)
@@ -225,6 +235,249 @@ class RemoteSpec(object):
     self.url = url
     self.review = review

+class RepoHook(object):
+  """A RepoHook contains information about a script to run as a hook.
+
+  Hooks are used to run a python script before running an upload (for instance,
+  to run presubmit checks).  Eventually, we may have hooks for other actions.
+
+  This shouldn't be confused with files in the 'repo/hooks' directory.  Those
+  files are copied into each '.git/hooks' folder for each project.  Repo-level
+  hooks are associated instead with repo actions.
+
+  Hooks are always python.  When a hook is run, we will load the hook into the
+  interpreter and execute its main() function.
+  """
+  def __init__(self,
+               hook_type,
+               hooks_project,
+               topdir,
+               abort_if_user_denies=False):
+    """RepoHook constructor.
+
+    Params:
+      hook_type: A string representing the type of hook.  This is also used
+          to figure out the name of the file containing the hook.  For
+          example: 'pre-upload'.
+      hooks_project: The project containing the repo hooks.  If you have a
+          manifest, this is manifest.repo_hooks_project.  OK if this is None,
+          which will make the hook a no-op.
+      topdir: Repo's top directory (the one containing the .repo directory).
+          Scripts will run with CWD as this directory.  If you have a manifest,
+          this is manifest.topdir
+      abort_if_user_denies: If True, we'll throw a HookError() if the user
+          doesn't allow us to run the hook.
+    """
+    self._hook_type = hook_type
+    self._hooks_project = hooks_project
+    self._topdir = topdir
+    self._abort_if_user_denies = abort_if_user_denies
+
+    # Store the full path to the script for convenience.
+    if self._hooks_project:
+      self._script_fullpath = os.path.join(self._hooks_project.worktree,
+                                           self._hook_type + '.py')
+    else:
+      self._script_fullpath = None
+
+  def _GetHash(self):
+    """Return a hash of the contents of the hooks directory.
+
+    We'll just use git to do this.  This hash has the property that if anything
+    changes in the directory we will return a different has.
+
+    SECURITY CONSIDERATION:
+      This hash only represents the contents of files in the hook directory, not
+      any other files imported or called by hooks.  Changes to imported files
+      can change the script behavior without affecting the hash.
+
+    Returns:
+      A string representing the hash.  This will always be ASCII so that it can
+      be printed to the user easily.
+    """
+    assert self._hooks_project, "Must have hooks to calculate their hash."
+
+    # We will use the work_git object rather than just calling GetRevisionId().
+    # That gives us a hash of the latest checked in version of the files that
+    # the user will actually be executing.  Specifically, GetRevisionId()
+    # doesn't appear to change even if a user checks out a different version
+    # of the hooks repo (via git checkout) nor if a user commits their own revs.
+    #
+    # NOTE: Local (non-committed) changes will not be factored into this hash.
+    # I think this is OK, since we're really only worried about warning the user
+    # about upstream changes.
+    return self._hooks_project.work_git.rev_parse('HEAD')
+
+  def _GetMustVerb(self):
+    """Return 'must' if the hook is required; 'should' if not."""
+    if self._abort_if_user_denies:
+      return 'must'
+    else:
+      return 'should'
+
+  def _CheckForHookApproval(self):
+    """Check to see whether this hook has been approved.
+
+    We'll look at the hash of all of the hooks.  If this matches the hash that
+    the user last approved, we're done.  If it doesn't, we'll ask the user
+    about approval.
+
+    Note that we ask permission for each individual hook even though we use
+    the hash of all hooks when detecting changes.  We'd like the user to be
+    able to approve / deny each hook individually.  We only use the hash of all
+    hooks because there is no other easy way to detect changes to local imports.
+
+    Returns:
+      True if this hook is approved to run; False otherwise.
+
+    Raises:
+      HookError: Raised if the user doesn't approve and abort_if_user_denies
+          was passed to the consturctor.
+    """
+    hooks_dir = self._hooks_project.worktree
+    hooks_config = self._hooks_project.config
+    git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
+
+    # Get the last hash that the user approved for this hook; may be None.
+    old_hash = hooks_config.GetString(git_approval_key)
+
+    # Get the current hash so we can tell if scripts changed since approval.
+    new_hash = self._GetHash()
+
+    if old_hash is not None:
+      # User previously approved hook and asked not to be prompted again.
+      if new_hash == old_hash:
+        # Approval matched.  We're done.
+        return True
+      else:
+        # Give the user a reason why we're prompting, since they last told
+        # us to "never ask again".
+        prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
+            self._hook_type)
+    else:
+      prompt = ''
+
+    # Prompt the user if we're not on a tty; on a tty we'll assume "no".
+    if sys.stdout.isatty():
+      prompt += ('Repo %s run the script:\n'
+                 '  %s\n'
+                 '\n'
+                 'Do you want to allow this script to run '
+                 '(yes/yes-never-ask-again/NO)? ') % (
+                 self._GetMustVerb(), self._script_fullpath)
+      response = raw_input(prompt).lower()
+      print
+
+      # User is doing a one-time approval.
+      if response in ('y', 'yes'):
+        return True
+      elif response == 'yes-never-ask-again':
+        hooks_config.SetString(git_approval_key, new_hash)
+        return True
+
+    # For anything else, we'll assume no approval.
+    if self._abort_if_user_denies:
+      raise HookError('You must allow the %s hook or use --no-verify.' %
+                      self._hook_type)
+
+    return False
+
+  def _ExecuteHook(self, **kwargs):
+    """Actually execute the given hook.
+
+    This will run the hook's 'main' function in our python interpreter.
+
+    Args:
+      kwargs: Keyword arguments to pass to the hook.  These are often specific
+          to the hook type.  For instance, pre-upload hooks will contain
+          a project_list.
+    """
+    # Keep sys.path and CWD stashed away so that we can always restore them
+    # upon function exit.
+    orig_path = os.getcwd()
+    orig_syspath = sys.path
+
+    try:
+      # Always run hooks with CWD as topdir.
+      os.chdir(self._topdir)
+
+      # Put the hook dir as the first item of sys.path so hooks can do
+      # relative imports.  We want to replace the repo dir as [0] so
+      # hooks can't import repo files.
+      sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
+
+      # Exec, storing global context in the context dict.  We catch exceptions
+      # and convert to a HookError w/ just the failing traceback.
+      context = {}
+      try:
+        execfile(self._script_fullpath, context)
+      except Exception:
+        raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
+                        traceback.format_exc(), self._hook_type))
+
+      # Running the script should have defined a main() function.
+      if 'main' not in context:
+        raise HookError('Missing main() in: "%s"' % self._script_fullpath)
+
+      # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
+      # We don't actually want hooks to define their main with this argument--
+      # it's there to remind them that their hook should always take **kwargs.
+      # For instance, a pre-upload hook should be defined like:
+      #   def main(project_list, **kwargs):
+      #
+      # This allows us to later expand the API without breaking old hooks.
+      kwargs = kwargs.copy()
+      kwargs['hook_should_take_kwargs'] = True
+
+      # Call the main function in the hook.  If the hook should cause the
+      # build to fail, it will raise an Exception.  We'll catch that convert
+      # to a HookError w/ just the failing traceback.
+      try:
+        context['main'](**kwargs)
+      except Exception:
+        raise HookError('%s\nFailed to run main() for %s hook; see traceback '
+                        'above.' % (
+                        traceback.format_exc(), self._hook_type))
+    finally:
+      # Restore sys.path and CWD.
+      sys.path = orig_syspath
+      os.chdir(orig_path)
+
+  def Run(self, user_allows_all_hooks, **kwargs):
+    """Run the hook.
+
+    If the hook doesn't exist (because there is no hooks project or because
+    this particular hook is not enabled), this is a no-op.
+
+    Args:
+      user_allows_all_hooks: If True, we will never prompt about running the
+          hook--we'll just assume it's OK to run it.
+      kwargs: Keyword arguments to pass to the hook.  These are often specific
+          to the hook type.  For instance, pre-upload hooks will contain
+          a project_list.
+
+    Raises:
+      HookError: If there was a problem finding the hook or the user declined
+          to run a required hook (from _CheckForHookApproval).
+    """
+    # No-op if there is no hooks project or if hook is disabled.
+    if ((not self._hooks_project) or
+        (self._hook_type not in self._hooks_project.enabled_repo_hooks)):
+      return
+
+    # Bail with a nice error if we can't find the hook.
+    if not os.path.isfile(self._script_fullpath):
+      raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath)
+
+    # Make sure the user is OK with running the hook.
+    if (not user_allows_all_hooks) and (not self._CheckForHookApproval()):
+      return
+
+    # Run the hook with the same version of python we're using.
+    self._ExecuteHook(**kwargs)
+
+
 class Project(object):
   def __init__(self,
                manifest,
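The comments in _ExecuteHook() above describe the hook contract: the script lives in the hooks project's worktree as '<hook_type>.py', is exec'd with CWD set to the repo top directory, and must define main(**kwargs). A hypothetical pre-upload.py consistent with that contract; the marker-file check and the assumption that project_list entries behave like worktree paths are illustrative only, not documented by the change above:

    # pre-upload.py - sketch of a repo-level hook; assumes project_list items
    # can be treated as worktree paths (an assumption, not a documented fact).
    import os

    def main(project_list, **kwargs):
      # Extra keyword arguments (e.g. hook_should_take_kwargs) are accepted and
      # ignored so the hook keeps working if repo later extends the API.
      blocked = [p for p in project_list
                 if os.path.exists(os.path.join(str(p), 'DO_NOT_UPLOAD'))]
      if blocked:
        # Any exception raised here is converted by repo into a HookError and,
        # unless --no-verify was used, aborts the upload.
        raise Exception('upload blocked for: %s' % ', '.join(map(str, blocked)))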
@@ -238,8 +491,11 @@ class Project(object):
     self.manifest = manifest
     self.name = name
     self.remote = remote
-    self.gitdir = gitdir
-    self.worktree = worktree
+    self.gitdir = gitdir.replace('\\', '/')
+    if worktree:
+      self.worktree = worktree.replace('\\', '/')
+    else:
+      self.worktree = None
     self.relpath = relpath
     self.revisionExpr = revisionExpr

@@ -263,6 +519,10 @@ class Project(object):
     self.bare_git = self._GitGetByExec(self, bare=True)
     self.bare_ref = GitRefs(gitdir)

+    # This will be filled in if a project is later identified to be the
+    # project containing repo hooks.
+    self.enabled_repo_hooks = []
+
   @property
   def Exists(self):
     return os.path.isdir(self.gitdir)
@@ -557,7 +817,6 @@ class Project(object):
     return None

   def UploadForReview(self, branch=None,
-                      replace_changes=None,
                       people=([],[]),
                       auto_topic=False):
     """Uploads the named branch for code review.
@@ -600,9 +859,6 @@ class Project(object):
     cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
     cmd.append(ref_spec)

-    if replace_changes:
-      for change_id,commit_id in replace_changes.iteritems():
-        cmd.append('%s:refs/changes/%s/new' % (commit_id, change_id))
     if GitCommand(self, cmd, bare = True).Wait() != 0:
       raise UploadError('Upload failed')
@@ -618,17 +874,19 @@ class Project(object):

   ## Sync ##

-  def Sync_NetworkHalf(self):
+  def Sync_NetworkHalf(self, quiet=False):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
-    if not self.Exists:
-      print >>sys.stderr
-      print >>sys.stderr, 'Initializing project %s ...' % self.name
+    is_new = not self.Exists
+    if is_new:
+      if not quiet:
+        print >>sys.stderr
+        print >>sys.stderr, 'Initializing project %s ...' % self.name
       self._InitGitDir()

     self._InitRemote()
-    if not self._RemoteFetch():
+    if not self._RemoteFetch(initial=is_new, quiet=quiet):
       return False

     #Check that the requested ref was found after fetch
@@ -641,7 +899,7 @@ class Project(object):
     #
     rev = self.revisionExpr
     if rev.startswith(R_TAGS):
-      self._RemoteFetch(None, rev[len(R_TAGS):])
+      self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)

     if self.worktree:
       self._InitMRef()
@@ -681,11 +939,11 @@ class Project(object):
     """Perform only the local IO portion of the sync process.
        Network access is not required.
     """
-    self._InitWorkTree()
     all = self.bare_ref.all
     self.CleanPublishedCache(all)

     revid = self.GetRevisionId(all)
+
+    self._InitWorkTree()
     head = self.work_git.GetHead()
     if head.startswith(R_HEADS):
       branch = head[len(R_HEADS):]
@@ -1024,7 +1282,9 @@ class Project(object):

   ## Direct Git Commands ##

-  def _RemoteFetch(self, name=None, tag=None):
+  def _RemoteFetch(self, name=None, tag=None,
+                   initial=False,
+                   quiet=False):
     if not name:
       name = self.remote.name

@@ -1032,17 +1292,84 @@ class Project(object):
     if self.GetRemote(name).PreConnectFetch():
       ssh_proxy = True

+    if initial:
+      alt = os.path.join(self.gitdir, 'objects/info/alternates')
+      try:
+        fd = open(alt, 'rb')
+        try:
+          ref_dir = fd.readline()
+          if ref_dir and ref_dir.endswith('\n'):
+            ref_dir = ref_dir[:-1]
+        finally:
+          fd.close()
+      except IOError, e:
+        ref_dir = None
+
+      if ref_dir and 'objects' == os.path.basename(ref_dir):
+        ref_dir = os.path.dirname(ref_dir)
+        packed_refs = os.path.join(self.gitdir, 'packed-refs')
+        remote = self.GetRemote(name)
+
+        all = self.bare_ref.all
+        ids = set(all.values())
+        tmp = set()
+
+        for r, id in GitRefs(ref_dir).all.iteritems():
+          if r not in all:
+            if r.startswith(R_TAGS) or remote.WritesTo(r):
+              all[r] = id
+              ids.add(id)
+              continue
+
+          if id in ids:
+            continue
+
+          r = 'refs/_alt/%s' % id
+          all[r] = id
+          ids.add(id)
+          tmp.add(r)
+
+        ref_names = list(all.keys())
+        ref_names.sort()
+
+        tmp_packed = ''
+        old_packed = ''
+
+        for r in ref_names:
+          line = '%s %s\n' % (all[r], r)
+          tmp_packed += line
+          if r not in tmp:
+            old_packed += line
+
+        _lwrite(packed_refs, tmp_packed)
+
+      else:
+        ref_dir = None
+
     cmd = ['fetch']
+    if quiet:
+      cmd.append('--quiet')
     if not self.worktree:
       cmd.append('--update-head-ok')
     cmd.append(name)
     if tag is not None:
       cmd.append('tag')
       cmd.append(tag)
-    return GitCommand(self,
-                      cmd,
-                      bare = True,
-                      ssh_proxy = ssh_proxy).Wait() == 0
+
+    ok = GitCommand(self,
+                    cmd,
+                    bare = True,
+                    ssh_proxy = ssh_proxy).Wait() == 0
+
+    if initial:
+      if ref_dir:
+        if old_packed != '':
+          _lwrite(packed_refs, old_packed)
+        else:
+          os.remove(packed_refs)
+      self.bare_git.pack_refs('--all', '--prune')
+
+    return ok

   def _Checkout(self, rev, quiet=False):
     cmd = ['checkout']
@@ -1080,6 +1407,27 @@ class Project(object):
       os.makedirs(self.gitdir)
       self.bare_git.init()

+      mp = self.manifest.manifestProject
+      ref_dir = mp.config.GetString('repo.reference')
+
+      if ref_dir:
+        mirror_git = os.path.join(ref_dir, self.name + '.git')
+        repo_git = os.path.join(ref_dir, '.repo', 'projects',
+                                self.relpath + '.git')
+
+        if os.path.exists(mirror_git):
+          ref_dir = mirror_git
+
+        elif os.path.exists(repo_git):
+          ref_dir = repo_git
+
+        else:
+          ref_dir = None
+
+        if ref_dir:
+          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
+                  os.path.join(ref_dir, 'objects') + '\n')
+
       if self.manifest.IsMirror:
         self.config.SetString('core.bare', 'true')
       else:
@@ -1103,10 +1451,10 @@ class Project(object):
     hooks = self._gitdir_path('hooks')
     if not os.path.exists(hooks):
       os.makedirs(hooks)
-    for stock_hook in repo_hooks():
+    for stock_hook in _ProjectHooks():
       name = os.path.basename(stock_hook)

-      if name in ('commit-msg') and not self.remote.review:
+      if name in ('commit-msg',) and not self.remote.review:
         # Don't install a Gerrit Code Review hook if this
         # project does not appear to use it for reviews.
         #
@@ -1357,6 +1705,22 @@ class Project(object):
       return r

     def __getattr__(self, name):
+      """Allow arbitrary git commands using pythonic syntax.
+
+      This allows you to do things like:
+        git_obj.rev_parse('HEAD')
+
+      Since we don't have a 'rev_parse' method defined, the __getattr__ will
+      run.  We'll replace the '_' with a '-' and try to run a git command.
+      Any other arguments will be passed to the git command.
+
+      Args:
+        name: The name of the git command to call.  Any '_' characters will
+          be replaced with '-'.
+
+      Returns:
+        A callable object that will try to call git with the named command.
+      """
       name = name.replace('_', '-')
       def runner(*args):
         cmdv = [name]
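The dispatch described in that docstring is what lets earlier code such as _GetHash() call self._hooks_project.work_git.rev_parse('HEAD'). As a self-contained sketch of the same pattern (a hypothetical GitRunner class, separate from repo's own classes):

    import subprocess

    class GitRunner(object):
      """Standalone sketch: map attribute access onto git subcommands."""
      def __init__(self, gitdir):
        self._gitdir = gitdir

      def __getattr__(self, name):
        name = name.replace('_', '-')   # rev_parse -> rev-parse
        def runner(*args):
          cmd = ['git', '--git-dir=%s' % self._gitdir, name] + list(args)
          p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
          out, _ = p.communicate()
          return out.strip()
        return runner

    # GitRunner('/path/to/project.git').rev_parse('HEAD') runs
    # 'git --git-dir=/path/to/project.git rev-parse HEAD' and returns its output.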
repo (20 lines changed)

@@ -28,7 +28,7 @@ if __name__ == '__main__':
   del magic

 # increment this whenever we make important changes to this script
-VERSION = (1, 8)
+VERSION = (1, 10)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1,0)
@@ -118,6 +118,9 @@ group.add_option('-m', '--manifest-name',
 group.add_option('--mirror',
                  dest='mirror', action='store_true',
                  help='mirror the forrest')
+group.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')

 # Tool
 group = init_optparse.add_option_group('repo Version options')
@@ -256,8 +259,8 @@ def _SetupGnuPG(quiet):
       gpg_dir, e.strerror)
     sys.exit(1)

-  env = dict(os.environ)
-  env['GNUPGHOME'] = gpg_dir
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()

   cmd = ['gpg', '--import']
   try:
@@ -375,8 +378,8 @@ def _Verify(cwd, branch, quiet):
         % (branch, cur)
     print >>sys.stderr

-  env = dict(os.environ)
-  env['GNUPGHOME'] = gpg_dir
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()

   cmd = [GIT, 'tag', '-v', cur]
   proc = subprocess.Popen(cmd,
@@ -427,10 +430,14 @@ def _FindRepo():
   dir = os.getcwd()
   repo = None

-  while dir != '/' and not repo:
+  olddir = None
+  while dir != '/' \
+        and dir != olddir \
+        and not repo:
     repo = os.path.join(dir, repodir, REPO_MAIN)
     if not os.path.isfile(repo):
       repo = None
+      olddir = dir
       dir = os.path.dirname(dir)
   return (repo, os.path.join(dir, repodir))

@@ -476,6 +483,7 @@ def _Help(args):
   if args:
     if args[0] == 'init':
       init_optparse.print_help()
+      sys.exit(0)
     else:
       print >>sys.stderr,\
       "error: '%s' is not a bootstrap command.\n"\
@@ -36,6 +36,9 @@ makes it available in your project's local working directory.
     pass

   def _ParseChangeIds(self, args):
+    if not args:
+      self.Usage()
+
     to_get = []
     project = None

@@ -151,11 +151,11 @@ terminal and are not redirected.
     first = True

     for project in self.GetProjects(args):
-      env = dict(os.environ.iteritems())
+      env = os.environ.copy()
       def setenv(name, val):
         if val is None:
           val = ''
-        env[name] = val
+        env[name] = val.encode()

       setenv('REPO_PROJECT', project.name)
       setenv('REPO_PATH', project.relpath)
@@ -169,6 +169,12 @@ terminal and are not redirected.
       else:
         cwd = project.worktree

+      if not os.path.exists(cwd):
+        if (opt.project_header and opt.verbose) \
+        or not opt.project_header:
+          print >>sys.stderr, 'skipping %s/' % project.relpath
+        continue
+
       if opt.project_header:
         stdin = subprocess.PIPE
         stdout = subprocess.PIPE
@@ -94,6 +94,8 @@ See 'repo help --all' for a complete list of recognized commands.
         body = getattr(cmd, bodyAttr)
       except AttributeError:
         return
+      if body == '' or body is None:
+        return

       self.nl()

@ -41,6 +41,13 @@ The optional -m argument can be used to specify an alternate manifest
|
|||||||
to be used. If no manifest is specified, the manifest default.xml
|
to be used. If no manifest is specified, the manifest default.xml
|
||||||
will be used.
|
will be used.
|
||||||
|
|
||||||
|
The --reference option can be used to point to a directory that
|
||||||
|
has the content of a --mirror sync. This will make the working
|
||||||
|
directory use as much data as possible from the local reference
|
||||||
|
directory when fetching from the server. This will make the sync
|
||||||
|
go a lot faster by reducing data traffic on the network.
|
||||||
|
|
||||||
|
|
||||||
Switching Manifest Branches
|
Switching Manifest Branches
|
||||||
---------------------------
|
---------------------------
|
||||||
|
|
||||||
@@ -71,7 +78,9 @@ to update the working directory files.
     g.add_option('--mirror',
                  dest='mirror', action='store_true',
                  help='mirror the forrest')
+    g.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')
 
     # Tool
     g = p.add_option_group('repo Version options')
@@ -115,6 +124,9 @@ to update the working directory files.
       r.ResetFetch()
       r.Save()
 
+    if opt.reference:
+      m.config.SetString('repo.reference', opt.reference)
+
     if opt.mirror:
       if is_new:
         m.config.SetString('repo.mirror', 'true')
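The new block stores the chosen directory in the manifest repository's git config under the `repo.reference` key, so later operations can find the local mirror again. A small sketch of reading such a key back with plain git, assuming a hypothetical path to the manifest git directory:

    import subprocess

    def get_reference_dir(manifest_gitdir):
      """Return the stored repo.reference value, or None if the key is unset."""
      try:
        out = subprocess.check_output(
            ['git', '--git-dir', manifest_gitdir, 'config', 'repo.reference'])
      except subprocess.CalledProcessError:   # git exits non-zero when unset
        return None
      return out.strip()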
@@ -55,6 +55,7 @@ need to be performed by an end-user.
         print >>sys.stderr, "error: can't update repo"
         sys.exit(1)
 
+      rp.bare_git.gc('--auto')
       _PostRepoFetch(rp,
                      no_repo_verify = opt.no_repo_verify,
                      verbose = True)
@@ -70,6 +70,9 @@ The -s/--smart-sync option can be used to sync to a known good
 build as specified by the manifest-server element in the current
 manifest.
 
+The -f/--force-broken option can be used to proceed with syncing
+other projects if a project sync fails.
+
 SSH Connections
 ---------------
 
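The help text above promises that a single failing project no longer aborts the whole sync when `-f/--force-broken` is given. A compact sketch of that continue-on-error policy in isolation; the `fetch` callable and project names are placeholders, not repo's API:

    def sync_all(projects, fetch, force_broken=False):
      """Sketch of the -f/--force-broken policy; not repo's real code path."""
      failures = []
      for name in projects:
        try:
          fetch(name)
        except Exception as err:
          if not force_broken:
            raise                        # default: the first failure aborts
          failures.append((name, err))   # -f: remember it and keep syncing
      return failures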
@@ -101,6 +104,9 @@ later is required to fix a server side protocol bug.
   """
 
   def _Options(self, p, show_smart=True):
+    p.add_option('-f', '--force-broken',
+                 dest='force_broken', action='store_true',
+                 help="continue sync even if a project fails to sync")
     p.add_option('-l','--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
@@ -110,6 +116,9 @@ later is required to fix a server side protocol bug.
     p.add_option('-d','--detach',
                  dest='detach_head', action='store_true',
                  help='detach projects back to manifest revision')
+    p.add_option('-q','--quiet',
+                 dest='quiet', action='store_true',
+                 help='be more quiet')
     p.add_option('-j','--jobs',
                  dest='jobs', action='store', type='int',
                  help="number of projects to fetch simultaneously")
@@ -126,11 +135,14 @@ later is required to fix a server side protocol bug.
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)
 
-  def _FetchHelper(self, project, lock, fetched, pm, sem):
-    if not project.Sync_NetworkHalf():
+  def _FetchHelper(self, opt, project, lock, fetched, pm, sem):
+    if not project.Sync_NetworkHalf(quiet=opt.quiet):
       print >>sys.stderr, 'error: Cannot fetch %s' % project.name
-      sem.release()
-      sys.exit(1)
+      if opt.force_broken:
+        print >>sys.stderr, 'warn: --force-broken, continuing to sync'
+      else:
+        sem.release()
+        sys.exit(1)
 
     lock.acquire()
     fetched.add(project.gitdir)
@@ -138,18 +150,21 @@ later is required to fix a server side protocol bug.
       lock.release()
       sem.release()
 
-  def _Fetch(self, projects):
+  def _Fetch(self, projects, opt):
     fetched = set()
     pm = Progress('Fetching projects', len(projects))
 
     if self.jobs == 1:
       for project in projects:
         pm.update()
-        if project.Sync_NetworkHalf():
+        if project.Sync_NetworkHalf(quiet=opt.quiet):
           fetched.add(project.gitdir)
         else:
           print >>sys.stderr, 'error: Cannot fetch %s' % project.name
-          sys.exit(1)
+          if opt.force_broken:
+            print >>sys.stderr, 'warn: --force-broken, continuing to sync'
+          else:
+            sys.exit(1)
     else:
       threads = set()
       lock = _threading.Lock()
@@ -157,7 +172,12 @@ later is required to fix a server side protocol bug.
       for project in projects:
         sem.acquire()
         t = _threading.Thread(target = self._FetchHelper,
-                              args = (project, lock, fetched, pm, sem))
+                              args = (opt,
+                                      project,
+                                      lock,
+                                      fetched,
+                                      pm,
+                                      sem))
         threads.add(t)
         t.start()
 
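The `-j/--jobs` path caps concurrency with a counting semaphore: a new worker thread is started only after `sem.acquire()` succeeds, and every exit path must give the slot back. A condensed, self-contained sketch of that pattern (names are illustrative, not repo's classes):

    import threading

    def parallel_fetch(names, fetch, jobs=4):
      """Run fetch(name) for each name with at most `jobs` in flight at once."""
      sem = threading.Semaphore(jobs)
      lock = threading.Lock()
      fetched, threads = set(), []

      def worker(name):
        try:
          if fetch(name):
            with lock:                   # the shared set needs the lock
              fetched.add(name)
        finally:
          sem.release()                  # always free the slot, even on error

      for name in names:
        sem.acquire()                    # blocks once `jobs` workers are running
        t = threading.Thread(target=worker, args=(name,))
        threads.append(t)
        t.start()
      for t in threads:
        t.join()
      return fetched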
@@ -165,6 +185,8 @@ later is required to fix a server side protocol bug.
         t.join()
 
     pm.end()
+    for project in projects:
+      project.bare_git.gc('--auto')
     return fetched
 
   def UpdateProjectList(self):
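`git gc --auto` is close to a no-op when nothing needs repacking, so running it after each fetch keeps the object stores tidy at little cost. A hedged sketch of the same housekeeping step with plain subprocess calls, where the example path is invented:

    import subprocess

    def gc_auto(gitdirs):
      """Run 'git gc --auto' in each bare repository; git decides if work is needed."""
      for gitdir in gitdirs:
        subprocess.check_call(['git', '--git-dir', gitdir, 'gc', '--auto'])

    # gc_auto(['/src/.repo/projects/platform/build.git'])   # path is made up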
@@ -249,7 +271,7 @@ uncommitted changes are present' % project.relpath
     if branch.startswith(R_HEADS):
       branch = branch[len(R_HEADS):]
 
-    env = dict(os.environ)
+    env = os.environ.copy()
     if (env.has_key('TARGET_PRODUCT') and
         env.has_key('TARGET_BUILD_VARIANT')):
       target = '%s-%s' % (env['TARGET_PRODUCT'],
@@ -291,7 +313,7 @@ uncommitted changes are present' % project.relpath
       _PostRepoUpgrade(self.manifest)
 
     if not opt.local_only:
-      mp.Sync_NetworkHalf()
+      mp.Sync_NetworkHalf(quiet=opt.quiet)
 
     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
@@ -308,7 +330,7 @@ uncommitted changes are present' % project.relpath
       to_fetch.append(rp)
       to_fetch.extend(all)
 
-      fetched = self._Fetch(to_fetch)
+      fetched = self._Fetch(to_fetch, opt)
       _PostRepoFetch(rp, opt.no_repo_verify)
       if opt.network_only:
         # bail out now; the rest touches the working tree
@@ -320,7 +342,7 @@ uncommitted changes are present' % project.relpath
       for project in all:
         if project.gitdir not in fetched:
           missing.append(project)
-      self._Fetch(missing)
+      self._Fetch(missing, opt)
 
     if self.manifest.IsMirror:
       # bail out now, we have no working tree
@@ -341,6 +363,11 @@ uncommitted changes are present' % project.relpath
     if not syncbuf.Finish():
       sys.exit(1)
 
+    # If there's a notice that's supposed to print at the end of the sync, print
+    # it now...
+    if self.manifest.notice:
+      print self.manifest.notice
+
 def _PostRepoUpgrade(manifest):
   for project in manifest.projects.values():
     if project.Exists:
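The `self.manifest.notice` printed above comes from an optional `<notice>` element in the manifest XML. A rough standard-library sketch of extracting such an element, with `read_notice` being an illustrative helper rather than repo's own parser:

    import xml.dom.minidom

    def read_notice(manifest_path):
      """Return the text of the manifest's <notice> element, or None if absent."""
      doc = xml.dom.minidom.parse(manifest_path)
      for node in doc.documentElement.childNodes:
        if node.nodeName == 'notice':
          return ''.join(c.data for c in node.childNodes
                         if c.nodeType == c.TEXT_NODE).strip()
      return None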
@@ -388,9 +415,9 @@ warning: Cannot automatically authenticate repo."""
           % (project.name, rev)
     return False
 
-  env = dict(os.environ)
-  env['GIT_DIR'] = project.gitdir
-  env['GNUPGHOME'] = gpg_dir
+  env = os.environ.copy()
+  env['GIT_DIR'] = project.gitdir.encode()
+  env['GNUPGHOME'] = gpg_dir.encode()
 
   cmd = [GIT, 'tag', '-v', cur]
   proc = subprocess.Popen(cmd,
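The `.encode()` calls above exist because Python 2's `subprocess` rejects unicode values in a child environment on some platforms; the surrounding code then verifies a signed tag against repo's own keyring. A standalone sketch of that invocation, with the paths and tag name as placeholders:

    import os
    import subprocess

    def verify_tag(gitdir, gpg_home, tag):
      """Run 'git tag -v <tag>' against a specific git dir and keyring."""
      env = os.environ.copy()
      env['GIT_DIR'] = gitdir          # operate on this repository
      env['GNUPGHOME'] = gpg_home      # use a private keyring, not the user's
      proc = subprocess.Popen(['git', 'tag', '-v', tag],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=env)
      out, err = proc.communicate()
      return proc.returncode == 0, out, err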
@@ -19,7 +19,8 @@ import sys
 
 from command import InteractiveCommand
 from editor import Editor
-from error import UploadError
+from error import HookError, UploadError
+from project import RepoHook
 
 UNUSUAL_COMMIT_THRESHOLD = 5
 
@@ -47,7 +48,7 @@ class Upload(InteractiveCommand):
   common = True
   helpSummary = "Upload changes for code review"
   helpUsage="""
-%prog [--re --cc] {[<project>]... | --replace <project>}
+%prog [--re --cc] [<project>]...
 """
   helpDescription = """
 The '%prog' command is used to send changes to the Gerrit Code
@@ -67,12 +68,6 @@ added to the respective list of users, and emails are sent to any
 new users. Users passed as --reviewers must already be registered
 with the code review system, or the upload will fail.
 
-If the --replace option is passed the user can designate which
-existing change(s) in Gerrit match up to the commits in the branch
-being uploaded. For each matched pair of change,commit the commit
-will be added as a new patch set, completely replacing the set of
-files and description associated with the change in Gerrit.
-
 Configuration
 -------------
 
@@ -119,9 +114,6 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     p.add_option('-t',
                  dest='auto_topic', action='store_true',
                  help='Send local branch name to Gerrit Code Review')
-    p.add_option('--replace',
-                 dest='replace', action='store_true',
-                 help='Upload replacement patchesets from this branch')
     p.add_option('--re', '--reviewers',
                  type='string', action='append', dest='reviewers',
                  help='Request reviews from these people.')
@@ -129,6 +121,29 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
                  type='string', action='append', dest='cc',
                  help='Also send email to these email addresses.')
 
+    # Options relating to upload hook. Note that verify and no-verify are NOT
+    # opposites of each other, which is why they store to different locations.
+    # We are using them to match 'git commit' syntax.
+    #
+    # Combinations:
+    # - no-verify=False, verify=False (DEFAULT):
+    #   If stdout is a tty, can prompt about running upload hooks if needed.
+    #   If user denies running hooks, the upload is cancelled. If stdout is
+    #   not a tty and we would need to prompt about upload hooks, upload is
+    #   cancelled.
+    # - no-verify=False, verify=True:
+    #   Always run upload hooks with no prompt.
+    # - no-verify=True, verify=False:
+    #   Never run upload hooks, but upload anyway (AKA bypass hooks).
+    # - no-verify=True, verify=True:
+    #   Invalid
+    p.add_option('--no-verify',
+                 dest='bypass_hooks', action='store_true',
+                 help='Do not run the upload hook.')
+    p.add_option('--verify',
+                 dest='allow_all_hooks', action='store_true',
+                 help='Run the upload hook without prompting.')
+
   def _SingleBranch(self, opt, branch, people):
     project = branch.project
     name = branch.name
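The comment block added above is effectively a small decision table for the two new flags. A compact sketch of that logic in isolation, where `should_run_hooks` and `ask` are illustrative names rather than repo's API:

    import sys

    def should_run_hooks(bypass_hooks, allow_all_hooks, ask):
      """Illustrative mapping of --no-verify/--verify to behaviour."""
      if bypass_hooks:            # --no-verify: skip hooks, upload anyway
        return False
      if allow_all_hooks:         # --verify: run hooks without prompting
        return True
      if sys.stdout.isatty():     # default: ask the user when we can
        return ask()              # a False answer means the upload is cancelled
      raise RuntimeError('hooks would need a prompt, but stdout is not a tty')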
@@ -262,65 +277,6 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     except:
       return ""
 
-  def _ReplaceBranch(self, opt, project, people):
-    branch = project.CurrentBranch
-    if not branch:
-      print >>sys.stdout, "no branches ready for upload"
-      return
-    branch = project.GetUploadableBranch(branch)
-    if not branch:
-      print >>sys.stdout, "no branches ready for upload"
-      return
-
-    script = []
-    script.append('# Replacing from branch %s' % branch.name)
-
-    if len(branch.commits) == 1:
-      change = self._FindGerritChange(branch)
-      script.append('[%-6s] %s' % (change, branch.commits[0]))
-    else:
-      for commit in branch.commits:
-        script.append('[ ] %s' % commit)
-
-    script.append('')
-    script.append('# Insert change numbers in the brackets to add a new patch set.')
-    script.append('# To create a new change record, leave the brackets empty.')
-
-    script = Editor.EditString("\n".join(script)).split("\n")
-
-    change_re = re.compile(r'^\[\s*(\d{1,})\s*\]\s*([0-9a-f]{1,}) .*$')
-    to_replace = dict()
-    full_hashes = branch.unabbrev_commits
-
-    for line in script:
-      m = change_re.match(line)
-      if m:
-        c = m.group(1)
-        f = m.group(2)
-        try:
-          f = full_hashes[f]
-        except KeyError:
-          print 'fh = %s' % full_hashes
-          print >>sys.stderr, "error: commit %s not found" % f
-          sys.exit(1)
-        if c in to_replace:
-          print >>sys.stderr,\
-            "error: change %s cannot accept multiple commits" % c
-          sys.exit(1)
-        to_replace[c] = f
-
-    if not to_replace:
-      print >>sys.stderr, "error: no replacements specified"
-      print >>sys.stderr, " use 'repo upload' without --replace"
-      sys.exit(1)
-
-    if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
-      if not _ConfirmManyUploads(multiple_branches=True):
-        _die("upload aborted by user")
-
-    branch.replace_changes = to_replace
-    self._UploadAndReport(opt, [branch], people)
-
   def _UploadAndReport(self, opt, todo, original_people):
     have_errors = False
     for branch in todo:
@@ -351,15 +307,19 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
         have_errors = True
 
       print >>sys.stderr, ''
-      print >>sys.stderr, '--------------------------------------------'
+      print >>sys.stderr, '----------------------------------------------------------------------'
 
       if have_errors:
         for branch in todo:
           if not branch.uploaded:
-            print >>sys.stderr, '[FAILED] %-15s %-15s (%s)' % (
+            if len(str(branch.error)) <= 30:
+              fmt = ' (%s)'
+            else:
+              fmt = '\n (%s)'
+            print >>sys.stderr, ('[FAILED] %-15s %-15s' + fmt) % (
                    branch.project.relpath + '/', \
                    branch.name, \
-                   branch.error)
+                   str(branch.error))
         print >>sys.stderr, ''
 
       for branch in todo:
@@ -377,25 +337,27 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     reviewers = []
     cc = []
 
+    for project in project_list:
+      avail = project.GetUploadableBranches()
+      if avail:
+        pending.append((project, avail))
+
+    if pending and (not opt.bypass_hooks):
+      hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
+                      self.manifest.topdir, abort_if_user_denies=True)
+      pending_proj_names = [project.name for (project, avail) in pending]
+      try:
+        hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
+      except HookError, e:
+        print >>sys.stderr, "ERROR: %s" % str(e)
+        return
+
     if opt.reviewers:
       reviewers = _SplitEmails(opt.reviewers)
     if opt.cc:
       cc = _SplitEmails(opt.cc)
     people = (reviewers,cc)
 
-    if opt.replace:
-      if len(project_list) != 1:
-        print >>sys.stderr, \
-          'error: --replace requires exactly one project'
-        sys.exit(1)
-      self._ReplaceBranch(opt, project_list[0], people)
-      return
-
-    for project in project_list:
-      avail = project.GetUploadableBranches()
-      if avail:
-        pending.append((project, avail))
-
     if not pending:
       print >>sys.stdout, "no branches ready for upload"
     elif len(pending) == 1 and len(pending[0][1]) == 1:
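For context, the script that `RepoHook('pre-upload', ...)` executes lives in the project named by the manifest's `repo-hooks` element; the exact calling convention is defined by `RepoHook` in project.py and is not shown in this diff. Purely as a hypothetical illustration, treating the file name, the `main` signature, and the veto mechanism as assumptions:

    # Hypothetical pre-upload.py in the manifest's repo-hooks project.
    # The real contract is defined by RepoHook in project.py; the names and
    # signature here are assumptions for illustration only.
    def main(project_list, **kwargs):
      for name in project_list:
        if name.endswith('/experimental'):
          # Raising aborts the hook, which in turn cancels the upload.
          raise Exception('uploads from %s are not allowed' % name)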