mirror of https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00
Compare commits
96 Commits
SHA1 | Author | Date | |
---|---|---|---|
c325dc35f6 | |||
f322b9abb4 | |||
db728cd866 | |||
c4657969eb | |||
7b947de1ee | |||
6392c87945 | |||
97d2b2f7a0 | |||
3a0e782790 | |||
490d09a314 | |||
13111b4e97 | |||
bd0312a484 | |||
334851e4b6 | |||
014d060989 | |||
44da16e8a0 | |||
65e0f35fda | |||
08c880db18 | |||
a101f1c167 | |||
49cd59bc86 | |||
30d452905f | |||
d6c93a28ca | |||
d572a13021 | |||
3ba5f95b46 | |||
2630dd9787 | |||
dafb1d68d3 | |||
4655e81a75 | |||
723c5dc3d6 | |||
e6a0eeb80d | |||
0960b5b53d | |||
fc06ced9f9 | |||
fce89f218a | |||
37282b4b9c | |||
835cd6888f | |||
8ced8641c8 | |||
2536f80625 | |||
0ce6ca9c7b | |||
0fc3a39829 | |||
c7c57e34db | |||
0d2b61f11d | |||
2bf9db0d3b | |||
f00e0ce556 | |||
1b5a4a0c5d | |||
de8b2c4276 | |||
727ee98a40 | |||
df14a70c45 | |||
f18cb76173 | |||
d3fd537ea5 | |||
0048b69c03 | |||
2b8db3ce3e | |||
5df6de075e | |||
a0de6e8eab | |||
16614f86b3 | |||
88443387b1 | |||
99482ae58a | |||
ec1df9b7f6 | |||
06d029c1c8 | |||
b715b14807 | |||
60829ba72f | |||
a22f99ae41 | |||
3575b8f8bd | |||
a5ece0e050 | |||
cc50bac8c7 | |||
0cb1b3f687 | |||
9e426aa432 | |||
08a3f68d38 | |||
feb39d61ef | |||
7198572dd7 | |||
2daf66740b | |||
f4f04d9fa8 | |||
18afd7f679 | |||
6623b21e10 | |||
ca8c32cd7a | |||
f0a9a1a30e | |||
879a9a5cf0 | |||
ff6929dde8 | |||
1c85f4e43b | |||
719965af35 | |||
5732e47ebb | |||
f3fdf823cf | |||
a1bfd2cd72 | |||
6d7508b3d5 | |||
9452e4ec09 | |||
4c50deea28 | |||
d63060fc95 | |||
b6ea3bfcc3 | |||
aa4982e4c9 | |||
9bb1816bdc | |||
c24c720b61 | |||
2d1a396897 | |||
1dcb58a7d0 | |||
37dbf2bf0f | |||
438c54713a | |||
e020ebee4e | |||
21c5c34ee2 | |||
54fccd71fb | |||
fb5c8fd948 | |||
26120ca18d |
.gitignore (vendored, 1 change)

@@ -1 +1,2 @@
 *.pyc
+.repopickle_*
SUBMITTING_PATCHES (new file, 80 lines)

@@ -0,0 +1,80 @@
Short Version:

 - Make small logical changes.
 - Provide a meaningful commit message.
 - Make sure all code is under the Apache License, 2.0.
 - Publish your changes for review:

   git push ssh://review.source.android.com:29418/tools/repo.git HEAD:refs/for/master


Long Version:

I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution
(Documentation/SubmittingPatches), which itself was based on the
patch submission guidelines for the Linux kernel.

However there are some differences, so please review and familiarize
yourself with the following relevant bits:


(1) Make separate commits for logically separate changes.

Unless your patch is really trivial, you should not be sending
out a patch that was generated between your working tree and your
commit head.  Instead, always make a commit with complete commit
message and generate a series of patches from your repository.
It is a good discipline.

Describe the technical detail of the change(s).

If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.


(2) Check the license

repo is licensed under the Apache License, 2.0.

Because of this licensing model *every* file within the project
*must* list the license that covers it in the header of the file.
Any new contributions to an existing file *must* be submitted under
the current license of that file.  Any new files *must* clearly
indicate which license they are provided under in the file header.

Please verify that you are legally allowed and willing to submit your
changes under the license covering each file *prior* to submitting
your patch.  It is virtually impossible to remove a patch once it
has been applied and pushed out.


(3) Sending your patches.

Do not email your patches to anyone.

Instead, login to the Gerrit Code Review tool at:

  https://review.source.android.com/

Ensure you have completed one of the necessary contributor
agreements, providing documentation to the project maintainers that
they have right to redistribute your work under the Apache License:

  https://review.source.android.com/#settings,agreements

Ensure you have registered one or more SSH public keys, so you can
push your commits directly over SSH:

  https://review.source.android.com/#settings,ssh-keys

Push your patches over SSH to the review server, possibly through
a remembered remote to make this easier in the future:

  git config remote.review.url ssh://review.source.android.com:29418/tools/repo.git
  git config remote.review.push HEAD:refs/for/master

  git push review

You will be automatically emailed a copy of your commits, and any
comments made by the project maintainers.
@@ -74,7 +74,7 @@ class Command(object):
       project = all.get(arg)

       if not project:
-        path = os.path.abspath(arg)
+        path = os.path.abspath(arg).replace('\\', '/')

         if not by_path:
           by_path = dict()
@@ -82,13 +82,15 @@ class Command(object):
             by_path[p.worktree] = p

       if os.path.exists(path):
+        oldpath = None
         while path \
-          and path != '/' \
+          and path != oldpath \
           and path != self.manifest.topdir:
           try:
             project = by_path[path]
             break
           except KeyError:
+            oldpath = path
             path = os.path.dirname(path)
       else:
         try:
@@ -20,10 +20,15 @@ A manifest XML file (e.g. 'default.xml') roughly conforms to the
 following DTD:

   <!DOCTYPE manifest [
-    <!ELEMENT manifest (remote*,
+    <!ELEMENT manifest (notice?,
+                        remote*,
                         default?,
                         manifest-server?,
                         remove-project*,
-                        project*)>
+                        project*,
+                        repo-hooks?)>
+
+    <!ELEMENT notice (#PCDATA)>

     <!ELEMENT remote (EMPTY)>
     <!ATTLIST remote name ID #REQUIRED>
@@ -33,6 +38,10 @@ following DTD:
     <!ELEMENT default (EMPTY)>
     <!ATTLIST default remote IDREF #IMPLIED>
     <!ATTLIST default revision CDATA #IMPLIED>
+    <!ATTLIST default sync-j CDATA #IMPLIED>
+
+    <!ELEMENT manifest-server (EMPTY)>
+    <!ATTLIST url CDATA #REQUIRED>

     <!ELEMENT project (EMPTY)>
     <!ATTLIST project name CDATA #REQUIRED>
@@ -42,6 +51,10 @@ following DTD:

     <!ELEMENT remove-project (EMPTY)>
     <!ATTLIST remove-project name CDATA #REQUIRED>
+
+    <!ELEMENT repo-hooks (EMPTY)>
+    <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
+    <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
   ]>

 A description of the elements and their attributes follows.
@@ -89,6 +102,27 @@ Attribute `revision`: Name of a Git branch (e.g. `master` or
 revision attribute will use this revision.


+Element manifest-server
+-----------------------
+
+At most one manifest-server may be specified.  The url attribute
+is used to specify the URL of a manifest server, which is an
+XML RPC service that will return a manifest in which each project
+is pegged to a known good revision for the current branch and
+target.
+
+The manifest server should implement:
+
+  GetApprovedManifest(branch, target)
+
+The target to use is defined by environment variables TARGET_PRODUCT
+and TARGET_BUILD_VARIANT. These variables are used to create a string
+of the form $TARGET_PRODUCT-$TARGET_BUILD_VARIANT, e.g. passion-userdebug.
+If one of those variables or both are not present, the program will call
+GetApprovedManifest without the target parameter and the manifest server
+should choose a reasonable default target.
+
+
 Element project
 ---------------
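For illustration, a minimal Python sketch of a client calling the GetApprovedManifest
method described above over XML-RPC. The server URL is a placeholder and the use of
xmlrpclib is an assumption; only the method name and the TARGET_PRODUCT /
TARGET_BUILD_VARIANT behaviour come from the text above.

  import os
  import xmlrpclib   # Python 2 standard library, matching the era of this code

  # Placeholder URL; in practice it comes from the <manifest-server url="..."/> element.
  server = xmlrpclib.Server('http://manifest-server.example.com/xmlrpc')

  branch = 'master'
  product = os.environ.get('TARGET_PRODUCT')
  variant = os.environ.get('TARGET_BUILD_VARIANT')

  if product and variant:
    # Both variables present: ask for $TARGET_PRODUCT-$TARGET_BUILD_VARIANT.
    manifest_xml = server.GetApprovedManifest(branch, '%s-%s' % (product, variant))
  else:
    # Otherwise the server is expected to pick a reasonable default target.
    manifest_xml = server.GetApprovedManifest(branch)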
editor.py (32 changes)

@@ -14,6 +14,7 @@
 # limitations under the License.

 import os
+import re
 import sys
 import subprocess
 import tempfile
@@ -38,9 +39,10 @@ class Editor(object):
       if e:
         return e

-      e = cls.globalConfig.GetString('core.editor')
-      if e:
-        return e
+      if cls.globalConfig:
+        e = cls.globalConfig.GetString('core.editor')
+        if e:
+          return e

       e = os.getenv('VISUAL')
       if e:
@@ -69,15 +71,33 @@ least one of these before using this command."""
     Returns:
       new value of edited text; None if editing did not succeed
     """
-    editor = cls._GetEditor().split()
+    editor = cls._GetEditor()
     if editor == ':':
       return data

     fd, path = tempfile.mkstemp()
     try:
       os.write(fd, data)
       os.close(fd)
       fd = None

-      if subprocess.Popen(editor + [path]).wait() != 0:
-        raise EditorError()
+      if re.compile("^.*[$ \t'].*$").match(editor):
+        args = [editor + ' "$@"', 'sh']
+        shell = True
+      else:
+        args = [editor]
+        shell = False
+      args.append(path)
+
+      try:
+        rc = subprocess.Popen(args, shell=shell).wait()
+      except OSError, e:
+        raise EditorError('editor failed, %s: %s %s'
+          % (str(e), editor, path))
+      if rc != 0:
+        raise EditorError('editor failed with exit status %d: %s %s'
+          % (rc, editor, path))

       fd2 = open(path)
       try:
         return fd2.read()
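As a standalone illustration of the editor-launch logic added above: the editor string
is run through a shell only when it contains characters the shell must interpret,
otherwise it is exec'd directly. The function name and example values below are
illustrative, not part of repo.

  import re
  import subprocess

  def launch_editor(editor, path):
    # Same test as editor.py: a space, tab, quote or '$' means we need a shell.
    if re.compile("^.*[$ \t'].*$").match(editor):
      args = [editor + ' "$@"', 'sh']   # 'sh' becomes $0, path expands as "$@"
      shell = True
    else:
      args = [editor]
      shell = False
    args.append(path)
    return subprocess.Popen(args, shell=shell).wait()

  # launch_editor('vim', '/tmp/msg')        -> exec'd directly
  # launch_editor('emacs -nw', '/tmp/msg')  -> run via /bin/sh -c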
error.py (21 changes)

@@ -24,6 +24,11 @@ class ManifestInvalidRevisionError(Exception):
 class EditorError(Exception):
   """Unspecified error from the user's text editor.
   """
+  def __init__(self, reason):
+    self.reason = reason
+
+  def __str__(self):
+    return self.reason

 class GitError(Exception):
   """Unspecified internal error from git.
@@ -52,6 +57,15 @@ class UploadError(Exception):
   def __str__(self):
     return self.reason

+class DownloadError(Exception):
+  """Cannot download a repository.
+  """
+  def __init__(self, reason):
+    self.reason = reason
+
+  def __str__(self):
+    return self.reason
+
 class NoSuchProjectError(Exception):
   """A specified project does not exist in the work tree.
   """
@@ -70,3 +84,10 @@ class RepoChangedException(Exception):
   """
   def __init__(self, extra_args=[]):
     self.extra_args = extra_args
+
+class HookError(Exception):
+  """Thrown if a 'repo-hook' could not be run.
+
+  The common case is that the file wasn't present when we tried to run it.
+  """
+  pass
@@ -17,6 +17,7 @@ import os
 import sys
 import subprocess
 import tempfile
+from signal import SIGTERM
 from error import GitError
 from trace import REPO_TRACE, IsTrace, Trace

@@ -29,8 +30,9 @@ LAST_CWD = None

 _ssh_proxy_path = None
 _ssh_sock_path = None
+_ssh_clients = []

-def _ssh_sock(create=True):
+def ssh_sock(create=True):
   global _ssh_sock_path
   if _ssh_sock_path is None:
     if not create:
@@ -51,6 +53,26 @@ def _ssh_proxy():
                                     'git_ssh')
   return _ssh_proxy_path

+def _add_ssh_client(p):
+  _ssh_clients.append(p)
+
+def _remove_ssh_client(p):
+  try:
+    _ssh_clients.remove(p)
+  except ValueError:
+    pass
+
+def terminate_ssh_clients():
+  global _ssh_clients
+  for p in _ssh_clients:
+    try:
+      os.kill(p.pid, SIGTERM)
+      p.wait()
+    except OSError:
+      pass
+  _ssh_clients = []
+
+_git_version = None

 class _GitCall(object):
   def version(self):
@@ -59,6 +81,21 @@ class _GitCall(object):
       return p.stdout
     return None

+  def version_tuple(self):
+    global _git_version
+
+    if _git_version is None:
+      ver_str = git.version()
+      if ver_str.startswith('git version '):
+        _git_version = tuple(
+          map(lambda x: int(x),
+            ver_str[len('git version '):].strip().split('.')[0:3]
+          ))
+      else:
+        print >>sys.stderr, 'fatal: "%s" unsupported' % ver_str
+        sys.exit(1)
+    return _git_version
+
   def __getattr__(self, name):
     name = name.replace('_','-')
     def fun(*cmdv):
@@ -68,23 +105,9 @@ class _GitCall(object):
     return fun
 git = _GitCall()

-_git_version = None
-
 def git_require(min_version, fail=False):
-  global _git_version
-
-  if _git_version is None:
-    ver_str = git.version()
-    if ver_str.startswith('git version '):
-      _git_version = tuple(
-        map(lambda x: int(x),
-          ver_str[len('git version '):].strip().split('.')[0:3]
-        ))
-    else:
-      print >>sys.stderr, 'fatal: "%s" unsupported' % ver_str
-      sys.exit(1)
-
-  if min_version <= _git_version:
+  git_version = git.version_tuple()
+  if min_version <= git_version:
     return True
   if fail:
     need = '.'.join(map(lambda x: str(x), min_version))
@@ -92,6 +115,9 @@ def git_require(min_version, fail=False):
     sys.exit(1)
   return False

+def _setenv(env, name, value):
+  env[name] = value.encode()
+
 class GitCommand(object):
   def __init__(self,
                project,
@@ -104,7 +130,7 @@ class GitCommand(object):
                ssh_proxy = False,
                cwd = None,
                gitdir = None):
-    env = dict(os.environ)
+    env = os.environ.copy()

     for e in [REPO_TRACE,
               GIT_DIR,
@@ -117,10 +143,10 @@ class GitCommand(object):
         del env[e]

     if disable_editor:
-      env['GIT_EDITOR'] = ':'
+      _setenv(env, 'GIT_EDITOR', ':')
     if ssh_proxy:
-      env['REPO_SSH_SOCK'] = _ssh_sock()
-      env['GIT_SSH'] = _ssh_proxy()
+      _setenv(env, 'REPO_SSH_SOCK', ssh_sock())
+      _setenv(env, 'GIT_SSH', _ssh_proxy())

     if project:
       if not cwd:
@@ -131,7 +157,7 @@ class GitCommand(object):
     command = [GIT]
     if bare:
       if gitdir:
-        env[GIT_DIR] = gitdir
+        _setenv(env, GIT_DIR, gitdir)
       cwd = None
     command.extend(cmdv)

@@ -188,6 +214,9 @@ class GitCommand(object):
     except Exception, e:
       raise GitError('%s: %s' % (command[1], e))

+    if ssh_proxy:
+      _add_ssh_client(p)
+
     self.process = p
     self.stdin = p.stdin

@@ -210,4 +239,8 @@ class GitCommand(object):
     else:
       p.stderr = None

-    return self.process.wait()
+    try:
+      rc = p.wait()
+    finally:
+      _remove_ssh_client(p)
+    return rc
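A brief usage sketch of the version_tuple / git_require API factored out above; the
minimum version shown is illustrative, not a requirement stated in this diff.

  from git_command import git, git_require

  print 'git version %s' % '.'.join(map(str, git.version_tuple()))

  # Exits with a fatal message unless the installed git is new enough.
  git_require((1, 5, 4), fail=True)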
git_config.py (231 changes)
@ -18,12 +18,21 @@ import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
from signal import SIGTERM
|
||||
from urllib2 import urlopen, HTTPError
|
||||
from error import GitError, UploadError
|
||||
from trace import Trace
|
||||
from git_command import GitCommand, _ssh_sock
|
||||
|
||||
from git_command import GitCommand
|
||||
from git_command import ssh_sock
|
||||
from git_command import terminate_ssh_clients
|
||||
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
@ -189,6 +198,15 @@ class GitConfig(object):
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def UrlInsteadOf(self, url):
|
||||
"""Resolve any url.*.insteadof references.
|
||||
"""
|
||||
for new_url in self.GetSubSections('url'):
|
||||
old_url = self.GetString('url.%s.insteadof' % new_url)
|
||||
if old_url is not None and url.startswith(old_url):
|
||||
return new_url + url[len(old_url):]
|
||||
return url
|
||||
|
||||
@property
|
||||
def _sections(self):
|
||||
d = self._section_dict
|
||||
@@ -254,26 +272,35 @@ class GitConfig(object):
       finally:
         fd.close()
     except IOError:
-      os.remove(self._pickle)
+      if os.path.exists(self._pickle):
+        os.remove(self._pickle)
     except cPickle.PickleError:
-      os.remove(self._pickle)
+      if os.path.exists(self._pickle):
+        os.remove(self._pickle)

   def _ReadGit(self):
-    d = self._do('--null', '--list')
-    c = {}
-    while d:
-      lf = d.index('\n')
-      nul = d.index('\0', lf + 1)
-
-      key = _key(d[0:lf])
-      val = d[lf + 1:nul]
+    """
+    Read configuration data from git.
+
+    This internal method populates the GitConfig cache.
+
+    """
+    c = {}
+    d = self._do('--null', '--list')
+    if d is None:
+      return c
+    for line in d.rstrip('\0').split('\0'):
+      if '\n' in line:
+        key, val = line.split('\n', 1)
+      else:
+        key = line
+        val = None

       if key in c:
         c[key].append(val)
       else:
         c[key] = [val]

-      d = d[nul + 1:]
     return c

   def _do(self, *args):
@ -346,59 +373,130 @@ class RefSpec(object):
|
||||
return s
|
||||
|
||||
|
||||
_ssh_cache = {}
|
||||
_master_processes = []
|
||||
_master_keys = set()
|
||||
_ssh_master = True
|
||||
_master_keys_lock = None
|
||||
|
||||
def _open_ssh(host, port):
|
||||
def init_ssh():
|
||||
"""Should be called once at the start of repo to init ssh master handling.
|
||||
|
||||
At the moment, all we do is to create our lock.
|
||||
"""
|
||||
global _master_keys_lock
|
||||
assert _master_keys_lock is None, "Should only call init_ssh once"
|
||||
_master_keys_lock = _threading.Lock()
|
||||
|
||||
def _open_ssh(host, port=None):
|
||||
global _ssh_master
|
||||
|
||||
key = '%s:%s' % (host, port)
|
||||
if key in _ssh_cache:
|
||||
return True
|
||||
|
||||
if not _ssh_master \
|
||||
or 'GIT_SSH' in os.environ \
|
||||
or sys.platform in ('win32', 'cygwin'):
|
||||
# failed earlier, or cygwin ssh can't do this
|
||||
#
|
||||
return False
|
||||
|
||||
command = ['ssh',
|
||||
'-o','ControlPath %s' % _ssh_sock(),
|
||||
'-p',str(port),
|
||||
'-M',
|
||||
'-N',
|
||||
host]
|
||||
# Acquire the lock. This is needed to prevent opening multiple masters for
|
||||
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
|
||||
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
|
||||
# one that was passed to repo init.
|
||||
_master_keys_lock.acquire()
|
||||
try:
|
||||
Trace(': %s', ' '.join(command))
|
||||
p = subprocess.Popen(command)
|
||||
except Exception, e:
|
||||
_ssh_master = False
|
||||
print >>sys.stderr, \
|
||||
'\nwarn: cannot enable ssh control master for %s:%s\n%s' \
|
||||
% (host,port, str(e))
|
||||
return False
|
||||
|
||||
_ssh_cache[key] = p
|
||||
time.sleep(1)
|
||||
return True
|
||||
# Check to see whether we already think that the master is running; if we
|
||||
# think it's already running, return right away.
|
||||
if port is not None:
|
||||
key = '%s:%s' % (host, port)
|
||||
else:
|
||||
key = host
|
||||
|
||||
if key in _master_keys:
|
||||
return True
|
||||
|
||||
if not _ssh_master \
|
||||
or 'GIT_SSH' in os.environ \
|
||||
or sys.platform in ('win32', 'cygwin'):
|
||||
# failed earlier, or cygwin ssh can't do this
|
||||
#
|
||||
return False
|
||||
|
||||
# We will make two calls to ssh; this is the common part of both calls.
|
||||
command_base = ['ssh',
|
||||
'-o','ControlPath %s' % ssh_sock(),
|
||||
host]
|
||||
if port is not None:
|
||||
command_base[1:1] = ['-p',str(port)]
|
||||
|
||||
# Since the key wasn't in _master_keys, we think that master isn't running.
|
||||
# ...but before actually starting a master, we'll double-check. This can
|
||||
# be important because we can't tell that 'git@myhost.com' is the same
|
||||
# as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
|
||||
check_command = command_base + ['-O','check']
|
||||
try:
|
||||
Trace(': %s', ' '.join(check_command))
|
||||
check_process = subprocess.Popen(check_command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
check_process.communicate() # read output, but ignore it...
|
||||
isnt_running = check_process.wait()
|
||||
|
||||
if not isnt_running:
|
||||
# Our double-check found that the master _was_ in fact running.  Add to
|
||||
# the list of keys.
|
||||
_master_keys.add(key)
|
||||
return True
|
||||
except Exception:
|
||||
# Ignore exceptions.  We will fall back to the normal command and print
|
||||
# to the log there.
|
||||
pass
|
||||
|
||||
command = command_base[:1] + \
|
||||
['-M', '-N'] + \
|
||||
command_base[1:]
|
||||
try:
|
||||
Trace(': %s', ' '.join(command))
|
||||
p = subprocess.Popen(command)
|
||||
except Exception, e:
|
||||
_ssh_master = False
|
||||
print >>sys.stderr, \
|
||||
'\nwarn: cannot enable ssh control master for %s:%s\n%s' \
|
||||
% (host,port, str(e))
|
||||
return False
|
||||
|
||||
_master_processes.append(p)
|
||||
_master_keys.add(key)
|
||||
time.sleep(1)
|
||||
return True
|
||||
finally:
|
||||
_master_keys_lock.release()
|
||||
|
||||
def close_ssh():
|
||||
for key,p in _ssh_cache.iteritems():
|
||||
os.kill(p.pid, SIGTERM)
|
||||
p.wait()
|
||||
_ssh_cache.clear()
|
||||
global _master_keys_lock
|
||||
|
||||
d = _ssh_sock(create=False)
|
||||
terminate_ssh_clients()
|
||||
|
||||
for p in _master_processes:
|
||||
try:
|
||||
os.kill(p.pid, SIGTERM)
|
||||
p.wait()
|
||||
except OSError:
|
||||
pass
|
||||
del _master_processes[:]
|
||||
_master_keys.clear()
|
||||
|
||||
d = ssh_sock(create=False)
|
||||
if d:
|
||||
try:
|
||||
os.rmdir(os.path.dirname(d))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# We're done with the lock, so we can delete it.
|
||||
_master_keys_lock = None
|
||||
|
||||
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
|
||||
URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/')
|
||||
|
||||
def GetSchemeFromUrl(url):
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
return m.group(1)
|
||||
return None
|
||||
|
||||
def _preconnect(url):
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
@ -407,7 +505,7 @@ def _preconnect(url):
|
||||
if ':' in host:
|
||||
host, port = host.split(':')
|
||||
else:
|
||||
port = 22
|
||||
port = None
|
||||
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
|
||||
return _open_ssh(host, port)
|
||||
return False
|
||||
@ -415,7 +513,7 @@ def _preconnect(url):
|
||||
m = URI_SCP.match(url)
|
||||
if m:
|
||||
host = m.group(1)
|
||||
return _open_ssh(host, 22)
|
||||
return _open_ssh(host)
|
||||
|
||||
return False
|
||||
|
||||
@ -432,8 +530,30 @@ class Remote(object):
|
||||
self._Get('fetch', all=True))
|
||||
self._review_protocol = None
|
||||
|
||||
def _InsteadOf(self):
|
||||
globCfg = GitConfig.ForUser()
|
||||
urlList = globCfg.GetSubSections('url')
|
||||
longest = ""
|
||||
longestUrl = ""
|
||||
|
||||
for url in urlList:
|
||||
key = "url." + url + ".insteadOf"
|
||||
insteadOfList = globCfg.GetString(key, all=True)
|
||||
|
||||
for insteadOf in insteadOfList:
|
||||
if self.url.startswith(insteadOf) \
|
||||
and len(insteadOf) > len(longest):
|
||||
longest = insteadOf
|
||||
longestUrl = url
|
||||
|
||||
if len(longest) == 0:
|
||||
return self.url
|
||||
|
||||
return self.url.replace(longest, longestUrl, 1)
|
||||
|
||||
def PreConnectFetch(self):
|
||||
return _preconnect(self.url)
|
||||
connectionUrl = self._InsteadOf()
|
||||
return _preconnect(connectionUrl)
|
||||
|
||||
@property
|
||||
def ReviewProtocol(self):
|
||||
@ -460,23 +580,25 @@ class Remote(object):
|
||||
try:
|
||||
info = urlopen(u).read()
|
||||
if info == 'NOT_AVAILABLE':
|
||||
raise UploadError('Upload over ssh unavailable')
|
||||
raise UploadError('%s: SSH disabled' % self.review)
|
||||
if '<' in info:
|
||||
# Assume the server gave us some sort of HTML
|
||||
# response back, like maybe a login page.
|
||||
#
|
||||
raise UploadError('Cannot read %s:\n%s' % (u, info))
|
||||
raise UploadError('%s: Cannot parse response' % u)
|
||||
|
||||
self._review_protocol = 'ssh'
|
||||
self._review_host = info.split(" ")[0]
|
||||
self._review_port = info.split(" ")[1]
|
||||
except urllib2.URLError, e:
|
||||
raise UploadError('%s: %s' % (self.review, e.reason[1]))
|
||||
except HTTPError, e:
|
||||
if e.code == 404:
|
||||
self._review_protocol = 'http-post'
|
||||
self._review_host = None
|
||||
self._review_port = None
|
||||
else:
|
||||
raise UploadError('Cannot guess Gerrit version')
|
||||
raise UploadError('Upload over ssh unavailable')
|
||||
|
||||
REVIEW_CACHE[u] = (
|
||||
self._review_protocol,
|
||||
@ -487,8 +609,11 @@ class Remote(object):
|
||||
def SshReviewUrl(self, userEmail):
|
||||
if self.ReviewProtocol != 'ssh':
|
||||
return None
|
||||
username = self._config.GetString('review.%s.username' % self.review)
|
||||
if username is None:
|
||||
username = userEmail.split("@")[0]
|
||||
return 'ssh://%s@%s:%s/%s' % (
|
||||
userEmail.split("@")[0],
|
||||
username,
|
||||
self._review_host,
|
||||
self._review_port,
|
||||
self.projectname)
|
||||
|
git_ssh (2 changes)

@@ -1,2 +1,2 @@
 #!/bin/sh
-exec ssh -o "ControlPath $REPO_SSH_SOCK" "$@"
+exec ssh -o "ControlMaster no" -o "ControlPath $REPO_SSH_SOCK" "$@"
hooks/commit-msg (new executable file, 101 lines)

@@ -0,0 +1,101 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 2.1.2-rc2-33-g7e30c72
|
||||
#
|
||||
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
CHANGE_ID_AFTER="Bug|Issue"
|
||||
MSG="$1"
|
||||
|
||||
# Check for, and add if missing, a unique Change-Id
|
||||
#
|
||||
add_ChangeId() {
|
||||
clean_message=$(sed -e '
|
||||
/^diff --git a\/.*/{
|
||||
s///
|
||||
q
|
||||
}
|
||||
/^Signed-off-by:/d
|
||||
/^#/d
|
||||
' "$MSG" | git stripspace)
|
||||
if test -z "$clean_message"
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
if grep -i '^Change-Id:' "$MSG" >/dev/null
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
id=$(_gen_ChangeId)
|
||||
perl -e '
|
||||
$MSG = shift;
|
||||
$id = shift;
|
||||
$CHANGE_ID_AFTER = shift;
|
||||
|
||||
undef $/;
|
||||
open(I, $MSG); $_ = <I>; close I;
|
||||
s|^diff --git a/.*||ms;
|
||||
s|^#.*$||mg;
|
||||
exit unless $_;
|
||||
|
||||
@message = split /\n/;
|
||||
$haveFooter = 0;
|
||||
$startFooter = @message;
|
||||
for($line = @message - 1; $line >= 0; $line--) {
|
||||
$_ = $message[$line];
|
||||
|
||||
($haveFooter++, next) if /^[a-zA-Z0-9-]+:/;
|
||||
next if /^[ []/;
|
||||
$startFooter = $line if ($haveFooter && /^\r?$/);
|
||||
last;
|
||||
}
|
||||
|
||||
@footer = @message[$startFooter+1..@message];
|
||||
@message = @message[0..$startFooter];
|
||||
push(@footer, "") unless @footer;
|
||||
|
||||
for ($line = 0; $line < @footer; $line++) {
|
||||
$_ = $footer[$line];
|
||||
next if /^($CHANGE_ID_AFTER):/i;
|
||||
last;
|
||||
}
|
||||
splice(@footer, $line, 0, "Change-Id: I$id");
|
||||
|
||||
$_ = join("\n", @message, @footer);
|
||||
open(O, ">$MSG"); print O; close O;
|
||||
' "$MSG" "$id" "$CHANGE_ID_AFTER"
|
||||
}
|
||||
_gen_ChangeIdInput() {
|
||||
echo "tree $(git write-tree)"
|
||||
if parent=$(git rev-parse HEAD^0 2>/dev/null)
|
||||
then
|
||||
echo "parent $parent"
|
||||
fi
|
||||
echo "author $(git var GIT_AUTHOR_IDENT)"
|
||||
echo "committer $(git var GIT_COMMITTER_IDENT)"
|
||||
echo
|
||||
printf '%s' "$clean_message"
|
||||
}
|
||||
_gen_ChangeId() {
|
||||
_gen_ChangeIdInput |
|
||||
git hash-object -t commit --stdin
|
||||
}
|
||||
|
||||
|
||||
add_ChangeId
|
main.py (106 changes)
@ -22,17 +22,22 @@ if __name__ == '__main__':
|
||||
del sys.argv[-1]
|
||||
del magic
|
||||
|
||||
import netrc
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
from trace import SetTrace
|
||||
from git_config import close_ssh
|
||||
from git_command import git, GitCommand
|
||||
from git_config import init_ssh, close_ssh
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from command import PagedCommand
|
||||
from editor import Editor
|
||||
from error import DownloadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
@ -53,6 +58,9 @@ global_options.add_option('--no-pager',
|
||||
global_options.add_option('--trace',
|
||||
dest='trace', action='store_true',
|
||||
help='trace git command execution')
|
||||
global_options.add_option('--time',
|
||||
dest='time', action='store_true',
|
||||
help='time repo command execution')
|
||||
global_options.add_option('--version',
|
||||
dest='show_version', action='store_true',
|
||||
help='display this version of repo')
|
||||
@ -61,6 +69,8 @@ class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
self.repodir = repodir
|
||||
self.commands = all_commands
|
||||
# add 'branch' as an alias for 'branches'
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
|
||||
def _Run(self, argv):
|
||||
name = None
|
||||
@ -120,7 +130,23 @@ class _Repo(object):
|
||||
RunPager(config)
|
||||
|
||||
try:
|
||||
cmd.Execute(copts, cargs)
|
||||
start = time.time()
|
||||
try:
|
||||
cmd.Execute(copts, cargs)
|
||||
finally:
|
||||
elapsed = time.time() - start
|
||||
hours, remainder = divmod(elapsed, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
if gopts.time:
|
||||
if hours == 0:
|
||||
print >>sys.stderr, 'real\t%dm%.3fs' \
|
||||
% (minutes, seconds)
|
||||
else:
|
||||
print >>sys.stderr, 'real\t%dh%dm%.3fs' \
|
||||
% (hours, minutes, seconds)
|
||||
except DownloadError, e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
sys.exit(1)
|
||||
except ManifestInvalidRevisionError, e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
sys.exit(1)
|
||||
@ -131,6 +157,9 @@ class _Repo(object):
|
||||
print >>sys.stderr, 'error: no project in current directory'
|
||||
sys.exit(1)
|
||||
|
||||
def _MyRepoPath():
|
||||
return os.path.dirname(__file__)
|
||||
|
||||
def _MyWrapperPath():
|
||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
||||
|
||||
@ -197,6 +226,77 @@ def _PruneOptions(argv, opt):
|
||||
continue
|
||||
i += 1
|
||||
|
||||
_user_agent = None
|
||||
|
||||
def _UserAgent():
|
||||
global _user_agent
|
||||
|
||||
if _user_agent is None:
|
||||
py_version = sys.version_info
|
||||
|
||||
os_name = sys.platform
|
||||
if os_name == 'linux2':
|
||||
os_name = 'Linux'
|
||||
elif os_name == 'win32':
|
||||
os_name = 'Win32'
|
||||
elif os_name == 'cygwin':
|
||||
os_name = 'Cygwin'
|
||||
elif os_name == 'darwin':
|
||||
os_name = 'Darwin'
|
||||
|
||||
p = GitCommand(
|
||||
None, ['describe', 'HEAD'],
|
||||
cwd = _MyRepoPath(),
|
||||
capture_stdout = True)
|
||||
if p.Wait() == 0:
|
||||
repo_version = p.stdout
|
||||
if len(repo_version) > 0 and repo_version[-1] == '\n':
|
||||
repo_version = repo_version[0:-1]
|
||||
if len(repo_version) > 0 and repo_version[0] == 'v':
|
||||
repo_version = repo_version[1:]
|
||||
else:
|
||||
repo_version = 'unknown'
|
||||
|
||||
_user_agent = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||
repo_version,
|
||||
os_name,
|
||||
'.'.join(map(lambda d: str(d), git.version_tuple())),
|
||||
py_version[0], py_version[1], py_version[2])
|
||||
return _user_agent
|
||||
|
||||
class _UserAgentHandler(urllib2.BaseHandler):
|
||||
def http_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
|
||||
def https_request(self, req):
|
||||
req.add_header('User-Agent', _UserAgent())
|
||||
return req
|
||||
|
||||
def init_http():
|
||||
handlers = [_UserAgentHandler()]
|
||||
|
||||
mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
try:
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(None, 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(None, 'https://%s/' % host, p[0], p[2])
|
||||
except netrc.NetrcParseError:
|
||||
pass
|
||||
except IOError:
|
||||
pass
|
||||
handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
|
||||
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||
handlers.append(urllib2.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
|
||||
urllib2.install_opener(urllib2.build_opener(*handlers))
|
||||
|
||||
def _Main(argv):
|
||||
opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
|
||||
opt.add_option("--repo-dir", dest="repodir",
|
||||
@ -214,6 +314,8 @@ def _Main(argv):
|
||||
repo = _Repo(opt.repodir)
|
||||
try:
|
||||
try:
|
||||
init_ssh()
|
||||
init_http()
|
||||
repo._Run(argv)
|
||||
finally:
|
||||
close_ssh()
|
||||
|
manifest_xml.py (210 changes)
@ -14,7 +14,9 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urlparse
|
||||
import xml.dom.minidom
|
||||
|
||||
from git_config import GitConfig, IsId
|
||||
@ -24,26 +26,36 @@ from error import ManifestParseError
|
||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
||||
LOCAL_MANIFEST_NAME = 'local_manifest.xml'
|
||||
|
||||
urlparse.uses_relative.extend(['ssh', 'git'])
|
||||
urlparse.uses_netloc.extend(['ssh', 'git'])
|
||||
|
||||
class _Default(object):
|
||||
"""Project defaults within the manifest."""
|
||||
|
||||
revisionExpr = None
|
||||
remote = None
|
||||
sync_j = 1
|
||||
|
||||
class _XmlRemote(object):
|
||||
def __init__(self,
|
||||
name,
|
||||
fetch=None,
|
||||
manifestUrl=None,
|
||||
review=None):
|
||||
self.name = name
|
||||
self.fetchUrl = fetch
|
||||
self.manifestUrl = manifestUrl
|
||||
self.reviewUrl = review
|
||||
|
||||
def ToRemoteSpec(self, projectName):
|
||||
url = self.fetchUrl
|
||||
while url.endswith('/'):
|
||||
url = url[:-1]
|
||||
url += '/%s.git' % projectName
|
||||
url = self.fetchUrl.rstrip('/') + '/' + projectName + '.git'
|
||||
manifestUrl = self.manifestUrl.rstrip('/')
|
||||
# urljoin will get confused if there is no scheme in the base url
|
||||
# ie, if manifestUrl is of the form <hostname:port>
|
||||
if manifestUrl.find(':') != manifestUrl.find('/') - 1:
|
||||
manifestUrl = 'gopher://' + manifestUrl
|
||||
url = urlparse.urljoin(manifestUrl, url)
|
||||
url = re.sub(r'^gopher://', '', url)
|
||||
return RemoteSpec(self.name, url, self.reviewUrl)
|
||||
|
||||
class XmlManifest(object):
|
||||
@ -65,8 +77,8 @@ class XmlManifest(object):
|
||||
|
||||
self._Unload()
|
||||
|
||||
def Link(self, name):
|
||||
"""Update the repo metadata to use a different manifest.
|
||||
def Override(self, name):
|
||||
"""Use a different manifest, just for the current instantiation.
|
||||
"""
|
||||
path = os.path.join(self.manifestProject.worktree, name)
|
||||
if not os.path.isfile(path):
|
||||
@ -80,6 +92,11 @@ class XmlManifest(object):
|
||||
finally:
|
||||
self.manifestFile = old
|
||||
|
||||
def Link(self, name):
|
||||
"""Update the repo metadata to use a different manifest.
|
||||
"""
|
||||
self.Override(name)
|
||||
|
||||
try:
|
||||
if os.path.exists(self.manifestFile):
|
||||
os.remove(self.manifestFile)
|
||||
@ -102,6 +119,15 @@ class XmlManifest(object):
|
||||
root = doc.createElement('manifest')
|
||||
doc.appendChild(root)
|
||||
|
||||
# Save out the notice. There's a little bit of work here to give it the
|
||||
# right whitespace, which assumes that the notice is automatically indented
|
||||
# by 4 by minidom.
|
||||
if self.notice:
|
||||
notice_element = root.appendChild(doc.createElement('notice'))
|
||||
notice_lines = self.notice.splitlines()
|
||||
indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
|
||||
notice_element.appendChild(doc.createTextNode(indented_notice))
|
||||
|
||||
d = self.default
|
||||
sort_remotes = list(self.remotes.keys())
|
||||
sort_remotes.sort()
|
||||
@ -119,10 +145,19 @@ class XmlManifest(object):
|
||||
if d.revisionExpr:
|
||||
have_default = True
|
||||
e.setAttribute('revision', d.revisionExpr)
|
||||
if d.sync_j > 1:
|
||||
have_default = True
|
||||
e.setAttribute('sync-j', '%d' % d.sync_j)
|
||||
if have_default:
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
if self._manifest_server:
|
||||
e = doc.createElement('manifest-server')
|
||||
e.setAttribute('url', self._manifest_server)
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
sort_projects = list(self.projects.keys())
|
||||
sort_projects.sort()
|
||||
|
||||
@ -151,6 +186,14 @@ class XmlManifest(object):
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
if self._repo_hooks_project:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
e = doc.createElement('repo-hooks')
|
||||
e.setAttribute('in-project', self._repo_hooks_project.name)
|
||||
e.setAttribute('enabled-list',
|
||||
' '.join(self._repo_hooks_project.enabled_repo_hooks))
|
||||
root.appendChild(e)
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
||||
@property
|
||||
@ -168,6 +211,21 @@ class XmlManifest(object):
|
||||
self._Load()
|
||||
return self._default
|
||||
|
||||
@property
|
||||
def repo_hooks_project(self):
|
||||
self._Load()
|
||||
return self._repo_hooks_project
|
||||
|
||||
@property
|
||||
def notice(self):
|
||||
self._Load()
|
||||
return self._notice
|
||||
|
||||
@property
|
||||
def manifest_server(self):
|
||||
self._Load()
|
||||
return self._manifest_server
|
||||
|
||||
@property
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
@ -177,13 +235,16 @@ class XmlManifest(object):
|
||||
self._projects = {}
|
||||
self._remotes = {}
|
||||
self._default = None
|
||||
self._repo_hooks_project = None
|
||||
self._notice = None
|
||||
self.branch = None
|
||||
self._manifest_server = None
|
||||
|
||||
def _Load(self):
|
||||
if not self._loaded:
|
||||
m = self.manifestProject
|
||||
b = m.GetBranch(m.CurrentBranch).merge
|
||||
if b.startswith(R_HEADS):
|
||||
if b is not None and b.startswith(R_HEADS):
|
||||
b = b[len(R_HEADS):]
|
||||
self.branch = b
|
||||
|
||||
@ -207,15 +268,15 @@ class XmlManifest(object):
|
||||
def _ParseManifest(self, is_root_file):
|
||||
root = xml.dom.minidom.parse(self.manifestFile)
|
||||
if not root or not root.childNodes:
|
||||
raise ManifestParseError, \
|
||||
"no root node in %s" % \
|
||||
self.manifestFile
|
||||
raise ManifestParseError(
|
||||
"no root node in %s" %
|
||||
self.manifestFile)
|
||||
|
||||
config = root.childNodes[0]
|
||||
if config.nodeName != 'manifest':
|
||||
raise ManifestParseError, \
|
||||
"no <manifest> in %s" % \
|
||||
self.manifestFile
|
||||
raise ManifestParseError(
|
||||
"no <manifest> in %s" %
|
||||
self.manifestFile)
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remove-project':
|
||||
@ -223,38 +284,83 @@ class XmlManifest(object):
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
raise ManifestParseError, \
|
||||
'project %s not found' % \
|
||||
(name)
|
||||
raise ManifestParseError(
|
||||
'project %s not found' %
|
||||
(name))
|
||||
|
||||
# If the manifest removes the hooks project, treat it as if it deleted
|
||||
# the repo-hooks element too.
|
||||
if self._repo_hooks_project and (self._repo_hooks_project.name == name):
|
||||
self._repo_hooks_project = None
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remote':
|
||||
remote = self._ParseRemote(node)
|
||||
if self._remotes.get(remote.name):
|
||||
raise ManifestParseError, \
|
||||
'duplicate remote %s in %s' % \
|
||||
(remote.name, self.manifestFile)
|
||||
raise ManifestParseError(
|
||||
'duplicate remote %s in %s' %
|
||||
(remote.name, self.manifestFile))
|
||||
self._remotes[remote.name] = remote
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'default':
|
||||
if self._default is not None:
|
||||
raise ManifestParseError, \
|
||||
'duplicate default in %s' % \
|
||||
(self.manifestFile)
|
||||
raise ManifestParseError(
|
||||
'duplicate default in %s' %
|
||||
(self.manifestFile))
|
||||
self._default = self._ParseDefault(node)
|
||||
if self._default is None:
|
||||
self._default = _Default()
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'notice':
|
||||
if self._notice is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate notice in %s' %
|
||||
(self.manifestFile))
|
||||
self._notice = self._ParseNotice(node)
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'manifest-server':
|
||||
url = self._reqatt(node, 'url')
|
||||
if self._manifest_server is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate manifest-server in %s' %
|
||||
(self.manifestFile))
|
||||
self._manifest_server = url
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'project':
|
||||
project = self._ParseProject(node)
|
||||
if self._projects.get(project.name):
|
||||
raise ManifestParseError, \
|
||||
'duplicate project %s in %s' % \
|
||||
(project.name, self.manifestFile)
|
||||
raise ManifestParseError(
|
||||
'duplicate project %s in %s' %
|
||||
(project.name, self.manifestFile))
|
||||
self._projects[project.name] = project
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'repo-hooks':
|
||||
# Get the name of the project and the (space-separated) list of enabled.
|
||||
repo_hooks_project = self._reqatt(node, 'in-project')
|
||||
enabled_repo_hooks = self._reqatt(node, 'enabled-list').split()
|
||||
|
||||
# Only one project can be the hooks project
|
||||
if self._repo_hooks_project is not None:
|
||||
raise ManifestParseError(
|
||||
'duplicate repo-hooks in %s' %
|
||||
(self.manifestFile))
|
||||
|
||||
# Store a reference to the Project.
|
||||
try:
|
||||
self._repo_hooks_project = self._projects[repo_hooks_project]
|
||||
except KeyError:
|
||||
raise ManifestParseError(
|
||||
'project %s not found for repo-hooks' %
|
||||
(repo_hooks_project))
|
||||
|
||||
# Store the enabled hooks in the Project object.
|
||||
self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
@ -271,7 +377,8 @@ class XmlManifest(object):
|
||||
|
||||
if name is None:
|
||||
s = m_url.rindex('/') + 1
|
||||
remote = _XmlRemote('origin', m_url[:s])
|
||||
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
|
||||
remote = _XmlRemote('origin', m_url[:s], manifestUrl)
|
||||
name = m_url[s:]
|
||||
|
||||
if name.endswith('.git'):
|
||||
@ -299,7 +406,8 @@ class XmlManifest(object):
|
||||
review = node.getAttribute('review')
|
||||
if review == '':
|
||||
review = None
|
||||
return _XmlRemote(name, fetch, review)
|
||||
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
|
||||
return _XmlRemote(name, fetch, manifestUrl, review)
|
||||
|
||||
def _ParseDefault(self, node):
|
||||
"""
|
||||
@ -310,12 +418,56 @@ class XmlManifest(object):
|
||||
d.revisionExpr = node.getAttribute('revision')
|
||||
if d.revisionExpr == '':
|
||||
d.revisionExpr = None
|
||||
sync_j = node.getAttribute('sync-j')
|
||||
if sync_j == '' or sync_j is None:
|
||||
d.sync_j = 1
|
||||
else:
|
||||
d.sync_j = int(sync_j)
|
||||
return d
|
||||
|
||||
def _ParseNotice(self, node):
|
||||
"""
|
||||
reads a <notice> element from the manifest file
|
||||
|
||||
The <notice> element is distinct from other tags in the XML in that the
|
||||
data is conveyed between the start and end tag (it's not an empty-element
|
||||
tag).
|
||||
|
||||
The white space (carriage returns, indentation) for the notice element is
|
||||
relevant and is parsed in a way that is based on how python docstrings work.
|
||||
In fact, the code is remarkably similar to here:
|
||||
http://www.python.org/dev/peps/pep-0257/
|
||||
"""
|
||||
# Get the data out of the node...
|
||||
notice = node.childNodes[0].data
|
||||
|
||||
# Figure out minimum indentation, skipping the first line (the same line
|
||||
# as the <notice> tag)...
|
||||
minIndent = sys.maxint
|
||||
lines = notice.splitlines()
|
||||
for line in lines[1:]:
|
||||
lstrippedLine = line.lstrip()
|
||||
if lstrippedLine:
|
||||
indent = len(line) - len(lstrippedLine)
|
||||
minIndent = min(indent, minIndent)
|
||||
|
||||
# Strip leading / trailing blank lines and also indentation.
|
||||
cleanLines = [lines[0].strip()]
|
||||
for line in lines[1:]:
|
||||
cleanLines.append(line[minIndent:].rstrip())
|
||||
|
||||
# Clear completely blank lines from front and back...
|
||||
while cleanLines and not cleanLines[0]:
|
||||
del cleanLines[0]
|
||||
while cleanLines and not cleanLines[-1]:
|
||||
del cleanLines[-1]
|
||||
|
||||
return '\n'.join(cleanLines)
|
||||
|
||||
def _ParseProject(self, node):
|
||||
"""
|
||||
reads a <project> element from the manifest file
|
||||
"""
|
||||
"""
|
||||
name = self._reqatt(node, 'name')
|
||||
|
||||
remote = self._get_remote(node)
|
||||
@ -347,7 +499,7 @@ class XmlManifest(object):
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path)
|
||||
worktree = os.path.join(self.topdir, path).replace('\\', '/')
|
||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
|
progress.py (22 changes)

@@ -13,23 +13,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import sys
 from time import time
 from trace import IsTrace

+_NOT_TTY = not os.isatty(2)
+
 class Progress(object):
-  def __init__(self, title, total=0):
+  def __init__(self, title, total=0, units=''):
     self._title = title
     self._total = total
     self._done = 0
     self._lastp = -1
     self._start = time()
     self._show = False
+    self._units = units

   def update(self, inc=1):
     self._done += inc

-    if IsTrace():
+    if _NOT_TTY or IsTrace():
       return

     if not self._show:
@@ -48,15 +52,15 @@ class Progress(object):

     if self._lastp != p:
       self._lastp = p
-      sys.stderr.write('\r%s: %3d%% (%d/%d) ' % (
+      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s) ' % (
         self._title,
         p,
-        self._done,
-        self._total))
+        self._done, self._units,
+        self._total, self._units))
       sys.stderr.flush()

   def end(self):
-    if IsTrace() or not self._show:
+    if _NOT_TTY or IsTrace() or not self._show:
       return

     if self._total <= 0:
@@ -66,9 +70,9 @@ class Progress(object):
       sys.stderr.flush()
     else:
       p = (100 * self._done) / self._total
-      sys.stderr.write('\r%s: %3d%% (%d/%d), done.  \n' % (
+      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done.  \n' % (
         self._title,
         p,
-        self._done,
-        self._total))
+        self._done, self._units,
+        self._total, self._units))
       sys.stderr.flush()
project.py (712 changes)
@ -12,20 +12,25 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import traceback
|
||||
import errno
|
||||
import filecmp
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
from color import Coloring
|
||||
from git_command import GitCommand
|
||||
from git_config import GitConfig, IsId
|
||||
from error import GitError, ImportError, UploadError
|
||||
from git_config import GitConfig, IsId, GetSchemeFromUrl
|
||||
from error import DownloadError
|
||||
from error import GitError, HookError, ImportError, UploadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from progress import Progress
|
||||
|
||||
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
|
||||
|
||||
@ -54,14 +59,25 @@ def not_rev(r):
|
||||
def sq(r):
|
||||
return "'" + r.replace("'", "'\''") + "'"
|
||||
|
||||
hook_list = None
|
||||
def repo_hooks():
|
||||
global hook_list
|
||||
if hook_list is None:
|
||||
_project_hook_list = None
|
||||
def _ProjectHooks():
|
||||
"""List the hooks present in the 'hooks' directory.
|
||||
|
||||
These hooks are project hooks and are copied to the '.git/hooks' directory
|
||||
of all subprojects.
|
||||
|
||||
This function caches the list of hooks (based on the contents of the
|
||||
'repo/hooks' directory) on the first call.
|
||||
|
||||
Returns:
|
||||
A list of absolute paths to all of the files in the hooks directory.
|
||||
"""
|
||||
global _project_hook_list
|
||||
if _project_hook_list is None:
|
||||
d = os.path.abspath(os.path.dirname(__file__))
|
||||
d = os.path.join(d , 'hooks')
|
||||
hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
|
||||
return hook_list
|
||||
_project_hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
|
||||
return _project_hook_list
|
||||
|
||||
def relpath(dst, src):
|
||||
src = os.path.dirname(src)
|
||||
@ -111,7 +127,6 @@ class ReviewableBranch(object):
|
||||
self.project = project
|
||||
self.branch = branch
|
||||
self.base = base
|
||||
self.replace_changes = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -149,10 +164,10 @@ class ReviewableBranch(object):
|
||||
R_HEADS + self.name,
|
||||
'--')
|
||||
|
||||
def UploadForReview(self, people):
|
||||
def UploadForReview(self, people, auto_topic=False):
|
||||
self.project.UploadForReview(self.name,
|
||||
self.replace_changes,
|
||||
people)
|
||||
people,
|
||||
auto_topic=auto_topic)
|
||||
|
||||
def GetPublishedRefs(self):
|
||||
refs = {}
|
||||
@ -203,6 +218,10 @@ class _CopyFile:
|
||||
# remove existing file first, since it might be read-only
|
||||
if os.path.exists(dest):
|
||||
os.remove(dest)
|
||||
else:
|
||||
dir = os.path.dirname(dest)
|
||||
if not os.path.isdir(dir):
|
||||
os.makedirs(dir)
|
||||
shutil.copy(src, dest)
|
||||
# make the file read-only
|
||||
mode = os.stat(dest)[stat.ST_MODE]
|
||||
@ -220,6 +239,249 @@ class RemoteSpec(object):
|
||||
self.url = url
|
||||
self.review = review
|
||||
|
||||
class RepoHook(object):
|
||||
"""A RepoHook contains information about a script to run as a hook.
|
||||
|
||||
Hooks are used to run a python script before running an upload (for instance,
|
||||
to run presubmit checks). Eventually, we may have hooks for other actions.
|
||||
|
||||
This shouldn't be confused with files in the 'repo/hooks' directory. Those
|
||||
files are copied into each '.git/hooks' folder for each project. Repo-level
|
||||
hooks are associated instead with repo actions.
|
||||
|
||||
Hooks are always python. When a hook is run, we will load the hook into the
|
||||
interpreter and execute its main() function.
|
||||
"""
|
||||
def __init__(self,
|
||||
hook_type,
|
||||
hooks_project,
|
||||
topdir,
|
||||
abort_if_user_denies=False):
|
||||
"""RepoHook constructor.
|
||||
|
||||
Params:
|
||||
hook_type: A string representing the type of hook. This is also used
|
||||
to figure out the name of the file containing the hook. For
|
||||
example: 'pre-upload'.
|
||||
hooks_project: The project containing the repo hooks. If you have a
|
||||
manifest, this is manifest.repo_hooks_project. OK if this is None,
|
||||
which will make the hook a no-op.
|
||||
topdir: Repo's top directory (the one containing the .repo directory).
|
||||
Scripts will run with CWD as this directory. If you have a manifest,
|
||||
this is manifest.topdir
|
||||
abort_if_user_denies: If True, we'll throw a HookError() if the user
|
||||
doesn't allow us to run the hook.
|
||||
"""
|
||||
self._hook_type = hook_type
|
||||
self._hooks_project = hooks_project
|
||||
self._topdir = topdir
|
||||
self._abort_if_user_denies = abort_if_user_denies
|
||||
|
||||
# Store the full path to the script for convenience.
|
||||
if self._hooks_project:
|
||||
self._script_fullpath = os.path.join(self._hooks_project.worktree,
|
||||
self._hook_type + '.py')
|
||||
else:
|
||||
self._script_fullpath = None
|
||||
|
||||
def _GetHash(self):
|
||||
"""Return a hash of the contents of the hooks directory.
|
||||
|
||||
We'll just use git to do this. This hash has the property that if anything
|
||||
changes in the directory we will return a different hash.
|
||||
|
||||
SECURITY CONSIDERATION:
|
||||
This hash only represents the contents of files in the hook directory, not
|
||||
any other files imported or called by hooks. Changes to imported files
|
||||
can change the script behavior without affecting the hash.
|
||||
|
||||
Returns:
|
||||
A string representing the hash. This will always be ASCII so that it can
|
||||
be printed to the user easily.
|
||||
"""
|
||||
assert self._hooks_project, "Must have hooks to calculate their hash."
|
||||
|
||||
# We will use the work_git object rather than just calling GetRevisionId().
|
||||
# That gives us a hash of the latest checked in version of the files that
|
||||
# the user will actually be executing. Specifically, GetRevisionId()
|
||||
# doesn't appear to change even if a user checks out a different version
|
||||
# of the hooks repo (via git checkout) nor if a user commits their own revs.
|
||||
#
|
||||
# NOTE: Local (non-committed) changes will not be factored into this hash.
|
||||
# I think this is OK, since we're really only worried about warning the user
|
||||
# about upstream changes.
|
||||
return self._hooks_project.work_git.rev_parse('HEAD')
|
||||
|
||||
def _GetMustVerb(self):
|
||||
"""Return 'must' if the hook is required; 'should' if not."""
|
||||
if self._abort_if_user_denies:
|
||||
return 'must'
|
||||
else:
|
||||
return 'should'
|
||||
|
||||
def _CheckForHookApproval(self):
|
||||
"""Check to see whether this hook has been approved.
|
||||
|
||||
We'll look at the hash of all of the hooks. If this matches the hash that
|
||||
the user last approved, we're done. If it doesn't, we'll ask the user
|
||||
about approval.
|
||||
|
||||
Note that we ask permission for each individual hook even though we use
|
||||
the hash of all hooks when detecting changes. We'd like the user to be
|
||||
able to approve / deny each hook individually. We only use the hash of all
|
||||
hooks because there is no other easy way to detect changes to local imports.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the constructor.
|
||||
"""
|
||||
hooks_dir = self._hooks_project.worktree
|
||||
hooks_config = self._hooks_project.config
|
||||
git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
|
||||
|
||||
# Get the last hash that the user approved for this hook; may be None.
|
||||
old_hash = hooks_config.GetString(git_approval_key)
|
||||
|
||||
# Get the current hash so we can tell if scripts changed since approval.
|
||||
new_hash = self._GetHash()
|
||||
|
||||
if old_hash is not None:
|
||||
# User previously approved hook and asked not to be prompted again.
|
||||
if new_hash == old_hash:
|
||||
# Approval matched. We're done.
|
||||
return True
|
||||
else:
|
||||
# Give the user a reason why we're prompting, since they last told
|
||||
# us to "never ask again".
|
||||
prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
|
||||
self._hook_type)
|
||||
else:
|
||||
prompt = ''
|
||||
|
||||
# Prompt the user if we're not on a tty; on a tty we'll assume "no".
|
||||
if sys.stdout.isatty():
|
||||
prompt += ('Repo %s run the script:\n'
|
||||
' %s\n'
|
||||
'\n'
|
||||
'Do you want to allow this script to run '
|
||||
'(yes/yes-never-ask-again/NO)? ') % (
|
||||
self._GetMustVerb(), self._script_fullpath)
|
||||
response = raw_input(prompt).lower()
|
||||
print
|
||||
|
||||
# User is doing a one-time approval.
|
||||
if response in ('y', 'yes'):
|
||||
return True
|
||||
elif response == 'yes-never-ask-again':
|
||||
hooks_config.SetString(git_approval_key, new_hash)
|
||||
return True
|
||||
|
||||
# For anything else, we'll assume no approval.
|
||||
if self._abort_if_user_denies:
|
||||
raise HookError('You must allow the %s hook or use --no-verify.' %
|
||||
self._hook_type)
|
||||
|
||||
return False
|
||||
|
  def _ExecuteHook(self, **kwargs):
    """Actually execute the given hook.

    This will run the hook's 'main' function in our python interpreter.

    Args:
      kwargs: Keyword arguments to pass to the hook.  These are often specific
          to the hook type.  For instance, pre-upload hooks will contain
          a project_list.
    """
    # Keep sys.path and CWD stashed away so that we can always restore them
    # upon function exit.
    orig_path = os.getcwd()
    orig_syspath = sys.path

    try:
      # Always run hooks with CWD as topdir.
      os.chdir(self._topdir)

      # Put the hook dir as the first item of sys.path so hooks can do
      # relative imports.  We want to replace the repo dir as [0] so
      # hooks can't import repo files.
      sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]

      # Exec, storing global context in the context dict.  We catch exceptions
      # and convert to a HookError w/ just the failing traceback.
      context = {}
      try:
        execfile(self._script_fullpath, context)
      except Exception:
        raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
                        traceback.format_exc(), self._hook_type))

      # Running the script should have defined a main() function.
      if 'main' not in context:
        raise HookError('Missing main() in: "%s"' % self._script_fullpath)

      # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
      # We don't actually want hooks to define their main with this argument--
      # it's there to remind them that their hook should always take **kwargs.
      # For instance, a pre-upload hook should be defined like:
      #   def main(project_list, **kwargs):
      #
      # This allows us to later expand the API without breaking old hooks.
      kwargs = kwargs.copy()
      kwargs['hook_should_take_kwargs'] = True

      # Call the main function in the hook.  If the hook should cause the
      # build to fail, it will raise an Exception.  We'll catch that and
      # convert to a HookError w/ just the failing traceback.
      try:
        context['main'](**kwargs)
      except Exception:
        raise HookError('%s\nFailed to run main() for %s hook; see traceback '
                        'above.' % (
                        traceback.format_exc(), self._hook_type))
    finally:
      # Restore sys.path and CWD.
      sys.path = orig_syspath
      os.chdir(orig_path)

  def Run(self, user_allows_all_hooks, **kwargs):
    """Run the hook.

    If the hook doesn't exist (because there is no hooks project or because
    this particular hook is not enabled), this is a no-op.

    Args:
      user_allows_all_hooks: If True, we will never prompt about running the
          hook--we'll just assume it's OK to run it.
      kwargs: Keyword arguments to pass to the hook.  These are often specific
          to the hook type.  For instance, pre-upload hooks will contain
          a project_list.

    Raises:
      HookError: If there was a problem finding the hook or the user declined
          to run a required hook (from _CheckForHookApproval).
    """
    # No-op if there is no hooks project or if hook is disabled.
    if ((not self._hooks_project) or
        (self._hook_type not in self._hooks_project.enabled_repo_hooks)):
      return

    # Bail with a nice error if we can't find the hook.
    if not os.path.isfile(self._script_fullpath):
      raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath)

    # Make sure the user is OK with running the hook.
    if (not user_allows_all_hooks) and (not self._CheckForHookApproval()):
      return

    # Run the hook with the same version of python we're using.
    self._ExecuteHook(**kwargs)

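For reference, a minimal sketch of what a compatible hooks/pre-upload.py could look like under the loader above: the script is exec'd and then its main(**kwargs) is called, so main() must accept **kwargs. The project_list argument name follows the comment in _ExecuteHook(); everything else here is illustrative and not part of this change.

# hooks/pre-upload.py -- illustrative sketch only, not part of this change.
import sys

def main(project_list, **kwargs):
  # Raise any exception to fail the hook (the loader converts it to a
  # HookError); return normally to let the upload proceed.
  if not project_list:
    raise Exception('pre-upload: no projects to check')
  print >>sys.stderr, 'pre-upload: checked %d project(s)' % len(project_list)
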
class Project(object):
|
||||
def __init__(self,
|
||||
manifest,
|
||||
@ -233,8 +495,11 @@ class Project(object):
|
||||
self.manifest = manifest
|
||||
self.name = name
|
||||
self.remote = remote
|
||||
self.gitdir = gitdir
|
||||
self.worktree = worktree
|
||||
self.gitdir = gitdir.replace('\\', '/')
|
||||
if worktree:
|
||||
self.worktree = worktree.replace('\\', '/')
|
||||
else:
|
||||
self.worktree = None
|
||||
self.relpath = relpath
|
||||
self.revisionExpr = revisionExpr
|
||||
|
||||
@ -258,6 +523,10 @@ class Project(object):
|
||||
self.bare_git = self._GitGetByExec(self, bare=True)
|
||||
self.bare_ref = GitRefs(gitdir)
|
||||
|
||||
# This will be filled in if a project is later identified to be the
|
||||
# project containing repo hooks.
|
||||
self.enabled_repo_hooks = []
|
||||
|
||||
@property
|
||||
def Exists(self):
|
||||
return os.path.isdir(self.gitdir)
|
||||
@ -279,7 +548,7 @@ class Project(object):
|
||||
return os.path.exists(os.path.join(g, 'rebase-apply')) \
|
||||
or os.path.exists(os.path.join(g, 'rebase-merge')) \
|
||||
or os.path.exists(os.path.join(w, '.dotest'))
|
||||
|
||||
|
||||
def IsDirty(self, consider_untracked=True):
|
||||
"""Is the working directory modified in some way?
|
||||
"""
|
||||
@ -364,13 +633,39 @@ class Project(object):
|
||||
|
||||
## Status Display ##
|
||||
|
||||
def PrintWorkTreeStatus(self):
|
||||
def HasChanges(self):
|
||||
"""Returns true if there are uncommitted changes.
|
||||
"""
|
||||
self.work_git.update_index('-q',
|
||||
'--unmerged',
|
||||
'--ignore-missing',
|
||||
'--refresh')
|
||||
if self.IsRebaseInProgress():
|
||||
return True
|
||||
|
||||
if self.work_git.DiffZ('diff-index', '--cached', HEAD):
|
||||
return True
|
||||
|
||||
if self.work_git.DiffZ('diff-files'):
|
||||
return True
|
||||
|
||||
if self.work_git.LsOthers():
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def PrintWorkTreeStatus(self, output_redir=None):
|
||||
"""Prints the status of the repository to stdout.
|
||||
|
||||
Args:
|
||||
output_redir: If specified, redirect the output to this object.
|
||||
"""
|
||||
if not os.path.isdir(self.worktree):
|
||||
print ''
|
||||
print 'project %s/' % self.relpath
|
||||
print ' missing (run "repo sync")'
|
||||
if output_redir == None:
|
||||
output_redir = sys.stdout
|
||||
print >>output_redir, ''
|
||||
print >>output_redir, 'project %s/' % self.relpath
|
||||
print >>output_redir, ' missing (run "repo sync")'
|
||||
return
|
||||
|
||||
self.work_git.update_index('-q',
|
||||
@ -385,6 +680,8 @@ class Project(object):
|
||||
return 'CLEAN'
|
||||
|
||||
out = StatusColoring(self.config)
|
||||
if not output_redir == None:
|
||||
out.redirect(output_redir)
|
||||
out.project('project %-40s', self.relpath + '/')
|
||||
|
||||
branch = self.CurrentBranch
|
||||
@ -412,7 +709,7 @@ class Project(object):
|
||||
|
||||
try: f = df[p]
|
||||
except KeyError: f = None
|
||||
|
||||
|
||||
if i: i_status = i.status.upper()
|
||||
else: i_status = '-'
|
||||
|
||||
@ -434,6 +731,7 @@ class Project(object):
|
||||
else:
|
||||
out.write('%s', line)
|
||||
out.nl()
|
||||
|
||||
return 'DIRTY'
|
||||
|
||||
def PrintWorkTreeDiff(self):
|
||||
@ -497,7 +795,7 @@ class Project(object):
|
||||
if R_HEADS + n not in heads:
|
||||
self.bare_git.DeleteRef(name, id)
|
||||
|
||||
def GetUploadableBranches(self):
|
||||
def GetUploadableBranches(self, selected_branch=None):
|
||||
"""List any branches which can be uploaded for review.
|
||||
"""
|
||||
heads = {}
|
||||
@ -513,6 +811,8 @@ class Project(object):
|
||||
for branch, id in heads.iteritems():
|
||||
if branch in pubed and pubed[branch] == id:
|
||||
continue
|
||||
if selected_branch and branch != selected_branch:
|
||||
continue
|
||||
|
||||
rb = self.GetUploadableBranch(branch)
|
||||
if rb:
|
||||
@ -530,7 +830,9 @@ class Project(object):
|
||||
return rb
|
||||
return None
|
||||
|
||||
def UploadForReview(self, branch=None, replace_changes=None, people=([],[])):
|
||||
def UploadForReview(self, branch=None,
|
||||
people=([],[]),
|
||||
auto_topic=False):
|
||||
"""Uploads the named branch for code review.
|
||||
"""
|
||||
if branch is None:
|
||||
@ -562,13 +864,15 @@ class Project(object):
|
||||
for e in people[1]:
|
||||
rp.append('--cc=%s' % sq(e))
|
||||
|
||||
ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
|
||||
if auto_topic:
|
||||
ref_spec = ref_spec + '/' + branch.name
|
||||
|
||||
cmd = ['push']
|
||||
cmd.append('--receive-pack=%s' % " ".join(rp))
|
||||
cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
|
||||
cmd.append('%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch))
|
||||
if replace_changes:
|
||||
for change_id,commit_id in replace_changes.iteritems():
|
||||
cmd.append('%s:refs/changes/%s/new' % (commit_id, change_id))
|
||||
cmd.append(ref_spec)
|
||||
|
||||
if GitCommand(self, cmd, bare = True).Wait() != 0:
|
||||
raise UploadError('Upload failed')
|
||||
|
||||
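To make the auto_topic behaviour concrete, this is the refspec the code above would build for a hypothetical local branch "mywork" whose review destination is "master" (both names are examples only):

  refs/heads/mywork:refs/for/master           (auto_topic is False)
  refs/heads/mywork:refs/for/master/mywork    (auto_topic is True; the branch name becomes the topic)
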
@ -584,19 +888,47 @@ class Project(object):
|
||||
|
||||
## Sync ##
|
||||
|
||||
def Sync_NetworkHalf(self):
|
||||
def Sync_NetworkHalf(self, quiet=False, is_new=None):
|
||||
"""Perform only the network IO portion of the sync process.
|
||||
Local working directory/branch state is not affected.
|
||||
"""
|
||||
if not self.Exists:
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, 'Initializing project %s ...' % self.name
|
||||
if is_new is None:
|
||||
is_new = not self.Exists
|
||||
if is_new:
|
||||
self._InitGitDir()
|
||||
|
||||
self._InitRemote()
|
||||
if not self._RemoteFetch():
|
||||
|
||||
if is_new:
|
||||
alt = os.path.join(self.gitdir, 'objects/info/alternates')
|
||||
try:
|
||||
fd = open(alt, 'rb')
|
||||
try:
|
||||
alt_dir = fd.readline().rstrip()
|
||||
finally:
|
||||
fd.close()
|
||||
except IOError:
|
||||
alt_dir = None
|
||||
else:
|
||||
alt_dir = None
|
||||
|
||||
if alt_dir is None and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
|
||||
is_new = False
|
||||
|
||||
if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir):
|
||||
return False
|
||||
|
||||
# Check that the requested ref was found after fetch
|
||||
#
|
||||
try:
|
||||
self.GetRevisionId()
|
||||
except ManifestInvalidRevisionError:
|
||||
# If the ref is a tag, we can try fetching
|
||||
# the tag manually as a last resort
|
||||
#
|
||||
rev = self.revisionExpr
|
||||
if rev.startswith(R_TAGS):
|
||||
self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)
|
||||
|
||||
if self.worktree:
|
||||
self._InitMRef()
|
||||
else:
|
||||
@ -635,11 +967,11 @@ class Project(object):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
Network access is not required.
|
||||
"""
|
||||
self._InitWorkTree()
|
||||
all = self.bare_ref.all
|
||||
self.CleanPublishedCache(all)
|
||||
|
||||
revid = self.GetRevisionId(all)
|
||||
|
||||
self._InitWorkTree()
|
||||
head = self.work_git.GetHead()
|
||||
if head.startswith(R_HEADS):
|
||||
branch = head[len(R_HEADS):]
|
||||
@ -706,10 +1038,9 @@ class Project(object):
|
||||
# commits are not yet merged upstream. We do not want
|
||||
# to rewrite the published commits so we punt.
|
||||
#
|
||||
syncbuf.info(self,
|
||||
"branch %s is published but is now %d commits behind",
|
||||
branch.name,
|
||||
len(upstream_gain))
|
||||
syncbuf.fail(self,
|
||||
"branch %s is published (but not merged) and is now %d commits behind"
|
||||
% (branch.name, len(upstream_gain)))
|
||||
return
|
||||
elif pub == head:
|
||||
# All published commits are merged, and thus we are a
|
||||
@ -728,7 +1059,7 @@ class Project(object):
|
||||
last_mine = None
|
||||
cnt_mine = 0
|
||||
for commit in local_changes:
|
||||
commit_id, committer_email = commit.split(' ', 2)
|
||||
commit_id, committer_email = commit.split(' ', 1)
|
||||
if committer_email == self.UserEmail:
|
||||
last_mine = commit_id
|
||||
cnt_mine += 1
|
||||
@ -856,6 +1187,13 @@ class Project(object):
|
||||
|
||||
def CheckoutBranch(self, name):
|
||||
"""Checkout a local topic branch.
|
||||
|
||||
Args:
|
||||
name: The name of the branch to checkout.
|
||||
|
||||
Returns:
|
||||
True if the checkout succeeded; False if it didn't; None if the branch
|
||||
didn't exist.
|
||||
"""
|
||||
rev = R_HEADS + name
|
||||
head = self.work_git.GetHead()
|
||||
@ -870,7 +1208,7 @@ class Project(object):
|
||||
except KeyError:
|
||||
# Branch does not exist in this project
|
||||
#
|
||||
return False
|
||||
return None
|
||||
|
||||
if head.startswith(R_HEADS):
|
||||
try:
|
||||
@ -893,13 +1231,19 @@ class Project(object):
|
||||
|
||||
def AbandonBranch(self, name):
|
||||
"""Destroy a local topic branch.
|
||||
|
||||
Args:
|
||||
name: The name of the branch to abandon.
|
||||
|
||||
Returns:
|
||||
True if the abandon succeeded; False if it didn't; None if the branch
|
||||
didn't exist.
|
||||
"""
|
||||
rev = R_HEADS + name
|
||||
all = self.bare_ref.all
|
||||
if rev not in all:
|
||||
# Doesn't exist; assume already abandoned.
|
||||
#
|
||||
return True
|
||||
# Doesn't exist
|
||||
return None
|
||||
|
||||
head = self.work_git.GetHead()
|
||||
if head == rev:
|
||||
@ -979,22 +1323,218 @@ class Project(object):
|
||||
|
||||
## Direct Git Commands ##
|
||||
|
||||
def _RemoteFetch(self, name=None):
|
||||
def _RemoteFetch(self, name=None, tag=None,
|
||||
initial=False,
|
||||
quiet=False,
|
||||
alt_dir=None):
|
||||
if not name:
|
||||
name = self.remote.name
|
||||
|
||||
ssh_proxy = False
|
||||
if self.GetRemote(name).PreConnectFetch():
|
||||
remote = self.GetRemote(name)
|
||||
if remote.PreConnectFetch():
|
||||
ssh_proxy = True
|
||||
|
||||
if initial:
|
||||
if alt_dir and 'objects' == os.path.basename(alt_dir):
|
||||
ref_dir = os.path.dirname(alt_dir)
|
||||
packed_refs = os.path.join(self.gitdir, 'packed-refs')
|
||||
remote = self.GetRemote(name)
|
||||
|
||||
all = self.bare_ref.all
|
||||
ids = set(all.values())
|
||||
tmp = set()
|
||||
|
||||
for r, id in GitRefs(ref_dir).all.iteritems():
|
||||
if r not in all:
|
||||
if r.startswith(R_TAGS) or remote.WritesTo(r):
|
||||
all[r] = id
|
||||
ids.add(id)
|
||||
continue
|
||||
|
||||
if id in ids:
|
||||
continue
|
||||
|
||||
r = 'refs/_alt/%s' % id
|
||||
all[r] = id
|
||||
ids.add(id)
|
||||
tmp.add(r)
|
||||
|
||||
ref_names = list(all.keys())
|
||||
ref_names.sort()
|
||||
|
||||
tmp_packed = ''
|
||||
old_packed = ''
|
||||
|
||||
for r in ref_names:
|
||||
line = '%s %s\n' % (all[r], r)
|
||||
tmp_packed += line
|
||||
if r not in tmp:
|
||||
old_packed += line
|
||||
|
||||
_lwrite(packed_refs, tmp_packed)
|
||||
else:
|
||||
alt_dir = None
|
||||
|
||||
cmd = ['fetch']
|
||||
|
||||
# The --depth option only affects the initial fetch; after that we'll do
|
||||
# full fetches of changes.
|
||||
depth = self.manifest.manifestProject.config.GetString('repo.depth')
|
||||
if depth and initial:
|
||||
cmd.append('--depth=%s' % depth)
|
||||
|
||||
if quiet:
|
||||
cmd.append('--quiet')
|
||||
if not self.worktree:
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(name)
|
||||
return GitCommand(self,
|
||||
cmd,
|
||||
bare = True,
|
||||
ssh_proxy = ssh_proxy).Wait() == 0
|
||||
if tag is not None:
|
||||
cmd.append('tag')
|
||||
cmd.append(tag)
|
||||
|
||||
ok = False
|
||||
for i in range(2):
|
||||
if GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait() == 0:
|
||||
ok = True
|
||||
break
|
||||
time.sleep(random.randint(30, 45))
|
||||
|
||||
if initial:
|
||||
if alt_dir:
|
||||
if old_packed != '':
|
||||
_lwrite(packed_refs, old_packed)
|
||||
else:
|
||||
os.remove(packed_refs)
|
||||
self.bare_git.pack_refs('--all', '--prune')
|
||||
return ok
|
||||
|
||||
def _ApplyCloneBundle(self, initial=False, quiet=False):
|
||||
if initial and self.manifest.manifestProject.config.GetString('repo.depth'):
|
||||
return False
|
||||
|
||||
remote = self.GetRemote(self.remote.name)
|
||||
bundle_url = remote.url + '/clone.bundle'
|
||||
bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
|
||||
if GetSchemeFromUrl(bundle_url) not in ('http', 'https'):
|
||||
return False
|
||||
|
||||
bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
|
||||
bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')
|
||||
|
||||
exist_dst = os.path.exists(bundle_dst)
|
||||
exist_tmp = os.path.exists(bundle_tmp)
|
||||
|
||||
if not initial and not exist_dst and not exist_tmp:
|
||||
return False
|
||||
|
||||
if not exist_dst:
|
||||
exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet)
|
||||
if not exist_dst:
|
||||
return False
|
||||
|
||||
cmd = ['fetch']
|
||||
if quiet:
|
||||
cmd.append('--quiet')
|
||||
if not self.worktree:
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(bundle_dst)
|
||||
for f in remote.fetch:
|
||||
cmd.append(str(f))
|
||||
cmd.append('refs/tags/*:refs/tags/*')
|
||||
|
||||
ok = GitCommand(self, cmd, bare=True).Wait() == 0
|
||||
if os.path.exists(bundle_dst):
|
||||
os.remove(bundle_dst)
|
||||
if os.path.exists(bundle_tmp):
|
||||
os.remove(bundle_tmp)
|
||||
return ok
|
||||
|
||||
def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet):
|
||||
keep = True
|
||||
done = False
|
||||
dest = open(tmpPath, 'a+b')
|
||||
try:
|
||||
dest.seek(0, os.SEEK_END)
|
||||
pos = dest.tell()
|
||||
|
||||
req = urllib2.Request(srcUrl)
|
||||
if pos > 0:
|
||||
req.add_header('Range', 'bytes=%d-' % pos)
|
||||
|
||||
try:
|
||||
r = urllib2.urlopen(req)
|
||||
except urllib2.HTTPError, e:
|
||||
if e.code == 404:
|
||||
keep = False
|
||||
return False
|
||||
elif e.info()['content-type'] == 'text/plain':
|
||||
try:
|
||||
msg = e.read()
|
||||
if len(msg) > 0 and msg[-1] == '\n':
|
||||
msg = msg[0:-1]
|
||||
msg = ' (%s)' % msg
|
||||
except:
|
||||
msg = ''
|
||||
else:
|
||||
try:
|
||||
from BaseHTTPServer import BaseHTTPRequestHandler
|
||||
res = BaseHTTPRequestHandler.responses[e.code]
|
||||
msg = ' (%s: %s)' % (res[0], res[1])
|
||||
except:
|
||||
msg = ''
|
||||
raise DownloadError('HTTP %s%s' % (e.code, msg))
|
||||
except urllib2.URLError, e:
|
||||
raise DownloadError('%s (%s)' % (e.reason, req.get_host()))
|
||||
|
||||
p = None
|
||||
try:
|
||||
size = r.headers['content-length']
|
||||
unit = 1 << 10
|
||||
|
||||
if size and not quiet:
|
||||
if size > 1024 * 1.3:
|
||||
unit = 1 << 20
|
||||
desc = 'MB'
|
||||
else:
|
||||
desc = 'KB'
|
||||
p = Progress(
|
||||
'Downloading %s' % self.relpath,
|
||||
int(size) / unit,
|
||||
units=desc)
|
||||
if pos > 0:
|
||||
p.update(pos / unit)
|
||||
|
||||
s = 0
|
||||
while True:
|
||||
d = r.read(8192)
|
||||
if d == '':
|
||||
done = True
|
||||
return True
|
||||
dest.write(d)
|
||||
if p:
|
||||
s += len(d)
|
||||
if s >= unit:
|
||||
p.update(s / unit)
|
||||
s = s % unit
|
||||
if p:
|
||||
if s >= unit:
|
||||
p.update(s / unit)
|
||||
else:
|
||||
p.update(1)
|
||||
finally:
|
||||
r.close()
|
||||
if p:
|
||||
p.end()
|
||||
finally:
|
||||
dest.close()
|
||||
|
||||
if os.path.exists(dstPath):
|
||||
os.remove(dstPath)
|
||||
if done:
|
||||
os.rename(tmpPath, dstPath)
|
||||
elif not keep:
|
||||
os.remove(tmpPath)
|
||||
|
||||
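The resume handling above can be summarised in a small sketch: reopen the partial clone.bundle in append mode, find how much is already present, and request only the remaining bytes via an HTTP Range header. The helper name open_resumable is hypothetical; the calls mirror the ones used in _FetchBundle().

import os
import urllib2

def open_resumable(src_url, tmp_path):
  # Illustrative sketch only: reopen a partial download and resume it.
  dest = open(tmp_path, 'a+b')
  dest.seek(0, os.SEEK_END)
  pos = dest.tell()
  req = urllib2.Request(src_url)
  if pos > 0:
    # Ask the server for just the bytes we do not have yet.
    req.add_header('Range', 'bytes=%d-' % pos)
  return dest, urllib2.urlopen(req)
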
def _Checkout(self, rev, quiet=False):
|
||||
cmd = ['checkout']
|
||||
@ -1032,6 +1572,27 @@ class Project(object):
|
||||
os.makedirs(self.gitdir)
|
||||
self.bare_git.init()
|
||||
|
||||
mp = self.manifest.manifestProject
|
||||
ref_dir = mp.config.GetString('repo.reference')
|
||||
|
||||
if ref_dir:
|
||||
mirror_git = os.path.join(ref_dir, self.name + '.git')
|
||||
repo_git = os.path.join(ref_dir, '.repo', 'projects',
|
||||
self.relpath + '.git')
|
||||
|
||||
if os.path.exists(mirror_git):
|
||||
ref_dir = mirror_git
|
||||
|
||||
elif os.path.exists(repo_git):
|
||||
ref_dir = repo_git
|
||||
|
||||
else:
|
||||
ref_dir = None
|
||||
|
||||
if ref_dir:
|
||||
_lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
|
||||
os.path.join(ref_dir, 'objects') + '\n')
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
self.config.SetString('core.bare', 'true')
|
||||
else:
|
||||
@ -1055,14 +1616,31 @@ class Project(object):
|
||||
hooks = self._gitdir_path('hooks')
|
||||
if not os.path.exists(hooks):
|
||||
os.makedirs(hooks)
|
||||
for stock_hook in repo_hooks():
|
||||
dst = os.path.join(hooks, os.path.basename(stock_hook))
|
||||
for stock_hook in _ProjectHooks():
|
||||
name = os.path.basename(stock_hook)
|
||||
|
||||
if name in ('commit-msg',) and not self.remote.review \
|
||||
and not self is self.manifest.manifestProject:
|
||||
# Don't install a Gerrit Code Review hook if this
|
||||
# project does not appear to use it for reviews.
|
||||
#
|
||||
# Since the manifest project is one of those, but also
|
||||
# managed through gerrit, it's excluded
|
||||
continue
|
||||
|
||||
dst = os.path.join(hooks, name)
|
||||
if os.path.islink(dst):
|
||||
continue
|
||||
if os.path.exists(dst):
|
||||
if filecmp.cmp(stock_hook, dst, shallow=False):
|
||||
os.remove(dst)
|
||||
else:
|
||||
_error("%s: Not replacing %s hook", self.relpath, name)
|
||||
continue
|
||||
try:
|
||||
os.symlink(relpath(stock_hook, dst), dst)
|
||||
except OSError, e:
|
||||
if e.errno == errno.EEXIST:
|
||||
pass
|
||||
elif e.errno == errno.EPERM:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
raise
|
||||
@ -1120,7 +1698,10 @@ class Project(object):
|
||||
try:
|
||||
src = os.path.join(self.gitdir, name)
|
||||
dst = os.path.join(dotgit, name)
|
||||
os.symlink(relpath(src, dst), dst)
|
||||
if os.path.islink(dst) or not os.path.exists(dst):
|
||||
os.symlink(relpath(src, dst), dst)
|
||||
else:
|
||||
raise GitError('cannot overwrite a local work tree')
|
||||
except OSError, e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
@ -1134,6 +1715,11 @@ class Project(object):
|
||||
cmd.append(HEAD)
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError("cannot initialize work tree")
|
||||
|
||||
rr_cache = os.path.join(self.gitdir, 'rr-cache')
|
||||
if not os.path.exists(rr_cache):
|
||||
os.makedirs(rr_cache)
|
||||
|
||||
self._CopyFiles()
|
||||
|
||||
def _gitdir_path(self, path):
|
||||
@ -1292,6 +1878,22 @@ class Project(object):
|
||||
return r
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Allow arbitrary git commands using pythonic syntax.
|
||||
|
||||
This allows you to do things like:
|
||||
git_obj.rev_parse('HEAD')
|
||||
|
||||
Since we don't have a 'rev_parse' method defined, the __getattr__ will
|
||||
run. We'll replace the '_' with a '-' and try to run a git command.
|
||||
Any other arguments will be passed to the git command.
|
||||
|
||||
Args:
|
||||
name: The name of the git command to call. Any '_' characters will
|
||||
be replaced with '-'.
|
||||
|
||||
Returns:
|
||||
A callable object that will try to call git with the named command.
|
||||
"""
|
||||
name = name.replace('_', '-')
|
||||
def runner(*args):
|
||||
cmdv = [name]
|
||||
|
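As an aside, the underscore-to-dash dispatch described in the docstring can be shown with a stripped-down, standalone sketch. This is illustrative only and is not the _GitGetByExec implementation; it shells out with subprocess instead of repo's GitCommand wrapper.

import subprocess

class SimpleGit(object):
  # Illustrative: unknown attributes become git subcommands, '_' mapped to '-'.
  def __init__(self, cwd):
    self._cwd = cwd

  def __getattr__(self, name):
    cmd_name = name.replace('_', '-')
    def runner(*args):
      # e.g. SimpleGit('.').rev_parse('HEAD') runs "git rev-parse HEAD".
      return subprocess.check_output(['git', cmd_name] + list(args),
                                     cwd=self._cwd).strip()
    return runner
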
128
repo
@ -2,7 +2,7 @@
|
||||
|
||||
## repo default configuration
|
||||
##
|
||||
REPO_URL='git://android.git.kernel.org/tools/repo.git'
|
||||
REPO_URL='https://code.google.com/p/git-repo/'
|
||||
REPO_REV='stable'
|
||||
|
||||
# Copyright (C) 2008 Google Inc.
|
||||
@ -28,7 +28,7 @@ if __name__ == '__main__':
|
||||
del magic
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 8)
|
||||
VERSION = (1, 13)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1,0)
|
||||
@ -91,6 +91,7 @@ import re
|
||||
import readline
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib2
|
||||
|
||||
home_dot_repo = os.path.expanduser('~/.repoconfig')
|
||||
gpg_dir = os.path.join(home_dot_repo, 'gnupg')
|
||||
@ -118,6 +119,13 @@ group.add_option('-m', '--manifest-name',
|
||||
group.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forest')
|
||||
group.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
group.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
|
||||
|
||||
# Tool
|
||||
group = init_optparse.add_option_group('repo Version options')
|
||||
@ -180,10 +188,6 @@ def _Init(args):
|
||||
else:
|
||||
can_verify = True
|
||||
|
||||
if not opt.quiet:
|
||||
print >>sys.stderr, 'Getting repo ...'
|
||||
print >>sys.stderr, ' from %s' % url
|
||||
|
||||
dst = os.path.abspath(os.path.join(repodir, S_repo))
|
||||
_Clone(url, dst, opt.quiet)
|
||||
|
||||
@ -256,8 +260,8 @@ def _SetupGnuPG(quiet):
|
||||
gpg_dir, e.strerror)
|
||||
sys.exit(1)
|
||||
|
||||
env = dict(os.environ)
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
|
||||
cmd = ['gpg', '--import']
|
||||
try:
|
||||
@ -293,15 +297,42 @@ def _SetConfig(local, name, value):
|
||||
raise CloneFailure()
|
||||
|
||||
|
||||
def _Fetch(local, quiet, *args):
|
||||
def _InitHttp():
|
||||
handlers = []
|
||||
|
||||
mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
try:
|
||||
import netrc
|
||||
n = netrc.netrc()
|
||||
for host in n.hosts:
|
||||
p = n.hosts[host]
|
||||
mgr.add_password(None, 'http://%s/' % host, p[0], p[2])
|
||||
mgr.add_password(None, 'https://%s/' % host, p[0], p[2])
|
||||
except:
|
||||
pass
|
||||
handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
url = os.environ['http_proxy']
|
||||
handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
|
||||
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||
handlers.append(urllib2.HTTPHandler(debuglevel=1))
|
||||
handlers.append(urllib2.HTTPSHandler(debuglevel=1))
|
||||
urllib2.install_opener(urllib2.build_opener(*handlers))
|
||||
|
||||
def _Fetch(url, local, src, quiet):
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
|
||||
cmd = [GIT, 'fetch']
|
||||
if quiet:
|
||||
cmd.append('--quiet')
|
||||
err = subprocess.PIPE
|
||||
else:
|
||||
err = None
|
||||
cmd.extend(args)
|
||||
cmd.append('origin')
|
||||
cmd.append(src)
|
||||
cmd.append('+refs/heads/*:refs/remotes/origin/*')
|
||||
cmd.append('refs/tags/*:refs/tags/*')
|
||||
|
||||
proc = subprocess.Popen(cmd, cwd = local, stderr = err)
|
||||
if err:
|
||||
@ -310,6 +341,62 @@ def _Fetch(local, quiet, *args):
|
||||
if proc.wait() != 0:
|
||||
raise CloneFailure()
|
||||
|
||||
def _DownloadBundle(url, local, quiet):
|
||||
if not url.endswith('/'):
|
||||
url += '/'
|
||||
url += 'clone.bundle'
|
||||
|
||||
proc = subprocess.Popen(
|
||||
[GIT, 'config', '--get-regexp', 'url.*.insteadof'],
|
||||
cwd = local,
|
||||
stdout = subprocess.PIPE)
|
||||
for line in proc.stdout:
|
||||
m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
|
||||
if m:
|
||||
new_url = m.group(1)
|
||||
old_url = m.group(2)
|
||||
if url.startswith(old_url):
|
||||
url = new_url + url[len(old_url):]
|
||||
break
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
|
||||
if not url.startswith('http:') and not url.startswith('https:'):
|
||||
return False
|
||||
|
||||
dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
|
||||
try:
|
||||
try:
|
||||
r = urllib2.urlopen(url)
|
||||
except urllib2.HTTPError, e:
|
||||
if e.code == 404:
|
||||
return False
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: HTTP error %s' % e.code
|
||||
raise CloneFailure()
|
||||
except urllib2.URLError, e:
|
||||
print >>sys.stderr, 'fatal: Cannot get %s' % url
|
||||
print >>sys.stderr, 'fatal: error %s' % e.reason
|
||||
raise CloneFailure()
|
||||
try:
|
||||
if not quiet:
|
||||
print >>sys.stderr, 'Get %s' % url
|
||||
while True:
|
||||
buf = r.read(8192)
|
||||
if buf == '':
|
||||
return True
|
||||
dest.write(buf)
|
||||
finally:
|
||||
r.close()
|
||||
finally:
|
||||
dest.close()
|
||||
|
||||
def _ImportBundle(local):
|
||||
path = os.path.join(local, '.git', 'clone.bundle')
|
||||
try:
|
||||
_Fetch(local, local, path, True)
|
||||
finally:
|
||||
os.remove(path)
|
||||
|
||||
def _Clone(url, local, quiet):
|
||||
"""Clones a git repository to a new subdirectory of repodir
|
||||
@ -337,11 +424,14 @@ def _Clone(url, local, quiet):
|
||||
print >>sys.stderr, 'fatal: could not create %s' % local
|
||||
raise CloneFailure()
|
||||
|
||||
_InitHttp()
|
||||
_SetConfig(local, 'remote.origin.url', url)
|
||||
_SetConfig(local, 'remote.origin.fetch',
|
||||
'+refs/heads/*:refs/remotes/origin/*')
|
||||
_Fetch(local, quiet)
|
||||
_Fetch(local, quiet, '--tags')
|
||||
if _DownloadBundle(url, local, quiet):
|
||||
_ImportBundle(local)
|
||||
else:
|
||||
_Fetch(url, local, 'origin', quiet)
|
||||
|
||||
|
||||
def _Verify(cwd, branch, quiet):
|
||||
@ -375,8 +465,8 @@ def _Verify(cwd, branch, quiet):
|
||||
% (branch, cur)
|
||||
print >>sys.stderr
|
||||
|
||||
env = dict(os.environ)
|
||||
env['GNUPGHOME'] = gpg_dir
|
||||
env = os.environ.copy()
|
||||
env['GNUPGHOME'] = gpg_dir.encode()
|
||||
|
||||
cmd = [GIT, 'tag', '-v', cur]
|
||||
proc = subprocess.Popen(cmd,
|
||||
@ -427,10 +517,14 @@ def _FindRepo():
|
||||
dir = os.getcwd()
|
||||
repo = None
|
||||
|
||||
while dir != '/' and not repo:
|
||||
olddir = None
|
||||
while dir != '/' \
|
||||
and dir != olddir \
|
||||
and not repo:
|
||||
repo = os.path.join(dir, repodir, REPO_MAIN)
|
||||
if not os.path.isfile(repo):
|
||||
repo = None
|
||||
olddir = dir
|
||||
dir = os.path.dirname(dir)
|
||||
return (repo, os.path.join(dir, repodir))
|
||||
|
||||
@ -476,6 +570,7 @@ def _Help(args):
|
||||
if args:
|
||||
if args[0] == 'init':
|
||||
init_optparse.print_help()
|
||||
sys.exit(0)
|
||||
else:
|
||||
print >>sys.stderr,\
|
||||
"error: '%s' is not a bootstrap command.\n"\
|
||||
@ -588,4 +683,3 @@ def main(orig_args):
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:])
|
||||
|
||||
|
@ -41,21 +41,30 @@ It is equivalent to "git branch -D <branchname>".
|
||||
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all = self.GetProjects(args[1:])
|
||||
|
||||
pm = Progress('Abandon %s' % nb, len(all))
|
||||
for project in all:
|
||||
pm.update()
|
||||
if not project.AbandonBranch(nb):
|
||||
err.append(project)
|
||||
|
||||
status = project.AbandonBranch(nb)
|
||||
if status is not None:
|
||||
if status:
|
||||
success.append(project)
|
||||
else:
|
||||
err.append(project)
|
||||
pm.end()
|
||||
|
||||
if err:
|
||||
if len(err) == len(all):
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
else:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot abandon %s" \
|
||||
% (p.relpath, nb)
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot abandon %s" \
|
||||
% (p.relpath, nb)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >>sys.stderr, 'Abandoned in %d project(s):\n %s' % (
|
||||
len(success), '\n '.join(p.relpath for p in success))
|
||||
|
@ -136,7 +136,7 @@ is shown, then the branch appears in all projects.
|
||||
hdr('%c%c %-*s' % (current, published, width, name))
|
||||
out.write(' |')
|
||||
|
||||
if in_cnt < project_cnt and (in_cnt == 1):
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
@ -150,15 +150,17 @@ is shown, then the branch appears in all projects.
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
paths.append(p.relpath)
|
||||
if not p in have:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (type, ', '.join(paths))
|
||||
if width + 7 + len(s) < 80:
|
||||
fmt(s)
|
||||
else:
|
||||
out.nl()
|
||||
fmt(' %s:' % type)
|
||||
fmt(' %s:' % type)
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(' %s' % p)
|
||||
fmt(width*' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
@ -38,21 +38,27 @@ The command is equivalent to:
|
||||
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all = self.GetProjects(args[1:])
|
||||
|
||||
pm = Progress('Checkout %s' % nb, len(all))
|
||||
for project in all:
|
||||
pm.update()
|
||||
if not project.CheckoutBranch(nb):
|
||||
err.append(project)
|
||||
|
||||
status = project.CheckoutBranch(nb)
|
||||
if status is not None:
|
||||
if status:
|
||||
success.append(project)
|
||||
else:
|
||||
err.append(project)
|
||||
pm.end()
|
||||
|
||||
if err:
|
||||
if len(err) == len(all):
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
else:
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot checkout %s" \
|
||||
% (p.relpath, nb)
|
||||
for p in err:
|
||||
print >>sys.stderr,\
|
||||
"error: %s/: cannot checkout %s" \
|
||||
% (p.relpath, nb)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print >>sys.stderr, 'error: no project has branch %s' % nb
|
||||
sys.exit(1)
|
||||
|
114
subcmds/cherry_pick.py
Normal file
@ -0,0 +1,114 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys, re, string, random, os
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
|
||||
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
|
||||
|
||||
class CherryPick(Command):
|
||||
common = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
helpUsage = """
|
||||
%prog <sha1>
|
||||
"""
|
||||
helpDescription = """
|
||||
'%prog' cherry-picks a change from one branch to another.
|
||||
The change id will be updated, and a reference to the old
|
||||
change id will be added.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
pass
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if len(args) != 1:
|
||||
self.Usage()
|
||||
|
||||
reference = args[0]
|
||||
|
||||
p = GitCommand(None,
|
||||
['rev-parse', '--verify', reference],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, p.stderr
|
||||
sys.exit(1)
|
||||
sha1 = p.stdout.strip()
|
||||
|
||||
p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to retrieve old commit message"
|
||||
sys.exit(1)
|
||||
old_msg = self._StripHeader(p.stdout)
|
||||
|
||||
p = GitCommand(None,
|
||||
['cherry-pick', sha1],
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
status = p.Wait()
|
||||
|
||||
print >>sys.stdout, p.stdout
|
||||
print >>sys.stderr, p.stderr
|
||||
|
||||
if status == 0:
|
||||
# The cherry-pick was applied correctly. We just need to edit the
|
||||
# commit message.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
|
||||
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
|
||||
provide_stdin = True,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
p.stdin.write(new_msg)
|
||||
if p.Wait() != 0:
|
||||
print >>sys.stderr, "error: Failed to update commit message"
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
print >>sys.stderr, """\
|
||||
NOTE: When committing (please see above) and editing the commit message,
|
||||
please remove the old Change-Id line and add:
|
||||
"""
|
||||
print >>sys.stderr, self._GetReference(sha1)
|
||||
print >>sys.stderr
|
||||
|
||||
def _IsChangeId(self, line):
|
||||
return CHANGE_ID_RE.match(line)
|
||||
|
||||
def _GetReference(self, sha1):
|
||||
return "(cherry picked from commit %s)" % sha1
|
||||
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("")+1:])
|
||||
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
|
||||
for line in old_msg.splitlines():
|
||||
if not self._IsChangeId(line):
|
||||
new_msg.append(line)
|
||||
|
||||
# Add a blank line between the message and the change id/reference
|
||||
try:
|
||||
if new_msg[-1].strip() != "":
|
||||
new_msg.append("")
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
new_msg.append(self._GetReference(sha1))
|
||||
return "\n".join(new_msg)
|
@ -36,6 +36,9 @@ makes it available in your project's local working directory.
|
||||
pass
|
||||
|
||||
def _ParseChangeIds(self, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
to_get = []
|
||||
project = None
|
||||
|
||||
|
@ -151,11 +151,11 @@ terminal and are not redirected.
|
||||
first = True
|
||||
|
||||
for project in self.GetProjects(args):
|
||||
env = dict(os.environ.iteritems())
|
||||
env = os.environ.copy()
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
env[name] = val
|
||||
env[name] = val.encode()
|
||||
|
||||
setenv('REPO_PROJECT', project.name)
|
||||
setenv('REPO_PATH', project.relpath)
|
||||
@ -169,6 +169,12 @@ terminal and are not redirected.
|
||||
else:
|
||||
cwd = project.worktree
|
||||
|
||||
if not os.path.exists(cwd):
|
||||
if (opt.project_header and opt.verbose) \
|
||||
or not opt.project_header:
|
||||
print >>sys.stderr, 'skipping %s/' % project.relpath
|
||||
continue
|
||||
|
||||
if opt.project_header:
|
||||
stdin = subprocess.PIPE
|
||||
stdout = subprocess.PIPE
|
||||
|
@ -204,7 +204,7 @@ contain a line that matches both expressions:
|
||||
else:
|
||||
out.project('--- project %s ---' % project.relpath)
|
||||
out.nl()
|
||||
out.write(p.stderr)
|
||||
out.write("%s", p.stderr)
|
||||
out.nl()
|
||||
continue
|
||||
have_match = True
|
||||
@ -217,17 +217,17 @@ contain a line that matches both expressions:
|
||||
if have_rev and full_name:
|
||||
for line in r:
|
||||
rev, line = line.split(':', 1)
|
||||
out.write(rev)
|
||||
out.write("%s", rev)
|
||||
out.write(':')
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write(line)
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
elif full_name:
|
||||
for line in r:
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write(line)
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
else:
|
||||
for line in r:
|
||||
|
@ -94,6 +94,8 @@ See 'repo help --all' for a complete list of recognized commands.
|
||||
body = getattr(cmd, bodyAttr)
|
||||
except AttributeError:
|
||||
return
|
||||
if body == '' or body is None:
|
||||
return
|
||||
|
||||
self.nl()
|
||||
|
||||
|
@ -14,12 +14,14 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from color import Coloring
|
||||
from command import InteractiveCommand, MirrorSafeCommand
|
||||
from error import ManifestParseError
|
||||
from project import SyncBuffer
|
||||
from git_config import GitConfig
|
||||
from git_command import git_require, MIN_GIT_VERSION
|
||||
|
||||
class Init(InteractiveCommand, MirrorSafeCommand):
|
||||
@ -41,6 +43,13 @@ The optional -m argument can be used to specify an alternate manifest
|
||||
to be used. If no manifest is specified, the manifest default.xml
|
||||
will be used.
|
||||
|
||||
The --reference option can be used to point to a directory that
|
||||
has the content of a --mirror sync. This will make the working
|
||||
directory use as much data as possible from the local reference
|
||||
directory when fetching from the server. This will make the sync
|
||||
go a lot faster by reducing data traffic on the network.
|
||||
|
||||
|
||||
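For example (URL and path are placeholders, not taken from this change), a client seeded from such a local mirror would be initialized with:

  repo init -u <manifest-url> --reference=/path/to/local/mirror
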
Switching Manifest Branches
|
||||
---------------------------
|
||||
|
||||
@ -71,7 +80,12 @@ to update the working directory files.
|
||||
g.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forest')
|
||||
|
||||
g.add_option('--reference',
|
||||
dest='reference',
|
||||
help='location of mirror directory', metavar='DIR')
|
||||
g.add_option('--depth', type='int', default=None,
|
||||
dest='depth',
|
||||
help='create a shallow clone with given depth; see git clone')
|
||||
|
||||
# Tool
|
||||
g = p.add_option_group('repo Version options')
|
||||
@ -95,8 +109,8 @@ to update the working directory files.
|
||||
sys.exit(1)
|
||||
|
||||
if not opt.quiet:
|
||||
print >>sys.stderr, 'Getting manifest ...'
|
||||
print >>sys.stderr, ' from %s' % opt.manifest_url
|
||||
print >>sys.stderr, 'Get %s' \
|
||||
% GitConfig.ForUser().UrlInsteadOf(opt.manifest_url)
|
||||
m._InitGitDir()
|
||||
|
||||
if opt.manifest_branch:
|
||||
@ -115,6 +129,9 @@ to update the working directory files.
|
||||
r.ResetFetch()
|
||||
r.Save()
|
||||
|
||||
if opt.reference:
|
||||
m.config.SetString('repo.reference', opt.reference)
|
||||
|
||||
if opt.mirror:
|
||||
if is_new:
|
||||
m.config.SetString('repo.mirror', 'true')
|
||||
@ -122,9 +139,14 @@ to update the working directory files.
|
||||
print >>sys.stderr, 'fatal: --mirror not supported on existing client'
|
||||
sys.exit(1)
|
||||
|
||||
if not m.Sync_NetworkHalf():
|
||||
if not m.Sync_NetworkHalf(is_new=is_new):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
|
||||
|
||||
# Better delete the manifest git dir if we created it; otherwise next
|
||||
# time (when user fixes problems) we won't go through the "is_new" logic.
|
||||
if is_new:
|
||||
shutil.rmtree(m.gitdir)
|
||||
sys.exit(1)
|
||||
|
||||
syncbuf = SyncBuffer(m.config)
|
||||
@ -148,20 +170,34 @@ to update the working directory files.
|
||||
print >>sys.stderr, 'fatal: %s' % str(e)
|
||||
sys.exit(1)
|
||||
|
||||
def _PromptKey(self, prompt, key, value):
|
||||
def _Prompt(self, prompt, value):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
sys.stdout.write('%-10s [%s]: ' % (prompt, value))
|
||||
a = sys.stdin.readline().strip()
|
||||
if a != '' and a != value:
|
||||
mp.config.SetString(key, a)
|
||||
if a == '':
|
||||
return value
|
||||
return a
|
||||
|
||||
def _ConfigureUser(self):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
print ''
|
||||
self._PromptKey('Your Name', 'user.name', mp.UserName)
|
||||
self._PromptKey('Your Email', 'user.email', mp.UserEmail)
|
||||
while True:
|
||||
print ''
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
|
||||
print ''
|
||||
print 'Your identity is: %s <%s>' % (name, email)
|
||||
sys.stdout.write('is this correct [y/n]? ')
|
||||
a = sys.stdin.readline().strip()
|
||||
if a in ('yes', 'y', 't', 'true'):
|
||||
break
|
||||
|
||||
if name != mp.UserName:
|
||||
mp.config.SetString('user.name', name)
|
||||
if email != mp.UserEmail:
|
||||
mp.config.SetString('user.email', email)
|
||||
|
||||
def _HasColorSet(self, gc):
|
||||
for n in ['ui', 'diff', 'status']:
|
||||
@ -200,6 +236,25 @@ to update the working directory files.
|
||||
if a in ('y', 'yes', 't', 'true', 'on'):
|
||||
gc.SetString('color.ui', 'auto')
|
||||
|
||||
def _ConfigureDepth(self, opt):
|
||||
"""Configure the depth we'll sync down.
|
||||
|
||||
Args:
|
||||
opt: Options from optparse. We care about opt.depth.
|
||||
"""
|
||||
# Opt.depth will be non-None if user actually passed --depth to repo init.
|
||||
if opt.depth is not None:
|
||||
if opt.depth > 0:
|
||||
# Positive values will set the depth.
|
||||
depth = str(opt.depth)
|
||||
else:
|
||||
# Negative numbers will clear the depth; passing None to SetString
|
||||
# will do that.
|
||||
depth = None
|
||||
|
||||
# We store the depth in the main manifest project.
|
||||
self.manifest.manifestProject.config.SetString('repo.depth', depth)
|
||||
|
||||
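Tying this to the fetch code earlier in this change: the stored repo.depth is only applied to a project's first fetch, roughly as in the sketch below (manifest_config and initial are stand-ins for the objects used there).

  depth = manifest_config.GetString('repo.depth')
  cmd = ['fetch']
  if depth and initial:
    cmd.append('--depth=%s' % depth)   # shallow only on the initial fetch
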
def Execute(self, opt, args):
|
||||
git_require(MIN_GIT_VERSION, fail=True)
|
||||
self._SyncManifest(opt)
|
||||
@ -209,6 +264,8 @@ to update the working directory files.
|
||||
self._ConfigureUser()
|
||||
self._ConfigureColor()
|
||||
|
||||
self._ConfigureDepth(opt)
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
type = 'mirror '
|
||||
else:
|
||||
|
48
subcmds/list.py
Normal file
@ -0,0 +1,48 @@
|
||||
#
|
||||
# Copyright (C) 2011 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
|
||||
class List(Command, MirrorSafeCommand):
|
||||
common = True
|
||||
helpSummary = "List projects and their associated directories"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
List all projects; pass '.' to list the project for the cwd.
|
||||
|
||||
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
"""List all projects and the associated directories.
|
||||
|
||||
This may be possible to do with 'repo forall', but repo newbies have
|
||||
trouble figuring that out. The idea here is that it should be more
|
||||
discoverable.
|
||||
|
||||
Args:
|
||||
opt: The options. We don't take any.
|
||||
args: Positional args. Can be a list of projects to list, or empty.
|
||||
"""
|
||||
projects = self.GetProjects(args)
|
||||
|
||||
lines = []
|
||||
for project in projects:
|
||||
lines.append("%s : %s" % (project.relpath, project.name))
|
||||
|
||||
lines.sort()
|
||||
print '\n'.join(lines)
|
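The output is one "path : name" pair per project, sorted by path. For instance (project names here are purely illustrative):

  build : platform/build
  manifest : platform/manifest
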
107
subcmds/rebase.py
Normal file
@ -0,0 +1,107 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
|
||||
from error import GitError
|
||||
|
||||
class Rebase(Command):
|
||||
common = True
|
||||
helpSummary = "Rebase local branches on upstream branch"
|
||||
helpUsage = """
|
||||
%prog {[<project>...] | -i <project>...}
|
||||
"""
|
||||
helpDescription = """
|
||||
'%prog' uses git rebase to move local changes in the current topic branch to
|
||||
the HEAD of the upstream history, useful when you have made commits in a topic
|
||||
branch but need to incorporate new upstream changes "underneath" them.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-i', '--interactive',
|
||||
dest="interactive", action="store_true",
|
||||
help="interactive rebase (single project only)")
|
||||
|
||||
p.add_option('-f', '--force-rebase',
|
||||
dest='force_rebase', action='store_true',
|
||||
help='Pass --force-rebase to git rebase')
|
||||
p.add_option('--no-ff',
|
||||
dest='no_ff', action='store_true',
|
||||
help='Pass --no-ff to git rebase')
|
||||
p.add_option('-q', '--quiet',
|
||||
dest='quiet', action='store_true',
|
||||
help='Pass --quiet to git rebase')
|
||||
p.add_option('--autosquash',
|
||||
dest='autosquash', action='store_true',
|
||||
help='Pass --autosquash to git rebase')
|
||||
p.add_option('--whitespace',
|
||||
dest='whitespace', action='store', metavar='WS',
|
||||
help='Pass --whitespace to git rebase')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = self.GetProjects(args)
|
||||
one_project = len(all) == 1
|
||||
|
||||
if opt.interactive and not one_project:
|
||||
print >>sys.stderr, 'error: interactive rebase not supported with multiple projects'
|
||||
return -1
|
||||
|
||||
for project in all:
|
||||
cb = project.CurrentBranch
|
||||
if not cb:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s has a detached HEAD" % project.relpath
|
||||
return -1
|
||||
# ignore branches with detached HEADs
|
||||
continue
|
||||
|
||||
upbranch = project.GetBranch(cb)
|
||||
if not upbranch.LocalMerge:
|
||||
if one_project:
|
||||
print >>sys.stderr, "error: project %s does not track any remote branches" % project.relpath
|
||||
return -1
|
||||
# ignore branches without remotes
|
||||
continue
|
||||
|
||||
args = ["rebase"]
|
||||
|
||||
if opt.whitespace:
|
||||
args.append('--whitespace=%s' % opt.whitespace)
|
||||
|
||||
if opt.quiet:
|
||||
args.append('--quiet')
|
||||
|
||||
if opt.force_rebase:
|
||||
args.append('--force-rebase')
|
||||
|
||||
if opt.no_ff:
|
||||
args.append('--no-ff')
|
||||
|
||||
if opt.autosquash:
|
||||
args.append('--autosquash')
|
||||
|
||||
if opt.interactive:
|
||||
args.append("-i")
|
||||
|
||||
args.append(upbranch.LocalMerge)
|
||||
|
||||
print >>sys.stderr, '# %s: rebasing %s -> %s' % \
|
||||
(project.relpath, cb, upbranch.LocalMerge)
|
||||
|
||||
if GitCommand(project, args).Wait() != 0:
|
||||
return -1
|
@ -55,6 +55,7 @@ need to be performed by an end-user.
|
||||
print >>sys.stderr, "error: can't update repo"
|
||||
sys.exit(1)
|
||||
|
||||
rp.bare_git.gc('--auto')
|
||||
_PostRepoFetch(rp,
|
||||
no_repo_verify = opt.no_repo_verify,
|
||||
verbose = True)
|
||||
|
33
subcmds/smartsync.py
Normal file
@ -0,0 +1,33 @@
|
||||
#
|
||||
# Copyright (C) 2010 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from sync import Sync
|
||||
|
||||
class Smartsync(Sync):
|
||||
common = True
|
||||
helpSummary = "Update working tree to the latest known good revision"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
The '%prog' command is a shortcut for sync -s.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
Sync._Options(self, p, show_smart=False)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
opt.smart_sync = True
|
||||
Sync.Execute(self, opt, args)
|
@ -15,6 +15,15 @@
|
||||
|
||||
from command import PagedCommand
|
||||
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
import itertools
|
||||
import sys
|
||||
import StringIO
|
||||
|
||||
class Status(PagedCommand):
|
||||
common = True
|
||||
helpSummary = "Show the working tree status"
|
||||
@ -27,6 +36,9 @@ and the most recent commit on this branch (HEAD), in each project
|
||||
specified. A summary is displayed, one line per file where there
|
||||
is a difference between these three states.
|
||||
|
||||
The -j/--jobs option can be used to run multiple status queries
|
||||
in parallel.
|
||||
|
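For example (the job count is arbitrary), checking up to four projects at once:

  repo status -j 4
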
||||
Status Display
|
||||
--------------
|
||||
|
||||
@ -60,9 +72,34 @@ the following meanings:
|
||||
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-j', '--jobs',
|
||||
dest='jobs', action='store', type='int', default=2,
|
||||
help="number of projects to check simultaneously")
|
||||
|
||||
def _StatusHelper(self, project, clean_counter, sem, output):
|
||||
"""Obtains the status for a specific project.
|
||||
|
||||
Obtains the status for a project, redirecting the output to
|
||||
the specified object. It will release the semaphore
|
||||
when done.
|
||||
|
||||
Args:
|
||||
project: Project to get status of.
|
||||
clean_counter: Counter for clean projects.
|
||||
sem: Semaphore, will call release() when complete.
|
||||
output: Where to output the status.
|
||||
"""
|
||||
try:
|
||||
state = project.PrintWorkTreeStatus(output)
|
||||
if state == 'CLEAN':
|
||||
clean_counter.next()
|
||||
finally:
|
||||
sem.release()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all = self.GetProjects(args)
|
||||
clean = 0
|
||||
counter = itertools.count()
|
||||
|
||||
on = {}
|
||||
for project in all:
|
||||
@ -77,9 +114,24 @@ the following meanings:
|
||||
for cb in branch_names:
|
||||
print '# on branch %s' % cb
|
||||
|
||||
for project in all:
|
||||
state = project.PrintWorkTreeStatus()
|
||||
if state == 'CLEAN':
|
||||
clean += 1
|
||||
if len(all) == clean:
|
||||
if opt.jobs == 1:
|
||||
for project in all:
|
||||
state = project.PrintWorkTreeStatus()
|
||||
if state == 'CLEAN':
|
||||
counter.next()
|
||||
else:
|
||||
sem = _threading.Semaphore(opt.jobs)
|
||||
threads_and_output = []
|
||||
for project in all:
|
||||
sem.acquire()
|
||||
output = StringIO.StringIO()
|
||||
t = _threading.Thread(target=self._StatusHelper,
|
||||
args=(project, counter, sem, output))
|
||||
threads_and_output.append((t, output))
|
||||
t.start()
|
||||
for (t, output) in threads_and_output:
|
||||
t.join()
|
||||
sys.stdout.write(output.getvalue())
|
||||
output.close()
|
||||
if len(all) == counter.next():
|
||||
print 'nothing to commit (working directory clean)'
|
||||
|
297
subcmds/sync.py
@ -17,11 +17,27 @@ from optparse import SUPPRESS_HELP
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import xmlrpclib
|
||||
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
try:
|
||||
import resource
|
||||
def _rlimit_nofile():
|
||||
return resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
except ImportError:
|
||||
def _rlimit_nofile():
|
||||
return (256, 256)
|
||||
|
||||
from git_command import GIT
|
||||
from git_refs import R_HEADS
|
||||
from project import HEAD
|
||||
from project import Project
|
||||
from project import RemoteSpec
|
||||
@@ -31,7 +47,12 @@ from project import R_HEADS
from project import SyncBuffer
from progress import Progress

class _FetchError(Exception):
"""Internal error thrown in _FetchHelper() when we don't want stack trace."""
pass

class Sync(Command, MirrorSafeCommand):
jobs = 1
common = True
helpSummary = "Update working tree to the latest revision"
helpUsage = """
@@ -57,6 +78,14 @@ back to the manifest revision. This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.

The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest. The -t/--smart-tag option is similar and allows you to
specify a custom tag/label.

The -f/--force-broken option can be used to proceed with syncing
other projects if a project sync fails.

SSH Connections
---------------

@@ -87,7 +116,12 @@ later is required to fix a server side protocol bug.

"""

def _Options(self, p):
def _Options(self, p, show_smart=True):
self.jobs = self.manifest.default.sync_j

p.add_option('-f', '--force-broken',
dest='force_broken', action='store_true',
help="continue sync even if a project fails to sync")
p.add_option('-l','--local-only',
dest='local_only', action='store_true',
help="only update working tree, don't fetch")
@@ -97,6 +131,19 @@ later is required to fix a server side protocol bug.
p.add_option('-d','--detach',
dest='detach_head', action='store_true',
help='detach projects back to manifest revision')
p.add_option('-q','--quiet',
dest='quiet', action='store_true',
help='be more quiet')
p.add_option('-j','--jobs',
dest='jobs', action='store', type='int',
help="projects to fetch simultaneously (default %d)" % self.jobs)
if show_smart:
p.add_option('-s', '--smart-sync',
dest='smart_sync', action='store_true',
help='smart sync using manifest from a known good build')
p.add_option('-t', '--smart-tag',
dest='smart_tag', action='store',
help='smart sync using manifest from a known tag')

g = p.add_option_group('repo Version options')
g.add_option('--no-repo-verify',
@@ -106,18 +153,111 @@ later is required to fix a server side protocol bug.
dest='repo_upgraded', action='store_true',
help=SUPPRESS_HELP)

def _Fetch(self, projects):
def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
"""Main function of the fetch threads when jobs are > 1.

Args:
opt: Program options returned from optparse. See _Options().
project: Project object for the project to fetch.
lock: Lock for accessing objects that are shared amongst multiple
_FetchHelper() threads.
fetched: set object that we will add project.gitdir to when we're done
(with our lock held).
pm: Instance of a Project object. We will call pm.update() (with our
lock held).
sem: We'll release() this semaphore when we exit so that another thread
can be started up.
err_event: We'll set this event in the case of an error (after printing
out info about the error).
"""
# We'll set to true once we've locked the lock.
did_lock = False

# Encapsulate everything in a try/except/finally so that:
# - We always set err_event in the case of an exception.
# - We always make sure we call sem.release().
# - We always make sure we unlock the lock if we locked it.
try:
try:
success = project.Sync_NetworkHalf(quiet=opt.quiet)

# Lock around all the rest of the code, since printing, updating a set
# and Progress.update() are not thread safe.
lock.acquire()
did_lock = True

if not success:
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
if opt.force_broken:
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
else:
raise _FetchError()

fetched.add(project.gitdir)
pm.update()
except BaseException, e:
# Notify the _Fetch() function about all errors.
err_event.set()

# If we got our own _FetchError, we don't want a stack trace.
# However, if we got something else (something in Sync_NetworkHalf?),
# we'd like one (so re-raise after we've set err_event).
if not isinstance(e, _FetchError):
raise
finally:
if did_lock:
lock.release()
sem.release()

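The release discipline in _FetchHelper() is worth calling out: the semaphore
is always released so the coordinator can start another worker, while the
lock is only released if it was actually acquired. A stripped-down sketch of
that shape (do_fetch stands in for Sync_NetworkHalf and is not a real repo
API):

def _worker(do_fetch, lock, sem, err_event):
  did_lock = False
  try:
    try:
      ok = do_fetch()
      lock.acquire()          # serialize printing / shared-state updates
      did_lock = True
      if not ok:
        raise RuntimeError('fetch failed')
    except BaseException:
      err_event.set()         # tell the coordinator something went wrong
      raise
  finally:
    if did_lock:
      lock.release()
    sem.release()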
def _Fetch(self, projects, opt):
fetched = set()
pm = Progress('Fetching projects', len(projects))
for project in projects:
pm.update()

if project.Sync_NetworkHalf():
fetched.add(project.gitdir)
else:
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
if self.jobs == 1:
for project in projects:
pm.update()
if project.Sync_NetworkHalf(quiet=opt.quiet):
fetched.add(project.gitdir)
else:
print >>sys.stderr, 'error: Cannot fetch %s' % project.name
if opt.force_broken:
print >>sys.stderr, 'warn: --force-broken, continuing to sync'
else:
sys.exit(1)
else:
threads = set()
lock = _threading.Lock()
sem = _threading.Semaphore(self.jobs)
err_event = _threading.Event()
for project in projects:
# Check for any errors before starting any new threads.
# ...we'll let existing threads finish, though.
if err_event.isSet():
break

sem.acquire()
t = _threading.Thread(target = self._FetchHelper,
args = (opt,
project,
lock,
fetched,
pm,
sem,
err_event))
threads.add(t)
t.start()

for t in threads:
t.join()

# If we saw an error, exit with code 1 so that other scripts can check.
if err_event.isSet():
print >>sys.stderr, '\nerror: Exited sync due to fetch errors'
sys.exit(1)

pm.end()
for project in projects:
project.bare_git.gc('--auto')
return fetched

def UpdateProjectList(self):
@@ -139,32 +279,36 @@ later is required to fix a server side protocol bug.
if not path:
continue
if path not in new_project_paths:
project = Project(
manifest = self.manifest,
name = path,
remote = RemoteSpec('origin'),
gitdir = os.path.join(self.manifest.topdir,
path, '.git'),
worktree = os.path.join(self.manifest.topdir, path),
relpath = path,
revisionExpr = 'HEAD',
revisionId = None)
if project.IsDirty():
print >>sys.stderr, 'error: Cannot remove project "%s": \
"""If the path has already been deleted, we don't need to do it
"""
if os.path.exists(self.manifest.topdir + '/' + path):
project = Project(
manifest = self.manifest,
name = path,
remote = RemoteSpec('origin'),
gitdir = os.path.join(self.manifest.topdir,
path, '.git'),
worktree = os.path.join(self.manifest.topdir, path),
relpath = path,
revisionExpr = 'HEAD',
revisionId = None)

if project.IsDirty():
print >>sys.stderr, 'error: Cannot remove project "%s": \
uncommitted changes are present' % project.relpath
print >>sys.stderr, ' commit changes, then run sync again'
return -1
else:
print >>sys.stderr, 'Deleting obsolete path %s' % project.worktree
shutil.rmtree(project.worktree)
# Try deleting parent subdirs if they are empty
dir = os.path.dirname(project.worktree)
while dir != self.manifest.topdir:
try:
os.rmdir(dir)
except OSError:
break
dir = os.path.dirname(dir)
print >>sys.stderr, ' commit changes, then run sync again'
return -1
else:
print >>sys.stderr, 'Deleting obsolete path %s' % project.worktree
shutil.rmtree(project.worktree)
# Try deleting parent subdirs if they are empty
dir = os.path.dirname(project.worktree)
while dir != self.manifest.topdir:
try:
os.rmdir(dir)
except OSError:
break
dir = os.path.dirname(dir)

new_project_paths.sort()
fd = open(file_path, 'w')
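The new UpdateProjectList() branch removes a stale worktree and then prunes
any parent directories it emptied, stopping at the manifest top directory. A
minimal sketch of just that cleanup step:

import os
import shutil

def remove_obsolete_worktree(worktree, topdir):
  shutil.rmtree(worktree)
  d = os.path.dirname(worktree)
  while d != topdir:
    try:
      os.rmdir(d)             # succeeds only if the directory is now empty
    except OSError:
      break                   # non-empty (or busy) parent: stop pruning
    d = os.path.dirname(d)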
@@ -176,6 +320,12 @@ uncommitted changes are present' % project.relpath
return 0

def Execute(self, opt, args):
if opt.jobs:
self.jobs = opt.jobs
if self.jobs > 1:
soft_limit, _ = _rlimit_nofile()
self.jobs = min(self.jobs, (soft_limit - 5) / 3)

if opt.network_only and opt.detach_head:
print >>sys.stderr, 'error: cannot combine -n and -d'
sys.exit(1)
@@ -183,6 +333,55 @@ uncommitted changes are present' % project.relpath
print >>sys.stderr, 'error: cannot combine -n and -l'
sys.exit(1)

if opt.smart_sync or opt.smart_tag:
if not self.manifest.manifest_server:
print >>sys.stderr, \
'error: cannot smart sync: no manifest server defined in manifest'
sys.exit(1)
try:
server = xmlrpclib.Server(self.manifest.manifest_server)
if opt.smart_sync:
p = self.manifest.manifestProject
b = p.GetBranch(p.CurrentBranch)
branch = b.merge
if branch.startswith(R_HEADS):
branch = branch[len(R_HEADS):]

env = os.environ.copy()
if (env.has_key('TARGET_PRODUCT') and
env.has_key('TARGET_BUILD_VARIANT')):
target = '%s-%s' % (env['TARGET_PRODUCT'],
env['TARGET_BUILD_VARIANT'])
[success, manifest_str] = server.GetApprovedManifest(branch, target)
else:
[success, manifest_str] = server.GetApprovedManifest(branch)
else:
assert(opt.smart_tag)
[success, manifest_str] = server.GetManifest(opt.smart_tag)

if success:
manifest_name = "smart_sync_override.xml"
manifest_path = os.path.join(self.manifest.manifestProject.worktree,
manifest_name)
try:
f = open(manifest_path, 'w')
try:
f.write(manifest_str)
finally:
f.close()
except IOError:
print >>sys.stderr, 'error: cannot write manifest to %s' % \
manifest_path
sys.exit(1)
self.manifest.Override(manifest_name)
else:
print >>sys.stderr, 'error: %s' % manifest_str
sys.exit(1)
except socket.error:
print >>sys.stderr, 'error: cannot connect to manifest server %s' % (
self.manifest.manifest_server)
sys.exit(1)

rp = self.manifest.repoProject
rp.PreSync()

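The smart-sync path talks XML-RPC to the manifest-server defined in the
manifest, asks for an approved (or tagged) manifest, and writes the result to
smart_sync_override.xml before overriding the active manifest. A rough sketch
of the server round-trip, using the same GetApprovedManifest / GetManifest
calls shown above (error handling trimmed):

import xmlrpclib

def fetch_smart_manifest(server_url, branch=None, tag=None):
  server = xmlrpclib.Server(server_url)
  if tag:
    success, manifest_str = server.GetManifest(tag)
  else:
    success, manifest_str = server.GetApprovedManifest(branch)
  if not success:
    raise RuntimeError(manifest_str)      # server returns an error message
  return manifest_str                     # caller writes this to the override file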
@@ -192,6 +391,17 @@ uncommitted changes are present' % project.relpath
if opt.repo_upgraded:
_PostRepoUpgrade(self.manifest)

if not opt.local_only:
mp.Sync_NetworkHalf(quiet=opt.quiet)

if mp.HasChanges:
syncbuf = SyncBuffer(mp.config)
mp.Sync_LocalHalf(syncbuf)
if not syncbuf.Finish():
sys.exit(1)
self.manifest._Unload()
if opt.jobs is None:
self.jobs = self.manifest.default.sync_j
all = self.GetProjects(args, missing_ok=True)

if not opt.local_only:
@@ -199,28 +409,21 @@ uncommitted changes are present' % project.relpath
now = time.time()
if (24 * 60 * 60) <= (now - rp.LastFetch):
to_fetch.append(rp)
to_fetch.append(mp)
to_fetch.extend(all)

fetched = self._Fetch(to_fetch)
fetched = self._Fetch(to_fetch, opt)
_PostRepoFetch(rp, opt.no_repo_verify)
if opt.network_only:
# bail out now; the rest touches the working tree
return

if mp.HasChanges:
syncbuf = SyncBuffer(mp.config)
mp.Sync_LocalHalf(syncbuf)
if not syncbuf.Finish():
sys.exit(1)

self.manifest._Unload()
all = self.GetProjects(args, missing_ok=True)
missing = []
for project in all:
if project.gitdir not in fetched:
missing.append(project)
self._Fetch(missing)
self._Fetch(missing, opt)

if self.manifest.IsMirror:
# bail out now, we have no working tree
@@ -241,6 +444,10 @@ uncommitted changes are present' % project.relpath
if not syncbuf.Finish():
sys.exit(1)

# If there's a notice that's supposed to print at the end of the sync, print
# it now...
if self.manifest.notice:
print self.manifest.notice

def _PostRepoUpgrade(manifest):
for project in manifest.projects.values():
@@ -289,9 +496,9 @@ warning: Cannot automatically authenticate repo."""
% (project.name, rev)
return False

env = dict(os.environ)
env['GIT_DIR'] = project.gitdir
env['GNUPGHOME'] = gpg_dir
env = os.environ.copy()
env['GIT_DIR'] = project.gitdir.encode()
env['GNUPGHOME'] = gpg_dir.encode()

cmd = [GIT, 'tag', '-v', cur]
proc = subprocess.Popen(cmd,
subcmds/upload.py
@@ -13,12 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import re
import sys

from command import InteractiveCommand
from editor import Editor
from error import UploadError
from error import HookError, UploadError
from project import RepoHook

UNUSUAL_COMMIT_THRESHOLD = 5

def _ConfirmManyUploads(multiple_branches=False):
if multiple_branches:
print "ATTENTION: One or more branches has an unusually high number of commits."
else:
print "ATTENTION: You are uploading an unusually high number of commits."
print "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across branches?)"
answer = raw_input("If you are sure you intend to do this, type 'yes': ").strip()
return answer == "yes"

def _die(fmt, *args):
msg = fmt % args
@@ -35,7 +48,7 @@ class Upload(InteractiveCommand):
common = True
helpSummary = "Upload changes for code review"
helpUsage="""
%prog [--re --cc] {[<project>]... | --replace <project>}
%prog [--re --cc] [<project>]...
"""
helpDescription = """
The '%prog' command is used to send changes to the Gerrit Code
@@ -55,12 +68,6 @@ added to the respective list of users, and emails are sent to any
new users. Users passed as --reviewers must already be registered
with the code review system, or the upload will fail.

If the --replace option is passed the user can designate which
existing change(s) in Gerrit match up to the commits in the branch
being uploaded. For each matched pair of change,commit the commit
will be added as a new patch set, completely replacing the set of
files and description associated with the change in Gerrit.

Configuration
-------------

@@ -72,6 +79,19 @@ to "true" then repo will assume you always answer "y" at the prompt,
and will not prompt you further. If it is set to "false" then repo
will assume you always answer "n", and will abort.

review.URL.autocopy:

To automatically copy a user or mailing list to all uploaded reviews,
you can set a per-project or global Git option to do so. Specifically,
review.URL.autocopy can be set to a comma separated list of reviewers
who you always want copied on all uploads with a non-empty --re
argument.

review.URL.username:

Override the username used to connect to Gerrit Code Review.
By default the local part of the email address is used.

The URL must match the review URL listed in the manifest XML file,
or in the .git/config within the project. For example:
@@ -81,6 +101,7 @@ or in the .git/config within the project. For example:

[review "http://review.example.com/"]
autoupload = true
autocopy = johndoe@company.com,my-team-alias@company.com

References
----------
@@ -90,17 +111,43 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
"""

def _Options(self, p):
p.add_option('--replace',
dest='replace', action='store_true',
help='Upload replacement patchesets from this branch')
p.add_option('-t',
dest='auto_topic', action='store_true',
help='Send local branch name to Gerrit Code Review')
p.add_option('--re', '--reviewers',
type='string', action='append', dest='reviewers',
help='Request reviews from these people.')
p.add_option('--cc',
type='string', action='append', dest='cc',
help='Also send email to these email addresses.')
p.add_option('--br',
type='string', action='store', dest='branch',
help='Branch to upload.')

def _SingleBranch(self, branch, people):
# Options relating to upload hook. Note that verify and no-verify are NOT
# opposites of each other, which is why they store to different locations.
# We are using them to match 'git commit' syntax.
#
# Combinations:
# - no-verify=False, verify=False (DEFAULT):
# If stdout is a tty, can prompt about running upload hooks if needed.
# If user denies running hooks, the upload is cancelled. If stdout is
# not a tty and we would need to prompt about upload hooks, upload is
# cancelled.
# - no-verify=False, verify=True:
# Always run upload hooks with no prompt.
# - no-verify=True, verify=False:
# Never run upload hooks, but upload anyway (AKA bypass hooks).
# - no-verify=True, verify=True:
# Invalid
p.add_option('--no-verify',
dest='bypass_hooks', action='store_true',
help='Do not run the upload hook.')
p.add_option('--verify',
dest='allow_all_hooks', action='store_true',
help='Run the upload hook without prompting.')

def _SingleBranch(self, opt, branch, people):
project = branch.project
name = branch.name
remote = project.GetBranch(name).remote
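The comment block above defines how --no-verify and --verify combine; the
same decision, written out as a small helper (should_run_hook and ask are
illustrative names, not repo APIs), might look like this:

import sys

def should_run_hook(bypass_hooks, allow_all_hooks, ask):
  if bypass_hooks:
    return False              # --no-verify: skip the hook, upload anyway
  if allow_all_hooks:
    return True               # --verify: run the hook without prompting
  if sys.stdout.isatty():
    return ask()              # default: prompt; a "no" cancels the upload
  return False                # cannot prompt: treat as cancelled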
@@ -129,11 +176,15 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
answer = answer in ('y', 'Y', 'yes', '1', 'true', 't')

if answer:
self._UploadAndReport([branch], people)
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
answer = _ConfirmManyUploads()

if answer:
self._UploadAndReport(opt, [branch], people)
else:
_die("upload aborted by user")

def _MultipleBranches(self, pending, people):
def _MultipleBranches(self, opt, pending, people):
projects = {}
branches = {}

@@ -192,7 +243,30 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
todo.append(branch)
if not todo:
_die("nothing uncommented for upload")
self._UploadAndReport(todo, people)

many_commits = False
for branch in todo:
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
many_commits = True
break
if many_commits:
if not _ConfirmManyUploads(multiple_branches=True):
_die("upload aborted by user")

self._UploadAndReport(opt, todo, people)

def _AppendAutoCcList(self, branch, people):
"""
Appends the list of users in the CC list in the git project's config if a
non-empty reviewer list was found.
"""

name = branch.name
project = branch.project
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
raw_list = project.config.GetString(key)
if not raw_list is None and len(people[0]) > 0:
people[1].extend([entry.strip() for entry in raw_list.split(',')])

def _FindGerritChange(self, branch):
last_pub = branch.project.WasPublished(branch.name)
@@ -206,66 +280,29 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
except:
return ""

def _ReplaceBranch(self, project, people):
branch = project.CurrentBranch
if not branch:
print >>sys.stdout, "no branches ready for upload"
return
branch = project.GetUploadableBranch(branch)
if not branch:
print >>sys.stdout, "no branches ready for upload"
return

script = []
script.append('# Replacing from branch %s' % branch.name)

if len(branch.commits) == 1:
change = self._FindGerritChange(branch)
script.append('[%-6s] %s' % (change, branch.commits[0]))
else:
for commit in branch.commits:
script.append('[ ] %s' % commit)

script.append('')
script.append('# Insert change numbers in the brackets to add a new patch set.')
script.append('# To create a new change record, leave the brackets empty.')

script = Editor.EditString("\n".join(script)).split("\n")

change_re = re.compile(r'^\[\s*(\d{1,})\s*\]\s*([0-9a-f]{1,}) .*$')
to_replace = dict()
full_hashes = branch.unabbrev_commits

for line in script:
m = change_re.match(line)
if m:
c = m.group(1)
f = m.group(2)
try:
f = full_hashes[f]
except KeyError:
print 'fh = %s' % full_hashes
print >>sys.stderr, "error: commit %s not found" % f
sys.exit(1)
if c in to_replace:
print >>sys.stderr,\
"error: change %s cannot accept multiple commits" % c
sys.exit(1)
to_replace[c] = f

if not to_replace:
print >>sys.stderr, "error: no replacements specified"
print >>sys.stderr, " use 'repo upload' without --replace"
sys.exit(1)

branch.replace_changes = to_replace
self._UploadAndReport([branch], people)

def _UploadAndReport(self, todo, people):
def _UploadAndReport(self, opt, todo, original_people):
have_errors = False
for branch in todo:
try:
branch.UploadForReview(people)
people = copy.deepcopy(original_people)
self._AppendAutoCcList(branch, people)

# Check if there are local changes that may have been forgotten
if branch.project.HasChanges():
key = 'review.%s.autoupload' % branch.project.remote.review
answer = branch.project.config.GetBoolean(key)

# if they want to auto upload, let's not ask because it could be automated
if answer is None:
sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/n) ')
a = sys.stdin.readline().strip().lower()
if a not in ('y', 'yes', 't', 'true', 'on'):
print >>sys.stderr, "skipping upload"
branch.uploaded = False
branch.error = 'User aborted'
continue

branch.UploadForReview(people, auto_topic=opt.auto_topic)
branch.uploaded = True
except UploadError, e:
branch.error = e
@@ -273,15 +310,19 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
have_errors = True

print >>sys.stderr, ''
print >>sys.stderr, '--------------------------------------------'
print >>sys.stderr, '----------------------------------------------------------------------'

if have_errors:
for branch in todo:
if not branch.uploaded:
print >>sys.stderr, '[FAILED] %-15s %-15s (%s)' % (
if len(str(branch.error)) <= 30:
fmt = ' (%s)'
else:
fmt = '\n (%s)'
print >>sys.stderr, ('[FAILED] %-15s %-15s' + fmt) % (
branch.project.relpath + '/', \
branch.name, \
branch.error)
str(branch.error))
print >>sys.stderr, ''

for branch in todo:
@@ -298,6 +339,25 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
pending = []
reviewers = []
cc = []
branch = None

if opt.branch:
branch = opt.branch

for project in project_list:
avail = project.GetUploadableBranches(branch)
if avail:
pending.append((project, avail))

if pending and (not opt.bypass_hooks):
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
self.manifest.topdir, abort_if_user_denies=True)
pending_proj_names = [project.name for (project, avail) in pending]
try:
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names)
except HookError, e:
print >>sys.stderr, "ERROR: %s" % str(e)
return

if opt.reviewers:
reviewers = _SplitEmails(opt.reviewers)
@@ -305,22 +365,9 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
cc = _SplitEmails(opt.cc)
people = (reviewers,cc)

if opt.replace:
if len(project_list) != 1:
print >>sys.stderr, \
'error: --replace requires exactly one project'
sys.exit(1)
self._ReplaceBranch(project_list[0], people)
return

for project in project_list:
avail = project.GetUploadableBranches()
if avail:
pending.append((project, avail))

if not pending:
print >>sys.stdout, "no branches ready for upload"
elif len(pending) == 1 and len(pending[0][1]) == 1:
self._SingleBranch(pending[0][1][0], people)
self._SingleBranch(opt, pending[0][1][0], people)
else:
self._MultipleBranches(pending, people)
self._MultipleBranches(opt, pending, people)
3 tests/fixtures/test.gitconfig vendored Normal file
@@ -0,0 +1,3 @@
[section]
empty
nonempty = true
52 tests/test_git_config.py Normal file
@@ -0,0 +1,52 @@
import os
import unittest

import git_config

def fixture(*paths):
"""Return a path relative to test/fixtures.
"""
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)

class GitConfigUnitTest(unittest.TestCase):
"""Tests the GitConfig class.
"""
def setUp(self):
"""Create a GitConfig object using the test.gitconfig fixture.
"""
config_fixture = fixture('test.gitconfig')
self.config = git_config.GitConfig(config_fixture)

def test_GetString_with_empty_config_values(self):
"""
Test config entries with no value.

[section]
empty

"""
val = self.config.GetString('section.empty')
self.assertEqual(val, None)

def test_GetString_with_true_value(self):
"""
Test config entries with a string value.

[section]
nonempty = true

"""
val = self.config.GetString('section.nonempty')
self.assertEqual(val, 'true')

def test_GetString_from_missing_file(self):
"""
Test missing config file
"""
config_fixture = fixture('not.present.gitconfig')
config = git_config.GitConfig(config_fixture)
val = config.GetString('empty')
self.assertEqual(val, None)

if __name__ == '__main__':
unittest.main()
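Since the test module ends with unittest.main(), it can be run directly from
the top of a repo checkout (so that git_config is importable), and the fixture
can be exercised the same way the tests do:

import git_config

cfg = git_config.GitConfig('tests/fixtures/test.gitconfig')
print cfg.GetString('section.nonempty')   # -> 'true'
print cfg.GetString('section.empty')      # -> None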