2008-10-21 14:00:00 +00:00
|
|
|
# Copyright (C) 2008 The Android Open Source Project
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2012-11-02 05:59:27 +00:00
|
|
|
from __future__ import print_function
|
Support repo-level pre-upload hook and prep for future hooks.
All repo-level hooks are expected to live in a single project at the
top level of that project. The name of the hooks project is provided
in the manifest.xml. The manifest also lists which hooks are enabled
to make it obvious if a file somehow failed to sync down (or got
deleted).
Before running any hook, we will prompt the user to make sure that it
is OK. A user can deny running the hook, allow once, or allow
"forever" (until hooks change). This tries to keep with the git
spirit of not automatically running anything on the user's computer
that got synced down. Note that individual repo commands can add
always options to avoid these prompts as they see fit (see below for
the 'upload' options).
When hooks are run, they are loaded into the current interpreter (the
one running repo) and their main() function is run. This mechanism is
used (instead of using subprocess) to make it easier to expand to a
richer hook interface in the future. During loading, the
interpreter's sys.path is updated to contain the directory containing
the hooks so that hooks can be split into multiple files.
The upload command has two options that control hook behavior:
- no-verify=False, verify=False (DEFAULT):
If stdout is a tty, can prompt about running upload hooks if needed.
If user denies running hooks, the upload is cancelled. If stdout is
not a tty and we would need to prompt about upload hooks, upload is
cancelled.
- no-verify=False, verify=True:
Always run upload hooks with no prompt.
- no-verify=True, verify=False:
Never run upload hooks, but upload anyway (AKA bypass hooks).
- no-verify=True, verify=True:
Invalid
Sample bit of manifest.xml code for enabling hooks (assumes you have a
project named 'hooks' where hooks are stored):
<repo-hooks in-project="hooks" enabled-list="pre-upload" />
Sample main() function in pre-upload.py in hooks directory:
def main(project_list, **kwargs):
print ('These projects will be uploaded: %s' %
', '.join(project_list))
print ('I am being a good boy and ignoring anything in kwargs\n'
'that I don\'t understand.')
print 'I fail 50% of the time. How flaky.'
if random.random() <= .5:
raise Exception('Pre-upload hook failed. Have a nice day.')
Change-Id: I5cefa2cd5865c72589263cf8e2f152a43c122f70
2011-03-04 19:54:18 +00:00
|
|
|
import traceback
|
2008-11-03 17:59:36 +00:00
|
|
|
import errno
|
2008-10-21 14:00:00 +00:00
|
|
|
import filecmp
|
|
|
|
import os
|
2011-10-03 15:30:24 +00:00
|
|
|
import random
|
2008-10-21 14:00:00 +00:00
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import stat
|
2012-08-02 21:57:37 +00:00
|
|
|
import subprocess
|
2008-10-21 14:00:00 +00:00
|
|
|
import sys
|
2013-10-16 09:02:35 +00:00
|
|
|
import tarfile
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
import tempfile
|
2011-10-03 15:30:24 +00:00
|
|
|
import time
|
2011-10-11 21:05:21 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
from color import Coloring
|
2012-10-31 19:27:27 +00:00
|
|
|
from git_command import GitCommand, git_require
|
2011-08-26 00:21:47 +00:00
|
|
|
from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
|
2012-08-22 01:46:11 +00:00
|
|
|
from error import GitError, HookError, UploadError
|
2009-03-02 20:56:08 +00:00
|
|
|
from error import ManifestInvalidRevisionError
|
2012-11-16 01:33:11 +00:00
|
|
|
from error import NoManifestException
|
2012-08-02 21:57:37 +00:00
|
|
|
from trace import IsTrace, Trace
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-04-18 01:49:50 +00:00
|
|
|
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2013-05-17 01:49:33 +00:00
|
|
|
from pyversion import is_python3
|
|
|
|
if not is_python3():
  # On Python 2, alias the builtin input() to raw_input so it matches the
  # Python 3 semantics (read one line as a string, without eval'ing it).
  # pylint:disable=W0622
  input = raw_input
  # pylint:enable=W0622
|
2013-03-01 13:44:38 +00:00
|
|
|
|
2009-04-18 21:45:51 +00:00
|
|
|
def _lwrite(path, content):
|
|
|
|
lock = '%s.lock' % path
|
|
|
|
|
|
|
|
fd = open(lock, 'wb')
|
|
|
|
try:
|
|
|
|
fd.write(content)
|
|
|
|
finally:
|
|
|
|
fd.close()
|
|
|
|
|
|
|
|
try:
|
|
|
|
os.rename(lock, path)
|
|
|
|
except OSError:
|
|
|
|
os.remove(lock)
|
|
|
|
raise
|
|
|
|
|
2009-04-16 15:25:57 +00:00
|
|
|
def _error(fmt, *args):
|
|
|
|
msg = fmt % args
|
2012-11-02 05:59:27 +00:00
|
|
|
print('error: %s' % msg, file=sys.stderr)
|
2009-04-16 15:25:57 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def not_rev(r):
  """Return the rev-list exclusion form ('^<rev>') of revision *r*."""
  return '^%s' % r
|
|
|
|
|
2009-01-06 00:18:58 +00:00
|
|
|
def sq(r):
  """Single-quote *r* for safe interpolation into a POSIX shell command.

  Embedded single quotes are replaced with the standard sh idiom
  '\\'' (close the quoted span, emit an escaped quote, reopen it).

  Note: the previous replacement literal "'\''" evaluated, in Python,
  to three plain quote characters (backslash-quote inside double quotes
  is just a quote), which drops the embedded quote in the shell instead
  of escaping it.  The backslash must itself be escaped: "'\\''".
  """
  return "'" + r.replace("'", "'\\''") + "'"
|
2008-11-03 18:32:09 +00:00
|
|
|
|
2011-01-10 22:16:30 +00:00
|
|
|
_project_hook_list = None


def _ProjectHooks():
  """List the hooks present in the 'hooks' directory.

  These hooks are project hooks and are copied to the '.git/hooks'
  directory of all subprojects.

  The result is computed once (from the 'hooks' directory next to this
  file) and cached in a module-level variable for subsequent calls.

  Returns:
    A list of absolute paths to all of the files in the hooks directory.
  """
  global _project_hook_list
  if _project_hook_list is None:
    here = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
    hooks_dir = os.path.join(here, 'hooks')
    _project_hook_list = [os.path.join(hooks_dir, entry)
                          for entry in os.listdir(hooks_dir)]
  return _project_hook_list
|
2008-11-03 18:32:09 +00:00
|
|
|
|
|
|
|
|
2008-10-23 18:58:52 +00:00
|
|
|
class DownloadedChange(object):
  """A change fetched from the review server into the local client.

  Attributes mirror the constructor arguments; the `commits` property
  lazily lists the commits the change adds on top of `base`.
  """

  # Cache for the `commits` property; the class-level default is
  # shadowed by an instance attribute on first access.
  _commit_cache = None

  def __init__(self, project, base, change_id, ps_id, commit):
    self.project = project        # owning Project (provides bare_git)
    self.base = base              # revision the change is compared against
    self.change_id = change_id    # change identifier on the review server
    self.ps_id = ps_id            # patch set identifier within the change
    self.commit = commit          # tip commit of the downloaded change

  @property
  def commits(self):
    """One-line descriptions (abbreviated SHA + subject) of the commits
    in base..commit, oldest first.  The rev-list output is cached."""
    if self._commit_cache is None:
      self._commit_cache = self.project.bare_git.rev_list(
          '--abbrev=8',
          '--abbrev-commit',
          '--pretty=oneline',
          '--reverse',
          '--date-order',
          not_rev(self.base),
          self.commit,
          '--')
    return self._commit_cache
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
class ReviewableBranch(object):
  """A local topic branch whose commits can be uploaded for review.

  Wraps a branch of `project`; the commits it carries on top of `base`
  are the candidates for upload.
  """

  # Cache for the `commits` property; filled on first access.
  _commit_cache = None

  def __init__(self, project, branch, base):
    self.project = project   # owning Project (provides bare_git, UploadForReview)
    self.branch = branch     # branch object; provides .name and .remote
    self.base = base         # revision the branch's commits are compared against

  @property
  def name(self):
    """Name of the underlying branch."""
    return self.branch.name

  @property
  def commits(self):
    """One-line descriptions (abbreviated SHA + subject) of the commits
    in base..branch, oldest first.  The rev-list output is cached."""
    if self._commit_cache is None:
      self._commit_cache = self.project.bare_git.rev_list(
          '--abbrev=8',
          '--abbrev-commit',
          '--pretty=oneline',
          '--reverse',
          '--date-order',
          not_rev(self.base),
          R_HEADS + self.name,
          '--')
    return self._commit_cache

  @property
  def unabbrev_commits(self):
    """Map of abbreviated (first 8 chars) commit id -> full commit id
    for the commits in base..branch."""
    r = dict()
    for commit in self.project.bare_git.rev_list(
        not_rev(self.base),
        R_HEADS + self.name,
        '--'):
      r[commit[0:8]] = commit
    return r

  @property
  def date(self):
    """Committer date (git log '%cd' format) of the branch head."""
    return self.project.bare_git.log(
        '--pretty=format:%cd',
        '-n', '1',
        R_HEADS + self.name,
        '--')

  def UploadForReview(self, people, auto_topic=False, draft=False, dest_branch=None):
    """Upload this branch for review; delegates to Project.UploadForReview."""
    self.project.UploadForReview(self.name,
                                 people,
                                 auto_topic=auto_topic,
                                 draft=draft,
                                 dest_branch=dest_branch)

  def GetPublishedRefs(self):
    """Return {sha: ref} for the refs/changes/* refs on the review server.

    Output lines from `git ls-remote` that do not split into exactly
    two fields are silently skipped.
    """
    refs = {}
    output = self.project.bare_git.ls_remote(
        self.branch.remote.SshReviewUrl(self.project.UserEmail),
        'refs/changes/*')
    for line in output.split('\n'):
      try:
        (sha, ref) = line.split()
        refs[sha] = ref
      except ValueError:
        pass

    return refs
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
class StatusColoring(Coloring):
  """Color printers bound to the 'status' color configuration section."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'status')
    # Project and branch headers share the same bold 'header' style.
    self.project = self.printer('header', attr='bold')
    self.branch = self.printer('header', attr='bold')
    self.nobranch = self.printer('nobranch', fg='red')
    self.important = self.printer('important', fg='red')

    # Per-file status states.
    self.added = self.printer('added', fg='green')
    self.changed = self.printer('changed', fg='red')
    self.untracked = self.printer('untracked', fg='red')
|
|
|
|
|
|
|
|
class DiffColoring(Coloring):
  """Color printers bound to the 'diff' color configuration section."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'diff')
    self.project = self.printer('header', attr='bold')
2012-04-12 20:04:13 +00:00
|
|
|
class _Annotation:
|
|
|
|
def __init__(self, name, value, keep):
|
|
|
|
self.name = name
|
|
|
|
self.value = value
|
|
|
|
self.keep = keep
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
class _CopyFile:
|
2009-03-05 18:32:38 +00:00
|
|
|
def __init__(self, src, dest, abssrc, absdest):
|
2008-10-21 14:00:00 +00:00
|
|
|
self.src = src
|
|
|
|
self.dest = dest
|
2009-03-05 18:32:38 +00:00
|
|
|
self.abs_src = abssrc
|
|
|
|
self.abs_dest = absdest
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
def _Copy(self):
|
2009-03-05 18:32:38 +00:00
|
|
|
src = self.abs_src
|
|
|
|
dest = self.abs_dest
|
2008-10-21 14:00:00 +00:00
|
|
|
# copy file if it does not exist or is out of date
|
|
|
|
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
|
|
|
|
try:
|
|
|
|
# remove existing file first, since it might be read-only
|
|
|
|
if os.path.exists(dest):
|
|
|
|
os.remove(dest)
|
2009-07-11 13:43:47 +00:00
|
|
|
else:
|
2012-09-29 22:37:55 +00:00
|
|
|
dest_dir = os.path.dirname(dest)
|
|
|
|
if not os.path.isdir(dest_dir):
|
|
|
|
os.makedirs(dest_dir)
|
2008-10-21 14:00:00 +00:00
|
|
|
shutil.copy(src, dest)
|
|
|
|
# make the file read-only
|
|
|
|
mode = os.stat(dest)[stat.ST_MODE]
|
|
|
|
mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
|
|
|
|
os.chmod(dest, mode)
|
|
|
|
except IOError:
|
2009-04-16 15:25:57 +00:00
|
|
|
_error('Cannot copy file %s to %s', src, dest)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2014-04-21 22:10:59 +00:00
|
|
|
class _LinkFile:
|
|
|
|
def __init__(self, src, dest, abssrc, absdest):
|
|
|
|
self.src = src
|
|
|
|
self.dest = dest
|
|
|
|
self.abs_src = abssrc
|
|
|
|
self.abs_dest = absdest
|
|
|
|
|
|
|
|
def _Link(self):
|
|
|
|
src = self.abs_src
|
|
|
|
dest = self.abs_dest
|
|
|
|
# link file if it does not exist or is out of date
|
|
|
|
if not os.path.islink(dest) or os.readlink(dest) != src:
|
|
|
|
try:
|
|
|
|
# remove existing file first, since it might be read-only
|
|
|
|
if os.path.exists(dest):
|
|
|
|
os.remove(dest)
|
|
|
|
else:
|
|
|
|
dest_dir = os.path.dirname(dest)
|
|
|
|
if not os.path.isdir(dest_dir):
|
|
|
|
os.makedirs(dest_dir)
|
|
|
|
os.symlink(src, dest)
|
|
|
|
except IOError:
|
|
|
|
_error('Cannot link file %s to %s', src, dest)
|
|
|
|
|
2009-05-19 21:58:02 +00:00
|
|
|
class RemoteSpec(object):
  """Description of a git remote: its name, fetch URL, and review server."""

  def __init__(self, name, url=None, review=None):
    self.name = name        # remote name, e.g. 'origin'
    self.url = url          # fetch URL; may be None
    self.review = review    # review server; may be None
|
2008-10-21 14:00:00 +00:00
|
|
|
|
Support repo-level pre-upload hook and prep for future hooks.
All repo-level hooks are expected to live in a single project at the
top level of that project. The name of the hooks project is provided
in the manifest.xml. The manifest also lists which hooks are enabled
to make it obvious if a file somehow failed to sync down (or got
deleted).
Before running any hook, we will prompt the user to make sure that it
is OK. A user can deny running the hook, allow once, or allow
"forever" (until hooks change). This tries to keep with the git
spirit of not automatically running anything on the user's computer
that got synced down. Note that individual repo commands can add
always options to avoid these prompts as they see fit (see below for
the 'upload' options).
When hooks are run, they are loaded into the current interpreter (the
one running repo) and their main() function is run. This mechanism is
used (instead of using subprocess) to make it easier to expand to a
richer hook interface in the future. During loading, the
interpreter's sys.path is updated to contain the directory containing
the hooks so that hooks can be split into multiple files.
The upload command has two options that control hook behavior:
- no-verify=False, verify=False (DEFAULT):
If stdout is a tty, can prompt about running upload hooks if needed.
If user denies running hooks, the upload is cancelled. If stdout is
not a tty and we would need to prompt about upload hooks, upload is
cancelled.
- no-verify=False, verify=True:
Always run upload hooks with no prompt.
- no-verify=True, verify=False:
Never run upload hooks, but upload anyway (AKA bypass hooks).
- no-verify=True, verify=True:
Invalid
Sample bit of manifest.xml code for enabling hooks (assumes you have a
project named 'hooks' where hooks are stored):
<repo-hooks in-project="hooks" enabled-list="pre-upload" />
Sample main() function in pre-upload.py in hooks directory:
def main(project_list, **kwargs):
print ('These projects will be uploaded: %s' %
', '.join(project_list))
print ('I am being a good boy and ignoring anything in kwargs\n'
'that I don\'t understand.')
print 'I fail 50% of the time. How flaky.'
if random.random() <= .5:
raise Exception('Pre-upload hook failed. Have a nice day.')
Change-Id: I5cefa2cd5865c72589263cf8e2f152a43c122f70
2011-03-04 19:54:18 +00:00
|
|
|
class RepoHook(object):
  """A RepoHook contains information about a script to run as a hook.

  Hooks are used to run a python script before running an upload (for
  instance, to run presubmit checks).  Eventually, we may have hooks for
  other actions.

  This shouldn't be confused with files in the 'repo/hooks' directory.  Those
  files are copied into each '.git/hooks' folder for each project.  Repo-level
  hooks are associated instead with repo actions.

  Hooks are always python.  When a hook is run, we will load the hook into the
  interpreter and execute its main() function.
  """

  def __init__(self,
               hook_type,
               hooks_project,
               topdir,
               abort_if_user_denies=False):
    """RepoHook constructor.

    Params:
      hook_type: A string representing the type of hook.  This is also used
          to figure out the name of the file containing the hook.  For
          example: 'pre-upload'.
      hooks_project: The project containing the repo hooks.  If you have a
          manifest, this is manifest.repo_hooks_project.  OK if this is None,
          which will make the hook a no-op.
      topdir: Repo's top directory (the one containing the .repo directory).
          Scripts will run with CWD as this directory.  If you have a
          manifest, this is manifest.topdir.
      abort_if_user_denies: If True, we'll throw a HookError() if the user
          doesn't allow us to run the hook.
    """
    self._hook_type = hook_type
    self._hooks_project = hooks_project
    self._topdir = topdir
    self._abort_if_user_denies = abort_if_user_denies

    # Store the full path to the script for convenience.
    if self._hooks_project:
      self._script_fullpath = os.path.join(self._hooks_project.worktree,
                                           self._hook_type + '.py')
    else:
      self._script_fullpath = None

  def _GetHash(self):
    """Return a hash of the contents of the hooks directory.

    We'll just use git to do this.  This hash has the property that if
    anything changes in the directory we will return a different hash.

    SECURITY CONSIDERATION:
      This hash only represents the contents of files in the hook directory,
      not any other files imported or called by hooks.  Changes to imported
      files can change the script behavior without affecting the hash.

    Returns:
      A string representing the hash.  This will always be ASCII so that it
      can be printed to the user easily.
    """
    assert self._hooks_project, "Must have hooks to calculate their hash."

    # We will use the work_git object rather than just calling GetRevisionId().
    # That gives us a hash of the latest checked in version of the files that
    # the user will actually be executing.  Specifically, GetRevisionId()
    # doesn't appear to change even if a user checks out a different version
    # of the hooks repo (via git checkout) nor if a user commits their own revs.
    #
    # NOTE: Local (non-committed) changes will not be factored into this hash.
    # I think this is OK, since we're really only worried about warning the
    # user about upstream changes.
    return self._hooks_project.work_git.rev_parse('HEAD')

  def _GetMustVerb(self):
    """Return 'must' if the hook is required; 'should' if not."""
    if self._abort_if_user_denies:
      return 'must'
    else:
      return 'should'

  def _CheckForHookApproval(self):
    """Check to see whether this hook has been approved.

    We'll look at the hash of all of the hooks.  If this matches the hash
    that the user last approved, we're done.  If it doesn't, we'll ask the
    user about approval.

    Note that we ask permission for each individual hook even though we use
    the hash of all hooks when detecting changes.  We'd like the user to be
    able to approve / deny each hook individually.  We only use the hash of
    all hooks because there is no other easy way to detect changes to local
    imports.

    Returns:
      True if this hook is approved to run; False otherwise.

    Raises:
      HookError: Raised if the user doesn't approve and abort_if_user_denies
          was passed to the constructor.
    """
    hooks_config = self._hooks_project.config
    git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type

    # Get the last hash that the user approved for this hook; may be None.
    old_hash = hooks_config.GetString(git_approval_key)

    # Get the current hash so we can tell if scripts changed since approval.
    new_hash = self._GetHash()

    if old_hash is not None:
      # User previously approved hook and asked not to be prompted again.
      if new_hash == old_hash:
        # Approval matched.  We're done.
        return True
      else:
        # Give the user a reason why we're prompting, since they last told
        # us to "never ask again".
        prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
            self._hook_type)
    else:
      prompt = ''

    # Prompt the user only when stdout is a tty; otherwise we fall through
    # and treat the hook as unapproved ("no").
    if sys.stdout.isatty():
      prompt += ('Repo %s run the script:\n'
                 '  %s\n'
                 '\n'
                 'Do you want to allow this script to run '
                 '(yes/yes-never-ask-again/NO)? ') % (
                 self._GetMustVerb(), self._script_fullpath)
      response = input(prompt).lower()
      print()

      # User is doing a one-time approval.
      if response in ('y', 'yes'):
        return True
      elif response == 'yes-never-ask-again':
        hooks_config.SetString(git_approval_key, new_hash)
        return True

    # For anything else, we'll assume no approval.
    if self._abort_if_user_denies:
      raise HookError('You must allow the %s hook or use --no-verify.' %
                      self._hook_type)

    return False

  def _ExecuteHook(self, **kwargs):
    """Actually execute the given hook.

    This will run the hook's 'main' function in our python interpreter.

    Args:
      kwargs: Keyword arguments to pass to the hook.  These are often
          specific to the hook type.  For instance, pre-upload hooks will
          contain a project_list.
    """
    # Keep sys.path and CWD stashed away so that we can always restore them
    # upon function exit.
    orig_path = os.getcwd()
    orig_syspath = sys.path

    try:
      # Always run hooks with CWD as topdir.
      os.chdir(self._topdir)

      # Put the hook dir as the first item of sys.path so hooks can do
      # relative imports.  We want to replace the repo dir as [0] so
      # hooks can't import repo files.
      sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]

      # Exec, storing global context in the context dict.  We catch exceptions
      # and convert to a HookError w/ just the failing traceback.
      context = {}
      try:
        # NOTE(review): execfile exists only on Python 2; under Python 3
        # this raises NameError -- confirm whether a py3 code path is needed.
        execfile(self._script_fullpath, context)
      except Exception:
        raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
                        traceback.format_exc(), self._hook_type))

      # Running the script should have defined a main() function.
      if 'main' not in context:
        raise HookError('Missing main() in: "%s"' % self._script_fullpath)

      # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
      # We don't actually want hooks to define their main with this argument--
      # it's there to remind them that their hook should always take **kwargs.
      # For instance, a pre-upload hook should be defined like:
      #   def main(project_list, **kwargs):
      #
      # This allows us to later expand the API without breaking old hooks.
      kwargs = kwargs.copy()
      kwargs['hook_should_take_kwargs'] = True

      # Call the main function in the hook.  If the hook should cause the
      # build to fail, it will raise an Exception.  We'll catch that convert
      # to a HookError w/ just the failing traceback.
      try:
        context['main'](**kwargs)
      except Exception:
        raise HookError('%s\nFailed to run main() for %s hook; see traceback '
                        'above.' % (
                        traceback.format_exc(), self._hook_type))
    finally:
      # Restore sys.path and CWD.
      sys.path = orig_syspath
      os.chdir(orig_path)

  def Run(self, user_allows_all_hooks, **kwargs):
    """Run the hook.

    If the hook doesn't exist (because there is no hooks project or because
    this particular hook is not enabled), this is a no-op.

    Args:
      user_allows_all_hooks: If True, we will never prompt about running the
          hook--we'll just assume it's OK to run it.
      kwargs: Keyword arguments to pass to the hook.  These are often
          specific to the hook type.  For instance, pre-upload hooks will
          contain a project_list.

    Raises:
      HookError: If there was a problem finding the hook or the user declined
          to run a required hook (from _CheckForHookApproval).
    """
    # No-op if there is no hooks project or if hook is disabled.
    if ((not self._hooks_project) or
        (self._hook_type not in self._hooks_project.enabled_repo_hooks)):
      return

    # Bail with a nice error if we can't find the hook.
    if not os.path.isfile(self._script_fullpath):
      raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath)

    # Make sure the user is OK with running the hook.
    if (not user_allows_all_hooks) and (not self._CheckForHookApproval()):
      return

    # Run the hook with the same version of python we're using.
    self._ExecuteHook(**kwargs)
|
2008-10-21 14:00:00 +00:00
|
|
|
class Project(object):
|
|
|
|
def __init__(self,
|
|
|
|
manifest,
|
|
|
|
name,
|
|
|
|
remote,
|
|
|
|
gitdir,
|
2013-10-12 00:03:19 +00:00
|
|
|
objdir,
|
2008-10-21 14:00:00 +00:00
|
|
|
worktree,
|
|
|
|
relpath,
|
2009-05-30 01:38:17 +00:00
|
|
|
revisionExpr,
|
2012-02-28 19:53:24 +00:00
|
|
|
revisionId,
|
2012-03-29 03:15:45 +00:00
|
|
|
rebase = True,
|
2012-04-20 21:41:59 +00:00
|
|
|
groups = None,
|
2012-09-29 03:21:57 +00:00
|
|
|
sync_c = False,
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
sync_s = False,
|
2012-11-27 13:25:30 +00:00
|
|
|
clone_depth = None,
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
upstream = None,
|
|
|
|
parent = None,
|
2013-05-06 17:36:24 +00:00
|
|
|
is_derived = False,
|
|
|
|
dest_branch = None):
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
"""Init a Project object.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
manifest: The XmlManifest object.
|
|
|
|
name: The `name` attribute of manifest.xml's project element.
|
|
|
|
remote: RemoteSpec object specifying its remote's properties.
|
|
|
|
gitdir: Absolute path of git directory.
|
2013-10-12 00:03:19 +00:00
|
|
|
objdir: Absolute path of directory to store git objects.
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
worktree: Absolute path of git working tree.
|
|
|
|
relpath: Relative path of git working tree to repo's top directory.
|
|
|
|
revisionExpr: The `revision` attribute of manifest.xml's project element.
|
|
|
|
revisionId: git commit id for checking out.
|
|
|
|
rebase: The `rebase` attribute of manifest.xml's project element.
|
|
|
|
groups: The `groups` attribute of manifest.xml's project element.
|
|
|
|
sync_c: The `sync-c` attribute of manifest.xml's project element.
|
|
|
|
sync_s: The `sync-s` attribute of manifest.xml's project element.
|
|
|
|
upstream: The `upstream` attribute of manifest.xml's project element.
|
|
|
|
parent: The parent Project object.
|
|
|
|
is_derived: False if the project was explicitly defined in the manifest;
|
|
|
|
True if the project is a discovered submodule.
|
2013-05-06 17:36:24 +00:00
|
|
|
dest_branch: The branch to which to push changes for review by default.
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
"""
|
2008-10-21 14:00:00 +00:00
|
|
|
self.manifest = manifest
|
|
|
|
self.name = name
|
|
|
|
self.remote = remote
|
2011-01-10 01:31:57 +00:00
|
|
|
self.gitdir = gitdir.replace('\\', '/')
|
2013-10-12 00:03:19 +00:00
|
|
|
self.objdir = objdir.replace('\\', '/')
|
2011-01-10 21:26:01 +00:00
|
|
|
if worktree:
|
|
|
|
self.worktree = worktree.replace('\\', '/')
|
|
|
|
else:
|
|
|
|
self.worktree = None
|
2008-10-21 14:00:00 +00:00
|
|
|
self.relpath = relpath
|
2009-05-30 01:38:17 +00:00
|
|
|
self.revisionExpr = revisionExpr
|
|
|
|
|
|
|
|
if revisionId is None \
|
|
|
|
and revisionExpr \
|
|
|
|
and IsId(revisionExpr):
|
|
|
|
self.revisionId = revisionExpr
|
|
|
|
else:
|
|
|
|
self.revisionId = revisionId
|
|
|
|
|
2012-02-28 19:53:24 +00:00
|
|
|
self.rebase = rebase
|
2012-03-29 03:15:45 +00:00
|
|
|
self.groups = groups
|
2012-04-20 21:41:59 +00:00
|
|
|
self.sync_c = sync_c
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
self.sync_s = sync_s
|
2012-11-27 13:25:30 +00:00
|
|
|
self.clone_depth = clone_depth
|
2012-09-29 03:21:57 +00:00
|
|
|
self.upstream = upstream
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
self.parent = parent
|
|
|
|
self.is_derived = is_derived
|
|
|
|
self.subprojects = []
|
2012-02-28 19:53:24 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
self.snapshots = {}
|
|
|
|
self.copyfiles = []
|
2014-04-21 22:10:59 +00:00
|
|
|
self.linkfiles = []
|
2012-04-12 20:04:13 +00:00
|
|
|
self.annotations = []
|
2008-10-21 14:00:00 +00:00
|
|
|
self.config = GitConfig.ForRepository(
|
|
|
|
gitdir = self.gitdir,
|
|
|
|
defaults = self.manifest.globalConfig)
|
|
|
|
|
2008-11-04 15:37:10 +00:00
|
|
|
if self.worktree:
|
2013-10-12 00:03:19 +00:00
|
|
|
self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
|
2008-11-04 15:37:10 +00:00
|
|
|
else:
|
|
|
|
self.work_git = None
|
2013-10-12 00:03:19 +00:00
|
|
|
self.bare_git = self._GitGetByExec(self, bare=True, gitdir=gitdir)
|
2009-04-18 01:49:50 +00:00
|
|
|
self.bare_ref = GitRefs(gitdir)
|
2013-10-12 00:03:19 +00:00
|
|
|
self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=objdir)
|
2013-05-06 17:36:24 +00:00
|
|
|
self.dest_branch = dest_branch
|
2008-10-21 14:00:00 +00:00
|
|
|
|
Support repo-level pre-upload hook and prep for future hooks.
All repo-level hooks are expected to live in a single project at the
top level of that project. The name of the hooks project is provided
in the manifest.xml. The manifest also lists which hooks are enabled
to make it obvious if a file somehow failed to sync down (or got
deleted).
Before running any hook, we will prompt the user to make sure that it
is OK. A user can deny running the hook, allow once, or allow
"forever" (until hooks change). This tries to keep with the git
spirit of not automatically running anything on the user's computer
that got synced down. Note that individual repo commands can add
always options to avoid these prompts as they see fit (see below for
the 'upload' options).
When hooks are run, they are loaded into the current interpreter (the
one running repo) and their main() function is run. This mechanism is
used (instead of using subprocess) to make it easier to expand to a
richer hook interface in the future. During loading, the
interpreter's sys.path is updated to contain the directory containing
the hooks so that hooks can be split into multiple files.
The upload command has two options that control hook behavior:
- no-verify=False, verify=False (DEFAULT):
If stdout is a tty, can prompt about running upload hooks if needed.
If user denies running hooks, the upload is cancelled. If stdout is
not a tty and we would need to prompt about upload hooks, upload is
cancelled.
- no-verify=False, verify=True:
Always run upload hooks with no prompt.
- no-verify=True, verify=False:
Never run upload hooks, but upload anyway (AKA bypass hooks).
- no-verify=True, verify=True:
Invalid
Sample bit of manifest.xml code for enabling hooks (assumes you have a
project named 'hooks' where hooks are stored):
<repo-hooks in-project="hooks" enabled-list="pre-upload" />
Sample main() function in pre-upload.py in hooks directory:
def main(project_list, **kwargs):
print ('These projects will be uploaded: %s' %
', '.join(project_list))
print ('I am being a good boy and ignoring anything in kwargs\n'
'that I don\'t understand.')
print 'I fail 50% of the time. How flaky.'
if random.random() <= .5:
raise Exception('Pre-upload hook failed. Have a nice day.')
Change-Id: I5cefa2cd5865c72589263cf8e2f152a43c122f70
2011-03-04 19:54:18 +00:00
|
|
|
# This will be filled in if a project is later identified to be the
|
|
|
|
# project containing repo hooks.
|
|
|
|
self.enabled_repo_hooks = []
|
|
|
|
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
@property
|
|
|
|
def Derived(self):
|
|
|
|
return self.is_derived
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
@property
|
|
|
|
def Exists(self):
|
|
|
|
return os.path.isdir(self.gitdir)
|
|
|
|
|
|
|
|
@property
|
|
|
|
def CurrentBranch(self):
|
|
|
|
"""Obtain the name of the currently checked out branch.
|
|
|
|
The branch name omits the 'refs/heads/' prefix.
|
|
|
|
None is returned if the project is on a detached HEAD.
|
|
|
|
"""
|
2009-04-18 01:43:33 +00:00
|
|
|
b = self.work_git.GetHead()
|
2008-10-21 14:00:00 +00:00
|
|
|
if b.startswith(R_HEADS):
|
|
|
|
return b[len(R_HEADS):]
|
|
|
|
return None
|
|
|
|
|
2009-04-18 22:26:10 +00:00
|
|
|
def IsRebaseInProgress(self):
|
|
|
|
w = self.worktree
|
|
|
|
g = os.path.join(w, '.git')
|
|
|
|
return os.path.exists(os.path.join(g, 'rebase-apply')) \
|
|
|
|
or os.path.exists(os.path.join(g, 'rebase-merge')) \
|
|
|
|
or os.path.exists(os.path.join(w, '.dotest'))
|
2010-06-17 15:55:02 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def IsDirty(self, consider_untracked=True):
|
|
|
|
"""Is the working directory modified in some way?
|
|
|
|
"""
|
|
|
|
self.work_git.update_index('-q',
|
|
|
|
'--unmerged',
|
|
|
|
'--ignore-missing',
|
|
|
|
'--refresh')
|
2012-11-14 03:09:38 +00:00
|
|
|
if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD):
|
2008-10-21 14:00:00 +00:00
|
|
|
return True
|
|
|
|
if self.work_git.DiffZ('diff-files'):
|
|
|
|
return True
|
|
|
|
if consider_untracked and self.work_git.LsOthers():
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
_userident_name = None
|
|
|
|
_userident_email = None
|
|
|
|
|
|
|
|
@property
|
|
|
|
def UserName(self):
|
|
|
|
"""Obtain the user's personal name.
|
|
|
|
"""
|
|
|
|
if self._userident_name is None:
|
|
|
|
self._LoadUserIdentity()
|
|
|
|
return self._userident_name
|
|
|
|
|
|
|
|
@property
|
|
|
|
def UserEmail(self):
|
|
|
|
"""Obtain the user's email address. This is very likely
|
|
|
|
to be their Gerrit login.
|
|
|
|
"""
|
|
|
|
if self._userident_email is None:
|
|
|
|
self._LoadUserIdentity()
|
|
|
|
return self._userident_email
|
|
|
|
|
|
|
|
def _LoadUserIdentity(self):
|
2012-11-14 02:36:51 +00:00
|
|
|
u = self.bare_git.var('GIT_COMMITTER_IDENT')
|
|
|
|
m = re.compile("^(.*) <([^>]*)> ").match(u)
|
|
|
|
if m:
|
|
|
|
self._userident_name = m.group(1)
|
|
|
|
self._userident_email = m.group(2)
|
|
|
|
else:
|
|
|
|
self._userident_name = ''
|
|
|
|
self._userident_email = ''
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
def GetRemote(self, name):
|
|
|
|
"""Get the configuration for a single remote.
|
|
|
|
"""
|
|
|
|
return self.config.GetRemote(name)
|
|
|
|
|
|
|
|
def GetBranch(self, name):
|
|
|
|
"""Get the configuration for a single branch.
|
|
|
|
"""
|
|
|
|
return self.config.GetBranch(name)
|
|
|
|
|
2009-04-10 23:02:48 +00:00
|
|
|
def GetBranches(self):
|
|
|
|
"""Get all existing local branches.
|
|
|
|
"""
|
|
|
|
current = self.CurrentBranch
|
2012-09-24 03:15:13 +00:00
|
|
|
all_refs = self._allrefs
|
2009-04-10 23:02:48 +00:00
|
|
|
heads = {}
|
|
|
|
|
2013-03-01 13:44:38 +00:00
|
|
|
for name, ref_id in all_refs.items():
|
2009-04-10 23:02:48 +00:00
|
|
|
if name.startswith(R_HEADS):
|
|
|
|
name = name[len(R_HEADS):]
|
|
|
|
b = self.GetBranch(name)
|
|
|
|
b.current = name == current
|
|
|
|
b.published = None
|
2012-09-24 03:15:13 +00:00
|
|
|
b.revision = ref_id
|
2009-04-10 23:02:48 +00:00
|
|
|
heads[name] = b
|
|
|
|
|
2013-03-01 13:44:38 +00:00
|
|
|
for name, ref_id in all_refs.items():
|
2009-04-10 23:02:48 +00:00
|
|
|
if name.startswith(R_PUB):
|
|
|
|
name = name[len(R_PUB):]
|
|
|
|
b = heads.get(name)
|
|
|
|
if b:
|
2012-09-24 03:15:13 +00:00
|
|
|
b.published = ref_id
|
2009-04-10 23:02:48 +00:00
|
|
|
|
|
|
|
return heads
|
|
|
|
|
2012-03-29 03:15:45 +00:00
|
|
|
def MatchesGroups(self, manifest_groups):
|
|
|
|
"""Returns true if the manifest groups specified at init should cause
|
|
|
|
this project to be synced.
|
|
|
|
Prefixing a manifest group with "-" inverts the meaning of a group.
|
2012-08-13 20:11:18 +00:00
|
|
|
All projects are implicitly labelled with "all".
|
2012-03-29 03:15:45 +00:00
|
|
|
|
2012-04-16 17:36:08 +00:00
|
|
|
labels are resolved in order. In the example case of
|
2012-08-13 20:11:18 +00:00
|
|
|
project_groups: "all,group1,group2"
|
2012-04-16 17:36:08 +00:00
|
|
|
manifest_groups: "-group1,group2"
|
|
|
|
the project will be matched.
|
Special handling for manifest group "default"
Change Details:
* Make "default" a special manifest group that matches any project that
does not have the special project group "notdefault"
* Use "default" instead of "all,-notdefault" when user does not specify
manifest group
* Expand -g option help to include example usage of manifest groups
Change Benefits:
* Allow a more intuitive and expressive manifest groups specification:
* "default" instead of "all,-notdefault"
* "default,foo" instead of "all,-notdefault,foo"
* "default,-foo" instead of "all,-notdefault,-foo"
* "foo,-default" which has no equivalent
* Default manifest groups behavior can be restored by the command
'repo init -g default'. This is significantly more intuitive than the
current equivalent command 'repo init -g all,-notdefault'.
Change-Id: I6d0673791d64a650110a917c248bcebb23b279d3
2012-11-15 00:19:00 +00:00
|
|
|
|
|
|
|
The special manifest group "default" will match any project that
|
|
|
|
does not have the special project group "notdefault"
|
2012-04-16 17:36:08 +00:00
|
|
|
"""
|
Special handling for manifest group "default"
Change Details:
* Make "default" a special manifest group that matches any project that
does not have the special project group "notdefault"
* Use "default" instead of "all,-notdefault" when user does not specify
manifest group
* Expand -g option help to include example usage of manifest groups
Change Benefits:
* Allow a more intuitive and expressive manifest groups specification:
* "default" instead of "all,-notdefault"
* "default,foo" instead of "all,-notdefault,foo"
* "default,-foo" instead of "all,-notdefault,-foo"
* "foo,-default" which has no equivalent
* Default manifest groups behavior can be restored by the command
'repo init -g default'. This is significantly more intuitive than the
current equivalent command 'repo init -g all,-notdefault'.
Change-Id: I6d0673791d64a650110a917c248bcebb23b279d3
2012-11-15 00:19:00 +00:00
|
|
|
expanded_manifest_groups = manifest_groups or ['default']
|
2012-08-13 20:11:18 +00:00
|
|
|
expanded_project_groups = ['all'] + (self.groups or [])
|
Special handling for manifest group "default"
Change Details:
* Make "default" a special manifest group that matches any project that
does not have the special project group "notdefault"
* Use "default" instead of "all,-notdefault" when user does not specify
manifest group
* Expand -g option help to include example usage of manifest groups
Change Benefits:
* Allow a more intuitive and expressive manifest groups specification:
* "default" instead of "all,-notdefault"
* "default,foo" instead of "all,-notdefault,foo"
* "default,-foo" instead of "all,-notdefault,-foo"
* "foo,-default" which has no equivalent
* Default manifest groups behavior can be restored by the command
'repo init -g default'. This is significantly more intuitive than the
current equivalent command 'repo init -g all,-notdefault'.
Change-Id: I6d0673791d64a650110a917c248bcebb23b279d3
2012-11-15 00:19:00 +00:00
|
|
|
if not 'notdefault' in expanded_project_groups:
|
|
|
|
expanded_project_groups += ['default']
|
2012-08-13 20:11:18 +00:00
|
|
|
|
2012-04-16 17:36:08 +00:00
|
|
|
matched = False
|
2012-08-13 20:11:18 +00:00
|
|
|
for group in expanded_manifest_groups:
|
|
|
|
if group.startswith('-') and group[1:] in expanded_project_groups:
|
2012-04-16 17:36:08 +00:00
|
|
|
matched = False
|
2012-08-13 20:11:18 +00:00
|
|
|
elif group in expanded_project_groups:
|
2012-04-16 17:36:08 +00:00
|
|
|
matched = True
|
|
|
|
|
|
|
|
return matched
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
## Status Display ##
|
|
|
|
|
2010-04-08 15:28:59 +00:00
|
|
|
def HasChanges(self):
|
|
|
|
"""Returns true if there are uncommitted changes.
|
|
|
|
"""
|
|
|
|
self.work_git.update_index('-q',
|
|
|
|
'--unmerged',
|
|
|
|
'--ignore-missing',
|
|
|
|
'--refresh')
|
|
|
|
if self.IsRebaseInProgress():
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.work_git.DiffZ('diff-index', '--cached', HEAD):
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.work_git.DiffZ('diff-files'):
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.work_git.LsOthers():
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2011-03-31 10:33:34 +00:00
|
|
|
def PrintWorkTreeStatus(self, output_redir=None):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""Prints the status of the repository to stdout.
|
2011-03-31 10:33:34 +00:00
|
|
|
|
|
|
|
Args:
|
|
|
|
output: If specified, redirect the output to this object.
|
2008-10-21 14:00:00 +00:00
|
|
|
"""
|
|
|
|
if not os.path.isdir(self.worktree):
|
2011-03-31 10:33:34 +00:00
|
|
|
if output_redir == None:
|
|
|
|
output_redir = sys.stdout
|
2012-11-02 05:59:27 +00:00
|
|
|
print(file=output_redir)
|
|
|
|
print('project %s/' % self.relpath, file=output_redir)
|
|
|
|
print(' missing (run "repo sync")', file=output_redir)
|
2008-10-21 14:00:00 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
self.work_git.update_index('-q',
|
|
|
|
'--unmerged',
|
|
|
|
'--ignore-missing',
|
|
|
|
'--refresh')
|
2009-04-18 22:26:10 +00:00
|
|
|
rb = self.IsRebaseInProgress()
|
2008-10-21 14:00:00 +00:00
|
|
|
di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
|
|
|
|
df = self.work_git.DiffZ('diff-files')
|
|
|
|
do = self.work_git.LsOthers()
|
2012-01-25 09:51:12 +00:00
|
|
|
if not rb and not di and not df and not do and not self.CurrentBranch:
|
2009-04-11 00:41:44 +00:00
|
|
|
return 'CLEAN'
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
out = StatusColoring(self.config)
|
2011-03-31 10:33:34 +00:00
|
|
|
if not output_redir == None:
|
|
|
|
out.redirect(output_redir)
|
2008-10-21 14:00:00 +00:00
|
|
|
out.project('project %-40s', self.relpath + '/')
|
|
|
|
|
|
|
|
branch = self.CurrentBranch
|
|
|
|
if branch is None:
|
|
|
|
out.nobranch('(*** NO BRANCH ***)')
|
|
|
|
else:
|
|
|
|
out.branch('branch %s', branch)
|
|
|
|
out.nl()
|
|
|
|
|
2009-04-18 22:26:10 +00:00
|
|
|
if rb:
|
|
|
|
out.important('prior sync failed; rebase still in progress')
|
|
|
|
out.nl()
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
paths = list()
|
|
|
|
paths.extend(di.keys())
|
|
|
|
paths.extend(df.keys())
|
|
|
|
paths.extend(do)
|
|
|
|
|
2013-03-01 13:44:38 +00:00
|
|
|
for p in sorted(set(paths)):
|
2012-10-11 07:44:48 +00:00
|
|
|
try:
|
|
|
|
i = di[p]
|
|
|
|
except KeyError:
|
|
|
|
i = None
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2012-10-11 07:44:48 +00:00
|
|
|
try:
|
|
|
|
f = df[p]
|
|
|
|
except KeyError:
|
|
|
|
f = None
|
2010-06-17 15:55:02 +00:00
|
|
|
|
2012-10-11 07:44:48 +00:00
|
|
|
if i:
|
|
|
|
i_status = i.status.upper()
|
|
|
|
else:
|
|
|
|
i_status = '-'
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2012-10-11 07:44:48 +00:00
|
|
|
if f:
|
|
|
|
f_status = f.status.lower()
|
|
|
|
else:
|
|
|
|
f_status = '-'
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
if i and i.src_path:
|
2009-03-03 21:49:48 +00:00
|
|
|
line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
|
2008-10-21 14:00:00 +00:00
|
|
|
i.src_path, p, i.level)
|
|
|
|
else:
|
|
|
|
line = ' %s%s\t%s' % (i_status, f_status, p)
|
|
|
|
|
|
|
|
if i and not f:
|
|
|
|
out.added('%s', line)
|
|
|
|
elif (i and f) or (not i and f):
|
|
|
|
out.changed('%s', line)
|
|
|
|
elif not i and not f:
|
|
|
|
out.untracked('%s', line)
|
|
|
|
else:
|
|
|
|
out.write('%s', line)
|
|
|
|
out.nl()
|
2011-03-31 10:33:34 +00:00
|
|
|
|
2009-04-11 00:41:44 +00:00
|
|
|
return 'DIRTY'
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2012-03-28 11:49:58 +00:00
|
|
|
def PrintWorkTreeDiff(self, absolute_paths=False):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""Prints the status of the repository to stdout.
|
|
|
|
"""
|
|
|
|
out = DiffColoring(self.config)
|
|
|
|
cmd = ['diff']
|
|
|
|
if out.is_on:
|
|
|
|
cmd.append('--color')
|
|
|
|
cmd.append(HEAD)
|
2012-03-28 11:49:58 +00:00
|
|
|
if absolute_paths:
|
|
|
|
cmd.append('--src-prefix=a/%s/' % self.relpath)
|
|
|
|
cmd.append('--dst-prefix=b/%s/' % self.relpath)
|
2008-10-21 14:00:00 +00:00
|
|
|
cmd.append('--')
|
|
|
|
p = GitCommand(self,
|
|
|
|
cmd,
|
|
|
|
capture_stdout = True,
|
|
|
|
capture_stderr = True)
|
|
|
|
has_diff = False
|
|
|
|
for line in p.process.stdout:
|
|
|
|
if not has_diff:
|
|
|
|
out.nl()
|
|
|
|
out.project('project %s/' % self.relpath)
|
|
|
|
out.nl()
|
|
|
|
has_diff = True
|
2012-11-02 05:59:27 +00:00
|
|
|
print(line[:-1])
|
2008-10-21 14:00:00 +00:00
|
|
|
p.Wait()
|
|
|
|
|
|
|
|
|
|
|
|
## Publish / Upload ##
|
|
|
|
|
2012-09-24 03:15:13 +00:00
|
|
|
def WasPublished(self, branch, all_refs=None):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""Was the branch published (uploaded) for code review?
|
|
|
|
If so, returns the SHA-1 hash of the last published
|
|
|
|
state for the branch.
|
|
|
|
"""
|
2009-04-18 03:58:02 +00:00
|
|
|
key = R_PUB + branch
|
2012-09-24 03:15:13 +00:00
|
|
|
if all_refs is None:
|
2009-04-18 03:58:02 +00:00
|
|
|
try:
|
|
|
|
return self.bare_git.rev_parse(key)
|
|
|
|
except GitError:
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
try:
|
2012-09-24 03:15:13 +00:00
|
|
|
return all_refs[key]
|
2009-04-18 03:58:02 +00:00
|
|
|
except KeyError:
|
|
|
|
return None
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2012-09-24 03:15:13 +00:00
|
|
|
def CleanPublishedCache(self, all_refs=None):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""Prunes any stale published refs.
|
|
|
|
"""
|
2012-09-24 03:15:13 +00:00
|
|
|
if all_refs is None:
|
|
|
|
all_refs = self._allrefs
|
2008-10-21 14:00:00 +00:00
|
|
|
heads = set()
|
|
|
|
canrm = {}
|
2013-03-01 13:44:38 +00:00
|
|
|
for name, ref_id in all_refs.items():
|
2008-10-21 14:00:00 +00:00
|
|
|
if name.startswith(R_HEADS):
|
|
|
|
heads.add(name)
|
|
|
|
elif name.startswith(R_PUB):
|
2012-09-24 03:15:13 +00:00
|
|
|
canrm[name] = ref_id
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2013-03-01 13:44:38 +00:00
|
|
|
for name, ref_id in canrm.items():
|
2008-10-21 14:00:00 +00:00
|
|
|
n = name[len(R_PUB):]
|
|
|
|
if R_HEADS + n not in heads:
|
2012-09-24 03:15:13 +00:00
|
|
|
self.bare_git.DeleteRef(name, ref_id)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2011-05-26 17:34:11 +00:00
|
|
|
def GetUploadableBranches(self, selected_branch=None):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""List any branches which can be uploaded for review.
|
|
|
|
"""
|
|
|
|
heads = {}
|
|
|
|
pubed = {}
|
|
|
|
|
2013-03-01 13:44:38 +00:00
|
|
|
for name, ref_id in self._allrefs.items():
|
2008-10-21 14:00:00 +00:00
|
|
|
if name.startswith(R_HEADS):
|
2012-09-24 03:15:13 +00:00
|
|
|
heads[name[len(R_HEADS):]] = ref_id
|
2008-10-21 14:00:00 +00:00
|
|
|
elif name.startswith(R_PUB):
|
2012-09-24 03:15:13 +00:00
|
|
|
pubed[name[len(R_PUB):]] = ref_id
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
ready = []
|
2013-03-01 13:44:38 +00:00
|
|
|
for branch, ref_id in heads.items():
|
2012-09-24 03:15:13 +00:00
|
|
|
if branch in pubed and pubed[branch] == ref_id:
|
2008-10-21 14:00:00 +00:00
|
|
|
continue
|
2011-05-26 17:34:11 +00:00
|
|
|
if selected_branch and branch != selected_branch:
|
|
|
|
continue
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2008-11-12 01:03:13 +00:00
|
|
|
rb = self.GetUploadableBranch(branch)
|
|
|
|
if rb:
|
|
|
|
ready.append(rb)
|
2008-10-21 14:00:00 +00:00
|
|
|
return ready
|
|
|
|
|
2008-11-12 01:03:13 +00:00
|
|
|
def GetUploadableBranch(self, branch_name):
|
|
|
|
"""Get a single uploadable branch, or None.
|
|
|
|
"""
|
|
|
|
branch = self.GetBranch(branch_name)
|
|
|
|
base = branch.LocalMerge
|
|
|
|
if branch.LocalMerge:
|
|
|
|
rb = ReviewableBranch(self, branch, base)
|
|
|
|
if rb.commits:
|
|
|
|
return rb
|
|
|
|
return None
|
|
|
|
|
2010-07-15 23:52:42 +00:00
|
|
|
  def UploadForReview(self, branch=None,
                      people=([],[]),
                      auto_topic=False,
                      draft=False,
                      dest_branch=None):
    """Uploads the named branch for code review.

    Args:
      branch: Name of the local branch to upload; defaults to the
        branch currently checked out.
      people: Tuple of (reviewer, cc) email-address lists to attach
        to the review.
      auto_topic: If True, append '/<branch name>' to the push refspec
        so the review server groups the changes under a topic.
      draft: If True, push to refs/drafts/* instead of refs/for/*.
      dest_branch: Remote branch to target; defaults to the project's
        dest_branch, then to the branch's configured merge branch.

    Raises:
      GitError: Not currently on a branch, the branch does not track
        a remote, or the remote has no review URL configured.
      UploadError: Review is not configured for this user, or the
        push itself failed.
    """
    if branch is None:
      branch = self.CurrentBranch
    if branch is None:
      raise GitError('not currently on a branch')

    branch = self.GetBranch(branch)
    if not branch.LocalMerge:
      raise GitError('branch %s does not track a remote' % branch.name)
    if not branch.remote.review:
      raise GitError('remote %s has no review url' % branch.remote.name)

    # Resolve the destination branch: explicit arg, then project
    # default, then the branch's own merge target.
    if dest_branch is None:
      dest_branch = self.dest_branch
    if dest_branch is None:
      dest_branch = branch.merge
    if not dest_branch.startswith(R_HEADS):
      dest_branch = R_HEADS + dest_branch

    # Persist the project name on the remote if it was never recorded.
    if not branch.remote.projectname:
      branch.remote.projectname = self.name
      branch.remote.Save()

    url = branch.remote.ReviewUrl(self.UserEmail)
    if url is None:
      raise UploadError('review not configured')
    cmd = ['push']

    if url.startswith('ssh://'):
      # Over ssh, reviewers/cc are passed as receive-pack options.
      rp = ['gerrit receive-pack']
      for e in people[0]:
        rp.append('--reviewer=%s' % sq(e))
      for e in people[1]:
        rp.append('--cc=%s' % sq(e))
      cmd.append('--receive-pack=%s' % " ".join(rp))

    cmd.append(url)

    if dest_branch.startswith(R_HEADS):
      dest_branch = dest_branch[len(R_HEADS):]

    upload_type = 'for'
    if draft:
      upload_type = 'drafts'

    ref_spec = '%s:refs/%s/%s' % (R_HEADS + branch.name, upload_type,
                                  dest_branch)
    if auto_topic:
      ref_spec = ref_spec + '/' + branch.name
    if not url.startswith('ssh://'):
      # Over http(s), reviewers/cc are encoded in the refspec itself.
      rp = ['r=%s' % p for p in people[0]] + \
           ['cc=%s' % p for p in people[1]]
      if rp:
        ref_spec = ref_spec + '%' + ','.join(rp)
    cmd.append(ref_spec)

    if GitCommand(self, cmd, bare = True).Wait() != 0:
      raise UploadError('Upload failed')

    # Record the uploaded state so we can tell later what was published.
    msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
    self.bare_git.UpdateRef(R_PUB + branch.name,
                            R_HEADS + branch.name,
                            message = msg)
|
|
|
|
|
|
|
|
|
|
|
|
## Sync ##
|
|
|
|
|
2013-10-16 09:02:35 +00:00
|
|
|
def _ExtractArchive(self, tarpath, path=None):
|
|
|
|
"""Extract the given tar on its current location
|
|
|
|
|
|
|
|
Args:
|
|
|
|
- tarpath: The path to the actual tar file
|
|
|
|
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
with tarfile.open(tarpath, 'r') as tar:
|
|
|
|
tar.extractall(path=path)
|
|
|
|
return True
|
|
|
|
except (IOError, tarfile.TarError) as e:
|
|
|
|
print("error: Cannot extract archive %s: "
|
|
|
|
"%s" % (tarpath, str(e)), file=sys.stderr)
|
|
|
|
return False
|
|
|
|
|
2012-03-14 22:36:59 +00:00
|
|
|
  def Sync_NetworkHalf(self,
      quiet=False,
      is_new=None,
      current_branch_only=False,
      clone_bundle=True,
      no_tags=False,
      archive=False):
    """Perform only the network IO portion of the sync process.
       Local working directory/branch state is not affected.

    Args:
      quiet: Suppress progress output from the fetch.
      is_new: Whether this is a fresh clone; auto-detected when None.
      current_branch_only: Fetch only the current branch (may be forced
        on by sync-c settings in the project or manifest defaults).
      clone_bundle: Try a /clone.bundle download before fetching.
      no_tags: Do not fetch tags.
      archive: Download a tar archive instead of a git repository
        (not supported over http/https, nor for meta projects).

    Returns:
      True on success, False on failure.
    """
    if archive and not isinstance(self, MetaProject):
      if self.remote.url.startswith(('http://', 'https://')):
        print("error: %s: Cannot fetch archives from http/https "
              "remotes." % self.name, file=sys.stderr)
        return False

      # Derive a flat archive filename from the project's path.
      name = self.relpath.replace('\\', '/')
      name = name.replace('/', '_')
      tarpath = '%s.tar' % name
      topdir = self.manifest.topdir

      try:
        self._FetchArchive(tarpath, cwd=topdir)
      except GitError as e:
        print('error: %s' % str(e), file=sys.stderr)
        return False

      # From now on, we only need absolute tarpath
      tarpath = os.path.join(topdir, tarpath)

      if not self._ExtractArchive(tarpath, path=topdir):
        return False
      try:
        os.remove(tarpath)
      except OSError as e:
        print("warn: Cannot remove archive %s: "
              "%s" % (tarpath, str(e)), file=sys.stderr)
      self._CopyAndLinkFiles()
      return True

    if is_new is None:
      is_new = not self.Exists
    if is_new:
      self._InitGitDir()
    else:
      self._UpdateHooks()
    self._InitRemote()

    # A fresh clone may have been seeded from a local mirror via the
    # objects/info/alternates file; read the alternate object dir if so.
    if is_new:
      alt = os.path.join(self.gitdir, 'objects/info/alternates')
      try:
        fd = open(alt, 'rb')
        try:
          alt_dir = fd.readline().rstrip()
        finally:
          fd.close()
      except IOError:
        alt_dir = None
    else:
      alt_dir = None

    # A clone bundle can pre-seed the object store; if it applied, the
    # repository is no longer "new" for fetch purposes.
    if clone_bundle \
    and alt_dir is None \
    and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
      is_new = False

    if not current_branch_only:
      if self.sync_c:
        current_branch_only = True
      elif not self.manifest._loaded:
        # Manifest cannot check defaults until it syncs.
        current_branch_only = False
      elif self.manifest.default.sync_c:
        current_branch_only = True

    # Skip the fetch entirely when the pinned revision is a SHA-1 we
    # already have locally.
    has_sha1 = ID_RE.match(self.revisionExpr) and self._CheckForSha1()
    if (not has_sha1  # Need to fetch since we don't already have this revision
        and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
                                  current_branch_only=current_branch_only,
                                  no_tags=no_tags)):
      return False

    if self.worktree:
      self._InitMRef()
    else:
      self._InitMirrorHead()
      try:
        os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
      except OSError:
        pass
    return True
|
2008-11-03 18:32:09 +00:00
|
|
|
|
|
|
|
  def PostRepoUpgrade(self):
    """Refresh hook scripts after the repo tool itself is upgraded."""
    self._InitHooks()
|
|
|
|
|
2014-04-21 22:10:59 +00:00
|
|
|
def _CopyAndLinkFiles(self):
|
2012-09-24 03:15:13 +00:00
|
|
|
for copyfile in self.copyfiles:
|
|
|
|
copyfile._Copy()
|
2014-04-21 22:10:59 +00:00
|
|
|
for linkfile in self.linkfiles:
|
|
|
|
linkfile._Link()
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2014-01-09 15:21:37 +00:00
|
|
|
def GetCommitRevisionId(self):
|
|
|
|
"""Get revisionId of a commit.
|
|
|
|
|
|
|
|
Use this method instead of GetRevisionId to get the id of the commit rather
|
|
|
|
than the id of the current git object (for example, a tag)
|
|
|
|
|
|
|
|
"""
|
|
|
|
if not self.revisionExpr.startswith(R_TAGS):
|
|
|
|
return self.GetRevisionId(self._allrefs)
|
|
|
|
|
|
|
|
try:
|
|
|
|
return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
|
|
|
|
except GitError:
|
|
|
|
raise ManifestInvalidRevisionError(
|
|
|
|
'revision %s in %s not found' % (self.revisionExpr,
|
|
|
|
self.name))
|
|
|
|
|
2012-09-24 03:15:13 +00:00
|
|
|
def GetRevisionId(self, all_refs=None):
|
2009-05-30 01:38:17 +00:00
|
|
|
if self.revisionId:
|
|
|
|
return self.revisionId
|
|
|
|
|
|
|
|
rem = self.GetRemote(self.remote.name)
|
|
|
|
rev = rem.ToLocal(self.revisionExpr)
|
|
|
|
|
2012-09-24 03:15:13 +00:00
|
|
|
if all_refs is not None and rev in all_refs:
|
|
|
|
return all_refs[rev]
|
2009-05-30 01:38:17 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
return self.bare_git.rev_parse('--verify', '%s^0' % rev)
|
|
|
|
except GitError:
|
|
|
|
raise ManifestInvalidRevisionError(
|
|
|
|
'revision %s in %s not found' % (self.revisionExpr,
|
|
|
|
self.name))
|
|
|
|
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
def Sync_LocalHalf(self, syncbuf):
|
2008-10-21 14:00:00 +00:00
|
|
|
"""Perform only the local IO portion of the sync process.
|
|
|
|
Network access is not required.
|
|
|
|
"""
|
2013-10-12 00:03:19 +00:00
|
|
|
self._InitWorkTree()
|
2012-09-24 03:15:13 +00:00
|
|
|
all_refs = self.bare_ref.all
|
|
|
|
self.CleanPublishedCache(all_refs)
|
|
|
|
revid = self.GetRevisionId(all_refs)
|
2011-03-08 20:14:41 +00:00
|
|
|
|
2012-10-25 03:23:11 +00:00
|
|
|
def _doff():
|
|
|
|
self._FastForward(revid)
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
2012-10-25 03:23:11 +00:00
|
|
|
|
2009-04-18 03:58:02 +00:00
|
|
|
head = self.work_git.GetHead()
|
|
|
|
if head.startswith(R_HEADS):
|
|
|
|
branch = head[len(R_HEADS):]
|
|
|
|
try:
|
2012-09-24 03:15:13 +00:00
|
|
|
head = all_refs[head]
|
2009-04-18 03:58:02 +00:00
|
|
|
except KeyError:
|
|
|
|
head = None
|
|
|
|
else:
|
|
|
|
branch = None
|
2008-10-21 14:00:00 +00:00
|
|
|
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
if branch is None or syncbuf.detach_head:
|
2008-10-21 14:00:00 +00:00
|
|
|
# Currently on a detached HEAD. The user is assumed to
|
|
|
|
# not have any local modifications worth worrying about.
|
|
|
|
#
|
2009-04-18 22:26:10 +00:00
|
|
|
if self.IsRebaseInProgress():
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.fail(self, _PriorSyncFailedError())
|
|
|
|
return
|
|
|
|
|
2009-04-18 03:58:02 +00:00
|
|
|
if head == revid:
|
|
|
|
# No changes; don't do anything further.
|
2012-06-07 15:11:42 +00:00
|
|
|
# Except if the head needs to be detached
|
2009-04-18 03:58:02 +00:00
|
|
|
#
|
2012-06-07 15:11:42 +00:00
|
|
|
if not syncbuf.detach_head:
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
lost = self._revlist(not_rev(revid), HEAD)
|
|
|
|
if lost:
|
|
|
|
syncbuf.info(self, "discarding %d commits", len(lost))
|
2009-04-18 03:58:02 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
try:
|
2009-05-30 01:38:17 +00:00
|
|
|
self._Checkout(revid, quiet=True)
|
2012-09-09 22:37:57 +00:00
|
|
|
except GitError as e:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.fail(self, e)
|
|
|
|
return
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-04-18 03:58:02 +00:00
|
|
|
if head == revid:
|
|
|
|
# No changes; don't do anything further.
|
|
|
|
#
|
|
|
|
return
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
branch = self.GetBranch(branch)
|
|
|
|
|
2009-05-30 01:38:17 +00:00
|
|
|
if not branch.LocalMerge:
|
2008-10-21 14:00:00 +00:00
|
|
|
# The current branch has no tracking configuration.
|
2011-08-30 17:52:33 +00:00
|
|
|
# Jump off it to a detached HEAD.
|
2008-10-21 14:00:00 +00:00
|
|
|
#
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.info(self,
|
|
|
|
"leaving %s; does not track upstream",
|
|
|
|
branch.name)
|
2008-10-21 14:00:00 +00:00
|
|
|
try:
|
2009-05-30 01:38:17 +00:00
|
|
|
self._Checkout(revid, quiet=True)
|
2012-09-09 22:37:57 +00:00
|
|
|
except GitError as e:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.fail(self, e)
|
|
|
|
return
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:38:17 +00:00
|
|
|
upstream_gain = self._revlist(not_rev(HEAD), revid)
|
2012-09-24 03:15:13 +00:00
|
|
|
pub = self.WasPublished(branch.name, all_refs)
|
2008-10-21 14:00:00 +00:00
|
|
|
if pub:
|
2009-05-30 01:38:17 +00:00
|
|
|
not_merged = self._revlist(not_rev(revid), pub)
|
2008-10-21 14:00:00 +00:00
|
|
|
if not_merged:
|
|
|
|
if upstream_gain:
|
|
|
|
# The user has published this branch and some of those
|
|
|
|
# commits are not yet merged upstream. We do not want
|
|
|
|
# to rewrite the published commits so we punt.
|
|
|
|
#
|
2010-03-02 20:38:03 +00:00
|
|
|
syncbuf.fail(self,
|
|
|
|
"branch %s is published (but not merged) and is now %d commits behind"
|
|
|
|
% (branch.name, len(upstream_gain)))
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
return
|
2009-04-21 15:26:32 +00:00
|
|
|
elif pub == head:
|
|
|
|
# All published commits are merged, and thus we are a
|
|
|
|
# strict subset. We can fast-forward safely.
|
2008-10-30 18:03:00 +00:00
|
|
|
#
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.later1(self, _doff)
|
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:28:25 +00:00
|
|
|
# Examine the local commits not in the remote. Find the
|
|
|
|
# last one attributed to this user, if any.
|
|
|
|
#
|
2009-05-30 01:38:17 +00:00
|
|
|
local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
|
2009-05-30 01:28:25 +00:00
|
|
|
last_mine = None
|
|
|
|
cnt_mine = 0
|
|
|
|
for commit in local_changes:
|
2013-11-19 13:16:29 +00:00
|
|
|
commit_id, committer_email = commit.decode('utf-8').split(' ', 1)
|
2009-05-30 01:28:25 +00:00
|
|
|
if committer_email == self.UserEmail:
|
|
|
|
last_mine = commit_id
|
|
|
|
cnt_mine += 1
|
|
|
|
|
2009-06-03 18:09:12 +00:00
|
|
|
if not upstream_gain and cnt_mine == len(local_changes):
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
if self.IsDirty(consider_untracked=False):
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.fail(self, _DirtyError())
|
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:38:17 +00:00
|
|
|
# If the upstream switched on us, warn the user.
|
|
|
|
#
|
|
|
|
if branch.merge != self.revisionExpr:
|
|
|
|
if branch.merge and self.revisionExpr:
|
|
|
|
syncbuf.info(self,
|
|
|
|
'manifest switched %s...%s',
|
|
|
|
branch.merge,
|
|
|
|
self.revisionExpr)
|
|
|
|
elif branch.merge:
|
|
|
|
syncbuf.info(self,
|
|
|
|
'manifest no longer tracks %s',
|
|
|
|
branch.merge)
|
|
|
|
|
2009-05-30 01:28:25 +00:00
|
|
|
if cnt_mine < len(local_changes):
|
2008-10-21 14:00:00 +00:00
|
|
|
# Upstream rebased. Not everything in HEAD
|
2009-05-30 01:28:25 +00:00
|
|
|
# was created by this user.
|
2008-10-21 14:00:00 +00:00
|
|
|
#
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.info(self,
|
|
|
|
"discarding %d commits removed from upstream",
|
2009-05-30 01:28:25 +00:00
|
|
|
len(local_changes) - cnt_mine)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:38:17 +00:00
|
|
|
branch.remote = self.GetRemote(self.remote.name)
|
2012-03-20 20:45:00 +00:00
|
|
|
if not ID_RE.match(self.revisionExpr):
|
|
|
|
# in case of manifest sync the revisionExpr might be a SHA1
|
|
|
|
branch.merge = self.revisionExpr
|
2008-10-21 14:00:00 +00:00
|
|
|
branch.Save()
|
|
|
|
|
2012-02-28 19:53:24 +00:00
|
|
|
if cnt_mine > 0 and self.rebase:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
def _dorebase():
|
2009-05-30 01:38:17 +00:00
|
|
|
self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.later2(self, _dorebase)
|
2009-05-30 01:28:25 +00:00
|
|
|
elif local_changes:
|
2008-10-21 14:00:00 +00:00
|
|
|
try:
|
2009-05-30 01:38:17 +00:00
|
|
|
self._ResetHard(revid)
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
2012-09-09 22:37:57 +00:00
|
|
|
except GitError as e:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.fail(self, e)
|
|
|
|
return
|
2008-10-21 14:00:00 +00:00
|
|
|
else:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
syncbuf.later1(self, _doff)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-03-05 18:32:38 +00:00
|
|
|
def AddCopyFile(self, src, dest, absdest):
  """Queue a pending file copy for this project.

  dest/absdest are already absolute paths; src is relative to the
  project worktree, so it is resolved here before being recorded.
  """
  source_abs = os.path.join(self.worktree, src)
  self.copyfiles.append(_CopyFile(src, dest, source_abs, absdest))
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2014-04-21 22:10:59 +00:00
|
|
|
def AddLinkFile(self, src, dest, absdest):
  """Queue a pending symlink for this project.

  dest/absdest are already absolute paths; src is relative to the
  project worktree, so it is resolved here before being recorded.
  """
  source_abs = os.path.join(self.worktree, src)
  self.linkfiles.append(_LinkFile(src, dest, source_abs, absdest))
|
|
|
|
|
2012-04-12 20:04:13 +00:00
|
|
|
def AddAnnotation(self, name, value, keep):
  """Record a name/value annotation (with its keep flag) on this project."""
  annotation = _Annotation(name, value, keep)
  self.annotations.append(annotation)
|
|
|
|
|
2008-10-23 18:58:52 +00:00
|
|
|
def DownloadPatchSet(self, change_id, patch_id):
  """Download a single patch set of a single change to FETCH_HEAD.

  Returns a DownloadedChange describing the fetched patch set, or None
  if the fetch failed.
  """
  remote = self.GetRemote(self.remote.name)

  # Gerrit publishes patch sets under refs/changes/<last-two-digits>/<change>/<patch>.
  change_ref = 'refs/changes/%2.2d/%d/%d' % (change_id % 100,
                                             change_id,
                                             patch_id)
  cmd = ['fetch', remote.name, change_ref]
  if GitCommand(self, cmd, bare=True).Wait() != 0:
    return None
  return DownloadedChange(self,
                          self.GetRevisionId(),
                          change_id,
                          patch_id,
                          self.bare_git.rev_parse('FETCH_HEAD'))
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
## Branch Management ##
|
|
|
|
|
|
|
|
def StartBranch(self, name):
  """Create a new branch off the manifest's revision.

  Returns True on success, False if the underlying git checkout failed.
  """
  # Already on the requested branch? Nothing to do.
  head = self.work_git.GetHead()
  if head == (R_HEADS + name):
    return True

  # Branch already exists in the bare repository: just check it out.
  all_refs = self.bare_ref.all
  if (R_HEADS + name) in all_refs:
    return GitCommand(self,
                      ['checkout', name, '--'],
                      capture_stdout = True,
                      capture_stderr = True).Wait() == 0

  # New branch: configure it to track the manifest's remote/revision.
  branch = self.GetBranch(name)
  branch.remote = self.GetRemote(self.remote.name)
  branch.merge = self.revisionExpr
  revid = self.GetRevisionId(all_refs)

  # Resolve a symbolic HEAD to its commit id so we can compare below.
  if head.startswith(R_HEADS):
    try:
      head = all_refs[head]
    except KeyError:
      head = None

  if revid and head and revid == head:
    # HEAD is already at the target revision: create the branch ref and
    # repoint HEAD by writing the files directly, avoiding a full
    # `git checkout` invocation.
    ref = os.path.join(self.gitdir, R_HEADS + name)
    try:
      os.makedirs(os.path.dirname(ref))
    except OSError:
      # Directory already exists (or is unwritable); let the _lwrite
      # below surface any real failure.
      pass
    _lwrite(ref, '%s\n' % revid)
    _lwrite(os.path.join(self.worktree, '.git', HEAD),
            'ref: %s%s\n' % (R_HEADS, name))
    branch.Save()
    return True

  # Otherwise fall back to git to create and switch to the branch.
  if GitCommand(self,
                ['checkout', '-b', branch.name, revid],
                capture_stdout = True,
                capture_stderr = True).Wait() == 0:
    branch.Save()
    return True
  return False
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-04-10 20:01:24 +00:00
|
|
|
def CheckoutBranch(self, name):
  """Checkout a local topic branch.

  Args:
    name: The name of the branch to checkout.

  Returns:
    True if the checkout succeeded; False if it didn't; None if the branch
    didn't exist.
  """
  rev = R_HEADS + name
  head = self.work_git.GetHead()
  if head == rev:
    # Already on the branch
    #
    return True

  all_refs = self.bare_ref.all
  try:
    revid = all_refs[rev]
  except KeyError:
    # Branch does not exist in this project
    #
    return None

  # Resolve a symbolic HEAD to its commit id so we can compare below.
  if head.startswith(R_HEADS):
    try:
      head = all_refs[head]
    except KeyError:
      head = None

  if head == revid:
    # Same revision; just update HEAD to point to the new
    # target branch, but otherwise take no other action.
    #
    _lwrite(os.path.join(self.worktree, '.git', HEAD),
            'ref: %s%s\n' % (R_HEADS, name))
    return True

  # Different revision: let git perform the full checkout.
  return GitCommand(self,
                    ['checkout', name, '--'],
                    capture_stdout = True,
                    capture_stderr = True).Wait() == 0
|
2009-04-10 20:01:24 +00:00
|
|
|
|
2008-11-03 19:24:59 +00:00
|
|
|
def AbandonBranch(self, name):
  """Destroy a local topic branch.

  Args:
    name: The name of the branch to abandon.

  Returns:
    True if the abandon succeeded; False if it didn't; None if the branch
    didn't exist.
  """
  rev = R_HEADS + name
  all_refs = self.bare_ref.all
  if rev not in all_refs:
    # Doesn't exist
    return None

  head = self.work_git.GetHead()
  if head == rev:
    # We can't destroy the branch while we are sitting
    # on it. Switch to a detached HEAD.
    #
    head = all_refs[head]

    revid = self.GetRevisionId(all_refs)
    if head == revid:
      # HEAD already points at the manifest revision: detach by writing
      # the commit id directly, skipping a full checkout.
      _lwrite(os.path.join(self.worktree, '.git', HEAD),
              '%s\n' % revid)
    else:
      self._Checkout(revid, quiet=True)

  return GitCommand(self,
                    ['branch', '-D', name],
                    capture_stdout = True,
                    capture_stderr = True).Wait() == 0
|
2008-11-03 19:24:59 +00:00
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def PruneHeads(self):
  """Prune any topic branches already merged into upstream.

  Returns the list of ReviewableBranch objects for branches that could
  NOT be deleted (still carry unmerged work).
  """
  cb = self.CurrentBranch
  kill = []
  left = self._allrefs
  # Candidate set: every local head except the currently checked-out one.
  for name in left.keys():
    if name.startswith(R_HEADS):
      name = name[len(R_HEADS):]
      if cb is None or name != cb:
        kill.append(name)

  # The current branch can also be pruned, but only if it has no commits
  # beyond the upstream revision and the worktree is clean; then we can
  # safely detach HEAD from it first.
  rev = self.GetRevisionId(left)
  if cb is not None \
     and not self._revlist(HEAD + '...' + rev) \
     and not self.IsDirty(consider_untracked = False):
    self.work_git.DetachHead(HEAD)
    kill.append(cb)

  if kill:
    # `git branch -d` refuses to delete the branch HEAD is on, so park
    # HEAD on the upstream revision while deleting, then restore it.
    old = self.bare_git.GetHead()
    if old is None:
      # No HEAD at all; use a sentinel name we can restore afterwards.
      old = 'refs/heads/please_never_use_this_as_a_branch_name'

    try:
      self.bare_git.DetachHead(rev)

      # -d (not -D): git itself verifies each branch is fully merged,
      # so unmerged branches simply survive the deletion attempt.
      b = ['branch', '-d']
      b.extend(kill)
      b = GitCommand(self, b, bare=True,
                     capture_stdout=True,
                     capture_stderr=True)
      b.Wait()
    finally:
      self.bare_git.SetHead(old)
      # Re-read refs to see which branches git actually deleted.
      left = self._allrefs

    # If anything was deleted, the published-commit cache may be stale.
    for branch in kill:
      if (R_HEADS + branch) not in left:
        self.CleanPublishedCache()
        break

  if cb and cb not in kill:
    kill.append(cb)
  kill.sort()

  # Report the branches that survived deletion as still-reviewable work.
  kept = []
  for branch in kill:
    if (R_HEADS + branch) in left:
      branch = self.GetBranch(branch)
      base = branch.LocalMerge
      if not base:
        base = rev
      kept.append(ReviewableBranch(self, branch, base))
  return kept
|
|
|
|
|
|
|
|
|
Represent git-submodule as nested projects, take 2
(Previous submission of this change broke Android buildbot due to
incorrect regular expression for parsing git-config output. During
investigation, we also found that Android, which pulls Chromium, has a
workaround for Chromium's submodules; its manifest includes Chromium's
submodules. This new change, in addition to fixing the regex, also
take this type of workarounds into consideration; it adds a new
attribute that makes repo not fetch submodules unless submodules have a
project element defined in the manifest, or this attribute is
overridden by a parent project element or by the default element.)
We need a representation of git-submodule in repo; otherwise repo will
not sync submodules, and leave workspace in a broken state. Of course
this will not be a problem if all projects are owned by the owner of the
manifest file, who may simply choose not to use git-submodule in all
projects. However, this is not possible in practice because manifest
file owner is unlikely to own all upstream projects.
As git submodules are simply git repositories, it is natural to treat
them as plain repo projects that live inside a repo project. That is,
we could use recursively declared projects to denote the is-submodule
relation of git repositories.
The behavior of repo remains the same to projects that do not have a
sub-project within. As for parent projects, repo fetches them and their
sub-projects as normal projects, and then checks out subprojects at the
commit specified in parent's commit object. The sub-project is fetched
at a path relative to parent project's working directory; so the path
specified in manifest file should match that of .gitmodules file.
If a submodule is not registered in repo manifest, repo will derive its
properties from itself and its parent project, which might not always be
correct. In such cases, the subproject is called a derived subproject.
To a user, a sub-project is merely a git-submodule; so all tips of
working with a git-submodule apply here, too. For example, you should
not run `repo sync` in a parent repository if its submodule is dirty.
Change-Id: I4b8344c1b9ccad2f58ad304573133e5d52e1faef
2012-01-11 03:28:42 +00:00
|
|
|
## Submodule Management ##
|
|
|
|
|
|
|
|
def GetRegisteredSubprojects(self):
  """Return all manifest-registered subprojects, recursively.

  Each level of children is emitted before descending into any child's
  own subprojects (pre-order by level within a parent).
  """
  collected = []

  def _walk(children):
    if children:
      collected.extend(children)
      for child in children:
        _walk(child.subprojects)

  _walk(self.subprojects)
  return collected
|
|
|
|
|
|
|
|
def _GetSubmodules(self):
  """Read this project's submodules from its git data.

  Returns a list of (rev, path, url) tuples, one per submodule that is
  both declared in .gitmodules and present as a gitlink in the tree.
  Returns [] if the revision cannot be resolved or parsing fails.
  """
  # Unfortunately we cannot call `git submodule status --recursive` here
  # because the working tree might not exist yet, and it cannot be used
  # without a working tree in its current implementation.

  def get_submodules(gitdir, rev):
    # Parse .gitmodules for submodule sub_paths and sub_urls
    sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
    if not sub_paths:
      return []
    # Run `git ls-tree` to read SHAs of submodule object, which happen to be
    # revision of submodule repository
    sub_revs = git_ls_tree(gitdir, rev, sub_paths)
    submodules = []
    for sub_path, sub_url in zip(sub_paths, sub_urls):
      try:
        sub_rev = sub_revs[sub_path]
      except KeyError:
        # Ignore non-exist submodules
        continue
      submodules.append((sub_rev, sub_path, sub_url))
    return submodules

  # Patterns for `git config --list` output: submodule.<name>.path=<path>
  # and submodule.<name>.url=<url>.
  re_path = re.compile(r'^submodule\.([^.]+)\.path=(.*)$')
  re_url = re.compile(r'^submodule\.([^.]+)\.url=(.*)$')
  def parse_gitmodules(gitdir, rev):
    # Read the .gitmodules blob straight from the object database; the
    # working tree may not exist.
    cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
    try:
      p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
                     bare = True, gitdir = gitdir)
    except GitError:
      return [], []
    if p.Wait() != 0:
      return [], []

    # `git config --file` needs a real file, so spill the blob to a
    # temporary file and let git parse it for us.
    gitmodules_lines = []
    fd, temp_gitmodules_path = tempfile.mkstemp()
    try:
      os.write(fd, p.stdout)
      os.close(fd)
      cmd = ['config', '--file', temp_gitmodules_path, '--list']
      p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
                     bare = True, gitdir = gitdir)
      if p.Wait() != 0:
        return [], []
      gitmodules_lines = p.stdout.split('\n')
    except GitError:
      return [], []
    finally:
      os.remove(temp_gitmodules_path)

    # Collect path/url per submodule name, then emit both lists in the
    # same (sorted-by-name) order so callers can zip them.
    names = set()
    paths = {}
    urls = {}
    for line in gitmodules_lines:
      if not line:
        continue
      m = re_path.match(line)
      if m:
        names.add(m.group(1))
        paths[m.group(1)] = m.group(2)
        continue
      m = re_url.match(line)
      if m:
        names.add(m.group(1))
        urls[m.group(1)] = m.group(2)
        continue
    names = sorted(names)
    return ([paths.get(name, '') for name in names],
            [urls.get(name, '') for name in names])

  def git_ls_tree(gitdir, rev, paths):
    # Map each gitlink path to the commit SHA recorded in the tree.
    cmd = ['ls-tree', rev, '--']
    cmd.extend(paths)
    try:
      p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
                     bare = True, gitdir = gitdir)
    except GitError:
      return []
    if p.Wait() != 0:
      return []
    objects = {}
    for line in p.stdout.split('\n'):
      if not line.strip():
        continue
      # ls-tree line: <mode> <type> <object>\t<path>; fields 2:4 are
      # the object SHA and the path.
      object_rev, object_path = line.split()[2:4]
      objects[object_path] = object_rev
    return objects

  try:
    rev = self.GetRevisionId()
  except GitError:
    return []
  return get_submodules(self.gitdir, rev)
|
|
|
|
|
|
|
|
def GetDerivedSubprojects(self):
  """Build Project objects for submodules not declared in the manifest.

  Walks this project's git submodules; any submodule already covered by
  a manifest project is recursed into instead of being derived. Returns
  the flat list of derived subprojects (recursively).
  """
  result = []
  if not self.Exists:
    # If git repo does not exist yet, querying its submodules will
    # mess up its states; so return here.
    return result
  for rev, path, url in self._GetSubmodules():
    name = self.manifest.GetSubprojectName(self, path)
    relpath, worktree, gitdir, objdir = \
        self.manifest.GetSubprojectPaths(self, name, path)
    # If the manifest already has a project at this path, prefer it and
    # only look for derived subprojects beneath it.
    project = self.manifest.paths.get(relpath)
    if project:
      result.extend(project.GetDerivedSubprojects())
      continue

    # Derive the subproject's properties from its parent (this project);
    # pin it to the gitlink commit via revisionId.
    remote = RemoteSpec(self.remote.name,
                        url = url,
                        review = self.remote.review)
    subproject = Project(manifest = self.manifest,
                         name = name,
                         remote = remote,
                         gitdir = gitdir,
                         objdir = objdir,
                         worktree = worktree,
                         relpath = relpath,
                         revisionExpr = self.revisionExpr,
                         revisionId = rev,
                         rebase = self.rebase,
                         groups = self.groups,
                         sync_c = self.sync_c,
                         sync_s = self.sync_s,
                         parent = self,
                         is_derived = True)
    result.append(subproject)
    result.extend(subproject.GetDerivedSubprojects())
  return result
|
|
|
|
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
## Direct Git Commands ##
|
2014-01-17 02:32:33 +00:00
|
|
|
def _CheckForSha1(self):
|
|
|
|
try:
|
|
|
|
# if revision (sha or tag) is not present then following function
|
|
|
|
# throws an error.
|
|
|
|
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
|
|
|
|
return True
|
|
|
|
except GitError:
|
|
|
|
# There is no such persistent revision. We have to fetch it.
|
|
|
|
return False
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2013-10-16 09:02:35 +00:00
|
|
|
def _FetchArchive(self, tarpath, cwd=None):
|
|
|
|
cmd = ['archive', '-v', '-o', tarpath]
|
|
|
|
cmd.append('--remote=%s' % self.remote.url)
|
|
|
|
cmd.append('--prefix=%s/' % self.relpath)
|
|
|
|
cmd.append(self.revisionExpr)
|
|
|
|
|
|
|
|
command = GitCommand(self, cmd, cwd=cwd,
|
|
|
|
capture_stdout=True,
|
|
|
|
capture_stderr=True)
|
|
|
|
|
|
|
|
if command.Wait() != 0:
|
|
|
|
raise GitError('git archive %s: %s' % (self.name, command.stderr))
|
|
|
|
|
2011-08-26 00:21:47 +00:00
|
|
|
  def _RemoteFetch(self, name=None,
                   current_branch_only=False,
                   initial=False,
                   quiet=False,
                   alt_dir=None,
                   no_tags=False):
    """Fetch from the named remote, with optimizations for sha1/tag pins.

    Args:
      name: remote to fetch from; defaults to self.remote.name.
      current_branch_only: fetch only the branch/tag/sha named by
          revisionExpr instead of all heads.
      initial: True on the very first fetch of this repository; enables
          --depth and the alternate-object-dir packed-refs trick below.
      quiet: pass --quiet to git fetch.
      alt_dir: an existing objects directory to borrow objects from
          during the initial fetch.
      no_tags: suppress tag fetching.

    Returns:
      True on success; False if the fetch ultimately failed.
    """
    is_sha1 = False
    tag_name = None
    depth = None

    # The depth should not be used when fetching to a mirror because
    # it will result in a shallow repository that cannot be cloned or
    # fetched from.
    if not self.manifest.IsMirror:
      if self.clone_depth:
        depth = self.clone_depth
      else:
        depth = self.manifest.manifestProject.config.GetString('repo.depth')

    # A shallow fetch only makes sense for a single branch.
    if depth:
      current_branch_only = True

    if current_branch_only:
      if ID_RE.match(self.revisionExpr) is not None:
        is_sha1 = True
      elif self.revisionExpr.startswith(R_TAGS):
        # this is a tag and its sha1 value should never change
        tag_name = self.revisionExpr[len(R_TAGS):]

      if is_sha1 or tag_name is not None:
        # Immutable revision already present locally: nothing to fetch.
        if self._CheckForSha1():
          return True
      if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)):
        # A sha1 with no usable upstream branch cannot be fetched
        # selectively; fall back to fetching everything.
        current_branch_only = False

    if not name:
      name = self.remote.name

    ssh_proxy = False
    remote = self.GetRemote(name)
    if remote.PreConnectFetch():
      ssh_proxy = True

    if initial:
      # Temporarily advertise the refs of |alt_dir|'s repository in our
      # packed-refs so git can avoid re-downloading objects we can borrow.
      if alt_dir and 'objects' == os.path.basename(alt_dir):
        ref_dir = os.path.dirname(alt_dir)
        packed_refs = os.path.join(self.gitdir, 'packed-refs')
        remote = self.GetRemote(name)

        all_refs = self.bare_ref.all
        ids = set(all_refs.values())
        tmp = set()

        for r, ref_id in GitRefs(ref_dir).all.items():
          if r not in all_refs:
            if r.startswith(R_TAGS) or remote.WritesTo(r):
              # Reuse the borrowed ref name directly when it is one
              # this remote would write anyway.
              all_refs[r] = ref_id
              ids.add(ref_id)
              continue

          if ref_id in ids:
            continue

          # Otherwise park the id under a throwaway refs/_alt/ name so
          # the object is still reachable during the fetch.
          r = 'refs/_alt/%s' % ref_id
          all_refs[r] = ref_id
          ids.add(ref_id)
          tmp.add(r)

        tmp_packed = ''
        old_packed = ''

        for r in sorted(all_refs):
          line = '%s %s\n' % (all_refs[r], r)
          tmp_packed += line
          if r not in tmp:
            # old_packed keeps only the refs we did not invent, for
            # restoration after the fetch.
            old_packed += line

        _lwrite(packed_refs, tmp_packed)
      else:
        alt_dir = None

    cmd = ['fetch']

    # The --depth option only affects the initial fetch; after that we'll do
    # full fetches of changes.
    if depth and initial:
      cmd.append('--depth=%s' % depth)

    if quiet:
      cmd.append('--quiet')
    if not self.worktree:
      cmd.append('--update-head-ok')
    cmd.append(name)

    # If using depth then we should not get all the tags since they may
    # be outside of the depth.
    if no_tags or depth:
      cmd.append('--no-tags')
    else:
      cmd.append('--tags')

    if not current_branch_only:
      # Fetch whole repo
      cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
    elif tag_name is not None:
      cmd.append('tag')
      cmd.append(tag_name)
    else:
      branch = self.revisionExpr
      if is_sha1:
        # Fetch the upstream branch expected to contain the pinned sha1.
        branch = self.upstream
      if branch.startswith(R_HEADS):
        branch = branch[len(R_HEADS):]
      cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))

    ok = False
    # Retry once after a randomized backoff to ride out transient
    # server/network failures.
    for _i in range(2):
      ret = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait()
      if ret == 0:
        ok = True
        break
      elif current_branch_only and is_sha1 and ret == 128:
        # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
        # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
        # abort the optimization attempt and do a full sync.
        break
      time.sleep(random.randint(30, 45))

    if initial:
      # Ensure that some refs exist.  Otherwise, we probably aren't looking
      # at a real git repository and may have a bad url.
      if not self.bare_ref.all:
        ok = False

      if alt_dir:
        # Undo the packed-refs trick from above.
        if old_packed != '':
          _lwrite(packed_refs, old_packed)
        else:
          os.remove(packed_refs)
        self.bare_git.pack_refs('--all', '--prune')

    if is_sha1 and current_branch_only and self.upstream:
      # We just synced the upstream given branch; verify we
      # got what we wanted, else trigger a second run of all
      # refs.
      if not self._CheckForSha1():
        return self._RemoteFetch(name=name, current_branch_only=False,
                                 initial=False, quiet=quiet, alt_dir=alt_dir)

    return ok
|
|
|
|
|
|
|
|
  def _ApplyCloneBundle(self, initial=False, quiet=False):
    """Bootstrap this repository from a server-provided clone.bundle.

    Downloads <remote url>/clone.bundle over http(s) (if available) and
    fetches from it, which is much cheaper for the server than a full
    initial git fetch.

    Args:
      initial: True when this is the first fetch for the repository.
      quiet: suppress progress output.

    Returns:
      True if the bundle was applied successfully, else False (callers
      then fall back to a normal fetch).
    """
    # Bundles are whole-history; they don't mix with shallow clones.
    if initial and (self.manifest.manifestProject.config.GetString('repo.depth') or self.clone_depth):
      return False

    remote = self.GetRemote(self.remote.name)
    bundle_url = remote.url + '/clone.bundle'
    bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
    if GetSchemeFromUrl(bundle_url) not in (
        'http', 'https', 'persistent-http', 'persistent-https'):
      return False

    bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
    bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')

    exist_dst = os.path.exists(bundle_dst)
    exist_tmp = os.path.exists(bundle_tmp)

    # After the initial sync, only bother if a previous partial
    # download is lying around to be resumed.
    if not initial and not exist_dst and not exist_tmp:
      return False

    if not exist_dst:
      exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet)
    if not exist_dst:
      return False

    cmd = ['fetch']
    if quiet:
      cmd.append('--quiet')
    if not self.worktree:
      cmd.append('--update-head-ok')
    cmd.append(bundle_dst)
    for f in remote.fetch:
      cmd.append(str(f))
    cmd.append('refs/tags/*:refs/tags/*')

    ok = GitCommand(self, cmd, bare=True).Wait() == 0
    # The bundle is single-use; clean up both final and temp files.
    if os.path.exists(bundle_dst):
      os.remove(bundle_dst)
    if os.path.exists(bundle_tmp):
      os.remove(bundle_tmp)
    return ok
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2011-10-03 15:30:24 +00:00
|
|
|
  def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet):
    """Download |srcUrl| with curl into |tmpPath|, renaming to |dstPath|.

    Resumes a previous partial download when tmpPath already holds at
    least 1 KiB. Validates the result as a git bundle before renaming.

    Returns:
      True if dstPath now contains a valid bundle, else False.
    """
    if os.path.exists(dstPath):
      os.remove(dstPath)

    cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location']
    if quiet:
      cmd += ['--silent']
    if os.path.exists(tmpPath):
      size = os.stat(tmpPath).st_size
      if size >= 1024:
        # Resume the interrupted transfer.
        cmd += ['--continue-at', '%d' % (size,)]
      else:
        # Tiny leftover is likely an error page; start over.
        os.remove(tmpPath)
    # On darwin, curl does not pick up http_proxy from the environment.
    if 'http_proxy' in os.environ and 'darwin' == sys.platform:
      cmd += ['--proxy', os.environ['http_proxy']]
    cookiefile = self._GetBundleCookieFile(srcUrl)
    if cookiefile:
      cmd += ['--cookie', cookiefile]
    # Strip the repo-specific scheme prefix before handing curl the URL.
    if srcUrl.startswith('persistent-'):
      srcUrl = srcUrl[len('persistent-'):]
    cmd += [srcUrl]

    if IsTrace():
      Trace('%s', ' '.join(cmd))
    try:
      proc = subprocess.Popen(cmd)
    except OSError:
      # curl is not installed.
      return False

    curlret = proc.wait()

    if curlret == 22:
      # From curl man page:
      # 22: HTTP page not retrieved. The requested url was not found or
      # returned another error with the HTTP error code being 400 or above.
      # This return code only appears if -f, --fail is used.
      if not quiet:
        print("Server does not provide clone.bundle; ignoring.",
              file=sys.stderr)
      return False

    if os.path.exists(tmpPath):
      if curlret == 0 and self._IsValidBundle(tmpPath):
        os.rename(tmpPath, dstPath)
        return True
      else:
        # Bad or incomplete download; discard it.
        os.remove(tmpPath)
        return False
    else:
      return False
|
2011-09-19 21:50:58 +00:00
|
|
|
|
2013-06-03 19:15:23 +00:00
|
|
|
def _IsValidBundle(self, path):
|
|
|
|
try:
|
|
|
|
with open(path) as f:
|
|
|
|
if f.read(16) == '# v2 git bundle\n':
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
|
|
|
|
return False
|
|
|
|
except OSError:
|
|
|
|
return False
|
|
|
|
|
2013-06-03 22:05:07 +00:00
|
|
|
def _GetBundleCookieFile(self, url):
|
|
|
|
if url.startswith('persistent-'):
|
|
|
|
try:
|
|
|
|
p = subprocess.Popen(
|
|
|
|
['git-remote-persistent-https', '-print_config', url],
|
|
|
|
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
|
|
|
stderr=subprocess.PIPE)
|
2013-09-26 00:46:01 +00:00
|
|
|
p.stdin.close() # Tell subprocess it's ok to close.
|
2013-06-03 22:05:07 +00:00
|
|
|
prefix = 'http.cookiefile='
|
2013-09-26 00:46:01 +00:00
|
|
|
cookiefile = None
|
2013-06-03 22:05:07 +00:00
|
|
|
for line in p.stdout:
|
|
|
|
line = line.strip()
|
|
|
|
if line.startswith(prefix):
|
2013-09-26 00:46:01 +00:00
|
|
|
cookiefile = line[len(prefix):]
|
|
|
|
break
|
2013-06-03 22:05:07 +00:00
|
|
|
if p.wait():
|
2013-11-21 18:38:03 +00:00
|
|
|
err_msg = p.stderr.read()
|
|
|
|
if ' -print_config' in err_msg:
|
2013-06-03 22:05:07 +00:00
|
|
|
pass # Persistent proxy doesn't support -print_config.
|
|
|
|
else:
|
2013-11-21 18:38:03 +00:00
|
|
|
print(err_msg, file=sys.stderr)
|
2013-09-26 00:46:01 +00:00
|
|
|
if cookiefile:
|
|
|
|
return cookiefile
|
2013-06-03 22:05:07 +00:00
|
|
|
except OSError as e:
|
|
|
|
if e.errno == errno.ENOENT:
|
|
|
|
pass # No persistent proxy.
|
|
|
|
raise
|
|
|
|
return GitConfig.ForUser().GetString('http.cookiefile')
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def _Checkout(self, rev, quiet=False):
|
|
|
|
cmd = ['checkout']
|
|
|
|
if quiet:
|
|
|
|
cmd.append('-q')
|
|
|
|
cmd.append(rev)
|
|
|
|
cmd.append('--')
|
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
if self._allrefs:
|
|
|
|
raise GitError('%s checkout %s ' % (self.name, rev))
|
|
|
|
|
2011-03-24 15:28:18 +00:00
|
|
|
def _CherryPick(self, rev, quiet=False):
|
|
|
|
cmd = ['cherry-pick']
|
|
|
|
cmd.append(rev)
|
|
|
|
cmd.append('--')
|
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
if self._allrefs:
|
|
|
|
raise GitError('%s cherry-pick %s ' % (self.name, rev))
|
|
|
|
|
2011-08-19 11:56:09 +00:00
|
|
|
def _Revert(self, rev, quiet=False):
|
|
|
|
cmd = ['revert']
|
|
|
|
cmd.append('--no-edit')
|
|
|
|
cmd.append(rev)
|
|
|
|
cmd.append('--')
|
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
if self._allrefs:
|
|
|
|
raise GitError('%s revert %s ' % (self.name, rev))
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def _ResetHard(self, rev, quiet=True):
|
|
|
|
cmd = ['reset', '--hard']
|
|
|
|
if quiet:
|
|
|
|
cmd.append('-q')
|
|
|
|
cmd.append(rev)
|
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
raise GitError('%s reset --hard %s ' % (self.name, rev))
|
|
|
|
|
|
|
|
def _Rebase(self, upstream, onto = None):
|
2009-04-16 15:14:26 +00:00
|
|
|
cmd = ['rebase']
|
2008-10-21 14:00:00 +00:00
|
|
|
if onto is not None:
|
|
|
|
cmd.extend(['--onto', onto])
|
|
|
|
cmd.append(upstream)
|
2009-04-16 15:14:26 +00:00
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
2008-10-21 14:00:00 +00:00
|
|
|
raise GitError('%s rebase %s ' % (self.name, upstream))
|
|
|
|
|
2012-05-04 10:18:12 +00:00
|
|
|
def _FastForward(self, head, ffonly=False):
|
2008-10-21 14:00:00 +00:00
|
|
|
cmd = ['merge', head]
|
2012-05-04 10:18:12 +00:00
|
|
|
if ffonly:
|
|
|
|
cmd.append("--ff-only")
|
2008-10-21 14:00:00 +00:00
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
raise GitError('%s merge %s ' % (self.name, head))
|
|
|
|
|
2012-10-05 10:37:58 +00:00
|
|
|
  def _InitGitDir(self, mirror_git=None):
    """Create and configure this project's bare git directory.

    On first creation, optionally points objects/info/alternates at a
    local reference/mirror repository (repo.reference config or the
    |mirror_git| argument) so objects can be borrowed instead of fetched.
    Always refreshes hooks and copies user identity / bare-ness settings
    from the manifest project's config.

    Args:
      mirror_git: explicit path of a mirror repository to borrow
          objects from; overrides the repo.reference lookup.
    """
    if not os.path.exists(self.gitdir):

      # Initialize the bare repository, which contains all of the objects.
      if not os.path.exists(self.objdir):
        os.makedirs(self.objdir)
        self.bare_objdir.init()

      # If we have a separate directory to hold refs, initialize it as well.
      if self.objdir != self.gitdir:
        os.makedirs(self.gitdir)
        self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False,
                              copy_all=True)

      mp = self.manifest.manifestProject
      ref_dir = mp.config.GetString('repo.reference') or ''

      if ref_dir or mirror_git:
        if not mirror_git:
          mirror_git = os.path.join(ref_dir, self.name + '.git')
        repo_git = os.path.join(ref_dir, '.repo', 'projects',
                                self.relpath + '.git')

        # Prefer a bare mirror checkout; fall back to the reference
        # client's own project gitdir; else no alternates at all.
        if os.path.exists(mirror_git):
          ref_dir = mirror_git

        elif os.path.exists(repo_git):
          ref_dir = repo_git

        else:
          ref_dir = None

        if ref_dir:
          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
                  os.path.join(ref_dir, 'objects') + '\n')

      self._UpdateHooks()

      # Propagate the user's identity from the manifest project config.
      m = self.manifest.manifestProject.config
      for key in ['user.name', 'user.email']:
        if m.Has(key, include_defaults = False):
          self.config.SetString(key, m.GetString(key))
      if self.manifest.IsMirror:
        self.config.SetString('core.bare', 'true')
      else:
        self.config.SetString('core.bare', None)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2012-10-24 11:44:42 +00:00
|
|
|
def _UpdateHooks(self):
|
|
|
|
if os.path.exists(self.gitdir):
|
|
|
|
# Always recreate hooks since they can have been changed
|
|
|
|
# since the latest update.
|
2008-10-21 14:00:00 +00:00
|
|
|
hooks = self._gitdir_path('hooks')
|
2008-10-29 21:38:12 +00:00
|
|
|
try:
|
|
|
|
to_rm = os.listdir(hooks)
|
|
|
|
except OSError:
|
|
|
|
to_rm = []
|
|
|
|
for old_hook in to_rm:
|
2008-10-21 14:00:00 +00:00
|
|
|
os.remove(os.path.join(hooks, old_hook))
|
2008-11-03 18:32:09 +00:00
|
|
|
self._InitHooks()
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2008-11-03 18:32:09 +00:00
|
|
|
def _InitHooks(self):
|
2013-11-27 19:17:13 +00:00
|
|
|
hooks = os.path.realpath(self._gitdir_path('hooks'))
|
2008-11-03 18:32:09 +00:00
|
|
|
if not os.path.exists(hooks):
|
|
|
|
os.makedirs(hooks)
|
2011-01-10 22:16:30 +00:00
|
|
|
for stock_hook in _ProjectHooks():
|
2009-08-23 01:17:46 +00:00
|
|
|
name = os.path.basename(stock_hook)
|
|
|
|
|
2011-04-18 09:23:29 +00:00
|
|
|
if name in ('commit-msg',) and not self.remote.review \
|
|
|
|
and not self is self.manifest.manifestProject:
|
2009-08-23 01:17:46 +00:00
|
|
|
# Don't install a Gerrit Code Review hook if this
|
|
|
|
# project does not appear to use it for reviews.
|
|
|
|
#
|
2011-04-18 09:23:29 +00:00
|
|
|
# Since the manifest project is one of those, but also
|
|
|
|
# managed through gerrit, it's excluded
|
2009-08-23 01:17:46 +00:00
|
|
|
continue
|
|
|
|
|
|
|
|
dst = os.path.join(hooks, name)
|
|
|
|
if os.path.islink(dst):
|
|
|
|
continue
|
|
|
|
if os.path.exists(dst):
|
|
|
|
if filecmp.cmp(stock_hook, dst, shallow=False):
|
|
|
|
os.remove(dst)
|
|
|
|
else:
|
|
|
|
_error("%s: Not replacing %s hook", self.relpath, name)
|
|
|
|
continue
|
2008-11-03 18:32:09 +00:00
|
|
|
try:
|
2012-08-05 11:39:26 +00:00
|
|
|
os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
|
2012-09-09 22:37:57 +00:00
|
|
|
except OSError as e:
|
2009-08-23 01:17:46 +00:00
|
|
|
if e.errno == errno.EPERM:
|
2008-11-03 18:32:09 +00:00
|
|
|
raise GitError('filesystem must support symlinks')
|
|
|
|
else:
|
|
|
|
raise
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def _InitRemote(self):
|
2009-05-19 21:58:02 +00:00
|
|
|
if self.remote.url:
|
2008-10-21 14:00:00 +00:00
|
|
|
remote = self.GetRemote(self.remote.name)
|
2009-05-19 21:58:02 +00:00
|
|
|
remote.url = self.remote.url
|
|
|
|
remote.review = self.remote.review
|
|
|
|
remote.projectname = self.name
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2008-11-04 15:37:10 +00:00
|
|
|
if self.worktree:
|
|
|
|
remote.ResetFetch(mirror=False)
|
|
|
|
else:
|
|
|
|
remote.ResetFetch(mirror=True)
|
2008-10-21 14:00:00 +00:00
|
|
|
remote.Save()
|
|
|
|
|
|
|
|
def _InitMRef(self):
|
|
|
|
if self.manifest.branch:
|
2009-05-30 01:38:17 +00:00
|
|
|
self._InitAnyMRef(R_M + self.manifest.branch)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2008-11-04 15:37:10 +00:00
|
|
|
def _InitMirrorHead(self):
|
2009-06-01 22:28:21 +00:00
|
|
|
self._InitAnyMRef(HEAD)
|
2009-05-30 01:38:17 +00:00
|
|
|
|
|
|
|
  def _InitAnyMRef(self, ref):
    """Make |ref| track the revision the manifest selects for this project.

    When the manifest pins an explicit revisionId, |ref| is detached at
    that commit; otherwise it becomes a symbolic ref to the remote's
    local tracking ref for revisionExpr. No-ops when already correct.
    """
    cur = self.bare_ref.symref(ref)

    if self.revisionId:
      # Pinned commit: detach |ref| at revisionId if it is currently a
      # symref or points elsewhere.
      if cur != '' or self.bare_ref.get(ref) != self.revisionId:
        msg = 'manifest set to %s' % self.revisionId
        dst = self.revisionId + '^0'
        self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
    else:
      remote = self.GetRemote(self.remote.name)
      dst = remote.ToLocal(self.revisionExpr)
      if cur != dst:
        msg = 'manifest set to %s' % self.revisionExpr
        self.bare_git.symbolic_ref('-m', msg, ref, dst)
|
2008-11-04 15:37:10 +00:00
|
|
|
|
2013-10-12 00:03:19 +00:00
|
|
|
  def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all):
    """Update |dotgit| to reference |gitdir|, using symlinks where possible.

    Args:
      gitdir: The bare git repository. Must already be initialized.
      dotgit: The repository you would like to initialize.
      share_refs: If true, |dotgit| will store its refs under |gitdir|.
          Only one work tree can store refs under a given |gitdir|.
      copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
          This saves you the effort of initializing |dotgit| yourself.

    Raises:
      GitError: |dotgit| already contains a real (non-symlink) entry, or
          the filesystem cannot create symlinks.
    """
    # These objects can be shared between several working trees.
    symlink_files = ['description', 'info']
    symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
    if share_refs:
      # These objects can only be used by a single working tree.
      symlink_files += ['config', 'packed-refs', 'shallow']
      symlink_dirs += ['logs', 'refs']
    to_symlink = symlink_files + symlink_dirs

    to_copy = []
    if copy_all:
      to_copy = os.listdir(gitdir)

    for name in set(to_copy).union(to_symlink):
      try:
        src = os.path.realpath(os.path.join(gitdir, name))
        dst = os.path.realpath(os.path.join(dotgit, name))

        # A real file/dir at the destination means a foreign work tree
        # is already here; refuse to clobber it.
        if os.path.lexists(dst) and not os.path.islink(dst):
          raise GitError('cannot overwrite a local work tree')

        # If the source dir doesn't exist, create an empty dir.
        if name in symlink_dirs and not os.path.lexists(src):
          os.makedirs(src)

        if name in to_symlink:
          os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
        elif copy_all and not os.path.islink(dst):
          if os.path.isdir(src):
            shutil.copytree(src, dst)
          elif os.path.isfile(src):
            shutil.copy(src, dst)
      except OSError as e:
        if e.errno == errno.EPERM:
          raise GitError('filesystem must support symlinks')
        else:
          raise
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
def _InitWorkTree(self):
|
|
|
|
dotgit = os.path.join(self.worktree, '.git')
|
|
|
|
if not os.path.exists(dotgit):
|
|
|
|
os.makedirs(dotgit)
|
2013-10-12 00:03:19 +00:00
|
|
|
self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
|
|
|
|
copy_all=False)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:38:17 +00:00
|
|
|
_lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
cmd = ['read-tree', '--reset', '-u']
|
|
|
|
cmd.append('-v')
|
2009-05-30 01:38:17 +00:00
|
|
|
cmd.append(HEAD)
|
2008-10-21 14:00:00 +00:00
|
|
|
if GitCommand(self, cmd).Wait() != 0:
|
|
|
|
raise GitError("cannot initialize work tree")
|
2010-11-26 12:42:13 +00:00
|
|
|
|
2014-04-21 22:10:59 +00:00
|
|
|
self._CopyAndLinkFiles()
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
def _gitdir_path(self, path):
|
2013-10-12 00:03:19 +00:00
|
|
|
return os.path.realpath(os.path.join(self.gitdir, path))
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:28:25 +00:00
|
|
|
def _revlist(self, *args, **kw):
|
|
|
|
a = []
|
|
|
|
a.extend(args)
|
|
|
|
a.append('--')
|
|
|
|
return self.work_git.rev_list(*a, **kw)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
  @property
  def _allrefs(self):
    # All refs of the bare repository, as a {ref name: id} mapping.
    return self.bare_ref.all
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2014-01-09 15:21:37 +00:00
|
|
|
  def _getLogs(self, rev1, rev2, oneline=False, color=True):
    """Get logs between two revisions of this project.

    Args:
      rev1: base revision; with rev2 forms the range rev1..rev2.
      rev2: optional tip revision.
      oneline: pass --oneline to git log.
      color: pass --color when the diff coloring config is on.

    Returns:
      The log output as a string, or None if git log failed.
    """
    comp = '..'
    # NOTE(review): if rev1 is falsy, |cmd| is never assigned and the
    # cmd.append() calls below raise NameError — presumably callers
    # always pass a non-empty rev1; confirm.
    if rev1:
      revs = [rev1]
      if rev2:
        revs.extend([comp, rev2])
      cmd = ['log', ''.join(revs)]
    out = DiffColoring(self.config)
    if out.is_on and color:
      cmd.append('--color')
    if oneline:
      cmd.append('--oneline')

    try:
      log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
      if log.Wait() == 0:
        return log.stdout
    except GitError:
      # worktree may not exist if groups changed for example. In that case,
      # try in gitdir instead.
      if not os.path.exists(self.worktree):
        return self.bare_git.log(*cmd[1:])
      else:
        raise
    return None
|
|
|
|
|
|
|
|
def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True):
|
|
|
|
"""Get the list of logs from this revision to given revisionId"""
|
|
|
|
logs = {}
|
|
|
|
selfId = self.GetRevisionId(self._allrefs)
|
|
|
|
toId = toProject.GetRevisionId(toProject._allrefs)
|
|
|
|
|
|
|
|
logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color)
|
|
|
|
logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color)
|
|
|
|
return logs
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
class _GitGetByExec(object):
|
2013-10-12 00:03:19 +00:00
|
|
|
def __init__(self, project, bare, gitdir):
|
2008-10-21 14:00:00 +00:00
|
|
|
self._project = project
|
|
|
|
self._bare = bare
|
2013-10-12 00:03:19 +00:00
|
|
|
self._gitdir = gitdir
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
def LsOthers(self):
|
|
|
|
p = GitCommand(self._project,
|
|
|
|
['ls-files',
|
|
|
|
'-z',
|
|
|
|
'--others',
|
|
|
|
'--exclude-standard'],
|
|
|
|
bare = False,
|
2013-10-12 00:03:19 +00:00
|
|
|
gitdir=self._gitdir,
|
2008-10-21 14:00:00 +00:00
|
|
|
capture_stdout = True,
|
|
|
|
capture_stderr = True)
|
|
|
|
if p.Wait() == 0:
|
|
|
|
out = p.stdout
|
|
|
|
if out:
|
2012-10-25 03:23:11 +00:00
|
|
|
return out[:-1].split('\0') # pylint: disable=W1401
|
|
|
|
# Backslash is not anomalous
|
2008-10-21 14:00:00 +00:00
|
|
|
return []
|
|
|
|
|
|
|
|
  def DiffZ(self, name, *args):
    """Run a NUL-delimited diff command and parse its raw output.

    Args:
      name: the git subcommand to run (e.g. 'diff-index').
      *args: extra arguments appended after '-z'.

    Returns:
      A dict mapping changed path -> info record with attributes
      path, src_path, old_mode, new_mode, old_id, new_id, status, level.
    """
    cmd = [name]
    cmd.append('-z')
    cmd.extend(args)
    p = GitCommand(self._project,
                   cmd,
                   gitdir=self._gitdir,
                   bare = False,
                   capture_stdout = True,
                   capture_stderr = True)
    try:
      out = p.process.stdout.read()
      r = {}
      if out:
        # Raw output alternates ":<meta>" records and path names,
        # NUL-separated with a trailing NUL.
        # NOTE(review): out.next() is Python 2 only; Python 3 would need
        # next(out).
        out = iter(out[:-1].split('\0'))  # pylint: disable=W1401
        while out:
          try:
            info = out.next()
            path = out.next()
          except StopIteration:
            break

          class _Info(object):
            # One parsed diff record.
            def __init__(self, path, omode, nmode, oid, nid, state):
              self.path = path
              self.src_path = None
              self.old_mode = omode
              self.new_mode = nmode
              self.old_id = oid
              self.new_id = nid

              if len(state) == 1:
                self.status = state
                self.level = None
              else:
                # e.g. 'R095' -> status 'R', level '95' (leading zeros
                # stripped).
                self.status = state[:1]
                self.level = state[1:]
                while self.level.startswith('0'):
                  self.level = self.level[1:]

          # Drop the leading ':' then split the metadata fields.
          info = info[1:].split(' ')
          info = _Info(path, *info)
          if info.status in ('R', 'C'):
            # Renames/copies carry a second path: source then dest.
            info.src_path = info.path
            info.path = out.next()
          r[info.path] = info
      return r
    finally:
      p.Wait()
|
|
|
|
|
|
|
|
def GetHead(self):
|
2009-04-18 01:43:33 +00:00
|
|
|
if self._bare:
|
|
|
|
path = os.path.join(self._project.gitdir, HEAD)
|
|
|
|
else:
|
|
|
|
path = os.path.join(self._project.worktree, '.git', HEAD)
|
2012-11-16 01:33:11 +00:00
|
|
|
try:
|
|
|
|
fd = open(path, 'rb')
|
2014-03-09 17:20:02 +00:00
|
|
|
except IOError as e:
|
|
|
|
raise NoManifestException(path, str(e))
|
2009-04-18 21:48:03 +00:00
|
|
|
try:
|
|
|
|
line = fd.read()
|
|
|
|
finally:
|
|
|
|
fd.close()
|
2013-03-01 13:44:38 +00:00
|
|
|
try:
|
|
|
|
line = line.decode()
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
2009-04-18 01:43:33 +00:00
|
|
|
if line.startswith('ref: '):
|
|
|
|
return line[5:-1]
|
|
|
|
return line[:-1]
|
2008-10-21 14:00:00 +00:00
|
|
|
|
|
|
|
def SetHead(self, ref, message=None):
|
|
|
|
cmdv = []
|
|
|
|
if message is not None:
|
|
|
|
cmdv.extend(['-m', message])
|
|
|
|
cmdv.append(HEAD)
|
|
|
|
cmdv.append(ref)
|
|
|
|
self.symbolic_ref(*cmdv)
|
|
|
|
|
|
|
|
def DetachHead(self, new, message=None):
|
|
|
|
cmdv = ['--no-deref']
|
|
|
|
if message is not None:
|
|
|
|
cmdv.extend(['-m', message])
|
|
|
|
cmdv.append(HEAD)
|
|
|
|
cmdv.append(new)
|
|
|
|
self.update_ref(*cmdv)
|
|
|
|
|
|
|
|
def UpdateRef(self, name, new, old=None,
|
|
|
|
message=None,
|
|
|
|
detach=False):
|
|
|
|
cmdv = []
|
|
|
|
if message is not None:
|
|
|
|
cmdv.extend(['-m', message])
|
|
|
|
if detach:
|
|
|
|
cmdv.append('--no-deref')
|
|
|
|
cmdv.append(name)
|
|
|
|
cmdv.append(new)
|
|
|
|
if old is not None:
|
|
|
|
cmdv.append(old)
|
|
|
|
self.update_ref(*cmdv)
|
|
|
|
|
|
|
|
def DeleteRef(self, name, old=None):
|
|
|
|
if not old:
|
|
|
|
old = self.rev_parse(name)
|
|
|
|
self.update_ref('-d', name, old)
|
2009-04-18 03:58:02 +00:00
|
|
|
self._project.bare_ref.deleted(name)
|
2008-10-21 14:00:00 +00:00
|
|
|
|
2009-05-30 01:28:25 +00:00
|
|
|
  def rev_list(self, *args, **kw):
    """Run rev-list (or log with a pretty format) and return output lines.

    Args:
      *args: arguments passed to git rev-list / git log.
      **kw: 'format', if present, switches to `git log --pretty=format:...`;
          remaining kwargs are ignored here.

    Returns:
      A list of output lines with trailing newlines stripped.

    Raises:
      GitError: the command exited non-zero.
    """
    if 'format' in kw:
      cmdv = ['log', '--pretty=format:%s' % kw['format']]
    else:
      cmdv = ['rev-list']
    cmdv.extend(args)
    p = GitCommand(self._project,
                   cmdv,
                   bare = self._bare,
                   gitdir=self._gitdir,
                   capture_stdout = True,
                   capture_stderr = True)
    r = []
    for line in p.process.stdout:
      if line[-1] == '\n':
        line = line[:-1]
      r.append(line)
    if p.Wait() != 0:
      raise GitError('%s rev-list %s: %s' % (
                     self._project.name,
                     str(args),
                     p.stderr))
    return r
|
|
|
|
|
|
|
|
def __getattr__(self, name):
    """Allow arbitrary git commands using pythonic syntax.

    This allows you to do things like:
        git_obj.rev_parse('HEAD')

    Since we don't have a 'rev_parse' method defined, the __getattr__ will
    run.  We'll replace the '_' with a '-' and try to run a git command.
    Any other positional arguments will be passed to the git command, and the
    following keyword arguments are supported:
        config: An optional dict of git config options to be passed with '-c'.

    Args:
        name: The name of the git command to call.  Any '_' characters will
            be replaced with '-'.

    Returns:
        A callable object that will try to call git with the named command.
    """
    git_subcmd = name.replace('_', '-')

    def runner(*args, **kwargs):
        argv = []
        config = kwargs.pop('config', None)
        if kwargs:
            # Anything left after popping 'config' is unsupported.
            bad_key = next(iter(kwargs))
            raise TypeError('%s() got an unexpected keyword argument %r'
                            % (git_subcmd, bad_key))
        if config is not None:
            # 'git -c key=value' was introduced in git 1.7.2.
            if not git_require((1, 7, 2)):
                raise ValueError('cannot set config on command line for %s()'
                                 % git_subcmd)
            for key, val in config.items():
                argv.extend(['-c', '%s=%s' % (key, val)])
        argv.append(git_subcmd)
        argv.extend(args)
        proc = GitCommand(self._project,
                          argv,
                          bare=self._bare,
                          gitdir=self._gitdir,
                          capture_stdout=True,
                          capture_stderr=True)
        if proc.Wait() != 0:
            raise GitError('%s %s: %s' % (self._project.name,
                                          git_subcmd,
                                          proc.stderr))
        output = proc.stdout
        try:
            output = output.decode('utf-8')
        except AttributeError:
            # Already text; nothing to decode.
            pass
        # Strip a single trailing newline, but keep multi-line output intact.
        if output.endswith('\n') and output.index('\n') == len(output) - 1:
            return output[:-1]
        return output

    return runner
|
|
|
|
|
|
|
|
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
class _PriorSyncFailedError(Exception):
|
|
|
|
def __str__(self):
|
|
|
|
return 'prior sync failed; rebase still in progress'
|
|
|
|
|
|
|
|
class _DirtyError(Exception):
|
|
|
|
def __str__(self):
|
|
|
|
return 'contains uncommitted changes'
|
|
|
|
|
|
|
|
class _InfoMessage(object):
|
|
|
|
def __init__(self, project, text):
|
|
|
|
self.project = project
|
|
|
|
self.text = text
|
|
|
|
|
|
|
|
def Print(self, syncbuf):
|
|
|
|
syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
|
|
|
|
syncbuf.out.nl()
|
|
|
|
|
|
|
|
class _Failure(object):
|
|
|
|
def __init__(self, project, why):
|
|
|
|
self.project = project
|
|
|
|
self.why = why
|
|
|
|
|
|
|
|
def Print(self, syncbuf):
|
|
|
|
syncbuf.out.fail('error: %s/: %s',
|
|
|
|
self.project.relpath,
|
|
|
|
str(self.why))
|
|
|
|
syncbuf.out.nl()
|
|
|
|
|
|
|
|
class _Later(object):
|
|
|
|
def __init__(self, project, action):
|
|
|
|
self.project = project
|
|
|
|
self.action = action
|
|
|
|
|
|
|
|
def Run(self, syncbuf):
|
|
|
|
out = syncbuf.out
|
|
|
|
out.project('project %s/', self.project.relpath)
|
|
|
|
out.nl()
|
|
|
|
try:
|
|
|
|
self.action()
|
|
|
|
out.nl()
|
|
|
|
return True
|
2012-09-24 03:15:13 +00:00
|
|
|
except GitError:
|
Change repo sync to be more friendly when updating the tree
We now try to sync all projects that can be done safely first, before
we start rebasing user commits over the upstream. This has the nice
effect of making the local tree as close to the upstream as possible
before the user has to start resolving merge conflicts, as that extra
information in other projects may aid in the conflict resolution.
Informational output is buffered and delayed until calculation for
all projects has been done, so that the user gets one concise list
of notice messages, rather than it interrupting the progress meter.
Fast-forward output is now prefixed with the project header, so the
user can see which project that update is taking place in, and make
some relation of the diffstat back to the project name.
Rebase output is now prefixed with the project header, so that if
the rebase fails, the user can see which project we were operating
on and can try to address the failure themselves.
Since rebase sits on a detached HEAD, we now look for an in-progress
rebase during sync, so we can alert the user that the given project
is in a state we cannot handle.
Signed-off-by: Shawn O. Pearce <sop@google.com>
2009-04-16 18:21:18 +00:00
|
|
|
out.nl()
|
|
|
|
return False
|
|
|
|
|
|
|
|
class _SyncColoring(Coloring):
    """Coloring scheme for sync output (config section 'reposync')."""

    def __init__(self, config):
        super(_SyncColoring, self).__init__(config, 'reposync')
        # Printers for the three message classes emitted during sync.
        self.project = self.printer('header', attr='bold')
        self.info = self.printer('info')
        self.fail = self.printer('fail', fg='red')
|
|
|
|
|
|
|
|
class SyncBuffer(object):
    """Buffers sync messages and deferred actions for batched output.

    Informational and failure messages are collected so they can be
    printed together rather than interleaving with a progress meter.
    Deferred actions are queued in two waves and run by Finish().
    """

    def __init__(self, config, detach_head=False):
        self._messages = []      # queued _InfoMessage objects
        self._failures = []      # queued _Failure objects
        self._later_queue1 = []  # first wave of deferred _Later actions
        self._later_queue2 = []  # second wave; runs only if wave 1 succeeds

        self.out = _SyncColoring(config)
        self.out.redirect(sys.stderr)

        self.detach_head = detach_head
        self.clean = True  # flips to False on any recorded failure

    def info(self, project, fmt, *args):
        """Queue an informational message for later printing."""
        self._messages.append(_InfoMessage(project, fmt % args))

    def fail(self, project, err=None):
        """Queue a failure report and mark this sync as unclean."""
        self._failures.append(_Failure(project, err))
        self.clean = False

    def later1(self, project, what):
        """Queue a first-wave deferred action."""
        self._later_queue1.append(_Later(project, what))

    def later2(self, project, what):
        """Queue a second-wave deferred action."""
        self._later_queue2.append(_Later(project, what))

    def Finish(self):
        """Flush messages, run deferred actions, flush again.

        Returns:
            True if no failures were recorded.
        """
        self._PrintMessages()
        self._RunLater()
        self._PrintMessages()
        return self.clean

    def _RunLater(self):
        # Stop at the first queue whose actions did not all succeed.
        for queue_name in ('_later_queue1', '_later_queue2'):
            if not self._RunQueue(queue_name):
                return

    def _RunQueue(self, queue):
        """Run every action in the named queue; empty it on full success."""
        for item in getattr(self, queue):
            if not item.Run(self):
                self.clean = False
                return False
        setattr(self, queue, [])
        return True

    def _PrintMessages(self):
        # Informational messages first, then failures; then reset both.
        for msg in self._messages:
            msg.Print(self)
        for failure in self._failures:
            failure.Print(self)

        self._messages = []
        self._failures = []
|
|
|
|
|
|
|
|
|
2008-10-21 14:00:00 +00:00
|
|
|
class MetaProject(Project):
    """A special project housed under .repo."""

    def __init__(self, manifest, name, gitdir, worktree):
        Project.__init__(self,
                         manifest=manifest,
                         name=name,
                         gitdir=gitdir,
                         objdir=gitdir,  # meta projects keep objects in the gitdir itself
                         worktree=worktree,
                         remote=RemoteSpec('origin'),
                         relpath='.repo/%s' % name,
                         revisionExpr='refs/heads/master',
                         revisionId=None,
                         groups=None)

    def PreSync(self):
        """Point the sync revision at the current branch's merge target."""
        if not self.Exists:
            return
        branch = self.CurrentBranch
        if not branch:
            return
        merge_ref = self.GetBranch(branch).merge
        if merge_ref:
            self.revisionExpr = merge_ref
            self.revisionId = None

    def MetaBranchSwitch(self, target):
        """Prepare MetaProject for a manifest branch switch.

        Detaches HEAD and deletes the 'default' manifest branch,
        allowing a new branch to take over.

        Returns:
            True if the branch deletion succeeded.
        """
        # Detach first so the branch can be deleted out from under us.
        syncbuf = SyncBuffer(self.config, detach_head=True)
        self.Sync_LocalHalf(syncbuf)
        syncbuf.Finish()

        cmd = GitCommand(self,
                         ['update-ref', '-d', 'refs/heads/default'],
                         capture_stdout=True,
                         capture_stderr=True)
        return cmd.Wait() == 0

    @property
    def LastFetch(self):
        """mtime of FETCH_HEAD, or 0 if this project has never been fetched."""
        fetch_head = os.path.join(self.gitdir, 'FETCH_HEAD')
        try:
            return os.path.getmtime(fetch_head)
        except OSError:
            return 0

    @property
    def HasChanges(self):
        """Has the remote received new commits not yet checked out?"""
        if not self.remote or not self.revisionExpr:
            return False

        all_refs = self.bare_ref.all
        revid = self.GetRevisionId(all_refs)
        head = self.work_git.GetHead()
        if head.startswith(R_HEADS):
            # Resolve the symbolic head to a commit id, if the ref exists.
            try:
                head = all_refs[head]
            except KeyError:
                head = None

        if revid == head:
            return False
        if self._revlist(not_rev(HEAD), revid):
            return True
        return False
|