mirror of https://gerrit.googlesource.com/git-repo
Compare commits
13 Commits
ceea368e88
b660539c4a
752371d91b
1a68dc58eb
df5ee52050
fab96c68e3
bf1fbb20ab
29472463ba
c325dc35f6
f322b9abb4
db728cd866
c4657969eb
7b947de1ee
error.py (9 changed lines)

@@ -57,6 +57,15 @@ class UploadError(Exception):
  def __str__(self):
    return self.reason

class DownloadError(Exception):
  """Cannot download a repository.
  """
  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason

class NoSuchProjectError(Exception):
  """A specified project does not exist in the work tree.
  """
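The new DownloadError deliberately mirrors UploadError: it carries a human-readable reason and prints as that reason. A minimal sketch of how a caller might report it (the failing call is illustrative, not part of the diff):

import sys

class DownloadError(Exception):
  """Cannot download a repository."""
  def __init__(self, reason):
    self.reason = reason
  def __str__(self):
    return self.reason

try:
  # stand-in for a failed bundle or manifest download
  raise DownloadError('HTTP 404 (Not Found)')
except DownloadError, e:
  # __str__ returns e.reason, so the message prints cleanly
  print >>sys.stderr, 'error: %s' % str(e)
  sys.exit(1)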
@@ -26,7 +26,6 @@ import time
import urllib2

from signal import SIGTERM
from urllib2 import urlopen, HTTPError
from error import GitError, UploadError
from trace import Trace

@@ -491,6 +490,12 @@ def close_ssh():
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/')

def GetSchemeFromUrl(url):
  m = URI_ALL.match(url)
  if m:
    return m.group(1)
  return None

def _preconnect(url):
  m = URI_ALL.match(url)
  if m:
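GetSchemeFromUrl only recognizes URLs written as scheme://host/...; scp-style addresses such as user@host:path do not match URI_ALL and yield None. A quick standalone sketch of that behaviour, with the pattern copied from above and made-up URLs:

import re

URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/')

def GetSchemeFromUrl(url):
  m = URI_ALL.match(url)
  if m:
    return m.group(1)
  return None

print GetSchemeFromUrl('https://android.googlesource.com/platform/manifest')  # https
print GetSchemeFromUrl('git://example.com/repo.git')                          # git
print GetSchemeFromUrl('user@example.com:repo.git')                           # None (scp syntax)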
@@ -570,9 +575,19 @@ class Remote(object):
        self._review_protocol = info[0]
        self._review_host = info[1]
        self._review_port = info[2]
      elif 'REPO_HOST_PORT_INFO' in os.environ:
        info = os.environ['REPO_HOST_PORT_INFO']
        self._review_protocol = 'ssh'
        self._review_host = info.split(" ")[0]
        self._review_port = info.split(" ")[1]

        REVIEW_CACHE[u] = (
          self._review_protocol,
          self._review_host,
          self._review_port)
      else:
        try:
          info = urlopen(u).read()
          info = urllib2.urlopen(u).read()
          if info == 'NOT_AVAILABLE':
            raise UploadError('%s: SSH disabled' % self.review)
          if '<' in info:

@@ -584,15 +599,15 @@ class Remote(object):
            self._review_protocol = 'ssh'
            self._review_host = info.split(" ")[0]
            self._review_port = info.split(" ")[1]
        except urllib2.URLError, e:
          raise UploadError('%s: %s' % (self.review, e.reason[1]))
        except HTTPError, e:
        except urllib2.HTTPError, e:
          if e.code == 404:
            self._review_protocol = 'http-post'
            self._review_host = None
            self._review_port = None
          else:
            raise UploadError('Upload over ssh unavailable')
            raise UploadError('Upload over SSH unavailable')
        except urllib2.URLError, e:
          raise UploadError('%s: %s' % (self.review, str(e)))

        REVIEW_CACHE[u] = (
          self._review_protocol,
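The REPO_HOST_PORT_INFO branch lets an environment variable short-circuit the HTTP probe of the review server: the value is read as an SSH host and port separated by a single space, and the answer is cached per review URL exactly like a probed one. A tiny sketch of that parsing (the value shown is an example, not a real server):

import os

os.environ['REPO_HOST_PORT_INFO'] = 'gerrit.example.com 29418'

info = os.environ['REPO_HOST_PORT_INFO']
review_protocol = 'ssh'
review_host = info.split(" ")[0]   # 'gerrit.example.com'
review_port = info.split(" ")[1]   # '29418'
print review_protocol, review_host, review_port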
main.py (26 changed lines)

@@ -37,6 +37,7 @@ from command import InteractiveCommand
from command import MirrorSafeCommand
from command import PagedCommand
from editor import Editor
from error import DownloadError
from error import ManifestInvalidRevisionError
from error import NoSuchProjectError
from error import RepoChangedException

@@ -143,6 +144,9 @@ class _Repo(object):
      else:
        print >>sys.stderr, 'real\t%dh%dm%.3fs' \
          % (hours, minutes, seconds)
    except DownloadError, e:
      print >>sys.stderr, 'error: %s' % str(e)
      sys.exit(1)
    except ManifestInvalidRevisionError, e:
      print >>sys.stderr, 'error: %s' % str(e)
      sys.exit(1)

@@ -269,6 +273,24 @@ class _UserAgentHandler(urllib2.BaseHandler):
    req.add_header('User-Agent', _UserAgent())
    return req

class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
  def http_error_auth_reqed(self, authreq, host, req, headers):
    try:
      old_add_header = req.add_header
      def _add_header(name, val):
        val = val.replace('\n', '')
        old_add_header(name, val)
      req.add_header = _add_header
      return urllib2.AbstractBasicAuthHandler.http_error_auth_reqed(
        self, authreq, host, req, headers)
    except:
      reset = getattr(self, 'reset_retry_count', None)
      if reset is not None:
        reset()
      elif getattr(self, 'retried', None):
        self.retried = 0
      raise

def init_http():
  handlers = [_UserAgentHandler()]
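The add_header wrapper in _BasicAuthHandler exists because a credential read from ~/.netrc can carry a trailing newline, and a raw '\n' inside the Authorization header corrupts the HTTP request; the except clause then resets urllib2's retry bookkeeping (reset_retry_count where available, the retried attribute otherwise) before re-raising. A standalone sketch of the header-sanitizing part (URL and credential are placeholders):

import urllib2

req = urllib2.Request('http://example.com/')

old_add_header = req.add_header
def _add_header(name, val):
  # strip embedded newlines so a trailing '\n' from netrc cannot
  # break the Authorization header
  val = val.replace('\n', '')
  old_add_header(name, val)
req.add_header = _add_header

req.add_header('Authorization', 'Basic dXNlcjpwYXNz\n')
print req.get_header('Authorization')   # 'Basic dXNlcjpwYXNz'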
@@ -281,7 +303,9 @@ def init_http():
      mgr.add_password(None, 'https://%s/' % host, p[0], p[2])
    except netrc.NetrcParseError:
      pass
    handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
  except IOError:
    pass
  handlers.append(_BasicAuthHandler(mgr))

  if 'http_proxy' in os.environ:
    url = os.environ['http_proxy']
@@ -14,7 +14,9 @@
# limitations under the License.

import os
import re
import sys
import urlparse
import xml.dom.minidom

from git_config import GitConfig, IsId

@@ -24,6 +26,9 @@ from error import ManifestParseError
MANIFEST_FILE_NAME = 'manifest.xml'
LOCAL_MANIFEST_NAME = 'local_manifest.xml'

urlparse.uses_relative.extend(['ssh', 'git'])
urlparse.uses_netloc.extend(['ssh', 'git'])

class _Default(object):
  """Project defaults within the manifest."""

@@ -35,16 +40,26 @@ class _XmlRemote(object):
  def __init__(self,
               name,
               fetch=None,
               manifestUrl=None,
               review=None):
    self.name = name
    self.fetchUrl = fetch
    self.manifestUrl = manifestUrl
    self.reviewUrl = review
    self.resolvedFetchUrl = self._resolveFetchUrl()

  def _resolveFetchUrl(self):
    url = self.fetchUrl.rstrip('/')
    manifestUrl = self.manifestUrl.rstrip('/')
    # urljoin will get confused if there is no scheme in the base url
    # ie, if manifestUrl is of the form <hostname:port>
    if manifestUrl.find(':') != manifestUrl.find('/') - 1:
      manifestUrl = 'gopher://' + manifestUrl
    url = urlparse.urljoin(manifestUrl, url)
    return re.sub(r'^gopher://', '', url)

  def ToRemoteSpec(self, projectName):
    url = self.fetchUrl
    while url.endswith('/'):
      url = url[:-1]
    url += '/%s.git' % projectName
    url = self.resolvedFetchUrl + '/' + projectName
    return RemoteSpec(self.name, url, self.reviewUrl)

class XmlManifest(object):
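_resolveFetchUrl is what lets a manifest say fetch=".." and have it resolved against the URL the manifest project itself was cloned from; the throwaway gopher:// prefix only exists because urlparse.urljoin refuses to treat a bare host:port base as absolute. A worked sketch of the same resolution with example URLs:

import re
import urlparse

urlparse.uses_relative.extend(['ssh', 'git'])
urlparse.uses_netloc.extend(['ssh', 'git'])

def resolve(fetch, manifestUrl):
  url = fetch.rstrip('/')
  manifestUrl = manifestUrl.rstrip('/')
  # no real scheme (host:port form)? borrow one so urljoin works
  if manifestUrl.find(':') != manifestUrl.find('/') - 1:
    manifestUrl = 'gopher://' + manifestUrl
  url = urlparse.urljoin(manifestUrl, url)
  return re.sub(r'^gopher://', '', url)

print resolve('..', 'https://android.googlesource.com/platform/manifest')
# -> https://android.googlesource.com/
print resolve('git://other.example.com/', 'https://android.googlesource.com/platform/manifest')
# -> git://other.example.com  (absolute URLs pass through untouched)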
@@ -357,7 +372,7 @@ class XmlManifest(object):
      raise ManifestParseError, 'refusing to mirror %s' % m_url

    if self._default and self._default.remote:
      url = self._default.remote.fetchUrl
      url = self._default.remote.resolvedFetchUrl
      if not url.endswith('/'):
        url += '/'
      if m_url.startswith(url):

@@ -366,7 +381,8 @@

    if name is None:
      s = m_url.rindex('/') + 1
      remote = _XmlRemote('origin', m_url[:s])
      manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
      remote = _XmlRemote('origin', m_url[:s], manifestUrl)
      name = m_url[s:]

    if name.endswith('.git'):

@@ -394,7 +410,8 @@
    review = node.getAttribute('review')
    if review == '':
      review = None
    return _XmlRemote(name, fetch, review)
    manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
    return _XmlRemote(name, fetch, manifestUrl, review)

  def _ParseDefault(self, node):
    """
project.py (221 changed lines)

@@ -16,20 +16,36 @@ import traceback
import errno
import filecmp
import os
import random
import re
import shutil
import stat
import sys
import time
import urllib2

try:
  import threading as _threading
except ImportError:
  import dummy_threading as _threading

try:
  from os import SEEK_END
except ImportError:
  SEEK_END = 2

from color import Coloring
from git_command import GitCommand
from git_config import GitConfig, IsId
from git_config import GitConfig, IsId, GetSchemeFromUrl
from error import DownloadError
from error import GitError, HookError, ImportError, UploadError
from error import ManifestInvalidRevisionError
from progress import Progress

from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M

_urllib_lock = _threading.Lock()

def _lwrite(path, content):
  lock = '%s.lock' % path
@@ -884,19 +900,33 @@ class Project(object):

  ## Sync ##

  def Sync_NetworkHalf(self, quiet=False):
  def Sync_NetworkHalf(self, quiet=False, is_new=None):
    """Perform only the network IO portion of the sync process.
       Local working directory/branch state is not affected.
    """
    is_new = not self.Exists
    if is_new is None:
      is_new = not self.Exists
    if is_new:
      if not quiet:
        print >>sys.stderr
        print >>sys.stderr, 'Initializing project %s ...' % self.name
      self._InitGitDir()

    self._InitRemote()
    if not self._RemoteFetch(initial=is_new, quiet=quiet):

    if is_new:
      alt = os.path.join(self.gitdir, 'objects/info/alternates')
      try:
        fd = open(alt, 'rb')
        try:
          alt_dir = fd.readline().rstrip()
        finally:
          fd.close()
      except IOError:
        alt_dir = None
    else:
      alt_dir = None

    if alt_dir is None and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
      is_new = False

    if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir):
      return False

    #Check that the requested ref was found after fetch
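For a brand-new project the sync now looks at .git/objects/info/alternates before fetching: if the object store is already borrowed from a local mirror there is no point downloading a clone.bundle. A small sketch of that first-line read (the path is a placeholder):

import os

def read_alt_dir(gitdir):
  # the first line of objects/info/alternates names a shared object store, if any
  alt = os.path.join(gitdir, 'objects/info/alternates')
  try:
    fd = open(alt, 'rb')
    try:
      return fd.readline().rstrip() or None
    finally:
      fd.close()
  except IOError:
    return None

print read_alt_dir('/tmp/no-such-project.git')   # None when there is no alternates file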
@@ -1307,29 +1337,19 @@ class Project(object):

  def _RemoteFetch(self, name=None, tag=None,
                   initial=False,
                   quiet=False):
                   quiet=False,
                   alt_dir=None):
    if not name:
      name = self.remote.name

    ssh_proxy = False
    if self.GetRemote(name).PreConnectFetch():
    remote = self.GetRemote(name)
    if remote.PreConnectFetch():
      ssh_proxy = True

    if initial:
      alt = os.path.join(self.gitdir, 'objects/info/alternates')
      try:
        fd = open(alt, 'rb')
        try:
          ref_dir = fd.readline()
          if ref_dir and ref_dir.endswith('\n'):
            ref_dir = ref_dir[:-1]
        finally:
          fd.close()
      except IOError, e:
        ref_dir = None

      if ref_dir and 'objects' == os.path.basename(ref_dir):
        ref_dir = os.path.dirname(ref_dir)
      if alt_dir and 'objects' == os.path.basename(alt_dir):
        ref_dir = os.path.dirname(alt_dir)
      packed_refs = os.path.join(self.gitdir, 'packed-refs')
      remote = self.GetRemote(name)

@@ -1365,9 +1385,8 @@ class Project(object):
          old_packed += line

      _lwrite(packed_refs, tmp_packed)

    else:
      ref_dir = None
      alt_dir = None

    cmd = ['fetch']
@@ -1386,21 +1405,159 @@ class Project(object):
      cmd.append('tag')
      cmd.append(tag)

    ok = GitCommand(self,
                    cmd,
                    bare = True,
                    ssh_proxy = ssh_proxy).Wait() == 0
    ok = False
    for i in range(2):
      if GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait() == 0:
        ok = True
        break
      time.sleep(random.randint(30, 45))

    if initial:
      if ref_dir:
      if alt_dir:
        if old_packed != '':
          _lwrite(packed_refs, old_packed)
        else:
          os.remove(packed_refs)
      self.bare_git.pack_refs('--all', '--prune')

    return ok
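The fetch is now tried twice, with a randomized 30-45 second pause between attempts, so a transient server hiccup during a large sync no longer fails the project outright. The shape of that loop as a standalone sketch (flaky_fetch stands in for GitCommand(...).Wait() == 0):

import random
import time

def flaky_fetch():
  # stand-in for GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait() == 0
  return random.random() > 0.5

ok = False
for _ in range(2):                       # at most two attempts
  if flaky_fetch():
    ok = True
    break
  time.sleep(random.randint(30, 45))     # back off before the retry

print ok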
  def _ApplyCloneBundle(self, initial=False, quiet=False):
    if initial and self.manifest.manifestProject.config.GetString('repo.depth'):
      return False

    remote = self.GetRemote(self.remote.name)
    bundle_url = remote.url + '/clone.bundle'
    bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
    if GetSchemeFromUrl(bundle_url) not in ('http', 'https'):
      return False

    bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
    bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')

    exist_dst = os.path.exists(bundle_dst)
    exist_tmp = os.path.exists(bundle_tmp)

    if not initial and not exist_dst and not exist_tmp:
      return False

    if not exist_dst:
      exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet)
    if not exist_dst:
      return False

    cmd = ['fetch']
    if quiet:
      cmd.append('--quiet')
    if not self.worktree:
      cmd.append('--update-head-ok')
    cmd.append(bundle_dst)
    for f in remote.fetch:
      cmd.append(str(f))
    cmd.append('refs/tags/*:refs/tags/*')

    ok = GitCommand(self, cmd, bare=True).Wait() == 0
    if os.path.exists(bundle_dst):
      os.remove(bundle_dst)
    if os.path.exists(bundle_tmp):
      os.remove(bundle_tmp)
    return ok
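_ApplyCloneBundle bails out early in three situations: a shallow clone was requested (repo.depth), the rewritten bundle URL is not plain http or https, or the project already exists and there is no leftover bundle to finish. Those guards, restated as a standalone sketch so the conditions can be tested in isolation:

def should_try_clone_bundle(initial, scheme, depth_set, exist_dst, exist_tmp):
  # mirrors the early returns in _ApplyCloneBundle above
  if initial and depth_set:
    return False        # shallow clones cannot use a full-history bundle
  if scheme not in ('http', 'https'):
    return False        # git:// and ssh:// servers cannot serve the static file
  if not initial and not exist_dst and not exist_tmp:
    return False        # nothing to resume and the project is already cloned
  return True

print should_try_clone_bundle(True, 'https', False, False, False)   # True
print should_try_clone_bundle(True, 'ssh', False, False, False)     # False
print should_try_clone_bundle(False, 'https', False, False, True)   # True (resume leftover .tmp)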
  def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet):
    keep = True
    done = False
    dest = open(tmpPath, 'a+b')
    try:
      dest.seek(0, SEEK_END)
      pos = dest.tell()

      _urllib_lock.acquire()
      try:
        req = urllib2.Request(srcUrl)
        if pos > 0:
          req.add_header('Range', 'bytes=%d-' % pos)

        try:
          r = urllib2.urlopen(req)
        except urllib2.HTTPError, e:
          def _content_type():
            try:
              return e.info()['content-type']
            except:
              return None

          if e.code == 404:
            keep = False
            return False
          elif _content_type() == 'text/plain':
            try:
              msg = e.read()
              if len(msg) > 0 and msg[-1] == '\n':
                msg = msg[0:-1]
              msg = ' (%s)' % msg
            except:
              msg = ''
          else:
            try:
              from BaseHTTPServer import BaseHTTPRequestHandler
              res = BaseHTTPRequestHandler.responses[e.code]
              msg = ' (%s: %s)' % (res[0], res[1])
            except:
              msg = ''
          raise DownloadError('HTTP %s%s' % (e.code, msg))
        except urllib2.URLError, e:
          raise DownloadError('%s: %s ' % (req.get_host(), str(e)))
      finally:
        _urllib_lock.release()

      p = None
      try:
        size = r.headers['content-length']
        unit = 1 << 10

        if size and not quiet:
          if size > 1024 * 1.3:
            unit = 1 << 20
            desc = 'MB'
          else:
            desc = 'KB'
          p = Progress(
            'Downloading %s' % self.relpath,
            int(size) / unit,
            units=desc)
          if pos > 0:
            p.update(pos / unit)

        s = 0
        while True:
          d = r.read(8192)
          if d == '':
            done = True
            return True
          dest.write(d)
          if p:
            s += len(d)
            if s >= unit:
              p.update(s / unit)
              s = s % unit
        if p:
          if s >= unit:
            p.update(s / unit)
          else:
            p.update(1)
      finally:
        r.close()
        if p:
          p.end()
    finally:
      dest.close()

    if os.path.exists(dstPath):
      os.remove(dstPath)
    if done:
      os.rename(tmpPath, dstPath)
    elif not keep:
      os.remove(tmpPath)

  def _Checkout(self, rev, quiet=False):
    cmd = ['checkout']
    if quiet:
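_FetchBundle opens its temporary file in append mode and, when a previous interrupted download left bytes behind, sends a Range header so the server only returns the remainder; a 404 simply means the server publishes no bundle and the caller falls back to a normal git fetch. A standalone sketch of the resume request (URL and path are placeholders):

import os
import urllib2

srcUrl = 'https://example.com/tools/repo/clone.bundle'   # placeholder
tmpPath = '/tmp/clone.bundle.tmp'                        # placeholder

dest = open(tmpPath, 'a+b')
try:
  dest.seek(0, os.SEEK_END)
  pos = dest.tell()

  req = urllib2.Request(srcUrl)
  if pos > 0:
    # resume: only ask for the bytes we do not have yet
    req.add_header('Range', 'bytes=%d-' % pos)

  try:
    r = urllib2.urlopen(req)
    try:
      while True:
        buf = r.read(8192)
        if buf == '':
          break
        dest.write(buf)
    finally:
      r.close()
  except urllib2.HTTPError, e:
    if e.code != 404:
      raise
    # 404: no bundle on this server; the caller falls back to git fetch
finally:
  dest.close()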
repo (103 changed lines)

@@ -28,7 +28,7 @@ if __name__ == '__main__':
  del magic

# increment this whenever we make important changes to this script
VERSION = (1, 12)
VERSION = (1, 13)

# increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (1,0)

@@ -91,6 +91,7 @@ import re
import readline
import subprocess
import sys
import urllib2

home_dot_repo = os.path.expanduser('~/.repoconfig')
gpg_dir = os.path.join(home_dot_repo, 'gnupg')

@@ -187,10 +188,6 @@ def _Init(args):
  else:
    can_verify = True

  if not opt.quiet:
    print >>sys.stderr, 'Getting repo ...'
    print >>sys.stderr, ' from %s' % url

  dst = os.path.abspath(os.path.join(repodir, S_repo))
  _Clone(url, dst, opt.quiet)
@@ -300,15 +297,42 @@ def _SetConfig(local, name, value):
    raise CloneFailure()


def _Fetch(local, quiet, *args):
def _InitHttp():
  handlers = []

  mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
  try:
    import netrc
    n = netrc.netrc()
    for host in n.hosts:
      p = n.hosts[host]
      mgr.add_password(None, 'http://%s/' % host, p[0], p[2])
      mgr.add_password(None, 'https://%s/' % host, p[0], p[2])
  except:
    pass
  handlers.append(urllib2.HTTPBasicAuthHandler(mgr))

  if 'http_proxy' in os.environ:
    url = os.environ['http_proxy']
    handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))
  if 'REPO_CURL_VERBOSE' in os.environ:
    handlers.append(urllib2.HTTPHandler(debuglevel=1))
    handlers.append(urllib2.HTTPSHandler(debuglevel=1))
  urllib2.install_opener(urllib2.build_opener(*handlers))

def _Fetch(url, local, src, quiet):
  if not quiet:
    print >>sys.stderr, 'Get %s' % url

  cmd = [GIT, 'fetch']
  if quiet:
    cmd.append('--quiet')
    err = subprocess.PIPE
  else:
    err = None
  cmd.extend(args)
  cmd.append('origin')
  cmd.append(src)
  cmd.append('+refs/heads/*:refs/remotes/origin/*')
  cmd.append('refs/tags/*:refs/tags/*')

  proc = subprocess.Popen(cmd, cwd = local, stderr = err)
  if err:
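The launcher now installs its own urllib2 opener before touching the network: ~/.netrc credentials feed a basic-auth handler, http_proxy adds a proxy handler, and REPO_CURL_VERBOSE switches on urllib2's header tracing, much like curl -v. A trimmed, self-contained sketch of the proxy and tracing parts (environment values shown are examples):

import os
import urllib2

handlers = []

if 'http_proxy' in os.environ:
  url = os.environ['http_proxy']         # e.g. http://proxy.example.com:8080
  handlers.append(urllib2.ProxyHandler({'http': url, 'https': url}))

if 'REPO_CURL_VERBOSE' in os.environ:
  # debuglevel=1 makes urllib2 print request and response headers to stdout
  handlers.append(urllib2.HTTPHandler(debuglevel=1))
  handlers.append(urllib2.HTTPSHandler(debuglevel=1))

urllib2.install_opener(urllib2.build_opener(*handlers))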
@@ -317,6 +341,62 @@ def _Fetch(local, quiet, *args):
  if proc.wait() != 0:
    raise CloneFailure()

def _DownloadBundle(url, local, quiet):
  if not url.endswith('/'):
    url += '/'
  url += 'clone.bundle'

  proc = subprocess.Popen(
    [GIT, 'config', '--get-regexp', 'url.*.insteadof'],
    cwd = local,
    stdout = subprocess.PIPE)
  for line in proc.stdout:
    m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
    if m:
      new_url = m.group(1)
      old_url = m.group(2)
      if url.startswith(old_url):
        url = new_url + url[len(old_url):]
        break
  proc.stdout.close()
  proc.wait()

  if not url.startswith('http:') and not url.startswith('https:'):
    return False

  dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
  try:
    try:
      r = urllib2.urlopen(url)
    except urllib2.HTTPError, e:
      if e.code == 404:
        return False
      print >>sys.stderr, 'fatal: Cannot get %s' % url
      print >>sys.stderr, 'fatal: HTTP error %s' % e.code
      raise CloneFailure()
    except urllib2.URLError, e:
      print >>sys.stderr, 'fatal: Cannot get %s' % url
      print >>sys.stderr, 'fatal: error %s' % e.reason
      raise CloneFailure()
    try:
      if not quiet:
        print >>sys.stderr, 'Get %s' % url
      while True:
        buf = r.read(8192)
        if buf == '':
          return True
        dest.write(buf)
    finally:
      r.close()
  finally:
    dest.close()

def _ImportBundle(local):
  path = os.path.join(local, '.git', 'clone.bundle')
  try:
    _Fetch(local, local, path, True)
  finally:
    os.remove(path)

def _Clone(url, local, quiet):
  """Clones a git repository to a new subdirectory of repodir
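Because the launcher has no GitConfig helper of its own, _DownloadBundle shells out to git config --get-regexp and applies any url.*.insteadof rewrite by hand before checking that the bundle URL is reachable over http(s). The parsing it does on one line of that output, as a standalone sketch with a fabricated rewrite rule:

import re

# one line of `git config --get-regexp url.*.insteadof` output (made up)
line = 'url.https://mirror.example.com/.insteadof git://example.com/'
url = 'git://example.com/tools/repo/clone.bundle'

m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
if m:
  new_url = m.group(1)            # https://mirror.example.com/
  old_url = m.group(2)            # git://example.com/
  if url.startswith(old_url):
    url = new_url + url[len(old_url):]

print url                                                    # https://mirror.example.com/tools/repo/clone.bundle
print url.startswith('http:') or url.startswith('https:')   # True -> worth trying urllib2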
@@ -344,11 +424,14 @@ def _Clone(url, local, quiet):
    print >>sys.stderr, 'fatal: could not create %s' % local
    raise CloneFailure()

  _InitHttp()
  _SetConfig(local, 'remote.origin.url', url)
  _SetConfig(local, 'remote.origin.fetch',
             '+refs/heads/*:refs/remotes/origin/*')
  _Fetch(local, quiet)
  _Fetch(local, quiet, '--tags')
  if _DownloadBundle(url, local, quiet):
    _ImportBundle(local)
  else:
    _Fetch(url, local, 'origin', quiet)


def _Verify(cwd, branch, quiet):
@@ -165,6 +165,7 @@ See 'repo help --all' for a complete list of recognized commands.
        print >>sys.stderr, "repo: '%s' is not a repo command." % name
        sys.exit(1)

      cmd.manifest = self.manifest
      self._PrintCommandHelp(cmd)

    else:
@@ -21,6 +21,7 @@ from color import Coloring
from command import InteractiveCommand, MirrorSafeCommand
from error import ManifestParseError
from project import SyncBuffer
from git_config import GitConfig
from git_command import git_require, MIN_GIT_VERSION

class Init(InteractiveCommand, MirrorSafeCommand):

@@ -108,8 +109,8 @@ to update the working directory files.
      sys.exit(1)

    if not opt.quiet:
      print >>sys.stderr, 'Getting manifest ...'
      print >>sys.stderr, ' from %s' % opt.manifest_url
      print >>sys.stderr, 'Get %s' \
        % GitConfig.ForUser().UrlInsteadOf(opt.manifest_url)
    m._InitGitDir()

    if opt.manifest_branch:

@@ -138,7 +139,7 @@ to update the working directory files.
      print >>sys.stderr, 'fatal: --mirror not supported on existing client'
      sys.exit(1)

    if not m.Sync_NetworkHalf():
    if not m.Sync_NetworkHalf(is_new=is_new):
      r = m.GetRemote(m.remote.name)
      print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
@@ -136,7 +136,6 @@ later is required to fix a server side protocol bug.
                 help='be more quiet')
    p.add_option('-j','--jobs',
                 dest='jobs', action='store', type='int',
                 default=self.jobs,
                 help="projects to fetch simultaneously (default %d)" % self.jobs)
    if show_smart:
      p.add_option('-s', '--smart-sync',

@@ -196,15 +195,11 @@ later is required to fix a server side protocol bug.

        fetched.add(project.gitdir)
        pm.update()
      except BaseException, e:
        # Notify the _Fetch() function about all errors.
      except _FetchError:
        err_event.set()

        # If we got our own _FetchError, we don't want a stack trace.
        # However, if we got something else (something in Sync_NetworkHalf?),
        # we'd like one (so re-raise after we've set err_event).
        if not isinstance(e, _FetchError):
          raise
      except:
        err_event.set()
        raise
      finally:
        if did_lock:
          lock.release()

@@ -401,6 +396,8 @@ uncommitted changes are present' % project.relpath
    if not syncbuf.Finish():
      sys.exit(1)

    self.manifest._Unload()
    if opt.jobs is None:
      self.jobs = self.manifest.default.sync_j
    all = self.GetProjects(args, missing_ok=True)

    if not opt.local_only:
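Both shapes of the fetch worker's exception handling shown above set the shared error flag; the _FetchError / bare-except form separates the expected per-project failure (flag only, no traceback) from anything unexpected, which is re-raised after flagging so a stack trace still appears. A minimal sketch of that pattern with a plain threading.Event standing in for err_event:

import threading

class _FetchError(Exception):
  """Expected failure signalled by the worker itself."""

err_event = threading.Event()

def fetch_worker(fn):
  try:
    fn()
  except _FetchError:
    err_event.set()      # expected: flag the failure, no traceback
  except:
    err_event.set()      # unexpected: flag it, then let it propagate
    raise

def failing_project():
  raise _FetchError()

fetch_worker(failing_project)
print err_event.is_set()   # True, and no traceback was printed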