Merge "Support smart-sync through persistent-http[s]"

Dan Willemsen 2015-08-19 23:36:35 +00:00 committed by Gerrit Code Review
commit 7c9263bce0
3 changed files with 145 additions and 38 deletions

git_config.py

@@ -15,6 +15,8 @@
 from __future__ import print_function
 
+import contextlib
+import errno
 import json
 import os
 import re
@@ -502,6 +504,43 @@ def GetSchemeFromUrl(url):
     return m.group(1)
   return None
 
+
+@contextlib.contextmanager
+def GetUrlCookieFile(url, quiet):
+  if url.startswith('persistent-'):
+    try:
+      p = subprocess.Popen(
+          ['git-remote-persistent-https', '-print_config', url],
+          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+          stderr=subprocess.PIPE)
+      try:
+        cookieprefix = 'http.cookiefile='
+        proxyprefix = 'http.proxy='
+        cookiefile = None
+        proxy = None
+        for line in p.stdout:
+          line = line.strip()
+          if line.startswith(cookieprefix):
+            cookiefile = line[len(cookieprefix):]
+          if line.startswith(proxyprefix):
+            proxy = line[len(proxyprefix):]
+        # Leave subprocess open, as cookie file may be transient.
+        if cookiefile or proxy:
+          yield cookiefile, proxy
+          return
+      finally:
+        p.stdin.close()
+        if p.wait():
+          err_msg = p.stderr.read()
+          if ' -print_config' in err_msg:
+            pass  # Persistent proxy doesn't support -print_config.
+          elif not quiet:
+            print(err_msg, file=sys.stderr)
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        pass  # No persistent proxy.
+      raise
+  yield GitConfig.ForUser().GetString('http.cookiefile'), None
+
+
 def _preconnect(url):
   m = URI_ALL.match(url)
   if m:
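For reference, the new helper parses git-config style key=value pairs from the stdout of git-remote-persistent-https -print_config. A minimal sketch of that handshake, assuming the helper binary is on PATH; the URL and the sample values in the comments are hypothetical, not real output:

# Sketch only: dump the config lines that GetUrlCookieFile parses.
import subprocess

url = 'persistent-https://android.example.com/a'  # hypothetical URL
p = subprocess.Popen(
    ['git-remote-persistent-https', '-print_config', url],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
    universal_newlines=True)  # text mode so lines are str on Python 3
for line in p.stdout:
  # Expect git-config style pairs, e.g. (values illustrative):
  #   http.cookiefile=/some/tmp/cookie/file
  #   http.proxy=localhost:8080
  print(line.strip())
p.stdin.close()
p.wait()

Note that GetUrlCookieFile deliberately yields from inside the try/finally instead of collecting the values and returning: the subprocess is left open while the caller works, because the cookie file backing a persistent-* URL may be transient.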

project.py

@@ -13,7 +13,6 @@
 # limitations under the License.
 
 from __future__ import print_function
-import contextlib
 import errno
 import filecmp
 import glob
@@ -31,7 +30,7 @@ import traceback
 
 from color import Coloring
 from git_command import GitCommand, git_require
-from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
+from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, ID_RE
 from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
@@ -2030,7 +2029,7 @@ class Project(object):
         os.remove(tmpPath)
     if 'http_proxy' in os.environ and 'darwin' == sys.platform:
       cmd += ['--proxy', os.environ['http_proxy']]
-    with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile:
+    with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
       if cookiefile:
         cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
       if srcUrl.startswith('persistent-'):
@@ -2078,40 +2077,6 @@
     except OSError:
       return False
 
-  @contextlib.contextmanager
-  def _GetBundleCookieFile(self, url, quiet):
-    if url.startswith('persistent-'):
-      try:
-        p = subprocess.Popen(
-            ['git-remote-persistent-https', '-print_config', url],
-            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        try:
-          prefix = 'http.cookiefile='
-          cookiefile = None
-          for line in p.stdout:
-            line = line.strip()
-            if line.startswith(prefix):
-              cookiefile = line[len(prefix):]
-              break
-          # Leave subprocess open, as cookie file may be transient.
-          if cookiefile:
-            yield cookiefile
-            return
-        finally:
-          p.stdin.close()
-          if p.wait():
-            err_msg = p.stderr.read()
-            if ' -print_config' in err_msg:
-              pass  # Persistent proxy doesn't support -print_config.
-            elif not quiet:
-              print(err_msg, file=sys.stderr)
-      except OSError as e:
-        if e.errno == errno.ENOENT:
-          pass  # No persistent proxy.
-        raise
-    yield GitConfig.ForUser().GetString('http.cookiefile')
-
   def _Checkout(self, rev, quiet=False):
     cmd = ['checkout']
     if quiet:
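On the caller side, the per-class helper is gone and everything goes through the shared GetUrlCookieFile, which now yields a (cookiefile, proxy) pair. A hedged sketch of the pattern, with a hypothetical fetch function, URL, and curl flags (in the diff above only the cookie file is consumed here; the proxy value is used by the new XML-RPC transport in sync.py):

# Hypothetical caller, mirroring the _FetchBundle change above.
import subprocess

from git_config import GetUrlCookieFile

def fetch_bundle(src_url, dest, quiet=True):
  cmd = ['curl', '--fail', '--location', '--output', dest]
  with GetUrlCookieFile(src_url, quiet) as (cookiefile, proxy):
    if cookiefile:
      cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
    if src_url.startswith('persistent-'):
      # curl does not speak persistent-http[s]; strip the prefix so it sees
      # a plain http[s] URL, with the cookies carrying the session.
      src_url = src_url[len('persistent-'):]
    # Run curl while the context manager is still open, since the cookie
    # file backing a persistent-* URL may be transient.
    return subprocess.call(cmd + [src_url]) == 0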

subcmds/sync.py

@@ -23,18 +23,26 @@ import shutil
 import socket
 import subprocess
 import sys
+import tempfile
 import time
 
 from pyversion import is_python3
 if is_python3():
+  import http.cookiejar as cookielib
+  import urllib.error
   import urllib.parse
+  import urllib.request
   import xmlrpc.client
 else:
+  import cookielib
   import imp
+  import urllib2
   import urlparse
   import xmlrpclib
   urllib = imp.new_module('urllib')
+  urllib.error = urllib2
   urllib.parse = urlparse
+  urllib.request = urllib2
   xmlrpc = imp.new_module('xmlrpc')
   xmlrpc.client = xmlrpclib
@@ -57,6 +65,7 @@ except ImportError:
   multiprocessing = None
 
 from git_command import GIT, git_require
+from git_config import GetSchemeFromUrl, GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
 import gitc_utils
 from project import Project
@@ -598,8 +607,12 @@ later is required to fix a server side protocol bug.
                                                     (username, password),
                                                     1)
 
+      transport = PersistentTransport(manifest_server)
+      if manifest_server.startswith('persistent-'):
+        manifest_server = manifest_server[len('persistent-'):]
+
       try:
-        server = xmlrpc.client.Server(manifest_server)
+        server = xmlrpc.client.Server(manifest_server, transport=transport)
         if opt.smart_sync:
           p = self.manifest.manifestProject
           b = p.GetBranch(p.CurrentBranch)
@@ -879,3 +892,93 @@ class _FetchTimes(object):
         os.remove(self._path)
       except OSError:
         pass
+
+
+# This is a replacement for xmlrpc.client.Transport using urllib2
+# and supporting persistent-http[s]. It cannot change hosts from
+# request to request like the normal transport, the real url
+# is passed during initialization.
+class PersistentTransport(xmlrpc.client.Transport):
+  def __init__(self, orig_host):
+    self.orig_host = orig_host
+
+  def request(self, host, handler, request_body, verbose=False):
+    with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
+      # Python doesn't understand cookies with the #HttpOnly_ prefix
+      # Since we're only using them for HTTP, copy the file temporarily,
+      # stripping those prefixes away.
+      tmpcookiefile = tempfile.NamedTemporaryFile()
+      try:
+        with open(cookiefile) as f:
+          for line in f:
+            if line.startswith("#HttpOnly_"):
+              line = line[len("#HttpOnly_"):]
+            tmpcookiefile.write(line)
+        tmpcookiefile.flush()
+
+        cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
+        cookiejar.load()
+      finally:
+        tmpcookiefile.close()
+
+      proxyhandler = urllib.request.ProxyHandler
+      if proxy:
+        proxyhandler = urllib.request.ProxyHandler({
+            "http": proxy,
+            "https": proxy })
+
+      opener = urllib.request.build_opener(
+          urllib.request.HTTPCookieProcessor(cookiejar),
+          proxyhandler)
+
+      url = urllib.parse.urljoin(self.orig_host, handler)
+      parse_results = urllib.parse.urlparse(url)
+
+      scheme = parse_results.scheme
+      if scheme == 'persistent-http':
+        scheme = 'http'
+      if scheme == 'persistent-https':
+        # If we're proxying through persistent-https, use http. The
+        # proxy itself will do the https.
+        if proxy:
+          scheme = 'http'
+        else:
+          scheme = 'https'
+
+      # Parse out any authentication information using the base class
+      host, extra_headers, _ = self.get_host_info(parse_results.netloc)
+
+      url = urllib.parse.urlunparse((
+          scheme,
+          host,
+          parse_results.path,
+          parse_results.params,
+          parse_results.query,
+          parse_results.fragment))
+
+      request = urllib.request.Request(url, request_body)
+      if extra_headers is not None:
+        for (name, header) in extra_headers:
+          request.add_header(name, header)
+      request.add_header('Content-Type', 'text/xml')
+      try:
+        response = opener.open(request)
+      except urllib.error.HTTPError as e:
+        if e.code == 501:
+          # We may have been redirected through a login process
+          # but our POST turned into a GET. Retry.
+          response = opener.open(request)
+        else:
+          raise
+
+      p, u = xmlrpc.client.getparser()
+      while 1:
+        data = response.read(1024)
+        if not data:
+          break
+        p.feed(data)
+      p.close()
+      return u.close()
+
+  def close(self):
+    pass
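Taken together with the earlier sync.py hunk, the transport keeps the original persistent-* URL while xmlrpc.client.Server is given the stripped http[s] form. A sketch of the wiring inside sync.py (after the imports and the class above); the server URL and branch name are hypothetical:

# Sketch only: mirrors how the smart-sync code wires up the transport.
manifest_server = 'persistent-https://android.example.com/manifestserver'

transport = PersistentTransport(manifest_server)
if manifest_server.startswith('persistent-'):
  # xmlrpc.client only understands plain http[s] URLs; the transport keeps
  # the original persistent-* URL and resolves cookies/proxy itself.
  manifest_server = manifest_server[len('persistent-'):]

server = xmlrpc.client.Server(manifest_server, transport=transport)
manifest_xml = server.GetApprovedManifest('master')  # smart-sync RPC, hypothetical branch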