Mirror of https://gerrit.googlesource.com/git-repo (synced 2024-12-21 07:16:21 +00:00)
Support smart-sync through persistent-http[s]
Use the same cookies and proxy that git traffic goes through for persistent-http[s] to support authentication for smart-sync.

Change-Id: I20f4a281c259053a5a4fdbc48b1bca48e781c692
commit 0745bb2657
parent 5d0c3a614e
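At a high level, the diff below adds a GetUrlCookieFile() helper to git_config.py, has project.py reuse it for bundle fetches, and gives subcmds/sync.py a PersistentTransport so the smart-sync XML-RPC call carries the same cookies and proxy as persistent-http[s] git traffic. A minimal sketch of the resulting call path, using names from the diff (Python 3 assumed, the manifest server URL is hypothetical, and it assumes repo's source tree is importable):

# Sketch only: how the pieces introduced in this commit fit together.
import xmlrpc.client

from subcmds.sync import PersistentTransport  # class added in subcmds/sync.py below

manifest_server = 'persistent-https://example.googlesource.com/manifestserver'  # hypothetical

transport = PersistentTransport(manifest_server)
if manifest_server.startswith('persistent-'):
  manifest_server = manifest_server[len('persistent-'):]
server = xmlrpc.client.Server(manifest_server, transport=transport)

# Every server.<method>() call now goes through PersistentTransport.request(),
# which asks GetUrlCookieFile() (git_config.py below) for the cookie file and
# proxy reported by `git-remote-persistent-https -print_config`.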
git_config.py

@@ -15,6 +15,8 @@
 from __future__ import print_function

+import contextlib
+import errno
 import json
 import os
 import re
@@ -502,6 +504,43 @@ def GetSchemeFromUrl(url):
     return m.group(1)
   return None

+@contextlib.contextmanager
+def GetUrlCookieFile(url, quiet):
+  if url.startswith('persistent-'):
+    try:
+      p = subprocess.Popen(
+          ['git-remote-persistent-https', '-print_config', url],
+          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+          stderr=subprocess.PIPE)
+      try:
+        cookieprefix = 'http.cookiefile='
+        proxyprefix = 'http.proxy='
+        cookiefile = None
+        proxy = None
+        for line in p.stdout:
+          line = line.strip()
+          if line.startswith(cookieprefix):
+            cookiefile = line[len(cookieprefix):]
+          if line.startswith(proxyprefix):
+            proxy = line[len(proxyprefix):]
+        # Leave subprocess open, as cookie file may be transient.
+        if cookiefile or proxy:
+          yield cookiefile, proxy
+          return
+      finally:
+        p.stdin.close()
+        if p.wait():
+          err_msg = p.stderr.read()
+          if ' -print_config' in err_msg:
+            pass # Persistent proxy doesn't support -print_config.
+          elif not quiet:
+            print(err_msg, file=sys.stderr)
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        pass # No persistent proxy.
+      raise
+  yield GitConfig.ForUser().GetString('http.cookiefile'), None
+
 def _preconnect(url):
   m = URI_ALL.match(url)
   if m:
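The new helper is a context manager so that the git-remote-persistent-https subprocess can be kept alive while the caller uses the (possibly transient) cookie file, as the in-code comment notes. A hedged usage sketch, with a made-up URL and assuming git_config is importable:

# Usage sketch (URL and values are hypothetical). For a persistent-http[s]
# URL, `git-remote-persistent-https -print_config <url>` prints lines such as
#   http.cookiefile=/home/user/.gitcookies
#   http.proxy=http://localhost:8080
# which GetUrlCookieFile() parses; for other URLs it falls back to the user's
# git config http.cookiefile and yields no proxy.
from git_config import GetUrlCookieFile

with GetUrlCookieFile('persistent-https://example.com/a/manifest', True) as (cookiefile, proxy):
  print('cookie file:', cookiefile)  # may be None if nothing is configured
  print('proxy:      ', proxy)       # None unless -print_config reported http.proxy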
project.py (39 lines changed)
@@ -13,7 +13,6 @@
 # limitations under the License.

 from __future__ import print_function
-import contextlib
 import errno
 import filecmp
 import glob
@@ -31,7 +30,7 @@ import traceback

 from color import Coloring
 from git_command import GitCommand, git_require
-from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
+from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, ID_RE
 from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
@@ -2030,7 +2029,7 @@ class Project(object):
        os.remove(tmpPath)
    if 'http_proxy' in os.environ and 'darwin' == sys.platform:
      cmd += ['--proxy', os.environ['http_proxy']]
-    with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile:
+    with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
      if cookiefile:
        cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
      if srcUrl.startswith('persistent-'):
@@ -2078,40 +2077,6 @@ class Project(object):
    except OSError:
      return False

-  @contextlib.contextmanager
-  def _GetBundleCookieFile(self, url, quiet):
-    if url.startswith('persistent-'):
-      try:
-        p = subprocess.Popen(
-            ['git-remote-persistent-https', '-print_config', url],
-            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        try:
-          prefix = 'http.cookiefile='
-          cookiefile = None
-          for line in p.stdout:
-            line = line.strip()
-            if line.startswith(prefix):
-              cookiefile = line[len(prefix):]
-              break
-          # Leave subprocess open, as cookie file may be transient.
-          if cookiefile:
-            yield cookiefile
-            return
-        finally:
-          p.stdin.close()
-          if p.wait():
-            err_msg = p.stderr.read()
-            if ' -print_config' in err_msg:
-              pass # Persistent proxy doesn't support -print_config.
-            elif not quiet:
-              print(err_msg, file=sys.stderr)
-      except OSError as e:
-        if e.errno == errno.ENOENT:
-          pass # No persistent proxy.
-        raise
-    yield GitConfig.ForUser().GetString('http.cookiefile')
-
  def _Checkout(self, rev, quiet=False):
    cmd = ['checkout']
    if quiet:
subcmds/sync.py (105 lines changed)
@@ -23,18 +23,26 @@ import shutil
 import socket
 import subprocess
 import sys
+import tempfile
 import time

 from pyversion import is_python3
 if is_python3():
+  import http.cookiejar as cookielib
+  import urllib.error
   import urllib.parse
+  import urllib.request
   import xmlrpc.client
 else:
+  import cookielib
   import imp
+  import urllib2
   import urlparse
   import xmlrpclib
   urllib = imp.new_module('urllib')
+  urllib.error = urllib2
   urllib.parse = urlparse
+  urllib.request = urllib2
   xmlrpc = imp.new_module('xmlrpc')
   xmlrpc.client = xmlrpclib

@@ -57,6 +65,7 @@ except ImportError:
   multiprocessing = None

 from git_command import GIT, git_require
+from git_config import GetSchemeFromUrl, GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
 from project import Project
 from project import RemoteSpec
@@ -575,8 +584,12 @@ later is required to fix a server side protocol bug.
                                                  (username, password),
                                                  1)

+      transport = PersistentTransport(manifest_server)
+      if manifest_server.startswith('persistent-'):
+        manifest_server = manifest_server[len('persistent-'):]
+
       try:
-        server = xmlrpc.client.Server(manifest_server)
+        server = xmlrpc.client.Server(manifest_server, transport=transport)
         if opt.smart_sync:
           p = self.manifest.manifestProject
           b = p.GetBranch(p.CurrentBranch)
@@ -850,3 +863,93 @@ class _FetchTimes(object):
        os.remove(self._path)
      except OSError:
        pass
+
+# This is a replacement for xmlrpc.client.Transport using urllib2
+# and supporting persistent-http[s]. It cannot change hosts from
+# request to request like the normal transport, the real url
+# is passed during initialization.
+class PersistentTransport(xmlrpc.client.Transport):
+  def __init__(self, orig_host):
+    self.orig_host = orig_host
+
+  def request(self, host, handler, request_body, verbose=False):
+    with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
+      # Python doesn't understand cookies with the #HttpOnly_ prefix
+      # Since we're only using them for HTTP, copy the file temporarily,
+      # stripping those prefixes away.
+      tmpcookiefile = tempfile.NamedTemporaryFile()
+      try:
+        with open(cookiefile) as f:
+          for line in f:
+            if line.startswith("#HttpOnly_"):
+              line = line[len("#HttpOnly_"):]
+            tmpcookiefile.write(line)
+        tmpcookiefile.flush()
+
+        cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
+        cookiejar.load()
+      finally:
+        tmpcookiefile.close()
+
+      proxyhandler = urllib.request.ProxyHandler
+      if proxy:
+        proxyhandler = urllib.request.ProxyHandler({
+            "http": proxy,
+            "https": proxy })
+
+      opener = urllib.request.build_opener(
+          urllib.request.HTTPCookieProcessor(cookiejar),
+          proxyhandler)
+
+      url = urllib.parse.urljoin(self.orig_host, handler)
+      parse_results = urllib.parse.urlparse(url)
+
+      scheme = parse_results.scheme
+      if scheme == 'persistent-http':
+        scheme = 'http'
+      if scheme == 'persistent-https':
+        # If we're proxying through persistent-https, use http. The
+        # proxy itself will do the https.
+        if proxy:
+          scheme = 'http'
+        else:
+          scheme = 'https'
+
+      # Parse out any authentication information using the base class
+      host, extra_headers, _ = self.get_host_info(parse_results.netloc)
+
+      url = urllib.parse.urlunparse((
+          scheme,
+          host,
+          parse_results.path,
+          parse_results.params,
+          parse_results.query,
+          parse_results.fragment))
+
+      request = urllib.request.Request(url, request_body)
+      if extra_headers is not None:
+        for (name, header) in extra_headers:
+          request.add_header(name, header)
+      request.add_header('Content-Type', 'text/xml')
+      try:
+        response = opener.open(request)
+      except urllib.error.HTTPError as e:
+        if e.code == 501:
+          # We may have been redirected through a login process
+          # but our POST turned into a GET. Retry.
+          response = opener.open(request)
+        else:
+          raise
+
+      p, u = xmlrpc.client.getparser()
+      while 1:
+        data = response.read(1024)
+        if not data:
+          break
+        p.feed(data)
+      p.close()
+      return u.close()
+
+  def close(self):
+    pass
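As the comment above the class notes, PersistentTransport is bound to the URL it was constructed with, and request() translates the persistent-http[s] scheme back into something urllib can open. A small standalone sketch of that scheme mapping (Python 3 assumed; hosts and the proxy address are made up):

# Sketch of the scheme translation done in PersistentTransport.request():
# persistent-http -> http; persistent-https -> https, unless a proxy from
# git-remote-persistent-https is in use, in which case plain http is sent
# and the proxy terminates TLS. Hosts and proxy below are hypothetical.
import urllib.parse

for url, proxy in [('persistent-http://host/manifestserver', None),
                   ('persistent-https://host/manifestserver', None),
                   ('persistent-https://host/manifestserver', 'http://127.0.0.1:8080')]:
  scheme = urllib.parse.urlparse(url).scheme
  if scheme == 'persistent-http':
    scheme = 'http'
  if scheme == 'persistent-https':
    scheme = 'http' if proxy else 'https'
  print(url, 'proxy=%s' % proxy, '->', scheme)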