# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import contextlib
import errno
import json
import os
import re
import signal
import ssl
import subprocess
import sys
try:
  import threading as _threading
except ImportError:
  import dummy_threading as _threading
import time

from pyversion import is_python3
if is_python3():
  import urllib.request
  import urllib.error
else:
  import urllib2
  import imp
  urllib = imp.new_module('urllib')
  urllib.request = urllib2
  urllib.error = urllib2

from error import GitError, UploadError
import platform_utils
from repo_trace import Trace

if is_python3():
  from http.client import HTTPException
else:
  from httplib import HTTPException

from git_command import GitCommand
from git_command import ssh_sock
from git_command import terminate_ssh_clients
from git_refs import R_CHANGES, R_HEADS, R_TAGS

ID_RE = re.compile(r'^[0-9a-f]{40}$')

REVIEW_CACHE = dict()


def IsChange(rev):
  return rev.startswith(R_CHANGES)


def IsId(rev):
  return ID_RE.match(rev)


def IsTag(rev):
  return rev.startswith(R_TAGS)


def IsImmutable(rev):
  return IsChange(rev) or IsId(rev) or IsTag(rev)


def _key(name):
  parts = name.split('.')
  if len(parts) < 2:
    return name.lower()
  parts[0] = parts[0].lower()
  parts[-1] = parts[-1].lower()
  return '.'.join(parts)
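
# In git config names the section and variable names are case-insensitive,
# but any subsection is case-sensitive; e.g. (an illustrative value)
# _key('url.HTTPS://Example.Com.insteadOf') yields
# 'url.HTTPS://Example.Com.insteadof'.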


class GitConfig(object):
  _ForUser = None

  @classmethod
  def ForUser(cls):
    if cls._ForUser is None:
      cls._ForUser = cls(configfile=os.path.expanduser('~/.gitconfig'))
    return cls._ForUser

  @classmethod
  def ForRepository(cls, gitdir, defaults=None):
    return cls(configfile=os.path.join(gitdir, 'config'),
               defaults=defaults)

  def __init__(self, configfile, defaults=None, jsonFile=None):
    self.file = configfile
    self.defaults = defaults
    self._cache_dict = None
    self._section_dict = None
    self._remotes = {}
    self._branches = {}

    self._json = jsonFile
    if self._json is None:
      self._json = os.path.join(
          os.path.dirname(self.file),
          '.repo_' + os.path.basename(self.file) + '.json')

  def Has(self, name, include_defaults=True):
    """Return true if this configuration file has the key.
    """
    if _key(name) in self._cache:
      return True
    if include_defaults and self.defaults:
      return self.defaults.Has(name, include_defaults=True)
    return False

  def GetBoolean(self, name):
    """Returns a boolean from the configuration file.
       None : The value was not defined, or is not a boolean.
       True : The value was set to true or yes.
       False: The value was set to false or no.
    """
    v = self.GetString(name)
    if v is None:
      return None
    v = v.lower()
    if v in ('true', 'yes'):
      return True
    if v in ('false', 'no'):
      return False
    return None

  def GetString(self, name, all_keys=False):
    """Get the first value for a key, or None if it is not defined.

       This configuration file is searched first; if the key is not
       defined here, or all_keys is True, the defaults are searched too.
    """
    try:
      v = self._cache[_key(name)]
    except KeyError:
      if self.defaults:
        return self.defaults.GetString(name, all_keys=all_keys)
      v = []

    if not all_keys:
      if v:
        return v[0]
      return None

    r = []
    r.extend(v)
    if self.defaults:
      r.extend(self.defaults.GetString(name, all_keys=True))
    return r

  def SetString(self, name, value):
    """Set the value(s) for a key.
       Only this configuration file is modified.

       The supplied value should be either a string,
       or a list of strings (to store multiple values).
    """
    key = _key(name)

    try:
      old = self._cache[key]
    except KeyError:
      old = []

    if value is None:
      if old:
        del self._cache[key]
        self._do('--unset-all', name)

    elif isinstance(value, list):
      if len(value) == 0:
        self.SetString(name, None)

      elif len(value) == 1:
        self.SetString(name, value[0])

      elif old != value:
        self._cache[key] = list(value)
        self._do('--replace-all', name, value[0])
        for i in range(1, len(value)):
          self._do('--add', name, value[i])

    elif len(old) != 1 or old[0] != value:
      self._cache[key] = [value]
      self._do('--replace-all', name, value)
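
  # For example (illustrative values only): SetString('core.editor', 'vim')
  # stores a single value, SetString('remote.origin.fetch', [a, b]) replaces
  # all values for the key, and SetString('core.editor', None) unsets it.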

  def GetRemote(self, name):
    """Get the remote.$name.* configuration values as an object.
    """
    try:
      r = self._remotes[name]
    except KeyError:
      r = Remote(self, name)
      self._remotes[r.name] = r
    return r

  def GetBranch(self, name):
    """Get the branch.$name.* configuration values as an object.
    """
    try:
      b = self._branches[name]
    except KeyError:
      b = Branch(self, name)
      self._branches[b.name] = b
    return b

  def GetSubSections(self, section):
    """List all subsection names matching $section.*.*
    """
    return self._sections.get(section, set())

  def HasSection(self, section, subsection=''):
    """Does at least one key in section.subsection exist?
    """
    try:
      return subsection in self._sections[section]
    except KeyError:
      return False

  def UrlInsteadOf(self, url):
    """Resolve any url.*.insteadof references.
    """
    for new_url in self.GetSubSections('url'):
      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
        if old_url is not None and url.startswith(old_url):
          return new_url + url[len(old_url):]
    return url

  @property
  def _sections(self):
    d = self._section_dict
    if d is None:
      d = {}
      for name in self._cache.keys():
        p = name.split('.')
        if 2 == len(p):
          section = p[0]
          subsect = ''
        else:
          section = p[0]
          subsect = '.'.join(p[1:-1])
        if section not in d:
          d[section] = set()
        d[section].add(subsect)
      self._section_dict = d
    return d
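
  # e.g. the cached keys 'core.bare' and 'url.https://example.com.insteadof'
  # (illustrative) produce {'core': {''}, 'url': {'https://example.com'}}.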

  @property
  def _cache(self):
    if self._cache_dict is None:
      self._cache_dict = self._Read()
    return self._cache_dict

  def _Read(self):
    d = self._ReadJson()
    if d is None:
      d = self._ReadGit()
      self._SaveJson(d)
    return d
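
  # The parsed config is cached in a JSON file beside the config file
  # ('.repo_<name>.json'); _ReadJson discards it whenever the config file
  # itself is newer, forcing a fresh read through git.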

  def _ReadJson(self):
    try:
      if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
        platform_utils.remove(self._json)
        return None
    except OSError:
      return None
    try:
      Trace(': parsing %s', self.file)
      with open(self._json) as fd:
        return json.load(fd)
    except (IOError, ValueError):
      platform_utils.remove(self._json)
      return None

  def _SaveJson(self, cache):
    try:
      with open(self._json, 'w') as fd:
        json.dump(cache, fd, indent=2)
    except (IOError, TypeError):
      if os.path.exists(self._json):
        platform_utils.remove(self._json)

  def _ReadGit(self):
    """
    Read configuration data from git.

    This internal method populates the GitConfig cache.

    """
    c = {}
    d = self._do('--null', '--list')
    if d is None:
      return c
    if not is_python3():
      d = d.decode('utf-8')
    for line in d.rstrip('\0').split('\0'):
      if '\n' in line:
        key, val = line.split('\n', 1)
      else:
        key = line
        val = None

      if key in c:
        c[key].append(val)
      else:
        c[key] = [val]

    return c

  def _do(self, *args):
    command = ['config', '--file', self.file]
    command.extend(args)

    p = GitCommand(None,
                   command,
                   capture_stdout=True,
                   capture_stderr=True)
    if p.Wait() == 0:
      return p.stdout
    else:
      raise GitError('git config %s: %s' % (str(args), p.stderr))


class RefSpec(object):
  """A Git refspec line, split into its components:

      forced:  True if the line starts with '+'
      src:     Left side of the line
      dst:     Right side of the line
  """

  @classmethod
  def FromString(cls, rs):
    lhs, rhs = rs.split(':', 1)  # split on the first ':' only
    if lhs.startswith('+'):
      lhs = lhs[1:]
      forced = True
    else:
      forced = False
    return cls(forced, lhs, rhs)

  def __init__(self, forced, lhs, rhs):
    self.forced = forced
    self.src = lhs
    self.dst = rhs

  def SourceMatches(self, rev):
    if self.src:
      if rev == self.src:
        return True
      if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
        return True
    return False

  def DestMatches(self, ref):
    if self.dst:
      if ref == self.dst:
        return True
      if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
        return True
    return False

  def MapSource(self, rev):
    if self.src.endswith('/*'):
      return self.dst[:-1] + rev[len(self.src) - 1:]
    return self.dst

  def __str__(self):
    s = ''
    if self.forced:
      s += '+'
    if self.src:
      s += self.src
    if self.dst:
      s += ':'
      s += self.dst
    return s
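
# For example, RefSpec.FromString('+refs/heads/*:refs/remotes/origin/*')
# gives forced=True, src='refs/heads/*', dst='refs/remotes/origin/*';
# MapSource('refs/heads/master') then returns 'refs/remotes/origin/master',
# and str() reproduces the original refspec line.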


_master_processes = []
_master_keys = set()
_ssh_master = True
_master_keys_lock = None


def init_ssh():
  """Should be called once at the start of repo to init ssh master handling.

     At the moment, all we do is to create our lock.
  """
  global _master_keys_lock
  assert _master_keys_lock is None, "Should only call init_ssh once"
  _master_keys_lock = _threading.Lock()


def _open_ssh(host, port=None):
  global _ssh_master

  # Acquire the lock.  This is needed to prevent opening multiple masters for
  # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
  # manifest <remote fetch="ssh://xyz"> specifies a different host from the
  # one that was passed to repo init.
  _master_keys_lock.acquire()
  try:

    # Check to see whether we already think that the master is running; if we
    # think it's already running, return right away.
    if port is not None:
      key = '%s:%s' % (host, port)
    else:
      key = host

    if key in _master_keys:
      return True

    if (not _ssh_master
        or 'GIT_SSH' in os.environ
        or sys.platform in ('win32', 'cygwin')):
      # failed earlier, or cygwin ssh can't do this
      #
      return False

    # We will make two calls to ssh; this is the common part of both calls.
    command_base = ['ssh',
                    '-o', 'ControlPath %s' % ssh_sock(),
                    host]
    if port is not None:
      command_base[1:1] = ['-p', str(port)]

    # Since the key wasn't in _master_keys, we think that master isn't running.
    # ...but before actually starting a master, we'll double-check.  This can
    # be important because we can't tell that 'git@myhost.com' is the same
    # as 'myhost.com' where "User git" is set up in the user's ~/.ssh/config.
    check_command = command_base + ['-O', 'check']
    try:
      Trace(': %s', ' '.join(check_command))
      check_process = subprocess.Popen(check_command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
      check_process.communicate()  # read output, but ignore it...
      isnt_running = check_process.wait()

      if not isnt_running:
        # Our double-check found that the master _was_ in fact running.  Add
        # it to the list of keys.
        _master_keys.add(key)
        return True
    except Exception:
      # Ignore exceptions.  We will fall back to the normal command and
      # print to the log there.
      pass

    command = command_base[:1] + ['-M', '-N'] + command_base[1:]
    try:
      Trace(': %s', ' '.join(command))
      p = subprocess.Popen(command)
    except Exception as e:
      _ssh_master = False
      print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
            % (host, port, str(e)), file=sys.stderr)
      return False

    time.sleep(1)
    ssh_died = (p.poll() is not None)
    if ssh_died:
      return False

    _master_processes.append(p)
    _master_keys.add(key)
    return True
  finally:
    _master_keys_lock.release()
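
# The master is started as e.g. ssh -M -N -o 'ControlPath <sock>' [-p PORT]
# HOST: -M makes it a control master, -N keeps it open without running a
# remote command, and later ssh invocations multiplex over its socket.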


def close_ssh():
  global _master_keys_lock

  terminate_ssh_clients()

  for p in _master_processes:
    try:
      os.kill(p.pid, signal.SIGTERM)
      p.wait()
    except OSError:
      pass
  del _master_processes[:]
  _master_keys.clear()

  d = ssh_sock(create=False)
  if d:
    try:
      platform_utils.rmdir(os.path.dirname(d))
    except OSError:
      pass

  # We're done with the lock, so we can delete it.
  _master_keys_lock = None


URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
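
# e.g. URI_ALL matches 'ssh://user@example.com:29418/path' (scheme in group
# 1, 'user@example.com:29418' in group 2), while URI_SCP matches scp-style
# addresses such as 'user@example.com:path/to/repo.git' (user@host in
# group 1).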


def GetSchemeFromUrl(url):
  m = URI_ALL.match(url)
  if m:
    return m.group(1)
  return None


@contextlib.contextmanager
def GetUrlCookieFile(url, quiet):
  if url.startswith('persistent-'):
    try:
      p = subprocess.Popen(
          ['git-remote-persistent-https', '-print_config', url],
          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
          stderr=subprocess.PIPE)
      try:
        cookieprefix = 'http.cookiefile='
        proxyprefix = 'http.proxy='
        cookiefile = None
        proxy = None
        for line in p.stdout:
          line = line.strip().decode('utf-8')
          if line.startswith(cookieprefix):
            cookiefile = os.path.expanduser(line[len(cookieprefix):])
          if line.startswith(proxyprefix):
            proxy = line[len(proxyprefix):]
        # Leave subprocess open, as cookie file may be transient.
        if cookiefile or proxy:
          yield cookiefile, proxy
          return
      finally:
        p.stdin.close()
        if p.wait():
          err_msg = p.stderr.read().decode('utf-8')
          if ' -print_config' in err_msg:
            pass  # Persistent proxy doesn't support -print_config.
          elif not quiet:
            print(err_msg, file=sys.stderr)
    except OSError as e:
      if e.errno != errno.ENOENT:
        raise
      # No persistent proxy; fall through to the user's cookie file.
  cookiefile = GitConfig.ForUser().GetString('http.cookiefile')
  if cookiefile:
    cookiefile = os.path.expanduser(cookiefile)
  yield cookiefile, None
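
# Typical use is as a context manager, e.g. (a sketch):
#   with GetUrlCookieFile(url, quiet=True) as (cookiefile, proxy):
#     ...  # pass the values to git via http.cookiefile / http.proxy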


def _preconnect(url):
  m = URI_ALL.match(url)
  if m:
    scheme = m.group(1)
    host = m.group(2)
    if ':' in host:
      host, port = host.split(':')
    else:
      port = None
    if scheme in ('ssh', 'git+ssh', 'ssh+git'):
      return _open_ssh(host, port)
    return False

  m = URI_SCP.match(url)
  if m:
    host = m.group(1)
    return _open_ssh(host)

  return False
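
# e.g. 'ssh://git@example.com:29418/repo' (an illustrative URL) opens a
# master for git@example.com on port 29418, while the scp-style
# 'git@example.com:repo.git' opens one on the default port.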


class Remote(object):
  """Configuration options related to a remote.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.url = self._Get('url')
    self.pushUrl = self._Get('pushurl')
    self.review = self._Get('review')
    self.projectname = self._Get('projectname')
    self.fetch = list(map(RefSpec.FromString,
                          self._Get('fetch', all_keys=True)))
    self._review_url = None

  def _InsteadOf(self):
    globCfg = GitConfig.ForUser()
    urlList = globCfg.GetSubSections('url')
    longest = ""
    longestUrl = ""

    for url in urlList:
      key = "url." + url + ".insteadOf"
      insteadOfList = globCfg.GetString(key, all_keys=True)

      for insteadOf in insteadOfList:
        if (self.url.startswith(insteadOf)
            and len(insteadOf) > len(longest)):
          longest = insteadOf
          longestUrl = url

    if len(longest) == 0:
      return self.url

    return self.url.replace(longest, longestUrl, 1)

  def PreConnectFetch(self):
    connectionUrl = self._InsteadOf()
    return _preconnect(connectionUrl)

  def ReviewUrl(self, userEmail, validate_certs):
    if self._review_url is None:
      if self.review is None:
        return None

      u = self.review
      if u.startswith('persistent-'):
        u = u[len('persistent-'):]
      if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
        u = 'http://%s' % u
      if u.endswith('/Gerrit'):
        u = u[:len(u) - len('/Gerrit')]
      if u.endswith('/ssh_info'):
        u = u[:len(u) - len('/ssh_info')]
      if not u.endswith('/'):
        u += '/'
      http_url = u

      if u in REVIEW_CACHE:
        self._review_url = REVIEW_CACHE[u]
      elif 'REPO_HOST_PORT_INFO' in os.environ:
        host, port = os.environ['REPO_HOST_PORT_INFO'].split()
        self._review_url = self._SshReviewUrl(userEmail, host, port)
        REVIEW_CACHE[u] = self._review_url
      elif u.startswith('sso:') or u.startswith('ssh:'):
        self._review_url = u  # Assume it's right
        REVIEW_CACHE[u] = self._review_url
      elif 'REPO_IGNORE_SSH_INFO' in os.environ:
        self._review_url = http_url
        REVIEW_CACHE[u] = self._review_url
      else:
        try:
          info_url = u + 'ssh_info'
          if not validate_certs:
            context = ssl._create_unverified_context()
            info = urllib.request.urlopen(info_url, context=context).read()
          else:
            info = urllib.request.urlopen(info_url).read()
          if info == b'NOT_AVAILABLE' or b'<' in info:
            # If `info` contains '<', we assume the server gave us some sort
            # of HTML response back, like maybe a login page.
            #
            # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
            self._review_url = http_url
          else:
            info = info.decode('utf-8')
            host, port = info.split()
            self._review_url = self._SshReviewUrl(userEmail, host, port)
        except urllib.error.HTTPError as e:
          raise UploadError('%s: %s' % (self.review, str(e)))
        except urllib.error.URLError as e:
          raise UploadError('%s: %s' % (self.review, str(e)))
        except HTTPException as e:
          raise UploadError('%s: %s' % (self.review, e.__class__.__name__))

        REVIEW_CACHE[u] = self._review_url
    return self._review_url + self.projectname

  def _SshReviewUrl(self, userEmail, host, port):
    username = self._config.GetString('review.%s.username' % self.review)
    if username is None:
      username = userEmail.split('@')[0]
    return 'ssh://%s@%s:%s/' % (username, host, port)
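
  # e.g. _SshReviewUrl('alice@example.com', 'gerrit.example.com', 29418)
  # (illustrative values) yields 'ssh://alice@gerrit.example.com:29418/'
  # unless review.<url>.username overrides the user name.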

  def ToLocal(self, rev):
    """Convert a remote revision string to something we have locally.
    """
    if self.name == '.' or IsId(rev):
      return rev

    if not rev.startswith('refs/'):
      rev = R_HEADS + rev

    for spec in self.fetch:
      if spec.SourceMatches(rev):
        return spec.MapSource(rev)

    if not rev.startswith(R_HEADS):
      return rev

    raise GitError('%s: remote %s does not have %s' %
                   (self.projectname, self.name, rev))
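
  # With the default fetch spec '+refs/heads/*:refs/remotes/origin/*' on a
  # remote named 'origin', ToLocal('master') returns
  # 'refs/remotes/origin/master'.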

  def WritesTo(self, ref):
    """True if the remote stores to the tracking ref.
    """
    for spec in self.fetch:
      if spec.DestMatches(ref):
        return True
    return False

  def ResetFetch(self, mirror=False):
    """Set the fetch refspec to its default value.
    """
    if mirror:
      dst = 'refs/heads/*'
    else:
      dst = 'refs/remotes/%s/*' % self.name
    self.fetch = [RefSpec(True, 'refs/heads/*', dst)]

  def Save(self):
    """Save this remote to the configuration.
    """
    self._Set('url', self.url)
    if self.pushUrl is not None:
      self._Set('pushurl', self.pushUrl + '/' + self.projectname)
    else:
      self._Set('pushurl', self.pushUrl)
    self._Set('review', self.review)
    self._Set('projectname', self.projectname)
    self._Set('fetch', list(map(str, self.fetch)))

  def _Set(self, key, value):
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all_keys=False):
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.GetString(key, all_keys=all_keys)


class Branch(object):
  """Configuration options related to a single branch.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.merge = self._Get('merge')

    r = self._Get('remote')
    if r:
      self.remote = self._config.GetRemote(r)
    else:
      self.remote = None

  @property
  def LocalMerge(self):
    """Convert the merge spec to a local name.
    """
    if self.remote and self.merge:
      return self.remote.ToLocal(self.merge)
    return None

  def Save(self):
    """Save this branch back into the configuration.
    """
    if self._config.HasSection('branch', self.name):
      if self.remote:
        self._Set('remote', self.remote.name)
      else:
        self._Set('remote', None)
      self._Set('merge', self.merge)

    else:
      with open(self._config.file, 'a') as fd:
        fd.write('[branch "%s"]\n' % self.name)
        if self.remote:
          fd.write('\tremote = %s\n' % self.remote.name)
        if self.merge:
          fd.write('\tmerge = %s\n' % self.merge)

  def _Set(self, key, value):
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all_keys=False):
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.GetString(key, all_keys=all_keys)