# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import contextlib
import datetime
import errno
from http.client import HTTPException
import json
import os
import re
import ssl
import subprocess
import sys
import urllib.error
import urllib.request

from error import GitError, UploadError
import platform_utils
from repo_trace import Trace

from git_command import GitCommand
from git_refs import R_CHANGES, R_HEADS, R_TAGS

# Prefix that is prepended to all the keys of SyncAnalysisState's data
# that is saved in the config.
SYNC_STATE_PREFIX = 'repo.syncstate.'

ID_RE = re.compile(r'^[0-9a-f]{40}$')

REVIEW_CACHE = dict()


def IsChange(rev):
  return rev.startswith(R_CHANGES)


def IsId(rev):
  return ID_RE.match(rev)


def IsTag(rev):
  return rev.startswith(R_TAGS)


def IsImmutable(rev):
  return IsChange(rev) or IsId(rev) or IsTag(rev)


def _key(name):
  parts = name.split('.')
  if len(parts) < 2:
    return name.lower()
  parts[0] = parts[0].lower()
  parts[-1] = parts[-1].lower()
  return '.'.join(parts)


class GitConfig(object):
  _ForUser = None

  _USER_CONFIG = '~/.gitconfig'

  _ForSystem = None
  _SYSTEM_CONFIG = '/etc/gitconfig'

  @classmethod
  def ForSystem(cls):
    if cls._ForSystem is None:
      cls._ForSystem = cls(configfile=cls._SYSTEM_CONFIG)
    return cls._ForSystem

  @classmethod
  def ForUser(cls):
    if cls._ForUser is None:
      cls._ForUser = cls(configfile=os.path.expanduser(cls._USER_CONFIG))
    return cls._ForUser

  @classmethod
  def ForRepository(cls, gitdir, defaults=None):
    return cls(configfile=os.path.join(gitdir, 'config'),
               defaults=defaults)

  def __init__(self, configfile, defaults=None, jsonFile=None):
    self.file = configfile
    self.defaults = defaults
    self._cache_dict = None
    self._section_dict = None
    self._remotes = {}
    self._branches = {}

    self._json = jsonFile
    if self._json is None:
      self._json = os.path.join(
          os.path.dirname(self.file),
          '.repo_' + os.path.basename(self.file) + '.json')

  def Has(self, name, include_defaults=True):
    """Return true if this configuration file has the key.
    """
    if _key(name) in self._cache:
      return True
    if include_defaults and self.defaults:
      return self.defaults.Has(name, include_defaults=True)
    return False

  def GetInt(self, name):
    """Returns an integer from the configuration file.

    This follows the git config syntax.

    Args:
      name: The key to lookup.

    Returns:
      None if the value was not defined, or is not an int.
      Otherwise, the number itself.
    """
    v = self.GetString(name)
    if v is None:
      return None
    v = v.strip()

    mult = 1
    if v.endswith('k'):
      v = v[:-1]
      mult = 1024
    elif v.endswith('m'):
      v = v[:-1]
      mult = 1024 * 1024
    elif v.endswith('g'):
      v = v[:-1]
      mult = 1024 * 1024 * 1024

    base = 10
    if v.startswith('0x'):
      base = 16

    try:
      return int(v, base=base) * mult
    except ValueError:
      return None

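  # Illustrative sketch of GetInt() above, not part of the class API; the key
  # name is hypothetical. Given a git config containing:
  #
  #   [example]
  #     cachesize = 16m
  #
  # GetInt('example.cachesize') returns 16 * 1024 * 1024, a '0x10' value parses
  # as 16, and an unset or non-numeric key returns None.
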
  def DumpConfigDict(self):
    """Returns the current configuration dict.

    Configuration data is information only (e.g. logging) and
    should not be considered a stable data-source.

    Returns:
      dict of {<key>, <value>} for git configuration cache.
      <value> are strings converted by GetString.
    """
    config_dict = {}
    for key in self._cache:
      config_dict[key] = self.GetString(key)
    return config_dict

  def GetBoolean(self, name):
    """Returns a boolean from the configuration file.

    None : The value was not defined, or is not a boolean.
    True : The value was set to true or yes.
    False: The value was set to false or no.
    """
    v = self.GetString(name)
    if v is None:
      return None
    v = v.lower()
    if v in ('true', 'yes'):
      return True
    if v in ('false', 'no'):
      return False
    return None

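  # Illustrative sketch of the boolean handling above (the key name is an
  # example): a config value of 'yes' or 'true' makes
  # GetBoolean('example.enabled') return True, 'no'/'false' return False, and
  # anything else (or an unset key) returns None. SetBoolean() below writes the
  # value back as the strings 'true'/'false'.
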
  def SetBoolean(self, name, value):
    """Set the truthy value for a key."""
    if value is not None:
      value = 'true' if value else 'false'
    self.SetString(name, value)

  def GetString(self, name, all_keys=False):
    """Get the first value for a key, or None if it is not defined.

    This configuration file is used first; if the key is not defined,
    or all_keys = True, then the defaults are also searched.
    """
    try:
      v = self._cache[_key(name)]
    except KeyError:
      if self.defaults:
        return self.defaults.GetString(name, all_keys=all_keys)
      v = []

    if not all_keys:
      if v:
        return v[0]
      return None

    r = []
    r.extend(v)
    if self.defaults:
      r.extend(self.defaults.GetString(name, all_keys=True))
    return r

  def SetString(self, name, value):
    """Set the value(s) for a key.

    Only this configuration file is modified.

    The supplied value should be either a string,
    or a list of strings (to store multiple values).
    """
    key = _key(name)

    try:
      old = self._cache[key]
    except KeyError:
      old = []

    if value is None:
      if old:
        del self._cache[key]
        self._do('--unset-all', name)

    elif isinstance(value, list):
      if len(value) == 0:
        self.SetString(name, None)

      elif len(value) == 1:
        self.SetString(name, value[0])

      elif old != value:
        self._cache[key] = list(value)
        self._do('--replace-all', name, value[0])
        for i in range(1, len(value)):
          self._do('--add', name, value[i])

    elif len(old) != 1 or old[0] != value:
      self._cache[key] = [value]
      self._do('--replace-all', name, value)

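  # Illustrative sketch of the single- vs. multi-value paths above (the key
  # name is hypothetical):
  #
  #   cfg.SetString('example.key', 'one')           # git config --replace-all
  #   cfg.SetString('example.key', ['one', 'two'])  # --replace-all then --add
  #   cfg.SetString('example.key', None)            # git config --unset-all
  #   cfg.GetString('example.key', all_keys=True)   # -> all stored values
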
  def GetRemote(self, name):
    """Get the remote.$name.* configuration values as an object.
    """
    try:
      r = self._remotes[name]
    except KeyError:
      r = Remote(self, name)
      self._remotes[r.name] = r
    return r

  def GetBranch(self, name):
    """Get the branch.$name.* configuration values as an object.
    """
    try:
      b = self._branches[name]
    except KeyError:
      b = Branch(self, name)
      self._branches[b.name] = b
    return b

  def GetSyncAnalysisStateData(self):
    """Returns data to be logged for the analysis of sync performance."""
    return {k: v for k, v in self.DumpConfigDict().items()
            if k.startswith(SYNC_STATE_PREFIX)}

  def UpdateSyncAnalysisState(self, options, superproject_logging_data):
    """Update Config's SYNC_STATE_PREFIX* data with the latest sync data.

    Args:
      options: Options passed to sync returned from optparse. See _Options().
      superproject_logging_data: A dictionary of superproject data that is to
        be logged.

    Returns:
      SyncAnalysisState object.
    """
    return SyncAnalysisState(self, options, superproject_logging_data)

  def GetSubSections(self, section):
    """List all subsection names matching $section.*.*
    """
    return self._sections.get(section, set())

  def HasSection(self, section, subsection=''):
    """Does at least one key in section.subsection exist?
    """
    try:
      return subsection in self._sections[section]
    except KeyError:
      return False

  def UrlInsteadOf(self, url):
    """Resolve any url.*.insteadof references.
    """
    for new_url in self.GetSubSections('url'):
      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
        if old_url is not None and url.startswith(old_url):
          return new_url + url[len(old_url):]
    return url

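  # Illustrative sketch of UrlInsteadOf() above (hypothetical config):
  #
  #   [url "ssh://git@example.com/"]
  #     insteadof = https://example.com/
  #
  # UrlInsteadOf('https://example.com/project.git') would then return
  # 'ssh://git@example.com/project.git'.
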
  @property
  def _sections(self):
    d = self._section_dict
    if d is None:
      d = {}
      for name in self._cache.keys():
        p = name.split('.')
        if 2 == len(p):
          section = p[0]
          subsect = ''
        else:
          section = p[0]
          subsect = '.'.join(p[1:-1])
        if section not in d:
          d[section] = set()
        d[section].add(subsect)
      self._section_dict = d
    return d

  @property
  def _cache(self):
    if self._cache_dict is None:
      self._cache_dict = self._Read()
    return self._cache_dict

  def _Read(self):
    d = self._ReadJson()
    if d is None:
      d = self._ReadGit()
      self._SaveJson(d)
    return d

  def _ReadJson(self):
    try:
      if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
        platform_utils.remove(self._json)
        return None
    except OSError:
      return None
    try:
      Trace(': parsing %s', self.file)
      with open(self._json) as fd:
        return json.load(fd)
    except (IOError, ValueError):
      platform_utils.remove(self._json)
      return None

  def _SaveJson(self, cache):
    try:
      with open(self._json, 'w') as fd:
        json.dump(cache, fd, indent=2)
    except (IOError, TypeError):
      if os.path.exists(self._json):
        platform_utils.remove(self._json)

  def _ReadGit(self):
    """
    Read configuration data from git.

    This internal method populates the GitConfig cache.

    """
    c = {}
    d = self._do('--null', '--list')
    if d is None:
      return c
    for line in d.rstrip('\0').split('\0'):
      if '\n' in line:
        key, val = line.split('\n', 1)
      else:
        key = line
        val = None

      if key in c:
        c[key].append(val)
      else:
        c[key] = [val]

    return c

  def _do(self, *args):
    if self.file == self._SYSTEM_CONFIG:
      command = ['config', '--system', '--includes']
    else:
      command = ['config', '--file', self.file, '--includes']
    command.extend(args)

    p = GitCommand(None,
                   command,
                   capture_stdout=True,
                   capture_stderr=True)
    if p.Wait() == 0:
      return p.stdout
    else:
      raise GitError('git config %s: %s' % (str(args), p.stderr))


class RepoConfig(GitConfig):
  """User settings for repo itself."""

  _USER_CONFIG = '~/.repoconfig/config'


class RefSpec(object):
  """A Git refspec line, split into its components:

      forced: True if the line starts with '+'
      src: Left side of the line
      dst: Right side of the line
  """

  @classmethod
  def FromString(cls, rs):
    lhs, rhs = rs.split(':', 2)
    if lhs.startswith('+'):
      lhs = lhs[1:]
      forced = True
    else:
      forced = False
    return cls(forced, lhs, rhs)

  def __init__(self, forced, lhs, rhs):
    self.forced = forced
    self.src = lhs
    self.dst = rhs

  def SourceMatches(self, rev):
    if self.src:
      if rev == self.src:
        return True
      if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
        return True
    return False

  def DestMatches(self, ref):
    if self.dst:
      if ref == self.dst:
        return True
      if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
        return True
    return False

  def MapSource(self, rev):
    if self.src.endswith('/*'):
      return self.dst[:-1] + rev[len(self.src) - 1:]
    return self.dst

  def __str__(self):
    s = ''
    if self.forced:
      s += '+'
    if self.src:
      s += self.src
    if self.dst:
      s += ':'
      s += self.dst
    return s

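
# Illustrative sketch of RefSpec above (the refspec value is just an example):
#
#   spec = RefSpec.FromString('+refs/heads/*:refs/remotes/origin/*')
#   spec.SourceMatches('refs/heads/main')  # -> True
#   spec.MapSource('refs/heads/main')      # -> 'refs/remotes/origin/main'
#   str(spec)                              # -> '+refs/heads/*:refs/remotes/origin/*'
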

URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')


def GetSchemeFromUrl(url):
  m = URI_ALL.match(url)
  if m:
    return m.group(1)
  return None


@contextlib.contextmanager
def GetUrlCookieFile(url, quiet):
  if url.startswith('persistent-'):
    try:
      p = subprocess.Popen(
          ['git-remote-persistent-https', '-print_config', url],
          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
          stderr=subprocess.PIPE)
      try:
        cookieprefix = 'http.cookiefile='
        proxyprefix = 'http.proxy='
        cookiefile = None
        proxy = None
        for line in p.stdout:
          line = line.strip().decode('utf-8')
          if line.startswith(cookieprefix):
            cookiefile = os.path.expanduser(line[len(cookieprefix):])
          if line.startswith(proxyprefix):
            proxy = line[len(proxyprefix):]
        # Leave subprocess open, as cookie file may be transient.
        if cookiefile or proxy:
          yield cookiefile, proxy
          return
      finally:
        p.stdin.close()
        if p.wait():
          err_msg = p.stderr.read().decode('utf-8')
          if ' -print_config' in err_msg:
            pass  # Persistent proxy doesn't support -print_config.
          elif not quiet:
            print(err_msg, file=sys.stderr)
    except OSError as e:
      if e.errno == errno.ENOENT:
        pass  # No persistent proxy.
      raise
  cookiefile = GitConfig.ForUser().GetString('http.cookiefile')
  if cookiefile:
    cookiefile = os.path.expanduser(cookiefile)
  yield cookiefile, None


class Remote(object):
  """Configuration options related to a remote.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.url = self._Get('url')
    self.pushUrl = self._Get('pushurl')
    self.review = self._Get('review')
    self.projectname = self._Get('projectname')
    self.fetch = list(map(RefSpec.FromString,
                          self._Get('fetch', all_keys=True)))
    self._review_url = None

  def _InsteadOf(self):
    globCfg = GitConfig.ForUser()
    urlList = globCfg.GetSubSections('url')
    longest = ""
    longestUrl = ""

    for url in urlList:
      key = "url." + url + ".insteadOf"
      insteadOfList = globCfg.GetString(key, all_keys=True)

      for insteadOf in insteadOfList:
        if (self.url.startswith(insteadOf)
                and len(insteadOf) > len(longest)):
          longest = insteadOf
          longestUrl = url

    if len(longest) == 0:
      return self.url

    return self.url.replace(longest, longestUrl, 1)

  def PreConnectFetch(self, ssh_proxy):
    """Run any setup for this remote before we connect to it.

    In practice, if the remote is using SSH, we'll attempt to create a new
    SSH master session to it for reuse across projects.

    Args:
      ssh_proxy: The SSH settings for managing master sessions.

    Returns:
      Whether the preconnect phase for this remote was successful.
    """
    if not ssh_proxy:
      return True

    connectionUrl = self._InsteadOf()
    return ssh_proxy.preconnect(connectionUrl)

  def ReviewUrl(self, userEmail, validate_certs):
    if self._review_url is None:
      if self.review is None:
        return None

      u = self.review
      if u.startswith('persistent-'):
        u = u[len('persistent-'):]
      if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
        u = 'http://%s' % u
      if u.endswith('/Gerrit'):
        u = u[:len(u) - len('/Gerrit')]
      if u.endswith('/ssh_info'):
        u = u[:len(u) - len('/ssh_info')]
      if not u.endswith('/'):
        u += '/'
      http_url = u

      if u in REVIEW_CACHE:
        self._review_url = REVIEW_CACHE[u]
      elif 'REPO_HOST_PORT_INFO' in os.environ:
        host, port = os.environ['REPO_HOST_PORT_INFO'].split()
        self._review_url = self._SshReviewUrl(userEmail, host, port)
        REVIEW_CACHE[u] = self._review_url
      elif u.startswith('sso:') or u.startswith('ssh:'):
        self._review_url = u  # Assume it's right
        REVIEW_CACHE[u] = self._review_url
      elif 'REPO_IGNORE_SSH_INFO' in os.environ:
        self._review_url = http_url
        REVIEW_CACHE[u] = self._review_url
      else:
        try:
          info_url = u + 'ssh_info'
          if not validate_certs:
            context = ssl._create_unverified_context()
            info = urllib.request.urlopen(info_url, context=context).read()
          else:
            info = urllib.request.urlopen(info_url).read()
          if info == b'NOT_AVAILABLE' or b'<' in info:
            # If `info` contains '<', we assume the server gave us some sort
            # of HTML response back, like maybe a login page.
            #
            # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
            self._review_url = http_url
          else:
            info = info.decode('utf-8')
            host, port = info.split()
            self._review_url = self._SshReviewUrl(userEmail, host, port)
        except urllib.error.HTTPError as e:
          raise UploadError('%s: %s' % (self.review, str(e)))
        except urllib.error.URLError as e:
          raise UploadError('%s: %s' % (self.review, str(e)))
        except HTTPException as e:
          raise UploadError('%s: %s' % (self.review, e.__class__.__name__))

        REVIEW_CACHE[u] = self._review_url
    return self._review_url + self.projectname

  def _SshReviewUrl(self, userEmail, host, port):
    username = self._config.GetString('review.%s.username' % self.review)
    if username is None:
      username = userEmail.split('@')[0]
    return 'ssh://%s@%s:%s/' % (username, host, port)

  def ToLocal(self, rev):
    """Convert a remote revision string to something we have locally.
    """
    if self.name == '.' or IsId(rev):
      return rev

    if not rev.startswith('refs/'):
      rev = R_HEADS + rev

    for spec in self.fetch:
      if spec.SourceMatches(rev):
        return spec.MapSource(rev)

    if not rev.startswith(R_HEADS):
      return rev

    raise GitError('%s: remote %s does not have %s' %
                   (self.projectname, self.name, rev))

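  # Illustrative sketch of ToLocal() above (remote and branch names are
  # examples): with a fetch spec of '+refs/heads/*:refs/remotes/origin/*',
  # ToLocal('main') resolves to 'refs/remotes/origin/main', while a 40-hex
  # commit id is returned unchanged.
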
  def WritesTo(self, ref):
    """True if the remote stores to the tracking ref.
    """
    for spec in self.fetch:
      if spec.DestMatches(ref):
        return True
    return False

  def ResetFetch(self, mirror=False):
    """Set the fetch refspec to its default value.
    """
    if mirror:
      dst = 'refs/heads/*'
    else:
      dst = 'refs/remotes/%s/*' % self.name
    self.fetch = [RefSpec(True, 'refs/heads/*', dst)]

  def Save(self):
    """Save this remote to the configuration.
    """
    self._Set('url', self.url)
    if self.pushUrl is not None:
      self._Set('pushurl', self.pushUrl + '/' + self.projectname)
    else:
      self._Set('pushurl', self.pushUrl)
    self._Set('review', self.review)
    self._Set('projectname', self.projectname)
    self._Set('fetch', list(map(str, self.fetch)))

  def _Set(self, key, value):
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all_keys=False):
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.GetString(key, all_keys=all_keys)


class Branch(object):
  """Configuration options related to a single branch.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.merge = self._Get('merge')

    r = self._Get('remote')
    if r:
      self.remote = self._config.GetRemote(r)
    else:
      self.remote = None

  @property
  def LocalMerge(self):
    """Convert the merge spec to a local name.
    """
    if self.remote and self.merge:
      return self.remote.ToLocal(self.merge)
    return None

  def Save(self):
    """Save this branch back into the configuration.
    """
    if self._config.HasSection('branch', self.name):
      if self.remote:
        self._Set('remote', self.remote.name)
      else:
        self._Set('remote', None)
      self._Set('merge', self.merge)

    else:
      with open(self._config.file, 'a') as fd:
        fd.write('[branch "%s"]\n' % self.name)
        if self.remote:
          fd.write('\tremote = %s\n' % self.remote.name)
        if self.merge:
          fd.write('\tmerge = %s\n' % self.merge)

  def _Set(self, key, value):
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all_keys=False):
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.GetString(key, all_keys=all_keys)


class SyncAnalysisState:
  """Configuration options related to logging of sync state for analysis.

  This object is versioned.
  """

  def __init__(self, config, options, superproject_logging_data):
    """Initializes SyncAnalysisState.

    Saves the following data into the |config| object.
    - sys.argv, options, superproject's logging data.
    - repo.*, branch.* and remote.* parameters from config object.
    - Current time as synctime.
    - Version number of the object.

    All the keys saved by this object are prepended with SYNC_STATE_PREFIX.

    Args:
      config: GitConfig object to store all options.
      options: Options passed to sync returned from optparse. See _Options().
      superproject_logging_data: A dictionary of superproject data that is to
        be logged.
    """
    self._config = config
    now = datetime.datetime.utcnow()
    self._Set('main.synctime', now.isoformat() + 'Z')
    self._Set('main.version', '1')
    self._Set('sys.argv', sys.argv)
    for key, value in superproject_logging_data.items():
      self._Set(f'superproject.{key}', value)
    for key, value in options.__dict__.items():
      self._Set(f'options.{key}', value)
    config_items = config.DumpConfigDict().items()
    EXTRACT_NAMESPACES = {'repo', 'branch', 'remote'}
    self._SetDictionary({k: v for k, v in config_items
                         if not k.startswith(SYNC_STATE_PREFIX) and
                         k.split('.', 1)[0] in EXTRACT_NAMESPACES})

  def _SetDictionary(self, data):
    """Save all key/value pairs of |data| dictionary.

    Args:
      data: A dictionary whose key/value are to be saved.
    """
    for key, value in data.items():
      self._Set(key, value)

  def _Set(self, key, value):
    """Set the |value| for a |key| in the |_config| member.

    |key| is prepended with the value of SYNC_STATE_PREFIX constant.

    Args:
      key: Name of the key.
      value: |value| could be of any type. If it is 'bool', it will be saved
        as a Boolean and for all other types, it will be saved as a String.
    """
    if value is None:
      return
    sync_key = f'{SYNC_STATE_PREFIX}{key}'
    sync_key = sync_key.replace('_', '')
    if isinstance(value, str):
      self._config.SetString(sync_key, value)
    elif isinstance(value, bool):
      self._config.SetBoolean(sync_key, value)
    else:
      self._config.SetString(sync_key, str(value))
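
# Illustrative sketch of the key naming in SyncAnalysisState._Set() above (the
# option name is an example): an options attribute current_branch_only=True is
# stored as the boolean config key 'repo.syncstate.options.currentbranchonly',
# i.e. SYNC_STATE_PREFIX is prepended and underscores are stripped.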