Mirror of https://gerrit.googlesource.com/git-repo

Compare commits (9 commits)
Commits: 5df6de075e, a0de6e8eab, 16614f86b3, 88443387b1, 99482ae58a, ec1df9b7f6, 06d029c1c8, b715b14807, 60829ba72f
@@ -82,7 +82,7 @@ least one of these before using this command."""
       fd = None
 
       if re.compile("^.*[$ \t'].*$").match(editor):
-        args = [editor + ' "$@"']
+        args = [editor + ' "$@"', 'sh']
         shell = True
       else:
         args = [editor]
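For context on that one-line change: when subprocess.Popen receives a list together with shell=True, the first element is the command string handed to "sh -c", the second element becomes the shell's $0, and only the remaining elements populate "$@". Without the extra 'sh', the temp-file path appended later was consumed as $0 and the editor received no file. A small illustrative sketch, not taken from the diff (the echo "editor" and the path are made up):

import subprocess

# With shell=True and a list, Popen effectively runs:
#   /bin/sh -c <args[0]> <args[1]> <args[2]> ...
# <args[1]> becomes $0 and the rest become the positional parameters "$@".
editor = 'echo would-edit'      # stand-in for an editor command containing a space
path = '/tmp/COMMIT_EDITMSG'    # illustrative file name

# Old behaviour: the path lands in $0, so "$@" expands to nothing.
subprocess.Popen([editor + ' "$@"', path], shell=True).wait()        # prints only "would-edit"

# Fixed behaviour: 'sh' fills the $0 slot and the path arrives via "$@".
subprocess.Popen([editor + ' "$@"', 'sh', path], shell=True).wait()  # prints "would-edit /tmp/COMMIT_EDITMSG"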
@@ -257,9 +257,11 @@ class GitConfig(object):
         finally:
           fd.close()
       except IOError:
-        os.remove(self._pickle)
+        if os.path.exists(self._pickle):
+          os.remove(self._pickle)
       except cPickle.PickleError:
-        os.remove(self._pickle)
+        if os.path.exists(self._pickle):
+          os.remove(self._pickle)
 
   def _ReadGit(self):
     """
@@ -356,18 +358,21 @@ class RefSpec(object):
     return s
 
 
-_ssh_cache = {}
+_master_processes = []
+_master_keys = set()
 _ssh_master = True
 
 def _open_ssh(host, port=None):
   global _ssh_master
 
+  # Check to see whether we already think that the master is running; if we
+  # think it's already running, return right away.
   if port is not None:
     key = '%s:%s' % (host, port)
   else:
     key = host
 
-  if key in _ssh_cache:
+  if key in _master_keys:
     return True
 
   if not _ssh_master \
@@ -377,15 +382,39 @@ def _open_ssh(host, port=None):
     #
     return False
 
-  command = ['ssh',
-             '-o','ControlPath %s' % ssh_sock(),
-             '-M',
-             '-N',
-             host]
-
+  # We will make two calls to ssh; this is the common part of both calls.
+  command_base = ['ssh',
+                  '-o','ControlPath %s' % ssh_sock(),
+                  host]
   if port is not None:
-    command[3:3] = ['-p',str(port)]
+    command_base[1:1] = ['-p',str(port)]
 
+  # Since the key wasn't in _master_keys, we think that master isn't running.
+  # ...but before actually starting a master, we'll double-check.  This can
+  # be important because we can't tell that that 'git@myhost.com' is the same
+  # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
+  check_command = command_base + ['-O','check']
+  try:
+    Trace(': %s', ' '.join(check_command))
+    check_process = subprocess.Popen(check_command,
+                                     stdout=subprocess.PIPE,
+                                     stderr=subprocess.PIPE)
+    check_process.communicate() # read output, but ignore it...
+    isnt_running = check_process.wait()
+
+    if not isnt_running:
+      # Our double-check found that the master _was_ infact running.  Add to
+      # the list of keys.
+      _master_keys.add(key)
+      return True
+  except Exception:
+    # Ignore excpetions.  We we will fall back to the normal command and print
+    # to the log there.
+    pass
+
+  command = command_base[:1] + \
+            ['-M', '-N'] + \
+            command_base[1:]
   try:
     Trace(': %s', ' '.join(command))
     p = subprocess.Popen(command)
@@ -396,20 +425,22 @@ def _open_ssh(host, port=None):
           % (host,port, str(e))
     return False
 
-  _ssh_cache[key] = p
+  _master_processes.append(p)
+  _master_keys.add(key)
   time.sleep(1)
   return True
 
 def close_ssh():
   terminate_ssh_clients()
 
-  for key,p in _ssh_cache.iteritems():
+  for p in _master_processes:
     try:
       os.kill(p.pid, SIGTERM)
       p.wait()
     except OSError:
       pass
-  _ssh_cache.clear()
+  del _master_processes[:]
+  _master_keys.clear()
 
   d = ssh_sock(create=False)
   if d:
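The double-check added above leans on OpenSSH's control-master protocol: "ssh -O check" exits 0 when a master already owns the control socket, so a second master is only spawned when that probe fails. A minimal, hedged sketch of the same check-then-start pattern outside of repo (the socket path, function name and return handling are illustrative assumptions):

import os
import subprocess

def ensure_ssh_master(host, port=None, sock_path='/tmp/example-ssh-sock'):
  # Build the option list shared by the probe and the real master,
  # mirroring command_base in the change above.
  base = ['ssh', '-o', 'ControlPath %s' % sock_path]
  if port is not None:
    base += ['-p', str(port)]

  # 'ssh -O check' exits 0 when a master already owns the control socket.
  devnull = open(os.devnull, 'w')
  try:
    if subprocess.call(base + ['-O', 'check', host],
                       stdout=devnull, stderr=devnull) == 0:
      return None                   # a master is already running; nothing to start
  finally:
    devnull.close()

  # Otherwise start one: -M marks it as the master, -N runs no remote command.
  return subprocess.Popen(base + ['-M', '-N', host])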
git_ssh (2 changed lines)
@@ -1,2 +1,2 @@
 #!/bin/sh
-exec ssh -o "ControlPath $REPO_SSH_SOCK" "$@"
+exec ssh -o "ControlMaster no" -o "ControlPath $REPO_SSH_SOCK" "$@"
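The wrapper above is what git itself invokes for SSH transport; adding "ControlMaster no" keeps these per-fetch clients from trying to become a master themselves and leaves that job to the process started in the change above. A hedged sketch of how such a wrapper is consumed (the paths, function name and the fetch invocation are illustrative, not taken from the diff):

import os
import subprocess

def fetch_through_wrapper(wrapper_path, sock_path):
  # Both values are illustrative; the real tool exports them before running git.
  env = os.environ.copy()
  env['GIT_SSH'] = wrapper_path            # e.g. the git_ssh script shown above
  env['REPO_SSH_SOCK'] = sock_path         # must match the master's ControlPath

  # For ssh:// URLs git runs "$GIT_SSH [-p <port>] <host> <command>", so this
  # fetch rides on the existing control master instead of opening a new
  # connection, and never competes to become a master (ControlMaster no).
  return subprocess.call(['git', 'fetch', 'origin'], env=env)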
project.py (120 changed lines)
@@ -111,7 +111,6 @@ class ReviewableBranch(object):
     self.project = project
     self.branch = branch
     self.base = base
-    self.replace_changes = None
 
   @property
   def name(self):
@@ -151,7 +150,6 @@ class ReviewableBranch(object):
 
   def UploadForReview(self, people, auto_topic=False):
     self.project.UploadForReview(self.name,
-                                 self.replace_changes,
                                  people,
                                  auto_topic=auto_topic)
 
@@ -557,7 +555,6 @@ class Project(object):
     return None
 
   def UploadForReview(self, branch=None,
-                      replace_changes=None,
                       people=([],[]),
                       auto_topic=False):
     """Uploads the named branch for code review.
@@ -600,9 +597,6 @@ class Project(object):
     cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
     cmd.append(ref_spec)
 
-    if replace_changes:
-      for change_id,commit_id in replace_changes.iteritems():
-        cmd.append('%s:refs/changes/%s/new' % (commit_id, change_id))
     if GitCommand(self, cmd, bare = True).Wait() != 0:
       raise UploadError('Upload failed')
 
@@ -618,17 +612,19 @@ class Project(object):
 
   ## Sync ##
 
-  def Sync_NetworkHalf(self):
+  def Sync_NetworkHalf(self, quiet=False):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
-    if not self.Exists:
-      print >>sys.stderr
-      print >>sys.stderr, 'Initializing project %s ...' % self.name
+    is_new = not self.Exists
+    if is_new:
+      if not quiet:
+        print >>sys.stderr
+        print >>sys.stderr, 'Initializing project %s ...' % self.name
       self._InitGitDir()
 
     self._InitRemote()
-    if not self._RemoteFetch():
+    if not self._RemoteFetch(initial=is_new, quiet=quiet):
       return False
 
     #Check that the requested ref was found after fetch
@@ -641,7 +637,7 @@ class Project(object):
     #
     rev = self.revisionExpr
     if rev.startswith(R_TAGS):
-      self._RemoteFetch(None, rev[len(R_TAGS):])
+      self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)
 
     if self.worktree:
       self._InitMRef()
@@ -1024,7 +1020,9 @@ class Project(object):
 
   ## Direct Git Commands ##
 
-  def _RemoteFetch(self, name=None, tag=None):
+  def _RemoteFetch(self, name=None, tag=None,
+                   initial=False,
+                   quiet=False):
     if not name:
       name = self.remote.name
 
@@ -1032,17 +1030,84 @@ class Project(object):
     if self.GetRemote(name).PreConnectFetch():
       ssh_proxy = True
 
+    if initial:
+      alt = os.path.join(self.gitdir, 'objects/info/alternates')
+      try:
+        fd = open(alt, 'rb')
+        try:
+          ref_dir = fd.readline()
+          if ref_dir and ref_dir.endswith('\n'):
+            ref_dir = ref_dir[:-1]
+        finally:
+          fd.close()
+      except IOError, e:
+        ref_dir = None
+
+      if ref_dir and 'objects' == os.path.basename(ref_dir):
+        ref_dir = os.path.dirname(ref_dir)
+        packed_refs = os.path.join(self.gitdir, 'packed-refs')
+        remote = self.GetRemote(name)
+
+        all = self.bare_ref.all
+        ids = set(all.values())
+        tmp = set()
+
+        for r, id in GitRefs(ref_dir).all.iteritems():
+          if r not in all:
+            if r.startswith(R_TAGS) or remote.WritesTo(r):
+              all[r] = id
+              ids.add(id)
+              continue
+
+          if id in ids:
+            continue
+
+          r = 'refs/_alt/%s' % id
+          all[r] = id
+          ids.add(id)
+          tmp.add(r)
+
+        ref_names = list(all.keys())
+        ref_names.sort()
+
+        tmp_packed = ''
+        old_packed = ''
+
+        for r in ref_names:
+          line = '%s %s\n' % (all[r], r)
+          tmp_packed += line
+          if r not in tmp:
+            old_packed += line
+
+        _lwrite(packed_refs, tmp_packed)
+
+      else:
+        ref_dir = None
+
     cmd = ['fetch']
+    if quiet:
+      cmd.append('--quiet')
     if not self.worktree:
       cmd.append('--update-head-ok')
     cmd.append(name)
     if tag is not None:
       cmd.append('tag')
       cmd.append(tag)
-    return GitCommand(self,
-                      cmd,
-                      bare = True,
-                      ssh_proxy = ssh_proxy).Wait() == 0
+
+    ok = GitCommand(self,
+                    cmd,
+                    bare = True,
+                    ssh_proxy = ssh_proxy).Wait() == 0
+
+    if initial:
+      if ref_dir:
+        if old_packed != '':
+          _lwrite(packed_refs, old_packed)
+        else:
+          os.remove(packed_refs)
+        self.bare_git.pack_refs('--all', '--prune')
+
+    return ok
 
   def _Checkout(self, rev, quiet=False):
     cmd = ['checkout']
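The initial-fetch branch above is an optimization: before the first fetch, every ref reachable from the reference repository named in objects/info/alternates is temporarily written into packed-refs (extras under a throwaway refs/_alt/ namespace), so the server treats those commits as already present and sends far fewer objects; afterwards the borrowed refs are dropped again. A compressed, hedged sketch of the same idea using plain git commands instead of repo's GitRefs/_lwrite helpers (the function name and for-each-ref formatting are assumptions, and the project's own refs are not merged back in as the real code does):

import os
import subprocess

def fetch_with_reference(gitdir, remote='origin'):
  alternates = os.path.join(gitdir, 'objects/info/alternates')
  packed_refs = os.path.join(gitdir, 'packed-refs')

  ref_dir = None
  if os.path.exists(alternates):
    with open(alternates) as fd:
      ref_dir = fd.readline().strip()
    if os.path.basename(ref_dir) == 'objects':
      ref_dir = os.path.dirname(ref_dir)      # .../foo.git/objects -> .../foo.git

  if ref_dir:
    # Stage every ref of the reference repository under a throwaway namespace
    # so 'git fetch' sees those commits as already reachable locally.
    out = subprocess.check_output(
        ['git', '--git-dir', ref_dir, 'for-each-ref',
         '--format=%(objectname) refs/_alt/%(objectname)'])
    lines = sorted(set(out.decode().splitlines()))
    with open(packed_refs, 'w') as fd:
      fd.write('\n'.join(lines) + '\n')

  try:
    subprocess.check_call(['git', '--git-dir', gitdir, 'fetch', remote])
  finally:
    if ref_dir:
      # Drop the temporary refs again, roughly what pack-refs does above.
      os.remove(packed_refs)
      subprocess.check_call(['git', '--git-dir', gitdir,
                             'pack-refs', '--all', '--prune'])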
@@ -1080,6 +1145,27 @@ class Project(object):
       os.makedirs(self.gitdir)
       self.bare_git.init()
 
+      mp = self.manifest.manifestProject
+      ref_dir = mp.config.GetString('repo.reference')
+
+      if ref_dir:
+        mirror_git = os.path.join(ref_dir, self.name + '.git')
+        repo_git = os.path.join(ref_dir, '.repo', 'projects',
+                                self.relpath + '.git')
+
+        if os.path.exists(mirror_git):
+          ref_dir = mirror_git
+
+        elif os.path.exists(repo_git):
+          ref_dir = repo_git
+
+        else:
+          ref_dir = None
+
+        if ref_dir:
+          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
+                  os.path.join(ref_dir, 'objects') + '\n')
+
       if self.manifest.IsMirror:
         self.config.SetString('core.bare', 'true')
       else:
repo (5 changed lines)
@@ -28,7 +28,7 @@ if __name__ == '__main__':
 del magic
 
 # increment this whenever we make important changes to this script
-VERSION = (1, 8)
+VERSION = (1, 9)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1,0)
@@ -118,6 +118,9 @@ group.add_option('-m', '--manifest-name',
 group.add_option('--mirror',
                  dest='mirror', action='store_true',
                  help='mirror the forrest')
+group.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')
 
 # Tool
 group = init_optparse.add_option_group('repo Version options')
@@ -41,6 +41,13 @@ The optional -m argument can be used to specify an alternate manifest
 to be used. If no manifest is specified, the manifest default.xml
 will be used.
 
+The --reference option can be used to point to a directory that
+has the content of a --mirror sync. This will make the working
+directory use as much data as possible from the local reference
+directory when fetching from the server. This will make the sync
+go a lot faster by reducing data traffic on the network.
+
+
 Switching Manifest Branches
 ---------------------------
 
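For background on the --reference paragraph above: the speedup comes from git's alternates mechanism; each project's objects/info/alternates file is pointed at the object store of the local mirror, so object lookups are satisfied locally before anything is transferred over the network. A minimal, hedged sketch of that wiring (the helper name and paths are illustrative, not repo's API):

import os

def link_to_reference(project_gitdir, reference_dir, project_name):
  # Roughly what gets arranged per project when --reference is given: a
  # <name>.git checkout inside the reference directory becomes an alternate
  # object store for the freshly created project git directory.
  mirror_git = os.path.join(reference_dir, project_name + '.git')
  if not os.path.isdir(mirror_git):
    return False

  info = os.path.join(project_gitdir, 'objects', 'info')
  if not os.path.isdir(info):
    os.makedirs(info)
  # git consults every directory listed in this file before going to the network.
  with open(os.path.join(info, 'alternates'), 'w') as fd:
    fd.write(os.path.join(mirror_git, 'objects') + '\n')
  return True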
@@ -71,7 +78,9 @@ to update the working directory files.
     g.add_option('--mirror',
                  dest='mirror', action='store_true',
                  help='mirror the forrest')
-
+    g.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')
 
     # Tool
     g = p.add_option_group('repo Version options')
@@ -115,6 +124,9 @@ to update the working directory files.
       r.ResetFetch()
       r.Save()
 
+    if opt.reference:
+      m.config.SetString('repo.reference', opt.reference)
+
     if opt.mirror:
       if is_new:
         m.config.SetString('repo.mirror', 'true')
@@ -70,6 +70,9 @@ The -s/--smart-sync option can be used to sync to a known good
 build as specified by the manifest-server element in the current
 manifest.
 
+The -f/--force-broken option can be used to proceed with syncing
+other projects if a project sync fails.
+
 SSH Connections
 ---------------
 
@@ -101,6 +104,9 @@ later is required to fix a server side protocol bug.
 """
 
   def _Options(self, p, show_smart=True):
+    p.add_option('-f', '--force-broken',
+                 dest='force_broken', action='store_true',
+                 help="continue sync even if a project fails to sync")
     p.add_option('-l','--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
@@ -110,6 +116,9 @@ later is required to fix a server side protocol bug.
     p.add_option('-d','--detach',
                  dest='detach_head', action='store_true',
                  help='detach projects back to manifest revision')
+    p.add_option('-q','--quiet',
+                 dest='quiet', action='store_true',
+                 help='be more quiet')
     p.add_option('-j','--jobs',
                  dest='jobs', action='store', type='int',
                  help="number of projects to fetch simultaneously")
@@ -126,11 +135,14 @@ later is required to fix a server side protocol bug.
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)
 
-  def _FetchHelper(self, project, lock, fetched, pm, sem):
-    if not project.Sync_NetworkHalf():
+  def _FetchHelper(self, opt, project, lock, fetched, pm, sem):
+    if not project.Sync_NetworkHalf(quiet=opt.quiet):
       print >>sys.stderr, 'error: Cannot fetch %s' % project.name
-      sem.release()
-      sys.exit(1)
+      if opt.force_broken:
+        print >>sys.stderr, 'warn: --force-broken, continuing to sync'
+      else:
+        sem.release()
+        sys.exit(1)
 
     lock.acquire()
     fetched.add(project.gitdir)
@@ -138,18 +150,21 @@ later is required to fix a server side protocol bug.
     lock.release()
     sem.release()
 
-  def _Fetch(self, projects):
+  def _Fetch(self, projects, opt):
     fetched = set()
     pm = Progress('Fetching projects', len(projects))
 
     if self.jobs == 1:
       for project in projects:
         pm.update()
-        if project.Sync_NetworkHalf():
+        if project.Sync_NetworkHalf(quiet=opt.quiet):
           fetched.add(project.gitdir)
         else:
           print >>sys.stderr, 'error: Cannot fetch %s' % project.name
-          sys.exit(1)
+          if opt.force_broken:
+            print >>sys.stderr, 'warn: --force-broken, continuing to sync'
+          else:
+            sys.exit(1)
     else:
       threads = set()
       lock = _threading.Lock()
@@ -157,7 +172,12 @@ later is required to fix a server side protocol bug.
       for project in projects:
         sem.acquire()
         t = _threading.Thread(target = self._FetchHelper,
-                              args = (project, lock, fetched, pm, sem))
+                              args = (opt,
+                                      project,
+                                      lock,
+                                      fetched,
+                                      pm,
+                                      sem))
         threads.add(t)
         t.start()
 
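The threading changes above keep a fixed number of fetch workers in flight: a semaphore slot is acquired before each thread starts and released by the worker even on failure, while a lock guards the shared set of fetched projects. A condensed, hedged sketch of that pattern outside of repo (the function names are stand-ins; fetch_one plays the role of project.Sync_NetworkHalf):

import threading

def fetch_all(projects, fetch_one, jobs=4, force_broken=False):
  fetched = set()
  errors = []
  lock = threading.Lock()
  sem = threading.Semaphore(jobs)

  def worker(project):
    try:
      ok = fetch_one(project)
      with lock:                 # protect the shared collections
        if ok:
          fetched.add(project)
        else:
          errors.append(project)
    finally:
      sem.release()              # always free the slot, even on failure

  threads = []
  for project in projects:
    sem.acquire()                # blocks once `jobs` workers are running
    t = threading.Thread(target=worker, args=(project,))
    threads.append(t)
    t.start()
  for t in threads:
    t.join()

  if errors and not force_broken:
    raise RuntimeError('failed to fetch: %s' % ', '.join(map(str, errors)))
  return fetched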
@@ -291,7 +311,7 @@ uncommitted changes are present' % project.relpath
       _PostRepoUpgrade(self.manifest)
 
     if not opt.local_only:
-      mp.Sync_NetworkHalf()
+      mp.Sync_NetworkHalf(quiet=opt.quiet)
 
     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
@@ -308,7 +328,7 @@ uncommitted changes are present' % project.relpath
         to_fetch.append(rp)
       to_fetch.extend(all)
 
-      fetched = self._Fetch(to_fetch)
+      fetched = self._Fetch(to_fetch, opt)
       _PostRepoFetch(rp, opt.no_repo_verify)
       if opt.network_only:
         # bail out now; the rest touches the working tree
@@ -320,7 +340,7 @@ uncommitted changes are present' % project.relpath
     for project in all:
       if project.gitdir not in fetched:
         missing.append(project)
-    self._Fetch(missing)
+    self._Fetch(missing, opt)
 
     if self.manifest.IsMirror:
       # bail out now, we have no working tree
@@ -47,7 +47,7 @@ class Upload(InteractiveCommand):
   common = True
   helpSummary = "Upload changes for code review"
   helpUsage="""
-%prog [--re --cc] {[<project>]... | --replace <project>}
+%prog [--re --cc] [<project>]...
 """
   helpDescription = """
 The '%prog' command is used to send changes to the Gerrit Code
@@ -67,12 +67,6 @@ added to the respective list of users, and emails are sent to any
 new users. Users passed as --reviewers must already be registered
 with the code review system, or the upload will fail.
 
-If the --replace option is passed the user can designate which
-existing change(s) in Gerrit match up to the commits in the branch
-being uploaded. For each matched pair of change,commit the commit
-will be added as a new patch set, completely replacing the set of
-files and description associated with the change in Gerrit.
-
 Configuration
 -------------
 
@@ -119,9 +113,6 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     p.add_option('-t',
                  dest='auto_topic', action='store_true',
                  help='Send local branch name to Gerrit Code Review')
-    p.add_option('--replace',
-                 dest='replace', action='store_true',
-                 help='Upload replacement patchesets from this branch')
     p.add_option('--re', '--reviewers',
                  type='string', action='append', dest='reviewers',
                  help='Request reviews from these people.')
@@ -262,65 +253,6 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
     except:
       return ""
 
-  def _ReplaceBranch(self, project, people):
-    branch = project.CurrentBranch
-    if not branch:
-      print >>sys.stdout, "no branches ready for upload"
-      return
-    branch = project.GetUploadableBranch(branch)
-    if not branch:
-      print >>sys.stdout, "no branches ready for upload"
-      return
-
-    script = []
-    script.append('# Replacing from branch %s' % branch.name)
-
-    if len(branch.commits) == 1:
-      change = self._FindGerritChange(branch)
-      script.append('[%-6s] %s' % (change, branch.commits[0]))
-    else:
-      for commit in branch.commits:
-        script.append('[      ] %s' % commit)
-
-    script.append('')
-    script.append('# Insert change numbers in the brackets to add a new patch set.')
-    script.append('# To create a new change record, leave the brackets empty.')
-
-    script = Editor.EditString("\n".join(script)).split("\n")
-
-    change_re = re.compile(r'^\[\s*(\d{1,})\s*\]\s*([0-9a-f]{1,}) .*$')
-    to_replace = dict()
-    full_hashes = branch.unabbrev_commits
-
-    for line in script:
-      m = change_re.match(line)
-      if m:
-        c = m.group(1)
-        f = m.group(2)
-        try:
-          f = full_hashes[f]
-        except KeyError:
-          print 'fh = %s' % full_hashes
-          print >>sys.stderr, "error: commit %s not found" % f
-          sys.exit(1)
-        if c in to_replace:
-          print >>sys.stderr,\
-                "error: change %s cannot accept multiple commits" % c
-          sys.exit(1)
-        to_replace[c] = f
-
-    if not to_replace:
-      print >>sys.stderr, "error: no replacements specified"
-      print >>sys.stderr, "       use 'repo upload' without --replace"
-      sys.exit(1)
-
-    if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
-      if not _ConfirmManyUploads(multiple_branches=True):
-        _die("upload aborted by user")
-
-    branch.replace_changes = to_replace
-    self._UploadAndReport(opt, [branch], people)
-
   def _UploadAndReport(self, opt, todo, original_people):
     have_errors = False
     for branch in todo:
@@ -383,14 +315,6 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
       cc = _SplitEmails(opt.cc)
     people = (reviewers,cc)
 
-    if opt.replace:
-      if len(project_list) != 1:
-        print >>sys.stderr, \
-              'error: --replace requires exactly one project'
-        sys.exit(1)
-      self._ReplaceBranch(project_list[0], people)
-      return
-
     for project in project_list:
       avail = project.GetUploadableBranches()
       if avail: