mirror of https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00
Compare commits
86 Commits
350cde4c4b
48244781c2
19a83d8085
b1168ffada
4c5c7aa74b
ff84fea0bb
d33f43a754
e756c412e3
b812a36236
161f445a4d
68194f42b0
b1562faee0
3e768c9dc7
96fdcef9e3
2a1ccb2b0c
0a389e94de
2675c3f8b5
27b07327bc
02d7945eb8
8f82a4f828
146fe902b7
722acefdc4
13cc3844d7
feabbdb440
8630f39dba
df01883f9b
1fc99f4e47
1775dbe176
521cd3ce67
5470df6219
0ed2bd1d95
c7a4eefa7e
43c3d9ea17
4259b8a2ac
2816d4f387
44469464d2
c95583bf4f
6a5644d392
fe08675956
be0e8ac232
47c1a63a07
559b846b17
7c6c64d463
3778f9d47e
993eedf9fa
02e0cdf359
a8e98a6962
5ab508cbcc
370e3fa666
b54a392c9a
21f7385400
24d8dfbc34
a6df7d284c
67092448c2
e92ceebde0
03eaf07ec6
2896a79120
8c6eef4713
34d237fbfb
c99883fee9
ec18b4bac4
35f2596c27
5d40e26201
70939e2f73
ae6e0949d1
339ba9f6f7
70cd4ab270
e284ad1d1a
3e5481999d
d3c388391e
2450a2987a
f5c25a68d8
9fa44db94b
c9ef744c7b
438ee1cad9
23d7781c0b
a54c527ae9
df830f1238
90be5c0839
7965f9fed0
de646819b8
bd4edc9a69
ce03a401c6
45476c40c7
1619134720
7efd1a5b23
29  Makefile
@@ -1,29 +0,0 @@
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

GERRIT_SRC=../gerrit
GERRIT_MODULES=codereview froofle

all:

clean:
	find . -name \*.pyc -type f | xargs rm -f

update-pyclient:
	$(MAKE) -C $(GERRIT_SRC) release-pyclient
	rm -rf $(GERRIT_MODULES)
	(cd $(GERRIT_SRC)/release/pyclient && \
	 find . -type f \
	 | cpio -pd $(abspath .))
@@ -1 +0,0 @@
__version__ = 'v1.0-14-gc4f226bc'
@@ -1,32 +0,0 @@
#!/usr/bin/python2.4
# Generated by the protocol buffer compiler.  DO NOT EDIT!

from froofle.protobuf import descriptor
from froofle.protobuf import message
from froofle.protobuf import reflection
from froofle.protobuf import service
from froofle.protobuf import service_reflection
from froofle.protobuf import descriptor_pb2



_RETRYREQUESTLATERRESPONSE = descriptor.Descriptor(
  name='RetryRequestLaterResponse',
  full_name='codereview.RetryRequestLaterResponse',
  filename='need_retry.proto',
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)



class RetryRequestLaterResponse(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _RETRYREQUESTLATERRESPONSE
@ -1,360 +0,0 @@
|
||||
# Copyright 2007, 2008 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import base64
|
||||
import cookielib
|
||||
import getpass
|
||||
import logging
|
||||
import md5
|
||||
import os
|
||||
import random
|
||||
import socket
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
import urlparse
|
||||
|
||||
from froofle.protobuf.service import RpcChannel
|
||||
from froofle.protobuf.service import RpcController
|
||||
from need_retry_pb2 import RetryRequestLaterResponse;
|
||||
|
||||
class ClientLoginError(urllib2.HTTPError):
|
||||
"""Raised to indicate an error authenticating with ClientLogin."""
|
||||
|
||||
def __init__(self, url, code, msg, headers, args):
|
||||
urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
|
||||
self.args = args
|
||||
self.reason = args["Error"]
|
||||
|
||||
|
||||
class Proxy(object):
|
||||
class _ResultHolder(object):
|
||||
def __call__(self, result):
|
||||
self._result = result
|
||||
|
||||
class _RemoteController(RpcController):
|
||||
def Reset(self):
|
||||
pass
|
||||
|
||||
def Failed(self):
|
||||
pass
|
||||
|
||||
def ErrorText(self):
|
||||
pass
|
||||
|
||||
def StartCancel(self):
|
||||
pass
|
||||
|
||||
def SetFailed(self, reason):
|
||||
raise RuntimeError, reason
|
||||
|
||||
def IsCancelled(self):
|
||||
pass
|
||||
|
||||
def NotifyOnCancel(self, callback):
|
||||
pass
|
||||
|
||||
def __init__(self, stub):
|
||||
self._stub = stub
|
||||
|
||||
def __getattr__(self, key):
|
||||
method = getattr(self._stub, key)
|
||||
|
||||
def call(request):
|
||||
done = self._ResultHolder()
|
||||
method(self._RemoteController(), request, done)
|
||||
return done._result
|
||||
|
||||
return call
|
||||
|
||||
|
||||
class HttpRpc(RpcChannel):
|
||||
"""Simple protobuf over HTTP POST implementation."""
|
||||
|
||||
def __init__(self, host, auth_function,
|
||||
host_override=None,
|
||||
extra_headers={},
|
||||
cookie_file=None):
|
||||
"""Creates a new HttpRpc.
|
||||
|
||||
Args:
|
||||
host: The host to send requests to.
|
||||
auth_function: A function that takes no arguments and returns an
|
||||
(email, password) tuple when called. Will be called if authentication
|
||||
is required.
|
||||
host_override: The host header to send to the server (defaults to host).
|
||||
extra_headers: A dict of extra headers to append to every request.
|
||||
cookie_file: If not None, name of the file in ~/ to save the
|
||||
cookie jar into. Applications are encouraged to set this to
|
||||
'.$appname_cookies' or some otherwise unique name.
|
||||
"""
|
||||
self.host = host.lower()
|
||||
self.host_override = host_override
|
||||
self.auth_function = auth_function
|
||||
self.authenticated = False
|
||||
self.extra_headers = extra_headers
|
||||
self.xsrf_token = None
|
||||
if cookie_file is None:
|
||||
self.cookie_file = None
|
||||
else:
|
||||
self.cookie_file = os.path.expanduser("~/%s" % cookie_file)
|
||||
self.opener = self._GetOpener()
|
||||
if self.host_override:
|
||||
logging.info("Server: %s; Host: %s", self.host, self.host_override)
|
||||
else:
|
||||
logging.info("Server: %s", self.host)
|
||||
|
||||
def CallMethod(self, method, controller, request, response_type, done):
|
||||
pat = "application/x-google-protobuf; name=%s"
|
||||
|
||||
url = "/proto/%s/%s" % (method.containing_service.name, method.name)
|
||||
reqbin = request.SerializeToString()
|
||||
reqtyp = pat % request.DESCRIPTOR.full_name
|
||||
reqmd5 = base64.b64encode(md5.new(reqbin).digest())
|
||||
|
||||
start = time.time()
|
||||
while True:
|
||||
t, b = self._Send(url, reqbin, reqtyp, reqmd5)
|
||||
if t == (pat % RetryRequestLaterResponse.DESCRIPTOR.full_name):
|
||||
if time.time() >= (start + 1800):
|
||||
controller.SetFailed("timeout")
|
||||
return
|
||||
s = random.uniform(0.250, 2.000)
|
||||
print "Busy, retrying in %.3f seconds ..." % s
|
||||
time.sleep(s)
|
||||
continue
|
||||
|
||||
if t == (pat % response_type.DESCRIPTOR.full_name):
|
||||
response = response_type()
|
||||
response.ParseFromString(b)
|
||||
done(response)
|
||||
else:
|
||||
controller.SetFailed("Unexpected %s response" % t)
|
||||
break
|
||||
|
||||
def _CreateRequest(self, url, data=None):
|
||||
"""Creates a new urllib request."""
|
||||
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
|
||||
req = urllib2.Request(url, data=data)
|
||||
if self.host_override:
|
||||
req.add_header("Host", self.host_override)
|
||||
for key, value in self.extra_headers.iteritems():
|
||||
req.add_header(key, value)
|
||||
return req
|
||||
|
||||
def _GetAuthToken(self, email, password):
|
||||
"""Uses ClientLogin to authenticate the user, returning an auth token.
|
||||
|
||||
Args:
|
||||
email: The user's email address
|
||||
password: The user's password
|
||||
|
||||
Raises:
|
||||
ClientLoginError: If there was an error authenticating with ClientLogin.
|
||||
HTTPError: If there was some other form of HTTP error.
|
||||
|
||||
Returns:
|
||||
The authentication token returned by ClientLogin.
|
||||
"""
|
||||
account_type = 'GOOGLE'
|
||||
if self.host.endswith('.google.com'):
|
||||
account_type = 'HOSTED'
|
||||
|
||||
req = self._CreateRequest(
|
||||
url="https://www.google.com/accounts/ClientLogin",
|
||||
data=urllib.urlencode({
|
||||
"Email": email,
|
||||
"Passwd": password,
|
||||
"service": "ah",
|
||||
"source": "gerrit-codereview-client",
|
||||
"accountType": account_type,
|
||||
})
|
||||
)
|
||||
try:
|
||||
response = self.opener.open(req)
|
||||
response_body = response.read()
|
||||
response_dict = dict(x.split("=")
|
||||
for x in response_body.split("\n") if x)
|
||||
return response_dict["Auth"]
|
||||
except urllib2.HTTPError, e:
|
||||
if e.code == 403:
|
||||
body = e.read()
|
||||
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
|
||||
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
|
||||
e.headers, response_dict)
|
||||
else:
|
||||
raise
|
||||
|
||||
def _GetAuthCookie(self, auth_token):
|
||||
"""Fetches authentication cookies for an authentication token.
|
||||
|
||||
Args:
|
||||
auth_token: The authentication token returned by ClientLogin.
|
||||
|
||||
Raises:
|
||||
HTTPError: If there was an error fetching the authentication cookies.
|
||||
"""
|
||||
# This is a dummy value to allow us to identify when we're successful.
|
||||
continue_location = "http://localhost/"
|
||||
args = {"continue": continue_location, "auth": auth_token}
|
||||
req = self._CreateRequest("http://%s/_ah/login?%s" %
|
||||
(self.host, urllib.urlencode(args)))
|
||||
try:
|
||||
response = self.opener.open(req)
|
||||
except urllib2.HTTPError, e:
|
||||
response = e
|
||||
if (response.code != 302 or
|
||||
response.info()["location"] != continue_location):
|
||||
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
|
||||
response.headers, response.fp)
|
||||
|
||||
def _GetXsrfToken(self):
|
||||
"""Fetches /proto/_token for use in X-XSRF-Token HTTP header.
|
||||
|
||||
Raises:
|
||||
HTTPError: If there was an error fetching a new token.
|
||||
"""
|
||||
tries = 0
|
||||
while True:
|
||||
url = "http://%s/proto/_token" % self.host
|
||||
req = self._CreateRequest(url)
|
||||
try:
|
||||
response = self.opener.open(req)
|
||||
self.xsrf_token = response.read()
|
||||
return
|
||||
except urllib2.HTTPError, e:
|
||||
if tries > 3:
|
||||
raise
|
||||
elif e.code == 401:
|
||||
self._Authenticate()
|
||||
else:
|
||||
raise
|
||||
|
||||
def _Authenticate(self):
|
||||
"""Authenticates the user.
|
||||
|
||||
The authentication process works as follows:
|
||||
1) We get a username and password from the user
|
||||
2) We use ClientLogin to obtain an AUTH token for the user
|
||||
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
|
||||
3) We pass the auth token to /_ah/login on the server to obtain an
|
||||
authentication cookie. If login was successful, it tries to redirect
|
||||
us to the URL we provided.
|
||||
|
||||
If we attempt to access the upload API without first obtaining an
|
||||
authentication cookie, it returns a 401 response and directs us to
|
||||
authenticate ourselves with ClientLogin.
|
||||
"""
|
||||
attempts = 0
|
||||
while True:
|
||||
attempts += 1
|
||||
try:
|
||||
cred = self.auth_function()
|
||||
auth_token = self._GetAuthToken(cred[0], cred[1])
|
||||
except ClientLoginError:
|
||||
if attempts < 3:
|
||||
continue
|
||||
raise
|
||||
self._GetAuthCookie(auth_token)
|
||||
self.authenticated = True
|
||||
if self.cookie_file is not None:
|
||||
self.cookie_jar.save()
|
||||
return
|
||||
|
||||
def _Send(self, request_path, payload, content_type, content_md5):
|
||||
"""Sends an RPC and returns the response.
|
||||
|
||||
Args:
|
||||
request_path: The path to send the request to, eg /api/appversion/create.
|
||||
payload: The body of the request, or None to send an empty request.
|
||||
content_type: The Content-Type header to use.
|
||||
content_md5: The Content-MD5 header to use.
|
||||
|
||||
Returns:
|
||||
The content type, as a string.
|
||||
The response body, as a string.
|
||||
"""
|
||||
if not self.authenticated:
|
||||
self._Authenticate()
|
||||
if not self.xsrf_token:
|
||||
self._GetXsrfToken()
|
||||
|
||||
old_timeout = socket.getdefaulttimeout()
|
||||
socket.setdefaulttimeout(None)
|
||||
try:
|
||||
tries = 0
|
||||
while True:
|
||||
tries += 1
|
||||
url = "http://%s%s" % (self.host, request_path)
|
||||
req = self._CreateRequest(url=url, data=payload)
|
||||
req.add_header("Content-Type", content_type)
|
||||
req.add_header("Content-MD5", content_md5)
|
||||
req.add_header("X-XSRF-Token", self.xsrf_token)
|
||||
try:
|
||||
f = self.opener.open(req)
|
||||
hdr = f.info()
|
||||
type = hdr.getheader('Content-Type',
|
||||
'application/octet-stream')
|
||||
response = f.read()
|
||||
f.close()
|
||||
return type, response
|
||||
except urllib2.HTTPError, e:
|
||||
if tries > 3:
|
||||
raise
|
||||
elif e.code == 401:
|
||||
self._Authenticate()
|
||||
elif e.code == 403:
|
||||
if not hasattr(e, 'read'):
|
||||
e.read = lambda self: ''
|
||||
raise RuntimeError, '403\nxsrf: %s\n%s' \
|
||||
% (self.xsrf_token, e.read())
|
||||
else:
|
||||
raise
|
||||
finally:
|
||||
socket.setdefaulttimeout(old_timeout)
|
||||
|
||||
def _GetOpener(self):
|
||||
"""Returns an OpenerDirector that supports cookies and ignores redirects.
|
||||
|
||||
Returns:
|
||||
A urllib2.OpenerDirector object.
|
||||
"""
|
||||
opener = urllib2.OpenerDirector()
|
||||
opener.add_handler(urllib2.ProxyHandler())
|
||||
opener.add_handler(urllib2.UnknownHandler())
|
||||
opener.add_handler(urllib2.HTTPHandler())
|
||||
opener.add_handler(urllib2.HTTPDefaultErrorHandler())
|
||||
opener.add_handler(urllib2.HTTPSHandler())
|
||||
opener.add_handler(urllib2.HTTPErrorProcessor())
|
||||
if self.cookie_file is not None:
|
||||
self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
|
||||
if os.path.exists(self.cookie_file):
|
||||
try:
|
||||
self.cookie_jar.load()
|
||||
self.authenticated = True
|
||||
except (cookielib.LoadError, IOError):
|
||||
# Failed to load cookies - just ignore them.
|
||||
pass
|
||||
else:
|
||||
# Create an empty cookie file with mode 600
|
||||
fd = os.open(self.cookie_file, os.O_CREAT, 0600)
|
||||
os.close(fd)
|
||||
# Always chmod the cookie file
|
||||
os.chmod(self.cookie_file, 0600)
|
||||
else:
|
||||
# Don't save cookies across runs of update.py.
|
||||
self.cookie_jar = cookielib.CookieJar()
|
||||
opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
|
||||
return opener
|
||||
|
@ -1,48 +0,0 @@
|
||||
#!/usr/bin/python2.4
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
|
||||
from froofle.protobuf import descriptor
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf import reflection
|
||||
from froofle.protobuf import service
|
||||
from froofle.protobuf import service_reflection
|
||||
from froofle.protobuf import descriptor_pb2
|
||||
|
||||
|
||||
import upload_bundle_pb2
|
||||
|
||||
|
||||
|
||||
_REVIEWSERVICE = descriptor.ServiceDescriptor(
|
||||
name='ReviewService',
|
||||
full_name='codereview.ReviewService',
|
||||
index=0,
|
||||
options=None,
|
||||
methods=[
|
||||
descriptor.MethodDescriptor(
|
||||
name='UploadBundle',
|
||||
full_name='codereview.ReviewService.UploadBundle',
|
||||
index=0,
|
||||
containing_service=None,
|
||||
input_type=upload_bundle_pb2._UPLOADBUNDLEREQUEST,
|
||||
output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
|
||||
options=None,
|
||||
),
|
||||
descriptor.MethodDescriptor(
|
||||
name='ContinueBundle',
|
||||
full_name='codereview.ReviewService.ContinueBundle',
|
||||
index=1,
|
||||
containing_service=None,
|
||||
input_type=upload_bundle_pb2._UPLOADBUNDLECONTINUE,
|
||||
output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
|
||||
options=None,
|
||||
),
|
||||
])
|
||||
|
||||
class ReviewService(service.Service):
|
||||
__metaclass__ = service_reflection.GeneratedServiceType
|
||||
DESCRIPTOR = _REVIEWSERVICE
|
||||
class ReviewService_Stub(ReviewService):
|
||||
__metaclass__ = service_reflection.GeneratedServiceStubType
|
||||
DESCRIPTOR = _REVIEWSERVICE
|
||||
|
@ -1,190 +0,0 @@
|
||||
#!/usr/bin/python2.4
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
|
||||
from froofle.protobuf import descriptor
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf import reflection
|
||||
from froofle.protobuf import service
|
||||
from froofle.protobuf import service_reflection
|
||||
from froofle.protobuf import descriptor_pb2
|
||||
|
||||
|
||||
_UPLOADBUNDLERESPONSE_CODETYPE = descriptor.EnumDescriptor(
|
||||
name='CodeType',
|
||||
full_name='codereview.UploadBundleResponse.CodeType',
|
||||
filename='CodeType',
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='RECEIVED', index=0, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='CONTINUE', index=1, number=4,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='UNAUTHORIZED_USER', index=2, number=7,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='UNKNOWN_PROJECT', index=3, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='UNKNOWN_BRANCH', index=4, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='UNKNOWN_BUNDLE', index=5, number=5,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='NOT_BUNDLE_OWNER', index=6, number=6,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='BUNDLE_CLOSED', index=7, number=8,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
options=None,
|
||||
)
|
||||
|
||||
|
||||
_UPLOADBUNDLEREQUEST = descriptor.Descriptor(
|
||||
name='UploadBundleRequest',
|
||||
full_name='codereview.UploadBundleRequest',
|
||||
filename='upload_bundle.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='dest_project', full_name='codereview.UploadBundleRequest.dest_project', index=0,
|
||||
number=10, type=9, cpp_type=9, label=2,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='dest_branch', full_name='codereview.UploadBundleRequest.dest_branch', index=1,
|
||||
number=11, type=9, cpp_type=9, label=2,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='partial_upload', full_name='codereview.UploadBundleRequest.partial_upload', index=2,
|
||||
number=12, type=8, cpp_type=7, label=2,
|
||||
default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='bundle_data', full_name='codereview.UploadBundleRequest.bundle_data', index=3,
|
||||
number=13, type=12, cpp_type=9, label=2,
|
||||
default_value="",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='contained_object', full_name='codereview.UploadBundleRequest.contained_object', index=4,
|
||||
number=1, type=9, cpp_type=9, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_UPLOADBUNDLERESPONSE = descriptor.Descriptor(
|
||||
name='UploadBundleResponse',
|
||||
full_name='codereview.UploadBundleResponse',
|
||||
filename='upload_bundle.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='status_code', full_name='codereview.UploadBundleResponse.status_code', index=0,
|
||||
number=10, type=14, cpp_type=8, label=2,
|
||||
default_value=1,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='bundle_id', full_name='codereview.UploadBundleResponse.bundle_id', index=1,
|
||||
number=11, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
_UPLOADBUNDLERESPONSE_CODETYPE,
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_UPLOADBUNDLECONTINUE = descriptor.Descriptor(
|
||||
name='UploadBundleContinue',
|
||||
full_name='codereview.UploadBundleContinue',
|
||||
filename='upload_bundle.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='bundle_id', full_name='codereview.UploadBundleContinue.bundle_id', index=0,
|
||||
number=10, type=9, cpp_type=9, label=2,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='segment_id', full_name='codereview.UploadBundleContinue.segment_id', index=1,
|
||||
number=11, type=5, cpp_type=1, label=2,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='partial_upload', full_name='codereview.UploadBundleContinue.partial_upload', index=2,
|
||||
number=12, type=8, cpp_type=7, label=2,
|
||||
default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='bundle_data', full_name='codereview.UploadBundleContinue.bundle_data', index=3,
|
||||
number=13, type=12, cpp_type=9, label=1,
|
||||
default_value="",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_UPLOADBUNDLERESPONSE.fields_by_name['status_code'].enum_type = _UPLOADBUNDLERESPONSE_CODETYPE
|
||||
|
||||
class UploadBundleRequest(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _UPLOADBUNDLEREQUEST
|
||||
|
||||
class UploadBundleResponse(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _UPLOADBUNDLERESPONSE
|
||||
|
||||
class UploadBundleContinue(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _UPLOADBUNDLECONTINUE
|
||||
|
5  color.py
@@ -100,6 +100,9 @@ class Coloring(object):
    else:
      self._on = False

  def redirect(self, out):
    self._out = out

  @property
  def is_on(self):
    return self._on
@@ -137,7 +140,7 @@ class Coloring(object):
    if v is None:
      return _Color(fg, bg, attr)

-   v = v.trim().lowercase()
+   v = v.strip().lower()
    if v == "reset":
      return RESET
    elif v == '':
@@ -114,3 +114,8 @@ class PagedCommand(Command):
  """Command which defaults to output in a pager, as its
     display tends to be larger than one screen full.
  """

class MirrorSafeCommand(object):
  """Command permits itself to run within a mirror,
     and does not require a working directory.
  """
198  docs/manifest-format.txt
@@ -0,0 +1,198 @@
repo Manifest Format
====================

A repo manifest describes the structure of a repo client; that is
the directories that are visible and where they should be obtained
from with git.

The basic structure of a manifest is a bare Git repository holding
a single 'default.xml' XML file in the top level directory.

Manifests are inherently version controlled, since they are kept
within a Git repository. Updates to manifests are automatically
obtained by clients during `repo sync`.


XML File Format
---------------

A manifest XML file (e.g. 'default.xml') roughly conforms to the
following DTD:

  <!DOCTYPE manifest [
    <!ELEMENT manifest (remote*,
                        default?,
                        remove-project*,
                        project*,
                        add-remote*)>

    <!ELEMENT remote (EMPTY)>
    <!ATTLIST remote name         ID    #REQUIRED>
    <!ATTLIST remote fetch        CDATA #REQUIRED>
    <!ATTLIST remote review       CDATA #IMPLIED>
    <!ATTLIST remote project-name CDATA #IMPLIED>

    <!ELEMENT default (EMPTY)>
    <!ATTLIST default remote   IDREF #IMPLIED>
    <!ATTLIST default revision CDATA #IMPLIED>

    <!ELEMENT project (remote*)>
    <!ATTLIST project name     CDATA #REQUIRED>
    <!ATTLIST project path     CDATA #IMPLIED>
    <!ATTLIST project remote   IDREF #IMPLIED>
    <!ATTLIST project revision CDATA #IMPLIED>

    <!ELEMENT add-remote (EMPTY)>
    <!ATTLIST add-remote to-project   ID    #REQUIRED>
    <!ATTLIST add-remote name         ID    #REQUIRED>
    <!ATTLIST add-remote fetch        CDATA #REQUIRED>
    <!ATTLIST add-remote review       CDATA #IMPLIED>
    <!ATTLIST add-remote project-name CDATA #IMPLIED>

    <!ELEMENT remove-project (EMPTY)>
    <!ATTLIST remove-project name CDATA #REQUIRED>
  ]>

A description of the elements and their attributes follows.


Element manifest
----------------

The root element of the file.


Element remote
--------------

One or more remote elements may be specified. Each remote element
specifies a Git URL shared by one or more projects and (optionally)
the Gerrit review server those projects upload changes through.

Attribute `name`: A short name unique to this manifest file. The
name specified here is used as the remote name in each project's
.git/config, and is therefore automatically available to commands
like `git fetch`, `git remote`, `git pull` and `git push`.

Attribute `fetch`: The Git URL prefix for all projects which use
this remote. Each project's name is appended to this prefix to
form the actual URL used to clone the project.

Attribute `review`: Hostname of the Gerrit server where reviews
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.

Attribute `project-name`: Specifies the name of this project used
by the review server given in the review attribute of this element.
Only permitted when the remote element is nested inside of a project
element (see below). If not given, defaults to the name supplied
in the project's name attribute.

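As an illustration only (the host names and the project name below
are invented for this example), a manifest might declare a remote
and a project that uses it like so:

  <remote name="korg"
          fetch="git://git.example.org/"
          review="review.example.org" />

  <project path="build"
           name="platform/build"
           remote="korg" />

The project element and its attributes are described in more
detail below.
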
Element add-remote
------------------

Adds a remote to an existing project, whose name is given by the
to-project attribute. This is functionally equivalent to nesting
a remote element under the project, but has the advantage that it
can be specified in the user's `local_manifest.xml` to add a remote
to a project declared by the normal manifest.

The element can be used to add a fork of an existing project that
the user needs to work with.

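For instance (the project, remote, and host names here are
hypothetical), a user's `local_manifest.xml` could attach a
personal fork to a project defined by the normal manifest:

  <add-remote to-project="platform/build"
              name="myfork"
              fetch="git://git.example.com/" />
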
Element default
---------------

At most one default element may be specified. Its remote and
revision attributes are used when a project element does not
specify its own remote or revision attribute.

Attribute `remote`: Name of a previously defined remote element.
Project elements lacking a remote attribute of their own will use
this remote.

Attribute `revision`: Name of a Git branch (e.g. `master` or
`refs/heads/master`). Project elements lacking their own
revision attribute will use this revision.


Element project
---------------

One or more project elements may be specified. Each element
describes a single Git repository to be cloned into the repo
client workspace.

Attribute `name`: A unique name for this project. The project's
name is appended onto its remote's fetch URL to generate the actual
URL to configure the Git remote with. The URL gets formed as:

  ${remote_fetch}/${project_name}.git

where ${remote_fetch} is the remote's fetch attribute and
${project_name} is the project's name attribute. The suffix ".git"
is always appended as repo assumes the upstream is a forest of
bare Git repositories.

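For example, with a remote whose fetch attribute is
`git://git.example.org/` and a project named `platform/build`
(both values illustrative), repo would clone from:

  git://git.example.org/platform/build.git
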
The project name must match the name Gerrit knows, if Gerrit is
being used for code reviews.

Attribute `path`: An optional path relative to the top directory
of the repo client where the Git working directory for this project
should be placed. If not supplied the project name is used.

Attribute `remote`: Name of a previously defined remote element.
If not supplied the remote given by the default element is used.

Attribute `revision`: Name of the Git branch the manifest wants
to track for this project. Names can be relative to refs/heads
(e.g. just "master") or absolute (e.g. "refs/heads/master").
Tags and/or explicit SHA-1s should work in theory, but have not
been extensively tested. If not supplied the revision given by
the default element is used.

Child element `remote`: Described like the top-level remote element,
but adds an additional remote to only this project. These additional
remotes are fetched from first on the initial `repo sync`, causing
the majority of the project's object database to be obtained through
these additional remotes.


Element remove-project
----------------------

Deletes the named project from the internal manifest table, possibly
allowing a subsequent project element in the same manifest file to
replace the project with a different source.

This element is mostly useful in the local_manifest.xml, where
the user can remove a project, and possibly replace it with their
own definition.

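A hypothetical manifest fragment replacing a project with a
different source might read (all names are placeholders):

  <remove-project name="platform/build" />
  <project path="build"
           name="vendor/private-build" />

Here the replacement project simply reuses the path of the project
that was removed.
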
Local Manifest
==============

Additional remotes and projects may be added through a local
manifest, stored in `$TOP_DIR/.repo/local_manifest.xml`.

For example:

  $ cat .repo/local_manifest.xml
  <?xml version="1.0" encoding="UTF-8"?>
  <manifest>
    <project path="manifest"
             name="tools/manifest" />
    <project path="platform-manifest"
             name="platform/manifest" />
  </manifest>

Users may add projects to the local manifest prior to a `repo sync`
invocation, instructing repo to automatically download and manage
these extra projects.

Currently the only supported feature of a local manifest is to
add new remotes and/or projects. In the future a local manifest
may support picking different revisions of a project, or deleting
projects specified in the default manifest.
@@ -69,14 +69,14 @@ least one of these before using this command."""
    Returns:
      new value of edited text; None if editing did not succeed
    """
-   editor = cls._GetEditor()
+   editor = cls._GetEditor().split()
    fd, path = tempfile.mkstemp()
    try:
      os.write(fd, data)
      os.close(fd)
      fd = None

-     if subprocess.Popen([editor, path]).wait() != 0:
+     if subprocess.Popen(editor + [path]).wait() != 0:
        raise EditorError()
      return open(path).read()
    finally:
6  error.py
@@ -17,6 +17,10 @@ class ManifestParseError(Exception):
  """Failed to parse the manifest file.
  """

class ManifestInvalidRevisionError(Exception):
  """The revision value in a project is incorrect.
  """

class EditorError(Exception):
  """Unspecified error from the user's text editor.
  """
@@ -64,3 +68,5 @@ class RepoChangedException(Exception):
     repo or manifest repositories.  In this special case we must
     use exec to re-execute repo with the new code and manifest.
  """
  def __init__(self, extra_args=[]):
    self.extra_args = extra_args
@ -1,433 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# TODO(robinson): We probably need to provide deep-copy methods for
|
||||
# descriptor types. When a FieldDescriptor is passed into
|
||||
# Descriptor.__init__(), we should make a deep copy and then set
|
||||
# containing_type on it. Alternatively, we could just get
|
||||
# rid of containing_type (iit's not needed for reflection.py, at least).
|
||||
#
|
||||
# TODO(robinson): Print method?
|
||||
#
|
||||
# TODO(robinson): Useful __repr__?
|
||||
|
||||
"""Descriptors essentially contain exactly the information found in a .proto
|
||||
file, in types that make this information accessible in Python.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
class DescriptorBase(object):
|
||||
|
||||
"""Descriptors base class.
|
||||
|
||||
This class is the base of all descriptor classes. It provides common options
|
||||
related functionaility.
|
||||
"""
|
||||
|
||||
def __init__(self, options, options_class_name):
|
||||
"""Initialize the descriptor given its options message and the name of the
|
||||
class of the options message. The name of the class is required in case
|
||||
the options message is None and has to be created.
|
||||
"""
|
||||
self._options = options
|
||||
self._options_class_name = options_class_name
|
||||
|
||||
def GetOptions(self):
|
||||
"""Retrieves descriptor options.
|
||||
|
||||
This method returns the options set or creates the default options for the
|
||||
descriptor.
|
||||
"""
|
||||
if self._options:
|
||||
return self._options
|
||||
from froofle.protobuf import descriptor_pb2
|
||||
try:
|
||||
options_class = getattr(descriptor_pb2, self._options_class_name)
|
||||
except AttributeError:
|
||||
raise RuntimeError('Unknown options class name %s!' %
|
||||
(self._options_class_name))
|
||||
self._options = options_class()
|
||||
return self._options
|
||||
|
||||
|
||||
class Descriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a protocol message type.
|
||||
|
||||
A Descriptor instance has the following attributes:
|
||||
|
||||
name: (str) Name of this protocol message type.
|
||||
full_name: (str) Fully-qualified name of this protocol message type,
|
||||
which will include protocol "package" name and the name of any
|
||||
enclosing types.
|
||||
|
||||
filename: (str) Name of the .proto file containing this message.
|
||||
|
||||
containing_type: (Descriptor) Reference to the descriptor of the
|
||||
type containing us, or None if we have no containing type.
|
||||
|
||||
fields: (list of FieldDescriptors) Field descriptors for all
|
||||
fields in this type.
|
||||
fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as in |fields|, but indexed by "number" attribute in each
|
||||
FieldDescriptor.
|
||||
fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as in |fields|, but indexed by "name" attribute in each
|
||||
FieldDescriptor.
|
||||
|
||||
nested_types: (list of Descriptors) Descriptor references
|
||||
for all protocol message types nested within this one.
|
||||
nested_types_by_name: (dict str -> Descriptor) Same Descriptor
|
||||
objects as in |nested_types|, but indexed by "name" attribute
|
||||
in each Descriptor.
|
||||
|
||||
enum_types: (list of EnumDescriptors) EnumDescriptor references
|
||||
for all enums contained within this type.
|
||||
enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
|
||||
objects as in |enum_types|, but indexed by "name" attribute
|
||||
in each EnumDescriptor.
|
||||
enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
|
||||
from enum value name to EnumValueDescriptor for that value.
|
||||
|
||||
extensions: (list of FieldDescriptor) All extensions defined directly
|
||||
within this message type (NOT within a nested type).
|
||||
extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as |extensions|, but indexed by "name" attribute of each
|
||||
FieldDescriptor.
|
||||
|
||||
options: (descriptor_pb2.MessageOptions) Protocol message options or None
|
||||
to use default message options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, filename, containing_type,
|
||||
fields, nested_types, enum_types, extensions, options=None):
|
||||
"""Arguments to __init__() are as described in the description
|
||||
of Descriptor fields above.
|
||||
"""
|
||||
super(Descriptor, self).__init__(options, 'MessageOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.filename = filename
|
||||
self.containing_type = containing_type
|
||||
|
||||
# We have fields in addition to fields_by_name and fields_by_number,
|
||||
# so that:
|
||||
# 1. Clients can index fields by "order in which they're listed."
|
||||
# 2. Clients can easily iterate over all fields with the terse
|
||||
# syntax: for f in descriptor.fields: ...
|
||||
self.fields = fields
|
||||
for field in self.fields:
|
||||
field.containing_type = self
|
||||
self.fields_by_number = dict((f.number, f) for f in fields)
|
||||
self.fields_by_name = dict((f.name, f) for f in fields)
|
||||
|
||||
self.nested_types = nested_types
|
||||
self.nested_types_by_name = dict((t.name, t) for t in nested_types)
|
||||
|
||||
self.enum_types = enum_types
|
||||
for enum_type in self.enum_types:
|
||||
enum_type.containing_type = self
|
||||
self.enum_types_by_name = dict((t.name, t) for t in enum_types)
|
||||
self.enum_values_by_name = dict(
|
||||
(v.name, v) for t in enum_types for v in t.values)
|
||||
|
||||
self.extensions = extensions
|
||||
for extension in self.extensions:
|
||||
extension.extension_scope = self
|
||||
self.extensions_by_name = dict((f.name, f) for f in extensions)
|
||||
|
||||
|
||||
# TODO(robinson): We should have aggressive checking here,
|
||||
# for example:
|
||||
# * If you specify a repeated field, you should not be allowed
|
||||
# to specify a default value.
|
||||
# * [Other examples here as needed].
|
||||
#
|
||||
# TODO(robinson): for this and other *Descriptor classes, we
|
||||
# might also want to lock things down aggressively (e.g.,
|
||||
# prevent clients from setting the attributes). Having
|
||||
# stronger invariants here in general will reduce the number
|
||||
# of runtime checks we must do in reflection.py...
|
||||
class FieldDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a single field in a .proto file.
|
||||
|
||||
A FieldDescriptor instance has the following attriubtes:
|
||||
|
||||
name: (str) Name of this field, exactly as it appears in .proto.
|
||||
full_name: (str) Name of this field, including containing scope. This is
|
||||
particularly relevant for extensions.
|
||||
index: (int) Dense, 0-indexed index giving the order that this
|
||||
field textually appears within its message in the .proto file.
|
||||
number: (int) Tag number declared for this field in the .proto file.
|
||||
|
||||
type: (One of the TYPE_* constants below) Declared type.
|
||||
cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
|
||||
represent this field.
|
||||
|
||||
label: (One of the LABEL_* constants below) Tells whether this
|
||||
field is optional, required, or repeated.
|
||||
default_value: (Varies) Default value of this field. Only
|
||||
meaningful for non-repeated scalar fields. Repeated fields
|
||||
should always set this to [], and non-repeated composite
|
||||
fields should always set this to None.
|
||||
|
||||
containing_type: (Descriptor) Descriptor of the protocol message
|
||||
type that contains this field. Set by the Descriptor constructor
|
||||
if we're passed into one.
|
||||
Somewhat confusingly, for extension fields, this is the
|
||||
descriptor of the EXTENDED message, not the descriptor
|
||||
of the message containing this field. (See is_extension and
|
||||
extension_scope below).
|
||||
message_type: (Descriptor) If a composite field, a descriptor
|
||||
of the message type contained in this field. Otherwise, this is None.
|
||||
enum_type: (EnumDescriptor) If this field contains an enum, a
|
||||
descriptor of that enum. Otherwise, this is None.
|
||||
|
||||
is_extension: True iff this describes an extension field.
|
||||
extension_scope: (Descriptor) Only meaningful if is_extension is True.
|
||||
Gives the message that immediately contains this extension field.
|
||||
Will be None iff we're a top-level (file-level) extension field.
|
||||
|
||||
options: (descriptor_pb2.FieldOptions) Protocol message field options or
|
||||
None to use default field options.
|
||||
"""
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::Type enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
TYPE_DOUBLE = 1
|
||||
TYPE_FLOAT = 2
|
||||
TYPE_INT64 = 3
|
||||
TYPE_UINT64 = 4
|
||||
TYPE_INT32 = 5
|
||||
TYPE_FIXED64 = 6
|
||||
TYPE_FIXED32 = 7
|
||||
TYPE_BOOL = 8
|
||||
TYPE_STRING = 9
|
||||
TYPE_GROUP = 10
|
||||
TYPE_MESSAGE = 11
|
||||
TYPE_BYTES = 12
|
||||
TYPE_UINT32 = 13
|
||||
TYPE_ENUM = 14
|
||||
TYPE_SFIXED32 = 15
|
||||
TYPE_SFIXED64 = 16
|
||||
TYPE_SINT32 = 17
|
||||
TYPE_SINT64 = 18
|
||||
MAX_TYPE = 18
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::CppType enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
CPPTYPE_INT32 = 1
|
||||
CPPTYPE_INT64 = 2
|
||||
CPPTYPE_UINT32 = 3
|
||||
CPPTYPE_UINT64 = 4
|
||||
CPPTYPE_DOUBLE = 5
|
||||
CPPTYPE_FLOAT = 6
|
||||
CPPTYPE_BOOL = 7
|
||||
CPPTYPE_ENUM = 8
|
||||
CPPTYPE_STRING = 9
|
||||
CPPTYPE_MESSAGE = 10
|
||||
MAX_CPPTYPE = 10
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::Label enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
LABEL_OPTIONAL = 1
|
||||
LABEL_REQUIRED = 2
|
||||
LABEL_REPEATED = 3
|
||||
MAX_LABEL = 3
|
||||
|
||||
def __init__(self, name, full_name, index, number, type, cpp_type, label,
|
||||
default_value, message_type, enum_type, containing_type,
|
||||
is_extension, extension_scope, options=None):
|
||||
"""The arguments are as described in the description of FieldDescriptor
|
||||
attributes above.
|
||||
|
||||
Note that containing_type may be None, and may be set later if necessary
|
||||
(to deal with circular references between message types, for example).
|
||||
Likewise for extension_scope.
|
||||
"""
|
||||
super(FieldDescriptor, self).__init__(options, 'FieldOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.number = number
|
||||
self.type = type
|
||||
self.cpp_type = cpp_type
|
||||
self.label = label
|
||||
self.default_value = default_value
|
||||
self.containing_type = containing_type
|
||||
self.message_type = message_type
|
||||
self.enum_type = enum_type
|
||||
self.is_extension = is_extension
|
||||
self.extension_scope = extension_scope
|
||||
|
||||
|
||||
class EnumDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for an enum defined in a .proto file.
|
||||
|
||||
An EnumDescriptor instance has the following attributes:
|
||||
|
||||
name: (str) Name of the enum type.
|
||||
full_name: (str) Full name of the type, including package name
|
||||
and any enclosing type(s).
|
||||
filename: (str) Name of the .proto file in which this appears.
|
||||
|
||||
values: (list of EnumValueDescriptors) List of the values
|
||||
in this enum.
|
||||
values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
|
||||
but indexed by the "name" field of each EnumValueDescriptor.
|
||||
values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
|
||||
but indexed by the "number" field of each EnumValueDescriptor.
|
||||
containing_type: (Descriptor) Descriptor of the immediate containing
|
||||
type of this enum, or None if this is an enum defined at the
|
||||
top level in a .proto file. Set by Descriptor's constructor
|
||||
if we're passed into one.
|
||||
options: (descriptor_pb2.EnumOptions) Enum options message or
|
||||
None to use default enum options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, filename, values,
|
||||
containing_type=None, options=None):
|
||||
"""Arguments are as described in the attribute description above."""
|
||||
super(EnumDescriptor, self).__init__(options, 'EnumOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.filename = filename
|
||||
self.values = values
|
||||
for value in self.values:
|
||||
value.type = self
|
||||
self.values_by_name = dict((v.name, v) for v in values)
|
||||
self.values_by_number = dict((v.number, v) for v in values)
|
||||
self.containing_type = containing_type
|
||||
|
||||
|
||||
class EnumValueDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a single value within an enum.
|
||||
|
||||
name: (str) Name of this value.
|
||||
index: (int) Dense, 0-indexed index giving the order that this
|
||||
value appears textually within its enum in the .proto file.
|
||||
number: (int) Actual number assigned to this enum value.
|
||||
type: (EnumDescriptor) EnumDescriptor to which this value
|
||||
belongs. Set by EnumDescriptor's constructor if we're
|
||||
passed into one.
|
||||
options: (descriptor_pb2.EnumValueOptions) Enum value options message or
|
||||
None to use default enum value options options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, index, number, type=None, options=None):
|
||||
"""Arguments are as described in the attribute description above."""
|
||||
super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
|
||||
self.name = name
|
||||
self.index = index
|
||||
self.number = number
|
||||
self.type = type
|
||||
|
||||
|
||||
class ServiceDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a service.
|
||||
|
||||
name: (str) Name of the service.
|
||||
full_name: (str) Full name of the service, including package name.
|
||||
index: (int) 0-indexed index giving the order that this services
|
||||
definition appears withing the .proto file.
|
||||
methods: (list of MethodDescriptor) List of methods provided by this
|
||||
service.
|
||||
options: (descriptor_pb2.ServiceOptions) Service options message or
|
||||
None to use default service options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, index, methods, options=None):
|
||||
super(ServiceDescriptor, self).__init__(options, 'ServiceOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.methods = methods
|
||||
# Set the containing service for each method in this service.
|
||||
for method in self.methods:
|
||||
method.containing_service = self
|
||||
|
||||
def FindMethodByName(self, name):
|
||||
"""Searches for the specified method, and returns its descriptor."""
|
||||
for method in self.methods:
|
||||
if name == method.name:
|
||||
return method
|
||||
return None
|
||||
|
||||
|
||||
class MethodDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a method in a service.
|
||||
|
||||
name: (str) Name of the method within the service.
|
||||
full_name: (str) Full name of method.
|
||||
index: (int) 0-indexed index of the method inside the service.
|
||||
containing_service: (ServiceDescriptor) The service that contains this
|
||||
method.
|
||||
input_type: The descriptor of the message that this method accepts.
|
||||
output_type: The descriptor of the message that this method returns.
|
||||
options: (descriptor_pb2.MethodOptions) Method options message or
|
||||
None to use default method options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, index, containing_service,
|
||||
input_type, output_type, options=None):
|
||||
"""The arguments are as described in the description of MethodDescriptor
|
||||
attributes above.
|
||||
|
||||
Note that containing_service may be None, and may be set later if necessary.
|
||||
"""
|
||||
super(MethodDescriptor, self).__init__(options, 'MethodOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.containing_service = containing_service
|
||||
self.input_type = input_type
|
||||
self.output_type = output_type
|
||||
|
||||
|
||||
def _ParseOptions(message, string):
|
||||
"""Parses serialized options.
|
||||
|
||||
This helper function is used to parse serialized options in generated
|
||||
proto2 files. It must not be used outside proto2.
|
||||
"""
|
||||
message.ParseFromString(string)
|
||||
return message;
|
@ -1,950 +0,0 @@
|
||||
#!/usr/bin/python2.4
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
|
||||
from froofle.protobuf import descriptor
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf import reflection
|
||||
from froofle.protobuf import service
|
||||
from froofle.protobuf import service_reflection
|
||||
|
||||
|
||||
_FIELDDESCRIPTORPROTO_TYPE = descriptor.EnumDescriptor(
|
||||
name='Type',
|
||||
full_name='froofle.protobuf.FieldDescriptorProto.Type',
|
||||
filename='Type',
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_DOUBLE', index=0, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_FLOAT', index=1, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_INT64', index=2, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_UINT64', index=3, number=4,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_INT32', index=4, number=5,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_FIXED64', index=5, number=6,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_FIXED32', index=6, number=7,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_BOOL', index=7, number=8,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_STRING', index=8, number=9,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_GROUP', index=9, number=10,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_MESSAGE', index=10, number=11,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_BYTES', index=11, number=12,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_UINT32', index=12, number=13,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_ENUM', index=13, number=14,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_SFIXED32', index=14, number=15,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_SFIXED64', index=15, number=16,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_SINT32', index=16, number=17,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='TYPE_SINT64', index=17, number=18,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
options=None,
|
||||
)
|
||||
|
||||
_FIELDDESCRIPTORPROTO_LABEL = descriptor.EnumDescriptor(
|
||||
name='Label',
|
||||
full_name='froofle.protobuf.FieldDescriptorProto.Label',
|
||||
filename='Label',
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='LABEL_OPTIONAL', index=0, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='LABEL_REQUIRED', index=1, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='LABEL_REPEATED', index=2, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
options=None,
|
||||
)
|
||||
|
||||
_FILEOPTIONS_OPTIMIZEMODE = descriptor.EnumDescriptor(
|
||||
name='OptimizeMode',
|
||||
full_name='froofle.protobuf.FileOptions.OptimizeMode',
|
||||
filename='OptimizeMode',
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='SPEED', index=0, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='CODE_SIZE', index=1, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
options=None,
|
||||
)
|
||||
|
||||
_FIELDOPTIONS_CTYPE = descriptor.EnumDescriptor(
|
||||
name='CType',
|
||||
full_name='froofle.protobuf.FieldOptions.CType',
|
||||
filename='CType',
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='CORD', index=0, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
descriptor.EnumValueDescriptor(
|
||||
name='STRING_PIECE', index=1, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
options=None,
|
||||
)
|
||||
|
||||
|
||||
_FILEDESCRIPTORSET = descriptor.Descriptor(
|
||||
name='FileDescriptorSet',
|
||||
full_name='froofle.protobuf.FileDescriptorSet',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='file', full_name='froofle.protobuf.FileDescriptorSet.file', index=0,
|
||||
number=1, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_FILEDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='FileDescriptorProto',
|
||||
full_name='froofle.protobuf.FileDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.FileDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='package', full_name='froofle.protobuf.FileDescriptorProto.package', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='dependency', full_name='froofle.protobuf.FileDescriptorProto.dependency', index=2,
|
||||
number=3, type=9, cpp_type=9, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='message_type', full_name='froofle.protobuf.FileDescriptorProto.message_type', index=3,
|
||||
number=4, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='enum_type', full_name='froofle.protobuf.FileDescriptorProto.enum_type', index=4,
|
||||
number=5, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='service', full_name='froofle.protobuf.FileDescriptorProto.service', index=5,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='extension', full_name='froofle.protobuf.FileDescriptorProto.extension', index=6,
|
||||
number=7, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.FileDescriptorProto.options', index=7,
|
||||
number=8, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_DESCRIPTORPROTO_EXTENSIONRANGE = descriptor.Descriptor(
|
||||
name='ExtensionRange',
|
||||
full_name='froofle.protobuf.DescriptorProto.ExtensionRange',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='start', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.start', index=0,
|
||||
number=1, type=5, cpp_type=1, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='end', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.end', index=1,
|
||||
number=2, type=5, cpp_type=1, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
_DESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='DescriptorProto',
|
||||
full_name='froofle.protobuf.DescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.DescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='field', full_name='froofle.protobuf.DescriptorProto.field', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='extension', full_name='froofle.protobuf.DescriptorProto.extension', index=2,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='nested_type', full_name='froofle.protobuf.DescriptorProto.nested_type', index=3,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='enum_type', full_name='froofle.protobuf.DescriptorProto.enum_type', index=4,
|
||||
number=4, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='extension_range', full_name='froofle.protobuf.DescriptorProto.extension_range', index=5,
|
||||
number=5, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.DescriptorProto.options', index=6,
|
||||
number=7, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_FIELDDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='FieldDescriptorProto',
|
||||
full_name='froofle.protobuf.FieldDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.FieldDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='number', full_name='froofle.protobuf.FieldDescriptorProto.number', index=1,
|
||||
number=3, type=5, cpp_type=1, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='label', full_name='froofle.protobuf.FieldDescriptorProto.label', index=2,
|
||||
number=4, type=14, cpp_type=8, label=1,
|
||||
default_value=1,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='type', full_name='froofle.protobuf.FieldDescriptorProto.type', index=3,
|
||||
number=5, type=14, cpp_type=8, label=1,
|
||||
default_value=1,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='type_name', full_name='froofle.protobuf.FieldDescriptorProto.type_name', index=4,
|
||||
number=6, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='extendee', full_name='froofle.protobuf.FieldDescriptorProto.extendee', index=5,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='default_value', full_name='froofle.protobuf.FieldDescriptorProto.default_value', index=6,
|
||||
number=7, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.FieldDescriptorProto.options', index=7,
|
||||
number=8, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
_FIELDDESCRIPTORPROTO_TYPE,
|
||||
_FIELDDESCRIPTORPROTO_LABEL,
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_ENUMDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='EnumDescriptorProto',
|
||||
full_name='froofle.protobuf.EnumDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.EnumDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='value', full_name='froofle.protobuf.EnumDescriptorProto.value', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.EnumDescriptorProto.options', index=2,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_ENUMVALUEDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='EnumValueDescriptorProto',
|
||||
full_name='froofle.protobuf.EnumValueDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.EnumValueDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='number', full_name='froofle.protobuf.EnumValueDescriptorProto.number', index=1,
|
||||
number=2, type=5, cpp_type=1, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.EnumValueDescriptorProto.options', index=2,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_SERVICEDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='ServiceDescriptorProto',
|
||||
full_name='froofle.protobuf.ServiceDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.ServiceDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='method', full_name='froofle.protobuf.ServiceDescriptorProto.method', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.ServiceDescriptorProto.options', index=2,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_METHODDESCRIPTORPROTO = descriptor.Descriptor(
|
||||
name='MethodDescriptorProto',
|
||||
full_name='froofle.protobuf.MethodDescriptorProto',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.MethodDescriptorProto.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='input_type', full_name='froofle.protobuf.MethodDescriptorProto.input_type', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='output_type', full_name='froofle.protobuf.MethodDescriptorProto.output_type', index=2,
|
||||
number=3, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='options', full_name='froofle.protobuf.MethodDescriptorProto.options', index=3,
|
||||
number=4, type=11, cpp_type=10, label=1,
|
||||
default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_FILEOPTIONS = descriptor.Descriptor(
|
||||
name='FileOptions',
|
||||
full_name='froofle.protobuf.FileOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='java_package', full_name='froofle.protobuf.FileOptions.java_package', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='java_outer_classname', full_name='froofle.protobuf.FileOptions.java_outer_classname', index=1,
|
||||
number=8, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='java_multiple_files', full_name='froofle.protobuf.FileOptions.java_multiple_files', index=2,
|
||||
number=10, type=8, cpp_type=7, label=1,
|
||||
default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='optimize_for', full_name='froofle.protobuf.FileOptions.optimize_for', index=3,
|
||||
number=9, type=14, cpp_type=8, label=1,
|
||||
default_value=2,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.FileOptions.uninterpreted_option', index=4,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
_FILEOPTIONS_OPTIMIZEMODE,
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_MESSAGEOPTIONS = descriptor.Descriptor(
|
||||
name='MessageOptions',
|
||||
full_name='froofle.protobuf.MessageOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='message_set_wire_format', full_name='froofle.protobuf.MessageOptions.message_set_wire_format', index=0,
|
||||
number=1, type=8, cpp_type=7, label=1,
|
||||
default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.MessageOptions.uninterpreted_option', index=1,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_FIELDOPTIONS = descriptor.Descriptor(
|
||||
name='FieldOptions',
|
||||
full_name='froofle.protobuf.FieldOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='ctype', full_name='froofle.protobuf.FieldOptions.ctype', index=0,
|
||||
number=1, type=14, cpp_type=8, label=1,
|
||||
default_value=1,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='experimental_map_key', full_name='froofle.protobuf.FieldOptions.experimental_map_key', index=1,
|
||||
number=9, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.FieldOptions.uninterpreted_option', index=2,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
_FIELDOPTIONS_CTYPE,
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_ENUMOPTIONS = descriptor.Descriptor(
|
||||
name='EnumOptions',
|
||||
full_name='froofle.protobuf.EnumOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.EnumOptions.uninterpreted_option', index=0,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_ENUMVALUEOPTIONS = descriptor.Descriptor(
|
||||
name='EnumValueOptions',
|
||||
full_name='froofle.protobuf.EnumValueOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.EnumValueOptions.uninterpreted_option', index=0,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_SERVICEOPTIONS = descriptor.Descriptor(
|
||||
name='ServiceOptions',
|
||||
full_name='froofle.protobuf.ServiceOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.ServiceOptions.uninterpreted_option', index=0,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_METHODOPTIONS = descriptor.Descriptor(
|
||||
name='MethodOptions',
|
||||
full_name='froofle.protobuf.MethodOptions',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='uninterpreted_option', full_name='froofle.protobuf.MethodOptions.uninterpreted_option', index=0,
|
||||
number=999, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_UNINTERPRETEDOPTION_NAMEPART = descriptor.Descriptor(
|
||||
name='NamePart',
|
||||
full_name='froofle.protobuf.UninterpretedOption.NamePart',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name_part', full_name='froofle.protobuf.UninterpretedOption.NamePart.name_part', index=0,
|
||||
number=1, type=9, cpp_type=9, label=2,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='is_extension', full_name='froofle.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
|
||||
number=2, type=8, cpp_type=7, label=2,
|
||||
default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
_UNINTERPRETEDOPTION = descriptor.Descriptor(
|
||||
name='UninterpretedOption',
|
||||
full_name='froofle.protobuf.UninterpretedOption',
|
||||
filename='froofle/protobuf/descriptor.proto',
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='name', full_name='froofle.protobuf.UninterpretedOption.name', index=0,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='identifier_value', full_name='froofle.protobuf.UninterpretedOption.identifier_value', index=1,
|
||||
number=3, type=9, cpp_type=9, label=1,
|
||||
default_value=unicode("", "utf-8"),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='positive_int_value', full_name='froofle.protobuf.UninterpretedOption.positive_int_value', index=2,
|
||||
number=4, type=4, cpp_type=4, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='negative_int_value', full_name='froofle.protobuf.UninterpretedOption.negative_int_value', index=3,
|
||||
number=5, type=3, cpp_type=2, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='double_value', full_name='froofle.protobuf.UninterpretedOption.double_value', index=4,
|
||||
number=6, type=1, cpp_type=5, label=1,
|
||||
default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
descriptor.FieldDescriptor(
|
||||
name='string_value', full_name='froofle.protobuf.UninterpretedOption.string_value', index=5,
|
||||
number=7, type=12, cpp_type=9, label=1,
|
||||
default_value="",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[], # TODO(robinson): Implement.
|
||||
enum_types=[
|
||||
],
|
||||
options=None)
|
||||
|
||||
|
||||
_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
|
||||
_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
|
||||
_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
|
||||
_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
|
||||
_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
|
||||
_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
|
||||
_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
|
||||
_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
|
||||
_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
|
||||
_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
|
||||
_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
|
||||
_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
|
||||
_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
|
||||
_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
|
||||
_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
|
||||
_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
|
||||
_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
|
||||
_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
|
||||
_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
|
||||
_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
|
||||
_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
|
||||
_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
|
||||
_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
|
||||
_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
||||
_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
|
||||
|
||||
class FileDescriptorSet(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _FILEDESCRIPTORSET
|
||||
|
||||
class FileDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _FILEDESCRIPTORPROTO
|
||||
|
||||
class DescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
|
||||
class ExtensionRange(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE
|
||||
DESCRIPTOR = _DESCRIPTORPROTO
|
||||
|
||||
class FieldDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _FIELDDESCRIPTORPROTO
|
||||
|
||||
class EnumDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _ENUMDESCRIPTORPROTO
|
||||
|
||||
class EnumValueDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO
|
||||
|
||||
class ServiceDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _SERVICEDESCRIPTORPROTO
|
||||
|
||||
class MethodDescriptorProto(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _METHODDESCRIPTORPROTO
|
||||
|
||||
class FileOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _FILEOPTIONS
|
||||
|
||||
class MessageOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _MESSAGEOPTIONS
|
||||
|
||||
class FieldOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _FIELDOPTIONS
|
||||
|
||||
class EnumOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _ENUMOPTIONS
|
||||
|
||||
class EnumValueOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _ENUMVALUEOPTIONS
|
||||
|
||||
class ServiceOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _SERVICEOPTIONS
|
||||
|
||||
class MethodOptions(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _METHODOPTIONS
|
||||
|
||||
class UninterpretedOption(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
|
||||
class NamePart(message.Message):
|
||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
||||
DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART
|
||||
DESCRIPTOR = _UNINTERPRETEDOPTION
|
||||
|
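The generated classes above all follow one pattern: a hand-wired module-level Descriptor object, plus a class whose reflection.GeneratedProtocolMessageType metaclass reads DESCRIPTOR and synthesizes the corresponding message fields. A minimal usage sketch, assuming the froofle.protobuf package from this tree is on the import path:

  from froofle.protobuf import descriptor_pb2

  proto = descriptor_pb2.FileDescriptorProto()
  proto.name = 'example.proto'               # scalar field defined by _FILEDESCRIPTORPROTO
  data = proto.SerializeToString()           # wire-format bytes
  clone = descriptor_pb2.FileDescriptorProto()
  clone.MergeFromString(data)                # round-trips through the wire format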
@@ -1,209 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Class for decoding protocol buffer primitives.
|
||||
|
||||
Contains the logic for decoding every logical protocol field type
|
||||
from one of the 5 physical wire types.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import struct
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf.internal import input_stream
|
||||
from froofle.protobuf.internal import wire_format
|
||||
|
||||
|
||||
|
||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
||||
# that the interface is strongly inspired by WireFormat from the C++ proto2
|
||||
# implementation.
|
||||
|
||||
|
||||
class Decoder(object):
|
||||
|
||||
"""Decodes logical protocol buffer fields from the wire."""
|
||||
|
||||
def __init__(self, s):
|
||||
"""Initializes the decoder to read from s.
|
||||
|
||||
Args:
|
||||
s: An immutable sequence of bytes, which must be accessible
|
||||
via the Python buffer() primitive (i.e., buffer(s)).
|
||||
"""
|
||||
self._stream = input_stream.InputStream(s)
|
||||
|
||||
def EndOfStream(self):
|
||||
"""Returns true iff we've reached the end of the bytes we're reading."""
|
||||
return self._stream.EndOfStream()
|
||||
|
||||
def Position(self):
|
||||
"""Returns the 0-indexed position in |s|."""
|
||||
return self._stream.Position()
|
||||
|
||||
def ReadFieldNumberAndWireType(self):
|
||||
"""Reads a tag from the wire. Returns a (field_number, wire_type) pair."""
|
||||
tag_and_type = self.ReadUInt32()
|
||||
return wire_format.UnpackTag(tag_and_type)
|
||||
|
||||
def SkipBytes(self, bytes):
|
||||
"""Skips the specified number of bytes on the wire."""
|
||||
self._stream.SkipBytes(bytes)
|
||||
|
||||
# Note that the Read*() methods below are not exactly symmetrical with the
|
||||
# corresponding Encoder.Append*() methods. Those Encoder methods first
|
||||
# encode a tag, but the Read*() methods below assume that the tag has already
|
||||
# been read, and that the client wishes to read a field of the specified type
|
||||
# starting at the current position.
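# Illustrative sketch of the calling pattern this implies (the dispatch shown
# here is an assumed caller, not code from this module): the caller alternates
# tag reads with typed value reads.
#
#   decoder = Decoder(data)
#   while not decoder.EndOfStream():
#     field_number, wire_type = decoder.ReadFieldNumberAndWireType()
#     if wire_type == wire_format.WIRETYPE_VARINT:
#       value = decoder.ReadInt64()
#     elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
#       value = decoder.ReadBytes()
#     # ...and so on for the remaining wire types.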
|
||||
|
||||
def ReadInt32(self):
|
||||
"""Reads and returns a signed, varint-encoded, 32-bit integer."""
|
||||
return self._stream.ReadVarint32()
|
||||
|
||||
def ReadInt64(self):
|
||||
"""Reads and returns a signed, varint-encoded, 64-bit integer."""
|
||||
return self._stream.ReadVarint64()
|
||||
|
||||
def ReadUInt32(self):
|
||||
"""Reads and returns an signed, varint-encoded, 32-bit integer."""
|
||||
return self._stream.ReadVarUInt32()
|
||||
|
||||
def ReadUInt64(self):
|
||||
"""Reads and returns an signed, varint-encoded,64-bit integer."""
|
||||
return self._stream.ReadVarUInt64()
|
||||
|
||||
def ReadSInt32(self):
|
||||
"""Reads and returns a signed, zigzag-encoded, varint-encoded,
|
||||
32-bit integer."""
|
||||
return wire_format.ZigZagDecode(self._stream.ReadVarUInt32())
|
||||
|
||||
def ReadSInt64(self):
|
||||
"""Reads and returns a signed, zigzag-encoded, varint-encoded,
|
||||
64-bit integer."""
|
||||
return wire_format.ZigZagDecode(self._stream.ReadVarUInt64())
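# Aside on the ZigZag transform used by the two methods above (a sketch of what
# wire_format.ZigZagEncode/ZigZagDecode are assumed to compute; it maps signed
# values onto unsigned ones so small negatives still yield short varints):
#
#   encode(n) = (n << 1) ^ (n >> 63)    # arithmetic shift: -1 -> 1, 1 -> 2
#   decode(x) = (x >> 1) ^ -(x & 1)     # inverse mapping:    1 -> -1, 2 -> 1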
|
||||
|
||||
def ReadFixed32(self):
|
||||
"""Reads and returns an unsigned, fixed-width, 32-bit integer."""
|
||||
return self._stream.ReadLittleEndian32()
|
||||
|
||||
def ReadFixed64(self):
|
||||
"""Reads and returns an unsigned, fixed-width, 64-bit integer."""
|
||||
return self._stream.ReadLittleEndian64()
|
||||
|
||||
def ReadSFixed32(self):
|
||||
"""Reads and returns a signed, fixed-width, 32-bit integer."""
|
||||
value = self._stream.ReadLittleEndian32()
|
||||
if value >= (1 << 31):
|
||||
value -= (1 << 32)
|
||||
return value
|
||||
|
||||
def ReadSFixed64(self):
|
||||
"""Reads and returns a signed, fixed-width, 64-bit integer."""
|
||||
value = self._stream.ReadLittleEndian64()
|
||||
if value >= (1 << 63):
|
||||
value -= (1 << 64)
|
||||
return value
|
||||
|
||||
def ReadFloat(self):
|
||||
"""Reads and returns a 4-byte floating-point number."""
|
||||
serialized = self._stream.ReadBytes(4)
|
||||
return struct.unpack('f', serialized)[0]
|
||||
|
||||
def ReadDouble(self):
|
||||
"""Reads and returns an 8-byte floating-point number."""
|
||||
serialized = self._stream.ReadBytes(8)
|
||||
return struct.unpack('d', serialized)[0]
|
||||
|
||||
def ReadBool(self):
|
||||
"""Reads and returns a bool."""
|
||||
i = self._stream.ReadVarUInt32()
|
||||
return bool(i)
|
||||
|
||||
def ReadEnum(self):
|
||||
"""Reads and returns an enum value."""
|
||||
return self._stream.ReadVarUInt32()
|
||||
|
||||
def ReadString(self):
|
||||
"""Reads and returns a length-delimited string."""
|
||||
bytes = self.ReadBytes()
|
||||
return unicode(bytes, 'utf-8')
|
||||
|
||||
def ReadBytes(self):
|
||||
"""Reads and returns a length-delimited byte sequence."""
|
||||
length = self._stream.ReadVarUInt32()
|
||||
return self._stream.ReadBytes(length)
|
||||
|
||||
def ReadMessageInto(self, msg):
|
||||
"""Calls msg.MergeFromString() to merge
|
||||
length-delimited serialized message data into |msg|.
|
||||
|
||||
REQUIRES: The decoder must be positioned at the serialized "length"
|
||||
prefix to a length-delimited serialized message.
|
||||
|
||||
POSTCONDITION: The decoder is positioned just after the
|
||||
serialized message, and we have merged those serialized
|
||||
contents into |msg|.
|
||||
"""
|
||||
length = self._stream.ReadVarUInt32()
|
||||
sub_buffer = self._stream.GetSubBuffer(length)
|
||||
num_bytes_used = msg.MergeFromString(sub_buffer)
|
||||
if num_bytes_used != length:
|
||||
raise message.DecodeError(
|
||||
'Submessage told to deserialize from %d-byte encoding, '
|
||||
'but used only %d bytes' % (length, num_bytes_used))
|
||||
self._stream.SkipBytes(num_bytes_used)
|
||||
|
||||
def ReadGroupInto(self, expected_field_number, group):
|
||||
"""Calls group.MergeFromString() to merge
|
||||
END_GROUP-delimited serialized message data into |group|.
|
||||
We'll raise an exception if we don't find an END_GROUP
|
||||
tag immediately after the serialized message contents.
|
||||
|
||||
REQUIRES: The decoder is positioned just after the START_GROUP
|
||||
tag for this group.
|
||||
|
||||
POSTCONDITION: The decoder is positioned just after the
|
||||
END_GROUP tag for this group, and we have merged
|
||||
the contents of the group into |group|.
|
||||
"""
|
||||
sub_buffer = self._stream.GetSubBuffer() # No a priori length limit.
|
||||
num_bytes_used = group.MergeFromString(sub_buffer)
|
||||
if num_bytes_used < 0:
|
||||
raise message.DecodeError('Group message reported negative bytes read.')
|
||||
self._stream.SkipBytes(num_bytes_used)
|
||||
field_number, field_type = self.ReadFieldNumberAndWireType()
|
||||
if field_type != wire_format.WIRETYPE_END_GROUP:
|
||||
raise message.DecodeError('Group message did not end with an END_GROUP.')
|
||||
if field_number != expected_field_number:
|
||||
raise message.DecodeError('END_GROUP tag had field '
|
||||
'number %d, was expecting field number %d' % (
|
||||
field_number, expected_field_number))
|
||||
# We're now positioned just after the END_GROUP tag. Perfect.
|
@@ -1,206 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Class for encoding protocol message primitives.
|
||||
|
||||
Contains the logic for encoding every logical protocol field type
|
||||
into one of the 5 physical wire types.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import struct
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf.internal import wire_format
|
||||
from froofle.protobuf.internal import output_stream
|
||||
|
||||
|
||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
||||
# that the interface is strongly inspired by WireFormat from the C++ proto2
|
||||
# implementation.
|
||||
|
||||
|
||||
class Encoder(object):
|
||||
|
||||
"""Encodes logical protocol buffer fields to the wire format."""
|
||||
|
||||
def __init__(self):
|
||||
self._stream = output_stream.OutputStream()
|
||||
|
||||
def ToString(self):
|
||||
"""Returns all values encoded in this object as a string."""
|
||||
return self._stream.ToString()
|
||||
|
||||
# All the Append*() methods below first append a tag+type pair to the buffer
|
||||
# before appending the specified value.
|
||||
|
||||
def AppendInt32(self, field_number, value):
|
||||
"""Appends a 32-bit integer to our buffer, varint-encoded."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
self._stream.AppendVarint32(value)
|
||||
|
||||
def AppendInt64(self, field_number, value):
|
||||
"""Appends a 64-bit integer to our buffer, varint-encoded."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
self._stream.AppendVarint64(value)
|
||||
|
||||
def AppendUInt32(self, field_number, unsigned_value):
|
||||
"""Appends an unsigned 32-bit integer to our buffer, varint-encoded."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
self._stream.AppendVarUInt32(unsigned_value)
|
||||
|
||||
def AppendUInt64(self, field_number, unsigned_value):
|
||||
"""Appends an unsigned 64-bit integer to our buffer, varint-encoded."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
self._stream.AppendVarUInt64(unsigned_value)
|
||||
|
||||
def AppendSInt32(self, field_number, value):
|
||||
"""Appends a 32-bit integer to our buffer, zigzag-encoded and then
|
||||
varint-encoded.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
zigzag_value = wire_format.ZigZagEncode(value)
|
||||
self._stream.AppendVarUInt32(zigzag_value)
|
||||
|
||||
def AppendSInt64(self, field_number, value):
|
||||
"""Appends a 64-bit integer to our buffer, zigzag-encoded and then
|
||||
varint-encoded.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
||||
zigzag_value = wire_format.ZigZagEncode(value)
|
||||
self._stream.AppendVarUInt64(zigzag_value)
|
||||
|
||||
def AppendFixed32(self, field_number, unsigned_value):
|
||||
"""Appends an unsigned 32-bit integer to our buffer, in little-endian
|
||||
byte-order.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
||||
self._stream.AppendLittleEndian32(unsigned_value)
|
||||
|
||||
def AppendFixed64(self, field_number, unsigned_value):
|
||||
"""Appends an unsigned 64-bit integer to our buffer, in little-endian
|
||||
byte-order.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
||||
self._stream.AppendLittleEndian64(unsigned_value)
|
||||
|
||||
def AppendSFixed32(self, field_number, value):
|
||||
"""Appends a signed 32-bit integer to our buffer, in little-endian
|
||||
byte-order.
|
||||
"""
|
||||
sign = (value & 0x80000000) and -1 or 0
|
||||
if value >> 32 != sign:
|
||||
raise message.EncodeError('SFixed32 out of range: %d' % value)
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
||||
self._stream.AppendLittleEndian32(value & 0xffffffff)
|
||||
|
||||
def AppendSFixed64(self, field_number, value):
|
||||
"""Appends a signed 64-bit integer to our buffer, in little-endian
|
||||
byte-order.
|
||||
"""
|
||||
sign = (value & 0x8000000000000000) and -1 or 0
|
||||
if value >> 64 != sign:
|
||||
raise message.EncodeError('SFixed64 out of range: %d' % value)
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
||||
self._stream.AppendLittleEndian64(value & 0xffffffffffffffff)
|
||||
|
||||
def AppendFloat(self, field_number, value):
|
||||
"""Appends a floating-point number to our buffer."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
||||
self._stream.AppendRawBytes(struct.pack('f', value))
|
||||
|
||||
def AppendDouble(self, field_number, value):
|
||||
"""Appends a double-precision floating-point number to our buffer."""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
||||
self._stream.AppendRawBytes(struct.pack('d', value))
|
||||
|
||||
def AppendBool(self, field_number, value):
|
||||
"""Appends a boolean to our buffer."""
|
||||
self.AppendInt32(field_number, value)
|
||||
|
||||
def AppendEnum(self, field_number, value):
|
||||
"""Appends an enum value to our buffer."""
|
||||
self.AppendInt32(field_number, value)
|
||||
|
||||
def AppendString(self, field_number, value):
|
||||
"""Appends a length-prefixed unicode string, encoded as UTF-8 to our buffer,
|
||||
with the length varint-encoded.
|
||||
"""
|
||||
self.AppendBytes(field_number, value.encode('utf-8'))
|
||||
|
||||
def AppendBytes(self, field_number, value):
|
||||
"""Appends a length-prefixed sequence of bytes to our buffer, with the
|
||||
length varint-encoded.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
|
||||
self._stream.AppendVarUInt32(len(value))
|
||||
self._stream.AppendRawBytes(value)
|
||||
|
||||
# TODO(robinson): For AppendGroup() and AppendMessage(), we'd really like to
|
||||
# avoid the extra string copy here. We can do so if we widen the Message
|
||||
# interface to be able to serialize to a stream in addition to a string. The
|
||||
# challenge when thinking ahead to the Python/C API implementation of Message
|
||||
# is finding a stream-like Python thing to which we can write raw bytes
|
||||
# from C. I'm not sure such a thing exists(?). (array.array is pretty much
|
||||
# what we want, but it's not directly exposed in the Python/C API).
|
||||
|
||||
def AppendGroup(self, field_number, group):
|
||||
"""Appends a group to our buffer.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_START_GROUP)
|
||||
self._stream.AppendRawBytes(group.SerializeToString())
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_END_GROUP)
|
||||
|
||||
def AppendMessage(self, field_number, msg):
|
||||
"""Appends a nested message to our buffer.
|
||||
"""
|
||||
self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
|
||||
self._stream.AppendVarUInt32(msg.ByteSize())
|
||||
self._stream.AppendRawBytes(msg.SerializeToString())
|
||||
|
||||
def AppendMessageSetItem(self, field_number, msg):
|
||||
"""Appends an item using the message set wire format.
|
||||
|
||||
The message set message looks like this:
|
||||
message MessageSet {
|
||||
repeated group Item = 1 {
|
||||
required int32 type_id = 2;
|
||||
required string message = 3;
|
||||
}
|
||||
}
|
||||
"""
|
||||
self._AppendTag(1, wire_format.WIRETYPE_START_GROUP)
|
||||
self.AppendInt32(2, field_number)
|
||||
self.AppendMessage(3, msg)
|
||||
self._AppendTag(1, wire_format.WIRETYPE_END_GROUP)
|
||||
|
||||
def _AppendTag(self, field_number, wire_type):
|
||||
"""Appends a tag containing field number and wire type information."""
|
||||
self._stream.AppendVarUInt32(wire_format.PackTag(field_number, wire_type))
|
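The tag emitted by _AppendTag above is the standard protocol buffer field key: the field number shifted left by three bits, ORed with the wire type. A short sketch of that packing (assumed to match wire_format.PackTag) and of driving the Encoder directly:

  def pack_tag(field_number, wire_type):     # assumed equivalent of wire_format.PackTag
    return (field_number << 3) | wire_type

  encoder = Encoder()
  encoder.AppendInt32(1, 150)                # field 1, varint-encoded value
  encoder.AppendString(2, u'testing')        # field 2, length-delimited UTF-8
  data = encoder.ToString()                  # concatenation of tag/value pairs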
@@ -1,326 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""InputStream is the primitive interface for reading bits from the wire.
|
||||
|
||||
All protocol buffer deserialization can be expressed in terms of
|
||||
the InputStream primitives provided here.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import struct
|
||||
from array import array
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf.internal import wire_format
|
||||
|
||||
|
||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
||||
# that the interface is strongly inspired by CodedInputStream from the C++
|
||||
# proto2 implementation.
|
||||
|
||||
|
||||
class InputStreamBuffer(object):
|
||||
|
||||
"""Contains all logic for reading bits, and dealing with stream position.
|
||||
|
||||
If an InputStream method ever raises an exception, the stream is left
|
||||
in an indeterminate state and is not safe for further use.
|
||||
"""
|
||||
|
||||
def __init__(self, s):
|
||||
# What we really want is something like array('B', s), where elements we
|
||||
# read from the array are already given to us as one-byte integers. BUT
|
||||
# using array() instead of buffer() would force full string copies to result
|
||||
# from each GetSubBuffer() call.
|
||||
#
|
||||
# So, if the N serialized bytes of a single protocol buffer object are
|
||||
# split evenly between 2 child messages, and so on recursively, using
|
||||
# array('B', s) instead of buffer() would incur an additional N*logN bytes
|
||||
# copied during deserialization.
|
||||
#
|
||||
# The higher constant overhead of having to ord() for every byte we read
|
||||
# from the buffer in _ReadVarintHelper() could definitely lead to worse
|
||||
# performance in many real-world scenarios, even if the asymptotic
|
||||
# complexity is better. However, our real answer is that the mythical
|
||||
# Python/C extension module output mode for the protocol compiler will
|
||||
# be blazing-fast and will eliminate most use of this class anyway.
|
||||
self._buffer = buffer(s)
|
||||
self._pos = 0
|
||||
|
||||
def EndOfStream(self):
|
||||
"""Returns true iff we're at the end of the stream.
|
||||
If this returns true, then a call to any other InputStream method
|
||||
will raise an exception.
|
||||
"""
|
||||
return self._pos >= len(self._buffer)
|
||||
|
||||
def Position(self):
|
||||
"""Returns the current position in the stream, or equivalently, the
|
||||
number of bytes read so far.
|
||||
"""
|
||||
return self._pos
|
||||
|
||||
def GetSubBuffer(self, size=None):
|
||||
"""Returns a sequence-like object that represents a portion of our
|
||||
underlying sequence.
|
||||
|
||||
Position 0 in the returned object corresponds to self.Position()
|
||||
in this stream.
|
||||
|
||||
If size is specified, then the returned object ends after the
|
||||
next "size" bytes in this stream. If size is not specified,
|
||||
then the returned object ends at the end of this stream.
|
||||
|
||||
We guarantee that the returned object R supports the Python buffer
|
||||
interface (and thus that the call buffer(R) will work).
|
||||
|
||||
Note that the returned buffer is read-only.
|
||||
|
||||
The intended use for this method is for nested-message and nested-group
|
||||
deserialization, where we want to make a recursive MergeFromString()
|
||||
call on the portion of the original sequence that contains the serialized
|
||||
nested message. (And we'd like to do so without making unnecessary string
|
||||
copies).
|
||||
|
||||
REQUIRES: size is nonnegative.
|
||||
"""
|
||||
# Note that buffer() doesn't perform any actual string copy.
|
||||
if size is None:
|
||||
return buffer(self._buffer, self._pos)
|
||||
else:
|
||||
if size < 0:
|
||||
raise message.DecodeError('Negative size %d' % size)
|
||||
return buffer(self._buffer, self._pos, size)
|
||||
|
||||
def SkipBytes(self, num_bytes):
|
||||
"""Skip num_bytes bytes ahead, or go to the end of the stream, whichever
|
||||
comes first.
|
||||
|
||||
REQUIRES: num_bytes is nonnegative.
|
||||
"""
|
||||
if num_bytes < 0:
|
||||
raise message.DecodeError('Negative num_bytes %d' % num_bytes)
|
||||
self._pos += num_bytes
|
||||
self._pos = min(self._pos, len(self._buffer))
|
||||
|
||||
def ReadBytes(self, size):
|
||||
"""Reads up to 'size' bytes from the stream, stopping early
|
||||
only if we reach the end of the stream. Returns the bytes read
|
||||
as a string.
|
||||
"""
|
||||
if size < 0:
|
||||
raise message.DecodeError('Negative size %d' % size)
|
||||
s = (self._buffer[self._pos : self._pos + size])
|
||||
self._pos += len(s) # Only advance by the number of bytes actually read.
|
||||
return s
|
||||
|
||||
def ReadLittleEndian32(self):
|
||||
"""Interprets the next 4 bytes of the stream as a little-endian
|
||||
encoded, unsiged 32-bit integer, and returns that integer.
|
||||
"""
|
||||
try:
|
||||
i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN,
|
||||
self._buffer[self._pos : self._pos + 4])
|
||||
self._pos += 4
|
||||
return i[0] # unpack() result is a 1-element tuple.
|
||||
except struct.error, e:
|
||||
raise message.DecodeError(e)
|
||||
|
||||
def ReadLittleEndian64(self):
|
||||
"""Interprets the next 8 bytes of the stream as a little-endian
|
||||
encoded, unsiged 64-bit integer, and returns that integer.
|
||||
"""
|
||||
try:
|
||||
i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN,
|
||||
self._buffer[self._pos : self._pos + 8])
|
||||
self._pos += 8
|
||||
return i[0] # unpack() result is a 1-element tuple.
|
||||
except struct.error, e:
|
||||
raise message.DecodeError(e)
|
||||
|
||||
def ReadVarint32(self):
|
||||
"""Reads a varint from the stream, interprets this varint
|
||||
as a signed, 32-bit integer, and returns the integer.
|
||||
"""
|
||||
i = self.ReadVarint64()
|
||||
if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX:
|
||||
raise message.DecodeError('Value out of range for int32: %d' % i)
|
||||
return int(i)
|
||||
|
||||
def ReadVarUInt32(self):
|
||||
"""Reads a varint from the stream, interprets this varint
|
||||
as an unsigned, 32-bit integer, and returns the integer.
|
||||
"""
|
||||
i = self.ReadVarUInt64()
|
||||
if i > wire_format.UINT32_MAX:
|
||||
raise message.DecodeError('Value out of range for uint32: %d' % i)
|
||||
return i
|
||||
|
||||
def ReadVarint64(self):
|
||||
"""Reads a varint from the stream, interprets this varint
|
||||
as a signed, 64-bit integer, and returns the integer.
|
||||
"""
|
||||
i = self.ReadVarUInt64()
|
||||
if i > wire_format.INT64_MAX:
|
||||
i -= (1 << 64)
|
||||
return i
|
||||
|
||||
def ReadVarUInt64(self):
|
||||
"""Reads a varint from the stream, interprets this varint
|
||||
as an unsigned, 64-bit integer, and returns the integer.
|
||||
"""
|
||||
i = self._ReadVarintHelper()
|
||||
if not 0 <= i <= wire_format.UINT64_MAX:
|
||||
raise message.DecodeError('Value out of range for uint64: %d' % i)
|
||||
return i
|
||||
|
||||
def _ReadVarintHelper(self):
|
||||
"""Helper for the various varint-reading methods above.
|
||||
Reads an unsigned, varint-encoded integer from the stream and
|
||||
returns this integer.
|
||||
|
||||
Does no bounds checking except to ensure that we read at most as many bytes
|
||||
as could possibly be present in a varint-encoded 64-bit number.
|
||||
"""
|
||||
result = 0
|
||||
shift = 0
|
||||
while 1:
|
||||
if shift >= 64:
|
||||
raise message.DecodeError('Too many bytes when decoding varint.')
|
||||
try:
|
||||
b = ord(self._buffer[self._pos])
|
||||
except IndexError:
|
||||
raise message.DecodeError('Truncated varint.')
|
||||
self._pos += 1
|
||||
result |= ((b & 0x7f) << shift)
|
||||
shift += 7
|
||||
if not (b & 0x80):
|
||||
return result
|
||||
|
||||
class InputStreamArray(object):
|
||||
def __init__(self, s):
|
||||
self._buffer = array('B', s)
|
||||
self._pos = 0
|
||||
|
||||
def EndOfStream(self):
|
||||
return self._pos >= len(self._buffer)
|
||||
|
||||
def Position(self):
|
||||
return self._pos
|
||||
|
||||
def GetSubBuffer(self, size=None):
|
||||
if size is None:
|
||||
return self._buffer[self._pos : ].tostring()
|
||||
else:
|
||||
if size < 0:
|
||||
raise message.DecodeError('Negative size %d' % size)
|
||||
return self._buffer[self._pos : self._pos + size].tostring()
|
||||
|
||||
def SkipBytes(self, num_bytes):
|
||||
if num_bytes < 0:
|
||||
raise message.DecodeError('Negative num_bytes %d' % num_bytes)
|
||||
self._pos += num_bytes
|
||||
self._pos = min(self._pos, len(self._buffer))
|
||||
|
||||
def ReadBytes(self, size):
|
||||
if size < 0:
|
||||
raise message.DecodeError('Negative size %d' % size)
|
||||
s = self._buffer[self._pos : self._pos + size].tostring()
|
||||
self._pos += len(s) # Only advance by the number of bytes actually read.
|
||||
return s
|
||||
|
||||
def ReadLittleEndian32(self):
|
||||
try:
|
||||
i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN,
|
||||
self._buffer[self._pos : self._pos + 4])
|
||||
self._pos += 4
|
||||
return i[0] # unpack() result is a 1-element tuple.
|
||||
except struct.error, e:
|
||||
raise message.DecodeError(e)
|
||||
|
||||
def ReadLittleEndian64(self):
|
||||
try:
|
||||
i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN,
|
||||
self._buffer[self._pos : self._pos + 8])
|
||||
self._pos += 8
|
||||
return i[0] # unpack() result is a 1-element tuple.
|
||||
except struct.error, e:
|
||||
raise message.DecodeError(e)
|
||||
|
||||
def ReadVarint32(self):
|
||||
i = self.ReadVarint64()
|
||||
if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX:
|
||||
raise message.DecodeError('Value out of range for int32: %d' % i)
|
||||
return int(i)
|
||||
|
||||
def ReadVarUInt32(self):
|
||||
i = self.ReadVarUInt64()
|
||||
if i > wire_format.UINT32_MAX:
|
||||
raise message.DecodeError('Value out of range for uint32: %d' % i)
|
||||
return i
|
||||
|
||||
def ReadVarint64(self):
|
||||
i = self.ReadVarUInt64()
|
||||
if i > wire_format.INT64_MAX:
|
||||
i -= (1 << 64)
|
||||
return i
|
||||
|
||||
def ReadVarUInt64(self):
|
||||
i = self._ReadVarintHelper()
|
||||
if not 0 <= i <= wire_format.UINT64_MAX:
|
||||
raise message.DecodeError('Value out of range for uint64: %d' % i)
|
||||
return i
|
||||
|
||||
def _ReadVarintHelper(self):
|
||||
result = 0
|
||||
shift = 0
|
||||
while 1:
|
||||
if shift >= 64:
|
||||
raise message.DecodeError('Too many bytes when decoding varint.')
|
||||
try:
|
||||
b = self._buffer[self._pos]
|
||||
except IndexError:
|
||||
raise message.DecodeError('Truncated varint.')
|
||||
self._pos += 1
|
||||
result |= ((b & 0x7f) << shift)
|
||||
shift += 7
|
||||
if not (b & 0x80):
|
||||
return result
|
||||
|
||||
try:
|
||||
buffer("")
|
||||
InputStream = InputStreamBuffer
|
||||
except NotImplementedError:
|
||||
# Google App Engine: dev_appserver.py
|
||||
InputStream = InputStreamArray
|
||||
except RuntimeError:
|
||||
# Google App Engine: production
|
||||
InputStream = InputStreamArray
|
@ -1,69 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Defines a listener interface for observing certain
|
||||
state transitions on Message objects.
|
||||
|
||||
Also defines a null implementation of this interface.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
class MessageListener(object):
|
||||
|
||||
"""Listens for transitions to nonempty and for invalidations of cached
|
||||
byte sizes. Meant to be registered via Message._SetListener().
|
||||
"""
|
||||
|
||||
def TransitionToNonempty(self):
|
||||
"""Called the *first* time that this message becomes nonempty.
|
||||
Implementations are free (but not required) to call this method multiple
|
||||
times after the message has become nonempty.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def ByteSizeDirty(self):
|
||||
"""Called *every* time the cached byte size value
|
||||
for this object is invalidated (transitions from being
|
||||
"clean" to "dirty").
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class NullMessageListener(object):
|
||||
|
||||
"""No-op MessageListener implementation."""
|
||||
|
||||
def TransitionToNonempty(self):
|
||||
pass
|
||||
|
||||
def ByteSizeDirty(self):
|
||||
pass
|
@ -1,125 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""OutputStream is the primitive interface for sticking bits on the wire.
|
||||
|
||||
All protocol buffer serialization can be expressed in terms of
|
||||
the OutputStream primitives provided here.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import array
|
||||
import struct
|
||||
from froofle.protobuf import message
|
||||
from froofle.protobuf.internal import wire_format
|
||||
|
||||
|
||||
|
||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
||||
# that the interface is strongly inspired by CodedOutputStream from the C++
|
||||
# proto2 implementation.
|
||||
|
||||
|
||||
class OutputStream(object):
|
||||
|
||||
"""Contains all logic for writing bits, and ToString() to get the result."""
|
||||
|
||||
def __init__(self):
|
||||
self._buffer = array.array('B')
|
||||
|
||||
def AppendRawBytes(self, raw_bytes):
|
||||
"""Appends raw_bytes to our internal buffer."""
|
||||
self._buffer.fromstring(raw_bytes)
|
||||
|
||||
def AppendLittleEndian32(self, unsigned_value):
|
||||
"""Appends an unsigned 32-bit integer to the internal buffer,
|
||||
in little-endian byte order.
|
||||
"""
|
||||
if not 0 <= unsigned_value <= wire_format.UINT32_MAX:
|
||||
raise message.EncodeError(
|
||||
'Unsigned 32-bit out of range: %d' % unsigned_value)
|
||||
self._buffer.fromstring(struct.pack(
|
||||
wire_format.FORMAT_UINT32_LITTLE_ENDIAN, unsigned_value))
|
||||
|
||||
def AppendLittleEndian64(self, unsigned_value):
|
||||
"""Appends an unsigned 64-bit integer to the internal buffer,
|
||||
in little-endian byte order.
|
||||
"""
|
||||
if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
|
||||
raise message.EncodeError(
|
||||
'Unsigned 64-bit out of range: %d' % unsigned_value)
|
||||
self._buffer.fromstring(struct.pack(
|
||||
wire_format.FORMAT_UINT64_LITTLE_ENDIAN, unsigned_value))
|
||||
|
||||
def AppendVarint32(self, value):
|
||||
"""Appends a signed 32-bit integer to the internal buffer,
|
||||
encoded as a varint. (Note that a negative varint32 will
|
||||
always require 10 bytes of space.)
|
||||
"""
|
||||
if not wire_format.INT32_MIN <= value <= wire_format.INT32_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % value)
|
||||
self.AppendVarint64(value)
|
||||
|
||||
def AppendVarUInt32(self, value):
|
||||
"""Appends an unsigned 32-bit integer to the internal buffer,
|
||||
encoded as a varint.
|
||||
"""
|
||||
if not 0 <= value <= wire_format.UINT32_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % value)
|
||||
self.AppendVarUInt64(value)
|
||||
|
||||
def AppendVarint64(self, value):
|
||||
"""Appends a signed 64-bit integer to the internal buffer,
|
||||
encoded as a varint.
|
||||
"""
|
||||
if not wire_format.INT64_MIN <= value <= wire_format.INT64_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % value)
|
||||
if value < 0:
|
||||
value += (1 << 64)
|
||||
self.AppendVarUInt64(value)
|
||||
|
||||
def AppendVarUInt64(self, unsigned_value):
|
||||
"""Appends an unsigned 64-bit integer to the internal buffer,
|
||||
encoded as a varint.
|
||||
"""
|
||||
if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % unsigned_value)
|
||||
while True:
|
||||
bits = unsigned_value & 0x7f
|
||||
unsigned_value >>= 7
|
||||
if not unsigned_value:
|
||||
self._buffer.append(bits)
|
||||
break
|
||||
self._buffer.append(0x80|bits)
|
||||
|
||||
def ToString(self):
|
||||
"""Returns a string containing the bytes in our internal buffer."""
|
||||
return self._buffer.tostring()
|
@ -1,268 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides type checking routines.
|
||||
|
||||
This module defines type checking utilities in the forms of dictionaries:
|
||||
|
||||
VALUE_CHECKERS: A dictionary of field types and a value validation object.
|
||||
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
|
||||
function.
|
||||
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
|
||||
function.
|
||||
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
|
||||
coresponding wire types.
|
||||
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
|
||||
function.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from froofle.protobuf.internal import decoder
|
||||
from froofle.protobuf.internal import encoder
|
||||
from froofle.protobuf.internal import wire_format
|
||||
from froofle.protobuf import descriptor
|
||||
|
||||
_FieldDescriptor = descriptor.FieldDescriptor
|
||||
|
||||
|
||||
def GetTypeChecker(cpp_type, field_type):
|
||||
"""Returns a type checker for a message field of the specified types.
|
||||
|
||||
Args:
|
||||
cpp_type: C++ type of the field (see descriptor.py).
|
||||
field_type: Protocol message field type (see descriptor.py).
|
||||
|
||||
Returns:
|
||||
An instance of TypeChecker which can be used to verify the types
|
||||
of values assigned to a field of the specified type.
|
||||
"""
|
||||
if (cpp_type == _FieldDescriptor.CPPTYPE_STRING and
|
||||
field_type == _FieldDescriptor.TYPE_STRING):
|
||||
return UnicodeValueChecker()
|
||||
return _VALUE_CHECKERS[cpp_type]
|
||||
|
||||
|
||||
# None of the typecheckers below make any attempt to guard against people
|
||||
# subclassing builtin types and doing weird things. We're not trying to
|
||||
# protect against malicious clients here, just people accidentally shooting
|
||||
# themselves in the foot in obvious ways.
|
||||
|
||||
class TypeChecker(object):
|
||||
|
||||
"""Type checker used to catch type errors as early as possible
|
||||
when the client is setting scalar fields in protocol messages.
|
||||
"""
|
||||
|
||||
def __init__(self, *acceptable_types):
|
||||
self._acceptable_types = acceptable_types
|
||||
|
||||
def CheckValue(self, proposed_value):
|
||||
if not isinstance(proposed_value, self._acceptable_types):
|
||||
message = ('%.1024r has type %s, but expected one of: %s' %
|
||||
(proposed_value, type(proposed_value), self._acceptable_types))
|
||||
raise TypeError(message)
|
||||
|
||||
|
||||
# IntValueChecker and its subclasses perform integer type-checks
|
||||
# and bounds-checks.
|
||||
class IntValueChecker(object):
|
||||
|
||||
"""Checker used for integer fields. Performs type-check and range check."""
|
||||
|
||||
def CheckValue(self, proposed_value):
|
||||
if not isinstance(proposed_value, (int, long)):
|
||||
message = ('%.1024r has type %s, but expected one of: %s' %
|
||||
(proposed_value, type(proposed_value), (int, long)))
|
||||
raise TypeError(message)
|
||||
if not self._MIN <= proposed_value <= self._MAX:
|
||||
raise ValueError('Value out of range: %d' % proposed_value)
|
||||
|
||||
|
||||
class UnicodeValueChecker(object):
|
||||
|
||||
"""Checker used for string fields."""
|
||||
|
||||
def CheckValue(self, proposed_value):
|
||||
if not isinstance(proposed_value, (str, unicode)):
|
||||
message = ('%.1024r has type %s, but expected one of: %s' %
|
||||
(proposed_value, type(proposed_value), (str, unicode)))
|
||||
raise TypeError(message)
|
||||
|
||||
# If the value is of type 'str' make sure that it is in 7-bit ASCII
|
||||
# encoding.
|
||||
if isinstance(proposed_value, str):
|
||||
try:
|
||||
unicode(proposed_value, 'ascii')
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError('%.1024r isn\'t in 7-bit ASCII encoding.'
|
||||
% (proposed_value))
|
||||
|
||||
|
||||
class Int32ValueChecker(IntValueChecker):
|
||||
# We're sure to use ints instead of longs here since comparison may be more
|
||||
# efficient.
|
||||
_MIN = -2147483648
|
||||
_MAX = 2147483647
|
||||
|
||||
|
||||
class Uint32ValueChecker(IntValueChecker):
|
||||
_MIN = 0
|
||||
_MAX = (1 << 32) - 1
|
||||
|
||||
|
||||
class Int64ValueChecker(IntValueChecker):
|
||||
_MIN = -(1 << 63)
|
||||
_MAX = (1 << 63) - 1
|
||||
|
||||
|
||||
class Uint64ValueChecker(IntValueChecker):
|
||||
_MIN = 0
|
||||
_MAX = (1 << 64) - 1
|
||||
|
||||
|
||||
# Type-checkers for all scalar CPPTYPEs.
|
||||
_VALUE_CHECKERS = {
|
||||
_FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
|
||||
_FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
|
||||
_FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
|
||||
_FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
|
||||
_FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
|
||||
float, int, long),
|
||||
_FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
|
||||
float, int, long),
|
||||
_FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
|
||||
_FieldDescriptor.CPPTYPE_ENUM: Int32ValueChecker(),
|
||||
_FieldDescriptor.CPPTYPE_STRING: TypeChecker(str),
|
||||
}
|
||||
|
||||
|
||||
# Map from field type to a function F, such that F(field_num, value)
|
||||
# gives the total byte size for a value of the given type. This
|
||||
# byte size includes tag information and any other additional space
|
||||
# associated with serializing "value".
|
||||
TYPE_TO_BYTE_SIZE_FN = {
|
||||
_FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
|
||||
_FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
|
||||
_FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
|
||||
_FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
|
||||
_FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
|
||||
_FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
|
||||
_FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
|
||||
_FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
|
||||
_FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
|
||||
_FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
|
||||
_FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
|
||||
_FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
|
||||
_FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
|
||||
_FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
|
||||
_FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
|
||||
_FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
|
||||
_FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
|
||||
_FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
|
||||
}
|
||||
|
||||
|
||||
# Maps from field type to an unbound Encoder method F, such that
|
||||
# F(encoder, field_number, value) will append the serialization
|
||||
# of a value of this type to the encoder.
|
||||
_Encoder = encoder.Encoder
|
||||
TYPE_TO_SERIALIZE_METHOD = {
|
||||
_FieldDescriptor.TYPE_DOUBLE: _Encoder.AppendDouble,
|
||||
_FieldDescriptor.TYPE_FLOAT: _Encoder.AppendFloat,
|
||||
_FieldDescriptor.TYPE_INT64: _Encoder.AppendInt64,
|
||||
_FieldDescriptor.TYPE_UINT64: _Encoder.AppendUInt64,
|
||||
_FieldDescriptor.TYPE_INT32: _Encoder.AppendInt32,
|
||||
_FieldDescriptor.TYPE_FIXED64: _Encoder.AppendFixed64,
|
||||
_FieldDescriptor.TYPE_FIXED32: _Encoder.AppendFixed32,
|
||||
_FieldDescriptor.TYPE_BOOL: _Encoder.AppendBool,
|
||||
_FieldDescriptor.TYPE_STRING: _Encoder.AppendString,
|
||||
_FieldDescriptor.TYPE_GROUP: _Encoder.AppendGroup,
|
||||
_FieldDescriptor.TYPE_MESSAGE: _Encoder.AppendMessage,
|
||||
_FieldDescriptor.TYPE_BYTES: _Encoder.AppendBytes,
|
||||
_FieldDescriptor.TYPE_UINT32: _Encoder.AppendUInt32,
|
||||
_FieldDescriptor.TYPE_ENUM: _Encoder.AppendEnum,
|
||||
_FieldDescriptor.TYPE_SFIXED32: _Encoder.AppendSFixed32,
|
||||
_FieldDescriptor.TYPE_SFIXED64: _Encoder.AppendSFixed64,
|
||||
_FieldDescriptor.TYPE_SINT32: _Encoder.AppendSInt32,
|
||||
_FieldDescriptor.TYPE_SINT64: _Encoder.AppendSInt64,
|
||||
}
|
||||
|
||||
|
||||
# Maps from field type to expected wiretype.
|
||||
FIELD_TYPE_TO_WIRE_TYPE = {
|
||||
_FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
|
||||
_FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
|
||||
_FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
|
||||
_FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
|
||||
_FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_STRING:
|
||||
wire_format.WIRETYPE_LENGTH_DELIMITED,
|
||||
_FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
|
||||
_FieldDescriptor.TYPE_MESSAGE:
|
||||
wire_format.WIRETYPE_LENGTH_DELIMITED,
|
||||
_FieldDescriptor.TYPE_BYTES:
|
||||
wire_format.WIRETYPE_LENGTH_DELIMITED,
|
||||
_FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
|
||||
_FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
|
||||
_FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
|
||||
_FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
|
||||
}
|
||||
|
||||
|
||||
# Maps from field type to an unbound Decoder method F,
|
||||
# such that F(decoder) will read a field of the requested type.
|
||||
#
|
||||
# Note that Message and Group are intentionally missing here.
|
||||
# They're handled by _RecursivelyMerge().
|
||||
_Decoder = decoder.Decoder
|
||||
TYPE_TO_DESERIALIZE_METHOD = {
|
||||
_FieldDescriptor.TYPE_DOUBLE: _Decoder.ReadDouble,
|
||||
_FieldDescriptor.TYPE_FLOAT: _Decoder.ReadFloat,
|
||||
_FieldDescriptor.TYPE_INT64: _Decoder.ReadInt64,
|
||||
_FieldDescriptor.TYPE_UINT64: _Decoder.ReadUInt64,
|
||||
_FieldDescriptor.TYPE_INT32: _Decoder.ReadInt32,
|
||||
_FieldDescriptor.TYPE_FIXED64: _Decoder.ReadFixed64,
|
||||
_FieldDescriptor.TYPE_FIXED32: _Decoder.ReadFixed32,
|
||||
_FieldDescriptor.TYPE_BOOL: _Decoder.ReadBool,
|
||||
_FieldDescriptor.TYPE_STRING: _Decoder.ReadString,
|
||||
_FieldDescriptor.TYPE_BYTES: _Decoder.ReadBytes,
|
||||
_FieldDescriptor.TYPE_UINT32: _Decoder.ReadUInt32,
|
||||
_FieldDescriptor.TYPE_ENUM: _Decoder.ReadEnum,
|
||||
_FieldDescriptor.TYPE_SFIXED32: _Decoder.ReadSFixed32,
|
||||
_FieldDescriptor.TYPE_SFIXED64: _Decoder.ReadSFixed64,
|
||||
_FieldDescriptor.TYPE_SINT32: _Decoder.ReadSInt32,
|
||||
_FieldDescriptor.TYPE_SINT64: _Decoder.ReadSInt64,
|
||||
}
|
@ -1,236 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Constants and static functions to support protocol buffer wire format."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import struct
|
||||
from froofle.protobuf import message
|
||||
|
||||
|
||||
TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
|
||||
_TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
|
||||
|
||||
# These numbers identify the wire type of a protocol buffer value.
|
||||
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
|
||||
# tag-and-type to store one of these WIRETYPE_* constants.
|
||||
# These values must match WireType enum in //net/proto2/public/wire_format.h.
|
||||
WIRETYPE_VARINT = 0
|
||||
WIRETYPE_FIXED64 = 1
|
||||
WIRETYPE_LENGTH_DELIMITED = 2
|
||||
WIRETYPE_START_GROUP = 3
|
||||
WIRETYPE_END_GROUP = 4
|
||||
WIRETYPE_FIXED32 = 5
|
||||
_WIRETYPE_MAX = 5
|
||||
|
||||
|
||||
# Bounds for various integer types.
|
||||
INT32_MAX = int((1 << 31) - 1)
|
||||
INT32_MIN = int(-(1 << 31))
|
||||
UINT32_MAX = (1 << 32) - 1
|
||||
|
||||
INT64_MAX = (1 << 63) - 1
|
||||
INT64_MIN = -(1 << 63)
|
||||
UINT64_MAX = (1 << 64) - 1
|
||||
|
||||
# "struct" format strings that will encode/decode the specified formats.
|
||||
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
|
||||
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
|
||||
|
||||
|
||||
# We'll have to provide alternate implementations of AppendLittleEndian*() on
|
||||
# any architectures where these checks fail.
|
||||
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
|
||||
raise AssertionError('Format "I" is not a 32-bit number.')
|
||||
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
|
||||
raise AssertionError('Format "Q" is not a 64-bit number.')
|
||||
|
||||
|
||||
def PackTag(field_number, wire_type):
|
||||
"""Returns an unsigned 32-bit integer that encodes the field number and
|
||||
wire type information in standard protocol message wire format.
|
||||
|
||||
Args:
|
||||
field_number: Expected to be an integer in the range [1, 1 << 29)
|
||||
wire_type: One of the WIRETYPE_* constants.
|
||||
"""
|
||||
if not 0 <= wire_type <= _WIRETYPE_MAX:
|
||||
raise message.EncodeError('Unknown wire type: %d' % wire_type)
|
||||
return (field_number << TAG_TYPE_BITS) | wire_type
|
||||
|
||||
|
||||
def UnpackTag(tag):
|
||||
"""The inverse of PackTag(). Given an unsigned 32-bit number,
|
||||
returns a (field_number, wire_type) tuple.
|
||||
"""
|
||||
return (tag >> TAG_TYPE_BITS), (tag & _TAG_TYPE_MASK)
|
||||
|
||||
|
||||
def ZigZagEncode(value):
|
||||
"""ZigZag Transform: Encodes signed integers so that they can be
|
||||
effectively used with varint encoding. See wire_format.h for
|
||||
more details.
|
||||
"""
|
||||
if value >= 0:
|
||||
return value << 1
|
||||
return (value << 1) ^ (~0)
|
||||
|
||||
|
||||
def ZigZagDecode(value):
|
||||
"""Inverse of ZigZagEncode()."""
|
||||
if not value & 0x1:
|
||||
return value >> 1
|
||||
return (value >> 1) ^ (~0)
|
||||
|
||||
|
||||
|
||||
# The *ByteSize() functions below return the number of bytes required to
|
||||
# serialize "field number + type" information and then serialize the value.
|
||||
|
||||
|
||||
def Int32ByteSize(field_number, int32):
|
||||
return Int64ByteSize(field_number, int32)
|
||||
|
||||
|
||||
def Int64ByteSize(field_number, int64):
|
||||
# Have to convert to uint before calling UInt64ByteSize().
|
||||
return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
|
||||
|
||||
|
||||
def UInt32ByteSize(field_number, uint32):
|
||||
return UInt64ByteSize(field_number, uint32)
|
||||
|
||||
|
||||
def UInt64ByteSize(field_number, uint64):
|
||||
return _TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
|
||||
|
||||
|
||||
def SInt32ByteSize(field_number, int32):
|
||||
return UInt32ByteSize(field_number, ZigZagEncode(int32))
|
||||
|
||||
|
||||
def SInt64ByteSize(field_number, int64):
|
||||
return UInt64ByteSize(field_number, ZigZagEncode(int64))
|
||||
|
||||
|
||||
def Fixed32ByteSize(field_number, fixed32):
|
||||
return _TagByteSize(field_number) + 4
|
||||
|
||||
|
||||
def Fixed64ByteSize(field_number, fixed64):
|
||||
return _TagByteSize(field_number) + 8
|
||||
|
||||
|
||||
def SFixed32ByteSize(field_number, sfixed32):
|
||||
return _TagByteSize(field_number) + 4
|
||||
|
||||
|
||||
def SFixed64ByteSize(field_number, sfixed64):
|
||||
return _TagByteSize(field_number) + 8
|
||||
|
||||
|
||||
def FloatByteSize(field_number, flt):
|
||||
return _TagByteSize(field_number) + 4
|
||||
|
||||
|
||||
def DoubleByteSize(field_number, double):
|
||||
return _TagByteSize(field_number) + 8
|
||||
|
||||
|
||||
def BoolByteSize(field_number, b):
|
||||
return _TagByteSize(field_number) + 1
|
||||
|
||||
|
||||
def EnumByteSize(field_number, enum):
|
||||
return UInt32ByteSize(field_number, enum)
|
||||
|
||||
|
||||
def StringByteSize(field_number, string):
|
||||
return BytesByteSize(field_number, string.encode('utf-8'))
|
||||
|
||||
|
||||
def BytesByteSize(field_number, b):
|
||||
return (_TagByteSize(field_number)
|
||||
+ _VarUInt64ByteSizeNoTag(len(b))
|
||||
+ len(b))
|
||||
|
||||
|
||||
def GroupByteSize(field_number, message):
|
||||
return (2 * _TagByteSize(field_number) # START and END group.
|
||||
+ message.ByteSize())
|
||||
|
||||
|
||||
def MessageByteSize(field_number, message):
|
||||
return (_TagByteSize(field_number)
|
||||
+ _VarUInt64ByteSizeNoTag(message.ByteSize())
|
||||
+ message.ByteSize())
|
||||
|
||||
|
||||
def MessageSetItemByteSize(field_number, msg):
|
||||
# First compute the sizes of the tags.
|
||||
# There are 2 tags for the beginning and ending of the repeated group, that
|
||||
# is field number 1, one with field number 2 (type_id) and one with field
|
||||
# number 3 (message).
|
||||
total_size = (2 * _TagByteSize(1) + _TagByteSize(2) + _TagByteSize(3))
|
||||
|
||||
# Add the number of bytes for type_id.
|
||||
total_size += _VarUInt64ByteSizeNoTag(field_number)
|
||||
|
||||
message_size = msg.ByteSize()
|
||||
|
||||
# The number of bytes for encoding the length of the message.
|
||||
total_size += _VarUInt64ByteSizeNoTag(message_size)
|
||||
|
||||
# The size of the message.
|
||||
total_size += message_size
|
||||
return total_size
|
||||
|
||||
|
||||
# Private helper functions for the *ByteSize() functions above.
|
||||
|
||||
|
||||
def _TagByteSize(field_number):
|
||||
"""Returns the bytes required to serialize a tag with this field number."""
|
||||
# Just pass in type 0, since the type won't affect the tag+type size.
|
||||
return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
|
||||
|
||||
|
||||
def _VarUInt64ByteSizeNoTag(uint64):
|
||||
"""Returns the bytes required to serialize a single varint.
|
||||
uint64 must be unsigned.
|
||||
"""
|
||||
if uint64 > UINT64_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % uint64)
|
||||
bytes = 1
|
||||
while uint64 > 0x7f:
|
||||
bytes += 1
|
||||
uint64 >>= 7
|
||||
return bytes
|
@ -1,246 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# TODO(robinson): We should just make these methods all "pure-virtual" and move
|
||||
# all implementation out, into reflection.py for now.
|
||||
|
||||
|
||||
"""Contains an abstract base class for protocol messages."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from froofle.protobuf import text_format
|
||||
|
||||
class Error(Exception): pass
|
||||
class DecodeError(Error): pass
|
||||
class EncodeError(Error): pass
|
||||
|
||||
|
||||
class Message(object):
|
||||
|
||||
"""Abstract base class for protocol messages.
|
||||
|
||||
Protocol message classes are almost always generated by the protocol
|
||||
compiler. These generated types subclass Message and implement the methods
|
||||
shown below.
|
||||
|
||||
TODO(robinson): Link to an HTML document here.
|
||||
|
||||
TODO(robinson): Document that instances of this class will also
|
||||
have an Extensions attribute with __getitem__ and __setitem__.
|
||||
Again, not sure how to best convey this.
|
||||
|
||||
TODO(robinson): Document that the class must also have a static
|
||||
RegisterExtension(extension_field) method.
|
||||
Not sure how to best express at this point.
|
||||
"""
|
||||
|
||||
# TODO(robinson): Document these fields and methods.
|
||||
|
||||
__slots__ = []
|
||||
|
||||
DESCRIPTOR = None
|
||||
|
||||
def __eq__(self, other_msg):
|
||||
raise NotImplementedError
|
||||
|
||||
def __ne__(self, other_msg):
|
||||
# Can't just say self != other_msg, since that would infinitely recurse. :)
|
||||
return not self == other_msg
|
||||
|
||||
def __str__(self):
|
||||
return text_format.MessageToString(self)
|
||||
|
||||
def MergeFrom(self, other_msg):
|
||||
"""Merges the contents of the specified message into current message.
|
||||
|
||||
This method merges the contents of the specified message into the current
|
||||
message. Singular fields that are set in the specified message overwrite
|
||||
the corresponding fields in the current message. Repeated fields are
|
||||
appended. Singular sub-messages and groups are recursively merged.
|
||||
|
||||
Args:
|
||||
other_msg: Message to merge into the current message.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def CopyFrom(self, other_msg):
|
||||
"""Copies the content of the specified message into the current message.
|
||||
|
||||
The method clears the current message and then merges the specified
|
||||
message using MergeFrom.
|
||||
|
||||
Args:
|
||||
other_msg: Message to copy into the current one.
|
||||
"""
|
||||
if self == other_msg:
|
||||
return
|
||||
self.Clear()
|
||||
self.MergeFrom(other_msg)
|
||||
|
||||
def Clear(self):
|
||||
"""Clears all data that was set in the message."""
|
||||
raise NotImplementedError
|
||||
|
||||
def IsInitialized(self):
|
||||
"""Checks if the message is initialized.
|
||||
|
||||
Returns:
|
||||
The method returns True if the message is initialized (i.e. all of its
|
||||
required fields are set).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# TODO(robinson): MergeFromString() should probably return None and be
|
||||
# implemented in terms of a helper that returns the # of bytes read. Our
|
||||
# deserialization routines would use the helper when recursively
|
||||
# deserializing, but the end user would almost always just want the no-return
|
||||
# MergeFromString().
|
||||
|
||||
def MergeFromString(self, serialized):
|
||||
"""Merges serialized protocol buffer data into this message.
|
||||
|
||||
When we find a field in |serialized| that is already present
|
||||
in this message:
|
||||
- If it's a "repeated" field, we append to the end of our list.
|
||||
- Else, if it's a scalar, we overwrite our field.
|
||||
- Else, (it's a nonrepeated composite), we recursively merge
|
||||
into the existing composite.
|
||||
|
||||
TODO(robinson): Document handling of unknown fields.
|
||||
|
||||
Args:
|
||||
serialized: Any object that allows us to call buffer(serialized)
|
||||
to access a string of bytes using the buffer interface.
|
||||
|
||||
TODO(robinson): When we switch to a helper, this will return None.
|
||||
|
||||
Returns:
|
||||
The number of bytes read from |serialized|.
|
||||
For non-group messages, this will always be len(serialized),
|
||||
but for messages which are actually groups, this will
|
||||
generally be less than len(serialized), since we must
|
||||
stop when we reach an END_GROUP tag. Note that if
|
||||
we *do* stop because of an END_GROUP tag, the number
|
||||
of bytes returned does not include the bytes
|
||||
for the END_GROUP tag information.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def ParseFromString(self, serialized):
|
||||
"""Like MergeFromString(), except we clear the object first."""
|
||||
self.Clear()
|
||||
self.MergeFromString(serialized)
|
||||
|
||||
def SerializeToString(self):
|
||||
"""Serializes the protocol message to a binary string.
|
||||
|
||||
Returns:
|
||||
A binary string representation of the message if all of the required
|
||||
fields in the message are set (i.e. the message is initialized).
|
||||
|
||||
Raises:
|
||||
message.EncodeError if the message isn't initialized.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def SerializePartialToString(self):
|
||||
"""Serializes the protocol message to a binary string.
|
||||
|
||||
This method is similar to SerializeToString but doesn't check if the
|
||||
message is initialized.
|
||||
|
||||
Returns:
|
||||
A string representation of the partial message.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# TODO(robinson): Decide whether we like these better
|
||||
# than auto-generated has_foo() and clear_foo() methods
|
||||
# on the instances themselves. This way is less consistent
|
||||
# with C++, but it makes reflection-type access easier and
|
||||
# reduces the number of magically autogenerated things.
|
||||
#
|
||||
# TODO(robinson): Be sure to document (and test) exactly
|
||||
# which field names are accepted here. Are we case-sensitive?
|
||||
# What do we do with fields that share names with Python keywords
|
||||
# like 'lambda' and 'yield'?
|
||||
#
|
||||
# nnorwitz says:
|
||||
# """
|
||||
# Typically (in python), an underscore is appended to names that are
|
||||
# keywords. So they would become lambda_ or yield_.
|
||||
# """
|
||||
def ListFields(self, field_name):
|
||||
"""Returns a list of (FieldDescriptor, value) tuples for all
|
||||
fields in the message which are not empty. A singular field is non-empty
|
||||
if HasField() would return true, and a repeated field is non-empty if
|
||||
it contains at least one element. The fields are ordered by field
|
||||
number"""
|
||||
raise NotImplementedError
|
||||
|
||||
def HasField(self, field_name):
|
||||
raise NotImplementedError
|
||||
|
||||
def ClearField(self, field_name):
|
||||
raise NotImplementedError
|
||||
|
||||
def HasExtension(self, extension_handle):
|
||||
raise NotImplementedError
|
||||
|
||||
def ClearExtension(self, extension_handle):
|
||||
raise NotImplementedError
|
||||
|
||||
def ByteSize(self):
|
||||
"""Returns the serialized size of this message.
|
||||
Recursively calls ByteSize() on all contained messages.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _SetListener(self, message_listener):
|
||||
"""Internal method used by the protocol message implementation.
|
||||
Clients should not call this directly.
|
||||
|
||||
Sets a listener that this message will call on certain state transitions.
|
||||
|
||||
The purpose of this method is to register back-edges from children to
|
||||
parents at runtime, for the purpose of setting "has" bits and
|
||||
byte-size-dirty bits in the parent and ancestor objects whenever a child or
|
||||
descendant object is modified.
|
||||
|
||||
If the client wants to disconnect this Message from the object tree, she
|
||||
explicitly sets callback to None.
|
||||
|
||||
If message_listener is None, unregisters any existing listener. Otherwise,
|
||||
message_listener must implement the MessageListener interface in
|
||||
internal/message_listener.py, and we discard any listener registered
|
||||
via a previous _SetListener() call.
|
||||
"""
|
||||
raise NotImplementedError
|
File diff suppressed because it is too large
Load Diff
@ -1,208 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Declares the RPC service interfaces.
|
||||
|
||||
This module declares the abstract interfaces underlying proto2 RPC
|
||||
services. These are intented to be independent of any particular RPC
|
||||
implementation, so that proto2 services can be used on top of a variety
|
||||
of implementations.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class Service(object):
|
||||
|
||||
"""Abstract base interface for protocol-buffer-based RPC services.
|
||||
|
||||
Services themselves are abstract classes (implemented either by servers or as
|
||||
stubs), but they subclass this base interface. The methods of this
|
||||
interface can be used to call the methods of the service without knowing
|
||||
its exact type at compile time (analogous to the Message interface).
|
||||
"""
|
||||
|
||||
def GetDescriptor(self):
|
||||
"""Retrieves this service's descriptor."""
|
||||
raise NotImplementedError
|
||||
|
||||
def CallMethod(self, method_descriptor, rpc_controller,
|
||||
request, done):
|
||||
"""Calls a method of the service specified by method_descriptor.
|
||||
|
||||
Preconditions:
|
||||
* method_descriptor.service == GetDescriptor
|
||||
* request is of the exact same classes as returned by
|
||||
GetRequestClass(method).
|
||||
* After the call has started, the request must not be modified.
|
||||
* "rpc_controller" is of the correct type for the RPC implementation being
|
||||
used by this Service. For stubs, the "correct type" depends on the
|
||||
RpcChannel which the stub is using.
|
||||
|
||||
Postconditions:
|
||||
* "done" will be called when the method is complete. This may be
|
||||
before CallMethod() returns or it may be at some point in the future.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def GetRequestClass(self, method_descriptor):
|
||||
"""Returns the class of the request message for the specified method.
|
||||
|
||||
CallMethod() requires that the request is of a particular subclass of
|
||||
Message. GetRequestClass() gets the default instance of this required
|
||||
type.
|
||||
|
||||
Example:
|
||||
method = service.GetDescriptor().FindMethodByName("Foo")
|
||||
request = stub.GetRequestClass(method)()
|
||||
request.ParseFromString(input)
|
||||
service.CallMethod(method, request, callback)
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def GetResponseClass(self, method_descriptor):
|
||||
"""Returns the class of the response message for the specified method.
|
||||
|
||||
This method isn't really needed, as the RpcChannel's CallMethod constructs
|
||||
the response protocol message. It's provided anyway in case it is useful
|
||||
for the caller to know the response type in advance.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RpcController(object):
|
||||
|
||||
"""An RpcController mediates a single method call.
|
||||
|
||||
The primary purpose of the controller is to provide a way to manipulate
|
||||
settings specific to the RPC implementation and to find out about RPC-level
|
||||
errors. The methods provided by the RpcController interface are intended
|
||||
to be a "least common denominator" set of features which we expect all
|
||||
implementations to support. Specific implementations may provide more
|
||||
advanced features (e.g. deadline propagation).
|
||||
"""
|
||||
|
||||
# Client-side methods below
|
||||
|
||||
def Reset(self):
|
||||
"""Resets the RpcController to its initial state.
|
||||
|
||||
After the RpcController has been reset, it may be reused in
|
||||
a new call. Must not be called while an RPC is in progress.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def Failed(self):
|
||||
"""Returns true if the call failed.
|
||||
|
||||
After a call has finished, returns true if the call failed. The possible
|
||||
reasons for failure depend on the RPC implementation. Failed() must not
|
||||
be called before a call has finished. If Failed() returns true, the
|
||||
contents of the response message are undefined.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def ErrorText(self):
|
||||
"""If Failed is true, returns a human-readable description of the error."""
|
||||
raise NotImplementedError
|
||||
|
||||
def StartCancel(self):
|
||||
"""Initiate cancellation.
|
||||
|
||||
Advises the RPC system that the caller desires that the RPC call be
|
||||
canceled. The RPC system may cancel it immediately, may wait awhile and
|
||||
then cancel it, or may not even cancel the call at all. If the call is
|
||||
canceled, the "done" callback will still be called and the RpcController
|
||||
will indicate that the call failed at that time.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# Server-side methods below
|
||||
|
||||
def SetFailed(self, reason):
|
||||
"""Sets a failure reason.
|
||||
|
||||
Causes Failed() to return true on the client side. "reason" will be
|
||||
incorporated into the message returned by ErrorText(). If you find
|
||||
you need to return machine-readable information about failures, you
|
||||
should incorporate it into your response protocol buffer and should
|
||||
NOT call SetFailed().
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def IsCanceled(self):
|
||||
"""Checks if the client cancelled the RPC.
|
||||
|
||||
If true, indicates that the client canceled the RPC, so the server may
|
||||
as well give up on replying to it. The server should still call the
|
||||
final "done" callback.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def NotifyOnCancel(self, callback):
|
||||
"""Sets a callback to invoke on cancel.
|
||||
|
||||
Asks that the given callback be called when the RPC is canceled. The
|
||||
callback will always be called exactly once. If the RPC completes without
|
||||
being canceled, the callback will be called after completion. If the RPC
|
||||
has already been canceled when NotifyOnCancel() is called, the callback
|
||||
will be called immediately.
|
||||
|
||||
NotifyOnCancel() must be called no more than once per request.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RpcChannel(object):
|
||||
|
||||
"""Abstract interface for an RPC channel.
|
||||
|
||||
An RpcChannel represents a communication line to a service which can be used
|
||||
to call that service's methods. The service may be running on another
|
||||
machine. Normally, you should not use an RpcChannel directly, but instead
|
||||
construct a stub {@link Service} wrapping it. Example:
|
||||
|
||||
Example:
|
||||
RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
|
||||
RpcController controller = rpcImpl.Controller()
|
||||
MyService service = MyService_Stub(channel)
|
||||
service.MyMethod(controller, request, callback)
|
||||
"""
|
||||
|
||||
def CallMethod(self, method_descriptor, rpc_controller,
|
||||
request, response_class, done):
|
||||
"""Calls the method identified by the descriptor.
|
||||
|
||||
Call the given method of the remote service. The signature of this
|
||||
procedure looks the same as Service.CallMethod(), but the requirements
|
||||
are less strict in one important way: the request object doesn't have to
|
||||
be of any specific class as long as its descriptor is method.input_type.
|
||||
"""
|
||||
raise NotImplementedError
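# --- illustrative sketch (not part of the original service.py) ---
# How the three interfaces above fit together: a purely in-process RpcChannel
# that dispatches straight to a local Service implementation, plus a minimal
# RpcController.  LocalChannel and SimpleController are hypothetical names
# used only for this example.

class SimpleController(object):
  """Bare-bones RpcController: tracks only failure state."""
  def __init__(self):
    self._failed = False
    self._error = None

  def Reset(self):
    self._failed, self._error = False, None

  def Failed(self):
    return self._failed

  def ErrorText(self):
    return self._error

  def SetFailed(self, reason):
    self._failed, self._error = True, reason


class LocalChannel(object):
  """Bare-bones RpcChannel: calls a local Service instance directly."""
  def __init__(self, service_impl):
    self._impl = service_impl

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    # Look the method up by name on the wrapped implementation; the
    # implementation is responsible for invoking done(response).
    method = getattr(self._impl, method_descriptor.name)
    method(rpc_controller, request, done)

# Usage (names assumed): stub = MyService_Stub(LocalChannel(MyServiceImpl()))
#                        stub.MyMethod(SimpleController(), request, callback)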
|
@ -1,289 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains metaclasses used to create protocol service and service stub
|
||||
classes from ServiceDescriptor objects at runtime.
|
||||
|
||||
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
|
||||
inject all useful functionality into the classes output by the protocol
|
||||
compiler at compile-time.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class GeneratedServiceType(type):
|
||||
|
||||
"""Metaclass for service classes created at runtime from ServiceDescriptors.
|
||||
|
||||
Implementations for all methods described in the Service class are added here
|
||||
by this class. We also create properties to allow getting/setting all fields
|
||||
in the protocol message.
|
||||
|
||||
The protocol compiler currently uses this metaclass to create protocol service
|
||||
classes at runtime. Clients can also manually create their own classes at
|
||||
runtime, as in this example:
|
||||
|
||||
mydescriptor = ServiceDescriptor(.....)
|
||||
class MyProtoService(service.Service):
|
||||
__metaclass__ = GeneratedServiceType
|
||||
DESCRIPTOR = mydescriptor
|
||||
myservice_instance = MyProtoService()
|
||||
...
|
||||
"""
|
||||
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Creates a message service class.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the metaclass
|
||||
protocol).
|
||||
bases: Base classes of the class being constructed.
|
||||
dictionary: The class dictionary of the class being constructed.
|
||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
||||
describing this protocol service type.
|
||||
"""
|
||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
||||
# when a service class is subclassed.
|
||||
if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
|
||||
return
|
||||
descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
|
||||
service_builder = _ServiceBuilder(descriptor)
|
||||
service_builder.BuildService(cls)
|
||||
|
||||
|
||||
class GeneratedServiceStubType(GeneratedServiceType):
|
||||
|
||||
"""Metaclass for service stubs created at runtime from ServiceDescriptors.
|
||||
|
||||
This class has similar responsibilities as GeneratedServiceType, except that
|
||||
it creates the service stub classes.
|
||||
"""
|
||||
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Creates a message service stub class.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, here).
|
||||
bases: Base classes of the class being constructed.
|
||||
dictionary: The class dictionary of the class being constructed.
|
||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
||||
describing this protocol service type.
|
||||
"""
|
||||
super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
|
||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
||||
# when a service stub is subclassed.
|
||||
if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
|
||||
return
|
||||
descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
|
||||
service_stub_builder = _ServiceStubBuilder(descriptor)
|
||||
service_stub_builder.BuildServiceStub(cls)
|
||||
|
||||
|
||||
class _ServiceBuilder(object):
|
||||
|
||||
"""This class constructs a protocol service class using a service descriptor.
|
||||
|
||||
Given a service descriptor, this class constructs a class that represents
|
||||
the specified service descriptor. One service builder instance constructs
|
||||
exactly one service class. That means all instances of that class share the
|
||||
same builder.
|
||||
"""
|
||||
|
||||
def __init__(self, service_descriptor):
|
||||
"""Initializes an instance of the service class builder.
|
||||
|
||||
Args:
|
||||
service_descriptor: ServiceDescriptor to use when constructing the
|
||||
service class.
|
||||
"""
|
||||
self.descriptor = service_descriptor
|
||||
|
||||
def BuildService(self, cls):
|
||||
"""Constructs the service class.
|
||||
|
||||
Args:
|
||||
cls: The class that will be constructed.
|
||||
"""
|
||||
|
||||
# CallMethod needs to operate with an instance of the Service class. This
|
||||
# internal wrapper function exists only to be able to pass the service
|
||||
# instance to the method that does the real CallMethod work.
|
||||
def _WrapCallMethod(srvc, method_descriptor,
|
||||
rpc_controller, request, callback):
|
||||
self._CallMethod(srvc, method_descriptor,
|
||||
rpc_controller, request, callback)
|
||||
self.cls = cls
|
||||
cls.CallMethod = _WrapCallMethod
|
||||
cls.GetDescriptor = self._GetDescriptor
|
||||
cls.GetRequestClass = self._GetRequestClass
|
||||
cls.GetResponseClass = self._GetResponseClass
|
||||
for method in self.descriptor.methods:
|
||||
setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
|
||||
|
||||
def _GetDescriptor(self):
|
||||
"""Retrieves the service descriptor.
|
||||
|
||||
Returns:
|
||||
The descriptor of the service (of type ServiceDescriptor).
|
||||
"""
|
||||
return self.descriptor
|
||||
|
||||
def _CallMethod(self, srvc, method_descriptor,
|
||||
rpc_controller, request, callback):
|
||||
"""Calls the method described by a given method descriptor.
|
||||
|
||||
Args:
|
||||
srvc: Instance of the service for which this method is called.
|
||||
method_descriptor: Descriptor that represent the method to call.
|
||||
rpc_controller: RPC controller to use for this method's execution.
|
||||
request: Request protocol message.
|
||||
callback: A callback to invoke after the method has completed.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'CallMethod() given method descriptor for wrong service type.')
|
||||
method = getattr(srvc, method_descriptor.name)
|
||||
method(rpc_controller, request, callback)
|
||||
|
||||
def _GetRequestClass(self, method_descriptor):
|
||||
"""Returns the class of the request protocol message.
|
||||
|
||||
Args:
|
||||
method_descriptor: Descriptor of the method for which to return the
|
||||
request protocol message class.
|
||||
|
||||
Returns:
|
||||
A class that represents the input protocol message of the specified
|
||||
method.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'GetRequestClass() given method descriptor for wrong service type.')
|
||||
return method_descriptor.input_type._concrete_class
|
||||
|
||||
def _GetResponseClass(self, method_descriptor):
|
||||
"""Returns the class of the response protocol message.
|
||||
|
||||
Args:
|
||||
method_descriptor: Descriptor of the method for which to return the
|
||||
response protocol message class.
|
||||
|
||||
Returns:
|
||||
A class that represents the output protocol message of the specified
|
||||
method.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'GetResponseClass() given method descriptor for wrong service type.')
|
||||
return method_descriptor.output_type._concrete_class
|
||||
|
||||
def _GenerateNonImplementedMethod(self, method):
|
||||
"""Generates and returns a method that can be set for a service methods.
|
||||
|
||||
Args:
|
||||
method: Descriptor of the service method for which a method is to be
|
||||
generated.
|
||||
|
||||
Returns:
|
||||
A method that can be added to the service class.
|
||||
"""
|
||||
return lambda inst, rpc_controller, request, callback: (
|
||||
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
||||
|
||||
def _NonImplementedMethod(self, method_name, rpc_controller, callback):
|
||||
"""The body of all methods in the generated service class.
|
||||
|
||||
Args:
|
||||
method_name: Name of the method being executed.
|
||||
rpc_controller: RPC controller used to execute this method.
|
||||
callback: A callback which will be invoked when the method finishes.
|
||||
"""
|
||||
rpc_controller.SetFailed('Method %s not implemented.' % method_name)
|
||||
callback(None)
|
||||
|
||||
|
||||
class _ServiceStubBuilder(object):
|
||||
|
||||
"""Constructs a protocol service stub class using a service descriptor.
|
||||
|
||||
Given a service descriptor, this class constructs a suitable stub class.
|
||||
A stub is just a type-safe wrapper around an RpcChannel which emulates a
|
||||
local implementation of the service.
|
||||
|
||||
One service stub builder instance constructs exactly one class. It means all
|
||||
instances of that class share the same service stub builder.
|
||||
"""
|
||||
|
||||
def __init__(self, service_descriptor):
|
||||
"""Initializes an instance of the service stub class builder.
|
||||
|
||||
Args:
|
||||
service_descriptor: ServiceDescriptor to use when constructing the
|
||||
stub class.
|
||||
"""
|
||||
self.descriptor = service_descriptor
|
||||
|
||||
def BuildServiceStub(self, cls):
|
||||
"""Constructs the stub class.
|
||||
|
||||
Args:
|
||||
cls: The class that will be constructed.
|
||||
"""
|
||||
|
||||
def _ServiceStubInit(stub, rpc_channel):
|
||||
stub.rpc_channel = rpc_channel
|
||||
self.cls = cls
|
||||
cls.__init__ = _ServiceStubInit
|
||||
for method in self.descriptor.methods:
|
||||
setattr(cls, method.name, self._GenerateStubMethod(method))
|
||||
|
||||
def _GenerateStubMethod(self, method):
|
||||
return lambda inst, rpc_controller, request, callback: self._StubMethod(
|
||||
inst, method, rpc_controller, request, callback)
|
||||
|
||||
def _StubMethod(self, stub, method_descriptor,
|
||||
rpc_controller, request, callback):
|
||||
"""The body of all service methods in the generated stub class.
|
||||
|
||||
Args:
|
||||
stub: Stub instance.
|
||||
method_descriptor: Descriptor of the invoked method.
|
||||
rpc_controller: Rpc controller to execute the method.
|
||||
request: Request protocol message.
|
||||
callback: A callback to execute when the method finishes.
|
||||
"""
|
||||
stub.rpc_channel.CallMethod(
|
||||
method_descriptor, rpc_controller, request,
|
||||
method_descriptor.output_type._concrete_class, callback)
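# --- illustrative sketch (not part of the original service_reflection.py) ---
# How generated code typically uses the two metaclasses above: the concrete
# service carries GeneratedServiceType, the stub carries
# GeneratedServiceStubType, and both point DESCRIPTOR at the same
# ServiceDescriptor.  MY_SERVICE_DESCRIPTOR below is a placeholder for a
# descriptor produced by the protocol compiler, not a real name.

from froofle.protobuf import service
from froofle.protobuf import service_reflection

class MyProtoService(service.Service):
  __metaclass__ = service_reflection.GeneratedServiceType
  DESCRIPTOR = MY_SERVICE_DESCRIPTOR    # placeholder

class MyProtoService_Stub(MyProtoService):
  __metaclass__ = service_reflection.GeneratedServiceStubType
  DESCRIPTOR = MY_SERVICE_DESCRIPTOR    # placeholder

# _ServiceStubBuilder gives the stub an __init__(rpc_channel) and one method
# per MethodDescriptor that forwards to rpc_channel.CallMethod(), so:
#   stub = MyProtoService_Stub(channel)
#   stub.SomeMethod(controller, request, callback)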
|
@ -1,125 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# http://code.google.com/p/protobuf/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains routines for printing protocol messages in text format."""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import cStringIO
|
||||
|
||||
from froofle.protobuf import descriptor
|
||||
|
||||
__all__ = [ 'MessageToString', 'PrintMessage', 'PrintField', 'PrintFieldValue' ]
|
||||
|
||||
def MessageToString(message):
|
||||
out = cStringIO.StringIO()
|
||||
PrintMessage(message, out)
|
||||
result = out.getvalue()
|
||||
out.close()
|
||||
return result
|
||||
|
||||
def PrintMessage(message, out, indent = 0):
|
||||
for field, value in message.ListFields():
|
||||
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
|
||||
for element in value:
|
||||
PrintField(field, element, out, indent)
|
||||
else:
|
||||
PrintField(field, value, out, indent)
|
||||
|
||||
def PrintField(field, value, out, indent = 0):
|
||||
"""Print a single field name/value pair. For repeated fields, the value
|
||||
should be a single element."""
|
||||
|
||||
out.write(' ' * indent);
|
||||
if field.is_extension:
|
||||
out.write('[')
|
||||
if (field.containing_type.GetOptions().message_set_wire_format and
|
||||
field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
|
||||
field.message_type == field.extension_scope and
|
||||
field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
|
||||
out.write(field.message_type.full_name)
|
||||
else:
|
||||
out.write(field.full_name)
|
||||
out.write(']')
|
||||
elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
|
||||
# For groups, use the capitalized name.
|
||||
out.write(field.message_type.name)
|
||||
else:
|
||||
out.write(field.name)
|
||||
|
||||
if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
|
||||
# The colon is optional in this case, but our cross-language golden files
|
||||
# don't include it.
|
||||
out.write(': ')
|
||||
|
||||
PrintFieldValue(field, value, out, indent)
|
||||
out.write('\n')
|
||||
|
||||
def PrintFieldValue(field, value, out, indent = 0):
|
||||
"""Print a single field value (not including name). For repeated fields,
|
||||
the value should be a single element."""
|
||||
|
||||
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
|
||||
out.write(' {\n')
|
||||
PrintMessage(value, out, indent + 2)
|
||||
out.write(' ' * indent + '}')
|
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
|
||||
out.write(field.enum_type.values_by_number[value].name)
|
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
|
||||
out.write('\"')
|
||||
out.write(_CEscape(value))
|
||||
out.write('\"')
|
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
|
||||
if value:
|
||||
out.write("true")
|
||||
else:
|
||||
out.write("false")
|
||||
else:
|
||||
out.write(str(value))
|
||||
|
||||
# text.encode('string_escape') does not seem to satisfy our needs as it
|
||||
# encodes unprintable characters using two-digit hex escapes whereas our
|
||||
# C++ unescaping function allows hex escapes to be any length. So,
|
||||
# "\0011".encode('string_escape') ends up being "\\x011", which will be
|
||||
# decoded in C++ as a single-character string with char code 0x11.
|
||||
def _CEscape(text):
|
||||
def escape(c):
|
||||
o = ord(c)
|
||||
if o == 10: return r"\n" # optional escape
|
||||
if o == 13: return r"\r" # optional escape
|
||||
if o == 9: return r"\t" # optional escape
|
||||
if o == 39: return r"\'" # optional escape
|
||||
|
||||
if o == 34: return r'\"' # necessary escape
|
||||
if o == 92: return r"\\" # necessary escape
|
||||
|
||||
if o >= 127 or o < 32: return "\\%03o" % o # necessary escapes
|
||||
return c
|
||||
return "".join([escape(c) for c in text])
|
156
gerrit_upload.py
@ -1,156 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import getpass
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from tempfile import mkstemp
|
||||
|
||||
from codereview.proto_client import HttpRpc, Proxy
|
||||
from codereview.review_pb2 import ReviewService_Stub
|
||||
from codereview.upload_bundle_pb2 import *
|
||||
from git_command import GitCommand
|
||||
from error import UploadError
|
||||
|
||||
try:
|
||||
import readline
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
MAX_SEGMENT_SIZE = 1020 * 1024
|
||||
|
||||
def _GetRpcServer(email, server, save_cookies):
|
||||
"""Returns an RpcServer.
|
||||
|
||||
Returns:
|
||||
A new RpcServer, on which RPC calls can be made.
|
||||
"""
|
||||
|
||||
def GetUserCredentials():
|
||||
"""Prompts the user for a username and password."""
|
||||
e = email
|
||||
if e is None:
|
||||
e = raw_input("Email: ").strip()
|
||||
password = getpass.getpass("Password for %s: " % e)
|
||||
return (e, password)
|
||||
|
||||
# If this is the dev_appserver, use fake authentication.
|
||||
lc_server = server.lower()
|
||||
if lc_server == "localhost" or lc_server.startswith("localhost:"):
|
||||
if email is None:
|
||||
email = "test@example.com"
|
||||
server = HttpRpc(
|
||||
server,
|
||||
lambda: (email, "password"),
|
||||
extra_headers={"Cookie":
|
||||
'dev_appserver_login="%s:False"' % email})
|
||||
# Don't try to talk to ClientLogin.
|
||||
server.authenticated = True
|
||||
return server
|
||||
|
||||
if save_cookies:
|
||||
cookie_file = ".gerrit_cookies"
|
||||
else:
|
||||
cookie_file = None
|
||||
|
||||
return HttpRpc(server, GetUserCredentials,
|
||||
cookie_file=cookie_file)
|
||||
|
||||
def UploadBundle(project,
|
||||
server,
|
||||
email,
|
||||
dest_project,
|
||||
dest_branch,
|
||||
src_branch,
|
||||
bases,
|
||||
save_cookies=True):
|
||||
|
||||
srv = _GetRpcServer(email, server, save_cookies)
|
||||
review = Proxy(ReviewService_Stub(srv))
|
||||
tmp_fd, tmp_bundle = mkstemp(".bundle", ".gpq")
|
||||
os.close(tmp_fd)
|
||||
|
||||
srcid = project.bare_git.rev_parse(src_branch)
|
||||
revlist = project._revlist(src_branch, *bases)
|
||||
|
||||
if srcid not in revlist:
|
||||
# This can happen if src_branch is an annotated tag
|
||||
#
|
||||
revlist.append(srcid)
|
||||
revlist_size = len(revlist) * 42
|
||||
|
||||
try:
|
||||
cmd = ['bundle', 'create', tmp_bundle, src_branch]
|
||||
cmd.extend(bases)
|
||||
if GitCommand(project, cmd).Wait() != 0:
|
||||
raise UploadError('cannot create bundle')
|
||||
fd = open(tmp_bundle, "rb")
|
||||
|
||||
bundle_id = None
|
||||
segment_id = 0
|
||||
next_data = fd.read(MAX_SEGMENT_SIZE - revlist_size)
|
||||
|
||||
while True:
|
||||
this_data = next_data
|
||||
next_data = fd.read(MAX_SEGMENT_SIZE)
|
||||
segment_id += 1
|
||||
|
||||
if bundle_id is None:
|
||||
req = UploadBundleRequest()
|
||||
req.dest_project = str(dest_project)
|
||||
req.dest_branch = str(dest_branch)
|
||||
for c in revlist:
|
||||
req.contained_object.append(c)
|
||||
else:
|
||||
req = UploadBundleContinue()
|
||||
req.bundle_id = bundle_id
|
||||
req.segment_id = segment_id
|
||||
|
||||
req.bundle_data = this_data
|
||||
if len(next_data) > 0:
|
||||
req.partial_upload = True
|
||||
else:
|
||||
req.partial_upload = False
|
||||
|
||||
if bundle_id is None:
|
||||
rsp = review.UploadBundle(req)
|
||||
else:
|
||||
rsp = review.ContinueBundle(req)
|
||||
|
||||
if rsp.status_code == UploadBundleResponse.CONTINUE:
|
||||
bundle_id = rsp.bundle_id
|
||||
elif rsp.status_code == UploadBundleResponse.RECEIVED:
|
||||
bundle_id = rsp.bundle_id
|
||||
return bundle_id
|
||||
else:
|
||||
if rsp.status_code == UploadBundleResponse.UNKNOWN_PROJECT:
|
||||
reason = 'unknown project "%s"' % dest_project
|
||||
elif rsp.status_code == UploadBundleResponse.UNKNOWN_BRANCH:
|
||||
reason = 'unknown branch "%s"' % dest_branch
|
||||
elif rsp.status_code == UploadBundleResponse.UNKNOWN_BUNDLE:
|
||||
reason = 'unknown bundle'
|
||||
elif rsp.status_code == UploadBundleResponse.NOT_BUNDLE_OWNER:
|
||||
reason = 'not bundle owner'
|
||||
elif rsp.status_code == UploadBundleResponse.BUNDLE_CLOSED:
|
||||
reason = 'bundle closed'
|
||||
elif rsp.status_code == UploadBundleResponse.UNAUTHORIZED_USER:
|
||||
reason = ('Unauthorized user. Visit http://%s/hello to sign up.'
|
||||
% server)
|
||||
else:
|
||||
reason = 'unknown error ' + str(rsp.status_code)
|
||||
raise UploadError(reason)
|
||||
finally:
|
||||
os.unlink(tmp_bundle)
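# --- illustrative sketch (not part of the original gerrit_upload.py) ---
# UploadBundle() above streams the bundle in segments: the first request is an
# UploadBundleRequest whose payload is capped at MAX_SEGMENT_SIZE minus room
# for the rev list, every later request is an UploadBundleContinue of up to
# MAX_SEGMENT_SIZE bytes, and partial_upload tells the server whether more
# segments follow.  The chunking loop in isolation (segments() is a
# hypothetical helper, not part of this file):

def segments(fd, first_budget, chunk_size):
  data = fd.read(first_budget)
  while data:
    next_data = fd.read(chunk_size)
    yield data, bool(next_data)      # (payload, more segments to come?)
    data = next_data

# for payload, partial in segments(open(tmp_bundle, 'rb'),
#                                  MAX_SEGMENT_SIZE - revlist_size,
#                                  MAX_SEGMENT_SIZE):
#   ...  # fill bundle_data=payload, partial_upload=partial on the request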
|
@ -16,21 +16,28 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from error import GitError
|
||||
from urllib2 import urlopen, HTTPError
|
||||
from error import GitError, UploadError
|
||||
from git_command import GitCommand
|
||||
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
ID_RE = re.compile('^[0-9a-f]{40}$')
|
||||
|
||||
REVIEW_CACHE = dict()
|
||||
|
||||
def IsId(rev):
|
||||
return ID_RE.match(rev)
|
||||
|
||||
|
||||
class GitConfig(object):
|
||||
_ForUser = None
|
||||
|
||||
@classmethod
|
||||
def ForUser(cls):
|
||||
return cls(file = os.path.expanduser('~/.gitconfig'))
|
||||
if cls._ForUser is None:
|
||||
cls._ForUser = cls(file = os.path.expanduser('~/.gitconfig'))
|
||||
return cls._ForUser
|
||||
|
||||
@classmethod
|
||||
def ForRepository(cls, gitdir, defaults=None):
|
||||
@ -254,8 +261,68 @@ class Remote(object):
|
||||
self.name = name
|
||||
self.url = self._Get('url')
|
||||
self.review = self._Get('review')
|
||||
self.projectname = self._Get('projectname')
|
||||
self.fetch = map(lambda x: RefSpec.FromString(x),
|
||||
self._Get('fetch', all=True))
|
||||
self._review_protocol = None
|
||||
|
||||
@property
|
||||
def ReviewProtocol(self):
|
||||
if self._review_protocol is None:
|
||||
if self.review is None:
|
||||
return None
|
||||
|
||||
u = self.review
|
||||
if not u.startswith('http:') and not u.startswith('https:'):
|
||||
u = 'http://%s' % u
|
||||
if u.endswith('/Gerrit'):
|
||||
u = u[:len(u) - len('/Gerrit')]
|
||||
if not u.endswith('/ssh_info'):
|
||||
if not u.endswith('/'):
|
||||
u += '/'
|
||||
u += 'ssh_info'
|
||||
|
||||
if u in REVIEW_CACHE:
|
||||
info = REVIEW_CACHE[u]
|
||||
self._review_protocol = info[0]
|
||||
self._review_host = info[1]
|
||||
self._review_port = info[2]
|
||||
else:
|
||||
try:
|
||||
info = urlopen(u).read()
|
||||
if info == 'NOT_AVAILABLE':
|
||||
raise UploadError('Upload over ssh unavailable')
|
||||
if '<' in info:
|
||||
# Assume the server gave us some sort of HTML
|
||||
# response back, like maybe a login page.
|
||||
#
|
||||
raise UploadError('Cannot read %s:\n%s' % (u, info))
|
||||
|
||||
self._review_protocol = 'ssh'
|
||||
self._review_host = info.split(" ")[0]
|
||||
self._review_port = info.split(" ")[1]
|
||||
except HTTPError, e:
|
||||
if e.code == 404:
|
||||
self._review_protocol = 'http-post'
|
||||
self._review_host = None
|
||||
self._review_port = None
|
||||
else:
|
||||
raise UploadError('Cannot guess Gerrit version')
|
||||
|
||||
REVIEW_CACHE[u] = (
|
||||
self._review_protocol,
|
||||
self._review_host,
|
||||
self._review_port)
|
||||
return self._review_protocol
|
||||
|
||||
def SshReviewUrl(self, userEmail):
|
||||
if self.ReviewProtocol != 'ssh':
|
||||
return None
|
||||
return 'ssh://%s@%s:%s/%s' % (
|
||||
userEmail.split("@")[0],
|
||||
self._review_host,
|
||||
self._review_port,
|
||||
self.projectname)
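# --- illustrative sketch (not part of the original git_config.py) ---
# ReviewProtocol above works by probing <review-url>/ssh_info once per URL and
# caching the answer in REVIEW_CACHE: "NOT_AVAILABLE" means ssh upload is
# disabled, an HTML-looking body means we did not reach Gerrit, an HTTP 404
# falls back to HTTP POST uploads, and otherwise the body is
# "<hostname> <port>".  A stripped-down probe (probe_ssh_info is a
# hypothetical helper):

def probe_ssh_info(base_url):
  from urllib2 import urlopen, HTTPError
  try:
    info = urlopen(base_url + '/ssh_info').read()
  except HTTPError, e:
    if e.code == 404:
      return ('http-post', None, None)
    raise
  host, port = info.split(" ")[0], info.split(" ")[1]
  return ('ssh', host, port)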
|
||||
|
||||
def ToLocal(self, rev):
|
||||
"""Convert a remote revision string to something we have locally.
|
||||
@ -281,18 +348,21 @@ class Remote(object):
|
||||
return True
|
||||
return False
|
||||
|
||||
def ResetFetch(self):
|
||||
def ResetFetch(self, mirror=False):
|
||||
"""Set the fetch refspec to its default value.
|
||||
"""
|
||||
self.fetch = [RefSpec(True,
|
||||
'refs/heads/*',
|
||||
'refs/remotes/%s/*' % self.name)]
|
||||
if mirror:
|
||||
dst = 'refs/heads/*'
|
||||
else:
|
||||
dst = 'refs/remotes/%s/*' % self.name
|
||||
self.fetch = [RefSpec(True, 'refs/heads/*', dst)]
|
||||
|
||||
def Save(self):
|
||||
"""Save this remote to the configuration.
|
||||
"""
|
||||
self._Set('url', self.url)
|
||||
self._Set('review', self.review)
|
||||
self._Set('projectname', self.projectname)
|
||||
self._Set('fetch', map(lambda x: str(x), self.fetch))
|
||||
|
||||
def _Set(self, key, value):
|
||||
|
44
hooks/pre-auto-gc
Executable file
@ -0,0 +1,44 @@
|
||||
#!/bin/sh
#
# An example hook script to verify if you are on battery, in case you
# are running Linux or OS X. Called by git-gc --auto with no arguments.
# The hook should exit with non-zero status after issuing an appropriate
# message if it wants to stop the auto repacking.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

if test -x /sbin/on_ac_power && /sbin/on_ac_power
then
	exit 0
elif test "$(cat /sys/class/power_supply/AC/online 2>/dev/null)" = 1
then
	exit 0
elif grep -q 'on-line' /proc/acpi/ac_adapter/AC/state 2>/dev/null
then
	exit 0
elif grep -q '0x01$' /proc/apm 2>/dev/null
then
	exit 0
elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
then
	exit 0
elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
	grep -q "Currently drawing from 'AC Power'"
then
	exit 0
fi

echo "Auto packing deferred; not on AC"
exit 1
|
422
import_ext.py
@ -1,422 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import random
|
||||
import stat
|
||||
import sys
|
||||
import urllib2
|
||||
import StringIO
|
||||
|
||||
from error import GitError, ImportError
|
||||
from git_command import GitCommand
|
||||
|
||||
class ImportExternal(object):
|
||||
"""Imports a single revision from a non-git data source.
|
||||
Suitable for use to import a tar or zip based snapshot.
|
||||
"""
|
||||
def __init__(self):
|
||||
self._marks = 0
|
||||
self._files = {}
|
||||
self._tempref = 'refs/repo-external/import'
|
||||
|
||||
self._urls = []
|
||||
self._remap = []
|
||||
self.parent = None
|
||||
self._user_name = 'Upstream'
|
||||
self._user_email = 'upstream-import@none'
|
||||
self._user_when = 1000000
|
||||
|
||||
self.commit = None
|
||||
|
||||
def Clone(self):
|
||||
r = self.__class__()
|
||||
|
||||
r.project = self.project
|
||||
for u in self._urls:
|
||||
r._urls.append(u)
|
||||
for p in self._remap:
|
||||
r._remap.append(_PathMap(r, p._old, p._new))
|
||||
|
||||
return r
|
||||
|
||||
def SetProject(self, project):
|
||||
self.project = project
|
||||
|
||||
def SetVersion(self, version):
|
||||
self.version = version
|
||||
|
||||
def AddUrl(self, url):
|
||||
self._urls.append(url)
|
||||
|
||||
def SetParent(self, commit_hash):
|
||||
self.parent = commit_hash
|
||||
|
||||
def SetCommit(self, commit_hash):
|
||||
self.commit = commit_hash
|
||||
|
||||
def RemapPath(self, old, new, replace_version=True):
|
||||
self._remap.append(_PathMap(self, old, new))
|
||||
|
||||
@property
|
||||
def TagName(self):
|
||||
v = ''
|
||||
for c in self.version:
|
||||
if c >= '0' and c <= '9':
|
||||
v += c
|
||||
elif c >= 'A' and c <= 'Z':
|
||||
v += c
|
||||
elif c >= 'a' and c <= 'z':
|
||||
v += c
|
||||
elif c in ('-', '_', '.', '/', '+', '@'):
|
||||
v += c
|
||||
return 'upstream/%s' % v
|
||||
|
||||
@property
|
||||
def PackageName(self):
|
||||
n = self.project.name
|
||||
if n.startswith('platform/'):
|
||||
# This was not my finest moment...
|
||||
#
|
||||
n = n[len('platform/'):]
|
||||
return n
|
||||
|
||||
def Import(self):
|
||||
self._need_graft = False
|
||||
if self.parent:
|
||||
try:
|
||||
self.project.bare_git.cat_file('-e', self.parent)
|
||||
except GitError:
|
||||
self._need_graft = True
|
||||
|
||||
gfi = GitCommand(self.project,
|
||||
['fast-import', '--force', '--quiet'],
|
||||
bare = True,
|
||||
provide_stdin = True)
|
||||
try:
|
||||
self._out = gfi.stdin
|
||||
|
||||
try:
|
||||
self._UnpackFiles()
|
||||
self._MakeCommit()
|
||||
self._out.flush()
|
||||
finally:
|
||||
rc = gfi.Wait()
|
||||
if rc != 0:
|
||||
raise ImportError('fast-import failed')
|
||||
|
||||
if self._need_graft:
|
||||
id = self._GraftCommit()
|
||||
else:
|
||||
id = self.project.bare_git.rev_parse('%s^0' % self._tempref)
|
||||
|
||||
if self.commit and self.commit != id:
|
||||
raise ImportError('checksum mismatch: %s expected,'
|
||||
' %s imported' % (self.commit, id))
|
||||
|
||||
self._MakeTag(id)
|
||||
return id
|
||||
finally:
|
||||
try:
|
||||
self.project.bare_git.DeleteRef(self._tempref)
|
||||
except GitError:
|
||||
pass
|
||||
|
||||
def _PickUrl(self, failed):
|
||||
u = map(lambda x: x.replace('%version%', self.version), self._urls)
|
||||
for f in failed:
|
||||
if f in u:
|
||||
u.remove(f)
|
||||
if len(u) == 0:
|
||||
return None
|
||||
return random.choice(u)
|
||||
|
||||
def _OpenUrl(self):
|
||||
failed = {}
|
||||
while True:
|
||||
url = self._PickUrl(failed.keys())
|
||||
if url is None:
|
||||
why = 'Cannot download %s' % self.project.name
|
||||
|
||||
if failed:
|
||||
why += ': one or more mirrors are down\n'
|
||||
bad_urls = list(failed.keys())
|
||||
bad_urls.sort()
|
||||
for url in bad_urls:
|
||||
why += ' %s: %s\n' % (url, failed[url])
|
||||
else:
|
||||
why += ': no mirror URLs'
|
||||
raise ImportError(why)
|
||||
|
||||
print >>sys.stderr, "Getting %s ..." % url
|
||||
try:
|
||||
return urllib2.urlopen(url), url
|
||||
except urllib2.HTTPError, e:
|
||||
failed[url] = e.code
|
||||
except urllib2.URLError, e:
|
||||
failed[url] = e.reason[1]
|
||||
except OSError, e:
|
||||
failed[url] = e.strerror
|
||||
|
||||
def _UnpackFiles(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def _NextMark(self):
|
||||
self._marks += 1
|
||||
return self._marks
|
||||
|
||||
def _UnpackOneFile(self, mode, size, name, fd):
|
||||
if stat.S_ISDIR(mode): # directory
|
||||
return
|
||||
else:
|
||||
mode = self._CleanMode(mode, name)
|
||||
|
||||
old_name = name
|
||||
name = self._CleanName(name)
|
||||
|
||||
if stat.S_ISLNK(mode) and self._remap:
|
||||
# The link is relative to the old_name, and may need to
|
||||
# be rewritten according to our remap rules if it goes
|
||||
# up high enough in the tree structure.
|
||||
#
|
||||
dest = self._RewriteLink(fd.read(size), old_name, name)
|
||||
fd = StringIO.StringIO(dest)
|
||||
size = len(dest)
|
||||
|
||||
fi = _File(mode, name, self._NextMark())
|
||||
|
||||
self._out.write('blob\n')
|
||||
self._out.write('mark :%d\n' % fi.mark)
|
||||
self._out.write('data %d\n' % size)
|
||||
while size > 0:
|
||||
n = min(2048, size)
|
||||
self._out.write(fd.read(n))
|
||||
size -= n
|
||||
self._out.write('\n')
|
||||
self._files[fi.name] = fi
|
||||
|
||||
def _SetFileMode(self, name, mode):
|
||||
if not stat.S_ISDIR(mode):
|
||||
mode = self._CleanMode(mode, name)
|
||||
name = self._CleanName(name)
|
||||
try:
|
||||
fi = self._files[name]
|
||||
except KeyError:
|
||||
raise ImportError('file %s was not unpacked' % name)
|
||||
fi.mode = mode
|
||||
|
||||
def _RewriteLink(self, dest, relto_old, relto_new):
|
||||
# Drop the last component of the symlink itself
# as the dest is relative to the directory it's in.
|
||||
#
|
||||
relto_old = _TrimPath(relto_old)
|
||||
relto_new = _TrimPath(relto_new)
|
||||
|
||||
# Resolve the link to be absolute from the top of
|
||||
# the archive, so we can remap its destination.
|
||||
#
|
||||
while dest.find('/./') >= 0 or dest.find('//') >= 0:
|
||||
dest = dest.replace('/./', '/')
|
||||
dest = dest.replace('//', '/')
|
||||
|
||||
if dest.startswith('../') or dest.find('/../') > 0:
|
||||
dest = _FoldPath('%s/%s' % (relto_old, dest))
|
||||
|
||||
for pm in self._remap:
|
||||
if pm.Matches(dest):
|
||||
dest = pm.Apply(dest)
|
||||
break
|
||||
|
||||
dest, relto_new = _StripCommonPrefix(dest, relto_new)
|
||||
while relto_new:
|
||||
i = relto_new.find('/')
|
||||
if i > 0:
|
||||
relto_new = relto_new[i + 1:]
|
||||
else:
|
||||
relto_new = ''
|
||||
dest = '../' + dest
|
||||
return dest
|
||||
|
||||
def _CleanMode(self, mode, name):
|
||||
if stat.S_ISREG(mode): # regular file
|
||||
if (mode & 0111) == 0:
|
||||
return 0644
|
||||
else:
|
||||
return 0755
|
||||
elif stat.S_ISLNK(mode): # symlink
|
||||
return stat.S_IFLNK
|
||||
else:
|
||||
raise ImportError('invalid mode %o in %s' % (mode, name))
|
||||
|
||||
def _CleanName(self, name):
|
||||
old_name = name
|
||||
for pm in self._remap:
|
||||
if pm.Matches(name):
|
||||
name = pm.Apply(name)
|
||||
break
|
||||
while name.startswith('/'):
|
||||
name = name[1:]
|
||||
if not name:
|
||||
raise ImportError('path %s is empty after remap' % old_name)
|
||||
if name.find('/./') >= 0 or name.find('/../') >= 0:
|
||||
raise ImportError('path %s contains relative parts' % name)
|
||||
return name
|
||||
|
||||
def _MakeCommit(self):
|
||||
msg = '%s %s\n' % (self.PackageName, self.version)
|
||||
|
||||
self._out.write('commit %s\n' % self._tempref)
|
||||
self._out.write('committer %s <%s> %d +0000\n' % (
|
||||
self._user_name,
|
||||
self._user_email,
|
||||
self._user_when))
|
||||
self._out.write('data %d\n' % len(msg))
|
||||
self._out.write(msg)
|
||||
self._out.write('\n')
|
||||
if self.parent and not self._need_graft:
|
||||
self._out.write('from %s^0\n' % self.parent)
|
||||
self._out.write('deleteall\n')
|
||||
|
||||
for f in self._files.values():
|
||||
self._out.write('M %o :%d %s\n' % (f.mode, f.mark, f.name))
|
||||
self._out.write('\n')
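# --- illustrative note (not part of the original import_ext.py) ---
# _UnpackOneFile() and _MakeCommit() together feed `git fast-import` a stream
# shaped roughly like this (marks, path, package name and version are made up
# for the example):
#
#   blob
#   mark :1
#   data 11
#   hello world
#
#   commit refs/repo-external/import
#   committer Upstream <upstream-import@none> 1000000 +0000
#   data 14
#   pkgname 1.0.0
#
#   deleteall
#   M 644 :1 src/hello.txt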
|
||||
|
||||
def _GraftCommit(self):
|
||||
raw = self.project.bare_git.cat_file('commit', self._tempref)
|
||||
raw = raw.split("\n")
|
||||
while raw[1].startswith('parent '):
|
||||
del raw[1]
|
||||
raw.insert(1, 'parent %s' % self.parent)
|
||||
id = self._WriteObject('commit', "\n".join(raw))
|
||||
|
||||
graft_file = os.path.join(self.project.gitdir, 'info/grafts')
|
||||
if os.path.exists(graft_file):
|
||||
graft_list = open(graft_file, 'rb').read().split("\n")
|
||||
if graft_list and graft_list[-1] == '':
|
||||
del graft_list[-1]
|
||||
else:
|
||||
graft_list = []
|
||||
|
||||
exists = False
|
||||
for line in graft_list:
|
||||
if line == id:
|
||||
exists = True
|
||||
break
|
||||
|
||||
if not exists:
|
||||
graft_list.append(id)
|
||||
graft_list.append('')
|
||||
fd = open(graft_file, 'wb')
|
||||
fd.write("\n".join(graft_list))
|
||||
fd.close()
|
||||
|
||||
return id
|
||||
|
||||
def _MakeTag(self, id):
|
||||
name = self.TagName
|
||||
|
||||
raw = []
|
||||
raw.append('object %s' % id)
|
||||
raw.append('type commit')
|
||||
raw.append('tag %s' % name)
|
||||
raw.append('tagger %s <%s> %d +0000' % (
|
||||
self._user_name,
|
||||
self._user_email,
|
||||
self._user_when))
|
||||
raw.append('')
|
||||
raw.append('%s %s\n' % (self.PackageName, self.version))
|
||||
|
||||
tagid = self._WriteObject('tag', "\n".join(raw))
|
||||
self.project.bare_git.UpdateRef('refs/tags/%s' % name, tagid)
|
||||
|
||||
def _WriteObject(self, type, data):
|
||||
wo = GitCommand(self.project,
|
||||
['hash-object', '-t', type, '-w', '--stdin'],
|
||||
bare = True,
|
||||
provide_stdin = True,
|
||||
capture_stdout = True,
|
||||
capture_stderr = True)
|
||||
wo.stdin.write(data)
|
||||
if wo.Wait() != 0:
|
||||
raise GitError('cannot create %s from (%s)' % (type, data))
|
||||
return wo.stdout[:-1]
|
||||
|
||||
|
||||
def _TrimPath(path):
|
||||
i = path.rfind('/')
if i > 0:
  return path[0:i]
return ''
|
||||
|
||||
def _StripCommonPrefix(a, b):
|
||||
while True:
|
||||
ai = a.find('/')
|
||||
bi = b.find('/')
|
||||
if ai > 0 and bi > 0 and a[0:ai] == b[0:bi]:
|
||||
a = a[ai + 1:]
|
||||
b = b[bi + 1:]
|
||||
else:
|
||||
break
|
||||
return a, b
|
||||
|
||||
def _FoldPath(path):
|
||||
while True:
|
||||
if path.startswith('../'):
|
||||
return path
|
||||
|
||||
i = path.find('/../')
|
||||
if i <= 0:
|
||||
if path.startswith('/'):
|
||||
return path[1:]
|
||||
return path
|
||||
|
||||
lhs = path[0:i]
|
||||
rhs = path[i + 4:]
|
||||
|
||||
i = lhs.rfind('/')
|
||||
if i > 0:
|
||||
path = lhs[0:i + 1] + rhs
|
||||
else:
|
||||
path = rhs
|
||||
|
||||
class _File(object):
|
||||
def __init__(self, mode, name, mark):
|
||||
self.mode = mode
|
||||
self.name = name
|
||||
self.mark = mark
|
||||
|
||||
|
||||
class _PathMap(object):
|
||||
def __init__(self, imp, old, new):
|
||||
self._imp = imp
|
||||
self._old = old
|
||||
self._new = new
|
||||
|
||||
def _r(self, p):
|
||||
return p.replace('%version%', self._imp.version)
|
||||
|
||||
@property
|
||||
def old(self):
|
||||
return self._r(self._old)
|
||||
|
||||
@property
|
||||
def new(self):
|
||||
return self._r(self._new)
|
||||
|
||||
def Matches(self, name):
|
||||
return name.startswith(self.old)
|
||||
|
||||
def Apply(self, name):
|
||||
return self.new + name[len(self.old):]
|
206
import_tar.py
@ -1,206 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import bz2
|
||||
import stat
|
||||
import tarfile
|
||||
import zlib
|
||||
import StringIO
|
||||
|
||||
from import_ext import ImportExternal
|
||||
from error import ImportError
|
||||
|
||||
class ImportTar(ImportExternal):
|
||||
"""Streams a (optionally compressed) tar file from the network
|
||||
directly into a Project's Git repository.
|
||||
"""
|
||||
@classmethod
|
||||
def CanAccept(cls, url):
|
||||
"""Can this importer read and unpack the data stored at url?
|
||||
"""
|
||||
if url.endswith('.tar.gz') or url.endswith('.tgz'):
|
||||
return True
|
||||
if url.endswith('.tar.bz2'):
|
||||
return True
|
||||
if url.endswith('.tar'):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _UnpackFiles(self):
|
||||
url_fd, url = self._OpenUrl()
|
||||
try:
|
||||
if url.endswith('.tar.gz') or url.endswith('.tgz'):
|
||||
tar_fd = _Gzip(url_fd)
|
||||
elif url.endswith('.tar.bz2'):
|
||||
tar_fd = _Bzip2(url_fd)
|
||||
elif url.endswith('.tar'):
|
||||
tar_fd = _Raw(url_fd)
|
||||
else:
|
||||
raise ImportError('non-tar file extension: %s' % url)
|
||||
|
||||
try:
|
||||
tar = tarfile.TarFile(name = url,
|
||||
mode = 'r',
|
||||
fileobj = tar_fd)
|
||||
try:
|
||||
for entry in tar:
|
||||
mode = entry.mode
|
||||
|
||||
if (mode & 0170000) == 0:
|
||||
if entry.isdir():
|
||||
mode |= stat.S_IFDIR
|
||||
elif entry.isfile() or entry.islnk(): # hard links as files
|
||||
mode |= stat.S_IFREG
|
||||
elif entry.issym():
|
||||
mode |= stat.S_IFLNK
|
||||
|
||||
if stat.S_ISLNK(mode): # symlink
|
||||
data_fd = StringIO.StringIO(entry.linkname)
|
||||
data_sz = len(entry.linkname)
|
||||
elif stat.S_ISDIR(mode): # directory
|
||||
data_fd = StringIO.StringIO('')
|
||||
data_sz = 0
|
||||
else:
|
||||
data_fd = tar.extractfile(entry)
|
||||
data_sz = entry.size
|
||||
|
||||
self._UnpackOneFile(mode, data_sz, entry.name, data_fd)
|
||||
finally:
|
||||
tar.close()
|
||||
finally:
|
||||
tar_fd.close()
|
||||
finally:
|
||||
url_fd.close()
|
||||
|
||||
|
||||
|
||||
class _DecompressStream(object):
|
||||
"""file like object to decompress a tar stream
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
self._fd = fd
|
||||
self._pos = 0
|
||||
self._buf = None
|
||||
|
||||
def tell(self):
|
||||
return self._pos
|
||||
|
||||
def seek(self, offset):
|
||||
d = offset - self._pos
|
||||
if d > 0:
|
||||
self.read(d)
|
||||
elif d == 0:
|
||||
pass
|
||||
else:
|
||||
raise NotImplementedError, 'seek backwards'
|
||||
|
||||
def close(self):
|
||||
self._fd = None
|
||||
|
||||
def read(self, size = -1):
|
||||
if not self._fd:
|
||||
raise EOFError, 'Reached EOF'
|
||||
|
||||
r = []
|
||||
try:
|
||||
if size >= 0:
|
||||
self._ReadChunk(r, size)
|
||||
else:
|
||||
while True:
|
||||
self._ReadChunk(r, 2048)
|
||||
except EOFError:
|
||||
pass
|
||||
|
||||
if len(r) == 1:
|
||||
r = r[0]
|
||||
else:
|
||||
r = ''.join(r)
|
||||
self._pos += len(r)
|
||||
return r
|
||||
|
||||
def _ReadChunk(self, r, size):
|
||||
b = self._buf
|
||||
try:
|
||||
while size > 0:
|
||||
if b is None or len(b) == 0:
|
||||
b = self._Decompress(self._fd.read(2048))
|
||||
continue
|
||||
|
||||
use = min(size, len(b))
|
||||
r.append(b[:use])
|
||||
b = b[use:]
|
||||
size -= use
|
||||
finally:
|
||||
self._buf = b
|
||||
|
||||
def _Decompress(self, b):
|
||||
raise NotImplementedError, '_Decompress'
|
||||
|
||||
|
||||
class _Raw(_DecompressStream):
|
||||
"""file like object for an uncompressed stream
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
_DecompressStream.__init__(self, fd)
|
||||
|
||||
def _Decompress(self, b):
|
||||
return b
|
||||
|
||||
|
||||
class _Bzip2(_DecompressStream):
|
||||
"""file like object to decompress a .bz2 stream
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
_DecompressStream.__init__(self, fd)
|
||||
self._bz = bz2.BZ2Decompressor()
|
||||
|
||||
def _Decompress(self, b):
|
||||
return self._bz.decompress(b)
|
||||
|
||||
|
||||
_FHCRC, _FEXTRA, _FNAME, _FCOMMENT = 2, 4, 8, 16
|
||||
class _Gzip(_DecompressStream):
|
||||
"""file like object to decompress a .gz stream
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
_DecompressStream.__init__(self, fd)
|
||||
self._z = zlib.decompressobj(-zlib.MAX_WBITS)
|
||||
|
||||
magic = fd.read(2)
|
||||
if magic != '\037\213':
|
||||
raise IOError, 'Not a gzipped file'
|
||||
|
||||
method = ord(fd.read(1))
|
||||
if method != 8:
|
||||
raise IOError, 'Unknown compression method'
|
||||
|
||||
flag = ord(fd.read(1))
|
||||
fd.read(6)
|
||||
|
||||
if flag & _FEXTRA:
|
||||
xlen = ord(fd.read(1))
|
||||
xlen += 256 * ord(fd.read(1))
|
||||
fd.read(xlen)
|
||||
if flag & _FNAME:
|
||||
while fd.read(1) != '\0':
|
||||
pass
|
||||
if flag & _FCOMMENT:
|
||||
while fd.read(1) != '\0':
|
||||
pass
|
||||
if flag & _FHCRC:
|
||||
fd.read(2)
|
||||
|
||||
def _Decompress(self, b):
|
||||
return self._z.decompress(b)
|
345
import_zip.py
@ -1,345 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import stat
|
||||
import struct
|
||||
import zlib
|
||||
import cStringIO
|
||||
|
||||
from import_ext import ImportExternal
|
||||
from error import ImportError
|
||||
|
||||
class ImportZip(ImportExternal):
|
||||
"""Streams a zip file from the network directly into a Project's
|
||||
Git repository.
|
||||
"""
|
||||
@classmethod
|
||||
def CanAccept(cls, url):
|
||||
"""Can this importer read and unpack the data stored at url?
|
||||
"""
|
||||
if url.endswith('.zip') or url.endswith('.jar'):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _UnpackFiles(self):
|
||||
url_fd, url = self._OpenUrl()
|
||||
try:
|
||||
if not self.__class__.CanAccept(url):
|
||||
raise ImportError('non-zip file extension: %s' % url)
|
||||
|
||||
zip = _ZipFile(url_fd)
|
||||
for entry in zip.FileRecords():
|
||||
data = zip.Open(entry).read()
|
||||
sz = len(data)
|
||||
|
||||
if data and _SafeCRLF(data):
|
||||
data = data.replace('\r\n', '\n')
|
||||
sz = len(data)
|
||||
|
||||
fd = cStringIO.StringIO(data)
|
||||
self._UnpackOneFile(entry.mode, sz, entry.name, fd)
|
||||
zip.Close(entry)
|
||||
|
||||
for entry in zip.CentralDirectory():
|
||||
self._SetFileMode(entry.name, entry.mode)
|
||||
|
||||
zip.CheckTail()
|
||||
finally:
|
||||
url_fd.close()
|
||||
|
||||
|
||||
def _SafeCRLF(data):
|
||||
"""Is it reasonably safe to perform a CRLF->LF conversion?
|
||||
|
||||
If the stream contains a NUL byte it is likely binary,
|
||||
and thus a CRLF->LF conversion may damage the stream.
|
||||
|
||||
If the only NUL is in the last position of the stream, and the
rest of the data would otherwise convert cleanly, we do the
CRLF conversion anyway. At least one source ZIP file has this
structure in its source code.

If every occurrence of CR and LF is paired up as a CRLF pair
then the conversion is safely bi-directional:
s/\r\n/\n/g and s/\n/\r\n/g convert between them.
|
||||
"""
|
||||
nul = data.find('\0')
|
||||
if 0 <= nul and nul < (len(data) - 1):
|
||||
return False
|
||||
|
||||
n_lf = 0
|
||||
last = 0
|
||||
while True:
|
||||
lf = data.find('\n', last)
|
||||
if lf < 0:
|
||||
break
|
||||
if lf == 0 or data[lf - 1] != '\r':
|
||||
return False
|
||||
last = lf + 1
|
||||
n_lf += 1
|
||||
return n_lf > 0
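# --- illustrative example (not part of the original import_zip.py) ---
# A few cases of the heuristic above:
assert _SafeCRLF('int main();\r\n{\r\n}\r\n')     # every LF is part of a CRLF
assert not _SafeCRLF('line one\nline two\r\n')    # bare LF: not reversible
assert not _SafeCRLF('PK\000binary')              # NUL byte: likely binary
assert not _SafeCRLF('no line endings here')      # no LFs at all: nothing to convert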
|
||||
|
||||
class _ZipFile(object):
|
||||
"""Streaming iterator to parse a zip file on the fly.
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
self._fd = _UngetStream(fd)
|
||||
|
||||
def FileRecords(self):
|
||||
return _FileIter(self._fd)
|
||||
|
||||
def CentralDirectory(self):
|
||||
return _CentIter(self._fd)
|
||||
|
||||
def CheckTail(self):
|
||||
type_buf = self._fd.read(4)
|
||||
type = struct.unpack('<I', type_buf)[0]
|
||||
if type != 0x06054b50: # end of central directory
|
||||
raise ImportError('zip record %x unsupported' % type)
|
||||
|
||||
def Open(self, entry):
|
||||
if entry.is_compressed:
|
||||
return _InflateStream(self._fd)
|
||||
else:
|
||||
if entry.has_trailer:
|
||||
raise ImportError('unable to extract streamed zip')
|
||||
return _FixedLengthStream(self._fd, entry.uncompressed_size)
|
||||
|
||||
def Close(self, entry):
|
||||
if entry.has_trailer:
|
||||
type = struct.unpack('<I', self._fd.read(4))[0]
|
||||
if type == 0x08074b50:
|
||||
# Not a formal type marker, but commonly seen in zips
|
||||
# as the data descriptor signature.
|
||||
#
|
||||
struct.unpack('<3I', self._fd.read(12))
|
||||
else:
|
||||
# No signature for the data descriptor, so read the
|
||||
# remaining fields out of the stream
|
||||
#
|
||||
self._fd.read(8)
|
||||
|
||||
|
||||
class _FileIter(object):
|
||||
def __init__(self, fd):
|
||||
self._fd = fd
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
fd = self._fd
|
||||
|
||||
type_buf = fd.read(4)
|
||||
type = struct.unpack('<I', type_buf)[0]
|
||||
|
||||
if type != 0x04034b50: # local file header
|
||||
fd.unread(type_buf)
|
||||
raise StopIteration()
|
||||
|
||||
rec = _FileHeader(fd.read(26))
|
||||
rec.name = fd.read(rec.name_len)
|
||||
fd.read(rec.extra_len)
|
||||
|
||||
if rec.name.endswith('/'):
|
||||
rec.name = rec.name[:-1]
|
||||
rec.mode = stat.S_IFDIR | 0777
|
||||
return rec
|
||||
|
||||
|
||||
class _FileHeader(object):
|
||||
"""Information about a single file in the archive.
|
||||
0 version needed to extract 2 bytes
|
||||
1 general purpose bit flag 2 bytes
|
||||
2 compression method 2 bytes
|
||||
3 last mod file time 2 bytes
|
||||
4 last mod file date 2 bytes
|
||||
5 crc-32 4 bytes
|
||||
6 compressed size 4 bytes
|
||||
7 uncompressed size 4 bytes
|
||||
8 file name length 2 bytes
|
||||
9 extra field length 2 bytes
|
||||
"""
|
||||
def __init__(self, raw_bin):
|
||||
rec = struct.unpack('<5H3I2H', raw_bin)
|
||||
|
||||
if rec[2] == 8:
|
||||
self.is_compressed = True
|
||||
elif rec[2] == 0:
|
||||
self.is_compressed = False
|
||||
else:
|
||||
raise ImportError('unrecognized compression format')
|
||||
|
||||
if rec[1] & (1 << 3):
|
||||
self.has_trailer = True
|
||||
else:
|
||||
self.has_trailer = False
|
||||
|
||||
self.compressed_size = rec[6]
|
||||
self.uncompressed_size = rec[7]
|
||||
self.name_len = rec[8]
|
||||
self.extra_len = rec[9]
|
||||
self.mode = stat.S_IFREG | 0644
|
||||
|
||||
|
||||
class _CentIter(object):
|
||||
def __init__(self, fd):
|
||||
self._fd = fd
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
fd = self._fd
|
||||
|
||||
type_buf = fd.read(4)
|
||||
type = struct.unpack('<I', type_buf)[0]
|
||||
|
||||
if type != 0x02014b50: # central directory
|
||||
fd.unread(type_buf)
|
||||
raise StopIteration()
|
||||
|
||||
rec = _CentHeader(fd.read(42))
|
||||
rec.name = fd.read(rec.name_len)
|
||||
fd.read(rec.extra_len)
|
||||
fd.read(rec.comment_len)
|
||||
|
||||
if rec.name.endswith('/'):
|
||||
rec.name = rec.name[:-1]
|
||||
rec.mode = stat.S_IFDIR | 0777
|
||||
return rec
|
||||
|
||||
|
||||
class _CentHeader(object):
|
||||
"""Information about a single file in the archive.
|
||||
0 version made by 2 bytes
|
||||
1 version needed to extract 2 bytes
|
||||
2 general purpose bit flag 2 bytes
|
||||
3 compression method 2 bytes
|
||||
4 last mod file time 2 bytes
|
||||
5 last mod file date 2 bytes
|
||||
6 crc-32 4 bytes
|
||||
7 compressed size 4 bytes
|
||||
8 uncompressed size 4 bytes
|
||||
9 file name length 2 bytes
|
||||
10 extra field length 2 bytes
|
||||
11 file comment length 2 bytes
|
||||
12 disk number start 2 bytes
|
||||
13 internal file attributes 2 bytes
|
||||
14 external file attributes 4 bytes
|
||||
15 relative offset of local header 4 bytes
|
||||
"""
|
||||
def __init__(self, raw_bin):
|
||||
rec = struct.unpack('<6H3I5H2I', raw_bin)
|
||||
self.name_len = rec[9]
|
||||
self.extra_len = rec[10]
|
||||
self.comment_len = rec[11]
|
||||
|
||||
if (rec[0] & 0xff00) == 0x0300: # UNIX
|
||||
self.mode = rec[14] >> 16
|
||||
else:
|
||||
self.mode = stat.S_IFREG | 0644
|
||||
|
||||
|
||||
class _UngetStream(object):
|
||||
"""File like object to read and rewind a stream.
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
self._fd = fd
|
||||
self._buf = None
|
||||
|
||||
def read(self, size = -1):
|
||||
r = []
|
||||
try:
|
||||
if size >= 0:
|
||||
self._ReadChunk(r, size)
|
||||
else:
|
||||
while True:
|
||||
self._ReadChunk(r, 2048)
|
||||
except EOFError:
|
||||
pass
|
||||
|
||||
if len(r) == 1:
|
||||
return r[0]
|
||||
return ''.join(r)
|
||||
|
||||
def unread(self, buf):
|
||||
b = self._buf
|
||||
if b is None or len(b) == 0:
|
||||
self._buf = buf
|
||||
else:
|
||||
self._buf = buf + b
|
||||
|
||||
def _ReadChunk(self, r, size):
|
||||
b = self._buf
|
||||
try:
|
||||
while size > 0:
|
||||
if b is None or len(b) == 0:
|
||||
b = self._Inflate(self._fd.read(2048))
|
||||
if not b:
|
||||
raise EOFError()
|
||||
continue
|
||||
|
||||
use = min(size, len(b))
|
||||
r.append(b[:use])
|
||||
b = b[use:]
|
||||
size -= use
|
||||
finally:
|
||||
self._buf = b
|
||||
|
||||
def _Inflate(self, b):
|
||||
return b
|
||||
|
||||
|
||||
class _FixedLengthStream(_UngetStream):
|
||||
"""File like object to read a fixed length stream.
|
||||
"""
|
||||
def __init__(self, fd, have):
|
||||
_UngetStream.__init__(self, fd)
|
||||
self._have = have
|
||||
|
||||
def _Inflate(self, b):
|
||||
n = self._have
|
||||
if n == 0:
|
||||
self._fd.unread(b)
|
||||
return None
|
||||
|
||||
if len(b) > n:
|
||||
self._fd.unread(b[n:])
|
||||
b = b[:n]
|
||||
self._have -= len(b)
|
||||
return b
|
||||
|
||||
|
||||
class _InflateStream(_UngetStream):
|
||||
"""Inflates the stream as it reads input.
|
||||
"""
|
||||
def __init__(self, fd):
|
||||
_UngetStream.__init__(self, fd)
|
||||
self._z = zlib.decompressobj(-zlib.MAX_WBITS)
|
||||
|
||||
def _Inflate(self, b):
|
||||
z = self._z
|
||||
if not z:
|
||||
self._fd.unread(b)
|
||||
return None
|
||||
|
||||
b = z.decompress(b)
|
||||
if z.unconsumed_tail != '':
|
||||
self._fd.unread(z.unconsumed_tail)
|
||||
elif z.unused_data != '':
|
||||
self._fd.unread(z.unused_data)
|
||||
self._z = None
|
||||
return b
|
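Passing -zlib.MAX_WBITS asks zlib for a raw DEFLATE stream with no zlib header, which is how entry data is stored inside a ZIP archive. A self-contained sketch, with the payload invented for illustration:

```python
import zlib
from StringIO import StringIO

co = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)   # emit raw deflate data
blob = co.compress('hello, zip entry') + co.flush()
fd = _InflateStream(_UngetStream(StringIO(blob)))
assert fd.read() == 'hello, zip entry'
```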
40
main.py
@ -27,7 +27,12 @@ import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from command import InteractiveCommand, PagedCommand
|
||||
import git_command
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from command import PagedCommand
|
||||
from editor import Editor
|
||||
from error import ManifestInvalidRevisionError
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoChangedException
|
||||
from manifest import Manifest
|
||||
@ -44,6 +49,12 @@ global_options.add_option('-p', '--paginate',
|
||||
global_options.add_option('--no-pager',
|
||||
dest='no_pager', action='store_true',
|
||||
help='disable the pager')
|
||||
global_options.add_option('--trace',
|
||||
dest='trace', action='store_true',
|
||||
help='trace git command execution')
|
||||
global_options.add_option('--version',
|
||||
dest='show_version', action='store_true',
|
||||
help='display this version of repo')
|
||||
|
||||
class _Repo(object):
|
||||
def __init__(self, repodir):
|
||||
@ -67,6 +78,15 @@ class _Repo(object):
|
||||
argv = []
|
||||
gopts, gargs = global_options.parse_args(glob)
|
||||
|
||||
if gopts.trace:
|
||||
git_command.TRACE = True
|
||||
if gopts.show_version:
|
||||
if name == 'help':
|
||||
name = 'version'
|
||||
else:
|
||||
print >>sys.stderr, 'fatal: invalid usage of --version'
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
cmd = self.commands[name]
|
||||
except KeyError:
|
||||
@ -77,6 +97,13 @@ class _Repo(object):
|
||||
|
||||
cmd.repodir = self.repodir
|
||||
cmd.manifest = Manifest(cmd.repodir)
|
||||
Editor.globalConfig = cmd.manifest.globalConfig
|
||||
|
||||
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
|
||||
print >>sys.stderr, \
|
||||
"fatal: '%s' requires a working directory"\
|
||||
% name
|
||||
sys.exit(1)
|
||||
|
||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||
config = cmd.manifest.globalConfig
|
||||
@ -92,6 +119,9 @@ class _Repo(object):
|
||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||
try:
|
||||
cmd.Execute(copts, cargs)
|
||||
except ManifestInvalidRevisionError, e:
|
||||
print >>sys.stderr, 'error: %s' % str(e)
|
||||
sys.exit(1)
|
||||
except NoSuchProjectError, e:
|
||||
if e.name:
|
||||
print >>sys.stderr, 'error: project %s not found' % e.name
|
||||
@ -184,11 +214,13 @@ def _Main(argv):
|
||||
repo._Run(argv)
|
||||
except KeyboardInterrupt:
|
||||
sys.exit(1)
|
||||
except RepoChangedException:
|
||||
# If the repo or manifest changed, re-exec ourselves.
|
||||
except RepoChangedException, rce:
|
||||
# If repo changed, re-exec ourselves.
|
||||
#
|
||||
argv = list(sys.argv)
|
||||
argv.extend(rce.extra_args)
|
||||
try:
|
||||
os.execv(__file__, sys.argv)
|
||||
os.execv(__file__, argv)
|
||||
except OSError, e:
|
||||
print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
|
||||
print >>sys.stderr, 'fatal: %s' % e
|
||||
|
268
manifest.py
@ -17,11 +17,8 @@ import os
|
||||
import sys
|
||||
import xml.dom.minidom
|
||||
|
||||
from editor import Editor
|
||||
from git_config import GitConfig, IsId
|
||||
from import_tar import ImportTar
|
||||
from import_zip import ImportZip
|
||||
from project import Project, MetaProject, R_TAGS
|
||||
from project import Project, MetaProject, R_HEADS, HEAD
|
||||
from remote import Remote
|
||||
from error import ManifestParseError
|
||||
|
||||
@ -42,24 +39,15 @@ class Manifest(object):
|
||||
self.repodir = os.path.abspath(repodir)
|
||||
self.topdir = os.path.dirname(self.repodir)
|
||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
||||
|
||||
self.globalConfig = GitConfig.ForUser()
|
||||
Editor.globalConfig = self.globalConfig
|
||||
|
||||
self.repoProject = MetaProject(self, 'repo',
|
||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
||||
worktree = os.path.join(repodir, 'repo'))
|
||||
|
||||
wt = os.path.join(repodir, 'manifests')
|
||||
gd_new = os.path.join(repodir, 'manifests.git')
|
||||
gd_old = os.path.join(wt, '.git')
|
||||
if os.path.exists(gd_new) or not os.path.exists(gd_old):
|
||||
gd = gd_new
|
||||
else:
|
||||
gd = gd_old
|
||||
self.manifestProject = MetaProject(self, 'manifests',
|
||||
gitdir = gd,
|
||||
worktree = wt)
|
||||
gitdir = os.path.join(repodir, 'manifests.git'),
|
||||
worktree = os.path.join(repodir, 'manifests'))
|
||||
|
||||
self._Unload()
|
||||
|
||||
@ -85,6 +73,76 @@ class Manifest(object):
|
||||
except OSError, e:
|
||||
raise ManifestParseError('cannot link manifest %s' % name)
|
||||
|
||||
def _RemoteToXml(self, r, doc, root):
|
||||
e = doc.createElement('remote')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', r.name)
|
||||
e.setAttribute('fetch', r.fetchUrl)
|
||||
if r.reviewUrl is not None:
|
||||
e.setAttribute('review', r.reviewUrl)
|
||||
if r.projectName is not None:
|
||||
e.setAttribute('project-name', r.projectName)
|
||||
|
||||
def Save(self, fd, peg_rev=False):
|
||||
"""Write the current manifest out to the given file descriptor.
|
||||
"""
|
||||
doc = xml.dom.minidom.Document()
|
||||
root = doc.createElement('manifest')
|
||||
doc.appendChild(root)
|
||||
|
||||
d = self.default
|
||||
sort_remotes = list(self.remotes.keys())
|
||||
sort_remotes.sort()
|
||||
|
||||
for r in sort_remotes:
|
||||
self._RemoteToXml(self.remotes[r], doc, root)
|
||||
if self.remotes:
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
have_default = False
|
||||
e = doc.createElement('default')
|
||||
if d.remote:
|
||||
have_default = True
|
||||
e.setAttribute('remote', d.remote.name)
|
||||
if d.revision:
|
||||
have_default = True
|
||||
e.setAttribute('revision', d.revision)
|
||||
if have_default:
|
||||
root.appendChild(e)
|
||||
root.appendChild(doc.createTextNode(''))
|
||||
|
||||
sort_projects = list(self.projects.keys())
|
||||
sort_projects.sort()
|
||||
|
||||
for p in sort_projects:
|
||||
p = self.projects[p]
|
||||
e = doc.createElement('project')
|
||||
root.appendChild(e)
|
||||
e.setAttribute('name', p.name)
|
||||
if p.relpath != p.name:
|
||||
e.setAttribute('path', p.relpath)
|
||||
if not d.remote or p.remote.name != d.remote.name:
|
||||
e.setAttribute('remote', p.remote.name)
|
||||
if peg_rev:
|
||||
if self.IsMirror:
|
||||
e.setAttribute('revision',
|
||||
p.bare_git.rev_parse(p.revision + '^0'))
|
||||
else:
|
||||
e.setAttribute('revision',
|
||||
p.work_git.rev_parse(HEAD + '^0'))
|
||||
elif not d.revision or p.revision != d.revision:
|
||||
e.setAttribute('revision', p.revision)
|
||||
|
||||
for r in p.extraRemotes:
|
||||
self._RemoteToXml(p.extraRemotes[r], doc, e)
|
||||
for c in p.copyfiles:
|
||||
ce = doc.createElement('copyfile')
|
||||
ce.setAttribute('src', c.src)
|
||||
ce.setAttribute('dest', c.dest)
|
||||
e.appendChild(ce)
|
||||
|
||||
doc.writexml(fd, '', ' ', '\n', 'UTF-8')
|
||||
|
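A hedged sketch of driving the writer above; 'manifest' stands for an already-loaded Manifest instance, and the element names in the comment simply restate what Save() emits:

```python
import sys

# Writes the current manifest as XML, shaped roughly like:
#   <manifest>
#     <remote name="origin" fetch="git://example.com/"/>
#     <default remote="origin" revision="master"/>
#     <project name="tools/repo" path="repo"/>
#   </manifest>
manifest.Save(sys.stdout)

# peg_rev=True pins every <project> to the commit currently checked
# out, instead of the symbolic revision named by the manifest.
manifest.Save(open('snapshot.xml', 'w'), peg_rev=True)
```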
||||
@property
|
||||
def projects(self):
|
||||
self._Load()
|
||||
@ -100,6 +158,10 @@ class Manifest(object):
|
||||
self._Load()
|
||||
return self._default
|
||||
|
||||
@property
|
||||
def IsMirror(self):
|
||||
return self.manifestProject.config.GetBoolean('repo.mirror')
|
||||
|
||||
def _Unload(self):
|
||||
self._loaded = False
|
||||
self._projects = {}
|
||||
@ -109,6 +171,12 @@ class Manifest(object):
|
||||
|
||||
def _Load(self):
|
||||
if not self._loaded:
|
||||
m = self.manifestProject
|
||||
b = m.GetBranch(m.CurrentBranch).merge
|
||||
if b.startswith(R_HEADS):
|
||||
b = b[len(R_HEADS):]
|
||||
self.branch = b
|
||||
|
||||
self._ParseManifest(True)
|
||||
|
||||
local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
|
||||
@ -120,6 +188,10 @@ class Manifest(object):
|
||||
finally:
|
||||
self.manifestFile = real
|
||||
|
||||
if self.IsMirror:
|
||||
self._AddMetaProjectMirror(self.repoProject)
|
||||
self._AddMetaProjectMirror(self.manifestProject)
|
||||
|
||||
self._loaded = True
|
||||
|
||||
def _ParseManifest(self, is_root_file):
|
||||
@ -135,10 +207,15 @@ class Manifest(object):
|
||||
"no <manifest> in %s" % \
|
||||
self.manifestFile
|
||||
|
||||
if is_root_file:
|
||||
self.branch = config.getAttribute('branch')
|
||||
if not self.branch:
|
||||
self.branch = 'default'
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remove-project':
|
||||
name = self._reqatt(node, 'name')
|
||||
try:
|
||||
del self._projects[name]
|
||||
except KeyError:
|
||||
raise ManifestParseError, \
|
||||
'project %s not found' % \
|
||||
(name)
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'remote':
|
||||
@ -168,6 +245,50 @@ class Manifest(object):
|
||||
(project.name, self.manifestFile)
|
||||
self._projects[project.name] = project
|
||||
|
||||
for node in config.childNodes:
|
||||
if node.nodeName == 'add-remote':
|
||||
pn = self._reqatt(node, 'to-project')
|
||||
project = self._projects.get(pn)
|
||||
if not project:
|
||||
raise ManifestParseError, \
|
||||
'project %s not defined in %s' % \
|
||||
(pn, self.manifestFile)
|
||||
self._ParseProjectExtraRemote(project, node)
|
||||
|
||||
def _AddMetaProjectMirror(self, m):
|
||||
name = None
|
||||
m_url = m.GetRemote(m.remote.name).url
|
||||
if m_url.endswith('/.git'):
|
||||
raise ManifestParseError, 'refusing to mirror %s' % m_url
|
||||
|
||||
if self._default and self._default.remote:
|
||||
url = self._default.remote.fetchUrl
|
||||
if not url.endswith('/'):
|
||||
url += '/'
|
||||
if m_url.startswith(url):
|
||||
remote = self._default.remote
|
||||
name = m_url[len(url):]
|
||||
|
||||
if name is None:
|
||||
s = m_url.rindex('/') + 1
|
||||
remote = Remote('origin', fetch = m_url[:s])
|
||||
name = m_url[s:]
|
||||
|
||||
if name.endswith('.git'):
|
||||
name = name[:-4]
|
||||
|
||||
if name not in self._projects:
|
||||
m.PreSync()
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
remote = remote,
|
||||
gitdir = gitdir,
|
||||
worktree = None,
|
||||
relpath = None,
|
||||
revision = m.revision)
|
||||
self._projects[project.name] = project
|
||||
|
||||
def _ParseRemote(self, node):
|
||||
"""
|
||||
reads a <remote> element from the manifest file
|
||||
@ -175,10 +296,17 @@ class Manifest(object):
|
||||
name = self._reqatt(node, 'name')
|
||||
fetch = self._reqatt(node, 'fetch')
|
||||
review = node.getAttribute('review')
|
||||
if review == '':
|
||||
review = None
|
||||
|
||||
projectName = node.getAttribute('project-name')
|
||||
if projectName == '':
|
||||
projectName = None
|
||||
|
||||
r = Remote(name=name,
|
||||
fetch=fetch,
|
||||
review=review)
|
||||
review=review,
|
||||
projectName=projectName)
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'require':
|
||||
@ -193,6 +321,8 @@ class Manifest(object):
|
||||
d = _Default()
|
||||
d.remote = self._get_remote(node)
|
||||
d.revision = node.getAttribute('revision')
|
||||
if d.revision == '':
|
||||
d.revision = None
|
||||
return d
|
||||
|
||||
def _ParseProject(self, node):
|
||||
@ -225,8 +355,13 @@ class Manifest(object):
|
||||
"project %s path cannot be absolute in %s" % \
|
||||
(name, self.manifestFile)
|
||||
|
||||
worktree = os.path.join(self.topdir, path)
|
||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
||||
if self.IsMirror:
|
||||
relpath = None
|
||||
worktree = None
|
||||
gitdir = os.path.join(self.topdir, '%s.git' % name)
|
||||
else:
|
||||
worktree = os.path.join(self.topdir, path)
|
||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
||||
|
||||
project = Project(manifest = self,
|
||||
name = name,
|
||||
@ -238,93 +373,28 @@ class Manifest(object):
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'remote':
|
||||
r = self._ParseRemote(n)
|
||||
if project.extraRemotes.get(r.name) \
|
||||
or project.remote.name == r.name:
|
||||
raise ManifestParseError, \
|
||||
'duplicate remote %s in project %s in %s' % \
|
||||
(r.name, project.name, self.manifestFile)
|
||||
project.extraRemotes[r.name] = r
|
||||
self._ParseProjectExtraRemote(project, n)
|
||||
elif n.nodeName == 'copyfile':
|
||||
self._ParseCopyFile(project, n)
|
||||
|
||||
to_resolve = []
|
||||
by_version = {}
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == 'import':
|
||||
self._ParseImport(project, n, to_resolve, by_version)
|
||||
|
||||
for pair in to_resolve:
|
||||
sn, pr = pair
|
||||
try:
|
||||
sn.SetParent(by_version[pr].commit)
|
||||
except KeyError:
|
||||
raise ManifestParseError, \
|
||||
'snapshot %s not in project %s in %s' % \
|
||||
(pr, project.name, self.manifestFile)
|
||||
|
||||
return project
|
||||
|
||||
def _ParseImport(self, project, import_node, to_resolve, by_version):
|
||||
first_url = None
|
||||
for node in import_node.childNodes:
|
||||
if node.nodeName == 'mirror':
|
||||
first_url = self._reqatt(node, 'url')
|
||||
break
|
||||
if not first_url:
|
||||
def _ParseProjectExtraRemote(self, project, n):
|
||||
r = self._ParseRemote(n)
|
||||
if project.extraRemotes.get(r.name) \
|
||||
or project.remote.name == r.name:
|
||||
raise ManifestParseError, \
|
||||
'mirror url required for project %s in %s' % \
|
||||
(project.name, self.manifestFile)
|
||||
|
||||
imp = None
|
||||
for cls in [ImportTar, ImportZip]:
|
||||
if cls.CanAccept(first_url):
|
||||
imp = cls()
|
||||
break
|
||||
if not imp:
|
||||
raise ManifestParseError, \
|
||||
'snapshot %s unsupported for project %s in %s' % \
|
||||
(first_url, project.name, self.manifestFile)
|
||||
|
||||
imp.SetProject(project)
|
||||
|
||||
for node in import_node.childNodes:
|
||||
if node.nodeName == 'remap':
|
||||
old = node.getAttribute('strip')
|
||||
new = node.getAttribute('insert')
|
||||
imp.RemapPath(old, new)
|
||||
|
||||
elif node.nodeName == 'mirror':
|
||||
imp.AddUrl(self._reqatt(node, 'url'))
|
||||
|
||||
for node in import_node.childNodes:
|
||||
if node.nodeName == 'snapshot':
|
||||
sn = imp.Clone()
|
||||
sn.SetVersion(self._reqatt(node, 'version'))
|
||||
sn.SetCommit(node.getAttribute('check'))
|
||||
|
||||
pr = node.getAttribute('prior')
|
||||
if pr:
|
||||
if IsId(pr):
|
||||
sn.SetParent(pr)
|
||||
else:
|
||||
to_resolve.append((sn, pr))
|
||||
|
||||
rev = R_TAGS + sn.TagName
|
||||
|
||||
if rev in project.snapshots:
|
||||
raise ManifestParseError, \
|
||||
'duplicate snapshot %s for project %s in %s' % \
|
||||
(sn.version, project.name, self.manifestFile)
|
||||
project.snapshots[rev] = sn
|
||||
by_version[sn.version] = sn
|
||||
'duplicate remote %s in project %s in %s' % \
|
||||
(r.name, project.name, self.manifestFile)
|
||||
project.extraRemotes[r.name] = r
|
||||
|
||||
def _ParseCopyFile(self, project, node):
|
||||
src = self._reqatt(node, 'src')
|
||||
dest = self._reqatt(node, 'dest')
|
||||
# src is project relative, and dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, os.path.join(self.topdir, dest))
|
||||
if not self.IsMirror:
|
||||
# src is project relative;
|
||||
# dest is relative to the top of the tree
|
||||
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
|
||||
|
||||
def _get_remote(self, node):
|
||||
name = node.getAttribute('remote')
|
||||
|
2
pager.py
@ -22,7 +22,7 @@ active = False
|
||||
def RunPager(globalConfig):
|
||||
global active
|
||||
|
||||
if not os.isatty(0):
|
||||
if not os.isatty(0) or not os.isatty(1):
|
||||
return
|
||||
pager = _SelectPager(globalConfig)
|
||||
if pager == '' or pager == 'cat':
|
||||
|
58
progress.py
Normal file
@ -0,0 +1,58 @@
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
class Progress(object):
|
||||
def __init__(self, title, total=0):
|
||||
self._title = title
|
||||
self._total = total
|
||||
self._done = 0
|
||||
self._lastp = -1
|
||||
|
||||
def update(self, inc=1):
|
||||
self._done += inc
|
||||
|
||||
if self._total <= 0:
|
||||
sys.stderr.write('\r%s: %d, ' % (
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.flush()
|
||||
else:
|
||||
p = (100 * self._done) / self._total
|
||||
|
||||
if self._lastp != p:
|
||||
self._lastp = p
|
||||
sys.stderr.write('\r%s: %3d%% (%d/%d) ' % (
|
||||
self._title,
|
||||
p,
|
||||
self._done,
|
||||
self._total))
|
||||
sys.stderr.flush()
|
||||
|
||||
def end(self):
|
||||
if self._total <= 0:
|
||||
sys.stderr.write('\r%s: %d, done. \n' % (
|
||||
self._title,
|
||||
self._done))
|
||||
sys.stderr.flush()
|
||||
else:
|
||||
p = (100 * self._done) / self._total
|
||||
sys.stderr.write('\r%s: %3d%% (%d/%d), done. \n' % (
|
||||
self._title,
|
||||
p,
|
||||
self._done,
|
||||
self._total))
|
||||
sys.stderr.flush()
|
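A short usage sketch of the new Progress helper; the project list is invented for illustration:

```python
from progress import Progress

projects = ['platform/build', 'platform/bionic', 'tools/repo']   # example data
pm = Progress('Syncing work tree', len(projects))
for p in projects:
  # ... per-project work happens here ...
  pm.update()   # redraws "\rSyncing work tree:  33% (1/3) " on stderr
pm.end()        # prints the final "... (3/3), done." line
```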
657
project.py
@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import errno
|
||||
import filecmp
|
||||
import os
|
||||
import re
|
||||
@ -23,10 +24,9 @@ import urllib2
|
||||
from color import Coloring
|
||||
from git_command import GitCommand
|
||||
from git_config import GitConfig, IsId
|
||||
from gerrit_upload import UploadBundle
|
||||
from error import GitError, ImportError, UploadError
|
||||
from error import ManifestInvalidRevisionError
|
||||
from remote import Remote
|
||||
from codereview import proto_client
|
||||
|
||||
HEAD = 'HEAD'
|
||||
R_HEADS = 'refs/heads/'
|
||||
@ -34,17 +34,41 @@ R_TAGS = 'refs/tags/'
|
||||
R_PUB = 'refs/published/'
|
||||
R_M = 'refs/remotes/m/'
|
||||
|
||||
def _warn(fmt, *args):
|
||||
def _error(fmt, *args):
|
||||
msg = fmt % args
|
||||
print >>sys.stderr, 'warn: %s' % msg
|
||||
|
||||
def _info(fmt, *args):
|
||||
msg = fmt % args
|
||||
print >>sys.stderr, 'info: %s' % msg
|
||||
print >>sys.stderr, 'error: %s' % msg
|
||||
|
||||
def not_rev(r):
|
||||
return '^' + r
|
||||
|
||||
def sq(r):
|
||||
return "'" + r.replace("'", "'\''") + "'"
|
||||
|
||||
hook_list = None
|
||||
def repo_hooks():
|
||||
global hook_list
|
||||
if hook_list is None:
|
||||
d = os.path.abspath(os.path.dirname(__file__))
|
||||
d = os.path.join(d , 'hooks')
|
||||
hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
|
||||
return hook_list
|
||||
|
||||
def relpath(dst, src):
|
||||
src = os.path.dirname(src)
|
||||
top = os.path.commonprefix([dst, src])
|
||||
if top.endswith('/'):
|
||||
top = top[:-1]
|
||||
else:
|
||||
top = os.path.dirname(top)
|
||||
|
||||
tmp = src
|
||||
rel = ''
|
||||
while top != tmp:
|
||||
rel += '../'
|
||||
tmp = os.path.dirname(tmp)
|
||||
return rel + dst[len(top) + 1:]
|
||||
|
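relpath() computes the link target a symlink placed at 'src' needs in order to reach 'dst'; it is what lets the hooks/ and .git/ entries created later in this file be relative instead of absolute. A small worked example with invented paths:

```python
# From /top/foo/.git/config this target resolves back to the project's
# real git directory under .repo/.
assert relpath('/top/.repo/projects/foo.git/config',
               '/top/foo/.git/config') == '../../.repo/projects/foo.git/config'
```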
||||
|
||||
class DownloadedChange(object):
|
||||
_commit_cache = None
|
||||
|
||||
@ -77,6 +101,7 @@ class ReviewableBranch(object):
|
||||
self.project = project
|
||||
self.branch = branch
|
||||
self.base = base
|
||||
self.replace_changes = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -96,6 +121,16 @@ class ReviewableBranch(object):
|
||||
'--')
|
||||
return self._commit_cache
|
||||
|
||||
@property
|
||||
def unabbrev_commits(self):
|
||||
r = dict()
|
||||
for commit in self.project.bare_git.rev_list(
|
||||
not_rev(self.base),
|
||||
R_HEADS + self.name,
|
||||
'--'):
|
||||
r[commit[0:8]] = commit
|
||||
return r
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
return self.project.bare_git.log(
|
||||
@ -104,8 +139,10 @@ class ReviewableBranch(object):
|
||||
R_HEADS + self.name,
|
||||
'--')
|
||||
|
||||
def UploadForReview(self):
|
||||
self.project.UploadForReview(self.name)
|
||||
def UploadForReview(self, people):
|
||||
self.project.UploadForReview(self.name,
|
||||
self.replace_changes,
|
||||
people)
|
||||
|
||||
@property
|
||||
def tip_url(self):
|
||||
@ -137,13 +174,15 @@ class DiffColoring(Coloring):
|
||||
|
||||
|
||||
class _CopyFile:
|
||||
def __init__(self, src, dest):
|
||||
def __init__(self, src, dest, abssrc, absdest):
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
self.abs_src = abssrc
|
||||
self.abs_dest = absdest
|
||||
|
||||
def _Copy(self):
|
||||
src = self.src
|
||||
dest = self.dest
|
||||
src = self.abs_src
|
||||
dest = self.abs_dest
|
||||
# copy file if it does not exist or is out of date
|
||||
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
|
||||
try:
|
||||
@ -156,9 +195,7 @@ class _CopyFile:
|
||||
mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
|
||||
os.chmod(dest, mode)
|
||||
except IOError:
|
||||
print >>sys.stderr, \
|
||||
'error: Cannot copy file %s to %s' \
|
||||
% (src, dest)
|
||||
_error('Cannot copy file %s to %s', src, dest)
|
||||
|
||||
|
||||
class Project(object):
|
||||
@ -184,7 +221,10 @@ class Project(object):
|
||||
gitdir = self.gitdir,
|
||||
defaults = self.manifest.globalConfig)
|
||||
|
||||
self.work_git = self._GitGetByExec(self, bare=False)
|
||||
if self.worktree:
|
||||
self.work_git = self._GitGetByExec(self, bare=False)
|
||||
else:
|
||||
self.work_git = None
|
||||
self.bare_git = self._GitGetByExec(self, bare=True)
|
||||
|
||||
@property
|
||||
@ -260,6 +300,32 @@ class Project(object):
|
||||
"""
|
||||
return self.config.GetBranch(name)
|
||||
|
||||
def GetBranches(self):
|
||||
"""Get all existing local branches.
|
||||
"""
|
||||
current = self.CurrentBranch
|
||||
all = self.bare_git.ListRefs()
|
||||
heads = {}
|
||||
pubd = {}
|
||||
|
||||
for name, id in all.iteritems():
|
||||
if name.startswith(R_HEADS):
|
||||
name = name[len(R_HEADS):]
|
||||
b = self.GetBranch(name)
|
||||
b.current = name == current
|
||||
b.published = None
|
||||
b.revision = id
|
||||
heads[name] = b
|
||||
|
||||
for name, id in all.iteritems():
|
||||
if name.startswith(R_PUB):
|
||||
name = name[len(R_PUB):]
|
||||
b = heads.get(name)
|
||||
if b:
|
||||
b.published = id
|
||||
|
||||
return heads
|
||||
|
||||
|
||||
## Status Display ##
|
||||
|
||||
@ -280,7 +346,7 @@ class Project(object):
|
||||
df = self.work_git.DiffZ('diff-files')
|
||||
do = self.work_git.LsOthers()
|
||||
if not di and not df and not do:
|
||||
return
|
||||
return 'CLEAN'
|
||||
|
||||
out = StatusColoring(self.config)
|
||||
out.project('project %-40s', self.relpath + '/')
|
||||
@ -314,7 +380,7 @@ class Project(object):
|
||||
else: f_status = '-'
|
||||
|
||||
if i and i.src_path:
|
||||
line = ' %s%s\t%s => (%s%%)' % (i_status, f_status,
|
||||
line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
|
||||
i.src_path, p, i.level)
|
||||
else:
|
||||
line = ' %s%s\t%s' % (i_status, f_status, p)
|
||||
@ -328,6 +394,7 @@ class Project(object):
|
||||
else:
|
||||
out.write('%s', line)
|
||||
out.nl()
|
||||
return 'DIRTY'
|
||||
|
||||
def PrintWorkTreeDiff(self):
|
||||
"""Prints the status of the repository to stdout.
|
||||
@ -398,15 +465,23 @@ class Project(object):
|
||||
if branch in pubed and pubed[branch] == id:
|
||||
continue
|
||||
|
||||
branch = self.GetBranch(branch)
|
||||
base = branch.LocalMerge
|
||||
if branch.LocalMerge:
|
||||
rb = ReviewableBranch(self, branch, base)
|
||||
if rb.commits:
|
||||
ready.append(rb)
|
||||
rb = self.GetUploadableBranch(branch)
|
||||
if rb:
|
||||
ready.append(rb)
|
||||
return ready
|
||||
|
||||
def UploadForReview(self, branch=None):
|
||||
def GetUploadableBranch(self, branch_name):
|
||||
"""Get a single uploadable branch, or None.
|
||||
"""
|
||||
branch = self.GetBranch(branch_name)
|
||||
base = branch.LocalMerge
|
||||
if branch.LocalMerge:
|
||||
rb = ReviewableBranch(self, branch, base)
|
||||
if rb.commits:
|
||||
return rb
|
||||
return None
|
||||
|
||||
def UploadForReview(self, branch=None, replace_changes=None, people=([],[])):
|
||||
"""Uploads the named branch for code review.
|
||||
"""
|
||||
if branch is None:
|
||||
@ -424,27 +499,33 @@ class Project(object):
|
||||
if not dest_branch.startswith(R_HEADS):
|
||||
dest_branch = R_HEADS + dest_branch
|
||||
|
||||
base_list = []
|
||||
for name, id in self._allrefs.iteritems():
|
||||
if branch.remote.WritesTo(name):
|
||||
base_list.append(not_rev(name))
|
||||
if not base_list:
|
||||
raise GitError('no base refs, cannot upload %s' % branch.name)
|
||||
if not branch.remote.projectname:
|
||||
branch.remote.projectname = self.name
|
||||
branch.remote.Save()
|
||||
|
||||
print >>sys.stderr, ''
|
||||
_info("Uploading %s to %s:", branch.name, self.name)
|
||||
try:
|
||||
UploadBundle(project = self,
|
||||
server = branch.remote.review,
|
||||
email = self.UserEmail,
|
||||
dest_project = self.name,
|
||||
dest_branch = dest_branch,
|
||||
src_branch = R_HEADS + branch.name,
|
||||
bases = base_list)
|
||||
except proto_client.ClientLoginError:
|
||||
raise UploadError('Login failure')
|
||||
except urllib2.HTTPError, e:
|
||||
raise UploadError('HTTP error %d' % e.code)
|
||||
if branch.remote.ReviewProtocol == 'ssh':
|
||||
if dest_branch.startswith(R_HEADS):
|
||||
dest_branch = dest_branch[len(R_HEADS):]
|
||||
|
||||
rp = ['gerrit receive-pack']
|
||||
for e in people[0]:
|
||||
rp.append('--reviewer=%s' % sq(e))
|
||||
for e in people[1]:
|
||||
rp.append('--cc=%s' % sq(e))
|
||||
|
||||
cmd = ['push']
|
||||
cmd.append('--receive-pack=%s' % " ".join(rp))
|
||||
cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
|
||||
cmd.append('%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch))
|
||||
if replace_changes:
|
||||
for change_id,commit_id in replace_changes.iteritems():
|
||||
cmd.append('%s:refs/changes/%s/new' % (commit_id, change_id))
|
||||
if GitCommand(self, cmd, bare = True).Wait() != 0:
|
||||
raise UploadError('Upload failed')
|
||||
|
||||
else:
|
||||
raise UploadError('Unsupported protocol %s' \
|
||||
% branch.remote.review)
|
||||
|
||||
msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
|
||||
self.bare_git.UpdateRef(R_PUB + branch.name,
|
||||
@ -462,74 +543,67 @@ class Project(object):
|
||||
print >>sys.stderr
|
||||
print >>sys.stderr, 'Initializing project %s ...' % self.name
|
||||
self._InitGitDir()
|
||||
|
||||
self._InitRemote()
|
||||
for r in self.extraRemotes.values():
|
||||
if not self._RemoteFetch(r.name):
|
||||
return False
|
||||
if not self._SnapshotDownload():
|
||||
return False
|
||||
if not self._RemoteFetch():
|
||||
return False
|
||||
self._RepairAndroidImportErrors()
|
||||
self._InitMRef()
|
||||
|
||||
if self.worktree:
|
||||
self._InitMRef()
|
||||
else:
|
||||
self._InitMirrorHead()
|
||||
try:
|
||||
os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
|
||||
except OSError:
|
||||
pass
|
||||
return True
|
||||
|
||||
|
||||
def PostRepoUpgrade(self):
|
||||
self._InitHooks()
|
||||
|
||||
def _CopyFiles(self):
|
||||
for file in self.copyfiles:
|
||||
file._Copy()
|
||||
|
||||
def _RepairAndroidImportErrors(self):
|
||||
if self.name in ['platform/external/iptables',
|
||||
'platform/external/libpcap',
|
||||
'platform/external/tcpdump',
|
||||
'platform/external/webkit',
|
||||
'platform/system/wlan/ti']:
|
||||
# I hate myself for doing this...
|
||||
#
|
||||
# In the initial Android 1.0 release these projects were
|
||||
# shipped, some users got them, and then the history had
|
||||
# to be rewritten to correct problems with their imports.
|
||||
# The 'android-1.0' tag may still be pointing at the old
|
||||
# history, so we need to drop the tag and fetch it again.
|
||||
#
|
||||
try:
|
||||
remote = self.GetRemote(self.remote.name)
|
||||
relname = remote.ToLocal(R_HEADS + 'release-1.0')
|
||||
tagname = R_TAGS + 'android-1.0'
|
||||
if self._revlist(not_rev(relname), tagname):
|
||||
cmd = ['fetch', remote.name, '+%s:%s' % (tagname, tagname)]
|
||||
GitCommand(self, cmd, bare = True).Wait()
|
||||
except GitError:
|
||||
pass
|
||||
|
||||
def Sync_LocalHalf(self):
|
||||
def Sync_LocalHalf(self, syncbuf):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
Network access is not required.
|
||||
|
||||
Return:
|
||||
True: the sync was successful
|
||||
False: the sync requires user input
|
||||
"""
|
||||
self._InitWorkTree()
|
||||
self.CleanPublishedCache()
|
||||
|
||||
rem = self.GetRemote(self.remote.name)
|
||||
rev = rem.ToLocal(self.revision)
|
||||
try:
|
||||
self.bare_git.rev_parse('--verify', '%s^0' % rev)
|
||||
except GitError:
|
||||
raise ManifestInvalidRevisionError(
|
||||
'revision %s in %s not found' % (self.revision, self.name))
|
||||
|
||||
branch = self.CurrentBranch
|
||||
|
||||
if branch is None:
|
||||
if branch is None or syncbuf.detach_head:
|
||||
# Currently on a detached HEAD. The user is assumed to
|
||||
# not have any local modifications worth worrying about.
|
||||
#
|
||||
if os.path.exists(os.path.join(self.worktree, '.dotest')) \
|
||||
or os.path.exists(os.path.join(self.worktree, '.git', 'rebase-apply')):
|
||||
syncbuf.fail(self, _PriorSyncFailedError())
|
||||
return
|
||||
|
||||
lost = self._revlist(not_rev(rev), HEAD)
|
||||
if lost:
|
||||
_info("[%s] Discarding %d commits", self.name, len(lost))
|
||||
syncbuf.info(self, "discarding %d commits", len(lost))
|
||||
try:
|
||||
self._Checkout(rev, quiet=True)
|
||||
except GitError:
|
||||
return False
|
||||
except GitError, e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
return True
|
||||
return
|
||||
|
||||
branch = self.GetBranch(branch)
|
||||
merge = branch.LocalMerge
|
||||
@ -538,16 +612,16 @@ class Project(object):
|
||||
# The current branch has no tracking configuration.
|
||||
# Jump off it to a detached HEAD.
|
||||
#
|
||||
_info("[%s] Leaving %s"
|
||||
" (does not track any upstream)",
|
||||
self.name,
|
||||
branch.name)
|
||||
syncbuf.info(self,
|
||||
"leaving %s; does not track upstream",
|
||||
branch.name)
|
||||
try:
|
||||
self._Checkout(rev, quiet=True)
|
||||
except GitError:
|
||||
return False
|
||||
except GitError, e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
self._CopyFiles()
|
||||
return True
|
||||
return
|
||||
|
||||
upstream_gain = self._revlist(not_rev(HEAD), rev)
|
||||
pub = self.WasPublished(branch.name)
|
||||
@ -559,12 +633,24 @@ class Project(object):
|
||||
# commits are not yet merged upstream. We do not want
|
||||
# to rewrite the published commits so we punt.
|
||||
#
|
||||
_info("[%s] Branch %s is published,"
|
||||
" but is now %d commits behind.",
|
||||
self.name, branch.name, len(upstream_gain))
|
||||
_info("[%s] Consider merging or rebasing the"
|
||||
" unpublished commits.", self.name)
|
||||
return True
|
||||
syncbuf.info(self,
|
||||
"branch %s is published but is now %d commits behind",
|
||||
branch.name,
|
||||
len(upstream_gain))
|
||||
syncbuf.info(self, "consider merging or rebasing the unpublished commits")
|
||||
return
|
||||
elif upstream_gain:
|
||||
# We can fast-forward safely.
|
||||
#
|
||||
def _doff():
|
||||
self._FastForward(rev)
|
||||
self._CopyFiles()
|
||||
syncbuf.later1(self, _doff)
|
||||
return
|
||||
else:
|
||||
# Trivially no changes in the upstream.
|
||||
#
|
||||
return
|
||||
|
||||
if merge == rev:
|
||||
try:
|
||||
@ -579,8 +665,7 @@ class Project(object):
|
||||
# and pray that the old upstream also wasn't in the habit
|
||||
# of rebasing itself.
|
||||
#
|
||||
_info("[%s] Manifest switched from %s to %s",
|
||||
self.name, merge, rev)
|
||||
syncbuf.info(self, "manifest switched %s...%s", merge, rev)
|
||||
old_merge = merge
|
||||
|
||||
if rev == old_merge:
|
||||
@ -591,19 +676,19 @@ class Project(object):
|
||||
if not upstream_lost and not upstream_gain:
|
||||
# Trivially no changes caused by the upstream.
|
||||
#
|
||||
return True
|
||||
return
|
||||
|
||||
if self.IsDirty(consider_untracked=False):
|
||||
_warn('[%s] commit (or discard) uncommitted changes'
|
||||
' before sync', self.name)
|
||||
return False
|
||||
syncbuf.fail(self, _DirtyError())
|
||||
return
|
||||
|
||||
if upstream_lost:
|
||||
# Upstream rebased. Not everything in HEAD
|
||||
# may have been caused by the user.
|
||||
#
|
||||
_info("[%s] Discarding %d commits removed from upstream",
|
||||
self.name, len(upstream_lost))
|
||||
syncbuf.info(self,
|
||||
"discarding %d commits removed from upstream",
|
||||
len(upstream_lost))
|
||||
|
||||
branch.remote = rem
|
||||
branch.merge = self.revision
|
||||
@ -611,56 +696,28 @@ class Project(object):
|
||||
|
||||
my_changes = self._revlist(not_rev(old_merge), HEAD)
|
||||
if my_changes:
|
||||
try:
|
||||
def _dorebase():
|
||||
self._Rebase(upstream = old_merge, onto = rev)
|
||||
except GitError:
|
||||
return False
|
||||
self._CopyFiles()
|
||||
syncbuf.later2(self, _dorebase)
|
||||
elif upstream_lost:
|
||||
try:
|
||||
self._ResetHard(rev)
|
||||
except GitError:
|
||||
return False
|
||||
self._CopyFiles()
|
||||
except GitError, e:
|
||||
syncbuf.fail(self, e)
|
||||
return
|
||||
else:
|
||||
try:
|
||||
def _doff():
|
||||
self._FastForward(rev)
|
||||
except GitError:
|
||||
return False
|
||||
self._CopyFiles()
|
||||
syncbuf.later1(self, _doff)
|
||||
|
||||
self._CopyFiles()
|
||||
return True
|
||||
|
||||
def _SnapshotDownload(self):
|
||||
if self.snapshots:
|
||||
have = set(self._allrefs.keys())
|
||||
need = []
|
||||
|
||||
for tag, sn in self.snapshots.iteritems():
|
||||
if tag not in have:
|
||||
need.append(sn)
|
||||
|
||||
if need:
|
||||
print >>sys.stderr, """
|
||||
*** Downloading source(s) from a mirror site. ***
|
||||
*** If the network hangs, kill and restart repo. ***
|
||||
"""
|
||||
for sn in need:
|
||||
try:
|
||||
sn.Import()
|
||||
except ImportError, e:
|
||||
print >>sys.stderr, \
|
||||
'error: Cannot import %s: %s' \
|
||||
% (self.name, e)
|
||||
return False
|
||||
cmd = ['repack', '-a', '-d', '-f', '-l']
|
||||
if GitCommand(self, cmd, bare = True).Wait() != 0:
|
||||
return False
|
||||
return True
|
||||
|
||||
def AddCopyFile(self, src, dest):
|
||||
def AddCopyFile(self, src, dest, absdest):
|
||||
# dest should already be an absolute path, but src is project relative
|
||||
# make src an absolute path
|
||||
src = os.path.join(self.worktree, src)
|
||||
self.copyfiles.append(_CopyFile(src, dest))
|
||||
abssrc = os.path.join(self.worktree, src)
|
||||
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
|
||||
|
||||
def DownloadPatchSet(self, change_id, patch_id):
|
||||
"""Download a single patch set of a single change to FETCH_HEAD.
|
||||
@ -685,23 +742,69 @@ class Project(object):
|
||||
def StartBranch(self, name):
|
||||
"""Create a new branch off the manifest's revision.
|
||||
"""
|
||||
branch = self.GetBranch(name)
|
||||
branch.remote = self.GetRemote(self.remote.name)
|
||||
branch.merge = self.revision
|
||||
try:
|
||||
self.bare_git.rev_parse(R_HEADS + name)
|
||||
exists = True
|
||||
except GitError:
|
||||
exists = False;
|
||||
|
||||
if exists:
|
||||
if name == self.CurrentBranch:
|
||||
return True
|
||||
else:
|
||||
cmd = ['checkout', name, '--']
|
||||
return GitCommand(self, cmd).Wait() == 0
|
||||
|
||||
rev = branch.LocalMerge
|
||||
cmd = ['checkout', '-b', branch.name, rev]
|
||||
if GitCommand(self, cmd).Wait() == 0:
|
||||
branch.Save()
|
||||
else:
|
||||
raise GitError('%s checkout %s ' % (self.name, rev))
|
||||
branch = self.GetBranch(name)
|
||||
branch.remote = self.GetRemote(self.remote.name)
|
||||
branch.merge = self.revision
|
||||
|
||||
rev = branch.LocalMerge
|
||||
cmd = ['checkout', '-b', branch.name, rev]
|
||||
if GitCommand(self, cmd).Wait() == 0:
|
||||
branch.Save()
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def CheckoutBranch(self, name):
|
||||
"""Checkout a local topic branch.
|
||||
"""
|
||||
|
||||
# Be sure the branch exists
|
||||
try:
|
||||
tip_rev = self.bare_git.rev_parse(R_HEADS + name)
|
||||
except GitError:
|
||||
return False;
|
||||
|
||||
# Do the checkout
|
||||
cmd = ['checkout', name, '--']
|
||||
return GitCommand(self, cmd).Wait() == 0
|
||||
|
||||
def AbandonBranch(self, name):
|
||||
"""Destroy a local topic branch.
|
||||
"""
|
||||
try:
|
||||
tip_rev = self.bare_git.rev_parse(R_HEADS + name)
|
||||
except GitError:
|
||||
return
|
||||
|
||||
if self.CurrentBranch == name:
|
||||
self._Checkout(
|
||||
self.GetRemote(self.remote.name).ToLocal(self.revision),
|
||||
quiet=True)
|
||||
|
||||
cmd = ['branch', '-D', name]
|
||||
GitCommand(self, cmd, capture_stdout=True).Wait()
|
||||
|
||||
def PruneHeads(self):
|
||||
"""Prune any topic branches already merged into upstream.
|
||||
"""
|
||||
cb = self.CurrentBranch
|
||||
kill = []
|
||||
for name in self._allrefs.keys():
|
||||
left = self._allrefs
|
||||
for name in left.keys():
|
||||
if name.startswith(R_HEADS):
|
||||
name = name[len(R_HEADS):]
|
||||
if cb is None or name != cb:
|
||||
@ -714,14 +817,12 @@ class Project(object):
|
||||
self.work_git.DetachHead(HEAD)
|
||||
kill.append(cb)
|
||||
|
||||
deleted = set()
|
||||
if kill:
|
||||
try:
|
||||
old = self.bare_git.GetHead()
|
||||
except GitError:
|
||||
old = 'refs/heads/please_never_use_this_as_a_branch_name'
|
||||
|
||||
rm_re = re.compile(r"^Deleted branch (.*)\.$")
|
||||
try:
|
||||
self.bare_git.DetachHead(rev)
|
||||
|
||||
@ -733,22 +834,20 @@ class Project(object):
|
||||
b.Wait()
|
||||
finally:
|
||||
self.bare_git.SetHead(old)
|
||||
left = self._allrefs
|
||||
|
||||
for line in b.stdout.split("\n"):
|
||||
m = rm_re.match(line)
|
||||
if m:
|
||||
deleted.add(m.group(1))
|
||||
|
||||
if deleted:
|
||||
self.CleanPublishedCache()
|
||||
for branch in kill:
|
||||
if (R_HEADS + branch) not in left:
|
||||
self.CleanPublishedCache()
|
||||
break
|
||||
|
||||
if cb and cb not in kill:
|
||||
kill.append(cb)
|
||||
kill.sort()
|
||||
kill.sort()
|
||||
|
||||
kept = []
|
||||
for branch in kill:
|
||||
if branch not in deleted:
|
||||
if (R_HEADS + branch) in left:
|
||||
branch = self.GetBranch(branch)
|
||||
base = branch.LocalMerge
|
||||
if not base:
|
||||
@ -762,41 +861,11 @@ class Project(object):
|
||||
def _RemoteFetch(self, name=None):
|
||||
if not name:
|
||||
name = self.remote.name
|
||||
|
||||
hide_errors = False
|
||||
if self.extraRemotes or self.snapshots:
|
||||
hide_errors = True
|
||||
|
||||
proc = GitCommand(self,
|
||||
['fetch', name],
|
||||
bare = True,
|
||||
capture_stderr = hide_errors)
|
||||
if hide_errors:
|
||||
err = proc.process.stderr.fileno()
|
||||
buf = ''
|
||||
while True:
|
||||
b = os.read(err, 256)
|
||||
if b:
|
||||
buf += b
|
||||
while buf:
|
||||
r = buf.find('remote: error: unable to find ')
|
||||
if r >= 0:
|
||||
lf = buf.find('\n')
|
||||
if lf < 0:
|
||||
break
|
||||
buf = buf[lf + 1:]
|
||||
continue
|
||||
|
||||
cr = buf.find('\r')
|
||||
if cr < 0:
|
||||
break
|
||||
os.write(2, buf[0:cr + 1])
|
||||
buf = buf[cr + 1:]
|
||||
if not b:
|
||||
if buf:
|
||||
os.write(2, buf)
|
||||
break
|
||||
return proc.Wait() == 0
|
||||
cmd = ['fetch']
|
||||
if not self.worktree:
|
||||
cmd.append('--update-head-ok')
|
||||
cmd.append(name)
|
||||
return GitCommand(self, cmd, bare = True).Wait() == 0
|
||||
|
||||
def _Checkout(self, rev, quiet=False):
|
||||
cmd = ['checkout']
|
||||
@ -817,11 +886,11 @@ class Project(object):
|
||||
raise GitError('%s reset --hard %s ' % (self.name, rev))
|
||||
|
||||
def _Rebase(self, upstream, onto = None):
|
||||
cmd = ['rebase', '-i']
|
||||
cmd = ['rebase']
|
||||
if onto is not None:
|
||||
cmd.extend(['--onto', onto])
|
||||
cmd.append(upstream)
|
||||
if GitCommand(self, cmd, disable_editor=True).Wait() != 0:
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError('%s rebase %s ' % (self.name, upstream))
|
||||
|
||||
def _FastForward(self, head):
|
||||
@ -833,19 +902,42 @@ class Project(object):
|
||||
if not os.path.exists(self.gitdir):
|
||||
os.makedirs(self.gitdir)
|
||||
self.bare_git.init()
|
||||
self.config.SetString('core.bare', None)
|
||||
|
||||
if self.manifest.IsMirror:
|
||||
self.config.SetString('core.bare', 'true')
|
||||
else:
|
||||
self.config.SetString('core.bare', None)
|
||||
|
||||
hooks = self._gitdir_path('hooks')
|
||||
for old_hook in os.listdir(hooks):
|
||||
try:
|
||||
to_rm = os.listdir(hooks)
|
||||
except OSError:
|
||||
to_rm = []
|
||||
for old_hook in to_rm:
|
||||
os.remove(os.path.join(hooks, old_hook))
|
||||
|
||||
# TODO(sop) install custom repo hooks
|
||||
self._InitHooks()
|
||||
|
||||
m = self.manifest.manifestProject.config
|
||||
for key in ['user.name', 'user.email']:
|
||||
if m.Has(key, include_defaults = False):
|
||||
self.config.SetString(key, m.GetString(key))
|
||||
|
||||
def _InitHooks(self):
|
||||
hooks = self._gitdir_path('hooks')
|
||||
if not os.path.exists(hooks):
|
||||
os.makedirs(hooks)
|
||||
for stock_hook in repo_hooks():
|
||||
dst = os.path.join(hooks, os.path.basename(stock_hook))
|
||||
try:
|
||||
os.symlink(relpath(stock_hook, dst), dst)
|
||||
except OSError, e:
|
||||
if e.errno == errno.EEXIST:
|
||||
pass
|
||||
elif e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
raise
|
||||
|
||||
def _InitRemote(self):
|
||||
if self.remote.fetchUrl:
|
||||
remote = self.GetRemote(self.remote.name)
|
||||
@ -856,14 +948,23 @@ class Project(object):
|
||||
url += '/%s.git' % self.name
|
||||
remote.url = url
|
||||
remote.review = self.remote.reviewUrl
|
||||
if remote.projectname is None:
|
||||
remote.projectname = self.name
|
||||
|
||||
remote.ResetFetch()
|
||||
if self.worktree:
|
||||
remote.ResetFetch(mirror=False)
|
||||
else:
|
||||
remote.ResetFetch(mirror=True)
|
||||
remote.Save()
|
||||
|
||||
for r in self.extraRemotes.values():
|
||||
remote = self.GetRemote(r.name)
|
||||
remote.url = r.fetchUrl
|
||||
remote.review = r.reviewUrl
|
||||
if r.projectName:
|
||||
remote.projectname = r.projectName
|
||||
elif remote.projectname is None:
|
||||
remote.projectname = self.name
|
||||
remote.ResetFetch()
|
||||
remote.Save()
|
||||
|
||||
@ -873,31 +974,23 @@ class Project(object):
|
||||
ref = R_M + self.manifest.branch
|
||||
|
||||
if IsId(self.revision):
|
||||
dst = self.revision + '^0',
|
||||
dst = self.revision + '^0'
|
||||
self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
|
||||
else:
|
||||
remote = self.GetRemote(self.remote.name)
|
||||
dst = remote.ToLocal(self.revision)
|
||||
self.bare_git.symbolic_ref('-m', msg, ref, dst)
|
||||
|
||||
def _InitMirrorHead(self):
|
||||
dst = self.GetRemote(self.remote.name).ToLocal(self.revision)
|
||||
msg = 'manifest set to %s' % self.revision
|
||||
self.bare_git.SetHead(dst, message=msg)
|
||||
|
||||
def _InitWorkTree(self):
|
||||
dotgit = os.path.join(self.worktree, '.git')
|
||||
if not os.path.exists(dotgit):
|
||||
os.makedirs(dotgit)
|
||||
|
||||
topdir = os.path.commonprefix([self.gitdir, dotgit])
|
||||
if topdir.endswith('/'):
|
||||
topdir = topdir[:-1]
|
||||
else:
|
||||
topdir = os.path.dirname(topdir)
|
||||
|
||||
tmpdir = dotgit
|
||||
relgit = ''
|
||||
while topdir != tmpdir:
|
||||
relgit += '../'
|
||||
tmpdir = os.path.dirname(tmpdir)
|
||||
relgit += self.gitdir[len(topdir) + 1:]
|
||||
|
||||
for name in ['config',
|
||||
'description',
|
||||
'hooks',
|
||||
@ -908,8 +1001,15 @@ class Project(object):
|
||||
'refs',
|
||||
'rr-cache',
|
||||
'svn']:
|
||||
os.symlink(os.path.join(relgit, name),
|
||||
os.path.join(dotgit, name))
|
||||
try:
|
||||
src = os.path.join(self.gitdir, name)
|
||||
dst = os.path.join(dotgit, name)
|
||||
os.symlink(relpath(src, dst), dst)
|
||||
except OSError, e:
|
||||
if e.errno == errno.EPERM:
|
||||
raise GitError('filesystem must support symlinks')
|
||||
else:
|
||||
raise
|
||||
|
||||
rev = self.GetRemote(self.remote.name).ToLocal(self.revision)
|
||||
rev = self.bare_git.rev_parse('%s^0' % rev)
|
||||
@ -1103,6 +1203,113 @@ class Project(object):
|
||||
return runner
|
||||
|
||||
|
||||
class _PriorSyncFailedError(Exception):
|
||||
def __str__(self):
|
||||
return 'prior sync failed; rebase still in progress'
|
||||
|
||||
class _DirtyError(Exception):
|
||||
def __str__(self):
|
||||
return 'contains uncommitted changes'
|
||||
|
||||
class _InfoMessage(object):
|
||||
def __init__(self, project, text):
|
||||
self.project = project
|
||||
self.text = text
|
||||
|
||||
def Print(self, syncbuf):
|
||||
syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
|
||||
syncbuf.out.nl()
|
||||
|
||||
class _Failure(object):
|
||||
def __init__(self, project, why):
|
||||
self.project = project
|
||||
self.why = why
|
||||
|
||||
def Print(self, syncbuf):
|
||||
syncbuf.out.fail('error: %s/: %s',
|
||||
self.project.relpath,
|
||||
str(self.why))
|
||||
syncbuf.out.nl()
|
||||
|
||||
class _Later(object):
|
||||
def __init__(self, project, action):
|
||||
self.project = project
|
||||
self.action = action
|
||||
|
||||
def Run(self, syncbuf):
|
||||
out = syncbuf.out
|
||||
out.project('project %s/', self.project.relpath)
|
||||
out.nl()
|
||||
try:
|
||||
self.action()
|
||||
out.nl()
|
||||
return True
|
||||
except GitError, e:
|
||||
out.nl()
|
||||
return False
|
||||
|
||||
class _SyncColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'reposync')
|
||||
self.project = self.printer('header', attr = 'bold')
|
||||
self.info = self.printer('info')
|
||||
self.fail = self.printer('fail', fg='red')
|
||||
|
||||
class SyncBuffer(object):
|
||||
def __init__(self, config, detach_head=False):
|
||||
self._messages = []
|
||||
self._failures = []
|
||||
self._later_queue1 = []
|
||||
self._later_queue2 = []
|
||||
|
||||
self.out = _SyncColoring(config)
|
||||
self.out.redirect(sys.stderr)
|
||||
|
||||
self.detach_head = detach_head
|
||||
self.clean = True
|
||||
|
||||
def info(self, project, fmt, *args):
|
||||
self._messages.append(_InfoMessage(project, fmt % args))
|
||||
|
||||
def fail(self, project, err=None):
|
||||
self._failures.append(_Failure(project, err))
|
||||
self.clean = False
|
||||
|
||||
def later1(self, project, what):
|
||||
self._later_queue1.append(_Later(project, what))
|
||||
|
||||
def later2(self, project, what):
|
||||
self._later_queue2.append(_Later(project, what))
|
||||
|
||||
def Finish(self):
|
||||
self._PrintMessages()
|
||||
self._RunLater()
|
||||
self._PrintMessages()
|
||||
return self.clean
|
||||
|
||||
def _RunLater(self):
|
||||
for q in ['_later_queue1', '_later_queue2']:
|
||||
if not self._RunQueue(q):
|
||||
return
|
||||
|
||||
def _RunQueue(self, queue):
|
||||
for m in getattr(self, queue):
|
||||
if not m.Run(self):
|
||||
self.clean = False
|
||||
return False
|
||||
setattr(self, queue, [])
|
||||
return True
|
||||
|
||||
def _PrintMessages(self):
|
||||
for m in self._messages:
|
||||
m.Print(self)
|
||||
for m in self._failures:
|
||||
m.Print(self)
|
||||
|
||||
self._messages = []
|
||||
self._failures = []
|
||||
|
||||
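A hedged sketch of how a sync command might drive SyncBuffer; 'config' and 'projects' are assumed to come from the surrounding manifest machinery:

```python
import sys

syncbuf = SyncBuffer(config, detach_head=False)
for project in projects:
  project.Sync_LocalHalf(syncbuf)   # queues info(), fail() and later1/later2 actions
if not syncbuf.Finish():            # prints messages, then runs the deferred actions
  sys.exit(1)
```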
|
||||
class MetaProject(Project):
|
||||
"""A special project housed under .repo.
|
||||
"""
|
||||
|
remote.py
@ -14,8 +14,12 @@
|
||||
# limitations under the License.
|
||||
|
||||
class Remote(object):
|
||||
def __init__(self, name, fetch=None, review=None):
|
||||
def __init__(self, name,
|
||||
fetch=None,
|
||||
review=None,
|
||||
projectName=None):
|
||||
self.name = name
|
||||
self.fetchUrl = fetch
|
||||
self.reviewUrl = review
|
||||
self.projectName = projectName
|
||||
self.requiredCommits = []
|
||||
|
8
repo
@ -2,7 +2,7 @@
|
||||
|
||||
## repo default configuration
|
||||
##
|
||||
REPO_URL='git://android.kernel.org/tools/repo.git'
|
||||
REPO_URL='git://android.git.kernel.org/tools/repo.git'
|
||||
REPO_REV='stable'
|
||||
|
||||
# Copyright (C) 2008 Google Inc.
|
||||
@ -28,7 +28,7 @@ if __name__ == '__main__':
|
||||
del magic
|
||||
|
||||
# increment this whenever we make important changes to this script
|
||||
VERSION = (1, 5)
|
||||
VERSION = (1, 8)
|
||||
|
||||
# increment this if the MAINTAINER_KEYS block is modified
|
||||
KEYRING_VERSION = (1,0)
|
||||
@ -115,6 +115,9 @@ group.add_option('-b', '--manifest-branch',
|
||||
group.add_option('-m', '--manifest-name',
|
||||
dest='manifest_name',
|
||||
help='initial manifest file', metavar='NAME.xml')
|
||||
group.add_option('--mirror',
|
||||
dest='mirror', action='store_true',
|
||||
help='mirror the forest')
|
||||
|
||||
# Tool
|
||||
group = init_optparse.add_option_group('Version options')
|
||||
@ -202,6 +205,7 @@ def _CheckGitVersion():
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
ver_str = proc.stdout.read().strip()
|
||||
proc.stdout.close()
|
||||
proc.wait()
|
||||
|
||||
if not ver_str.startswith('git version '):
|
||||
print >>sys.stderr, 'error: "%s" unsupported' % ver_str
|
||||
|
42
subcmds/abandon.py
Normal file
@ -0,0 +1,42 @@
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import git
|
||||
|
||||
class Abandon(Command):
|
||||
common = True
|
||||
helpSummary = "Permanently abandon a development branch"
|
||||
helpUsage = """
|
||||
%prog <branchname> [<project>...]
|
||||
|
||||
This subcommand permanently abandons a development branch by
|
||||
deleting it (and all its history) from your local repository.
|
||||
|
||||
It is equivalent to "git branch -D <branchname>".
|
||||
"""
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
nb = args[0]
|
||||
if not git.check_ref_format('heads/%s' % nb):
|
||||
print >>sys.stderr, "error: '%s' is not a valid name" % nb
|
||||
sys.exit(1)
|
||||
|
||||
for project in self.GetProjects(args[1:]):
|
||||
project.AbandonBranch(nb)
|
150
subcmds/branches.py
Normal file
@ -0,0 +1,150 @@
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
|
||||
class BranchColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'branch')
|
||||
self.current = self.printer('current', fg='green')
|
||||
self.local = self.printer('local')
|
||||
self.notinproject = self.printer('notinproject', fg='red')
|
||||
|
||||
class BranchInfo(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.current = 0
|
||||
self.published = 0
|
||||
self.published_equal = 0
|
||||
self.projects = []
|
||||
|
||||
def add(self, b):
|
||||
if b.current:
|
||||
self.current += 1
|
||||
if b.published:
|
||||
self.published += 1
|
||||
if b.revision == b.published:
|
||||
self.published_equal += 1
|
||||
self.projects.append(b)
|
||||
|
||||
@property
|
||||
def IsCurrent(self):
|
||||
return self.current > 0
|
||||
|
||||
@property
|
||||
def IsPublished(self):
|
||||
return self.published > 0
|
||||
|
||||
@property
|
||||
def IsPublishedEqual(self):
|
||||
return self.published_equal == len(self.projects)
|
||||
|
||||
|
||||
class Branches(Command):
|
||||
common = True
|
||||
helpSummary = "View current topic branches"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
|
||||
Summarizes the currently available topic branches.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-a', '--all',
|
||||
dest='all', action='store_true',
|
||||
help='show all branches, not just the majority')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all = {}
|
||||
project_cnt = len(projects)
|
||||
|
||||
for project in projects:
|
||||
for name, b in project.GetBranches().iteritems():
|
||||
b.project = project
|
||||
if name not in all:
|
||||
all[name] = BranchInfo(name)
|
||||
all[name].add(b)
|
||||
|
||||
names = all.keys()
|
||||
names.sort()
|
||||
|
||||
if not opt.all and not args:
|
||||
# No -a and no specific projects listed; try to filter the
|
||||
# results down to only the majority of projects.
|
||||
#
|
||||
n = []
|
||||
for name in names:
|
||||
i = all[name]
|
||||
if i.IsCurrent \
|
||||
or 80 <= (100 * len(i.projects)) / project_cnt:
|
||||
n.append(name)
|
||||
names = n
|
||||
|
||||
width = 25
|
||||
for name in names:
|
||||
if width < len(name):
|
||||
width = len(name)
|
||||
|
||||
for name in names:
|
||||
i = all[name]
|
||||
in_cnt = len(i.projects)
|
||||
|
||||
if i.IsCurrent:
|
||||
current = '*'
|
||||
hdr = out.current
|
||||
else:
|
||||
current = ' '
|
||||
hdr = out.local
|
||||
|
||||
if i.IsPublishedEqual:
|
||||
published = 'P'
|
||||
elif i.IsPublished:
|
||||
published = 'p'
|
||||
else:
|
||||
published = ' '
|
||||
|
||||
hdr('%c%c %-*s' % (current, published, width, name))
|
||||
out.write(' |')
|
||||
|
||||
if in_cnt < project_cnt and (in_cnt == 1 or opt.all):
|
||||
fmt = out.write
|
||||
paths = []
|
||||
if in_cnt < project_cnt - in_cnt:
|
||||
type = 'in'
|
||||
for b in i.projects:
|
||||
paths.append(b.project.relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
type = 'not in'
|
||||
have = set()
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
paths.append(p.relpath)
|
||||
|
||||
s = ' %s %s' % (type, ', '.join(paths))
|
||||
if width + 7 + len(s) < 80:
|
||||
fmt(s)
|
||||
else:
|
||||
out.nl()
|
||||
fmt(' %s:' % type)
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(' %s' % p)
|
||||
out.nl()
|
47
subcmds/checkout.py
Normal file
@ -0,0 +1,47 @@
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from command import Command

class Checkout(Command):
  common = True
  helpSummary = "Checkout a branch for development"
  helpUsage = """
%prog <branchname> [<project>...]
"""
  helpDescription = """
The '%prog' command checks out an existing branch that was previously
created by 'repo start'.

The command is equivalent to:

  repo forall [<project>...] -c git checkout <branchname>
"""

  def Execute(self, opt, args):
    if not args:
      self.Usage()

    retValue = 0;

    branch = args[0]
    for project in self.GetProjects(args[1:]):
      if not project.CheckoutBranch(branch):
        retValue = 1;
        print >>sys.stderr, "error: checking out branch '%s' in %s failed" % (branch, project.name)

    if (retValue != 0):
      sys.exit(retValue);
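For example, a hypothetical invocation of the new command (branch and project names invented here for illustration, not taken from the change) would be:

  repo checkout my-topic platform/build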
@@ -1,169 +0,0 @@
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import tempfile

from command import Command
from error import GitError, NoSuchProjectError
from git_config import IsId
from import_tar import ImportTar
from import_zip import ImportZip
from project import Project
from remote import Remote

def _ToCommit(project, rev):
  return project.bare_git.rev_parse('--verify', '%s^0' % rev)

def _Missing(project, rev):
  return project._revlist('--objects', rev, '--not', '--all')


class ComputeSnapshotCheck(Command):
  common = False
  helpSummary = "Compute the check value for a new snapshot"
  helpUsage = """
%prog -p NAME -v VERSION -s FILE [options]
"""
  helpDescription = """
%prog computes and then displays the proper check value for a
snapshot, so it can be pasted into the manifest file for a project.
"""

  def _Options(self, p):
    g = p.add_option_group('Snapshot description options')
    g.add_option('-p', '--project',
                 dest='project', metavar='NAME',
                 help='destination project name')
    g.add_option('-v', '--version',
                 dest='version', metavar='VERSION',
                 help='upstream version/revision identifier')
    g.add_option('-s', '--snapshot',
                 dest='snapshot', metavar='PATH',
                 help='local tarball path')
    g.add_option('--new-project',
                 dest='new_project', action='store_true',
                 help='destinition is a new project')
    g.add_option('--keep',
                 dest='keep_git', action='store_true',
                 help='keep the temporary git repository')

    g = p.add_option_group('Base revision grafting options')
    g.add_option('--prior',
                 dest='prior', metavar='COMMIT',
                 help='prior revision checksum')

    g = p.add_option_group('Path mangling options')
    g.add_option('--strip-prefix',
                 dest='strip_prefix', metavar='PREFIX',
                 help='remove prefix from all paths on import')
    g.add_option('--insert-prefix',
                 dest='insert_prefix', metavar='PREFIX',
                 help='insert prefix before all paths on import')


  def _Compute(self, opt):
    try:
      real_project = self.GetProjects([opt.project])[0]
    except NoSuchProjectError:
      if opt.new_project:
        print >>sys.stderr, \
          "warning: project '%s' does not exist" % opt.project
      else:
        raise NoSuchProjectError(opt.project)

    self._tmpdir = tempfile.mkdtemp()
    project = Project(manifest = self.manifest,
                      name = opt.project,
                      remote = Remote('origin'),
                      gitdir = os.path.join(self._tmpdir, '.git'),
                      worktree = self._tmpdir,
                      relpath = opt.project,
                      revision = 'refs/heads/master')
    project._InitGitDir()

    url = 'file://%s' % os.path.abspath(opt.snapshot)

    imp = None
    for cls in [ImportTar, ImportZip]:
      if cls.CanAccept(url):
        imp = cls()
        break
    if not imp:
      print >>sys.stderr, 'error: %s unsupported' % opt.snapshot
      sys.exit(1)

    imp.SetProject(project)
    imp.SetVersion(opt.version)
    imp.AddUrl(url)

    if opt.prior:
      if opt.new_project:
        if not IsId(opt.prior):
          print >>sys.stderr, 'error: --prior=%s not valid' % opt.prior
          sys.exit(1)
      else:
        try:
          opt.prior = _ToCommit(real_project, opt.prior)
          missing = _Missing(real_project, opt.prior)
        except GitError, e:
          print >>sys.stderr,\
            'error: --prior=%s not valid\n%s' \
            % (opt.prior, e)
          sys.exit(1)
        if missing:
          print >>sys.stderr,\
            'error: --prior=%s is valid, but is not reachable' \
            % opt.prior
          sys.exit(1)
      imp.SetParent(opt.prior)

    src = opt.strip_prefix
    dst = opt.insert_prefix
    if src or dst:
      if src is None:
        src = ''
      if dst is None:
        dst = ''
      imp.RemapPath(src, dst)
    commitId = imp.Import()

    print >>sys.stderr,"%s\t%s" % (commitId, imp.version)
    return project

  def Execute(self, opt, args):
    if args \
       or not opt.project \
       or not opt.version \
       or not opt.snapshot:
      self.Usage()

    success = False
    project = None
    try:
      self._tmpdir = None
      project = self._Compute(opt)
    finally:
      if project and opt.keep_git:
        print 'GIT_DIR = %s' % (project.gitdir)
      elif self._tmpdir:
        for root, dirs, files in os.walk(self._tmpdir, topdown=False):
          for name in files:
            os.remove(os.path.join(root, name))
          for name in dirs:
            os.rmdir(os.path.join(root, name))
        os.rmdir(self._tmpdir)
@@ -17,9 +17,9 @@ import re
import os
import sys
import subprocess
from command import Command
from command import Command, MirrorSafeCommand

class Forall(Command):
class Forall(Command, MirrorSafeCommand):
  common = False
  helpSummary = "Run a shell command in each project"
  helpUsage = """
@@ -30,10 +30,23 @@ Executes the same shell command in each project.

Environment
-----------
pwd is the project's working directory.

pwd is the project's working directory. If the current client is
a mirror client, then pwd is the Git repository.

REPO_PROJECT is set to the unique name of the project.

REPO_PATH is the path relative the the root of the client.

REPO_REMOTE is the name of the remote system from the manifest.

REPO_LREV is the name of the revision from the manifest, translated
to a local tracking branch. If you need to pass the manifest
revision to a locally executed git command, use REPO_LREV.

REPO_RREV is the name of the revision from the manifest, exactly
as written in the manifest.

shell positional arguments ($1, $2, .., $#) are set to any arguments
following <command>.

@@ -66,13 +79,31 @@ not redirected.
    cmd.append(cmd[0])
    cmd.extend(opt.command[1:])

    mirror = self.manifest.IsMirror
    rc = 0
    for project in self.GetProjects(args):
      env = dict(os.environ.iteritems())
      env['REPO_PROJECT'] = project.name
      def setenv(name, val):
        if val is None:
          val = ''
        env[name] = val

      setenv('REPO_PROJECT', project.name)
      setenv('REPO_PATH', project.relpath)
      setenv('REPO_REMOTE', project.remote.name)
      setenv('REPO_LREV', project\
                          .GetRemote(project.remote.name)\
                          .ToLocal(project.revision))
      setenv('REPO_RREV', project.revision)

      if mirror:
        setenv('GIT_DIR', project.gitdir)
        cwd = project.gitdir
      else:
        cwd = project.worktree

      p = subprocess.Popen(cmd,
                           cwd = project.worktree,
                           cwd = cwd,
                           shell = shell,
                           env = env)
      r = p.wait()
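As an illustration only (this invocation is not part of the change itself), the environment variables described above could be inspected in every project with something like:

  repo forall -c 'echo $REPO_PROJECT is at $REPO_RREV'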
243  subcmds/grep.py  Normal file
@@ -0,0 +1,243 @@
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from optparse import SUPPRESS_HELP
from color import Coloring
from command import PagedCommand
from git_command import GitCommand

class GrepColoring(Coloring):
  def __init__(self, config):
    Coloring.__init__(self, config, 'grep')
    self.project = self.printer('project', attr='bold')

class Grep(PagedCommand):
  common = True
  helpSummary = "Print lines matching a pattern"
  helpUsage = """
%prog {pattern | -e pattern} [<project>...]
"""
  helpDescription = """
Search for the specified patterns in all project files.

Options
-------

The following options can appear as often as necessary to express
the pattern to locate:

 -e PATTERN
 --and, --or, --not, -(, -)

Further, the -r/--revision option may be specified multiple times
in order to scan multiple trees. If the same file matches in more
than one tree, only the first result is reported, prefixed by the
revision name it was found under.

Examples
-------

Look for a line that has '#define' and either 'MAX_PATH or 'PATH_MAX':

  repo grep -e '#define' --and -\( -e MAX_PATH -e PATH_MAX \)

Look for a line that has 'NODE' or 'Unexpected' in files that
contain a line that matches both expressions:

  repo grep --all-match -e NODE -e Unexpected

"""

  def _Options(self, p):
    def carry(option,
              opt_str,
              value,
              parser):
      pt = getattr(parser.values, 'cmd_argv', None)
      if pt is None:
        pt = []
        setattr(parser.values, 'cmd_argv', pt)

      if opt_str == '-(':
        pt.append('(')
      elif opt_str == '-)':
        pt.append(')')
      else:
        pt.append(opt_str)

      if value is not None:
        pt.append(value)

    g = p.add_option_group('Sources')
    g.add_option('--cached',
                 action='callback', callback=carry,
                 help='Search the index, instead of the work tree')
    g.add_option('-r','--revision',
                 dest='revision', action='append', metavar='TREEish',
                 help='Search TREEish, instead of the work tree')

    g = p.add_option_group('Pattern')
    g.add_option('-e',
                 action='callback', callback=carry,
                 metavar='PATTERN', type='str',
                 help='Pattern to search for')
    g.add_option('-i', '--ignore-case',
                 action='callback', callback=carry,
                 help='Ignore case differences')
    g.add_option('-a','--text',
                 action='callback', callback=carry,
                 help="Process binary files as if they were text")
    g.add_option('-I',
                 action='callback', callback=carry,
                 help="Don't match the pattern in binary files")
    g.add_option('-w', '--word-regexp',
                 action='callback', callback=carry,
                 help='Match the pattern only at word boundaries')
    g.add_option('-v', '--invert-match',
                 action='callback', callback=carry,
                 help='Select non-matching lines')
    g.add_option('-G', '--basic-regexp',
                 action='callback', callback=carry,
                 help='Use POSIX basic regexp for patterns (default)')
    g.add_option('-E', '--extended-regexp',
                 action='callback', callback=carry,
                 help='Use POSIX extended regexp for patterns')
    g.add_option('-F', '--fixed-strings',
                 action='callback', callback=carry,
                 help='Use fixed strings (not regexp) for pattern')

    g = p.add_option_group('Pattern Grouping')
    g.add_option('--all-match',
                 action='callback', callback=carry,
                 help='Limit match to lines that have all patterns')
    g.add_option('--and', '--or', '--not',
                 action='callback', callback=carry,
                 help='Boolean operators to combine patterns')
    g.add_option('-(','-)',
                 action='callback', callback=carry,
                 help='Boolean operator grouping')

    g = p.add_option_group('Output')
    g.add_option('-n',
                 action='callback', callback=carry,
                 help='Prefix the line number to matching lines')
    g.add_option('-C',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines around match')
    g.add_option('-B',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines before match')
    g.add_option('-A',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines after match')
    g.add_option('-l','--name-only','--files-with-matches',
                 action='callback', callback=carry,
                 help='Show only file names containing matching lines')
    g.add_option('-L','--files-without-match',
                 action='callback', callback=carry,
                 help='Show only file names not containing matching lines')


  def Execute(self, opt, args):
    out = GrepColoring(self.manifest.manifestProject.config)

    cmd_argv = ['grep']
    if out.is_on:
      cmd_argv.append('--color')
    cmd_argv.extend(getattr(opt,'cmd_argv',[]))

    if '-e' not in cmd_argv:
      if not args:
        self.Usage()
      cmd_argv.append('-e')
      cmd_argv.append(args[0])
      args = args[1:]

    projects = self.GetProjects(args)

    full_name = False
    if len(projects) > 1:
      cmd_argv.append('--full-name')
      full_name = True

    have_rev = False
    if opt.revision:
      if '--cached' in cmd_argv:
        print >>sys.stderr,\
          'fatal: cannot combine --cached and --revision'
        sys.exit(1)
      have_rev = True
      cmd_argv.extend(opt.revision)
    cmd_argv.append('--')

    bad_rev = False
    have_match = False

    for project in projects:
      p = GitCommand(project,
                     cmd_argv,
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      if p.Wait() != 0:
        # no results
        #
        if p.stderr:
          if have_rev and 'fatal: ambiguous argument' in p.stderr:
            bad_rev = True
          else:
            out.project('--- project %s ---' % project.relpath)
            out.nl()
            out.write(p.stderr)
            out.nl()
        continue
      have_match = True

      # We cut the last element, to avoid a blank line.
      #
      r = p.stdout.split('\n')
      r = r[0:-1]

      if have_rev and full_name:
        for line in r:
          rev, line = line.split(':', 1)
          out.write(rev)
          out.write(':')
          out.project(project.relpath)
          out.write('/')
          out.write(line)
          out.nl()
      elif full_name:
        for line in r:
          out.project(project.relpath)
          out.write('/')
          out.write(line)
          out.nl()
      else:
        for line in r:
          print line

    if have_match:
      sys.exit(0)
    elif have_rev and bad_rev:
      for r in opt.revision:
        print >>sys.stderr, "error: can't search revision %s" % r
      sys.exit(1)
    else:
      sys.exit(1)
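For instance (a hypothetical invocation, with the tag name invented here rather than taken from the change), searching a tagged tree instead of the work tree might look like:

  repo grep -r v1.0 -e PATH_MAX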
@@ -13,13 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import sys
from formatter import AbstractFormatter, DumbWriter

from color import Coloring
from command import PagedCommand
from command import PagedCommand, MirrorSafeCommand

class Help(PagedCommand):
class Help(PagedCommand, MirrorSafeCommand):
  common = False
  helpSummary = "Display detailed help on a command"
  helpUsage = """
@@ -77,6 +78,7 @@ The most commonly used repo commands are:
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
See 'repo help --all' for a complete list of recognized commands.
"""

  def _PrintCommandHelp(self, cmd):
@@ -105,14 +107,24 @@ See 'repo help <command>' for more information on a specific command.
    body = body.strip()
    body = body.replace('%prog', me)

    asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n(={2,}|-{2,})$')
    for para in body.split("\n\n"):
      if para.startswith(' '):
        self.write('%s', para)
        self.nl()
        self.nl()
      else:
        self.wrap.add_flowing_data(para)
        self.wrap.end_paragraph(1)
        continue

      m = asciidoc_hdr.match(para)
      if m:
        self.heading('%s', m.group(1))
        self.nl()
        self.heading('%s', ''.ljust(len(m.group(1)),'-'))
        self.nl()
        continue

      self.wrap.add_flowing_data(para)
      self.wrap.end_paragraph(1)
    self.wrap.end_paragraph(0)

    out = _Out(self.manifest.globalConfig)
@@ -17,12 +17,13 @@ import os
import sys

from color import Coloring
from command import InteractiveCommand
from command import InteractiveCommand, MirrorSafeCommand
from error import ManifestParseError
from remote import Remote
from project import SyncBuffer
from git_command import git, MIN_GIT_VERSION

class Init(InteractiveCommand):
class Init(InteractiveCommand, MirrorSafeCommand):
  common = True
  helpSummary = "Initialize repo in the current directory"
  helpUsage = """
@@ -57,6 +58,10 @@ default.xml will be used.
    g.add_option('-m', '--manifest-name',
                 dest='manifest_name', default='default.xml',
                 help='initial manifest file', metavar='NAME.xml')
    g.add_option('--mirror',
                 dest='mirror', action='store_true',
                 help='mirror the forrest')


    # Tool
    g = p.add_option_group('Version options')
@@ -85,8 +90,9 @@ default.xml will be used.

  def _SyncManifest(self, opt):
    m = self.manifest.manifestProject
    is_new = not m.Exists

    if not m.Exists:
    if is_new:
      if not opt.manifest_url:
        print >>sys.stderr, 'fatal: manifest url (-u) is required.'
        sys.exit(1)
@@ -112,9 +118,26 @@ default.xml will be used.
    r.ResetFetch()
    r.Save()

    m.Sync_NetworkHalf()
    m.Sync_LocalHalf()
    m.StartBranch('default')
    if opt.mirror:
      if is_new:
        m.config.SetString('repo.mirror', 'true')
      else:
        print >>sys.stderr, 'fatal: --mirror not supported on existing client'
        sys.exit(1)

    if not m.Sync_NetworkHalf():
      r = m.GetRemote(m.remote.name)
      print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
      sys.exit(1)

    syncbuf = SyncBuffer(m.config)
    m.Sync_LocalHalf(syncbuf)
    syncbuf.Finish()

    if is_new or m.CurrentBranch is None:
      if not m.StartBranch('default'):
        print >>sys.stderr, 'fatal: cannot create default in manifest'
        sys.exit(1)

  def _LinkManifest(self, name):
    if not name:
@@ -185,9 +208,14 @@ default.xml will be used.
    self._SyncManifest(opt)
    self._LinkManifest(opt.manifest_name)

    if os.isatty(0) and os.isatty(1):
    if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
      self._ConfigureUser()
      self._ConfigureColor()

    if self.manifest.IsMirror:
      type = 'mirror '
    else:
      type = ''

    print ''
    print 'repo initialized in %s' % self.manifest.topdir
    print 'repo %sinitialized in %s' % (type, self.manifest.topdir)
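A hypothetical first-time use of the new option (manifest URL invented here for illustration) could be:

  repo init -u git://example.com/platform/manifest.git --mirror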
77  subcmds/manifest.py  Normal file
@@ -0,0 +1,77 @@
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

from command import PagedCommand

class Manifest(PagedCommand):
  common = False
  helpSummary = "Manifest inspection utility"
  helpUsage = """
%prog [-o {-|NAME.xml} [-r]]
"""
  _helpDescription = """

With the -o option, exports the current manifest for inspection.
The manifest and (if present) local_manifest.xml are combined
together to produce a single manifest file. This file can be stored
in a Git repository for use during future 'repo init' invocations.

"""

  @property
  def helpDescription(self):
    help = self._helpDescription + '\n'
    r = os.path.dirname(__file__)
    r = os.path.dirname(r)
    fd = open(os.path.join(r, 'docs', 'manifest-format.txt'))
    for line in fd:
      help += line
    fd.close()
    return help

  def _Options(self, p):
    p.add_option('-r', '--revision-as-HEAD',
                 dest='peg_rev', action='store_true',
                 help='Save revisions as current HEAD')
    p.add_option('-o', '--output-file',
                 dest='output_file',
                 help='File to save the manifest to',
                 metavar='-|NAME.xml')

  def _Output(self, opt):
    if opt.output_file == '-':
      fd = sys.stdout
    else:
      fd = open(opt.output_file, 'w')
    self.manifest.Save(fd,
                       peg_rev = opt.peg_rev)
    fd.close()
    if opt.output_file != '-':
      print >>sys.stderr, 'Saved manifest to %s' % opt.output_file

  def Execute(self, opt, args):
    if args:
      self.Usage()

    if opt.output_file is not None:
      self._Output(opt)
      return

    print >>sys.stderr, 'error: no operation to perform'
    print >>sys.stderr, 'error: see repo help manifest'
    sys.exit(1)
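For example (output file name invented here for illustration), a snapshot of the current manifest with every project pinned to its current HEAD could be written with:

  repo manifest -r -o snapshot.xml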
59  subcmds/selfupdate.py  Normal file
@@ -0,0 +1,59 @@
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from optparse import SUPPRESS_HELP
import sys

from command import Command, MirrorSafeCommand
from subcmds.sync import _PostRepoUpgrade
from subcmds.sync import _PostRepoFetch

class Selfupdate(Command, MirrorSafeCommand):
  common = False
  helpSummary = "Update repo to the latest version"
  helpUsage = """
%prog
"""
  helpDescription = """
The '%prog' command upgrades repo to the latest version, if a
newer version is available.

Normally this is done automatically by 'repo sync' and does not
need to be performed by an end-user.
"""

  def _Options(self, p):
    p.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    p.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)

  def Execute(self, opt, args):
    rp = self.manifest.repoProject
    rp.PreSync()

    if opt.repo_upgraded:
      _PostRepoUpgrade(self.manifest)

    else:
      if not rp.Sync_NetworkHalf():
        print >>sys.stderr, "error: can't update repo"
        sys.exit(1)

      _PostRepoFetch(rp,
                     no_repo_verify = opt.no_repo_verify,
                     verbose = True)
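Run on its own, no arguments are needed; the command described above would simply be invoked as:

  repo selfupdate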
@@ -47,5 +47,13 @@ the configuration data is set up properly.
      print >>sys.stderr, "error: '%s' is not a valid name" % nb
      sys.exit(1)

    err = []
    for project in self.GetProjects(args[1:]):
      project.StartBranch(nb)
      if not project.StartBranch(nb):
        err.append(project)

    if err:
      err.sort()
      for p in err:
        print >>sys.stderr, "error: cannot start in %s" % p.relpath
      sys.exit(1)
@@ -20,8 +20,53 @@ class Status(PagedCommand):
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
'%prog' compares the working tree to the staging area (aka index),
and the most recent commit on this branch (HEAD), in each project
specified. A summary is displayed, one line per file where there
is a difference between these three states.

Status Display
--------------

The status display is organized into three columns of information,
for example if the file 'subcmds/status.py' is modified in the
project 'repo' on branch 'devwork':

  project repo/                                   branch devwork
   -m     subcmds/status.py

The first column explains how the staging area (index) differs from
the last commit (HEAD). Its values are always displayed in upper
case and have the following meanings:

 -:  no difference
 A:  added         (not in HEAD,     in index                     )
 M:  modified      (    in HEAD,     in index, different content  )
 D:  deleted       (    in HEAD, not in index                     )
 R:  renamed       (not in HEAD,     in index, path changed       )
 C:  copied        (not in HEAD,     in index, copied from another)
 T:  mode changed  (    in HEAD,     in index, same content       )
 U:  unmerged; conflict resolution required

The second column explains how the working directory differs from
the index. Its values are always displayed in lower case and have
the following meanings:

 -:  new / unknown (not in index,     in work tree                )
 m:  modified      (    in index,     in work tree, modified      )
 d:  deleted       (    in index, not in work tree                )

"""

  def Execute(self, opt, args):
    for project in self.GetProjects(args):
      project.PrintWorkTreeStatus()
    all = self.GetProjects(args)
    clean = 0

    for project in all:
      state = project.PrintWorkTreeStatus()
      if state == 'CLEAN':
        clean += 1
    if len(all) == clean:
      print 'nothing to commit (working directory clean)'
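With the change above, a run in which every project reports a clean work tree would end with the single summary line printed by the new code:

  nothing to commit (working directory clean)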
113  subcmds/sync.py
@@ -13,17 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from optparse import SUPPRESS_HELP
import os
import re
import subprocess
import sys

from git_command import GIT
from command import Command
from project import HEAD
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError
from project import R_HEADS
from project import SyncBuffer
from progress import Progress

class Sync(Command):
class Sync(Command, MirrorSafeCommand):
  common = True
  helpSummary = "Update working tree to the latest revision"
  helpUsage = """
@@ -43,61 +47,120 @@ line. Projects can be specified either by name, or by a relative
or absolute path to the project's local directory. If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.

The -d/--detach option can be used to switch specified projects
back to the manifest revision. This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.
"""

  def _Options(self, p):
    p.add_option('-l','--local-only',
                 dest='local_only', action='store_true',
                 help="only update working tree, don't fetch")
    p.add_option('-n','--network-only',
                 dest='network_only', action='store_true',
                 help="fetch only, don't update working tree")
    p.add_option('-d','--detach',
                 dest='detach_head', action='store_true',
                 help='detach projects back to manifest revision')

    p.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    p.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)

  def _Fetch(self, *projects):
    fetched = set()
    pm = Progress('Fetching projects', len(projects))
    for project in projects:
      pm.update()

      if project.Sync_NetworkHalf():
        fetched.add(project.gitdir)
      else:
        print >>sys.stderr, 'error: Cannot fetch %s' % project.name
        sys.exit(1)
    pm.end()
    return fetched

  def Execute(self, opt, args):
    if opt.network_only and opt.detach_head:
      print >>sys.stderr, 'error: cannot combine -n and -d'
      sys.exit(1)
    if opt.network_only and opt.local_only:
      print >>sys.stderr, 'error: cannot combine -n and -l'
      sys.exit(1)

    rp = self.manifest.repoProject
    rp.PreSync()

    mp = self.manifest.manifestProject
    mp.PreSync()

    if opt.repo_upgraded:
      _PostRepoUpgrade(self.manifest)

    all = self.GetProjects(args, missing_ok=True)
    fetched = self._Fetch(rp, mp, *all)

    if rp.HasChanges:
      print >>sys.stderr, 'info: A new version of repo is available'
      print >>sys.stderr, ''
      if opt.no_repo_verify or _VerifyTag(rp):
        if not rp.Sync_LocalHalf():
    if not opt.local_only:
      fetched = self._Fetch(rp, mp, *all)
      _PostRepoFetch(rp, opt.no_repo_verify)
      if opt.network_only:
        # bail out now; the rest touches the working tree
        return

    if mp.HasChanges:
      syncbuf = SyncBuffer(mp.config)
      mp.Sync_LocalHalf(syncbuf)
      if not syncbuf.Finish():
        sys.exit(1)
        print >>sys.stderr, 'info: Restarting repo with latest version'
        raise RepoChangedException()
      else:
        print >>sys.stderr, 'warning: Skipped upgrade to unverified version'

    if mp.HasChanges:
      if not mp.Sync_LocalHalf():
        sys.exit(1)

      self.manifest._Unload()
      all = self.GetProjects(args, missing_ok=True)
      missing = []
      for project in all:
        if project.gitdir not in fetched:
          missing.append(project)
      self._Fetch(*missing)
      self.manifest._Unload()
      all = self.GetProjects(args, missing_ok=True)
      missing = []
      for project in all:
        if project.gitdir not in fetched:
          missing.append(project)
      self._Fetch(*missing)

    syncbuf = SyncBuffer(mp.config,
                         detach_head = opt.detach_head)
    pm = Progress('Syncing work tree', len(all))
    for project in all:
      if not project.Sync_LocalHalf():
        sys.exit(1)
      pm.update()
      if project.worktree:
        project.Sync_LocalHalf(syncbuf)
    pm.end()
    print >>sys.stderr
    if not syncbuf.Finish():
      sys.exit(1)


def _PostRepoUpgrade(manifest):
  for project in manifest.projects.values():
    if project.Exists:
      project.PostRepoUpgrade()

def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
  if rp.HasChanges:
    print >>sys.stderr, 'info: A new version of repo is available'
    print >>sys.stderr, ''
    if no_repo_verify or _VerifyTag(rp):
      syncbuf = SyncBuffer(rp.config)
      rp.Sync_LocalHalf(syncbuf)
      if not syncbuf.Finish():
        sys.exit(1)
      print >>sys.stderr, 'info: Restarting repo with latest version'
      raise RepoChangedException(['--repo-upgraded'])
    else:
      print >>sys.stderr, 'warning: Skipped upgrade to unverified version'
  else:
    if verbose:
      print >>sys.stderr, 'repo version %s is current' % rp.work_git.describe(HEAD)

def _VerifyTag(project):
  gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
  if not os.path.exists(gpg_dir):
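To illustrate the new network/local split (this pair of invocations is an example, not part of the change itself), one could fetch while connected and update the work tree later:

  repo sync -n
  repo sync -l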
@@ -25,11 +25,17 @@ def _die(fmt, *args):
  print >>sys.stderr, 'error: %s' % msg
  sys.exit(1)

def _SplitEmails(values):
  result = []
  for str in values:
    result.extend([s.strip() for s in str.split(',')])
  return result

class Upload(InteractiveCommand):
  common = True
  helpSummary = "Upload changes for code review"
  helpUsage="""
%prog [<project>]...
%prog [--re --cc] {[<project>]... | --replace <project>}
"""
  helpDescription = """
The '%prog' command is used to send changes to the Gerrit code
@@ -44,9 +50,31 @@ at the command line. Projects can be specified either by name, or
by a relative or absolute path to the project's local directory. If
no projects are specified, '%prog' will search for uploadable
changes in all projects listed in the manifest.

If the --reviewers or --cc options are passed, those emails are
added to the respective list of users, and emails are sent to any
new users. Users passed to --reviewers must be already registered
with the code review system, or the upload will fail.

If the --replace option is passed the user can designate which
existing change(s) in Gerrit match up to the commits in the branch
being uploaded. For each matched pair of change,commit the commit
will be added as a new patch set, completely replacing the set of
files and description associated with the change in Gerrit.
"""

  def _SingleBranch(self, branch):
  def _Options(self, p):
    p.add_option('--replace',
                 dest='replace', action='store_true',
                 help='Upload replacement patchesets from this branch')
    p.add_option('--re', '--reviewers',
                 type='string', action='append', dest='reviewers',
                 help='Request reviews from these people.')
    p.add_option('--cc',
                 type='string', action='append', dest='cc',
                 help='Also send email to these email addresses.')

  def _SingleBranch(self, branch, people):
    project = branch.project
    name = branch.name
    date = branch.date
@@ -64,11 +92,11 @@ changes in all projects listed in the manifest.
    sys.stdout.write('(y/n)? ')
    answer = sys.stdin.readline().strip()
    if answer in ('y', 'Y', 'yes', '1', 'true', 't'):
      self._UploadAndReport([branch])
      self._UploadAndReport([branch], people)
    else:
      _die("upload aborted by user")

  def _MultipleBranches(self, pending):
  def _MultipleBranches(self, pending, people):
    projects = {}
    branches = {}

@@ -127,13 +155,62 @@ changes in all projects listed in the manifest.
        todo.append(branch)
      if not todo:
        _die("nothing uncommented for upload")
      self._UploadAndReport(todo)
      self._UploadAndReport(todo, people)

  def _UploadAndReport(self, todo):
  def _ReplaceBranch(self, project, people):
    branch = project.CurrentBranch
    if not branch:
      print >>sys.stdout, "no branches ready for upload"
      return
    branch = project.GetUploadableBranch(branch)
    if not branch:
      print >>sys.stdout, "no branches ready for upload"
      return

    script = []
    script.append('# Replacing from branch %s' % branch.name)
    for commit in branch.commits:
      script.append('[ ] %s' % commit)
    script.append('')
    script.append('# Insert change numbers in the brackets to add a new patch set.')
    script.append('# To create a new change record, leave the brackets empty.')

    script = Editor.EditString("\n".join(script)).split("\n")

    change_re = re.compile(r'^\[\s*(\d{1,})\s*\]\s*([0-9a-f]{1,}) .*$')
    to_replace = dict()
    full_hashes = branch.unabbrev_commits

    for line in script:
      m = change_re.match(line)
      if m:
        c = m.group(1)
        f = m.group(2)
        try:
          f = full_hashes[f]
        except KeyError:
          print 'fh = %s' % full_hashes
          print >>sys.stderr, "error: commit %s not found" % f
          sys.exit(1)
        if c in to_replace:
          print >>sys.stderr,\
            "error: change %s cannot accept multiple commits" % c
          sys.exit(1)
        to_replace[c] = f

    if not to_replace:
      print >>sys.stderr, "error: no replacements specified"
      print >>sys.stderr, " use 'repo upload' without --replace"
      sys.exit(1)

    branch.replace_changes = to_replace
    self._UploadAndReport([branch], people)

  def _UploadAndReport(self, todo, people):
    have_errors = False
    for branch in todo:
      try:
        branch.UploadForReview()
        branch.UploadForReview(people)
        branch.uploaded = True
      except UploadError, e:
        branch.error = e
@@ -157,9 +234,6 @@ changes in all projects listed in the manifest.
        print >>sys.stderr, '[OK ] %-15s %s' % (
               branch.project.relpath + '/',
               branch.name)
        print >>sys.stderr, '%s' % branch.tip_url
        print >>sys.stderr, '(as %s)' % branch.owner_email
      print >>sys.stderr, ''

    if have_errors:
      sys.exit(1)
@@ -167,6 +241,22 @@ changes in all projects listed in the manifest.
  def Execute(self, opt, args):
    project_list = self.GetProjects(args)
    pending = []
    reviewers = []
    cc = []

    if opt.reviewers:
      reviewers = _SplitEmails(opt.reviewers)
    if opt.cc:
      cc = _SplitEmails(opt.cc)
    people = (reviewers,cc)

    if opt.replace:
      if len(project_list) != 1:
        print >>sys.stderr, \
          'error: --replace requires exactly one project'
        sys.exit(1)
      self._ReplaceBranch(project_list[0], people)
      return

    for project in project_list:
      avail = project.GetUploadableBranches()
@@ -176,6 +266,6 @@ changes in all projects listed in the manifest.
    if not pending:
      print >>sys.stdout, "no branches ready for upload"
    elif len(pending) == 1 and len(pending[0][1]) == 1:
      self._SingleBranch(pending[0][1][0])
      self._SingleBranch(pending[0][1][0], people)
    else:
      self._MultipleBranches(pending)
      self._MultipleBranches(pending, people)
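As an illustration (the addresses are invented, not taken from the change), reviewers and CC recipients could be attached to an upload with the new options, which accept comma-separated lists via _SplitEmails:

  repo upload --re=jane@example.com,joe@example.com --cc=qa-team@example.com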
35  subcmds/version.py  Normal file
@@ -0,0 +1,35 @@
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from command import Command, MirrorSafeCommand
from git_command import git
from project import HEAD

class Version(Command, MirrorSafeCommand):
  common = False
  helpSummary = "Display the version of repo"
  helpUsage = """
%prog
"""

  def Execute(self, opt, args):
    rp = self.manifest.repoProject
    rem = rp.GetRemote(rp.remote.name)

    print 'repo version %s' % rp.work_git.describe(HEAD)
    print ' (from %s)' % rem.url
    print git.version().strip()
    print 'Python %s' % sys.version