diff options
53 files changed, 11781 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..0d20b648 --- /dev/null +++ b/.gitignore | |||
| @@ -0,0 +1 @@ | |||
| *.pyc | |||
diff --git a/COPYING b/COPYING new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/COPYING | |||
| @@ -0,0 +1,202 @@ | |||
| 1 | |||
| 2 | Apache License | ||
| 3 | Version 2.0, January 2004 | ||
| 4 | http://www.apache.org/licenses/ | ||
| 5 | |||
| 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | ||
| 7 | |||
| 8 | 1. Definitions. | ||
| 9 | |||
| 10 | "License" shall mean the terms and conditions for use, reproduction, | ||
| 11 | and distribution as defined by Sections 1 through 9 of this document. | ||
| 12 | |||
| 13 | "Licensor" shall mean the copyright owner or entity authorized by | ||
| 14 | the copyright owner that is granting the License. | ||
| 15 | |||
| 16 | "Legal Entity" shall mean the union of the acting entity and all | ||
| 17 | other entities that control, are controlled by, or are under common | ||
| 18 | control with that entity. For the purposes of this definition, | ||
| 19 | "control" means (i) the power, direct or indirect, to cause the | ||
| 20 | direction or management of such entity, whether by contract or | ||
| 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the | ||
| 22 | outstanding shares, or (iii) beneficial ownership of such entity. | ||
| 23 | |||
| 24 | "You" (or "Your") shall mean an individual or Legal Entity | ||
| 25 | exercising permissions granted by this License. | ||
| 26 | |||
| 27 | "Source" form shall mean the preferred form for making modifications, | ||
| 28 | including but not limited to software source code, documentation | ||
| 29 | source, and configuration files. | ||
| 30 | |||
| 31 | "Object" form shall mean any form resulting from mechanical | ||
| 32 | transformation or translation of a Source form, including but | ||
| 33 | not limited to compiled object code, generated documentation, | ||
| 34 | and conversions to other media types. | ||
| 35 | |||
| 36 | "Work" shall mean the work of authorship, whether in Source or | ||
| 37 | Object form, made available under the License, as indicated by a | ||
| 38 | copyright notice that is included in or attached to the work | ||
| 39 | (an example is provided in the Appendix below). | ||
| 40 | |||
| 41 | "Derivative Works" shall mean any work, whether in Source or Object | ||
| 42 | form, that is based on (or derived from) the Work and for which the | ||
| 43 | editorial revisions, annotations, elaborations, or other modifications | ||
| 44 | represent, as a whole, an original work of authorship. For the purposes | ||
| 45 | of this License, Derivative Works shall not include works that remain | ||
| 46 | separable from, or merely link (or bind by name) to the interfaces of, | ||
| 47 | the Work and Derivative Works thereof. | ||
| 48 | |||
| 49 | "Contribution" shall mean any work of authorship, including | ||
| 50 | the original version of the Work and any modifications or additions | ||
| 51 | to that Work or Derivative Works thereof, that is intentionally | ||
| 52 | submitted to Licensor for inclusion in the Work by the copyright owner | ||
| 53 | or by an individual or Legal Entity authorized to submit on behalf of | ||
| 54 | the copyright owner. For the purposes of this definition, "submitted" | ||
| 55 | means any form of electronic, verbal, or written communication sent | ||
| 56 | to the Licensor or its representatives, including but not limited to | ||
| 57 | communication on electronic mailing lists, source code control systems, | ||
| 58 | and issue tracking systems that are managed by, or on behalf of, the | ||
| 59 | Licensor for the purpose of discussing and improving the Work, but | ||
| 60 | excluding communication that is conspicuously marked or otherwise | ||
| 61 | designated in writing by the copyright owner as "Not a Contribution." | ||
| 62 | |||
| 63 | "Contributor" shall mean Licensor and any individual or Legal Entity | ||
| 64 | on behalf of whom a Contribution has been received by Licensor and | ||
| 65 | subsequently incorporated within the Work. | ||
| 66 | |||
| 67 | 2. Grant of Copyright License. Subject to the terms and conditions of | ||
| 68 | this License, each Contributor hereby grants to You a perpetual, | ||
| 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
| 70 | copyright license to reproduce, prepare Derivative Works of, | ||
| 71 | publicly display, publicly perform, sublicense, and distribute the | ||
| 72 | Work and such Derivative Works in Source or Object form. | ||
| 73 | |||
| 74 | 3. Grant of Patent License. Subject to the terms and conditions of | ||
| 75 | this License, each Contributor hereby grants to You a perpetual, | ||
| 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
| 77 | (except as stated in this section) patent license to make, have made, | ||
| 78 | use, offer to sell, sell, import, and otherwise transfer the Work, | ||
| 79 | where such license applies only to those patent claims licensable | ||
| 80 | by such Contributor that are necessarily infringed by their | ||
| 81 | Contribution(s) alone or by combination of their Contribution(s) | ||
| 82 | with the Work to which such Contribution(s) was submitted. If You | ||
| 83 | institute patent litigation against any entity (including a | ||
| 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work | ||
| 85 | or a Contribution incorporated within the Work constitutes direct | ||
| 86 | or contributory patent infringement, then any patent licenses | ||
| 87 | granted to You under this License for that Work shall terminate | ||
| 88 | as of the date such litigation is filed. | ||
| 89 | |||
| 90 | 4. Redistribution. You may reproduce and distribute copies of the | ||
| 91 | Work or Derivative Works thereof in any medium, with or without | ||
| 92 | modifications, and in Source or Object form, provided that You | ||
| 93 | meet the following conditions: | ||
| 94 | |||
| 95 | (a) You must give any other recipients of the Work or | ||
| 96 | Derivative Works a copy of this License; and | ||
| 97 | |||
| 98 | (b) You must cause any modified files to carry prominent notices | ||
| 99 | stating that You changed the files; and | ||
| 100 | |||
| 101 | (c) You must retain, in the Source form of any Derivative Works | ||
| 102 | that You distribute, all copyright, patent, trademark, and | ||
| 103 | attribution notices from the Source form of the Work, | ||
| 104 | excluding those notices that do not pertain to any part of | ||
| 105 | the Derivative Works; and | ||
| 106 | |||
| 107 | (d) If the Work includes a "NOTICE" text file as part of its | ||
| 108 | distribution, then any Derivative Works that You distribute must | ||
| 109 | include a readable copy of the attribution notices contained | ||
| 110 | within such NOTICE file, excluding those notices that do not | ||
| 111 | pertain to any part of the Derivative Works, in at least one | ||
| 112 | of the following places: within a NOTICE text file distributed | ||
| 113 | as part of the Derivative Works; within the Source form or | ||
| 114 | documentation, if provided along with the Derivative Works; or, | ||
| 115 | within a display generated by the Derivative Works, if and | ||
| 116 | wherever such third-party notices normally appear. The contents | ||
| 117 | of the NOTICE file are for informational purposes only and | ||
| 118 | do not modify the License. You may add Your own attribution | ||
| 119 | notices within Derivative Works that You distribute, alongside | ||
| 120 | or as an addendum to the NOTICE text from the Work, provided | ||
| 121 | that such additional attribution notices cannot be construed | ||
| 122 | as modifying the License. | ||
| 123 | |||
| 124 | You may add Your own copyright statement to Your modifications and | ||
| 125 | may provide additional or different license terms and conditions | ||
| 126 | for use, reproduction, or distribution of Your modifications, or | ||
| 127 | for any such Derivative Works as a whole, provided Your use, | ||
| 128 | reproduction, and distribution of the Work otherwise complies with | ||
| 129 | the conditions stated in this License. | ||
| 130 | |||
| 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, | ||
| 132 | any Contribution intentionally submitted for inclusion in the Work | ||
| 133 | by You to the Licensor shall be under the terms and conditions of | ||
| 134 | this License, without any additional terms or conditions. | ||
| 135 | Notwithstanding the above, nothing herein shall supersede or modify | ||
| 136 | the terms of any separate license agreement you may have executed | ||
| 137 | with Licensor regarding such Contributions. | ||
| 138 | |||
| 139 | 6. Trademarks. This License does not grant permission to use the trade | ||
| 140 | names, trademarks, service marks, or product names of the Licensor, | ||
| 141 | except as required for reasonable and customary use in describing the | ||
| 142 | origin of the Work and reproducing the content of the NOTICE file. | ||
| 143 | |||
| 144 | 7. Disclaimer of Warranty. Unless required by applicable law or | ||
| 145 | agreed to in writing, Licensor provides the Work (and each | ||
| 146 | Contributor provides its Contributions) on an "AS IS" BASIS, | ||
| 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | ||
| 148 | implied, including, without limitation, any warranties or conditions | ||
| 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | ||
| 150 | PARTICULAR PURPOSE. You are solely responsible for determining the | ||
| 151 | appropriateness of using or redistributing the Work and assume any | ||
| 152 | risks associated with Your exercise of permissions under this License. | ||
| 153 | |||
| 154 | 8. Limitation of Liability. In no event and under no legal theory, | ||
| 155 | whether in tort (including negligence), contract, or otherwise, | ||
| 156 | unless required by applicable law (such as deliberate and grossly | ||
| 157 | negligent acts) or agreed to in writing, shall any Contributor be | ||
| 158 | liable to You for damages, including any direct, indirect, special, | ||
| 159 | incidental, or consequential damages of any character arising as a | ||
| 160 | result of this License or out of the use or inability to use the | ||
| 161 | Work (including but not limited to damages for loss of goodwill, | ||
| 162 | work stoppage, computer failure or malfunction, or any and all | ||
| 163 | other commercial damages or losses), even if such Contributor | ||
| 164 | has been advised of the possibility of such damages. | ||
| 165 | |||
| 166 | 9. Accepting Warranty or Additional Liability. While redistributing | ||
| 167 | the Work or Derivative Works thereof, You may choose to offer, | ||
| 168 | and charge a fee for, acceptance of support, warranty, indemnity, | ||
| 169 | or other liability obligations and/or rights consistent with this | ||
| 170 | License. However, in accepting such obligations, You may act only | ||
| 171 | on Your own behalf and on Your sole responsibility, not on behalf | ||
| 172 | of any other Contributor, and only if You agree to indemnify, | ||
| 173 | defend, and hold each Contributor harmless for any liability | ||
| 174 | incurred by, or claims asserted against, such Contributor by reason | ||
| 175 | of your accepting any such warranty or additional liability. | ||
| 176 | |||
| 177 | END OF TERMS AND CONDITIONS | ||
| 178 | |||
| 179 | APPENDIX: How to apply the Apache License to your work. | ||
| 180 | |||
| 181 | To apply the Apache License to your work, attach the following | ||
| 182 | boilerplate notice, with the fields enclosed by brackets "[]" | ||
| 183 | replaced with your own identifying information. (Don't include | ||
| 184 | the brackets!) The text should be enclosed in the appropriate | ||
| 185 | comment syntax for the file format. We also recommend that a | ||
| 186 | file or class name and description of purpose be included on the | ||
| 187 | same "printed page" as the copyright notice for easier | ||
| 188 | identification within third-party archives. | ||
| 189 | |||
| 190 | Copyright [yyyy] [name of copyright owner] | ||
| 191 | |||
| 192 | Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 193 | you may not use this file except in compliance with the License. | ||
| 194 | You may obtain a copy of the License at | ||
| 195 | |||
| 196 | http://www.apache.org/licenses/LICENSE-2.0 | ||
| 197 | |||
| 198 | Unless required by applicable law or agreed to in writing, software | ||
| 199 | distributed under the License is distributed on an "AS IS" BASIS, | ||
| 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 201 | See the License for the specific language governing permissions and | ||
| 202 | limitations under the License. | ||
diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..0184e08a --- /dev/null +++ b/Makefile | |||
| @@ -0,0 +1,29 @@ | |||
| 1 | # | ||
| 2 | # Copyright 2008 Google Inc. | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | GERRIT_SRC=../gerrit | ||
| 17 | GERRIT_MODULES=codereview froofle | ||
| 18 | |||
| 19 | all: | ||
| 20 | |||
| 21 | clean: | ||
| 22 | find . -name \*.pyc -type f | xargs rm -f | ||
| 23 | |||
| 24 | update-pyclient: | ||
| 25 | $(MAKE) -C $(GERRIT_SRC) release-pyclient | ||
| 26 | rm -rf $(GERRIT_MODULES) | ||
| 27 | (cd $(GERRIT_SRC)/release/pyclient && \ | ||
| 28 | find . -type f \ | ||
| 29 | | cpio -pd $(abspath .)) | ||
diff --git a/codereview/__init__.py b/codereview/__init__.py new file mode 100644 index 00000000..e47bc94e --- /dev/null +++ b/codereview/__init__.py | |||
| @@ -0,0 +1 @@ | |||
| __version__ = 'v1.0' | |||
diff --git a/codereview/need_retry_pb2.py b/codereview/need_retry_pb2.py new file mode 100644 index 00000000..3fab2d43 --- /dev/null +++ b/codereview/need_retry_pb2.py | |||
| @@ -0,0 +1,32 @@ | |||
| 1 | #!/usr/bin/python2.4 | ||
| 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| 3 | |||
| 4 | from froofle.protobuf import descriptor | ||
| 5 | from froofle.protobuf import message | ||
| 6 | from froofle.protobuf import reflection | ||
| 7 | from froofle.protobuf import service | ||
| 8 | from froofle.protobuf import service_reflection | ||
| 9 | from froofle.protobuf import descriptor_pb2 | ||
| 10 | |||
| 11 | |||
| 12 | |||
| 13 | _RETRYREQUESTLATERRESPONSE = descriptor.Descriptor( | ||
| 14 | name='RetryRequestLaterResponse', | ||
| 15 | full_name='codereview.RetryRequestLaterResponse', | ||
| 16 | filename='need_retry.proto', | ||
| 17 | containing_type=None, | ||
| 18 | fields=[ | ||
| 19 | ], | ||
| 20 | extensions=[ | ||
| 21 | ], | ||
| 22 | nested_types=[], # TODO(robinson): Implement. | ||
| 23 | enum_types=[ | ||
| 24 | ], | ||
| 25 | options=None) | ||
| 26 | |||
| 27 | |||
| 28 | |||
| 29 | class RetryRequestLaterResponse(message.Message): | ||
| 30 | __metaclass__ = reflection.GeneratedProtocolMessageType | ||
| 31 | DESCRIPTOR = _RETRYREQUESTLATERRESPONSE | ||
| 32 | |||
diff --git a/codereview/proto_client.py b/codereview/proto_client.py new file mode 100755 index 00000000..e11beff0 --- /dev/null +++ b/codereview/proto_client.py | |||
| @@ -0,0 +1,349 @@ | |||
| 1 | # Copyright 2007, 2008 Google Inc. | ||
| 2 | # | ||
| 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 4 | # you may not use this file except in compliance with the License. | ||
| 5 | # You may obtain a copy of the License at | ||
| 6 | # | ||
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 8 | # | ||
| 9 | # Unless required by applicable law or agreed to in writing, software | ||
| 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 12 | # See the License for the specific language governing permissions and | ||
| 13 | # limitations under the License. | ||
| 14 | |||
| 15 | import base64 | ||
| 16 | import cookielib | ||
| 17 | import getpass | ||
| 18 | import logging | ||
| 19 | import md5 | ||
| 20 | import os | ||
| 21 | import random | ||
| 22 | import socket | ||
| 23 | import time | ||
| 24 | import urllib | ||
| 25 | import urllib2 | ||
| 26 | import urlparse | ||
| 27 | |||
| 28 | from froofle.protobuf.service import RpcChannel | ||
| 29 | from froofle.protobuf.service import RpcController | ||
| 30 | from need_retry_pb2 import RetryRequestLaterResponse; | ||
| 31 | |||
| 32 | class ClientLoginError(urllib2.HTTPError): | ||
| 33 | """Raised to indicate an error authenticating with ClientLogin.""" | ||
| 34 | |||
| 35 | def __init__(self, url, code, msg, headers, args): | ||
| 36 | urllib2.HTTPError.__init__(self, url, code, msg, headers, None) | ||
| 37 | self.args = args | ||
| 38 | self.reason = args["Error"] | ||
| 39 | |||
| 40 | |||
| 41 | class Proxy(object): | ||
| 42 | class _ResultHolder(object): | ||
| 43 | def __call__(self, result): | ||
| 44 | self._result = result | ||
| 45 | |||
| 46 | class _RemoteController(RpcController): | ||
| 47 | def Reset(self): | ||
| 48 | pass | ||
| 49 | |||
| 50 | def Failed(self): | ||
| 51 | pass | ||
| 52 | |||
| 53 | def ErrorText(self): | ||
| 54 | pass | ||
| 55 | |||
| 56 | def StartCancel(self): | ||
| 57 | pass | ||
| 58 | |||
| 59 | def SetFailed(self, reason): | ||
| 60 | raise RuntimeError, reason | ||
| 61 | |||
| 62 | def IsCancelled(self): | ||
| 63 | pass | ||
| 64 | |||
| 65 | def NotifyOnCancel(self, callback): | ||
| 66 | pass | ||
| 67 | |||
| 68 | def __init__(self, stub): | ||
| 69 | self._stub = stub | ||
| 70 | |||
| 71 | def __getattr__(self, key): | ||
| 72 | method = getattr(self._stub, key) | ||
| 73 | |||
| 74 | def call(request): | ||
| 75 | done = self._ResultHolder() | ||
| 76 | method(self._RemoteController(), request, done) | ||
| 77 | return done._result | ||
| 78 | |||
| 79 | return call | ||
| 80 | |||
| 81 | |||
| 82 | class HttpRpc(RpcChannel): | ||
| 83 | """Simple protobuf over HTTP POST implementation.""" | ||
| 84 | |||
| 85 | def __init__(self, host, auth_function, | ||
| 86 | host_override=None, | ||
| 87 | extra_headers={}, | ||
| 88 | cookie_file=None): | ||
| 89 | """Creates a new HttpRpc. | ||
| 90 | |||
| 91 | Args: | ||
| 92 | host: The host to send requests to. | ||
| 93 | auth_function: A function that takes no arguments and returns an | ||
| 94 | (email, password) tuple when called. Will be called if authentication | ||
| 95 | is required. | ||
| 96 | host_override: The host header to send to the server (defaults to host). | ||
| 97 | extra_headers: A dict of extra headers to append to every request. | ||
| 98 | cookie_file: If not None, name of the file in ~/ to save the | ||
| 99 | cookie jar into. Applications are encouraged to set this to | ||
| 100 | '.$appname_cookies' or some otherwise unique name. | ||
| 101 | """ | ||
| 102 | self.host = host.lower() | ||
| 103 | self.host_override = host_override | ||
| 104 | self.auth_function = auth_function | ||
| 105 | self.authenticated = False | ||
| 106 | self.extra_headers = extra_headers | ||
| 107 | self.xsrf_token = None | ||
| 108 | if cookie_file is None: | ||
| 109 | self.cookie_file = None | ||
| 110 | else: | ||
| 111 | self.cookie_file = os.path.expanduser("~/%s" % cookie_file) | ||
| 112 | self.opener = self._GetOpener() | ||
| 113 | if self.host_override: | ||
| 114 | logging.info("Server: %s; Host: %s", self.host, self.host_override) | ||
| 115 | else: | ||
| 116 | logging.info("Server: %s", self.host) | ||
| 117 | |||
| 118 | def CallMethod(self, method, controller, request, response_type, done): | ||
| 119 | pat = "application/x-google-protobuf; name=%s" | ||
| 120 | |||
| 121 | url = "/proto/%s/%s" % (method.containing_service.name, method.name) | ||
| 122 | reqbin = request.SerializeToString() | ||
| 123 | reqtyp = pat % request.DESCRIPTOR.full_name | ||
| 124 | reqmd5 = base64.b64encode(md5.new(reqbin).digest()) | ||
| 125 | |||
| 126 | start = time.time() | ||
| 127 | while True: | ||
| 128 | t, b = self._Send(url, reqbin, reqtyp, reqmd5) | ||
| 129 | if t == (pat % RetryRequestLaterResponse.DESCRIPTOR.full_name): | ||
| 130 | if time.time() >= (start + 1800): | ||
| 131 | controller.SetFailed("timeout") | ||
| 132 | return | ||
| 133 | s = random.uniform(0.250, 2.000) | ||
| 134 | print "Busy, retrying in %.3f seconds ..." % s | ||
| 135 | time.sleep(s) | ||
| 136 | continue | ||
| 137 | |||
| 138 | if t == (pat % response_type.DESCRIPTOR.full_name): | ||
| 139 | response = response_type() | ||
| 140 | response.ParseFromString(b) | ||
| 141 | done(response) | ||
| 142 | else: | ||
| 143 | controller.SetFailed("Unexpected %s response" % t) | ||
| 144 | break | ||
| 145 | |||
| 146 | def _CreateRequest(self, url, data=None): | ||
| 147 | """Creates a new urllib request.""" | ||
| 148 | logging.debug("Creating request for: '%s' with payload:\n%s", url, data) | ||
| 149 | req = urllib2.Request(url, data=data) | ||
| 150 | if self.host_override: | ||
| 151 | req.add_header("Host", self.host_override) | ||
| 152 | for key, value in self.extra_headers.iteritems(): | ||
| 153 | req.add_header(key, value) | ||
| 154 | return req | ||
| 155 | |||
| 156 | def _GetAuthToken(self, email, password): | ||
| 157 | """Uses ClientLogin to authenticate the user, returning an auth token. | ||
| 158 | |||
| 159 | Args: | ||
| 160 | email: The user's email address | ||
| 161 | password: The user's password | ||
| 162 | |||
| 163 | Raises: | ||
| 164 | ClientLoginError: If there was an error authenticating with ClientLogin. | ||
| 165 | HTTPError: If there was some other form of HTTP error. | ||
| 166 | |||
| 167 | Returns: | ||
| 168 | The authentication token returned by ClientLogin. | ||
| 169 | """ | ||
| 170 | req = self._CreateRequest( | ||
| 171 | url="https://www.google.com/accounts/ClientLogin", | ||
| 172 | data=urllib.urlencode({ | ||
| 173 | "Email": email, | ||
| 174 | "Passwd": password, | ||
| 175 | "service": "ah", | ||
| 176 | "source": "gerrit-codereview-client", | ||
| 177 | "accountType": "HOSTED_OR_GOOGLE", | ||
| 178 | }) | ||
| 179 | ) | ||
| 180 | try: | ||
| 181 | response = self.opener.open(req) | ||
| 182 | response_body = response.read() | ||
| 183 | response_dict = dict(x.split("=") | ||
| 184 | for x in response_body.split("\n") if x) | ||
| 185 | return response_dict["Auth"] | ||
| 186 | except urllib2.HTTPError, e: | ||
| 187 | if e.code == 403: | ||
| 188 | body = e.read() | ||
| 189 | response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) | ||
| 190 | raise ClientLoginError(req.get_full_url(), e.code, e.msg, | ||
| 191 | e.headers, response_dict) | ||
| 192 | else: | ||
| 193 | raise | ||
| 194 | |||
| 195 | def _GetAuthCookie(self, auth_token): | ||
| 196 | """Fetches authentication cookies for an authentication token. | ||
| 197 | |||
| 198 | Args: | ||
| 199 | auth_token: The authentication token returned by ClientLogin. | ||
| 200 | |||
| 201 | Raises: | ||
| 202 | HTTPError: If there was an error fetching the authentication cookies. | ||
| 203 | """ | ||
| 204 | # This is a dummy value to allow us to identify when we're successful. | ||
| 205 | continue_location = "http://localhost/" | ||
| 206 | args = {"continue": continue_location, "auth": auth_token} | ||
| 207 | req = self._CreateRequest("http://%s/_ah/login?%s" % | ||
| 208 | (self.host, urllib.urlencode(args))) | ||
| 209 | try: | ||
| 210 | response = self.opener.open(req) | ||
| 211 | except urllib2.HTTPError, e: | ||
| 212 | response = e | ||
| 213 | if (response.code != 302 or | ||
| 214 | response.info()["location"] != continue_location): | ||
| 215 | raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, | ||
| 216 | response.headers, response.fp) | ||
| 217 | self.authenticated = True | ||
| 218 | |||
| 219 | def _GetXsrfToken(self): | ||
| 220 | """Fetches /proto/_token for use in X-XSRF-Token HTTP header. | ||
| 221 | |||
| 222 | Raises: | ||
| 223 | HTTPError: If there was an error fetching a new token. | ||
| 224 | """ | ||
| 225 | tries = 0 | ||
| 226 | while True: | ||
| 227 | url = "http://%s/proto/_token" % self.host | ||
| 228 | req = self._CreateRequest(url) | ||
| 229 | try: | ||
| 230 | response = self.opener.open(req) | ||
| 231 | self.xsrf_token = response.read() | ||
| 232 | return | ||
| 233 | except urllib2.HTTPError, e: | ||
| 234 | if tries > 3: | ||
| 235 | raise | ||
| 236 | elif e.code == 401: | ||
| 237 | self._Authenticate() | ||
| 238 | else: | ||
| 239 | raise | ||
| 240 | |||
| 241 | def _Authenticate(self): | ||
| 242 | """Authenticates the user. | ||
| 243 | |||
| 244 | The authentication process works as follows: | ||
| 245 | 1) We get a username and password from the user | ||
| 246 | 2) We use ClientLogin to obtain an AUTH token for the user | ||
| 247 | (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). | ||
| 248 | 3) We pass the auth token to /_ah/login on the server to obtain an | ||
| 249 | authentication cookie. If login was successful, it tries to redirect | ||
| 250 | us to the URL we provided. | ||
| 251 | |||
| 252 | If we attempt to access the upload API without first obtaining an | ||
| 253 | authentication cookie, it returns a 401 response and directs us to | ||
| 254 | authenticate ourselves with ClientLogin. | ||
| 255 | """ | ||
| 256 | for i in range(3): | ||
| 257 | credentials = self.auth_function() | ||
| 258 | auth_token = self._GetAuthToken(credentials[0], credentials[1]) | ||
| 259 | self._GetAuthCookie(auth_token) | ||
| 260 | if self.cookie_file is not None: | ||
| 261 | self.cookie_jar.save() | ||
| 262 | return | ||
| 263 | |||
| 264 | def _Send(self, request_path, payload, content_type, content_md5): | ||
| 265 | """Sends an RPC and returns the response. | ||
| 266 | |||
| 267 | Args: | ||
| 268 | request_path: The path to send the request to, eg /api/appversion/create. | ||
| 269 | payload: The body of the request, or None to send an empty request. | ||
| 270 | content_type: The Content-Type header to use. | ||
| 271 | content_md5: The Content-MD5 header to use. | ||
| 272 | |||
| 273 | Returns: | ||
| 274 | The content type, as a string. | ||
| 275 | The response body, as a string. | ||
| 276 | """ | ||
| 277 | if not self.authenticated: | ||
| 278 | self._Authenticate() | ||
| 279 | if not self.xsrf_token: | ||
| 280 | self._GetXsrfToken() | ||
| 281 | |||
| 282 | old_timeout = socket.getdefaulttimeout() | ||
| 283 | socket.setdefaulttimeout(None) | ||
| 284 | try: | ||
| 285 | tries = 0 | ||
| 286 | while True: | ||
| 287 | tries += 1 | ||
| 288 | url = "http://%s%s" % (self.host, request_path) | ||
| 289 | req = self._CreateRequest(url=url, data=payload) | ||
| 290 | req.add_header("Content-Type", content_type) | ||
| 291 | req.add_header("Content-MD5", content_md5) | ||
| 292 | req.add_header("X-XSRF-Token", self.xsrf_token) | ||
| 293 | try: | ||
| 294 | f = self.opener.open(req) | ||
| 295 | hdr = f.info() | ||
| 296 | type = hdr.getheader('Content-Type', | ||
| 297 | 'application/octet-stream') | ||
| 298 | response = f.read() | ||
| 299 | f.close() | ||
| 300 | return type, response | ||
| 301 | except urllib2.HTTPError, e: | ||
| 302 | if tries > 3: | ||
| 303 | raise | ||
| 304 | elif e.code == 401: | ||
| 305 | self._Authenticate() | ||
| 306 | elif e.code == 403: | ||
| 307 | if not hasattr(e, 'read'): | ||
| 308 | e.read = lambda self: '' | ||
| 309 | raise RuntimeError, '403\nxsrf: %s\n%s' \ | ||
| 310 | % (self.xsrf_token, e.read()) | ||
| 311 | else: | ||
| 312 | raise | ||
| 313 | finally: | ||
| 314 | socket.setdefaulttimeout(old_timeout) | ||
| 315 | |||
| 316 | def _GetOpener(self): | ||
| 317 | """Returns an OpenerDirector that supports cookies and ignores redirects. | ||
| 318 | |||
| 319 | Returns: | ||
| 320 | A urllib2.OpenerDirector object. | ||
| 321 | """ | ||
| 322 | opener = urllib2.OpenerDirector() | ||
| 323 | opener.add_handler(urllib2.ProxyHandler()) | ||
| 324 | opener.add_handler(urllib2.UnknownHandler()) | ||
| 325 | opener.add_handler(urllib2.HTTPHandler()) | ||
| 326 | opener.add_handler(urllib2.HTTPDefaultErrorHandler()) | ||
| 327 | opener.add_handler(urllib2.HTTPSHandler()) | ||
| 328 | opener.add_handler(urllib2.HTTPErrorProcessor()) | ||
| 329 | if self.cookie_file is not None: | ||
| 330 | self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) | ||
| 331 | if os.path.exists(self.cookie_file): | ||
| 332 | try: | ||
| 333 | self.cookie_jar.load() | ||
| 334 | self.authenticated = True | ||
| 335 | except (cookielib.LoadError, IOError): | ||
| 336 | # Failed to load cookies - just ignore them. | ||
| 337 | pass | ||
| 338 | else: | ||
| 339 | # Create an empty cookie file with mode 600 | ||
| 340 | fd = os.open(self.cookie_file, os.O_CREAT, 0600) | ||
| 341 | os.close(fd) | ||
| 342 | # Always chmod the cookie file | ||
| 343 | os.chmod(self.cookie_file, 0600) | ||
| 344 | else: | ||
| 345 | # Don't save cookies across runs of update.py. | ||
| 346 | self.cookie_jar = cookielib.CookieJar() | ||
| 347 | opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) | ||
| 348 | return opener | ||
| 349 | |||
diff --git a/codereview/review_pb2.py b/codereview/review_pb2.py new file mode 100644 index 00000000..0896feba --- /dev/null +++ b/codereview/review_pb2.py | |||
| @@ -0,0 +1,48 @@ | |||
| 1 | #!/usr/bin/python2.4 | ||
| 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| 3 | |||
| 4 | from froofle.protobuf import descriptor | ||
| 5 | from froofle.protobuf import message | ||
| 6 | from froofle.protobuf import reflection | ||
| 7 | from froofle.protobuf import service | ||
| 8 | from froofle.protobuf import service_reflection | ||
| 9 | from froofle.protobuf import descriptor_pb2 | ||
| 10 | |||
| 11 | |||
| 12 | import upload_bundle_pb2 | ||
| 13 | |||
| 14 | |||
| 15 | |||
# NOTE(review): this file is generated by the protocol buffer compiler
# ("DO NOT EDIT") -- do not change the code by hand; regenerate from the
# .proto definition instead.  Comments below are review annotations only.

# RPC service descriptor for codereview.ReviewService.  Two methods:
# UploadBundle starts a new Git bundle upload and ContinueBundle appends
# a further segment to an existing one; both reply with an
# UploadBundleResponse (see upload_bundle_pb2).
_REVIEWSERVICE = descriptor.ServiceDescriptor(
  name='ReviewService',
  full_name='codereview.ReviewService',
  index=0,
  options=None,
  methods=[
  descriptor.MethodDescriptor(
    name='UploadBundle',
    full_name='codereview.ReviewService.UploadBundle',
    index=0,
    containing_service=None,
    input_type=upload_bundle_pb2._UPLOADBUNDLEREQUEST,
    output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
    options=None,
  ),
  descriptor.MethodDescriptor(
    name='ContinueBundle',
    full_name='codereview.ReviewService.ContinueBundle',
    index=1,
    containing_service=None,
    input_type=upload_bundle_pb2._UPLOADBUNDLECONTINUE,
    output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
    options=None,
  ),
])

# Abstract service class; behavior is filled in by the generated-service
# metaclass from the descriptor above.
class ReviewService(service.Service):
  __metaclass__ = service_reflection.GeneratedServiceType
  DESCRIPTOR = _REVIEWSERVICE

# Client-side stub implementation of ReviewService.
class ReviewService_Stub(ReviewService):
  __metaclass__ = service_reflection.GeneratedServiceStubType
  DESCRIPTOR = _REVIEWSERVICE
| 48 | |||
diff --git a/codereview/upload_bundle_pb2.py b/codereview/upload_bundle_pb2.py new file mode 100644 index 00000000..48c36512 --- /dev/null +++ b/codereview/upload_bundle_pb2.py | |||
| @@ -0,0 +1,190 @@ | |||
| 1 | #!/usr/bin/python2.4 | ||
| 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| 3 | |||
| 4 | from froofle.protobuf import descriptor | ||
| 5 | from froofle.protobuf import message | ||
| 6 | from froofle.protobuf import reflection | ||
| 7 | from froofle.protobuf import service | ||
| 8 | from froofle.protobuf import service_reflection | ||
| 9 | from froofle.protobuf import descriptor_pb2 | ||
| 10 | |||
| 11 | |||
# NOTE(review): this file is generated by the protocol buffer compiler
# ("DO NOT EDIT") -- do not change the code by hand; regenerate from
# upload_bundle.proto instead.  Comments below are review annotations only.

# Status codes the server can return in UploadBundleResponse.status_code.
_UPLOADBUNDLERESPONSE_CODETYPE = descriptor.EnumDescriptor(
  name='CodeType',
  full_name='codereview.UploadBundleResponse.CodeType',
  filename='CodeType',
  values=[
    descriptor.EnumValueDescriptor(
      name='RECEIVED', index=0, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='CONTINUE', index=1, number=4,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='UNAUTHORIZED_USER', index=2, number=7,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='UNKNOWN_PROJECT', index=3, number=2,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='UNKNOWN_BRANCH', index=4, number=3,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='UNKNOWN_BUNDLE', index=5, number=5,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='NOT_BUNDLE_OWNER', index=6, number=6,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='BUNDLE_CLOSED', index=7, number=8,
      options=None,
      type=None),
  ],
  options=None,
)


# Initial upload request: names the destination project/branch and carries
# the first chunk of bundle data (plus the objects contained in it).
_UPLOADBUNDLEREQUEST = descriptor.Descriptor(
  name='UploadBundleRequest',
  full_name='codereview.UploadBundleRequest',
  filename='upload_bundle.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='dest_project', full_name='codereview.UploadBundleRequest.dest_project', index=0,
      number=10, type=9, cpp_type=9, label=2,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='dest_branch', full_name='codereview.UploadBundleRequest.dest_branch', index=1,
      number=11, type=9, cpp_type=9, label=2,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='partial_upload', full_name='codereview.UploadBundleRequest.partial_upload', index=2,
      number=12, type=8, cpp_type=7, label=2,
      default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='bundle_data', full_name='codereview.UploadBundleRequest.bundle_data', index=3,
      number=13, type=12, cpp_type=9, label=2,
      default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='contained_object', full_name='codereview.UploadBundleRequest.contained_object', index=4,
      number=1, type=9, cpp_type=9, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)


# Server reply: a CodeType status plus, when the upload may continue,
# the id of the bundle being assembled.
_UPLOADBUNDLERESPONSE = descriptor.Descriptor(
  name='UploadBundleResponse',
  full_name='codereview.UploadBundleResponse',
  filename='upload_bundle.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='status_code', full_name='codereview.UploadBundleResponse.status_code', index=0,
      number=10, type=14, cpp_type=8, label=2,
      default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='bundle_id', full_name='codereview.UploadBundleResponse.bundle_id', index=1,
      number=11, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
    _UPLOADBUNDLERESPONSE_CODETYPE,
  ],
  options=None)


# Follow-up request: appends segment number segment_id to an existing
# bundle identified by bundle_id.
_UPLOADBUNDLECONTINUE = descriptor.Descriptor(
  name='UploadBundleContinue',
  full_name='codereview.UploadBundleContinue',
  filename='upload_bundle.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='bundle_id', full_name='codereview.UploadBundleContinue.bundle_id', index=0,
      number=10, type=9, cpp_type=9, label=2,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='segment_id', full_name='codereview.UploadBundleContinue.segment_id', index=1,
      number=11, type=5, cpp_type=1, label=2,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='partial_upload', full_name='codereview.UploadBundleContinue.partial_upload', index=2,
      number=12, type=8, cpp_type=7, label=2,
      default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='bundle_data', full_name='codereview.UploadBundleContinue.bundle_data', index=3,
      number=13, type=12, cpp_type=9, label=1,
      default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)


# Patch up the forward reference: status_code is of enum type CodeType,
# which could not be linked while the descriptors above were constructed.
_UPLOADBUNDLERESPONSE.fields_by_name['status_code'].enum_type = _UPLOADBUNDLERESPONSE_CODETYPE

# Concrete message classes; the reflection metaclass generates fields and
# (de)serialization from each DESCRIPTOR.
class UploadBundleRequest(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _UPLOADBUNDLEREQUEST

class UploadBundleResponse(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _UPLOADBUNDLERESPONSE

class UploadBundleContinue(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _UPLOADBUNDLECONTINUE
| 190 | |||
diff --git a/color.py b/color.py new file mode 100644 index 00000000..b3a558cd --- /dev/null +++ b/color.py | |||
| @@ -0,0 +1,154 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | |||
| 19 | import pager | ||
| 20 | from git_config import GitConfig | ||
| 21 | |||
| 22 | COLORS = {None :-1, | ||
| 23 | 'normal' :-1, | ||
| 24 | 'black' : 0, | ||
| 25 | 'red' : 1, | ||
| 26 | 'green' : 2, | ||
| 27 | 'yellow' : 3, | ||
| 28 | 'blue' : 4, | ||
| 29 | 'magenta': 5, | ||
| 30 | 'cyan' : 6, | ||
| 31 | 'white' : 7} | ||
| 32 | |||
| 33 | ATTRS = {None :-1, | ||
| 34 | 'bold' : 1, | ||
| 35 | 'dim' : 2, | ||
| 36 | 'ul' : 4, | ||
| 37 | 'blink' : 5, | ||
| 38 | 'reverse': 7} | ||
| 39 | |||
| 40 | RESET = "\033[m" | ||
| 41 | |||
| 42 | def is_color(s): return s in COLORS | ||
| 43 | def is_attr(s): return s in ATTRS | ||
| 44 | |||
| 45 | def _Color(fg = None, bg = None, attr = None): | ||
| 46 | fg = COLORS[fg] | ||
| 47 | bg = COLORS[bg] | ||
| 48 | attr = ATTRS[attr] | ||
| 49 | |||
| 50 | if attr >= 0 or fg >= 0 or bg >= 0: | ||
| 51 | need_sep = False | ||
| 52 | code = "\033[" | ||
| 53 | |||
| 54 | if attr >= 0: | ||
| 55 | code += chr(ord('0') + attr) | ||
| 56 | need_sep = True | ||
| 57 | |||
| 58 | if fg >= 0: | ||
| 59 | if need_sep: | ||
| 60 | code += ';' | ||
| 61 | need_sep = True | ||
| 62 | |||
| 63 | if fg < 8: | ||
| 64 | code += '3%c' % (ord('0') + fg) | ||
| 65 | else: | ||
| 66 | code += '38;5;%d' % fg | ||
| 67 | |||
| 68 | if bg >= 0: | ||
| 69 | if need_sep: | ||
| 70 | code += ';' | ||
| 71 | need_sep = True | ||
| 72 | |||
| 73 | if bg < 8: | ||
| 74 | code += '4%c' % (ord('0') + bg) | ||
| 75 | else: | ||
| 76 | code += '48;5;%d' % bg | ||
| 77 | code += 'm' | ||
| 78 | else: | ||
| 79 | code = '' | ||
| 80 | return code | ||
| 81 | |||
| 82 | |||
| 83 | class Coloring(object): | ||
| 84 | def __init__(self, config, type): | ||
| 85 | self._section = 'color.%s' % type | ||
| 86 | self._config = config | ||
| 87 | self._out = sys.stdout | ||
| 88 | |||
| 89 | on = self._config.GetString(self._section) | ||
| 90 | if on is None: | ||
| 91 | on = self._config.GetString('color.ui') | ||
| 92 | |||
| 93 | if on == 'auto': | ||
| 94 | if pager.active or os.isatty(1): | ||
| 95 | self._on = True | ||
| 96 | else: | ||
| 97 | self._on = False | ||
| 98 | elif on in ('true', 'always'): | ||
| 99 | self._on = True | ||
| 100 | else: | ||
| 101 | self._on = False | ||
| 102 | |||
| 103 | @property | ||
| 104 | def is_on(self): | ||
| 105 | return self._on | ||
| 106 | |||
| 107 | def write(self, fmt, *args): | ||
| 108 | self._out.write(fmt % args) | ||
| 109 | |||
| 110 | def nl(self): | ||
| 111 | self._out.write('\n') | ||
| 112 | |||
| 113 | def printer(self, opt=None, fg=None, bg=None, attr=None): | ||
| 114 | s = self | ||
| 115 | c = self.colorer(opt, fg, bg, attr) | ||
| 116 | def f(fmt, *args): | ||
| 117 | s._out.write(c(fmt, *args)) | ||
| 118 | return f | ||
| 119 | |||
| 120 | def colorer(self, opt=None, fg=None, bg=None, attr=None): | ||
| 121 | if self._on: | ||
| 122 | c = self._parse(opt, fg, bg, attr) | ||
| 123 | def f(fmt, *args): | ||
| 124 | str = fmt % args | ||
| 125 | return ''.join([c, str, RESET]) | ||
| 126 | return f | ||
| 127 | else: | ||
| 128 | def f(fmt, *args): | ||
| 129 | return fmt % args | ||
| 130 | return f | ||
| 131 | |||
| 132 | def _parse(self, opt, fg, bg, attr): | ||
| 133 | if not opt: | ||
| 134 | return _Color(fg, bg, attr) | ||
| 135 | |||
| 136 | v = self._config.GetString('%s.%s' % (self._section, opt)) | ||
| 137 | if v is None: | ||
| 138 | return _Color(fg, bg, attr) | ||
| 139 | |||
| 140 | v = v.trim().lowercase() | ||
| 141 | if v == "reset": | ||
| 142 | return RESET | ||
| 143 | elif v == '': | ||
| 144 | return _Color(fg, bg, attr) | ||
| 145 | |||
| 146 | have_fg = False | ||
| 147 | for a in v.split(' '): | ||
| 148 | if is_color(a): | ||
| 149 | if have_fg: bg = a | ||
| 150 | else: fg = a | ||
| 151 | elif is_attr(a): | ||
| 152 | attr = a | ||
| 153 | |||
| 154 | return _Color(fg, bg, attr) | ||
diff --git a/command.py b/command.py new file mode 100644 index 00000000..516c2d9d --- /dev/null +++ b/command.py | |||
| @@ -0,0 +1,116 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import optparse | ||
| 18 | import sys | ||
| 19 | |||
| 20 | from error import NoSuchProjectError | ||
| 21 | |||
class Command(object):
  """Base class for any command line action in repo.
  """

  common = False     # True if the command is shown in the common help summary
  manifest = None    # set by the main driver before Execute() is called
  _optparse = None   # lazily built by the OptionParser property

  @property
  def OptionParser(self):
    """The optparse parser for this command, created on first access.

    The usage string comes from the subclass's helpUsage attribute with
    '%prog' replaced by 'repo <NAME>'; subclasses add options in _Options().
    """
    if self._optparse is None:
      try:
        me = 'repo %s' % self.NAME
        usage = self.helpUsage.strip().replace('%prog', me)
      except AttributeError:
        usage = 'repo %s' % self.NAME
      self._optparse = optparse.OptionParser(usage = usage)
      self._Options(self._optparse)
    return self._optparse

  def _Options(self, p):
    """Initialize the option parser.  Subclasses override to add options.
    """

  def Usage(self):
    """Display usage and terminate.
    """
    self.OptionParser.print_usage()
    sys.exit(1)

  def Execute(self, opt, args):
    """Perform the action, after option parsing is complete.
    """
    raise NotImplementedError

  def GetProjects(self, args, missing_ok=False):
    """A list of projects that match the arguments.

    Args:
      args:       project names or filesystem paths; an empty list
                  selects every project in the manifest.
      missing_ok: also include projects whose work tree does not exist.

    Returns:
      Matching projects, sorted by relative path.

    Raises:
      NoSuchProjectError: an argument matched no (existing) project.
    """
    # Renamed from 'all': avoid shadowing the builtin all().
    all_projects = self.manifest.projects
    result = []

    if not args:
      for project in all_projects.values():
        if missing_ok or project.Exists:
          result.append(project)
    else:
      by_path = None

      for arg in args:
        project = all_projects.get(arg)

        if not project:
          path = os.path.abspath(arg)

          # Build the worktree->project index lazily; only path
          # arguments need it.
          if not by_path:
            by_path = dict()
            for p in all_projects.values():
              by_path[p.worktree] = p

          if os.path.exists(path):
            # Walk up from the given path looking for an enclosing
            # project, stopping at the filesystem or manifest root.
            while path \
              and path != '/' \
              and path != self.manifest.topdir:
              try:
                project = by_path[path]
                break
              except KeyError:
                path = os.path.dirname(path)
          else:
            try:
              project = by_path[path]
            except KeyError:
              pass

        if not project:
          raise NoSuchProjectError(arg)
        if not missing_ok and not project.Exists:
          raise NoSuchProjectError(arg)

        result.append(project)

    def _getpath(x):
      return x.relpath
    result.sort(key=_getpath)
    return result
| 107 | |||
# NOTE(review): marker base class -- presumably tested via isinstance by
# the launcher to suppress the pager; confirm against the main driver.
class InteractiveCommand(Command):
  """Command which requires user interaction on the tty and
  must not run within a pager, even if the user asks to.
  """
| 112 | |||
# NOTE(review): marker base class -- presumably tested via isinstance by
# the launcher to start the pager by default; confirm against the main driver.
class PagedCommand(Command):
  """Command which defaults to output in a pager, as its
  display tends to be larger than one screen full.
  """
diff --git a/editor.py b/editor.py new file mode 100644 index 00000000..4f22257f --- /dev/null +++ b/editor.py | |||
| @@ -0,0 +1,85 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | import subprocess | ||
| 19 | import tempfile | ||
| 20 | |||
| 21 | from error import EditorError | ||
| 22 | |||
class Editor(object):
  """Manages the user's preferred text editor."""

  _editor = None       # cached editor command; resolved on first use
  globalConfig = None  # set by the main driver to the global git config

  @classmethod
  def _GetEditor(cls):
    """Return the editor command, resolving and caching it on first use."""
    if cls._editor is None:
      cls._editor = cls._SelectEditor()
    return cls._editor

  @classmethod
  def _SelectEditor(cls):
    """Pick an editor: GIT_EDITOR, core.editor, VISUAL, EDITOR, then vi."""
    e = os.getenv('GIT_EDITOR')
    if e:
      return e

    e = cls.globalConfig.GetString('core.editor')
    if e:
      return e

    e = os.getenv('VISUAL')
    if e:
      return e

    e = os.getenv('EDITOR')
    if e:
      return e

    if os.getenv('TERM') == 'dumb':
      # sys.stderr.write instead of 'print >>sys.stderr': equivalent
      # output, but valid on both Python 2 and Python 3.
      sys.stderr.write(
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb. Please configure at
least one of these before using this command.""" + '\n')
      sys.exit(1)

    return 'vi'

  @classmethod
  def EditString(cls, data):
    """Opens an editor to edit the given content.

    Args:
      data : the text to edit

    Returns:
      new value of edited text

    Raises:
      EditorError: the editor exited with a nonzero status
    """
    editor = cls._GetEditor()
    fd, path = tempfile.mkstemp()
    try:
      os.write(fd, data)
      os.close(fd)
      fd = None

      if subprocess.Popen([editor, path]).wait() != 0:
        raise EditorError()
      return open(path).read()
    finally:
      # Bug fix: 'if fd:' skipped the close when the descriptor happened
      # to be 0 (a valid fd); test against None, the sentinel we set above.
      if fd is not None:
        os.close(fd)
      os.remove(path)
diff --git a/error.py b/error.py new file mode 100644 index 00000000..e3cf41c1 --- /dev/null +++ b/error.py | |||
| @@ -0,0 +1,66 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
class ManifestParseError(Exception):
  """Failed to parse the manifest file.
  """
  # No extra state: raised with whatever message the parser supplies.
| 19 | |||
class EditorError(Exception):
  """Unspecified error from the user's text editor.
  """
  # Raised by editor.Editor when the editor process exits nonzero.
| 23 | |||
class GitError(Exception):
  """Unspecified internal error from git.

  Attributes:
    command: (str) description of the git command that failed;
             also used as the exception's string form.
  """
  def __init__(self, command):
    self.command = command

  def __str__(self):
    return self.command
| 32 | |||
# NOTE(review): this class shadows the builtin ImportError inside this
# module (and for anyone doing 'from error import *').  Renaming it would
# break existing callers, so it is documented rather than changed.
class ImportError(Exception):
  """An import from a non-Git format cannot be performed.
  """
  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
| 41 | |||
class UploadError(Exception):
  """A bundle upload to Gerrit did not succeed.

  Attributes:
    reason: (str) human-readable failure description; also used as
            the exception's string form.
  """
  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
| 50 | |||
class NoSuchProjectError(Exception):
  """A specified project does not exist in the work tree.

  Attributes:
    name: (str or None) the project name or path that did not match;
          None means the current directory was not inside any project.
  """
  def __init__(self, name=None):
    self.name = name

  def __str__(self):
    # Bug fix: this read 'self.Name' (capital N), an attribute that was
    # never set, so the no-argument case raised AttributeError instead
    # of producing a message.
    if self.name is None:
      return 'in current directory'
    return self.name
| 61 | |||
class RepoChangedException(Exception):
  """Thrown if 'repo sync' results in repo updating its internal
  repo or manifest repositories.  In this special case we must
  use exec to re-execute repo with the new code and manifest.
  """
  # Control-flow exception: callers are expected to catch it and
  # re-exec rather than treat it as a failure.
diff --git a/froofle/__init__.py b/froofle/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/froofle/__init__.py | |||
diff --git a/froofle/protobuf/__init__.py b/froofle/protobuf/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/froofle/protobuf/__init__.py | |||
diff --git a/froofle/protobuf/descriptor.py b/froofle/protobuf/descriptor.py new file mode 100644 index 00000000..e74cf25e --- /dev/null +++ b/froofle/protobuf/descriptor.py | |||
| @@ -0,0 +1,433 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | # TODO(robinson): We probably need to provide deep-copy methods for | ||
| 32 | # descriptor types. When a FieldDescriptor is passed into | ||
| 33 | # Descriptor.__init__(), we should make a deep copy and then set | ||
| 34 | # containing_type on it. Alternatively, we could just get | ||
| 35 | # rid of containing_type (iit's not needed for reflection.py, at least). | ||
| 36 | # | ||
| 37 | # TODO(robinson): Print method? | ||
| 38 | # | ||
| 39 | # TODO(robinson): Useful __repr__? | ||
| 40 | |||
| 41 | """Descriptors essentially contain exactly the information found in a .proto | ||
| 42 | file, in types that make this information accessible in Python. | ||
| 43 | """ | ||
| 44 | |||
| 45 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 46 | |||
class DescriptorBase(object):

  """Descriptors base class.

  This class is the base of all descriptor classes. It provides common options
  related functionaility.
  """

  def __init__(self, options, options_class_name):
    """Initialize the descriptor given its options message and the name of the
    class of the options message. The name of the class is required in case
    the options message is None and has to be created.

    Args:
      options: the *Options protocol message instance, or None.
      options_class_name: (str) name of the options class in
        descriptor_pb2, e.g. 'MessageOptions'; used to build a default
        instance lazily when options is None.
    """
    self._options = options
    self._options_class_name = options_class_name

  def GetOptions(self):
    """Retrieves descriptor options.

    This method returns the options set or creates the default options for the
    descriptor.

    Raises:
      RuntimeError: the stored class name does not exist in descriptor_pb2.
    """
    if self._options:
      return self._options
    # Imported here (not at module top) -- descriptor_pb2 itself depends
    # on this module, so a top-level import would be circular.
    from froofle.protobuf import descriptor_pb2
    try:
      options_class = getattr(descriptor_pb2, self._options_class_name)
    except AttributeError:
      raise RuntimeError('Unknown options class name %s!' %
                         (self._options_class_name))
    self._options = options_class()
    return self._options
| 79 | |||
| 80 | |||
class Descriptor(DescriptorBase):

  """Descriptor for a protocol message type.

  A Descriptor instance has the following attributes:

  name: (str) Name of this protocol message type.
  full_name: (str) Fully-qualified name of this protocol message type,
    which will include protocol "package" name and the name of any
    enclosing types.

  filename: (str) Name of the .proto file containing this message.

  containing_type: (Descriptor) Reference to the descriptor of the
    type containing us, or None if we have no containing type.

  fields: (list of FieldDescriptors) Field descriptors for all
    fields in this type.
  fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
    objects as in |fields|, but indexed by "number" attribute in each
    FieldDescriptor.
  fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
    objects as in |fields|, but indexed by "name" attribute in each
    FieldDescriptor.

  nested_types: (list of Descriptors) Descriptor references
    for all protocol message types nested within this one.
  nested_types_by_name: (dict str -> Descriptor) Same Descriptor
    objects as in |nested_types|, but indexed by "name" attribute
    in each Descriptor.

  enum_types: (list of EnumDescriptors) EnumDescriptor references
    for all enums contained within this type.
  enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
    objects as in |enum_types|, but indexed by "name" attribute
    in each EnumDescriptor.
  enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
    from enum value name to EnumValueDescriptor for that value.

  extensions: (list of FieldDescriptor) All extensions defined directly
    within this message type (NOT within a nested type).
  extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
    objects as |extensions|, but indexed by "name" attribute of each
    FieldDescriptor.

  options: (descriptor_pb2.MessageOptions) Protocol message options or None
    to use default message options.
  """

  def __init__(self, name, full_name, filename, containing_type,
               fields, nested_types, enum_types, extensions, options=None):
    """Arguments to __init__() are as described in the description
    of Descriptor fields above.
    """
    super(Descriptor, self).__init__(options, 'MessageOptions')
    self.name = name
    self.full_name = full_name
    self.filename = filename
    self.containing_type = containing_type

    # We have fields in addition to fields_by_name and fields_by_number,
    # so that:
    #   1. Clients can index fields by "order in which they're listed."
    #   2. Clients can easily iterate over all fields with the terse
    #      syntax: for f in descriptor.fields: ...
    self.fields = fields
    for field in self.fields:
      # Back-link each field to this message type.
      field.containing_type = self
    self.fields_by_number = dict((f.number, f) for f in fields)
    self.fields_by_name = dict((f.name, f) for f in fields)

    self.nested_types = nested_types
    self.nested_types_by_name = dict((t.name, t) for t in nested_types)

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      # Back-link each nested enum to this message type.
      enum_type.containing_type = self
    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
    # Flattened view: every value of every nested enum, keyed by name.
    self.enum_values_by_name = dict(
        (v.name, v) for t in enum_types for v in t.values)

    self.extensions = extensions
    for extension in self.extensions:
      # Extensions declared here are scoped to this message type.
      extension.extension_scope = self
    self.extensions_by_name = dict((f.name, f) for f in extensions)
| 166 | |||
| 167 | |||
| 168 | # TODO(robinson): We should have aggressive checking here, | ||
| 169 | # for example: | ||
| 170 | # * If you specify a repeated field, you should not be allowed | ||
| 171 | # to specify a default value. | ||
| 172 | # * [Other examples here as needed]. | ||
| 173 | # | ||
| 174 | # TODO(robinson): for this and other *Descriptor classes, we | ||
| 175 | # might also want to lock things down aggressively (e.g., | ||
| 176 | # prevent clients from setting the attributes). Having | ||
| 177 | # stronger invariants here in general will reduce the number | ||
| 178 | # of runtime checks we must do in reflection.py... | ||
class FieldDescriptor(DescriptorBase):

  """Descriptor for a single field in a .proto file.

  A FieldDescriptor instance has the following attributes:

  name: (str) Name of this field, exactly as it appears in .proto.
  full_name: (str) Name of this field, including containing scope.  This is
    particularly relevant for extensions.
  index: (int) Dense, 0-indexed index giving the order that this
    field textually appears within its message in the .proto file.
  number: (int) Tag number declared for this field in the .proto file.

  type: (One of the TYPE_* constants below) Declared type.
  cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
    represent this field.

  label: (One of the LABEL_* constants below) Tells whether this
    field is optional, required, or repeated.
  default_value: (Varies) Default value of this field.  Only
    meaningful for non-repeated scalar fields.  Repeated fields
    should always set this to [], and non-repeated composite
    fields should always set this to None.

  containing_type: (Descriptor) Descriptor of the protocol message
    type that contains this field.  Set by the Descriptor constructor
    if we're passed into one.
    Somewhat confusingly, for extension fields, this is the
    descriptor of the EXTENDED message, not the descriptor
    of the message containing this field.  (See is_extension and
    extension_scope below).
  message_type: (Descriptor) If a composite field, a descriptor
    of the message type contained in this field.  Otherwise, this is None.
  enum_type: (EnumDescriptor) If this field contains an enum, a
    descriptor of that enum.  Otherwise, this is None.

  is_extension: True iff this describes an extension field.
  extension_scope: (Descriptor) Only meaningful if is_extension is True.
    Gives the message that immediately contains this extension field.
    Will be None iff we're a top-level (file-level) extension field.

  options: (descriptor_pb2.FieldOptions) Protocol message field options or
    None to use default field options.
  """

  # Must be consistent with C++ FieldDescriptor::Type enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # Must be consistent with C++ FieldDescriptor::CppType enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  # Must be consistent with C++ FieldDescriptor::Label enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  # NOTE: the |type| parameter intentionally shadows the builtin to match
  # the attribute name; generated code passes every argument by keyword.
  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None):
    """The arguments are as described in the description of FieldDescriptor
    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
| 297 | |||
| 298 | |||
class EnumDescriptor(DescriptorBase):

  """Descriptor for an enum defined in a .proto file.

  An EnumDescriptor instance has the following attributes:

  name: (str) Name of the enum type.
  full_name: (str) Full name of the type, including package name
    and any enclosing type(s).
  filename: (str) Name of the .proto file in which this appears.

  values: (list of EnumValueDescriptors) List of the values
    in this enum.
  values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
    but indexed by the "name" field of each EnumValueDescriptor.
  values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
    but indexed by the "number" field of each EnumValueDescriptor.
  containing_type: (Descriptor) Descriptor of the immediate containing
    type of this enum, or None if this is an enum defined at the
    top level in a .proto file.  Set by Descriptor's constructor
    if we're passed into one.
  options: (descriptor_pb2.EnumOptions) Enum options message or
    None to use default enum options.
  """

  def __init__(self, name, full_name, filename, values,
               containing_type=None, options=None):
    """Initializes an EnumDescriptor; arguments match the attributes above."""
    super(EnumDescriptor, self).__init__(options, 'EnumOptions')
    self.name = name
    self.full_name = full_name
    self.filename = filename
    self.containing_type = containing_type
    self.values = values
    # Mark this enum as the owning type of each value while building
    # both lookup tables in a single pass.
    by_name = {}
    by_number = {}
    for val in values:
      val.type = self
      by_name[val.name] = val
      by_number[val.number] = val
    self.values_by_name = by_name
    self.values_by_number = by_number
| 337 | |||
| 338 | |||
class EnumValueDescriptor(DescriptorBase):

  """Descriptor for a single value within an enum.

  name: (str) Name of this value.
  index: (int) Dense, 0-indexed index giving the order that this
    value appears textually within its enum in the .proto file.
  number: (int) Actual number assigned to this enum value.
  type: (EnumDescriptor) EnumDescriptor to which this value
    belongs.  Set by EnumDescriptor's constructor if we're
    passed into one.
  options: (descriptor_pb2.EnumValueOptions) Enum value options message or
    None to use default enum value options.
  """

  # |type| shadows the builtin to match the attribute name; callers pass
  # arguments by keyword.
  def __init__(self, name, index, number, type=None, options=None):
    """Arguments are as described in the attribute description above."""
    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
    self.name = name
    self.index = index
    self.number = number
    self.type = type
| 361 | |||
| 362 | |||
class ServiceDescriptor(DescriptorBase):

  """Descriptor for a service.

  name: (str) Name of the service.
  full_name: (str) Full name of the service, including package name.
  index: (int) 0-indexed index giving the order that this service
    definition appears within the .proto file.
  methods: (list of MethodDescriptor) List of methods provided by this
    service.
  options: (descriptor_pb2.ServiceOptions) Service options message or
    None to use default service options.
  """

  def __init__(self, name, full_name, index, methods, options=None):
    """Arguments are as described in the attribute description above."""
    super(ServiceDescriptor, self).__init__(options, 'ServiceOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.methods = methods
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method.

    Returns:
      The MethodDescriptor whose name equals |name|, or None if this
      service has no method with that name.
    """
    for method in self.methods:
      if name == method.name:
        return method
    return None
| 393 | |||
| 394 | |||
class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

  name: (str) Name of the method within the service.
  full_name: (str) Full name of method.
  index: (int) 0-indexed index of the method inside the service.
  containing_service: (ServiceDescriptor) The service that contains this
    method.
  input_type: The descriptor of the message that this method accepts.
  output_type: The descriptor of the message that this method returns.
  options: (descriptor_pb2.MethodOptions) Method options message or
    None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """Initializes a MethodDescriptor; arguments match the attributes above.

    containing_service may be passed as None and assigned afterwards
    (ServiceDescriptor's constructor does exactly that for the methods
    handed to it).
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    self.containing_service = containing_service
    self.name = name
    self.full_name = full_name
    self.index = index
    self.input_type = input_type
    self.output_type = output_type
| 424 | |||
| 425 | |||
| 426 | def _ParseOptions(message, string): | ||
| 427 | """Parses serialized options. | ||
| 428 | |||
| 429 | This helper function is used to parse serialized options in generated | ||
| 430 | proto2 files. It must not be used outside proto2. | ||
| 431 | """ | ||
| 432 | message.ParseFromString(string) | ||
| 433 | return message; | ||
diff --git a/froofle/protobuf/descriptor_pb2.py b/froofle/protobuf/descriptor_pb2.py new file mode 100644 index 00000000..16873834 --- /dev/null +++ b/froofle/protobuf/descriptor_pb2.py | |||
| @@ -0,0 +1,950 @@ | |||
| 1 | #!/usr/bin/python2.4 | ||
| 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| 3 | |||
| 4 | from froofle.protobuf import descriptor | ||
| 5 | from froofle.protobuf import message | ||
| 6 | from froofle.protobuf import reflection | ||
| 7 | from froofle.protobuf import service | ||
| 8 | from froofle.protobuf import service_reflection | ||
| 9 | |||
| 10 | |||
# Hand-wired EnumDescriptor mirroring FieldDescriptorProto.Type from
# descriptor.proto: the 18 declared field wire types, numbers 1-18.
_FIELDDESCRIPTORPROTO_TYPE = descriptor.EnumDescriptor(
  name='Type',
  full_name='froofle.protobuf.FieldDescriptorProto.Type',
  filename='Type',
  values=[
    descriptor.EnumValueDescriptor(
      name='TYPE_DOUBLE', index=0, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_FLOAT', index=1, number=2,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_INT64', index=2, number=3,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_UINT64', index=3, number=4,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_INT32', index=4, number=5,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_FIXED64', index=5, number=6,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_FIXED32', index=6, number=7,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_BOOL', index=7, number=8,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_STRING', index=8, number=9,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_GROUP', index=9, number=10,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_MESSAGE', index=10, number=11,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_BYTES', index=11, number=12,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_UINT32', index=12, number=13,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_ENUM', index=13, number=14,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_SFIXED32', index=14, number=15,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_SFIXED64', index=15, number=16,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_SINT32', index=16, number=17,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TYPE_SINT64', index=17, number=18,
      options=None,
      type=None),
  ],
  options=None,
)
| 91 | |||
# Hand-wired EnumDescriptor mirroring FieldDescriptorProto.Label
# (optional/required/repeated) from descriptor.proto.
_FIELDDESCRIPTORPROTO_LABEL = descriptor.EnumDescriptor(
  name='Label',
  full_name='froofle.protobuf.FieldDescriptorProto.Label',
  filename='Label',
  values=[
    descriptor.EnumValueDescriptor(
      name='LABEL_OPTIONAL', index=0, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='LABEL_REQUIRED', index=1, number=2,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='LABEL_REPEATED', index=2, number=3,
      options=None,
      type=None),
  ],
  options=None,
)
| 112 | |||
# Hand-wired EnumDescriptor mirroring FileOptions.OptimizeMode from
# descriptor.proto (SPEED vs CODE_SIZE code generation trade-off).
_FILEOPTIONS_OPTIMIZEMODE = descriptor.EnumDescriptor(
  name='OptimizeMode',
  full_name='froofle.protobuf.FileOptions.OptimizeMode',
  filename='OptimizeMode',
  values=[
    descriptor.EnumValueDescriptor(
      name='SPEED', index=0, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='CODE_SIZE', index=1, number=2,
      options=None,
      type=None),
  ],
  options=None,
)
| 129 | |||
# Hand-wired EnumDescriptor mirroring FieldOptions.CType from
# descriptor.proto (C++ string representation hint: CORD / STRING_PIECE).
_FIELDOPTIONS_CTYPE = descriptor.EnumDescriptor(
  name='CType',
  full_name='froofle.protobuf.FieldOptions.CType',
  filename='CType',
  values=[
    descriptor.EnumValueDescriptor(
      name='CORD', index=0, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='STRING_PIECE', index=1, number=2,
      options=None,
      type=None),
  ],
  options=None,
)
| 146 | |||
| 147 | |||
# Descriptor for FileDescriptorSet: a single repeated FileDescriptorProto
# field named 'file' (tag 1).
_FILEDESCRIPTORSET = descriptor.Descriptor(
  name='FileDescriptorSet',
  full_name='froofle.protobuf.FileDescriptorSet',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='file', full_name='froofle.protobuf.FileDescriptorSet.file', index=0,
      number=1, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 168 | |||
| 169 | |||
# Descriptor for FileDescriptorProto: one .proto file's name, package,
# dependencies, message/enum/service declarations, extensions and options.
# NOTE(review): unicode() defaults make this generated module Python 2 only.
_FILEDESCRIPTORPROTO = descriptor.Descriptor(
  name='FileDescriptorProto',
  full_name='froofle.protobuf.FileDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.FileDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='package', full_name='froofle.protobuf.FileDescriptorProto.package', index=1,
      number=2, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='dependency', full_name='froofle.protobuf.FileDescriptorProto.dependency', index=2,
      number=3, type=9, cpp_type=9, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='message_type', full_name='froofle.protobuf.FileDescriptorProto.message_type', index=3,
      number=4, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='enum_type', full_name='froofle.protobuf.FileDescriptorProto.enum_type', index=4,
      number=5, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='service', full_name='froofle.protobuf.FileDescriptorProto.service', index=5,
      number=6, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='extension', full_name='froofle.protobuf.FileDescriptorProto.extension', index=6,
      number=7, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.FileDescriptorProto.options', index=7,
      number=8, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 239 | |||
| 240 | |||
# Descriptor for the nested DescriptorProto.ExtensionRange message:
# a half-open [start, end) tag-number range reserved for extensions.
_DESCRIPTORPROTO_EXTENSIONRANGE = descriptor.Descriptor(
  name='ExtensionRange',
  full_name='froofle.protobuf.DescriptorProto.ExtensionRange',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='start', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.start', index=0,
      number=1, type=5, cpp_type=1, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='end', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.end', index=1,
      number=2, type=5, cpp_type=1, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 268 | |||
# Descriptor for DescriptorProto: describes one message type (its fields,
# extensions, nested messages/enums, extension ranges and options).
_DESCRIPTORPROTO = descriptor.Descriptor(
  name='DescriptorProto',
  full_name='froofle.protobuf.DescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.DescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='field', full_name='froofle.protobuf.DescriptorProto.field', index=1,
      number=2, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='extension', full_name='froofle.protobuf.DescriptorProto.extension', index=2,
      number=6, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='nested_type', full_name='froofle.protobuf.DescriptorProto.nested_type', index=3,
      number=3, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='enum_type', full_name='froofle.protobuf.DescriptorProto.enum_type', index=4,
      number=4, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='extension_range', full_name='froofle.protobuf.DescriptorProto.extension_range', index=5,
      number=5, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.DescriptorProto.options', index=6,
      number=7, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 331 | |||
| 332 | |||
# Descriptor for FieldDescriptorProto: describes one field within a message,
# including its Type and Label enums declared above.
_FIELDDESCRIPTORPROTO = descriptor.Descriptor(
  name='FieldDescriptorProto',
  full_name='froofle.protobuf.FieldDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.FieldDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='number', full_name='froofle.protobuf.FieldDescriptorProto.number', index=1,
      number=3, type=5, cpp_type=1, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='label', full_name='froofle.protobuf.FieldDescriptorProto.label', index=2,
      number=4, type=14, cpp_type=8, label=1,
      default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='type', full_name='froofle.protobuf.FieldDescriptorProto.type', index=3,
      number=5, type=14, cpp_type=8, label=1,
      default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='type_name', full_name='froofle.protobuf.FieldDescriptorProto.type_name', index=4,
      number=6, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='extendee', full_name='froofle.protobuf.FieldDescriptorProto.extendee', index=5,
      number=2, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='default_value', full_name='froofle.protobuf.FieldDescriptorProto.default_value', index=6,
      number=7, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.FieldDescriptorProto.options', index=7,
      number=8, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
    _FIELDDESCRIPTORPROTO_TYPE,
    _FIELDDESCRIPTORPROTO_LABEL,
  ],
  options=None)
| 404 | |||
| 405 | |||
# Descriptor for EnumDescriptorProto: describes one enum type declared in
# a .proto file (its name, values and options).
_ENUMDESCRIPTORPROTO = descriptor.Descriptor(
  name='EnumDescriptorProto',
  full_name='froofle.protobuf.EnumDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.EnumDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='value', full_name='froofle.protobuf.EnumDescriptorProto.value', index=1,
      number=2, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.EnumDescriptorProto.options', index=2,
      number=3, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 440 | |||
| 441 | |||
# Descriptor for EnumValueDescriptorProto: describes a single name/number
# pair within an enum declaration.
_ENUMVALUEDESCRIPTORPROTO = descriptor.Descriptor(
  name='EnumValueDescriptorProto',
  full_name='froofle.protobuf.EnumValueDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.EnumValueDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='number', full_name='froofle.protobuf.EnumValueDescriptorProto.number', index=1,
      number=2, type=5, cpp_type=1, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.EnumValueDescriptorProto.options', index=2,
      number=3, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 476 | |||
| 477 | |||
# Generated descriptor table for froofle.protobuf.ServiceDescriptorProto.
_SERVICEDESCRIPTORPROTO = descriptor.Descriptor(
  name='ServiceDescriptorProto',
  full_name='froofle.protobuf.ServiceDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.ServiceDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='method', full_name='froofle.protobuf.ServiceDescriptorProto.method', index=1,
      number=2, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.ServiceDescriptorProto.options', index=2,
      number=3, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 512 | |||
| 513 | |||
# Generated descriptor table for froofle.protobuf.MethodDescriptorProto.
_METHODDESCRIPTORPROTO = descriptor.Descriptor(
  name='MethodDescriptorProto',
  full_name='froofle.protobuf.MethodDescriptorProto',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.MethodDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='input_type', full_name='froofle.protobuf.MethodDescriptorProto.input_type', index=1,
      number=2, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='output_type', full_name='froofle.protobuf.MethodDescriptorProto.output_type', index=2,
      number=3, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='options', full_name='froofle.protobuf.MethodDescriptorProto.options', index=3,
      number=4, type=11, cpp_type=10, label=1,
      default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 555 | |||
| 556 | |||
# Generated descriptor table for froofle.protobuf.FileOptions.
# optimize_for default (2) selects the CODE_SIZE value of OptimizeMode.
_FILEOPTIONS = descriptor.Descriptor(
  name='FileOptions',
  full_name='froofle.protobuf.FileOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='java_package', full_name='froofle.protobuf.FileOptions.java_package', index=0,
      number=1, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='java_outer_classname', full_name='froofle.protobuf.FileOptions.java_outer_classname', index=1,
      number=8, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='java_multiple_files', full_name='froofle.protobuf.FileOptions.java_multiple_files', index=2,
      number=10, type=8, cpp_type=7, label=1,
      default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='optimize_for', full_name='froofle.protobuf.FileOptions.optimize_for', index=3,
      number=9, type=14, cpp_type=8, label=1,
      default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.FileOptions.uninterpreted_option', index=4,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
    _FILEOPTIONS_OPTIMIZEMODE,
  ],
  options=None)
| 606 | |||
| 607 | |||
# Generated descriptor table for froofle.protobuf.MessageOptions.
_MESSAGEOPTIONS = descriptor.Descriptor(
  name='MessageOptions',
  full_name='froofle.protobuf.MessageOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='message_set_wire_format', full_name='froofle.protobuf.MessageOptions.message_set_wire_format', index=0,
      number=1, type=8, cpp_type=7, label=1,
      default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.MessageOptions.uninterpreted_option', index=1,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 635 | |||
| 636 | |||
# Generated descriptor table for froofle.protobuf.FieldOptions.
# ctype default (1) selects a value of the CType enum wired up below.
_FIELDOPTIONS = descriptor.Descriptor(
  name='FieldOptions',
  full_name='froofle.protobuf.FieldOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='ctype', full_name='froofle.protobuf.FieldOptions.ctype', index=0,
      number=1, type=14, cpp_type=8, label=1,
      default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='experimental_map_key', full_name='froofle.protobuf.FieldOptions.experimental_map_key', index=1,
      number=9, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.FieldOptions.uninterpreted_option', index=2,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
    _FIELDOPTIONS_CTYPE,
  ],
  options=None)
| 672 | |||
| 673 | |||
# Generated descriptor table for froofle.protobuf.EnumOptions.
_ENUMOPTIONS = descriptor.Descriptor(
  name='EnumOptions',
  full_name='froofle.protobuf.EnumOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.EnumOptions.uninterpreted_option', index=0,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 694 | |||
| 695 | |||
# Generated descriptor table for froofle.protobuf.EnumValueOptions.
_ENUMVALUEOPTIONS = descriptor.Descriptor(
  name='EnumValueOptions',
  full_name='froofle.protobuf.EnumValueOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.EnumValueOptions.uninterpreted_option', index=0,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 716 | |||
| 717 | |||
# Generated descriptor table for froofle.protobuf.ServiceOptions.
_SERVICEOPTIONS = descriptor.Descriptor(
  name='ServiceOptions',
  full_name='froofle.protobuf.ServiceOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.ServiceOptions.uninterpreted_option', index=0,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 738 | |||
| 739 | |||
# Generated descriptor table for froofle.protobuf.MethodOptions.
_METHODOPTIONS = descriptor.Descriptor(
  name='MethodOptions',
  full_name='froofle.protobuf.MethodOptions',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='froofle.protobuf.MethodOptions.uninterpreted_option', index=0,
      number=999, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 760 | |||
| 761 | |||
# Generated descriptor table for the nested message
# froofle.protobuf.UninterpretedOption.NamePart (both fields are required:
# label=2).
_UNINTERPRETEDOPTION_NAMEPART = descriptor.Descriptor(
  name='NamePart',
  full_name='froofle.protobuf.UninterpretedOption.NamePart',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name_part', full_name='froofle.protobuf.UninterpretedOption.NamePart.name_part', index=0,
      number=1, type=9, cpp_type=9, label=2,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='is_extension', full_name='froofle.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
      number=2, type=8, cpp_type=7, label=2,
      default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 789 | |||
# Generated descriptor table for froofle.protobuf.UninterpretedOption.
# Field numbers start at 2; number 1 is reserved in descriptor.proto.
_UNINTERPRETEDOPTION = descriptor.Descriptor(
  name='UninterpretedOption',
  full_name='froofle.protobuf.UninterpretedOption',
  filename='froofle/protobuf/descriptor.proto',
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='name', full_name='froofle.protobuf.UninterpretedOption.name', index=0,
      number=2, type=11, cpp_type=10, label=3,
      default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='identifier_value', full_name='froofle.protobuf.UninterpretedOption.identifier_value', index=1,
      number=3, type=9, cpp_type=9, label=1,
      default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='positive_int_value', full_name='froofle.protobuf.UninterpretedOption.positive_int_value', index=2,
      number=4, type=4, cpp_type=4, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='negative_int_value', full_name='froofle.protobuf.UninterpretedOption.negative_int_value', index=3,
      number=5, type=3, cpp_type=2, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='double_value', full_name='froofle.protobuf.UninterpretedOption.double_value', index=4,
      number=6, type=1, cpp_type=5, label=1,
      default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='string_value', full_name='froofle.protobuf.UninterpretedOption.string_value', index=5,
      number=7, type=12, cpp_type=9, label=1,
      default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],  # TODO(robinson): Implement.
  enum_types=[
  ],
  options=None)
| 845 | |||
| 846 | |||
# Wire up cross-references between the generated descriptors.  The
# descriptor graph is cyclic (messages reference each other), so the
# message_type/enum_type pointers of message- and enum-typed fields can
# only be filled in after every descriptor object above exists.
_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
| 878 | |||
# Generated message class; the Python 2 metaclass reads DESCRIPTOR from the
# class dict and synthesizes field properties from it.
class FileDescriptorSet(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _FILEDESCRIPTORSET
| 882 | |||
# Generated message class backed by _FILEDESCRIPTORPROTO.
class FileDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _FILEDESCRIPTORPROTO
| 886 | |||
# Generated message class backed by _DESCRIPTORPROTO.  The nested
# ExtensionRange class is defined before the outer DESCRIPTOR assignment so
# both entries are present in the class dict when the metaclass runs.
class DescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType

  # Nested generated message class backed by _DESCRIPTORPROTO_EXTENSIONRANGE.
  class ExtensionRange(message.Message):
    __metaclass__ = reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE
  DESCRIPTOR = _DESCRIPTORPROTO
| 894 | |||
# Generated message class backed by _FIELDDESCRIPTORPROTO.
class FieldDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _FIELDDESCRIPTORPROTO
| 898 | |||
# Generated message class backed by _ENUMDESCRIPTORPROTO.
class EnumDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _ENUMDESCRIPTORPROTO
| 902 | |||
# Generated message class backed by _ENUMVALUEDESCRIPTORPROTO.
class EnumValueDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO
| 906 | |||
# Generated message class backed by _SERVICEDESCRIPTORPROTO.
class ServiceDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _SERVICEDESCRIPTORPROTO
| 910 | |||
# Generated message class backed by _METHODDESCRIPTORPROTO.
class MethodDescriptorProto(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _METHODDESCRIPTORPROTO
| 914 | |||
# Generated message class backed by _FILEOPTIONS.
class FileOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _FILEOPTIONS
| 918 | |||
# Generated message class backed by _MESSAGEOPTIONS.
class MessageOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _MESSAGEOPTIONS
| 922 | |||
# Generated message class backed by _FIELDOPTIONS.
class FieldOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _FIELDOPTIONS
| 926 | |||
# Generated message class backed by _ENUMOPTIONS.
class EnumOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _ENUMOPTIONS
| 930 | |||
# Generated message class backed by _ENUMVALUEOPTIONS.
class EnumValueOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _ENUMVALUEOPTIONS
| 934 | |||
# Generated message class backed by _SERVICEOPTIONS.
class ServiceOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _SERVICEOPTIONS
| 938 | |||
# Generated message class backed by _METHODOPTIONS.
class MethodOptions(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _METHODOPTIONS
| 942 | |||
# Generated message class backed by _UNINTERPRETEDOPTION, with its nested
# NamePart class defined before the outer DESCRIPTOR assignment so both
# entries are present in the class dict when the metaclass runs.
class UninterpretedOption(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType

  # Nested generated message class backed by _UNINTERPRETEDOPTION_NAMEPART.
  class NamePart(message.Message):
    __metaclass__ = reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART
  DESCRIPTOR = _UNINTERPRETEDOPTION
| 950 | |||
diff --git a/froofle/protobuf/internal/__init__.py b/froofle/protobuf/internal/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/froofle/protobuf/internal/__init__.py | |||
diff --git a/froofle/protobuf/internal/decoder.py b/froofle/protobuf/internal/decoder.py new file mode 100644 index 00000000..2dd4c96e --- /dev/null +++ b/froofle/protobuf/internal/decoder.py | |||
| @@ -0,0 +1,209 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Class for decoding protocol buffer primitives. | ||
| 32 | |||
| 33 | Contains the logic for decoding every logical protocol field type | ||
| 34 | from one of the 5 physical wire types. | ||
| 35 | """ | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | import struct | ||
| 40 | from froofle.protobuf import message | ||
| 41 | from froofle.protobuf.internal import input_stream | ||
| 42 | from froofle.protobuf.internal import wire_format | ||
| 43 | |||
| 44 | |||
| 45 | |||
| 46 | # Note that much of this code is ported from //net/proto/ProtocolBuffer, and | ||
| 47 | # that the interface is strongly inspired by WireFormat from the C++ proto2 | ||
| 48 | # implementation. | ||
| 49 | |||
| 50 | |||
| 51 | class Decoder(object): | ||
| 52 | |||
| 53 | """Decodes logical protocol buffer fields from the wire.""" | ||
| 54 | |||
  def __init__(self, s):
    """Initializes the decoder to read from s.

    Args:
      s: An immutable sequence of bytes, which must be accessible
        via the Python buffer() primitive (i.e., buffer(s)).
    """
    # All reads go through this InputStream, which tracks the position.
    self._stream = input_stream.InputStream(s)
| 63 | |||
  def EndOfStream(self):
    """Returns true iff we've reached the end of the bytes we're reading."""
    # Pure delegation to the underlying InputStream.
    return self._stream.EndOfStream()
| 67 | |||
  def Position(self):
    """Returns the 0-indexed position in |s|."""
    # Pure delegation to the underlying InputStream.
    return self._stream.Position()
| 71 | |||
| 72 | def ReadFieldNumberAndWireType(self): | ||
| 73 | """Reads a tag from the wire. Returns a (field_number, wire_type) pair.""" | ||
| 74 | tag_and_type = self.ReadUInt32() | ||
| 75 | return wire_format.UnpackTag(tag_and_type) | ||
| 76 | |||
  def SkipBytes(self, bytes):
    """Skips the specified number of bytes on the wire."""
    # NOTE: the parameter name shadows the built-in 'bytes'; kept as-is so
    # keyword callers (SkipBytes(bytes=n)) remain compatible.
    self._stream.SkipBytes(bytes)
| 80 | |||
| 81 | # Note that the Read*() methods below are not exactly symmetrical with the | ||
| 82 | # corresponding Encoder.Append*() methods. Those Encoder methods first | ||
| 83 | # encode a tag, but the Read*() methods below assume that the tag has already | ||
| 84 | # been read, and that the client wishes to read a field of the specified type | ||
| 85 | # starting at the current position. | ||
| 86 | |||
  def ReadInt32(self):
    """Reads and returns a signed, varint-encoded, 32-bit integer."""
    # Pure delegation; the stream handles varint decoding.
    return self._stream.ReadVarint32()
| 90 | |||
  def ReadInt64(self):
    """Reads and returns a signed, varint-encoded, 64-bit integer."""
    # Pure delegation; the stream handles varint decoding.
    return self._stream.ReadVarint64()
| 94 | |||
  def ReadUInt32(self):
    """Reads and returns an unsigned, varint-encoded, 32-bit integer."""
    return self._stream.ReadVarUInt32()
| 98 | |||
  def ReadUInt64(self):
    """Reads and returns an unsigned, varint-encoded, 64-bit integer."""
    return self._stream.ReadVarUInt64()
| 102 | |||
  def ReadSInt32(self):
    """Reads and returns a signed, zigzag-encoded, varint-encoded,
    32-bit integer."""
    # Zigzag unmapping is delegated to the wire_format helper.
    return wire_format.ZigZagDecode(self._stream.ReadVarUInt32())
| 107 | |||
  def ReadSInt64(self):
    """Reads and returns a signed, zigzag-encoded, varint-encoded,
    64-bit integer."""
    # Zigzag unmapping is delegated to the wire_format helper.
    return wire_format.ZigZagDecode(self._stream.ReadVarUInt64())
| 112 | |||
  def ReadFixed32(self):
    """Reads and returns an unsigned, fixed-width, 32-bit integer."""
    # Fixed-width values are read little-endian by the stream.
    return self._stream.ReadLittleEndian32()
| 116 | |||
  def ReadFixed64(self):
    """Reads and returns an unsigned, fixed-width, 64-bit integer."""
    # Fixed-width values are read little-endian by the stream.
    return self._stream.ReadLittleEndian64()
| 120 | |||
| 121 | def ReadSFixed32(self): | ||
| 122 | """Reads and returns a signed, fixed-width, 32-bit integer.""" | ||
| 123 | value = self._stream.ReadLittleEndian32() | ||
| 124 | if value >= (1 << 31): | ||
| 125 | value -= (1 << 32) | ||
| 126 | return value | ||
| 127 | |||
| 128 | def ReadSFixed64(self): | ||
| 129 | """Reads and returns a signed, fixed-width, 64-bit integer.""" | ||
| 130 | value = self._stream.ReadLittleEndian64() | ||
| 131 | if value >= (1 << 63): | ||
| 132 | value -= (1 << 64) | ||
| 133 | return value | ||
| 134 | |||
| 135 | def ReadFloat(self): | ||
| 136 | """Reads and returns a 4-byte floating-point number.""" | ||
| 137 | serialized = self._stream.ReadBytes(4) | ||
| 138 | return struct.unpack('f', serialized)[0] | ||
| 139 | |||
| 140 | def ReadDouble(self): | ||
| 141 | """Reads and returns an 8-byte floating-point number.""" | ||
| 142 | serialized = self._stream.ReadBytes(8) | ||
| 143 | return struct.unpack('d', serialized)[0] | ||
| 144 | |||
| 145 | def ReadBool(self): | ||
| 146 | """Reads and returns a bool.""" | ||
| 147 | i = self._stream.ReadVarUInt32() | ||
| 148 | return bool(i) | ||
| 149 | |||
  def ReadEnum(self):
    """Reads and returns an enum value."""
    # Enum values travel as plain varints on the wire.
    return self._stream.ReadVarUInt32()
| 153 | |||
| 154 | def ReadString(self): | ||
| 155 | """Reads and returns a length-delimited string.""" | ||
| 156 | bytes = self.ReadBytes() | ||
| 157 | return unicode(bytes, 'utf-8') | ||
| 158 | |||
| 159 | def ReadBytes(self): | ||
| 160 | """Reads and returns a length-delimited byte sequence.""" | ||
| 161 | length = self._stream.ReadVarUInt32() | ||
| 162 | return self._stream.ReadBytes(length) | ||
| 163 | |||
| 164 | def ReadMessageInto(self, msg): | ||
| 165 | """Calls msg.MergeFromString() to merge | ||
| 166 | length-delimited serialized message data into |msg|. | ||
| 167 | |||
| 168 | REQUIRES: The decoder must be positioned at the serialized "length" | ||
| 169 | prefix to a length-delimited serialized message. | ||
| 170 | |||
| 171 | POSTCONDITION: The decoder is positioned just after the | ||
| 172 | serialized message, and we have merged those serialized | ||
| 173 | contents into |msg|. | ||
| 174 | """ | ||
| 175 | length = self._stream.ReadVarUInt32() | ||
| 176 | sub_buffer = self._stream.GetSubBuffer(length) | ||
| 177 | num_bytes_used = msg.MergeFromString(sub_buffer) | ||
| 178 | if num_bytes_used != length: | ||
| 179 | raise message.DecodeError( | ||
| 180 | 'Submessage told to deserialize from %d-byte encoding, ' | ||
| 181 | 'but used only %d bytes' % (length, num_bytes_used)) | ||
| 182 | self._stream.SkipBytes(num_bytes_used) | ||
| 183 | |||
| 184 | def ReadGroupInto(self, expected_field_number, group): | ||
| 185 | """Calls group.MergeFromString() to merge | ||
| 186 | END_GROUP-delimited serialized message data into |group|. | ||
| 187 | We'll raise an exception if we don't find an END_GROUP | ||
| 188 | tag immediately after the serialized message contents. | ||
| 189 | |||
| 190 | REQUIRES: The decoder is positioned just after the START_GROUP | ||
| 191 | tag for this group. | ||
| 192 | |||
| 193 | POSTCONDITION: The decoder is positioned just after the | ||
| 194 | END_GROUP tag for this group, and we have merged | ||
| 195 | the contents of the group into |group|. | ||
| 196 | """ | ||
| 197 | sub_buffer = self._stream.GetSubBuffer() # No a priori length limit. | ||
| 198 | num_bytes_used = group.MergeFromString(sub_buffer) | ||
| 199 | if num_bytes_used < 0: | ||
| 200 | raise message.DecodeError('Group message reported negative bytes read.') | ||
| 201 | self._stream.SkipBytes(num_bytes_used) | ||
| 202 | field_number, field_type = self.ReadFieldNumberAndWireType() | ||
| 203 | if field_type != wire_format.WIRETYPE_END_GROUP: | ||
| 204 | raise message.DecodeError('Group message did not end with an END_GROUP.') | ||
| 205 | if field_number != expected_field_number: | ||
| 206 | raise message.DecodeError('END_GROUP tag had field ' | ||
| 207 | 'number %d, was expecting field number %d' % ( | ||
| 208 | field_number, expected_field_number)) | ||
| 209 | # We're now positioned just after the END_GROUP tag. Perfect. | ||
diff --git a/froofle/protobuf/internal/encoder.py b/froofle/protobuf/internal/encoder.py new file mode 100644 index 00000000..8b924b3e --- /dev/null +++ b/froofle/protobuf/internal/encoder.py | |||
| @@ -0,0 +1,206 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Class for encoding protocol message primitives. | ||
| 32 | |||
| 33 | Contains the logic for encoding every logical protocol field type | ||
| 34 | into one of the 5 physical wire types. | ||
| 35 | """ | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | import struct | ||
| 40 | from froofle.protobuf import message | ||
| 41 | from froofle.protobuf.internal import wire_format | ||
| 42 | from froofle.protobuf.internal import output_stream | ||
| 43 | |||
| 44 | |||
| 45 | # Note that much of this code is ported from //net/proto/ProtocolBuffer, and | ||
| 46 | # that the interface is strongly inspired by WireFormat from the C++ proto2 | ||
| 47 | # implementation. | ||
| 48 | |||
| 49 | |||
| 50 | class Encoder(object): | ||
| 51 | |||
| 52 | """Encodes logical protocol buffer fields to the wire format.""" | ||
| 53 | |||
| 54 | def __init__(self): | ||
| 55 | self._stream = output_stream.OutputStream() | ||
| 56 | |||
| 57 | def ToString(self): | ||
| 58 | """Returns all values encoded in this object as a string.""" | ||
| 59 | return self._stream.ToString() | ||
| 60 | |||
| 61 | # All the Append*() methods below first append a tag+type pair to the buffer | ||
| 62 | # before appending the specified value. | ||
| 63 | |||
| 64 | def AppendInt32(self, field_number, value): | ||
| 65 | """Appends a 32-bit integer to our buffer, varint-encoded.""" | ||
| 66 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 67 | self._stream.AppendVarint32(value) | ||
| 68 | |||
| 69 | def AppendInt64(self, field_number, value): | ||
| 70 | """Appends a 64-bit integer to our buffer, varint-encoded.""" | ||
| 71 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 72 | self._stream.AppendVarint64(value) | ||
| 73 | |||
| 74 | def AppendUInt32(self, field_number, unsigned_value): | ||
| 75 | """Appends an unsigned 32-bit integer to our buffer, varint-encoded.""" | ||
| 76 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 77 | self._stream.AppendVarUInt32(unsigned_value) | ||
| 78 | |||
| 79 | def AppendUInt64(self, field_number, unsigned_value): | ||
| 80 | """Appends an unsigned 64-bit integer to our buffer, varint-encoded.""" | ||
| 81 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 82 | self._stream.AppendVarUInt64(unsigned_value) | ||
| 83 | |||
| 84 | def AppendSInt32(self, field_number, value): | ||
| 85 | """Appends a 32-bit integer to our buffer, zigzag-encoded and then | ||
| 86 | varint-encoded. | ||
| 87 | """ | ||
| 88 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 89 | zigzag_value = wire_format.ZigZagEncode(value) | ||
| 90 | self._stream.AppendVarUInt32(zigzag_value) | ||
| 91 | |||
| 92 | def AppendSInt64(self, field_number, value): | ||
| 93 | """Appends a 64-bit integer to our buffer, zigzag-encoded and then | ||
| 94 | varint-encoded. | ||
| 95 | """ | ||
| 96 | self._AppendTag(field_number, wire_format.WIRETYPE_VARINT) | ||
| 97 | zigzag_value = wire_format.ZigZagEncode(value) | ||
| 98 | self._stream.AppendVarUInt64(zigzag_value) | ||
| 99 | |||
| 100 | def AppendFixed32(self, field_number, unsigned_value): | ||
| 101 | """Appends an unsigned 32-bit integer to our buffer, in little-endian | ||
| 102 | byte-order. | ||
| 103 | """ | ||
| 104 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32) | ||
| 105 | self._stream.AppendLittleEndian32(unsigned_value) | ||
| 106 | |||
| 107 | def AppendFixed64(self, field_number, unsigned_value): | ||
| 108 | """Appends an unsigned 64-bit integer to our buffer, in little-endian | ||
| 109 | byte-order. | ||
| 110 | """ | ||
| 111 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64) | ||
| 112 | self._stream.AppendLittleEndian64(unsigned_value) | ||
| 113 | |||
| 114 | def AppendSFixed32(self, field_number, value): | ||
| 115 | """Appends a signed 32-bit integer to our buffer, in little-endian | ||
| 116 | byte-order. | ||
| 117 | """ | ||
| 118 | sign = (value & 0x80000000) and -1 or 0 | ||
| 119 | if value >> 32 != sign: | ||
| 120 | raise message.EncodeError('SFixed32 out of range: %d' % value) | ||
| 121 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32) | ||
| 122 | self._stream.AppendLittleEndian32(value & 0xffffffff) | ||
| 123 | |||
| 124 | def AppendSFixed64(self, field_number, value): | ||
| 125 | """Appends a signed 64-bit integer to our buffer, in little-endian | ||
| 126 | byte-order. | ||
| 127 | """ | ||
| 128 | sign = (value & 0x8000000000000000) and -1 or 0 | ||
| 129 | if value >> 64 != sign: | ||
| 130 | raise message.EncodeError('SFixed64 out of range: %d' % value) | ||
| 131 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64) | ||
| 132 | self._stream.AppendLittleEndian64(value & 0xffffffffffffffff) | ||
| 133 | |||
| 134 | def AppendFloat(self, field_number, value): | ||
| 135 | """Appends a floating-point number to our buffer.""" | ||
| 136 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32) | ||
| 137 | self._stream.AppendRawBytes(struct.pack('f', value)) | ||
| 138 | |||
| 139 | def AppendDouble(self, field_number, value): | ||
| 140 | """Appends a double-precision floating-point number to our buffer.""" | ||
| 141 | self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64) | ||
| 142 | self._stream.AppendRawBytes(struct.pack('d', value)) | ||
| 143 | |||
| 144 | def AppendBool(self, field_number, value): | ||
| 145 | """Appends a boolean to our buffer.""" | ||
| 146 | self.AppendInt32(field_number, value) | ||
| 147 | |||
| 148 | def AppendEnum(self, field_number, value): | ||
| 149 | """Appends an enum value to our buffer.""" | ||
| 150 | self.AppendInt32(field_number, value) | ||
| 151 | |||
| 152 | def AppendString(self, field_number, value): | ||
| 153 | """Appends a length-prefixed unicode string, encoded as UTF-8 to our buffer, | ||
| 154 | with the length varint-encoded. | ||
| 155 | """ | ||
| 156 | self.AppendBytes(field_number, value.encode('utf-8')) | ||
| 157 | |||
| 158 | def AppendBytes(self, field_number, value): | ||
| 159 | """Appends a length-prefixed sequence of bytes to our buffer, with the | ||
| 160 | length varint-encoded. | ||
| 161 | """ | ||
| 162 | self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||
| 163 | self._stream.AppendVarUInt32(len(value)) | ||
| 164 | self._stream.AppendRawBytes(value) | ||
| 165 | |||
| 166 | # TODO(robinson): For AppendGroup() and AppendMessage(), we'd really like to | ||
| 167 | # avoid the extra string copy here. We can do so if we widen the Message | ||
| 168 | # interface to be able to serialize to a stream in addition to a string. The | ||
| 169 | # challenge when thinking ahead to the Python/C API implementation of Message | ||
| 170 | # is finding a stream-like Python thing to which we can write raw bytes | ||
| 171 | # from C. I'm not sure such a thing exists(?). (array.array is pretty much | ||
| 172 | # what we want, but it's not directly exposed in the Python/C API). | ||
| 173 | |||
| 174 | def AppendGroup(self, field_number, group): | ||
| 175 | """Appends a group to our buffer. | ||
| 176 | """ | ||
| 177 | self._AppendTag(field_number, wire_format.WIRETYPE_START_GROUP) | ||
| 178 | self._stream.AppendRawBytes(group.SerializeToString()) | ||
| 179 | self._AppendTag(field_number, wire_format.WIRETYPE_END_GROUP) | ||
| 180 | |||
| 181 | def AppendMessage(self, field_number, msg): | ||
| 182 | """Appends a nested message to our buffer. | ||
| 183 | """ | ||
| 184 | self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||
| 185 | self._stream.AppendVarUInt32(msg.ByteSize()) | ||
| 186 | self._stream.AppendRawBytes(msg.SerializeToString()) | ||
| 187 | |||
| 188 | def AppendMessageSetItem(self, field_number, msg): | ||
| 189 | """Appends an item using the message set wire format. | ||
| 190 | |||
| 191 | The message set message looks like this: | ||
| 192 | message MessageSet { | ||
| 193 | repeated group Item = 1 { | ||
| 194 | required int32 type_id = 2; | ||
| 195 | required string message = 3; | ||
| 196 | } | ||
| 197 | } | ||
| 198 | """ | ||
| 199 | self._AppendTag(1, wire_format.WIRETYPE_START_GROUP) | ||
| 200 | self.AppendInt32(2, field_number) | ||
| 201 | self.AppendMessage(3, msg) | ||
| 202 | self._AppendTag(1, wire_format.WIRETYPE_END_GROUP) | ||
| 203 | |||
| 204 | def _AppendTag(self, field_number, wire_type): | ||
| 205 | """Appends a tag containing field number and wire type information.""" | ||
| 206 | self._stream.AppendVarUInt32(wire_format.PackTag(field_number, wire_type)) | ||
diff --git a/froofle/protobuf/internal/input_stream.py b/froofle/protobuf/internal/input_stream.py new file mode 100644 index 00000000..26a26dcf --- /dev/null +++ b/froofle/protobuf/internal/input_stream.py | |||
| @@ -0,0 +1,326 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """InputStream is the primitive interface for reading bits from the wire. | ||
| 32 | |||
| 33 | All protocol buffer deserialization can be expressed in terms of | ||
| 34 | the InputStream primitives provided here. | ||
| 35 | """ | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | import struct | ||
| 40 | from array import array | ||
| 41 | from froofle.protobuf import message | ||
| 42 | from froofle.protobuf.internal import wire_format | ||
| 43 | |||
| 44 | |||
| 45 | # Note that much of this code is ported from //net/proto/ProtocolBuffer, and | ||
| 46 | # that the interface is strongly inspired by CodedInputStream from the C++ | ||
| 47 | # proto2 implementation. | ||
| 48 | |||
| 49 | |||
| 50 | class InputStreamBuffer(object): | ||
| 51 | |||
| 52 | """Contains all logic for reading bits, and dealing with stream position. | ||
| 53 | |||
| 54 | If an InputStream method ever raises an exception, the stream is left | ||
| 55 | in an indeterminate state and is not safe for further use. | ||
| 56 | """ | ||
| 57 | |||
| 58 | def __init__(self, s): | ||
| 59 | # What we really want is something like array('B', s), where elements we | ||
| 60 | # read from the array are already given to us as one-byte integers. BUT | ||
| 61 | # using array() instead of buffer() would force full string copies to result | ||
| 62 | # from each GetSubBuffer() call. | ||
| 63 | # | ||
| 64 | # So, if the N serialized bytes of a single protocol buffer object are | ||
| 65 | # split evenly between 2 child messages, and so on recursively, using | ||
| 66 | # array('B', s) instead of buffer() would incur an additional N*logN bytes | ||
| 67 | # copied during deserialization. | ||
| 68 | # | ||
| 69 | # The higher constant overhead of having to ord() for every byte we read | ||
| 70 | # from the buffer in _ReadVarintHelper() could definitely lead to worse | ||
| 71 | # performance in many real-world scenarios, even if the asymptotic | ||
| 72 | # complexity is better. However, our real answer is that the mythical | ||
| 73 | # Python/C extension module output mode for the protocol compiler will | ||
| 74 | # be blazing-fast and will eliminate most use of this class anyway. | ||
| 75 | self._buffer = buffer(s) | ||
| 76 | self._pos = 0 | ||
| 77 | |||
| 78 | def EndOfStream(self): | ||
| 79 | """Returns true iff we're at the end of the stream. | ||
| 80 | If this returns true, then a call to any other InputStream method | ||
| 81 | will raise an exception. | ||
| 82 | """ | ||
| 83 | return self._pos >= len(self._buffer) | ||
| 84 | |||
| 85 | def Position(self): | ||
| 86 | """Returns the current position in the stream, or equivalently, the | ||
| 87 | number of bytes read so far. | ||
| 88 | """ | ||
| 89 | return self._pos | ||
| 90 | |||
| 91 | def GetSubBuffer(self, size=None): | ||
| 92 | """Returns a sequence-like object that represents a portion of our | ||
| 93 | underlying sequence. | ||
| 94 | |||
| 95 | Position 0 in the returned object corresponds to self.Position() | ||
| 96 | in this stream. | ||
| 97 | |||
| 98 | If size is specified, then the returned object ends after the | ||
| 99 | next "size" bytes in this stream. If size is not specified, | ||
| 100 | then the returned object ends at the end of this stream. | ||
| 101 | |||
| 102 | We guarantee that the returned object R supports the Python buffer | ||
| 103 | interface (and thus that the call buffer(R) will work). | ||
| 104 | |||
| 105 | Note that the returned buffer is read-only. | ||
| 106 | |||
| 107 | The intended use for this method is for nested-message and nested-group | ||
| 108 | deserialization, where we want to make a recursive MergeFromString() | ||
| 109 | call on the portion of the original sequence that contains the serialized | ||
| 110 | nested message. (And we'd like to do so without making unnecessary string | ||
| 111 | copies). | ||
| 112 | |||
| 113 | REQUIRES: size is nonnegative. | ||
| 114 | """ | ||
| 115 | # Note that buffer() doesn't perform any actual string copy. | ||
| 116 | if size is None: | ||
| 117 | return buffer(self._buffer, self._pos) | ||
| 118 | else: | ||
| 119 | if size < 0: | ||
| 120 | raise message.DecodeError('Negative size %d' % size) | ||
| 121 | return buffer(self._buffer, self._pos, size) | ||
| 122 | |||
| 123 | def SkipBytes(self, num_bytes): | ||
| 124 | """Skip num_bytes bytes ahead, or go to the end of the stream, whichever | ||
| 125 | comes first. | ||
| 126 | |||
| 127 | REQUIRES: num_bytes is nonnegative. | ||
| 128 | """ | ||
| 129 | if num_bytes < 0: | ||
| 130 | raise message.DecodeError('Negative num_bytes %d' % num_bytes) | ||
| 131 | self._pos += num_bytes | ||
| 132 | self._pos = min(self._pos, len(self._buffer)) | ||
| 133 | |||
| 134 | def ReadBytes(self, size): | ||
| 135 | """Reads up to 'size' bytes from the stream, stopping early | ||
| 136 | only if we reach the end of the stream. Returns the bytes read | ||
| 137 | as a string. | ||
| 138 | """ | ||
| 139 | if size < 0: | ||
| 140 | raise message.DecodeError('Negative size %d' % size) | ||
| 141 | s = (self._buffer[self._pos : self._pos + size]) | ||
| 142 | self._pos += len(s) # Only advance by the number of bytes actually read. | ||
| 143 | return s | ||
| 144 | |||
| 145 | def ReadLittleEndian32(self): | ||
| 146 | """Interprets the next 4 bytes of the stream as a little-endian | ||
| 147 | encoded, unsigned 32-bit integer, and returns that integer. | ||
| 148 | """ | ||
| 149 | try: | ||
| 150 | i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN, | ||
| 151 | self._buffer[self._pos : self._pos + 4]) | ||
| 152 | self._pos += 4 | ||
| 153 | return i[0] # unpack() result is a 1-element tuple. | ||
| 154 | except struct.error, e: | ||
| 155 | raise message.DecodeError(e) | ||
| 156 | |||
| 157 | def ReadLittleEndian64(self): | ||
| 158 | """Interprets the next 8 bytes of the stream as a little-endian | ||
| 159 | encoded, unsigned 64-bit integer, and returns that integer. | ||
| 160 | """ | ||
| 161 | try: | ||
| 162 | i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN, | ||
| 163 | self._buffer[self._pos : self._pos + 8]) | ||
| 164 | self._pos += 8 | ||
| 165 | return i[0] # unpack() result is a 1-element tuple. | ||
| 166 | except struct.error, e: | ||
| 167 | raise message.DecodeError(e) | ||
| 168 | |||
| 169 | def ReadVarint32(self): | ||
| 170 | """Reads a varint from the stream, interprets this varint | ||
| 171 | as a signed, 32-bit integer, and returns the integer. | ||
| 172 | """ | ||
| 173 | i = self.ReadVarint64() | ||
| 174 | if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX: | ||
| 175 | raise message.DecodeError('Value out of range for int32: %d' % i) | ||
| 176 | return int(i) | ||
| 177 | |||
| 178 | def ReadVarUInt32(self): | ||
| 179 | """Reads a varint from the stream, interprets this varint | ||
| 180 | as an unsigned, 32-bit integer, and returns the integer. | ||
| 181 | """ | ||
| 182 | i = self.ReadVarUInt64() | ||
| 183 | if i > wire_format.UINT32_MAX: | ||
| 184 | raise message.DecodeError('Value out of range for uint32: %d' % i) | ||
| 185 | return i | ||
| 186 | |||
| 187 | def ReadVarint64(self): | ||
| 188 | """Reads a varint from the stream, interprets this varint | ||
| 189 | as a signed, 64-bit integer, and returns the integer. | ||
| 190 | """ | ||
| 191 | i = self.ReadVarUInt64() | ||
| 192 | if i > wire_format.INT64_MAX: | ||
| 193 | i -= (1 << 64) | ||
| 194 | return i | ||
| 195 | |||
| 196 | def ReadVarUInt64(self): | ||
| 197 | """Reads a varint from the stream, interprets this varint | ||
| 198 | as an unsigned, 64-bit integer, and returns the integer. | ||
| 199 | """ | ||
| 200 | i = self._ReadVarintHelper() | ||
| 201 | if not 0 <= i <= wire_format.UINT64_MAX: | ||
| 202 | raise message.DecodeError('Value out of range for uint64: %d' % i) | ||
| 203 | return i | ||
| 204 | |||
| 205 | def _ReadVarintHelper(self): | ||
| 206 | """Helper for the various varint-reading methods above. | ||
| 207 | Reads an unsigned, varint-encoded integer from the stream and | ||
| 208 | returns this integer. | ||
| 209 | |||
| 210 | Does no bounds checking except to ensure that we read at most as many bytes | ||
| 211 | as could possibly be present in a varint-encoded 64-bit number. | ||
| 212 | """ | ||
| 213 | result = 0 | ||
| 214 | shift = 0 | ||
| 215 | while 1: | ||
| 216 | if shift >= 64: | ||
| 217 | raise message.DecodeError('Too many bytes when decoding varint.') | ||
| 218 | try: | ||
| 219 | b = ord(self._buffer[self._pos]) | ||
| 220 | except IndexError: | ||
| 221 | raise message.DecodeError('Truncated varint.') | ||
| 222 | self._pos += 1 | ||
| 223 | result |= ((b & 0x7f) << shift) | ||
| 224 | shift += 7 | ||
| 225 | if not (b & 0x80): | ||
| 226 | return result | ||
| 227 | |||
| 228 | class InputStreamArray(object): | ||
| 229 | def __init__(self, s): | ||
| 230 | self._buffer = array('B', s) | ||
| 231 | self._pos = 0 | ||
| 232 | |||
| 233 | def EndOfStream(self): | ||
| 234 | return self._pos >= len(self._buffer) | ||
| 235 | |||
| 236 | def Position(self): | ||
| 237 | return self._pos | ||
| 238 | |||
| 239 | def GetSubBuffer(self, size=None): | ||
| 240 | if size is None: | ||
| 241 | return self._buffer[self._pos : ].tostring() | ||
| 242 | else: | ||
| 243 | if size < 0: | ||
| 244 | raise message.DecodeError('Negative size %d' % size) | ||
| 245 | return self._buffer[self._pos : self._pos + size].tostring() | ||
| 246 | |||
| 247 | def SkipBytes(self, num_bytes): | ||
| 248 | if num_bytes < 0: | ||
| 249 | raise message.DecodeError('Negative num_bytes %d' % num_bytes) | ||
| 250 | self._pos += num_bytes | ||
| 251 | self._pos = min(self._pos, len(self._buffer)) | ||
| 252 | |||
| 253 | def ReadBytes(self, size): | ||
| 254 | if size < 0: | ||
| 255 | raise message.DecodeError('Negative size %d' % size) | ||
| 256 | s = self._buffer[self._pos : self._pos + size].tostring() | ||
| 257 | self._pos += len(s) # Only advance by the number of bytes actually read. | ||
| 258 | return s | ||
| 259 | |||
| 260 | def ReadLittleEndian32(self): | ||
| 261 | try: | ||
| 262 | i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN, | ||
| 263 | self._buffer[self._pos : self._pos + 4]) | ||
| 264 | self._pos += 4 | ||
| 265 | return i[0] # unpack() result is a 1-element tuple. | ||
| 266 | except struct.error, e: | ||
| 267 | raise message.DecodeError(e) | ||
| 268 | |||
| 269 | def ReadLittleEndian64(self): | ||
| 270 | try: | ||
| 271 | i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN, | ||
| 272 | self._buffer[self._pos : self._pos + 8]) | ||
| 273 | self._pos += 8 | ||
| 274 | return i[0] # unpack() result is a 1-element tuple. | ||
| 275 | except struct.error, e: | ||
| 276 | raise message.DecodeError(e) | ||
| 277 | |||
| 278 | def ReadVarint32(self): | ||
| 279 | i = self.ReadVarint64() | ||
| 280 | if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX: | ||
| 281 | raise message.DecodeError('Value out of range for int32: %d' % i) | ||
| 282 | return int(i) | ||
| 283 | |||
| 284 | def ReadVarUInt32(self): | ||
| 285 | i = self.ReadVarUInt64() | ||
| 286 | if i > wire_format.UINT32_MAX: | ||
| 287 | raise message.DecodeError('Value out of range for uint32: %d' % i) | ||
| 288 | return i | ||
| 289 | |||
| 290 | def ReadVarint64(self): | ||
| 291 | i = self.ReadVarUInt64() | ||
| 292 | if i > wire_format.INT64_MAX: | ||
| 293 | i -= (1 << 64) | ||
| 294 | return i | ||
| 295 | |||
| 296 | def ReadVarUInt64(self): | ||
| 297 | i = self._ReadVarintHelper() | ||
| 298 | if not 0 <= i <= wire_format.UINT64_MAX: | ||
| 299 | raise message.DecodeError('Value out of range for uint64: %d' % i) | ||
| 300 | return i | ||
| 301 | |||
| 302 | def _ReadVarintHelper(self): | ||
| 303 | result = 0 | ||
| 304 | shift = 0 | ||
| 305 | while 1: | ||
| 306 | if shift >= 64: | ||
| 307 | raise message.DecodeError('Too many bytes when decoding varint.') | ||
| 308 | try: | ||
| 309 | b = self._buffer[self._pos] | ||
| 310 | except IndexError: | ||
| 311 | raise message.DecodeError('Truncated varint.') | ||
| 312 | self._pos += 1 | ||
| 313 | result |= ((b & 0x7f) << shift) | ||
| 314 | shift += 7 | ||
| 315 | if not (b & 0x80): | ||
| 316 | return result | ||
| 317 | |||
| 318 | try: | ||
| 319 | buffer("") | ||
| 320 | InputStream = InputStreamBuffer | ||
| 321 | except NotImplementedError: | ||
| 322 | # Google App Engine: dev_appserver.py | ||
| 323 | InputStream = InputStreamArray | ||
| 324 | except RuntimeError: | ||
| 325 | # Google App Engine: production | ||
| 326 | InputStream = InputStreamArray | ||
diff --git a/froofle/protobuf/internal/message_listener.py b/froofle/protobuf/internal/message_listener.py new file mode 100644 index 00000000..43978952 --- /dev/null +++ b/froofle/protobuf/internal/message_listener.py | |||
| @@ -0,0 +1,69 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Defines a listener interface for observing certain | ||
| 32 | state transitions on Message objects. | ||
| 33 | |||
| 34 | Also defines a null implementation of this interface. | ||
| 35 | """ | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | |||
| 40 | class MessageListener(object): | ||
| 41 | |||
| 42 | """Listens for transitions to nonempty and for invalidations of cached | ||
| 43 | byte sizes. Meant to be registered via Message._SetListener(). | ||
| 44 | """ | ||
| 45 | |||
| 46 | def TransitionToNonempty(self): | ||
| 47 | """Called the *first* time that this message becomes nonempty. | ||
| 48 | Implementations are free (but not required) to call this method multiple | ||
| 49 | times after the message has become nonempty. | ||
| 50 | """ | ||
| 51 | raise NotImplementedError | ||
| 52 | |||
| 53 | def ByteSizeDirty(self): | ||
| 54 | """Called *every* time the cached byte size value | ||
| 55 | for this object is invalidated (transitions from being | ||
| 56 | "clean" to "dirty"). | ||
| 57 | """ | ||
| 58 | raise NotImplementedError | ||
| 59 | |||
| 60 | |||
| 61 | class NullMessageListener(object): | ||
| 62 | |||
| 63 | """No-op MessageListener implementation.""" | ||
| 64 | |||
| 65 | def TransitionToNonempty(self): | ||
| 66 | pass | ||
| 67 | |||
| 68 | def ByteSizeDirty(self): | ||
| 69 | pass | ||
diff --git a/froofle/protobuf/internal/output_stream.py b/froofle/protobuf/internal/output_stream.py new file mode 100644 index 00000000..f62cd1c4 --- /dev/null +++ b/froofle/protobuf/internal/output_stream.py | |||
| @@ -0,0 +1,125 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """OutputStream is the primitive interface for sticking bits on the wire. | ||
| 32 | |||
| 33 | All protocol buffer serialization can be expressed in terms of | ||
| 34 | the OutputStream primitives provided here. | ||
| 35 | """ | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | import array | ||
| 40 | import struct | ||
| 41 | from froofle.protobuf import message | ||
| 42 | from froofle.protobuf.internal import wire_format | ||
| 43 | |||
| 44 | |||
| 45 | |||
| 46 | # Note that much of this code is ported from //net/proto/ProtocolBuffer, and | ||
| 47 | # that the interface is strongly inspired by CodedOutputStream from the C++ | ||
| 48 | # proto2 implementation. | ||
| 49 | |||
| 50 | |||
class OutputStream(object):

  """Accumulates serialized bytes; ToString() yields the final result.

  All protocol buffer serialization bottoms out in the Append*()
  primitives defined here.
  """

  def __init__(self):
    # Buffer of unsigned bytes ('B'); array gives cheap appends and a
    # fast conversion back to a string via tostring().
    self._buffer = array.array('B')

  def AppendRawBytes(self, raw_bytes):
    """Appends raw_bytes to our internal buffer."""
    self._buffer.fromstring(raw_bytes)

  def AppendLittleEndian32(self, unsigned_value):
    """Appends an unsigned 32-bit integer in little-endian byte order.

    Raises message.EncodeError if the value does not fit in 32 bits.
    """
    if not 0 <= unsigned_value <= wire_format.UINT32_MAX:
      raise message.EncodeError(
          'Unsigned 32-bit out of range: %d' % unsigned_value)
    packed = struct.pack(
        wire_format.FORMAT_UINT32_LITTLE_ENDIAN, unsigned_value)
    self._buffer.fromstring(packed)

  def AppendLittleEndian64(self, unsigned_value):
    """Appends an unsigned 64-bit integer in little-endian byte order.

    Raises message.EncodeError if the value does not fit in 64 bits.
    """
    if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
      raise message.EncodeError(
          'Unsigned 64-bit out of range: %d' % unsigned_value)
    packed = struct.pack(
        wire_format.FORMAT_UINT64_LITTLE_ENDIAN, unsigned_value)
    self._buffer.fromstring(packed)

  def AppendVarint32(self, value):
    """Appends a signed 32-bit integer encoded as a varint.  (Note that
    a negative varint32 always occupies 10 bytes on the wire.)
    """
    if not wire_format.INT32_MIN <= value <= wire_format.INT32_MAX:
      raise message.EncodeError('Value out of range: %d' % value)
    self.AppendVarint64(value)

  def AppendVarUInt32(self, value):
    """Appends an unsigned 32-bit integer encoded as a varint."""
    if not 0 <= value <= wire_format.UINT32_MAX:
      raise message.EncodeError('Value out of range: %d' % value)
    self.AppendVarUInt64(value)

  def AppendVarint64(self, value):
    """Appends a signed 64-bit integer encoded as a varint."""
    if not wire_format.INT64_MIN <= value <= wire_format.INT64_MAX:
      raise message.EncodeError('Value out of range: %d' % value)
    # Negatives are encoded via their two's-complement uint64 form.
    if value < 0:
      value += (1 << 64)
    self.AppendVarUInt64(value)

  def AppendVarUInt64(self, unsigned_value):
    """Appends an unsigned 64-bit integer encoded as a varint."""
    if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
      raise message.EncodeError('Value out of range: %d' % unsigned_value)
    append = self._buffer.append
    remaining = unsigned_value
    # Emit 7 payload bits per byte, least-significant group first; the
    # high bit of each byte marks continuation.
    while remaining > 0x7f:
      append(0x80 | (remaining & 0x7f))
      remaining >>= 7
    append(remaining)

  def ToString(self):
    """Returns a string containing the bytes in our internal buffer."""
    return self._buffer.tostring()
diff --git a/froofle/protobuf/internal/type_checkers.py b/froofle/protobuf/internal/type_checkers.py new file mode 100644 index 00000000..aaf7a844 --- /dev/null +++ b/froofle/protobuf/internal/type_checkers.py | |||
| @@ -0,0 +1,268 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Provides type checking routines. | ||
| 32 | |||
| 33 | This module defines type checking utilities in the forms of dictionaries: | ||
| 34 | |||
| 35 | VALUE_CHECKERS: A dictionary of field types and a value validation object. | ||
| 36 | TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing | ||
| 37 | function. | ||
| 38 | TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization | ||
| 39 | function. | ||
  FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
    corresponding wire types.
| 42 | TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization | ||
| 43 | function. | ||
| 44 | """ | ||
| 45 | |||
| 46 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 47 | |||
| 48 | from froofle.protobuf.internal import decoder | ||
| 49 | from froofle.protobuf.internal import encoder | ||
| 50 | from froofle.protobuf.internal import wire_format | ||
| 51 | from froofle.protobuf import descriptor | ||
| 52 | |||
| 53 | _FieldDescriptor = descriptor.FieldDescriptor | ||
| 54 | |||
| 55 | |||
def GetTypeChecker(cpp_type, field_type):
  """Returns a type checker for a message field of the specified types.

  Args:
    cpp_type: C++ type of the field (see descriptor.py).
    field_type: Protocol message field type (see descriptor.py).

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  # True string fields get the unicode-aware checker; every other case
  # (including non-TYPE_STRING fields that share CPPTYPE_STRING) falls
  # through to the per-CPPTYPE table.
  is_true_string = (cpp_type == _FieldDescriptor.CPPTYPE_STRING and
                    field_type == _FieldDescriptor.TYPE_STRING)
  if is_true_string:
    return UnicodeValueChecker()
  return _VALUE_CHECKERS[cpp_type]
| 71 | |||
| 72 | |||
| 73 | # None of the typecheckers below make any attempt to guard against people | ||
| 74 | # subclassing builtin types and doing weird things. We're not trying to | ||
| 75 | # protect against malicious clients here, just people accidentally shooting | ||
| 76 | # themselves in the foot in obvious ways. | ||
| 77 | |||
class TypeChecker(object):

  """Catches type errors as early as possible, at the moment a client
  assigns a value to a scalar field of a protocol message.
  """

  def __init__(self, *acceptable_types):
    # Tuple of types CheckValue() will accept.
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Raises TypeError unless proposed_value has an acceptable type."""
    if isinstance(proposed_value, self._acceptable_types):
      return
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))
| 92 | |||
| 93 | |||
| 94 | # IntValueChecker and its subclasses perform integer type-checks | ||
| 95 | # and bounds-checks. | ||
class IntValueChecker(object):

  """Checker used for integer fields.  Performs type-check and range check.

  Subclasses supply the _MIN and _MAX class attributes.
  """

  def CheckValue(self, proposed_value):
    """Raises TypeError for non-integers, ValueError for out-of-range."""
    if not isinstance(proposed_value, (int, long)):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (int, long)))
    if not self._MIN <= proposed_value <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
| 107 | |||
| 108 | |||
class UnicodeValueChecker(object):

  """Checker used for string fields."""

  def CheckValue(self, proposed_value):
    """Raises TypeError for non-strings, ValueError for non-ASCII str."""
    if not isinstance(proposed_value, (str, unicode)):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (str, unicode)))
    # A plain (byte) str is only acceptable if it decodes as 7-bit ASCII.
    if isinstance(proposed_value, str):
      try:
        unicode(proposed_value, 'ascii')
      except UnicodeDecodeError:
        raise ValueError('%.1024r isn\'t in 7-bit ASCII encoding.'
                         % (proposed_value))
| 127 | |||
| 128 | |||
class Int32ValueChecker(IntValueChecker):
  """Range-checks int32 values; also used for enum fields (see
  _VALUE_CHECKERS below)."""
  # We're sure to use ints instead of longs here since comparison may be more
  # efficient.
  _MIN = -2147483648
  _MAX = 2147483647
| 134 | |||
| 135 | |||
class Uint32ValueChecker(IntValueChecker):
  """Range-checks uint32 values: [0, 2**32 - 1]."""
  _MIN = 0
  _MAX = (1 << 32) - 1
| 139 | |||
| 140 | |||
class Int64ValueChecker(IntValueChecker):
  """Range-checks int64 values: [-2**63, 2**63 - 1]."""
  _MIN = -(1 << 63)
  _MAX = (1 << 63) - 1
| 144 | |||
| 145 | |||
class Uint64ValueChecker(IntValueChecker):
  """Range-checks uint64 values: [0, 2**64 - 1]."""
  _MIN = 0
  _MAX = (1 << 64) - 1
| 149 | |||
| 150 | |||
# Type-checkers for all scalar CPPTYPEs.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    # Floating-point fields accept any Python number.
    _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
    _FieldDescriptor.CPPTYPE_ENUM: Int32ValueChecker(),
    # NOTE: GetTypeChecker() returns a UnicodeValueChecker instead of this
    # entry for CPPTYPE_STRING fields whose declared type is TYPE_STRING.
    _FieldDescriptor.CPPTYPE_STRING: TypeChecker(str),
    }
| 165 | |||
| 166 | |||
# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".  (The functions live in
# wire_format; every declared field type has an entry here.)
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
    }
| 191 | |||
| 192 | |||
# Maps from field type to an unbound Encoder method F, such that
# F(encoder, field_number, value) will append the serialization
# of a value of this type to the encoder.
# (_Encoder is a local alias that keeps the table entries short.)
_Encoder = encoder.Encoder
TYPE_TO_SERIALIZE_METHOD = {
    _FieldDescriptor.TYPE_DOUBLE: _Encoder.AppendDouble,
    _FieldDescriptor.TYPE_FLOAT: _Encoder.AppendFloat,
    _FieldDescriptor.TYPE_INT64: _Encoder.AppendInt64,
    _FieldDescriptor.TYPE_UINT64: _Encoder.AppendUInt64,
    _FieldDescriptor.TYPE_INT32: _Encoder.AppendInt32,
    _FieldDescriptor.TYPE_FIXED64: _Encoder.AppendFixed64,
    _FieldDescriptor.TYPE_FIXED32: _Encoder.AppendFixed32,
    _FieldDescriptor.TYPE_BOOL: _Encoder.AppendBool,
    _FieldDescriptor.TYPE_STRING: _Encoder.AppendString,
    _FieldDescriptor.TYPE_GROUP: _Encoder.AppendGroup,
    _FieldDescriptor.TYPE_MESSAGE: _Encoder.AppendMessage,
    _FieldDescriptor.TYPE_BYTES: _Encoder.AppendBytes,
    _FieldDescriptor.TYPE_UINT32: _Encoder.AppendUInt32,
    _FieldDescriptor.TYPE_ENUM: _Encoder.AppendEnum,
    _FieldDescriptor.TYPE_SFIXED32: _Encoder.AppendSFixed32,
    _FieldDescriptor.TYPE_SFIXED64: _Encoder.AppendSFixed64,
    _FieldDescriptor.TYPE_SINT32: _Encoder.AppendSInt32,
    _FieldDescriptor.TYPE_SINT64: _Encoder.AppendSInt64,
    }
| 217 | |||
| 218 | |||
# Maps from field type to expected wiretype: varint for all integer-like
# types, fixed32/fixed64 for fixed-width types, and length-delimited for
# strings, bytes, and embedded messages (groups use START_GROUP).
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
| 243 | |||
| 244 | |||
# Maps from field type to an unbound Decoder method F,
# such that F(decoder) will read a field of the requested type.
#
# Note that Message and Group are intentionally missing here.
# They're handled by _RecursivelyMerge().
# (_Decoder is a local alias that keeps the table entries short.)
_Decoder = decoder.Decoder
TYPE_TO_DESERIALIZE_METHOD = {
    _FieldDescriptor.TYPE_DOUBLE: _Decoder.ReadDouble,
    _FieldDescriptor.TYPE_FLOAT: _Decoder.ReadFloat,
    _FieldDescriptor.TYPE_INT64: _Decoder.ReadInt64,
    _FieldDescriptor.TYPE_UINT64: _Decoder.ReadUInt64,
    _FieldDescriptor.TYPE_INT32: _Decoder.ReadInt32,
    _FieldDescriptor.TYPE_FIXED64: _Decoder.ReadFixed64,
    _FieldDescriptor.TYPE_FIXED32: _Decoder.ReadFixed32,
    _FieldDescriptor.TYPE_BOOL: _Decoder.ReadBool,
    _FieldDescriptor.TYPE_STRING: _Decoder.ReadString,
    _FieldDescriptor.TYPE_BYTES: _Decoder.ReadBytes,
    _FieldDescriptor.TYPE_UINT32: _Decoder.ReadUInt32,
    _FieldDescriptor.TYPE_ENUM: _Decoder.ReadEnum,
    _FieldDescriptor.TYPE_SFIXED32: _Decoder.ReadSFixed32,
    _FieldDescriptor.TYPE_SFIXED64: _Decoder.ReadSFixed64,
    _FieldDescriptor.TYPE_SINT32: _Decoder.ReadSInt32,
    _FieldDescriptor.TYPE_SINT64: _Decoder.ReadSInt64,
    }
diff --git a/froofle/protobuf/internal/wire_format.py b/froofle/protobuf/internal/wire_format.py new file mode 100644 index 00000000..4d823c8d --- /dev/null +++ b/froofle/protobuf/internal/wire_format.py | |||
| @@ -0,0 +1,236 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Constants and static functions to support protocol buffer wire format.""" | ||
| 32 | |||
| 33 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 34 | |||
| 35 | import struct | ||
| 36 | from froofle.protobuf import message | ||
| 37 | |||
| 38 | |||
TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
_TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in //net/proto2/public/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that will encode/decode the specified formats.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'


# Sanity checks, run once at import time.
# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')
| 75 | |||
| 76 | |||
def PackTag(field_number, wire_type):
  """Combines a field number and a wire type into the single unsigned
  32-bit tag value used in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: If wire_type is not a known WIRETYPE_* value.
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type
| 88 | |||
| 89 | |||
def UnpackTag(tag):
  """The inverse of PackTag().  Splits an unsigned 32-bit tag into its
  (field_number, wire_type) pair.
  """
  field_number = tag >> TAG_TYPE_BITS
  wire_type = tag & _TAG_TYPE_MASK
  return field_number, wire_type
| 95 | |||
| 96 | |||
def ZigZagEncode(value):
  """ZigZag Transform: maps signed integers onto unsigned ones so that
  small-magnitude negatives still get short varints.  See wire_format.h
  for more details.
  """
  if value < 0:
    return (value << 1) ^ (~0)
  return value << 1
| 105 | |||
| 106 | |||
def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  decoded = value >> 1
  if value & 0x1:
    # Odd values map back to negative numbers.
    decoded ^= (~0)
  return decoded
| 112 | |||
| 113 | |||
| 114 | |||
| 115 | # The *ByteSize() functions below return the number of bytes required to | ||
| 116 | # serialize "field number + type" information and then serialize the value. | ||
| 117 | |||
| 118 | |||
def Int32ByteSize(field_number, int32):
  """Returns byte size of a tagged int32; same wire encoding as int64."""
  return Int64ByteSize(field_number, int32)


def Int64ByteSize(field_number, int64):
  """Returns byte size of a tagged int64 varint."""
  # Have to convert to uint before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)


def UInt32ByteSize(field_number, uint32):
  """Returns byte size of a tagged uint32; same wire encoding as uint64."""
  return UInt64ByteSize(field_number, uint32)


def UInt64ByteSize(field_number, uint64):
  """Returns byte size of a tagged uint64 varint: tag bytes + payload."""
  return _TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)


def SInt32ByteSize(field_number, int32):
  """Returns byte size of a tagged sint32 (ZigZag-encoded varint)."""
  return UInt32ByteSize(field_number, ZigZagEncode(int32))


def SInt64ByteSize(field_number, int64):
  """Returns byte size of a tagged sint64 (ZigZag-encoded varint)."""
  return UInt64ByteSize(field_number, ZigZagEncode(int64))
| 142 | |||
| 143 | |||
def Fixed32ByteSize(field_number, fixed32):
  """Returns byte size of a tagged fixed32: tag + 4-byte payload."""
  return _TagByteSize(field_number) + 4


def Fixed64ByteSize(field_number, fixed64):
  """Returns byte size of a tagged fixed64: tag + 8-byte payload."""
  return _TagByteSize(field_number) + 8


def SFixed32ByteSize(field_number, sfixed32):
  """Returns byte size of a tagged sfixed32: tag + 4-byte payload."""
  return _TagByteSize(field_number) + 4


def SFixed64ByteSize(field_number, sfixed64):
  """Returns byte size of a tagged sfixed64: tag + 8-byte payload."""
  return _TagByteSize(field_number) + 8


def FloatByteSize(field_number, flt):
  """Returns byte size of a tagged float: tag + 4-byte payload."""
  return _TagByteSize(field_number) + 4


def DoubleByteSize(field_number, double):
  """Returns byte size of a tagged double: tag + 8-byte payload."""
  return _TagByteSize(field_number) + 8


def BoolByteSize(field_number, b):
  """Returns byte size of a tagged bool: tag + one varint byte."""
  return _TagByteSize(field_number) + 1


def EnumByteSize(field_number, enum):
  """Returns byte size of a tagged enum; encoded like a uint32 varint."""
  return UInt32ByteSize(field_number, enum)
| 174 | |||
| 175 | |||
def StringByteSize(field_number, string):
  """Returns byte size of a tagged string, measured via its UTF-8 form."""
  return BytesByteSize(field_number, string.encode('utf-8'))


def BytesByteSize(field_number, b):
  """Returns byte size of tagged bytes: tag + length prefix + data."""
  return (_TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))


def GroupByteSize(field_number, message):
  """Returns byte size of a tagged group: START/END tags + contents."""
  return (2 * _TagByteSize(field_number)  # START and END group.
          + message.ByteSize())


def MessageByteSize(field_number, message):
  """Returns byte size of a tagged submessage: tag + length prefix + body."""
  return (_TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message.ByteSize())
          + message.ByteSize())
| 195 | |||
| 196 | |||
def MessageSetItemByteSize(field_number, msg):
  """Returns the byte size of one serialized MessageSet item."""
  # Tag overhead: two tags for the repeated item group (field number 1),
  # one for type_id (field number 2), and one for message (field number 3).
  size = 2 * _TagByteSize(1) + _TagByteSize(2) + _TagByteSize(3)

  # The type_id payload is the extension's field number as a varint.
  size += _VarUInt64ByteSizeNoTag(field_number)

  # The embedded message is length-prefixed, then serialized in full.
  msg_size = msg.ByteSize()
  size += _VarUInt64ByteSizeNoTag(msg_size)
  size += msg_size
  return size
| 215 | |||
| 216 | |||
| 217 | # Private helper functions for the *ByteSize() functions above. | ||
| 218 | |||
| 219 | |||
def _TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  # (A tag is the varint encoding of PackTag(field_number, wire_type).)
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
| 224 | |||
| 225 | |||
def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the bytes required to serialize a single varint.

  Args:
    uint64: Value to measure; must be unsigned (and <= UINT64_MAX).

  Raises:
    message.EncodeError: If uint64 is larger than UINT64_MAX.
  """
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  # Each varint byte carries 7 payload bits; count how many are needed.
  # (Local renamed from 'bytes' so it no longer shadows the builtin.)
  byte_count = 1
  while uint64 > 0x7f:
    byte_count += 1
    uint64 >>= 7
  return byte_count
diff --git a/froofle/protobuf/message.py b/froofle/protobuf/message.py new file mode 100644 index 00000000..ed714853 --- /dev/null +++ b/froofle/protobuf/message.py | |||
| @@ -0,0 +1,246 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | # TODO(robinson): We should just make these methods all "pure-virtual" and move | ||
| 32 | # all implementation out, into reflection.py for now. | ||
| 33 | |||
| 34 | |||
| 35 | """Contains an abstract base class for protocol messages.""" | ||
| 36 | |||
| 37 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 38 | |||
| 39 | from froofle.protobuf import text_format | ||
| 40 | |||
class Error(Exception):
  """Base class for all exceptions raised by this package."""


class DecodeError(Error):
  """Raised when deserializing a protocol buffer fails."""


class EncodeError(Error):
  """Raised when serializing a protocol buffer fails."""
| 44 | |||
| 45 | |||
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler.  These generated types subclass Message and implement the methods
  shown below.

  TODO(robinson): Link to an HTML document here.

  TODO(robinson): Document that instances of this class will also
  have an Extensions attribute with __getitem__ and __setitem__.
  Again, not sure how to best convey this.

  TODO(robinson): Document that the class must also have a static
  RegisterExtension(extension_field) method.
  Not sure how to best express at this point.
  """

  # TODO(robinson): Document these fields and methods.

  __slots__ = []

  # Concrete generated subclasses replace this with the Descriptor object
  # describing the message type.
  DESCRIPTOR = None

  def __eq__(self, other_msg):
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __str__(self):
    return text_format.MessageToString(self)

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg: Message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg: Message to copy into the current one.
    """
    # Copying a message onto itself would clear it before merging, so it
    # must be a no-op.
    if self == other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      The method returns True if the message is initialized (i.e. all of its
      required fields are set).
    """
    raise NotImplementedError

  # TODO(robinson): MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read.  Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in |serialized| that is already present
    in this message:
      - If it's a "repeated" field, we append to the end of our list.
      - Else, if it's a scalar, we overwrite our field.
      - Else, (it's a nonrepeated composite), we recursively merge
        into the existing composite.

    TODO(robinson): Document handling of unknown fields.

    Args:
      serialized: Any object that allows us to call buffer(serialized)
        to access a string of bytes using the buffer interface.

    TODO(robinson): When we switch to a helper, this will return None.

    Returns:
      The number of bytes read from |serialized|.
      For non-group messages, this will always be len(serialized),
      but for messages which are actually groups, this will
      generally be less than len(serialized), since we must
      stop when we reach an END_GROUP tag.  Note that if
      we *do* stop because of an END_GROUP tag, the number
      of bytes returned does not include the bytes
      for the END_GROUP tag information.
    """
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Like MergeFromString(), except we clear the object first."""
    self.Clear()
    self.MergeFromString(serialized)

  def SerializeToString(self):
    """Serializes the protocol message to a binary string.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      message.EncodeError if the message isn't initialized.
    """
    raise NotImplementedError

  def SerializePartialToString(self):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Returns:
      A string representation of the partial message.
    """
    raise NotImplementedError

  # TODO(robinson): Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves.  This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO(robinson): Be sure to document (and test) exactly
  # which field names are accepted here.  Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self, field_name=None):
    """Returns a list of (FieldDescriptor, value) tuples for all
    fields in the message which are not empty.  A singular field is non-empty
    if HasField() would return true, and a repeated field is non-empty if
    it contains at least one element.  The fields are ordered by field
    number.

    Args:
      field_name: Deprecated and ignored.  Concrete implementations (see
        reflection.py) take no argument; the parameter keeps a default
        here only so existing callers of this abstract signature still
        work.
    """
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks whether the named non-repeated field has been explicitly set."""
    raise NotImplementedError

  def ClearField(self, field_name):
    """Resets the named field to its default, clearing its "has" bit."""
    raise NotImplementedError

  def HasExtension(self, extension_handle):
    """Checks whether the given extension has been explicitly set."""
    raise NotImplementedError

  def ClearExtension(self, extension_handle):
    """Resets the given extension to its default value."""
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.
    Recursively calls ByteSize() on all contained messages.
    """
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener.  Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError
diff --git a/froofle/protobuf/reflection.py b/froofle/protobuf/reflection.py new file mode 100644 index 00000000..e2abff04 --- /dev/null +++ b/froofle/protobuf/reflection.py | |||
| @@ -0,0 +1,1653 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | # This code is meant to work on Python 2.4 and above only. | ||
| 32 | # | ||
| 33 | # TODO(robinson): Helpers for verbose, common checks like seeing if a | ||
| 34 | # descriptor's cpp_type is CPPTYPE_MESSAGE. | ||
| 35 | |||
| 36 | """Contains a metaclass and helper functions used to create | ||
| 37 | protocol message classes from Descriptor objects at runtime. | ||
| 38 | |||
| 39 | Recall that a metaclass is the "type" of a class. | ||
| 40 | (A class is to a metaclass what an instance is to a class.) | ||
| 41 | |||
| 42 | In this case, we use the GeneratedProtocolMessageType metaclass | ||
| 43 | to inject all the useful functionality into the classes | ||
| 44 | output by the protocol compiler at compile-time. | ||
| 45 | |||
| 46 | The upshot of all this is that the real implementation | ||
| 47 | details for ALL pure-Python protocol buffers are *here in | ||
| 48 | this file*. | ||
| 49 | """ | ||
| 50 | |||
| 51 | __author__ = 'robinson@google.com (Will Robinson)' | ||
| 52 | |||
| 53 | import heapq | ||
| 54 | import threading | ||
| 55 | import weakref | ||
| 56 | # We use "as" to avoid name collisions with variables. | ||
| 57 | from froofle.protobuf.internal import decoder | ||
| 58 | from froofle.protobuf.internal import encoder | ||
| 59 | from froofle.protobuf.internal import message_listener as message_listener_mod | ||
| 60 | from froofle.protobuf.internal import type_checkers | ||
| 61 | from froofle.protobuf.internal import wire_format | ||
| 62 | from froofle.protobuf import descriptor as descriptor_mod | ||
| 63 | from froofle.protobuf import message as message_mod | ||
| 64 | |||
| 65 | _FieldDescriptor = descriptor_mod.FieldDescriptor | ||
| 66 | |||
| 67 | |||
class GeneratedProtocolMessageType(type):

  """Metaclass for protocol message classes created at runtime from Descriptors.

  We add implementations for all methods described in the Message class.  We
  also create properties to allow getting/setting all fields in the protocol
  message.  Finally, we create slots to prevent users from accidentally
  "setting" nonexistent fields in the protocol message, which then wouldn't get
  serialized / deserialized properly.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime.  Clients can also manually create their own
  classes at runtime, as in this example:

  mydescriptor = Descriptor(.....)
  class MyProtoClass(Message):
    __metaclass__ = GeneratedProtocolMessageType
    DESCRIPTOR = mydescriptor
  myproto_instance = MyProtoClass()
  myproto.foo_field = 23
  ...
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __new__(cls, name, bases, dictionary):
    """Custom allocation for runtime-generated class types.

    We override __new__ because this is apparently the only place
    where we can meaningfully set __slots__ on the class we're creating(?).
    (The interplay between metaclasses and slots is not very well-documented).

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.

    Returns:
      Newly-allocated class.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
    # Slots and nested-extension attributes must go into the dictionary
    # *before* type.__new__ runs, since __slots__ is only honored at
    # class-creation time.
    _AddSlots(descriptor, dictionary)
    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
    superclass = super(GeneratedProtocolMessageType, cls)
    return superclass.__new__(cls, name, bases, dictionary)

  def __init__(cls, name, bases, dictionary):
    """Here we perform the majority of our work on the class.
    We add enum getters, an __init__ method, implementations
    of all Message methods, and properties for all fields
    in the protocol type.

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
    # We act as a "friend" class of the descriptor, setting
    # its _concrete_class attribute the first time we use a
    # given descriptor to initialize a concrete protocol message
    # class.
    concrete_class_attr_name = '_concrete_class'
    if not hasattr(descriptor, concrete_class_attr_name):
      setattr(descriptor, concrete_class_attr_name, cls)
    # NOTE(review): the _Add* helpers below are order-sensitive (e.g.
    # _AddInitMethod relies on _known_extensions already existing).
    cls._known_extensions = []
    _AddEnumValues(descriptor, cls)
    _AddInitMethod(descriptor, cls)
    _AddPropertiesForFields(descriptor, cls)
    _AddStaticMethods(cls)
    _AddMessageMethods(descriptor, cls)
    _AddPrivateHelperMethods(cls)
    superclass = super(GeneratedProtocolMessageType, cls)
    superclass.__init__(cls, name, bases, dictionary)
| 156 | |||
| 157 | |||
| 158 | # Stateless helpers for GeneratedProtocolMessageType below. | ||
| 159 | # Outside clients should not access these directly. | ||
| 160 | # | ||
| 161 | # I opted not to make any of these methods on the metaclass, to make it more | ||
| 162 | # clear that I'm not really using any state there and to keep clients from | ||
| 163 | # thinking that they have direct access to these construction helpers. | ||
| 164 | |||
| 165 | |||
| 166 | def _PropertyName(proto_field_name): | ||
| 167 | """Returns the name of the public property attribute which | ||
| 168 | clients can use to get and (in some cases) set the value | ||
| 169 | of a protocol message field. | ||
| 170 | |||
| 171 | Args: | ||
| 172 | proto_field_name: The protocol message field name, exactly | ||
| 173 | as it appears (or would appear) in a .proto file. | ||
| 174 | """ | ||
| 175 | # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. | ||
| 176 | # nnorwitz makes my day by writing: | ||
| 177 | # """ | ||
| 178 | # FYI. See the keyword module in the stdlib. This could be as simple as: | ||
| 179 | # | ||
| 180 | # if keyword.iskeyword(proto_field_name): | ||
| 181 | # return proto_field_name + "_" | ||
| 182 | # return proto_field_name | ||
| 183 | # """ | ||
| 184 | return proto_field_name | ||
| 185 | |||
| 186 | |||
| 187 | def _ValueFieldName(proto_field_name): | ||
| 188 | """Returns the name of the (internal) instance attribute which objects | ||
| 189 | should use to store the current value for a given protocol message field. | ||
| 190 | |||
| 191 | Args: | ||
| 192 | proto_field_name: The protocol message field name, exactly | ||
| 193 | as it appears (or would appear) in a .proto file. | ||
| 194 | """ | ||
| 195 | return '_value_' + proto_field_name | ||
| 196 | |||
| 197 | |||
| 198 | def _HasFieldName(proto_field_name): | ||
| 199 | """Returns the name of the (internal) instance attribute which | ||
| 200 | objects should use to store a boolean telling whether this field | ||
| 201 | is explicitly set or not. | ||
| 202 | |||
| 203 | Args: | ||
| 204 | proto_field_name: The protocol message field name, exactly | ||
| 205 | as it appears (or would appear) in a .proto file. | ||
| 206 | """ | ||
| 207 | return '_has_' + proto_field_name | ||
| 208 | |||
| 209 | |||
| 210 | def _AddSlots(message_descriptor, dictionary): | ||
| 211 | """Adds a __slots__ entry to dictionary, containing the names of all valid | ||
| 212 | attributes for this message type. | ||
| 213 | |||
| 214 | Args: | ||
| 215 | message_descriptor: A Descriptor instance describing this message type. | ||
| 216 | dictionary: Class dictionary to which we'll add a '__slots__' entry. | ||
| 217 | """ | ||
| 218 | field_names = [_ValueFieldName(f.name) for f in message_descriptor.fields] | ||
| 219 | field_names.extend(_HasFieldName(f.name) for f in message_descriptor.fields | ||
| 220 | if f.label != _FieldDescriptor.LABEL_REPEATED) | ||
| 221 | field_names.extend(('Extensions', | ||
| 222 | '_cached_byte_size', | ||
| 223 | '_cached_byte_size_dirty', | ||
| 224 | '_called_transition_to_nonempty', | ||
| 225 | '_listener', | ||
| 226 | '_lock', '__weakref__')) | ||
| 227 | dictionary['__slots__'] = field_names | ||
| 228 | |||
| 229 | |||
| 230 | def _AddClassAttributesForNestedExtensions(descriptor, dictionary): | ||
| 231 | extension_dict = descriptor.extensions_by_name | ||
| 232 | for extension_name, extension_field in extension_dict.iteritems(): | ||
| 233 | assert extension_name not in dictionary | ||
| 234 | dictionary[extension_name] = extension_field | ||
| 235 | |||
| 236 | |||
| 237 | def _AddEnumValues(descriptor, cls): | ||
| 238 | """Sets class-level attributes for all enum fields defined in this message. | ||
| 239 | |||
| 240 | Args: | ||
| 241 | descriptor: Descriptor object for this message type. | ||
| 242 | cls: Class we're constructing for this message type. | ||
| 243 | """ | ||
| 244 | for enum_type in descriptor.enum_types: | ||
| 245 | for enum_value in enum_type.values: | ||
| 246 | setattr(cls, enum_value.name, enum_value.number) | ||
| 247 | |||
| 248 | |||
def _DefaultValueForField(message, field):
  """Returns a fresh default value for |field| on |message|.

  Args:
    message: Message instance containing this field, or a weakref proxy
      of same.
    field: FieldDescriptor object for this field.

  Returns: A default value for this field.  May refer back to |message|
    via a weak reference.
  """
  # TODO(robinson): Only the repeated fields need a reference to 'message' (so
  # that they can set the 'has' bit on the containing Message when someone
  # append()s a value).  We could special-case this, and avoid an extra
  # function call on __init__() and Clear() for non-repeated fields.

  # TODO(robinson): Find a better place for the default value assertion in this
  # function.  No need to repeat them every time the client calls Clear('foo').
  # (We should probably just assert these things once and as early as possible,
  # by tightening checking in the descriptor classes.)
  if field.label == _FieldDescriptor.LABEL_REPEATED:
    if field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    listener = _Listener(message, None)
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have
      # been set.  (Depends on order in which we initialize the classes.)
      return _RepeatedCompositeFieldContainer(listener, field.message_type)
    checker = type_checkers.GetTypeChecker(field.cpp_type, field.type)
    return _RepeatedScalarFieldContainer(listener, checker)

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # Composite fields are created lazily by their property getter, so
    # their stored default is always None.
    assert field.default_value is None

  return field.default_value
| 286 | |||
| 287 | |||
def _AddInitMethod(message_descriptor, cls):
  """Installs an __init__ method on cls that initializes every field to
  its default value and sets up per-instance bookkeeping state."""
  message_fields = message_descriptor.fields

  def init(self):
    # Bookkeeping for lazy byte-size computation and parent notification.
    self._cached_byte_size = 0
    self._cached_byte_size_dirty = False
    self._listener = message_listener_mod.NullMessageListener()
    self._called_transition_to_nonempty = False
    # TODO(robinson): We should only create a lock if we really need one
    # in this class.
    self._lock = threading.Lock()
    for field in message_fields:
      setattr(self, _ValueFieldName(field.name),
              _DefaultValueForField(self, field))
      if field.label != _FieldDescriptor.LABEL_REPEATED:
        setattr(self, _HasFieldName(field.name), False)
    self.Extensions = _ExtensionDict(self, cls._known_extensions)

  init.__module__ = None
  init.__doc__ = None
  cls.__init__ = init
| 310 | |||
| 311 | |||
def _AddPropertiesForFields(descriptor, cls):
  """Installs a Python property on cls for every field of this message type."""
  for field_descriptor in descriptor.fields:
    _AddPropertiesForField(field_descriptor, cls)
| 316 | |||
| 317 | |||
def _AddPropertiesForField(field, cls):
  """Adds a public property for a protocol message field.
  Clients can use this property to get and (in the case
  of non-repeated scalar fields) directly set the value
  of a protocol message field.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # Catch it if we add other types that we should
  # handle specially here.
  assert _FieldDescriptor.MAX_CPPTYPE == 10

  # Dispatch on field shape: repeated, composite (message/group), or scalar.
  if field.label == _FieldDescriptor.LABEL_REPEATED:
    _AddPropertiesForRepeatedField(field, cls)
    return
  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    _AddPropertiesForNonRepeatedCompositeField(field, cls)
    return
  _AddPropertiesForNonRepeatedScalarField(field, cls)
| 338 | |||
| 339 | |||
def _AddPropertiesForRepeatedField(field, cls):
  """Adds a public property for a "repeated" protocol message field.  Clients
  can use this property to get the value of the field, which will be either a
  _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see
  below).

  Note that when clients add values to these containers, we perform
  type-checking in the case of repeated scalar fields, and we also set any
  necessary "has" bits as a side-effect.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_name = field.name
  value_attr = _ValueFieldName(proto_name)

  def getter(self):
    return getattr(self, value_attr)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_name

  # Direct assignment would bypass the container's type-checking and
  # listener hookup, so the setter exists only to raise a helpful error.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % proto_name)

  doc = 'Magic attribute generated for "%s" proto field.' % proto_name
  setattr(cls, _PropertyName(proto_name), property(getter, setter, doc=doc))
| 371 | |||
| 372 | |||
def _AddPropertiesForNonRepeatedScalarField(field, cls):
  """Adds a public property for a nonrepeated, scalar protocol message field.
  Clients can use this property to get and directly set the value of the field.
  Note that when the client sets the value of a field by using this property,
  all necessary "has" bits are set as a side-effect, and we also perform
  type-checking.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_name = field.name
  value_attr = _ValueFieldName(proto_name)
  has_attr = _HasFieldName(proto_name)
  checker = type_checkers.GetTypeChecker(field.cpp_type, field.type)

  def getter(self):
    return getattr(self, value_attr)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_name

  def setter(self, new_value):
    # Validate before mutating any state, then flip the "has" bit and
    # notify ancestors before storing the new value.
    checker.CheckValue(new_value)
    setattr(self, has_attr, True)
    self._MarkByteSizeDirty()
    self._MaybeCallTransitionToNonemptyCallback()
    setattr(self, value_attr, new_value)
  setter.__module__ = None
  setter.__doc__ = 'Setter for %s.' % proto_name

  # Add a property to encapsulate the getter/setter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_name
  setattr(cls, _PropertyName(proto_name), property(getter, setter, doc=doc))
| 406 | |||
| 407 | |||
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
  """Adds a public property for a nonrepeated, composite protocol message field.
  A composite field is a "group" or "message" field.

  Clients can use this property to get the value of the field, but cannot
  assign to the property directly.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # TODO(robinson): Remove duplication with similar method
  # for non-repeated scalars.
  proto_field_name = field.name
  python_field_name = _ValueFieldName(proto_field_name)
  has_field_name = _HasFieldName(proto_field_name)
  property_name = _PropertyName(proto_field_name)
  message_type = field.message_type

  def getter(self):
    # The sub-message is created lazily on first access.  This is the
    # check-lock-recheck ("double-checked locking") pattern: the first
    # unlocked read is a fast path; the second read under self._lock
    # guards against another thread having created the value in between.
    # TODO(robinson): Appropriately scary note about double-checked locking.
    field_value = getattr(self, python_field_name)
    if field_value is None:
      self._lock.acquire()
      try:
        field_value = getattr(self, python_field_name)
        if field_value is None:
          field_class = message_type._concrete_class
          field_value = field_class()
          # Hook the child back up to this message so that mutating the
          # child sets our "has" bit and dirties cached byte sizes.
          field_value._SetListener(_Listener(self, has_field_name))
          setattr(self, python_field_name, field_value)
      finally:
        self._lock.release()
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to composite field '
                         '"%s" in protocol message object.' % proto_field_name)

  # Add a property to encapsulate the getter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, property(getter, setter, doc=doc))
| 454 | |||
| 455 | |||
| 456 | def _AddStaticMethods(cls): | ||
| 457 | # TODO(robinson): This probably needs to be thread-safe(?) | ||
| 458 | def RegisterExtension(extension_handle): | ||
| 459 | extension_handle.containing_type = cls.DESCRIPTOR | ||
| 460 | cls._known_extensions.append(extension_handle) | ||
| 461 | cls.RegisterExtension = staticmethod(RegisterExtension) | ||
| 462 | |||
| 463 | |||
def _AddListFieldsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds a ListFields() method returning (FieldDescriptor, value) pairs for
  every "set" field and extension, in ascending field-number order.
  """
  # Ensure that we always list in ascending field-number order.
  # For non-extension fields, we can do the sort once, here, at import-time.
  # For extensions, we sort on each ListFields() call, though
  # we could do better if we have to.
  fields = sorted(message_descriptor.fields, key=lambda f: f.number)
  # Precompute the attribute names once and materialize the triplets as a
  # list: ListFields() iterates them on every call, and a bare zip over
  # generators would be exhausted after the first call on Python 3.
  triplets = [(_HasFieldName(f.name), _ValueFieldName(f.name), f)
              for f in fields]

  def ListFields(self):
    # We need to list all extension and non-extension fields
    # together, in sorted order by field number.

    # Step 0: Get an iterator over all "set" non-extension fields,
    # sorted by field number.
    # This iterator yields (field_number, field_descriptor, value) tuples.
    def SortedSetFieldsIter():
      # Note that triplets is already sorted by field number.
      for has_field_name, value_field_name, field_descriptor in triplets:
        if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
          # Repeated fields are "set" iff nonempty; they have no has-bit.
          value = getattr(self, value_field_name)
          if len(value) > 0:
            yield (field_descriptor.number, field_descriptor, value)
        elif getattr(self, has_field_name):
          value = getattr(self, value_field_name)
          yield (field_descriptor.number, field_descriptor, value)
    sorted_fields = SortedSetFieldsIter()

    # Step 1: Get an iterator over all "set" extension fields,
    # sorted by field number.
    # This iterator ALSO yields (field_number, field_descriptor, value) tuples.
    # TODO(robinson): It's not necessary to repeat this with each
    # serialization call.  We can do better.
    sorted_extension_fields = sorted(
        [(f.number, f, v) for f, v in self.Extensions._ListSetExtensions()])

    # Step 2: Create a composite iterator that merges the extension-
    # and non-extension fields, and that still yields fields in
    # sorted order.
    all_set_fields = _ImergeSorted(sorted_fields, sorted_extension_fields)

    # Step 3: Strip off the field numbers and return.
    return [field[1:] for field in all_set_fields]

  cls.ListFields = ListFields
| 512 | |||
def _AddHasFieldMethod(cls):
  """Helper for _AddMessageMethods().

  Adds a HasField(field_name) method that reports whether the named
  nonrepeated field is currently set.
  """
  def HasField(self, field_name):
    has_attr = _HasFieldName(field_name)
    try:
      return getattr(self, has_attr)
    except AttributeError:
      # An unknown field name shows up as a missing has_<name> attribute.
      raise ValueError('Protocol message has no "%s" field.' % field_name)
  cls.HasField = HasField
| 521 | |||
| 522 | |||
def _AddClearFieldMethod(cls):
  """Helper for _AddMessageMethods().

  Adds a ClearField(field_name) method that resets the named field to its
  default value, clearing the "has" bit for nonrepeated fields.  Raises
  ValueError for unknown field names.
  """
  def ClearField(self, field_name):
    try:
      field = self.DESCRIPTOR.fields_by_name[field_name]
    except KeyError:
      raise ValueError('Protocol message has no "%s" field.' % field_name)
    proto_field_name = field.name
    python_field_name = _ValueFieldName(proto_field_name)
    has_field_name = _HasFieldName(proto_field_name)
    default_value = _DefaultValueForField(self, field)
    if field.label == _FieldDescriptor.LABEL_REPEATED:
      # Repeated fields carry no "has" bit; just mark cached sizes stale
      # and fall through to replace the container with a fresh default.
      self._MarkByteSizeDirty()
    else:
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        old_field_value = getattr(self, python_field_name)
        if old_field_value is not None:
          # Snip the old object out of the object tree.
          old_field_value._SetListener(None)
      if getattr(self, has_field_name):
        setattr(self, has_field_name, False)
        # Set dirty bit on ourself and parents only if
        # we're actually changing state.
        self._MarkByteSizeDirty()
    setattr(self, python_field_name, default_value)
  cls.ClearField = ClearField
| 549 | |||
| 550 | |||
| 551 | def _AddClearExtensionMethod(cls): | ||
| 552 | """Helper for _AddMessageMethods().""" | ||
| 553 | def ClearExtension(self, extension_handle): | ||
| 554 | self.Extensions._ClearExtension(extension_handle) | ||
| 555 | cls.ClearExtension = ClearExtension | ||
| 556 | |||
| 557 | |||
| 558 | def _AddClearMethod(cls): | ||
| 559 | """Helper for _AddMessageMethods().""" | ||
| 560 | def Clear(self): | ||
| 561 | # Clear fields. | ||
| 562 | fields = self.DESCRIPTOR.fields | ||
| 563 | for field in fields: | ||
| 564 | self.ClearField(field.name) | ||
| 565 | # Clear extensions. | ||
| 566 | extensions = self.Extensions._ListSetExtensions() | ||
| 567 | for extension in extensions: | ||
| 568 | self.ClearExtension(extension[0]) | ||
| 569 | cls.Clear = Clear | ||
| 570 | |||
| 571 | |||
| 572 | def _AddHasExtensionMethod(cls): | ||
| 573 | """Helper for _AddMessageMethods().""" | ||
| 574 | def HasExtension(self, extension_handle): | ||
| 575 | return self.Extensions._HasExtension(extension_handle) | ||
| 576 | cls.HasExtension = HasExtension | ||
| 577 | |||
| 578 | |||
def _AddEqualsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds an __eq__ implementation that compares every regular field
  (including the "has" bits of nonrepeated fields) and then the
  extension sets of the two messages.
  """
  def __eq__(self, other):
    if self is other:
      return True

    for field_descriptor in message_descriptor.fields:
      property_name = _PropertyName(field_descriptor.name)
      if field_descriptor.label != _FieldDescriptor.LABEL_REPEATED:
        # Nonrepeated fields must agree on their "has" bits as well
        # as their values.
        self_has = self.HasField(property_name)
        if self_has != other.HasField(property_name):
          return False
        if not self_has:
          # Neither side has the field set, so skip the value compare.
          # This also prevents infinite recursion on recursively-defined
          # message types.
          continue
      if getattr(self, property_name) != getattr(other, property_name):
        return False

    # Finally, the two messages must agree on their extensions.
    return self.Extensions == other.Extensions
  cls.__eq__ = __eq__
| 606 | |||
| 607 | |||
| 608 | def _AddSetListenerMethod(cls): | ||
| 609 | """Helper for _AddMessageMethods().""" | ||
| 610 | def SetListener(self, listener): | ||
| 611 | if listener is None: | ||
| 612 | self._listener = message_listener_mod.NullMessageListener() | ||
| 613 | else: | ||
| 614 | self._listener = listener | ||
| 615 | cls._SetListener = SetListener | ||
| 616 | |||
| 617 | |||
def _BytesForNonRepeatedElement(value, field_number, field_type):
  """Returns the number of bytes needed to serialize a non-repeated element.
  The returned byte count includes space for tag information and any
  other additional space associated with serializing value.

  Args:
    value: Value we're serializing.
    field_number: Field number of this value.  (Since the field number
      is stored as part of a varint-encoded tag, this has an impact
      on the total bytes required to serialize the value).
    field_type: The type of the field.  One of the TYPE_* constants
      within FieldDescriptor.

  Raises:
    message_mod.EncodeError: If field_type is not a known field type.
  """
  try:
    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
  except KeyError:
    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
  # Call outside the try block so that a stray KeyError raised while
  # sizing the value is not misreported as an unknown field type.
  return fn(field_number, value)
| 636 | |||
| 637 | |||
def _AddByteSizeMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds a ByteSize() method returning the serialized size of the message,
  caching the result until a mutation marks it dirty.
  """

  def BytesForField(message, field, value):
    """Returns the number of bytes required to serialize a single field
    in message.  The field may be repeated or not, composite or not.

    Args:
      message: The Message instance containing a field of the given type.
      field: A FieldDescriptor describing the field of interest.
      value: The value whose byte size we're interested in.

    Returns: The number of bytes required to serialize the current value
      of "field" in "message", including space for tags and any other
      necessary information.
    """
    if _MessageSetField(field):
      return wire_format.MessageSetItemByteSize(field.number, value)

    field_number, field_type = field.number, field.type

    # Treat a nonrepeated value as a one-element list so both cases
    # share the summation below.
    if field.label == _FieldDescriptor.LABEL_REPEATED:
      elements = value
    else:
      elements = [value]

    size = sum(_BytesForNonRepeatedElement(element, field_number, field_type)
               for element in elements)
    return size

  fields = message_descriptor.fields
  # Materialize the (has_field_name, field) pairs as a list: ByteSize()
  # iterates them on every call, and a generator-backed zip would be
  # exhausted after the first call on Python 3.
  zipped = [(_HasFieldName(f.name), f) for f in fields]

  def ByteSize(self):
    if not self._cached_byte_size_dirty:
      return self._cached_byte_size

    size = 0
    # Hardcoded fields first.
    for has_field_name, field in zipped:
      if (field.label == _FieldDescriptor.LABEL_REPEATED
          or getattr(self, has_field_name)):
        value = getattr(self, _ValueFieldName(field.name))
        size += BytesForField(self, field, value)
    # Extensions next.
    for field, value in self.Extensions._ListSetExtensions():
      size += BytesForField(self, field, value)

    self._cached_byte_size = size
    self._cached_byte_size_dirty = False
    return size
  cls.ByteSize = ByteSize
| 693 | |||
| 694 | |||
def _MessageSetField(field_descriptor):
  """Checks if a field should be serialized using the message set wire format.

  Args:
    field_descriptor: Descriptor of the field.

  Returns:
    True if the field should be serialized using the message set wire format,
    false otherwise.
  """
  # Message-set items are nonrepeated, message-typed extensions whose
  # container requests the message set wire format in its options.
  if not field_descriptor.is_extension:
    return False
  if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
    return False
  if field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE:
    return False
  return field_descriptor.containing_type.GetOptions().message_set_wire_format
| 709 | |||
| 710 | |||
def _SerializeValueToEncoder(value, field_number, field_descriptor, encoder):
  """Appends the serialization of a single value to encoder.

  Args:
    value: Value to serialize.
    field_number: Field number of this value.
    field_descriptor: Descriptor of the field to serialize.
    encoder: encoder.Encoder object to which we should serialize this value.

  Raises:
    message_mod.EncodeError: If the field's type is not recognized.
  """
  if _MessageSetField(field_descriptor):
    encoder.AppendMessageSetItem(field_number, value)
    return

  try:
    method = type_checkers.TYPE_TO_SERIALIZE_METHOD[field_descriptor.type]
  except KeyError:
    raise message_mod.EncodeError('Unrecognized field type: %d' %
                                  field_descriptor.type)
  # Serialize outside the try block so that a stray KeyError raised while
  # encoding is not misreported as an unknown field type.
  method(encoder, field_number, value)
| 730 | |||
| 731 | |||
| 732 | def _ImergeSorted(*streams): | ||
| 733 | """Merges N sorted iterators into a single sorted iterator. | ||
| 734 | Each element in streams must be an iterable that yields | ||
| 735 | its elements in sorted order, and the elements contained | ||
| 736 | in each stream must all be comparable. | ||
| 737 | |||
| 738 | There may be repeated elements in the component streams or | ||
| 739 | across the streams; the repeated elements will all be repeated | ||
| 740 | in the merged iterator as well. | ||
| 741 | |||
| 742 | I believe that the heapq module at HEAD in the Python | ||
| 743 | sources has a method like this, but for now we roll our own. | ||
| 744 | """ | ||
| 745 | iters = [iter(stream) for stream in streams] | ||
| 746 | heap = [] | ||
| 747 | for index, it in enumerate(iters): | ||
| 748 | try: | ||
| 749 | heap.append((it.next(), index)) | ||
| 750 | except StopIteration: | ||
| 751 | pass | ||
| 752 | heapq.heapify(heap) | ||
| 753 | |||
| 754 | while heap: | ||
| 755 | smallest_value, idx = heap[0] | ||
| 756 | yield smallest_value | ||
| 757 | try: | ||
| 758 | next_element = iters[idx].next() | ||
| 759 | heapq.heapreplace(heap, (next_element, idx)) | ||
| 760 | except StopIteration: | ||
| 761 | heapq.heappop(heap) | ||
| 762 | |||
| 763 | |||
def _AddSerializeToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds SerializeToString(), which raises EncodeError if any required
  field is unset and otherwise delegates to SerializePartialToString().
  """
  def SerializeToString(self):
    # Refuse to serialize unless every required field is set.
    errors = []
    if not _InternalIsInitialized(self, errors):
      raise message_mod.EncodeError('\n'.join(errors))
    return self.SerializePartialToString()
  cls.SerializeToString = SerializeToString
| 774 | |||
| 775 | |||
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds SerializePartialToString(), which serializes the message without
  checking that required fields are set.
  """
  Encoder = encoder.Encoder

  def SerializePartialToString(self):
    # Named "out" rather than "encoder" so we don't shadow the
    # module-level "encoder" module inside this closure.
    out = Encoder()
    # We need to serialize all extension and non-extension fields
    # together, in sorted order by field number.
    for field_descriptor, field_value in self.ListFields():
      if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
        repeated_value = field_value
      else:
        repeated_value = [field_value]
      for element in repeated_value:
        _SerializeValueToEncoder(element, field_descriptor.number,
                                 field_descriptor, out)
    return out.ToString()
  cls.SerializePartialToString = SerializePartialToString
| 794 | |||
| 795 | |||
def _WireTypeForFieldType(field_type):
  """Given a field type, returns the expected wire type."""
  # Sentinel-based lookup; a missing entry means the type is unknown.
  missing = object()
  wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE.get(field_type, missing)
  if wire_type is missing:
    raise message_mod.DecodeError('Unknown field type: %d' % field_type)
  return wire_type
| 802 | |||
| 803 | |||
def _RecursivelyMerge(field_number, field_type, decoder, message):
  """Decodes a message from decoder into message.
  message is either a group or a nested message within some containing
  protocol message.  If it's a group, we use the group protocol to
  deserialize, and if it's a nested message, we use the nested-message
  protocol.

  Args:
    field_number: The field number of message in its enclosing protocol buffer.
    field_type: The field type of message.  Must be either TYPE_MESSAGE
      or TYPE_GROUP.
    decoder: Decoder to read from.
    message: Message to deserialize into.
  """
  # Groups carry their field number in the terminating tag; plain nested
  # messages are simply length-delimited.
  if field_type == _FieldDescriptor.TYPE_GROUP:
    decoder.ReadGroupInto(field_number, message)
  elif field_type == _FieldDescriptor.TYPE_MESSAGE:
    decoder.ReadMessageInto(message)
  else:
    raise message_mod.DecodeError('Unexpected field type: %d' % field_type)
| 824 | |||
| 825 | |||
def _DeserializeScalarFromDecoder(field_type, decoder):
  """Deserializes a scalar of the requested type from decoder.

  field_type must be a scalar (non-group, non-message)
  FieldDescriptor.FIELD_* constant.

  Raises:
    message_mod.DecodeError: If field_type is not a known scalar type.
  """
  try:
    method = type_checkers.TYPE_TO_DESERIALIZE_METHOD[field_type]
  except KeyError:
    raise message_mod.DecodeError('Unrecognized field type: %d' % field_type)
  # Decode outside the try block so that a stray KeyError raised while
  # reading is not misreported as an unknown field type.
  return method(decoder)
| 835 | |||
| 836 | |||
def _SkipField(field_number, wire_type, decoder):
  """Skips a field with the specified wire type.

  Args:
    field_number: Tag number of the field to skip.
    wire_type: Wire type of the field to skip.
    decoder: Decoder used to deserialize the message.  It must be positioned
      just after reading the tag and wire type of the field.
  """
  if wire_type == wire_format.WIRETYPE_VARINT:
    decoder.ReadUInt64()
  elif wire_type == wire_format.WIRETYPE_FIXED64:
    decoder.ReadFixed64()
  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    # Length-delimited payloads are prefixed with their byte length.
    decoder.SkipBytes(decoder.ReadInt32())
  elif wire_type == wire_format.WIRETYPE_START_GROUP:
    _SkipGroup(field_number, decoder)
  elif wire_type == wire_format.WIRETYPE_END_GROUP:
    # Nothing to consume; the group terminator itself has no payload.
    pass
  elif wire_type == wire_format.WIRETYPE_FIXED32:
    decoder.ReadFixed32()
  else:
    raise message_mod.DecodeError('Unexpected wire type: %d' % wire_type)
| 860 | |||
| 861 | |||
def _SkipGroup(group_number, decoder):
  """Skips a nested group from the decoder.

  Args:
    group_number: Tag number of the group to skip.
    decoder: Decoder used to deserialize the message.  It must be positioned
      exactly at the beginning of the message that should be skipped.
  """
  # Consume fields until we hit the END_GROUP tag matching our number;
  # nested groups are skipped recursively via _SkipField.
  while True:
    field_number, wire_type = decoder.ReadFieldNumberAndWireType()
    is_end_tag = (wire_type == wire_format.WIRETYPE_END_GROUP and
                  field_number == group_number)
    if is_end_tag:
      return
    _SkipField(field_number, wire_type, decoder)
| 876 | |||
| 877 | |||
def _DeserializeMessageSetItem(message, decoder):
  """Deserializes a message using the message set wire format.

  A message set item is a group containing a varint "type_id" (field 2)
  followed by a length-delimited "message" payload (field 3); this
  function consumes everything up to and including the END_GROUP tag.

  Args:
    message: Message to be parsed to.
    decoder: The decoder to be used to deserialize encoded data. Note that the
      decoder should be positioned just after reading the START_GROUP tag that
      began the messageset item.
  """
  # Expect the type_id: field 2, varint.
  field_number, wire_type = decoder.ReadFieldNumberAndWireType()
  if wire_type != wire_format.WIRETYPE_VARINT or field_number != 2:
    raise message_mod.DecodeError(
        'Incorrect message set wire format. '
        'wire_type: %d, field_number: %d' % (wire_type, field_number))

  type_id = decoder.ReadInt32()
  # Expect the payload: field 3, length-delimited.
  field_number, wire_type = decoder.ReadFieldNumberAndWireType()
  if wire_type != wire_format.WIRETYPE_LENGTH_DELIMITED or field_number != 3:
    raise message_mod.DecodeError(
        'Incorrect message set wire format. '
        'wire_type: %d, field_number: %d' % (wire_type, field_number))

  extension_dict = message.Extensions
  extensions_by_number = extension_dict._AllExtensionsByNumber()
  if type_id not in extensions_by_number:
    # Unknown extension type: skip the payload rather than failing.
    # NOTE(review): this path returns without consuming the item's
    # END_GROUP tag, leaving it for the caller — verify that is intended.
    _SkipField(field_number, wire_type, decoder)
    return

  field_descriptor = extensions_by_number[type_id]
  value = extension_dict[field_descriptor]
  decoder.ReadMessageInto(value)
  # Read the END_GROUP tag.
  field_number, wire_type = decoder.ReadFieldNumberAndWireType()
  if wire_type != wire_format.WIRETYPE_END_GROUP or field_number != 1:
    raise message_mod.DecodeError(
        'Incorrect message set wire format. '
        'wire_type: %d, field_number: %d' % (wire_type, field_number))
| 915 | |||
| 916 | |||
def _DeserializeOneEntity(message_descriptor, message, decoder):
  """Deserializes the next wire entity from decoder into message.
  The next wire entity is either a scalar or a nested message,
  and may also be an element in a repeated field (the wire encoding
  is the same).

  Args:
    message_descriptor: A Descriptor instance describing all fields
      in message.
    message: The Message instance into which we're decoding our fields.
    decoder: The Decoder we're using to deserialize encoded data.

  Returns: The number of bytes read from decoder during this method.
    Returns 0 when an END_GROUP tag is encountered (taken to terminate
    the group currently being parsed).
  """
  initial_position = decoder.Position()
  field_number, wire_type = decoder.ReadFieldNumberAndWireType()
  extension_dict = message.Extensions
  extensions_by_number = extension_dict._AllExtensionsByNumber()
  if field_number in message_descriptor.fields_by_number:
    # Non-extension field.
    field_descriptor = message_descriptor.fields_by_number[field_number]
    value = getattr(message, _PropertyName(field_descriptor.name))
    def nonextension_setter_fn(scalar):
      setattr(message, _PropertyName(field_descriptor.name), scalar)
    scalar_setter_fn = nonextension_setter_fn
  elif field_number in extensions_by_number:
    # Extension field.
    field_descriptor = extensions_by_number[field_number]
    value = extension_dict[field_descriptor]
    def extension_setter_fn(scalar):
      extension_dict[field_descriptor] = scalar
    scalar_setter_fn = extension_setter_fn
  elif wire_type == wire_format.WIRETYPE_END_GROUP:
    # We assume we're being parsed as the group that's ended.
    return 0
  elif (wire_type == wire_format.WIRETYPE_START_GROUP and
        field_number == 1 and
        message_descriptor.GetOptions().message_set_wire_format):
    # A Message Set item.
    _DeserializeMessageSetItem(message, decoder)
    return decoder.Position() - initial_position
  else:
    # Unknown field: skip it entirely.
    _SkipField(field_number, wire_type, decoder)
    return decoder.Position() - initial_position

  # If we reach this point, we've identified the field as either
  # hardcoded or extension, and set |field_descriptor|, |scalar_setter_fn|,
  # and |value| appropriately.  Now actually deserialize the thing.
  #
  # field_descriptor: Describes the field we're deserializing.
  # value: The value currently stored in the field to deserialize.
  #   Used only if the field is composite and/or repeated.
  # scalar_setter_fn: A function F such that F(scalar) will
  #   set a nonrepeated scalar value for this field.  Used only
  #   if this field is a nonrepeated scalar.

  field_number = field_descriptor.number
  field_type = field_descriptor.type
  expected_wire_type = _WireTypeForFieldType(field_type)
  if wire_type != expected_wire_type:
    # Need to fill in uninterpreted_bytes.  Work for the next CL.
    raise RuntimeError('TODO(robinson): Wiretype mismatches not handled.')

  property_name = _PropertyName(field_descriptor.name)
  label = field_descriptor.label
  cpp_type = field_descriptor.cpp_type

  # Nonrepeated scalar.  Just set the field directly.
  if (label != _FieldDescriptor.LABEL_REPEATED
      and cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE):
    scalar_setter_fn(_DeserializeScalarFromDecoder(field_type, decoder))
    return decoder.Position() - initial_position

  # Nonrepeated composite.  Recursively deserialize.
  if label != _FieldDescriptor.LABEL_REPEATED:
    composite = value
    _RecursivelyMerge(field_number, field_type, decoder, composite)
    return decoder.Position() - initial_position

  # Now we know we're dealing with a repeated field of some kind.
  element_list = value

  if cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE:
    # Repeated scalar.
    element_list.append(_DeserializeScalarFromDecoder(field_type, decoder))
    return decoder.Position() - initial_position
  else:
    # Repeated composite.
    composite = element_list.add()
    _RecursivelyMerge(field_number, field_type, decoder, composite)
    return decoder.Position() - initial_position
| 1008 | |||
| 1009 | |||
def _FieldOrExtensionValues(message, field_or_extension):
  """Retrieves the list of values for the specified field or extension.

  The target field or extension can be optional, required or repeated, but
  it must have value(s) set.  The assumption is that the target field or
  extension is set (e.g. _HasFieldOrExtension holds true).

  Args:
    message: Message which contains the target field or extension.
    field_or_extension: Field or extension for which the list of values is
      required.  Must be an instance of FieldDescriptor.

  Returns:
    A list of values for the specified field or extension.  This list will
    only contain a single element if the field is non-repeated.
  """
  if field_or_extension.is_extension:
    value = message.Extensions[field_or_extension]
  else:
    value = getattr(message, _ValueFieldName(field_or_extension.name))
  if field_or_extension.label == _FieldDescriptor.LABEL_REPEATED:
    # Repeated: the stored value is already a sequence of values.
    return value
  # Nonrepeated: wrap the single value for a uniform return type.
  return [value]
| 1035 | |||
| 1036 | |||
def _HasFieldOrExtension(message, field_or_extension):
  """Checks if a message has the specified field or extension set.

  The field or extension specified can be optional, required or repeated.
  If it is repeated, this function returns True.  Otherwise it checks the
  has bit of the field or extension.

  Args:
    message: Message which contains the target field or extension.
    field_or_extension: Field or extension to check.  This must be a
      FieldDescriptor instance.

  Returns:
    True if the message has a value set for the specified field or
    extension, or if the field or extension is repeated.
  """
  # Repeated fields are always treated as present.
  if field_or_extension.label == _FieldDescriptor.LABEL_REPEATED:
    return True
  if field_or_extension.is_extension:
    return message.HasExtension(field_or_extension)
  return message.HasField(field_or_extension.name)
| 1059 | |||
| 1060 | |||
def _IsFieldOrExtensionInitialized(message, field, errors=None):
  """Checks if a message field or extension is initialized.

  Args:
    message: The message which contains the field or extension.
    field: Field or extension to check. This must be a FieldDescriptor instance.
    errors: Errors will be appended to it, if set to a meaningful value.

  Returns:
    True if the field/extension can be considered initialized.
  """
  # If the field is required and is not set, it isn't initialized.
  if field.label == _FieldDescriptor.LABEL_REQUIRED:
    if not _HasFieldOrExtension(message, field):
      if errors is not None:
        errors.append('Required field %s is not set.' % field.full_name)
      return False

  # If the field is optional and is not set, or if it
  # isn't a submessage then the field is initialized.
  if field.label == _FieldDescriptor.LABEL_OPTIONAL:
    if not _HasFieldOrExtension(message, field):
      return True
  if field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE:
    return True

  # The field is set and is either a single or a repeated submessage.
  messages = _FieldOrExtensionValues(message, field)
  # If all submessages in this field are initialized, the field is
  # considered initialized.  (Use a distinct loop variable here so we
  # don't clobber the "message" argument while iterating.)
  for sub_message in messages:
    if not _InternalIsInitialized(sub_message, errors):
      return False
  return True
| 1095 | |||
| 1096 | |||
| 1097 | def _InternalIsInitialized(message, errors=None): | ||
| 1098 | """Checks if all required fields of a message are set. | ||
| 1099 | |||
| 1100 | Args: | ||
| 1101 | message: The message to check. | ||
| 1102 | errors: If set, initialization errors will be appended to it. | ||
| 1103 | |||
| 1104 | Returns: | ||
| 1105 | True iff the specified message has all required fields set. | ||
| 1106 | """ | ||
| 1107 | fields_and_extensions = [] | ||
| 1108 | fields_and_extensions.extend(message.DESCRIPTOR.fields) | ||
| 1109 | fields_and_extensions.extend( | ||
| 1110 | [extension[0] for extension in message.Extensions._ListSetExtensions()]) | ||
| 1111 | for field_or_extension in fields_and_extensions: | ||
| 1112 | if not _IsFieldOrExtensionInitialized(message, field_or_extension, errors): | ||
| 1113 | return False | ||
| 1114 | return True | ||
| 1115 | |||
| 1116 | |||
def _AddMergeFromStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Adds MergeFromString(serialized), which parses the serialized bytes
  into self, merging with any existing field values, and returns the
  number of bytes consumed.
  """
  Decoder = decoder.Decoder
  def MergeFromString(self, serialized):
    # Named "dec" rather than "decoder" so we don't shadow the
    # module-level "decoder" module inside this closure.
    dec = Decoder(serialized)
    byte_count = 0
    while not dec.EndOfStream():
      bytes_read = _DeserializeOneEntity(message_descriptor, self, dec)
      if not bytes_read:
        # _DeserializeOneEntity returns 0 on an END_GROUP tag, which
        # terminates the entity we are currently parsing.
        break
      byte_count += bytes_read
    return byte_count
  cls.MergeFromString = MergeFromString
| 1130 | |||
| 1131 | |||
def _AddIsInitializedMethod(cls):
  """Adds the IsInitialized method to the protocol message class."""
  # _InternalIsInitialized already has the (self, errors=None) shape
  # expected of the instance method, so it is attached directly.
  setattr(cls, 'IsInitialized', _InternalIsInitialized)
| 1135 | |||
| 1136 | |||
def _MergeFieldOrExtension(destination_msg, field, value):
  """Merges the given field (or extension) value into destination_msg."""
  property_name = _PropertyName(field.name)
  is_extension = field.is_extension
  is_repeated = field.label == _FieldDescriptor.LABEL_REPEATED
  is_composite = field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE

  # Resolve the destination container/submessage for everything except
  # singular scalar extensions, which are assigned below instead.
  if not is_extension:
    destination = getattr(destination_msg, property_name)
  elif is_repeated or is_composite:
    destination = destination_msg.Extensions[field]

  if is_composite:
    # A single or repeated submessage: merge recursively.
    if is_repeated:
      for element in value:
        destination.add().MergeFrom(element)
    else:
      destination.MergeFrom(value)
  elif is_repeated:
    # A repeated scalar field: append each element.
    for element in value:
      destination.append(element)
  elif is_extension:
    # A singular scalar extension: assign through the Extensions dict.
    destination_msg.Extensions[field] = value
  else:
    # A singular scalar plain field: plain attribute assignment.
    setattr(destination_msg, property_name, value)
| 1168 | |||
| 1169 | |||
| 1170 | def _AddMergeFromMethod(cls): | ||
| 1171 | def MergeFrom(self, msg): | ||
| 1172 | assert msg is not self | ||
| 1173 | for field in msg.ListFields(): | ||
| 1174 | _MergeFieldOrExtension(self, field[0], field[1]) | ||
| 1175 | cls.MergeFrom = MergeFrom | ||
| 1176 | |||
| 1177 | |||
def _AddMessageMethods(message_descriptor, cls):
  """Adds implementations of all Message methods to cls."""
  # Each helper installs one (or a few) methods on cls; the boolean flags
  # which helpers also need the message descriptor. Order matches the
  # historical install order.
  installers = [
      (_AddListFieldsMethod, True),
      (_AddHasFieldMethod, False),
      (_AddClearFieldMethod, False),
      (_AddClearExtensionMethod, False),
      (_AddClearMethod, False),
      (_AddHasExtensionMethod, False),
      (_AddEqualsMethod, True),
      (_AddSetListenerMethod, False),
      (_AddByteSizeMethod, True),
      (_AddSerializeToStringMethod, True),
      (_AddSerializePartialToStringMethod, True),
      (_AddMergeFromStringMethod, True),
      (_AddIsInitializedMethod, False),
      (_AddMergeFromMethod, False),
  ]
  for installer, needs_descriptor in installers:
    if needs_descriptor:
      installer(message_descriptor, cls)
    else:
      installer(cls)
| 1194 | |||
| 1195 | |||
| 1196 | def _AddPrivateHelperMethods(cls): | ||
| 1197 | """Adds implementation of private helper methods to cls.""" | ||
| 1198 | |||
| 1199 | def MaybeCallTransitionToNonemptyCallback(self): | ||
| 1200 | """Calls self._listener.TransitionToNonempty() the first time this | ||
| 1201 | method is called. On all subsequent calls, this is a no-op. | ||
| 1202 | """ | ||
| 1203 | if not self._called_transition_to_nonempty: | ||
| 1204 | self._listener.TransitionToNonempty() | ||
| 1205 | self._called_transition_to_nonempty = True | ||
| 1206 | cls._MaybeCallTransitionToNonemptyCallback = ( | ||
| 1207 | MaybeCallTransitionToNonemptyCallback) | ||
| 1208 | |||
| 1209 | def MarkByteSizeDirty(self): | ||
| 1210 | """Sets the _cached_byte_size_dirty bit to true, | ||
| 1211 | and propagates this to our listener iff this was a state change. | ||
| 1212 | """ | ||
| 1213 | if not self._cached_byte_size_dirty: | ||
| 1214 | self._cached_byte_size_dirty = True | ||
| 1215 | self._listener.ByteSizeDirty() | ||
| 1216 | cls._MarkByteSizeDirty = MarkByteSizeDirty | ||
| 1217 | |||
| 1218 | |||
| 1219 | class _Listener(object): | ||
| 1220 | |||
| 1221 | """MessageListener implementation that a parent message registers with its | ||
| 1222 | child message. | ||
| 1223 | |||
| 1224 | In order to support semantics like: | ||
| 1225 | |||
| 1226 | foo.bar.baz = 23 | ||
| 1227 | assert foo.HasField('bar') | ||
| 1228 | |||
| 1229 | ...child objects must have back references to their parents. | ||
| 1230 | This helper class is at the heart of this support. | ||
| 1231 | """ | ||
| 1232 | |||
| 1233 | def __init__(self, parent_message, has_field_name): | ||
| 1234 | """Args: | ||
| 1235 | parent_message: The message whose _MaybeCallTransitionToNonemptyCallback() | ||
| 1236 | and _MarkByteSizeDirty() methods we should call when we receive | ||
| 1237 | TransitionToNonempty() and ByteSizeDirty() messages. | ||
| 1238 | has_field_name: The name of the "has" field that we should set in | ||
| 1239 | the parent message when we receive a TransitionToNonempty message, | ||
| 1240 | or None if there's no "has" field to set. (This will be the case | ||
| 1241 | for child objects in "repeated" fields). | ||
| 1242 | """ | ||
| 1243 | # This listener establishes a back reference from a child (contained) object | ||
| 1244 | # to its parent (containing) object. We make this a weak reference to avoid | ||
| 1245 | # creating cyclic garbage when the client finishes with the 'parent' object | ||
| 1246 | # in the tree. | ||
| 1247 | if isinstance(parent_message, weakref.ProxyType): | ||
| 1248 | self._parent_message_weakref = parent_message | ||
| 1249 | else: | ||
| 1250 | self._parent_message_weakref = weakref.proxy(parent_message) | ||
| 1251 | self._has_field_name = has_field_name | ||
| 1252 | |||
| 1253 | def TransitionToNonempty(self): | ||
| 1254 | try: | ||
| 1255 | if self._has_field_name is not None: | ||
| 1256 | setattr(self._parent_message_weakref, self._has_field_name, True) | ||
| 1257 | # Propagate the signal to our parents iff this is the first field set. | ||
| 1258 | self._parent_message_weakref._MaybeCallTransitionToNonemptyCallback() | ||
| 1259 | except ReferenceError: | ||
| 1260 | # We can get here if a client has kept a reference to a child object, | ||
| 1261 | # and is now setting a field on it, but the child's parent has been | ||
| 1262 | # garbage-collected. This is not an error. | ||
| 1263 | pass | ||
| 1264 | |||
| 1265 | def ByteSizeDirty(self): | ||
| 1266 | try: | ||
| 1267 | self._parent_message_weakref._MarkByteSizeDirty() | ||
| 1268 | except ReferenceError: | ||
| 1269 | # Same as above. | ||
| 1270 | pass | ||
| 1271 | |||
| 1272 | |||
| 1273 | # TODO(robinson): Move elsewhere? | ||
| 1274 | # TODO(robinson): Provide a clear() method here in addition to ClearField()? | ||
| 1275 | class _RepeatedScalarFieldContainer(object): | ||
| 1276 | |||
| 1277 | """Simple, type-checked, list-like container for holding repeated scalars.""" | ||
| 1278 | |||
| 1279 | # Minimizes memory usage and disallows assignment to other attributes. | ||
| 1280 | __slots__ = ['_message_listener', '_type_checker', '_values'] | ||
| 1281 | |||
| 1282 | def __init__(self, message_listener, type_checker): | ||
| 1283 | """ | ||
| 1284 | Args: | ||
| 1285 | message_listener: A MessageListener implementation. | ||
| 1286 | The _RepeatedScalarFieldContaininer will call this object's | ||
| 1287 | TransitionToNonempty() method when it transitions from being empty to | ||
| 1288 | being nonempty. | ||
| 1289 | type_checker: A _ValueChecker instance to run on elements inserted | ||
| 1290 | into this container. | ||
| 1291 | """ | ||
| 1292 | self._message_listener = message_listener | ||
| 1293 | self._type_checker = type_checker | ||
| 1294 | self._values = [] | ||
| 1295 | |||
| 1296 | def append(self, elem): | ||
| 1297 | self._type_checker.CheckValue(elem) | ||
| 1298 | self._values.append(elem) | ||
| 1299 | self._message_listener.ByteSizeDirty() | ||
| 1300 | if len(self._values) == 1: | ||
| 1301 | self._message_listener.TransitionToNonempty() | ||
| 1302 | |||
| 1303 | def remove(self, elem): | ||
| 1304 | self._values.remove(elem) | ||
| 1305 | self._message_listener.ByteSizeDirty() | ||
| 1306 | |||
| 1307 | # List-like __getitem__() support also makes us iterable (via "iter(foo)" | ||
| 1308 | # or implicitly via "for i in mylist:") for free. | ||
| 1309 | def __getitem__(self, key): | ||
| 1310 | return self._values[key] | ||
| 1311 | |||
| 1312 | def __setitem__(self, key, value): | ||
| 1313 | # No need to call TransitionToNonempty(), since if we're able to | ||
| 1314 | # set the element at this index, we were already nonempty before | ||
| 1315 | # this method was called. | ||
| 1316 | self._message_listener.ByteSizeDirty() | ||
| 1317 | self._type_checker.CheckValue(value) | ||
| 1318 | self._values[key] = value | ||
| 1319 | |||
| 1320 | def __len__(self): | ||
| 1321 | return len(self._values) | ||
| 1322 | |||
| 1323 | def __eq__(self, other): | ||
| 1324 | if self is other: | ||
| 1325 | return True | ||
| 1326 | # Special case for the same type which should be common and fast. | ||
| 1327 | if isinstance(other, self.__class__): | ||
| 1328 | return other._values == self._values | ||
| 1329 | # We are presumably comparing against some other sequence type. | ||
| 1330 | return other == self._values | ||
| 1331 | |||
| 1332 | def __ne__(self, other): | ||
| 1333 | # Can't use != here since it would infinitely recurse. | ||
| 1334 | return not self == other | ||
| 1335 | |||
| 1336 | |||
| 1337 | # TODO(robinson): Move elsewhere? | ||
| 1338 | # TODO(robinson): Provide a clear() method here in addition to ClearField()? | ||
| 1339 | # TODO(robinson): Unify common functionality with | ||
| 1340 | # _RepeatedScalarFieldContaininer? | ||
| 1341 | class _RepeatedCompositeFieldContainer(object): | ||
| 1342 | |||
| 1343 | """Simple, list-like container for holding repeated composite fields.""" | ||
| 1344 | |||
| 1345 | # Minimizes memory usage and disallows assignment to other attributes. | ||
| 1346 | __slots__ = ['_values', '_message_descriptor', '_message_listener'] | ||
| 1347 | |||
| 1348 | def __init__(self, message_listener, message_descriptor): | ||
| 1349 | """Note that we pass in a descriptor instead of the generated directly, | ||
| 1350 | since at the time we construct a _RepeatedCompositeFieldContainer we | ||
| 1351 | haven't yet necessarily initialized the type that will be contained in the | ||
| 1352 | container. | ||
| 1353 | |||
| 1354 | Args: | ||
| 1355 | message_listener: A MessageListener implementation. | ||
| 1356 | The _RepeatedCompositeFieldContainer will call this object's | ||
| 1357 | TransitionToNonempty() method when it transitions from being empty to | ||
| 1358 | being nonempty. | ||
| 1359 | message_descriptor: A Descriptor instance describing the protocol type | ||
| 1360 | that should be present in this container. We'll use the | ||
| 1361 | _concrete_class field of this descriptor when the client calls add(). | ||
| 1362 | """ | ||
| 1363 | self._message_listener = message_listener | ||
| 1364 | self._message_descriptor = message_descriptor | ||
| 1365 | self._values = [] | ||
| 1366 | |||
| 1367 | def add(self): | ||
| 1368 | new_element = self._message_descriptor._concrete_class() | ||
| 1369 | new_element._SetListener(self._message_listener) | ||
| 1370 | self._values.append(new_element) | ||
| 1371 | self._message_listener.ByteSizeDirty() | ||
| 1372 | self._message_listener.TransitionToNonempty() | ||
| 1373 | return new_element | ||
| 1374 | |||
| 1375 | def __delitem__(self, key): | ||
| 1376 | self._message_listener.ByteSizeDirty() | ||
| 1377 | del self._values[key] | ||
| 1378 | |||
| 1379 | # List-like __getitem__() support also makes us iterable (via "iter(foo)" | ||
| 1380 | # or implicitly via "for i in mylist:") for free. | ||
| 1381 | def __getitem__(self, key): | ||
| 1382 | return self._values[key] | ||
| 1383 | |||
| 1384 | def __len__(self): | ||
| 1385 | return len(self._values) | ||
| 1386 | |||
| 1387 | def __eq__(self, other): | ||
| 1388 | if self is other: | ||
| 1389 | return True | ||
| 1390 | if not isinstance(other, self.__class__): | ||
| 1391 | raise TypeError('Can only compare repeated composite fields against ' | ||
| 1392 | 'other repeated composite fields.') | ||
| 1393 | return self._values == other._values | ||
| 1394 | |||
| 1395 | def __ne__(self, other): | ||
| 1396 | # Can't use != here since it would infinitely recurse. | ||
| 1397 | return not self == other | ||
| 1398 | |||
| 1399 | # TODO(robinson): Implement, document, and test slicing support. | ||
| 1400 | |||
| 1401 | |||
| 1402 | # TODO(robinson): Move elsewhere? This file is getting pretty ridiculous... | ||
| 1403 | # TODO(robinson): Unify error handling of "unknown extension" crap. | ||
| 1404 | # TODO(robinson): There's so much similarity between the way that | ||
| 1405 | # extensions behave and the way that normal fields behave that it would | ||
| 1406 | # be really nice to unify more code. It's not immediately obvious | ||
| 1407 | # how to do this, though, and I'd rather get the full functionality | ||
| 1408 | # implemented (and, crucially, get all the tests and specs fleshed out | ||
| 1409 | # and passing), and then come back to this thorny unification problem. | ||
| 1410 | # TODO(robinson): Support iteritems()-style iteration over all | ||
| 1411 | # extensions with the "has" bits turned on? | ||
class _ExtensionDict(object):

  """Dict-like container for supporting an indexable "Extensions"
  field on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  class _ExtensionListener(object):

    """Adapts an _ExtensionDict to behave as a MessageListener."""

    def __init__(self, extension_dict, handle_id):
      # The dict to notify, and the id() of the extension handle whose
      # submessage this listener watches.
      self._extension_dict = extension_dict
      self._handle_id = handle_id

    def TransitionToNonempty(self):
      self._extension_dict._SubmessageTransitionedToNonempty(self._handle_id)

    def ByteSizeDirty(self):
      self._extension_dict._SubmessageByteSizeBecameDirty()

  # TODO(robinson): Somewhere, we need to blow up if people
  # try to register two extensions with the same field number.
  # (And we need a test for this of course).

  def __init__(self, extended_message, known_extensions):
    """extended_message: Message instance for which we are the Extensions dict.
    known_extensions: Iterable of known extension handles.
      These must be FieldDescriptors.
    """
    # We keep a weak reference to extended_message, since
    # it has a reference to this instance in turn.
    self._extended_message = weakref.proxy(extended_message)
    # We make a deep copy of known_extensions to avoid any
    # thread-safety concerns, since the argument passed in
    # is the global (class-level) dict of known extensions for
    # this type of message, which could be modified at any time
    # via a RegisterExtension() call.
    #
    # This dict maps from handle id to handle (a FieldDescriptor).
    #
    # XXX
    # TODO(robinson): This isn't good enough. The client could
    # instantiate an object in module A, then afterward import
    # module B and pass the instance to B.Foo(). If B imports
    # an extender of this proto and then tries to use it, B
    # will get a KeyError, even though the extension *is* registered
    # at the time of use.
    # XXX
    self._known_extensions = dict((id(e), e) for e in known_extensions)
    # Read lock around self._values, which may be modified by multiple
    # concurrent readers in the conceptually "const" __getitem__ method.
    # So, we grab this lock in every "read-only" method to ensure
    # that concurrent read access is safe without external locking.
    self._lock = threading.Lock()
    # Maps from extension handle ID to current value of that extension.
    # Entries are created lazily on first access (see _AddMissingHandle).
    self._values = {}
    # Maps from extension handle ID to a boolean "has" bit, but only
    # for non-repeated extension fields.
    keys = (id for id, extension in self._known_extensions.iteritems()
            if extension.label != _FieldDescriptor.LABEL_REPEATED)
    self._has_bits = dict.fromkeys(keys, False)

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""
    # We don't care as much about keeping critical sections short in the
    # extension support, since it's presumably much less of a common case.
    self._lock.acquire()
    try:
      handle_id = id(extension_handle)
      if handle_id not in self._known_extensions:
        raise KeyError('Extension not known to this class')
      if handle_id not in self._values:
        # Lazily create the default value (or empty submessage/container).
        self._AddMissingHandle(extension_handle, handle_id)
      return self._values[handle_id]
    finally:
      self._lock.release()

  def __eq__(self, other):
    # We have to grab read locks since we're accessing _values
    # in a "const" method. See the comment in the constructor.
    # NOTE(review): locks are always acquired self-then-other; two threads
    # comparing the same pair in opposite directions could deadlock —
    # confirm callers never compare concurrently in both directions.
    if self is other:
      return True
    self._lock.acquire()
    try:
      other._lock.acquire()
      try:
        if self._has_bits != other._has_bits:
          return False
        # If there's a "has" bit, then only compare values where it is true.
        for k, v in self._values.iteritems():
          if self._has_bits.get(k, False) and v != other._values[k]:
            return False
        return True
      finally:
        other._lock.release()
    finally:
      self._lock.release()

  def __ne__(self, other):
    return not self == other

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields. Note also that we may have to call
  # MaybeCallTransitionToNonemptyCallback() when we do successfully set a field
  # this way, to set any necessary "has" bits in the ancestors of the extended
  # message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.

    Raises KeyError for unknown handles and TypeError for repeated or
    composite extensions (those must be mutated via the container or
    submessage returned by __getitem__ instead).
    """
    handle_id = id(extension_handle)
    if handle_id not in self._known_extensions:
      raise KeyError('Extension not known to this class')
    field = extension_handle  # Just shorten the name.
    if (field.label == _FieldDescriptor.LABEL_OPTIONAL
        and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE):
      # It's slightly wasteful to lookup the type checker each time,
      # but we expect this to be a vanishingly uncommon case anyway.
      type_checker = type_checkers.GetTypeChecker(field.cpp_type, field.type)
      type_checker.CheckValue(value)
      self._values[handle_id] = value
      self._has_bits[handle_id] = True
      self._extended_message._MarkByteSizeDirty()
      self._extended_message._MaybeCallTransitionToNonemptyCallback()
    else:
      raise TypeError('Extension is repeated and/or a composite type.')

  def _AddMissingHandle(self, extension_handle, handle_id):
    """Helper internal to ExtensionDict."""
    # Special handling for non-repeated message extensions, which (like
    # normal fields of this kind) are initialized lazily.
    # REQUIRES: _lock already held.
    cpp_type = extension_handle.cpp_type
    label = extension_handle.label
    if (cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
        and label != _FieldDescriptor.LABEL_REPEATED):
      self._AddMissingNonRepeatedCompositeHandle(extension_handle, handle_id)
    else:
      self._values[handle_id] = _DefaultValueForField(
          self._extended_message, extension_handle)

  def _AddMissingNonRepeatedCompositeHandle(self, extension_handle, handle_id):
    """Helper internal to ExtensionDict."""
    # REQUIRES: _lock already held.
    # The listener keeps our "has" bit in sync when the lazily-created
    # submessage first becomes nonempty.
    value = extension_handle.message_type._concrete_class()
    value._SetListener(_ExtensionDict._ExtensionListener(self, handle_id))
    self._values[handle_id] = value

  def _SubmessageTransitionedToNonempty(self, handle_id):
    """Called when a submessage with a given handle id first transitions to
    being nonempty. Called by _ExtensionListener.
    """
    assert handle_id in self._has_bits
    self._has_bits[handle_id] = True
    self._extended_message._MaybeCallTransitionToNonemptyCallback()

  def _SubmessageByteSizeBecameDirty(self):
    """Called whenever a submessage's cached byte size becomes invalid
    (goes from being "clean" to being "dirty"). Called by _ExtensionListener.
    """
    self._extended_message._MarkByteSizeDirty()

  # We may wish to widen the public interface of Message.Extensions
  # to expose some of this private functionality in the future.
  # For now, we make all this functionality module-private and just
  # implement what we need for serialization/deserialization,
  # HasField()/ClearField(), etc.

  def _HasExtension(self, extension_handle):
    """Method for internal use by this module.
    Returns true iff we "have" this extension in the sense of the
    "has" bit being set.
    """
    handle_id = id(extension_handle)
    # Note that this is different from the other checks.
    if handle_id not in self._has_bits:
      raise KeyError('Extension not known to this class, or is repeated field.')
    return self._has_bits[handle_id]

  # Intentionally pretty similar to ClearField() above.
  def _ClearExtension(self, extension_handle):
    """Method for internal use by this module.
    Clears the specified extension, unsetting its "has" bit.
    """
    handle_id = id(extension_handle)
    if handle_id not in self._known_extensions:
      raise KeyError('Extension not known to this class')
    default_value = _DefaultValueForField(self._extended_message,
                                          extension_handle)
    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
      self._extended_message._MarkByteSizeDirty()
    else:
      cpp_type = extension_handle.cpp_type
      if cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if handle_id in self._values:
          # Future modifications to this object shouldn't set any
          # "has" bits here.
          self._values[handle_id]._SetListener(None)
      if self._has_bits[handle_id]:
        self._has_bits[handle_id] = False
        self._extended_message._MarkByteSizeDirty()
    # Drop the cached value last; __getitem__ will lazily recreate it.
    if handle_id in self._values:
      del self._values[handle_id]

  def _ListSetExtensions(self):
    """Method for internal use by this module.

    Returns a sequence of all extensions that are currently "set"
    in this extension dict. A "set" extension is a repeated extension,
    or a non-repeated extension with its "has" bit set.

    The returned sequence contains (field_descriptor, value) pairs,
    where value is the current value of the extension with the given
    field descriptor.

    The sequence values are in arbitrary order.
    """
    self._lock.acquire()  # Read-only methods must lock around self._values.
    try:
      set_extensions = []
      for handle_id, value in self._values.iteritems():
        handle = self._known_extensions[handle_id]
        if (handle.label == _FieldDescriptor.LABEL_REPEATED
            or self._has_bits[handle_id]):
          set_extensions.append((handle, value))
      return set_extensions
    finally:
      self._lock.release()

  def _AllExtensionsByNumber(self):
    """Method for internal use by this module.

    Returns: A dict mapping field_number to field_descriptor,
      for *all* registered extensions for this dict.
    """
    # TODO(robinson): Precompute and store this away. Note that we'll have to
    # be careful when we move away from having _known_extensions as a
    # deep-copied member of this object.
    return dict((f.number, f) for f in self._known_extensions.itervalues())
diff --git a/froofle/protobuf/service.py b/froofle/protobuf/service.py new file mode 100644 index 00000000..3989216a --- /dev/null +++ b/froofle/protobuf/service.py | |||
| @@ -0,0 +1,208 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Declares the RPC service interfaces. | ||
| 32 | |||
| 33 | This module declares the abstract interfaces underlying proto2 RPC | ||
services. These are intended to be independent of any particular RPC
| 35 | implementation, so that proto2 services can be used on top of a variety | ||
| 36 | of implementations. | ||
| 37 | """ | ||
| 38 | |||
| 39 | __author__ = 'petar@google.com (Petar Petrov)' | ||
| 40 | |||
| 41 | |||
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Services themselves are abstract classes (implemented either by servers or as
  stubs), but they subclass this base interface. The methods of this
  interface can be used to call the methods of the service without knowing
  its exact type at compile time (analogous to the Message interface).

  All methods of this base class raise NotImplementedError; concrete
  services and generated stubs override them.
  """

  def GetDescriptor(self):
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Calls a method of the service specified by method_descriptor.

    Preconditions:
    * method_descriptor.service == GetDescriptor()
    * request is of the exact same class as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation being
      used by this Service. For stubs, the "correct type" depends on the
      RpcChannel which the stub is using.

    Postconditions:
    * "done" will be called when the method is complete. This may be
      before CallMethod() returns or it may be at some point in the future.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires that the request is of a particular subclass of
    Message. GetRequestClass() gets the default instance of this required
    type.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, rpc_controller, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    This method isn't really needed, as the RpcChannel's CallMethod constructs
    the response protocol message. It's provided anyway in case it is useful
    for the caller to know the response type in advance.
    """
    raise NotImplementedError
| 98 | |||
| 99 | |||
class RpcController(object):

  """Mediates a single RPC method call.

  A controller carries per-call state between the application and the RPC
  implementation: client-side settings and error reporting, plus
  server-side failure and cancellation hooks.  Only a "least common
  denominator" feature set is defined here; concrete implementations are
  free to layer on more (e.g. deadline propagation).
  """

  # ---- Client-side methods ----

  def Reset(self):
    """Return the controller to its initial state so it can be reused.

    Must not be invoked while an RPC is still in progress.
    """
    raise NotImplementedError

  def Failed(self):
    """Report whether the finished call failed.

    Only meaningful after the call has completed; the possible causes of
    failure are implementation-specific.  When true, the contents of the
    response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """Return a human-readable error description when Failed() is true."""
    raise NotImplementedError

  def StartCancel(self):
    """Ask the RPC system to cancel the in-flight call.

    Cancellation is advisory: the system may cancel at once, later, or not
    at all.  If the call is cancelled, the "done" callback still fires and
    the controller reports the call as failed.
    """
    raise NotImplementedError

  # ---- Server-side methods ----

  def SetFailed(self, reason):
    """Mark the call as failed with a human-readable reason.

    The reason becomes part of the message the client sees via
    ErrorText().  Machine-readable failure details belong in the response
    protocol buffer instead — in that case SetFailed() should NOT be
    called.
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Report whether the client cancelled the RPC.

    When true the server may give up on producing a reply, but must still
    invoke the final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Register a callback to run when the RPC is cancelled.

    The callback always runs exactly once: on cancellation, after normal
    completion if no cancellation happens, or immediately if the call was
    already cancelled.  May be called at most once per request.
    """
    raise NotImplementedError
| 181 | |||
| 182 | |||
class RpcChannel(object):

  """Abstract transport used to reach a (possibly remote) service.

  Applications normally never touch a channel directly; they wrap it in a
  generated stub class and call the stub's methods instead:

    channel = rpcImpl.Channel("remotehost.example.com:1234")
    controller = rpcImpl.Controller()
    service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Invoke the remote method identified by method_descriptor.

    Mirrors Service.CallMethod(), but with one relaxed requirement: the
    request may be of any class whose descriptor equals
    method_descriptor.input_type.
    """
    raise NotImplementedError
diff --git a/froofle/protobuf/service_reflection.py b/froofle/protobuf/service_reflection.py new file mode 100644 index 00000000..bdd6bad5 --- /dev/null +++ b/froofle/protobuf/service_reflection.py | |||
| @@ -0,0 +1,289 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Contains metaclasses used to create protocol service and service stub | ||
| 32 | classes from ServiceDescriptor objects at runtime. | ||
| 33 | |||
| 34 | The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to | ||
| 35 | inject all useful functionality into the classes output by the protocol | ||
| 36 | compiler at compile-time. | ||
| 37 | """ | ||
| 38 | |||
| 39 | __author__ = 'petar@google.com (Petar Petrov)' | ||
| 40 | |||
| 41 | |||
class GeneratedServiceType(type):

  """Metaclass that fleshes out service classes from a ServiceDescriptor.

  Classes emitted by the protocol compiler (or written by hand, as below)
  declare a DESCRIPTOR attribute; this metaclass then installs working
  implementations of the Service interface on them:

    mydescriptor = ServiceDescriptor(.....)
    class MyProtoService(service.Service):
      __metaclass__ = GeneratedServiceType
      DESCRIPTOR = mydescriptor
    myservice_instance = MyProtoService()
    ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Populate a freshly created service class.

    Args:
      name: Class name (ignored; required by the metaclass protocol).
      bases: Base classes of the class being constructed.
      dictionary: Class namespace.  When it maps _DESCRIPTOR_KEY to a
        ServiceDescriptor, the service machinery is generated from it;
        otherwise — e.g. when a generated service class is subclassed —
        nothing is done.
    """
    key = GeneratedServiceType._DESCRIPTOR_KEY
    if key in dictionary:
      _ServiceBuilder(dictionary[key]).BuildService(cls)
| 82 | |||
| 83 | |||
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass that fleshes out client stub classes from a descriptor.

  Plays the same role as GeneratedServiceType, but additionally wires the
  generated class up as a stub whose methods forward to an RpcChannel.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Populate a freshly created stub class.

    Args:
      name: Class name (ignored here).
      bases: Base classes of the class being constructed.
      dictionary: Class namespace; must map _DESCRIPTOR_KEY to a
        ServiceDescriptor for the stub machinery to be generated.
        Subclasses of generated stubs omit the key and are left alone.
    """
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    key = GeneratedServiceStubType._DESCRIPTOR_KEY
    if key in dictionary:
      _ServiceStubBuilder(dictionary[key]).BuildServiceStub(cls)
| 112 | |||
| 113 | |||
class _ServiceBuilder(object):

  """Builds a concrete service class from a ServiceDescriptor.

  Each builder instance is responsible for exactly one generated class;
  all instances of that class therefore share the same builder.
  """

  def __init__(self, service_descriptor):
    """Remember the descriptor the class will be generated from.

    Args:
      service_descriptor: ServiceDescriptor driving class construction.
    """
    self.descriptor = service_descriptor

  def BuildService(self, cls):
    """Install the Service interface onto cls.

    Args:
      cls: The class being populated.
    """
    # CallMethod must run against a concrete service instance; this shim
    # exists only to forward that instance into _CallMethod.
    def _WrapCallMethod(srvc, method_descriptor,
                        rpc_controller, request, callback):
      self._CallMethod(srvc, method_descriptor,
                       rpc_controller, request, callback)

    self.cls = cls
    cls.CallMethod = _WrapCallMethod
    cls.GetDescriptor = self._GetDescriptor
    cls.GetRequestClass = self._GetRequestClass
    cls.GetResponseClass = self._GetResponseClass
    for method_descriptor in self.descriptor.methods:
      setattr(cls, method_descriptor.name,
              self._GenerateNonImplementedMethod(method_descriptor))

  def _GetDescriptor(self):
    """Return the ServiceDescriptor this class was generated from."""
    return self.descriptor

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Dispatch to the service method named by method_descriptor.

    Args:
      srvc: Service instance the call is made on.
      method_descriptor: Descriptor identifying the target method.
      rpc_controller: RPC controller for this invocation.
      request: Request protocol message.
      callback: Invoked once the method completes.

    Raises:
      RuntimeError: If the descriptor belongs to a different service.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    getattr(srvc, method_descriptor.name)(rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Return the request message class for the given method.

    Args:
      method_descriptor: Descriptor of the method of interest.

    Returns:
      The concrete class of the method's input message.

    Raises:
      RuntimeError: If the descriptor belongs to a different service.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Return the response message class for the given method.

    Args:
      method_descriptor: Descriptor of the method of interest.

    Returns:
      The concrete class of the method's output message.

    Raises:
      RuntimeError: If the descriptor belongs to a different service.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Create the placeholder body installed for one service method.

    Args:
      method: Descriptor of the method being generated.

    Returns:
      A callable suitable for use as an (unimplemented) service method.
    """
    return lambda inst, rpc_controller, request, callback: (
        self._NonImplementedMethod(method.name, rpc_controller, callback))

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """Fail the call, reporting that method_name is not implemented.

    Args:
      method_name: Name of the unimplemented method.
      rpc_controller: Controller on which the failure is recorded.
      callback: Completion callback; invoked with None.
    """
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)
| 235 | |||
| 236 | |||
class _ServiceStubBuilder(object):

  """Builds a client stub class from a ServiceDescriptor.

  A stub is a thin, type-safe veneer over an RpcChannel that looks like a
  local implementation of the service.  One builder instance backs exactly
  one generated stub class, so all instances of that class share it.
  """

  def __init__(self, service_descriptor):
    """Remember the descriptor the stub class will be generated from.

    Args:
      service_descriptor: ServiceDescriptor driving stub construction.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Install the stub machinery onto cls.

    Args:
      cls: The stub class being populated.
    """
    def _ServiceStubInit(stub, rpc_channel):
      stub.rpc_channel = rpc_channel

    self.cls = cls
    cls.__init__ = _ServiceStubInit
    for method_descriptor in self.descriptor.methods:
      setattr(cls, method_descriptor.name,
              self._GenerateStubMethod(method_descriptor))

  def _GenerateStubMethod(self, method):
    # Bind the per-method descriptor into a forwarding closure.
    return lambda inst, rpc_controller, request, callback: self._StubMethod(
        inst, method, rpc_controller, request, callback)

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """Forward a stub method call to the underlying RpcChannel.

    Args:
      stub: Stub instance the call was made on.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: RPC controller for this invocation.
      request: Request protocol message.
      callback: Invoked when the channel completes the call.
    """
    stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request,
        method_descriptor.output_type._concrete_class, callback)
diff --git a/froofle/protobuf/text_format.py b/froofle/protobuf/text_format.py new file mode 100644 index 00000000..1c4cadfc --- /dev/null +++ b/froofle/protobuf/text_format.py | |||
| @@ -0,0 +1,125 @@ | |||
| 1 | # Protocol Buffers - Google's data interchange format | ||
| 2 | # Copyright 2008 Google Inc. All rights reserved. | ||
| 3 | # http://code.google.com/p/protobuf/ | ||
| 4 | # | ||
| 5 | # Redistribution and use in source and binary forms, with or without | ||
| 6 | # modification, are permitted provided that the following conditions are | ||
| 7 | # met: | ||
| 8 | # | ||
| 9 | # * Redistributions of source code must retain the above copyright | ||
| 10 | # notice, this list of conditions and the following disclaimer. | ||
| 11 | # * Redistributions in binary form must reproduce the above | ||
| 12 | # copyright notice, this list of conditions and the following disclaimer | ||
| 13 | # in the documentation and/or other materials provided with the | ||
| 14 | # distribution. | ||
| 15 | # * Neither the name of Google Inc. nor the names of its | ||
| 16 | # contributors may be used to endorse or promote products derived from | ||
| 17 | # this software without specific prior written permission. | ||
| 18 | # | ||
| 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
| 30 | |||
| 31 | """Contains routines for printing protocol messages in text format.""" | ||
| 32 | |||
| 33 | __author__ = 'kenton@google.com (Kenton Varda)' | ||
| 34 | |||
| 35 | import cStringIO | ||
| 36 | |||
| 37 | from froofle.protobuf import descriptor | ||
| 38 | |||
# Public text-format API; underscore helpers such as _CEscape stay private.
__all__ = [ 'MessageToString', 'PrintMessage', 'PrintField', 'PrintFieldValue' ]
| 40 | |||
def MessageToString(message):
  """Render message in protocol buffer text format and return the string."""
  out = cStringIO.StringIO()
  PrintMessage(message, out)
  text = out.getvalue()
  out.close()
  return text
| 47 | |||
def PrintMessage(message, out, indent = 0):
  """Write every set field of message to out, one name/value pair per line."""
  for field, value in message.ListFields():
    # Repeated fields are flattened: one printed line per element.
    if field.label != descriptor.FieldDescriptor.LABEL_REPEATED:
      PrintField(field, value, out, indent)
    else:
      for element in value:
        PrintField(field, element, out, indent)
| 55 | |||
def PrintField(field, value, out, indent = 0):
  """Write a single "name: value" line for one field to out.

  For repeated fields, value must be a single element, not the full list.
  """
  out.write(' ' * indent)
  if field.is_extension:
    out.write('[')
    # MessageSet extensions print under the message type's full name;
    # every other extension prints under the field's own full name.
    is_message_set_extension = (
        field.containing_type.GetOptions().message_set_wire_format and
        field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
        field.message_type == field.extension_scope and
        field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
    if is_message_set_extension:
      out.write(field.message_type.full_name)
    else:
      out.write(field.full_name)
    out.write(']')
  elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
    # Groups print under their capitalized type name.
    out.write(field.message_type.name)
  else:
    out.write(field.name)

  if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    # The colon is optional for message fields, and the cross-language
    # golden files omit it there, so only non-message fields get one.
    out.write(': ')

  PrintFieldValue(field, value, out, indent)
  out.write('\n')
| 84 | |||
def PrintFieldValue(field, value, out, indent = 0):
  """Write a single field value (no name) to out.

  For repeated fields, value must be a single element, not the full list.
  """
  cpp_type = field.cpp_type
  if cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    # Nested messages print as a brace-delimited block, indented 2 more.
    out.write(' {\n')
    PrintMessage(value, out, indent + 2)
    out.write(' ' * indent + '}')
  elif cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    out.write(field.enum_type.values_by_number[value].name)
  elif cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    out.write('"' + _CEscape(value) + '"')
  elif cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    if value:
      out.write("true")
    else:
      out.write("false")
  else:
    out.write(str(value))
| 106 | |||
# We roll our own escaping instead of text.encode('string_escape'): that
# codec emits two-digit hex escapes, but the C++ unescaper treats a hex
# escape as arbitrarily long, so "\0011" encoded as "\\x011" would decode
# in C++ to a single char 0x11.  Three-digit octal escapes are unambiguous.
def _CEscape(text):
  """Escape text the way the C++ text-format unescaper expects."""
  optional = {10: r"\n", 13: r"\r", 9: r"\t", 39: r"\'"}
  necessary = {34: r'\"', 92: r"\\"}
  pieces = []
  for ch in text:
    code = ord(ch)
    if code in optional:
      pieces.append(optional[code])
    elif code in necessary:
      pieces.append(necessary[code])
    elif code >= 127 or code < 32:
      # Non-printable and non-ASCII bytes become three-digit octal escapes.
      pieces.append("\\%03o" % code)
    else:
      pieces.append(ch)
  return "".join(pieces)
diff --git a/gerrit_upload.py b/gerrit_upload.py new file mode 100755 index 00000000..a49fb232 --- /dev/null +++ b/gerrit_upload.py | |||
| @@ -0,0 +1,156 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import getpass | ||
| 17 | import os | ||
| 18 | import subprocess | ||
| 19 | import sys | ||
| 20 | from tempfile import mkstemp | ||
| 21 | |||
| 22 | from codereview.proto_client import HttpRpc, Proxy | ||
| 23 | from codereview.review_pb2 import ReviewService_Stub | ||
| 24 | from codereview.upload_bundle_pb2 import * | ||
| 25 | from git_command import GitCommand | ||
| 26 | from error import UploadError | ||
| 27 | |||
| 28 | try: | ||
| 29 | import readline | ||
| 30 | except ImportError: | ||
| 31 | pass | ||
| 32 | |||
# Maximum bundle bytes sent per RPC segment.  1020 KiB rather than a full
# 1 MiB — presumably headroom for the rest of the request payload; the
# first segment additionally subtracts an estimate of the revision list's
# size (see UploadBundle).
MAX_SEGMENT_SIZE = 1020 * 1024
| 34 | |||
def _GetRpcServer(email, server, save_cookies):
  """Create the RPC server used to talk to the review service.

  Args:
    email: Account email, or None to prompt the user interactively.
    server: Host (and optional port) of the review server.
    save_cookies: When true, persist auth cookies in .gerrit_cookies.

  Returns:
    A new RpcServer, on which RPC calls can be made.
  """

  def GetUserCredentials():
    """Prompt for a username/password pair, reusing email when given."""
    who = email
    if who is None:
      who = raw_input("Email: ").strip()
    password = getpass.getpass("Password for %s: " % who)
    return (who, password)

  # The local dev_appserver has no real authentication: fake the login
  # cookie and skip ClientLogin entirely.
  lc_server = server.lower()
  if lc_server == "localhost" or lc_server.startswith("localhost:"):
    if email is None:
      email = "test@example.com"
    rpc = HttpRpc(
        server,
        lambda: (email, "password"),
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email})
    rpc.authenticated = True
    return rpc

  if save_cookies:
    cookie_file = ".gerrit_cookies"
  else:
    cookie_file = None

  return HttpRpc(server, GetUserCredentials,
                 cookie_file=cookie_file)
| 71 | |||
def UploadBundle(project,
                 server,
                 email,
                 dest_project,
                 dest_branch,
                 src_branch,
                 bases,
                 save_cookies=True):
  """Upload a git bundle of src_branch to the review server.

  Creates a `git bundle` of the commits reachable from src_branch but not
  from bases, then streams it to the ReviewService in segments of at most
  MAX_SEGMENT_SIZE bytes.

  Args:
    project: Project whose history is being uploaded.
    server: Review server host (and optional port).
    email: Account email, or None to prompt the user.
    dest_project: Name of the project on the server.
    dest_branch: Branch on the server the changes are destined for.
    src_branch: Local branch (or other revision) being uploaded.
    bases: Revisions used as bundle bases (excluded from the bundle;
      presumably already present on the server).
    save_cookies: When true, persist auth cookies between runs.

  Returns:
    The server-assigned bundle id once the upload is fully received.

  Raises:
    UploadError: If the bundle cannot be created or the server rejects it.
  """
  srv = _GetRpcServer(email, server, save_cookies)
  review = Proxy(ReviewService_Stub(srv))
  tmp_fd, tmp_bundle = mkstemp(".bundle", ".gpq")
  os.close(tmp_fd)

  srcid = project.bare_git.rev_parse(src_branch)
  revlist = project._revlist(src_branch, *bases)

  if srcid not in revlist:
    # This can happen if src_branch is an annotated tag
    #
    revlist.append(srcid)
  # Rough size of the contained_object list sent with the first request:
  # a 40-char object name plus per-entry overhead for each revision.
  revlist_size = len(revlist) * 42

  fd = None
  try:
    cmd = ['bundle', 'create', tmp_bundle, src_branch]
    cmd.extend(bases)
    if GitCommand(project, cmd).Wait() != 0:
      raise UploadError('cannot create bundle')
    fd = open(tmp_bundle, "rb")

    bundle_id = None
    segment_id = 0
    # Leave room in the first segment for the revision list that rides
    # along with it.
    next_data = fd.read(MAX_SEGMENT_SIZE - revlist_size)

    while True:
      this_data = next_data
      next_data = fd.read(MAX_SEGMENT_SIZE)
      segment_id += 1

      if bundle_id is None:
        # First segment opens the bundle and names every revision it
        # will contain.
        req = UploadBundleRequest()
        req.dest_project = str(dest_project)
        req.dest_branch = str(dest_branch)
        for c in revlist:
          req.contained_object.append(c)
      else:
        req = UploadBundleContinue()
        req.bundle_id = bundle_id
        req.segment_id = segment_id

      req.bundle_data = this_data
      # partial_upload tells the server that more segments will follow.
      if len(next_data) > 0:
        req.partial_upload = True
      else:
        req.partial_upload = False

      if bundle_id is None:
        rsp = review.UploadBundle(req)
      else:
        rsp = review.ContinueBundle(req)

      if rsp.status_code == UploadBundleResponse.CONTINUE:
        bundle_id = rsp.bundle_id
      elif rsp.status_code == UploadBundleResponse.RECEIVED:
        bundle_id = rsp.bundle_id
        return bundle_id
      else:
        if rsp.status_code == UploadBundleResponse.UNKNOWN_PROJECT:
          reason = 'unknown project "%s"' % dest_project
        elif rsp.status_code == UploadBundleResponse.UNKNOWN_BRANCH:
          reason = 'unknown branch "%s"' % dest_branch
        elif rsp.status_code == UploadBundleResponse.UNKNOWN_BUNDLE:
          reason = 'unknown bundle'
        elif rsp.status_code == UploadBundleResponse.NOT_BUNDLE_OWNER:
          reason = 'not bundle owner'
        elif rsp.status_code == UploadBundleResponse.BUNDLE_CLOSED:
          reason = 'bundle closed'
        elif rsp.status_code == UploadBundleResponse.UNAUTHORIZED_USER:
          reason = ('Unauthorized user. Visit http://%s/hello to sign up.'
                    % server)
        else:
          reason = 'unknown error ' + str(rsp.status_code)
        raise UploadError(reason)
  finally:
    # Close the bundle file before deleting it; the original code leaked
    # this handle (and unlinking an open file fails on some platforms).
    if fd is not None:
      fd.close()
    os.unlink(tmp_bundle)
diff --git a/git_command.py b/git_command.py new file mode 100644 index 00000000..a3bd9192 --- /dev/null +++ b/git_command.py | |||
| @@ -0,0 +1,164 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | import subprocess | ||
| 19 | from error import GitError | ||
| 20 | |||
GIT = 'git'                    # name of the git executable to invoke
MIN_GIT_VERSION = (1, 5, 4)    # oldest git release this tool supports
GIT_DIR = 'GIT_DIR'            # env var git uses to locate its repository
REPO_TRACE = 'REPO_TRACE'      # env var that enables command tracing

# Last cwd / GIT_DIR announced on stderr while tracing, so repeated
# commands in the same context are not re-announced each time.
LAST_GITDIR = None
LAST_CWD = None
try:
  # Tracing is on when the user exports REPO_TRACE=1.
  TRACE = os.environ[REPO_TRACE] == '1'
except KeyError:
  TRACE = False
| 32 | |||
| 33 | |||
| 34 | class _GitCall(object): | ||
| 35 | def version(self): | ||
| 36 | p = GitCommand(None, ['--version'], capture_stdout=True) | ||
| 37 | if p.Wait() == 0: | ||
| 38 | return p.stdout | ||
| 39 | return None | ||
| 40 | |||
| 41 | def __getattr__(self, name): | ||
| 42 | name = name.replace('_','-') | ||
| 43 | def fun(*cmdv): | ||
| 44 | command = [name] | ||
| 45 | command.extend(cmdv) | ||
| 46 | return GitCommand(None, command).Wait() == 0 | ||
| 47 | return fun | ||
| 48 | git = _GitCall() | ||
| 49 | |||
class GitCommand(object):
  """Runs a single git command as a child process.

  Construction starts the process; Wait() collects any captured
  output and returns the exit status.
  """
  def __init__(self,
               project,
               cmdv,
               bare = False,
               provide_stdin = False,
               capture_stdout = False,
               capture_stderr = False,
               disable_editor = False,
               cwd = None,
               gitdir = None):
    """Start `git cmdv...`.

    project:        object supplying .worktree and .gitdir defaults
                    (may be None for repository-independent commands).
    bare:           run against the git dir directly, not a worktree.
    provide_stdin:  open a pipe to the child's stdin (self.stdin).
    capture_stdout/capture_stderr: collect output in Wait().
    disable_editor: prevent git from launching an interactive editor.
    cwd, gitdir:    explicit overrides of the project defaults.
    """
    env = dict(os.environ)

    # Scrub environment variables that would redirect git away from
    # the repository we are about to select below.
    for e in [REPO_TRACE,
              GIT_DIR,
              'GIT_ALTERNATE_OBJECT_DIRECTORIES',
              'GIT_OBJECT_DIRECTORY',
              'GIT_WORK_TREE',
              'GIT_GRAFT_FILE',
              'GIT_INDEX_FILE']:
      if e in env:
        del env[e]

    if disable_editor:
      # ':' is a no-op command, so any editor invocation returns
      # immediately without changing the file.
      env['GIT_EDITOR'] = ':'

    if project:
      if not cwd:
        cwd = project.worktree
      if not gitdir:
        gitdir = project.gitdir

    command = [GIT]
    if bare:
      # Bare commands address the git dir via the environment and
      # must not run inside a worktree.
      if gitdir:
        env[GIT_DIR] = gitdir
      cwd = None
    command.extend(cmdv)

    if provide_stdin:
      stdin = subprocess.PIPE
    else:
      stdin = None

    if capture_stdout:
      stdout = subprocess.PIPE
    else:
      stdout = None

    if capture_stderr:
      stderr = subprocess.PIPE
    else:
      stderr = None

    if TRACE:
      global LAST_CWD
      global LAST_GITDIR

      dbg = ''

      # Only announce cd / GIT_DIR changes when they differ from the
      # previously traced command's context.
      if cwd and LAST_CWD != cwd:
        if LAST_GITDIR or LAST_CWD:
          dbg += '\n'
        dbg += ': cd %s\n' % cwd
        LAST_CWD = cwd

      if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
        if LAST_GITDIR or LAST_CWD:
          dbg += '\n'
        dbg += ': export GIT_DIR=%s\n' % env[GIT_DIR]
        LAST_GITDIR = env[GIT_DIR]

      dbg += ': '
      dbg += ' '.join(command)
      # Mark which standard streams are redirected to pipes.
      if stdin == subprocess.PIPE:
        dbg += ' 0<|'
      if stdout == subprocess.PIPE:
        dbg += ' 1>|'
      if stderr == subprocess.PIPE:
        dbg += ' 2>|'
      print >>sys.stderr, dbg

    try:
      p = subprocess.Popen(command,
                           cwd = cwd,
                           env = env,
                           stdin = stdin,
                           stdout = stdout,
                           stderr = stderr)
    except Exception, e:
      # command[1] is the git subcommand name (command[0] is 'git').
      raise GitError('%s: %s' % (command[1], e))

    self.process = p
    self.stdin = p.stdin

  def Wait(self):
    """Wait for the command to finish; return its exit status.

    Closes the child's stdin (if piped) and drains captured stdout
    and stderr into self.stdout / self.stderr.
    """
    p = self.process

    # Closing stdin signals EOF to the child so it can terminate.
    if p.stdin:
      p.stdin.close()
      self.stdin = None

    if p.stdout:
      self.stdout = p.stdout.read()
      p.stdout.close()
    else:
      p.stdout = None

    if p.stderr:
      self.stderr = p.stderr.read()
      p.stderr.close()
    else:
      p.stderr = None

    return self.process.wait()
diff --git a/git_config.py b/git_config.py new file mode 100644 index 00000000..f6c5bd1e --- /dev/null +++ b/git_config.py | |||
| @@ -0,0 +1,344 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import re | ||
| 18 | import sys | ||
| 19 | from error import GitError | ||
| 20 | from git_command import GitCommand | ||
| 21 | |||
R_HEADS = 'refs/heads/'               # prefix of local branch refs
R_TAGS = 'refs/tags/'                 # prefix of tag refs
ID_RE = re.compile('^[0-9a-f]{40}$')  # full 40-hex-digit SHA-1 object id

def IsId(rev):
  """Return True if rev is a complete lowercase SHA-1 object id.

  Returns a real boolean rather than a Match-or-None, which is
  backward compatible with callers testing truthiness.
  """
  return ID_RE.match(rev) is not None
| 28 | |||
| 29 | |||
class GitConfig(object):
  """A cached view of one git configuration file.

  Values are loaded lazily via `git config --null --list` and cached
  in memory; SetString writes through to the file and updates the
  cache.  An optional defaults configuration supplies fallback values
  for keys not defined here.
  """

  @classmethod
  def ForUser(cls):
    """Return the current user's ~/.gitconfig configuration."""
    return cls(file = os.path.expanduser('~/.gitconfig'))

  @classmethod
  def ForRepository(cls, gitdir, defaults=None):
    """Return the configuration of the repository at gitdir.

    gitdir:   path of the .git directory.
    defaults: optional GitConfig consulted for missing keys.
    """
    return cls(file = os.path.join(gitdir, 'config'),
               defaults = defaults)

  def __init__(self, file, defaults=None):
    self.file = file          # path of the configuration file
    self.defaults = defaults  # fallback GitConfig, or None
    self._cache_dict = None   # lazy {key: [value, ...]} map
    self._remotes = {}        # Remote objects, keyed by name
    self._branches = {}       # Branch objects, keyed by name

  def Has(self, name, include_defaults = True):
    """Return true if this configuration file has the key.
    """
    name = name.lower()
    if name in self._cache:
      return True
    if include_defaults and self.defaults:
      return self.defaults.Has(name, include_defaults = True)
    return False

  def GetBoolean(self, name):
    """Returns a boolean from the configuration file.
       None : The value was not defined, or is not a boolean.
       True : The value was set to true or yes.
       False: The value was set to false or no.
    """
    v = self.GetString(name)
    if v is None:
      return None
    v = v.lower()
    if v in ('true', 'yes'):
      return True
    if v in ('false', 'no'):
      return False
    return None

  def GetString(self, name, all=False):
    """Get the first value for a key, or None if it is not defined.

    This configuration file is used first; if the key is not defined
    or all=True then the defaults are also searched.  With all=True a
    list of every value is returned instead of a single string.
    """
    name = name.lower()

    try:
      v = self._cache[name]
    except KeyError:
      if self.defaults:
        return self.defaults.GetString(name, all = all)
      v = []

    if not all:
      if v:
        return v[0]
      return None

    # all=True: local values first, then any from the defaults.
    r = []
    r.extend(v)
    if self.defaults:
      r.extend(self.defaults.GetString(name, all = True))
    return r

  def SetString(self, name, value):
    """Set the value(s) for a key.
       Only this configuration file is modified.

       The supplied value should be either a string, a list of
       strings (to store multiple values), or None (to unset).
    """
    name = name.lower()

    try:
      old = self._cache[name]
    except KeyError:
      old = []

    if value is None:
      # Unset only when the key actually exists.
      if old:
        del self._cache[name]
        self._do('--unset-all', name)

    elif isinstance(value, list):
      if len(value) == 0:
        self.SetString(name, None)

      elif len(value) == 1:
        self.SetString(name, value[0])

      elif old != value:
        # Rewrite the whole multi-valued key.
        self._cache[name] = list(value)
        self._do('--replace-all', name, value[0])
        for i in xrange(1, len(value)):
          self._do('--add', name, value[i])

    elif len(old) != 1 or old[0] != value:
      # Single value changed (or replaced an old multi-valued key).
      self._cache[name] = [value]
      self._do('--replace-all', name, value)

  def GetRemote(self, name):
    """Get the remote.$name.* configuration values as an object.
    """
    try:
      r = self._remotes[name]
    except KeyError:
      r = Remote(self, name)
      self._remotes[r.name] = r
    return r

  def GetBranch(self, name):
    """Get the branch.$name.* configuration values as an object.
    """
    try:
      b = self._branches[name]
    except KeyError:
      b = Branch(self, name)
      self._branches[b.name] = b
    return b

  @property
  def _cache(self):
    # Parse the configuration file on first use only.
    if self._cache_dict is None:
      self._cache_dict = self._Read()
    return self._cache_dict

  def _Read(self):
    """Parse `git config --null --list` output into {key: [values]}."""
    d = self._do('--null', '--list')
    c = {}
    # Each record is 'key\nvalue\0'; --null lets values safely
    # contain newlines.  NOTE(review): a key defined with no value
    # emits no '\n' separator and would break this parse — confirm.
    while d:
      lf = d.index('\n')
      nul = d.index('\0', lf + 1)

      key = d[0:lf]
      val = d[lf + 1:nul]

      if key in c:
        c[key].append(val)
      else:
        c[key] = [val]

      d = d[nul + 1:]
    return c

  def _do(self, *args):
    """Run `git config --file <file> args...`; return its stdout.

    Raises GitError if git config exits with a nonzero status.
    """
    command = ['config', '--file', self.file]
    command.extend(args)

    p = GitCommand(None,
                   command,
                   capture_stdout = True,
                   capture_stderr = True)
    if p.Wait() == 0:
      return p.stdout
    else:
      # Previously this error object was created but never raised,
      # silently returning None to callers on failure.
      raise GitError('git config %s: %s' % (str(args), p.stderr))
| 191 | |||
| 192 | |||
class RefSpec(object):
  """A Git refspec line, split into its components:

     forced:  True if the line starts with '+'
     src:     Left side of the line
     dst:     Right side of the line
  """

  @classmethod
  def FromString(cls, rs):
    """Parse a 'src:dst' refspec string, honoring a leading '+'.

    Splits at the first ':' only; previously maxsplit=2 permitted a
    third field, so an extra colon raised ValueError instead of
    being kept in the destination.
    """
    lhs, rhs = rs.split(':', 1)
    if lhs.startswith('+'):
      lhs = lhs[1:]
      forced = True
    else:
      forced = False
    return cls(forced, lhs, rhs)

  def __init__(self, forced, lhs, rhs):
    self.forced = forced  # True if updates may be non-fast-forward
    self.src = lhs        # source (remote) side of the mapping
    self.dst = rhs        # destination (local) side of the mapping

  def SourceMatches(self, rev):
    """True if rev is covered by the source side of this refspec."""
    if self.src:
      if rev == self.src:
        return True
      # 'refs/heads/*' matches anything under 'refs/heads/'.
      if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
        return True
    return False

  def DestMatches(self, ref):
    """True if ref is covered by the destination side of this refspec."""
    if self.dst:
      if ref == self.dst:
        return True
      if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
        return True
    return False

  def MapSource(self, rev):
    """Map a source rev through this refspec to its destination ref."""
    if self.src.endswith('/*'):
      # Substitute the wildcard suffix of rev into the destination.
      return self.dst[:-1] + rev[len(self.src) - 1:]
    return self.dst

  def __str__(self):
    s = ''
    if self.forced:
      s += '+'
    if self.src:
      s += self.src
    if self.dst:
      s += ':'
      s += self.dst
    return s
| 247 | |||
| 248 | |||
class Remote(object):
  """Configuration options related to a remote.

  Loads remote.$name.url, .review and .fetch from the supplied
  configuration; Save() writes any modifications back.
  """
  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.url = self._Get('url')
    self.review = self._Get('review')
    self.fetch = [RefSpec.FromString(line)
                  for line in self._Get('fetch', all=True)]

  def ToLocal(self, rev):
    """Convert a remote revision string to something we have locally.
    """
    # Object ids and tags are usable as-is.
    if IsId(rev):
      return rev
    if rev.startswith(R_TAGS):
      return rev

    if not rev.startswith('refs/'):
      rev = R_HEADS + rev

    for spec in self.fetch:
      if spec.SourceMatches(rev):
        return spec.MapSource(rev)
    raise GitError('remote %s does not have %s' % (self.name, rev))

  def WritesTo(self, ref):
    """True if the remote stores to the tracking ref.
    """
    return any(spec.DestMatches(ref) for spec in self.fetch)

  def ResetFetch(self):
    """Set the fetch refspec to its default value.
    """
    default = RefSpec(True,
                      'refs/heads/*',
                      'refs/remotes/%s/*' % self.name)
    self.fetch = [default]

  def Save(self):
    """Save this remote to the configuration.
    """
    self._Set('url', self.url)
    self._Set('review', self.review)
    self._Set('fetch', [str(spec) for spec in self.fetch])

  def _Set(self, key, value):
    # Store under the remote.$name. prefix.
    return self._config.SetString('remote.%s.%s' % (self.name, key), value)

  def _Get(self, key, all=False):
    # Read from the remote.$name. prefix.
    return self._config.GetString('remote.%s.%s' % (self.name, key), all = all)
| 305 | |||
| 306 | |||
class Branch(object):
  """Configuration options related to a single branch.

  Loads branch.$name.merge and .remote from the supplied
  configuration; Save() writes any modifications back.
  """
  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.merge = self._Get('merge')

    remote_name = self._Get('remote')
    self.remote = None
    if remote_name:
      self.remote = self._config.GetRemote(remote_name)

  @property
  def LocalMerge(self):
    """Convert the merge spec to a local name.
    """
    if not (self.remote and self.merge):
      return None
    return self.remote.ToLocal(self.merge)

  def Save(self):
    """Save this branch back into the configuration.
    """
    self._Set('merge', self.merge)
    if self.remote:
      remote_name = self.remote.name
    else:
      remote_name = None
    self._Set('remote', remote_name)

  def _Set(self, key, value):
    # Store under the branch.$name. prefix.
    return self._config.SetString('branch.%s.%s' % (self.name, key), value)

  def _Get(self, key, all=False):
    # Read from the branch.$name. prefix.
    return self._config.GetString('branch.%s.%s' % (self.name, key), all = all)
diff --git a/import_ext.py b/import_ext.py new file mode 100644 index 00000000..2a1ebf88 --- /dev/null +++ b/import_ext.py | |||
| @@ -0,0 +1,422 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import random | ||
| 18 | import stat | ||
| 19 | import sys | ||
| 20 | import urllib2 | ||
| 21 | import StringIO | ||
| 22 | |||
| 23 | from error import GitError, ImportError | ||
| 24 | from git_command import GitCommand | ||
| 25 | |||
class ImportExternal(object):
  """Imports a single revision from a non-git data source.
     Suitable for use to import a tar or zip based snapshot.
  """
  def __init__(self):
    self._marks = 0      # last fast-import mark number issued
    self._files = {}     # _File records, keyed by cleaned path
    self._tempref = 'refs/repo-external/import'  # scratch ref for fast-import

    self._urls = []      # candidate download URLs (mirrors)
    self._remap = []     # _PathMap rules applied to archive paths
    self.parent = None   # commit to attach the import to, if any
    # Fixed synthetic committer identity so imports are reproducible.
    self._user_name = 'Upstream'
    self._user_email = 'upstream-import@none'
    self._user_when = 1000000

    self.commit = None   # expected resulting commit id, if known

  def Clone(self):
    """Return a new importer sharing this one's project, URLs and
       remap rules (but not its per-run state)."""
    r = self.__class__()

    r.project = self.project
    for u in self._urls:
      r._urls.append(u)
    for p in self._remap:
      r._remap.append(_PathMap(r, p._old, p._new))

    return r

  def SetProject(self, project):
    self.project = project

  def SetVersion(self, version):
    self.version = version

  def AddUrl(self, url):
    self._urls.append(url)

  def SetParent(self, commit_hash):
    self.parent = commit_hash

  def SetCommit(self, commit_hash):
    self.commit = commit_hash

  def RemapPath(self, old, new, replace_version=True):
    # NOTE(review): replace_version is currently unused; %version%
    # expansion always happens inside _PathMap — confirm intent.
    self._remap.append(_PathMap(self, old, new))

  @property
  def TagName(self):
    # Build 'upstream/<version>' keeping only characters that are
    # safe in a git ref name; everything else is dropped.
    v = ''
    for c in self.version:
      if c >= '0' and c <= '9':
        v += c
      elif c >= 'A' and c <= 'Z':
        v += c
      elif c >= 'a' and c <= 'z':
        v += c
      elif c in ('-', '_', '.', '/', '+', '@'):
        v += c
    return 'upstream/%s' % v

  @property
  def PackageName(self):
    # Short package name: the project name minus any 'platform/'
    # prefix.
    n = self.project.name
    if n.startswith('platform/'):
      # This was not my finest moment...
      #
      n = n[len('platform/'):]
    return n

  def Import(self):
    """Download, unpack and commit the snapshot; return the commit id.

    Raises ImportError if fast-import fails or the imported commit
    does not match the id supplied via SetCommit.
    """
    # If the requested parent commit is not present locally we cannot
    # let fast-import reference it; instead graft it on afterwards.
    self._need_graft = False
    if self.parent:
      try:
        self.project.bare_git.cat_file('-e', self.parent)
      except GitError:
        self._need_graft = True

    gfi = GitCommand(self.project,
                     ['fast-import', '--force', '--quiet'],
                     bare = True,
                     provide_stdin = True)
    try:
      self._out = gfi.stdin

      try:
        self._UnpackFiles()
        self._MakeCommit()
        self._out.flush()
      finally:
        rc = gfi.Wait()
        if rc != 0:
          raise ImportError('fast-import failed')

      if self._need_graft:
        id = self._GraftCommit()
      else:
        id = self.project.bare_git.rev_parse('%s^0' % self._tempref)

      if self.commit and self.commit != id:
        raise ImportError('checksum mismatch: %s expected,'
                          ' %s imported' % (self.commit, id))

      self._MakeTag(id)
      return id
    finally:
      # Always clean up the scratch ref, even on failure.
      try:
        self.project.bare_git.DeleteRef(self._tempref)
      except GitError:
        pass

  def _PickUrl(self, failed):
    # Choose randomly among mirrors that have not yet failed,
    # expanding %version% in each candidate URL.
    u = map(lambda x: x.replace('%version%', self.version), self._urls)
    for f in failed:
      if f in u:
        u.remove(f)
    if len(u) == 0:
      return None
    return random.choice(u)

  def _OpenUrl(self):
    """Open one mirror URL, returning (fileobj, url).

    Tries mirrors until one succeeds; raises ImportError with a
    per-mirror failure summary once all have failed.
    """
    failed = {}
    while True:
      url = self._PickUrl(failed.keys())
      if url is None:
        why = 'Cannot download %s' % self.project.name

        if failed:
          why += ': one or more mirrors are down\n'
          bad_urls = list(failed.keys())
          bad_urls.sort()
          for url in bad_urls:
            why += ' %s: %s\n' % (url, failed[url])
        else:
          why += ': no mirror URLs'
        raise ImportError(why)

      print >>sys.stderr, "Getting %s ..." % url
      try:
        return urllib2.urlopen(url), url
      except urllib2.HTTPError, e:
        failed[url] = e.code
      except urllib2.URLError, e:
        failed[url] = e.reason[1]
      except OSError, e:
        failed[url] = e.strerror

  def _UnpackFiles(self):
    # Subclasses stream the archive contents through
    # _UnpackOneFile / _SetFileMode here.
    raise NotImplementedError

  def _NextMark(self):
    # fast-import marks are 1-based and strictly increasing.
    self._marks += 1
    return self._marks

  def _UnpackOneFile(self, mode, size, name, fd):
    """Emit one archive entry to fast-import as a blob and record it
       for the later commit."""
    if stat.S_ISDIR(mode): # directory
      return
    else:
      mode = self._CleanMode(mode, name)

    old_name = name
    name = self._CleanName(name)

    if stat.S_ISLNK(mode) and self._remap:
      # The link is relative to the old_name, and may need to
      # be rewritten according to our remap rules if it goes
      # up high enough in the tree structure.
      #
      dest = self._RewriteLink(fd.read(size), old_name, name)
      fd = StringIO.StringIO(dest)
      size = len(dest)

    fi = _File(mode, name, self._NextMark())

    self._out.write('blob\n')
    self._out.write('mark :%d\n' % fi.mark)
    self._out.write('data %d\n' % size)
    # Copy the content through in bounded chunks to limit memory use.
    while size > 0:
      n = min(2048, size)
      self._out.write(fd.read(n))
      size -= n
    self._out.write('\n')
    self._files[fi.name] = fi

  def _SetFileMode(self, name, mode):
    """Update the mode of a file already sent to fast-import."""
    if not stat.S_ISDIR(mode):
      mode = self._CleanMode(mode, name)
      name = self._CleanName(name)
      try:
        fi = self._files[name]
      except KeyError:
        raise ImportError('file %s was not unpacked' % name)
      fi.mode = mode

  def _RewriteLink(self, dest, relto_old, relto_new):
    """Rewrite a symlink target so it stays valid after path remap."""
    # Drop the last components of the symlink itself
    # as the dest is relative to the directory its in.
    #
    relto_old = _TrimPath(relto_old)
    relto_new = _TrimPath(relto_new)

    # Resolve the link to be absolute from the top of
    # the archive, so we can remap its destination.
    #
    while dest.find('/./') >= 0 or dest.find('//') >= 0:
      dest = dest.replace('/./', '/')
      dest = dest.replace('//', '/')

    if dest.startswith('../') or dest.find('/../') > 0:
      dest = _FoldPath('%s/%s' % (relto_old, dest))

    for pm in self._remap:
      if pm.Matches(dest):
        dest = pm.Apply(dest)
        break

    # Convert back to a path relative to the symlink's new directory
    # by climbing with '../' for each remaining component.
    dest, relto_new = _StripCommonPrefix(dest, relto_new)
    while relto_new:
      i = relto_new.find('/')
      if i > 0:
        relto_new = relto_new[i + 1:]
      else:
        relto_new = ''
      dest = '../' + dest
    return dest

  def _CleanMode(self, mode, name):
    # Normalize archive modes to the only modes git records.
    if stat.S_ISREG(mode): # regular file
      if (mode & 0111) == 0:
        return 0644
      else:
        return 0755
    elif stat.S_ISLNK(mode): # symlink
      return stat.S_IFLNK
    else:
      raise ImportError('invalid mode %o in %s' % (mode, name))

  def _CleanName(self, name):
    # Apply remap rules, strip leading slashes, and reject paths
    # that are empty or still contain relative components.
    old_name = name
    for pm in self._remap:
      if pm.Matches(name):
        name = pm.Apply(name)
        break
    while name.startswith('/'):
      name = name[1:]
    if not name:
      raise ImportError('path %s is empty after remap' % old_name)
    if name.find('/./') >= 0 or name.find('/../') >= 0:
      raise ImportError('path %s contains relative parts' % name)
    return name

  def _MakeCommit(self):
    """Emit the commit record referencing every unpacked blob."""
    msg = '%s %s\n' % (self.PackageName, self.version)

    self._out.write('commit %s\n' % self._tempref)
    self._out.write('committer %s <%s> %d +0000\n' % (
                    self._user_name,
                    self._user_email,
                    self._user_when))
    self._out.write('data %d\n' % len(msg))
    self._out.write(msg)
    self._out.write('\n')
    if self.parent and not self._need_graft:
      self._out.write('from %s^0\n' % self.parent)
    # deleteall ensures the commit contains exactly the files below.
    self._out.write('deleteall\n')

    for f in self._files.values():
      self._out.write('M %o :%d %s\n' % (f.mode, f.mark, f.name))
    self._out.write('\n')

  def _GraftCommit(self):
    """Rewrite the imported commit to record self.parent and register
       it in info/grafts; return the new commit id."""
    raw = self.project.bare_git.cat_file('commit', self._tempref)
    raw = raw.split("\n")
    # Replace any existing parent headers with the requested parent.
    while raw[1].startswith('parent '):
      del raw[1]
    raw.insert(1, 'parent %s' % self.parent)
    id = self._WriteObject('commit', "\n".join(raw))

    graft_file = os.path.join(self.project.gitdir, 'info/grafts')
    if os.path.exists(graft_file):
      graft_list = open(graft_file, 'rb').read().split("\n")
      if graft_list and graft_list[-1] == '':
        del graft_list[-1]
    else:
      graft_list = []

    exists = False
    for line in graft_list:
      if line == id:
        exists = True
        break

    if not exists:
      graft_list.append(id)
      graft_list.append('')
      fd = open(graft_file, 'wb')
      fd.write("\n".join(graft_list))
      fd.close()

    return id

  def _MakeTag(self, id):
    """Create an annotated tag (TagName) pointing at commit id."""
    name = self.TagName

    raw = []
    raw.append('object %s' % id)
    raw.append('type commit')
    raw.append('tag %s' % name)
    raw.append('tagger %s <%s> %d +0000' % (
               self._user_name,
               self._user_email,
               self._user_when))
    raw.append('')
    raw.append('%s %s\n' % (self.PackageName, self.version))

    tagid = self._WriteObject('tag', "\n".join(raw))
    self.project.bare_git.UpdateRef('refs/tags/%s' % name, tagid)

  def _WriteObject(self, type, data):
    """Hash data into the object database; return its object id."""
    wo = GitCommand(self.project,
                    ['hash-object', '-t', type, '-w', '--stdin'],
                    bare = True,
                    provide_stdin = True,
                    capture_stdout = True,
                    capture_stderr = True)
    wo.stdin.write(data)
    if wo.Wait() != 0:
      raise GitError('cannot create %s from (%s)' % (type, data))
    # hash-object prints the id followed by a newline; strip it.
    return wo.stdout[:-1]
| 355 | |||
| 356 | |||
| 357 | def _TrimPath(path): | ||
| 358 | i = path.rfind('/') | ||
| 359 | if i > 0: | ||
| 360 | path = path[0:i] | ||
| 361 | return '' | ||
| 362 | |||
| 363 | def _StripCommonPrefix(a, b): | ||
| 364 | while True: | ||
| 365 | ai = a.find('/') | ||
| 366 | bi = b.find('/') | ||
| 367 | if ai > 0 and bi > 0 and a[0:ai] == b[0:bi]: | ||
| 368 | a = a[ai + 1:] | ||
| 369 | b = b[bi + 1:] | ||
| 370 | else: | ||
| 371 | break | ||
| 372 | return a, b | ||
| 373 | |||
| 374 | def _FoldPath(path): | ||
| 375 | while True: | ||
| 376 | if path.startswith('../'): | ||
| 377 | return path | ||
| 378 | |||
| 379 | i = path.find('/../') | ||
| 380 | if i <= 0: | ||
| 381 | if path.startswith('/'): | ||
| 382 | return path[1:] | ||
| 383 | return path | ||
| 384 | |||
| 385 | lhs = path[0:i] | ||
| 386 | rhs = path[i + 4:] | ||
| 387 | |||
| 388 | i = lhs.rfind('/') | ||
| 389 | if i > 0: | ||
| 390 | path = lhs[0:i + 1] + rhs | ||
| 391 | else: | ||
| 392 | path = rhs | ||
| 393 | |||
| 394 | class _File(object): | ||
| 395 | def __init__(self, mode, name, mark): | ||
| 396 | self.mode = mode | ||
| 397 | self.name = name | ||
| 398 | self.mark = mark | ||
| 399 | |||
| 400 | |||
| 401 | class _PathMap(object): | ||
| 402 | def __init__(self, imp, old, new): | ||
| 403 | self._imp = imp | ||
| 404 | self._old = old | ||
| 405 | self._new = new | ||
| 406 | |||
| 407 | def _r(self, p): | ||
| 408 | return p.replace('%version%', self._imp.version) | ||
| 409 | |||
| 410 | @property | ||
| 411 | def old(self): | ||
| 412 | return self._r(self._old) | ||
| 413 | |||
| 414 | @property | ||
| 415 | def new(self): | ||
| 416 | return self._r(self._new) | ||
| 417 | |||
| 418 | def Matches(self, name): | ||
| 419 | return name.startswith(self.old) | ||
| 420 | |||
| 421 | def Apply(self, name): | ||
| 422 | return self.new + name[len(self.old):] | ||
diff --git a/import_tar.py b/import_tar.py new file mode 100644 index 00000000..d7ce14de --- /dev/null +++ b/import_tar.py | |||
| @@ -0,0 +1,206 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import bz2 | ||
| 17 | import stat | ||
| 18 | import tarfile | ||
| 19 | import zlib | ||
| 20 | import StringIO | ||
| 21 | |||
| 22 | from import_ext import ImportExternal | ||
| 23 | from error import ImportError | ||
| 24 | |||
class ImportTar(ImportExternal):
  """Streams a (optionally compressed) tar file from the network
     directly into a Project's Git repository.
  """
  @classmethod
  def CanAccept(cls, url):
    """Can this importer read and unpack the data stored at url?
    """
    # Accepted by file extension: gzip, bzip2 or uncompressed tar.
    if url.endswith('.tar.gz') or url.endswith('.tgz'):
      return True
    if url.endswith('.tar.bz2'):
      return True
    if url.endswith('.tar'):
      return True
    return False

  def _UnpackFiles(self):
    """Download the archive and feed every entry to _UnpackOneFile."""
    url_fd, url = self._OpenUrl()
    try:
      # Wrap the network stream in the matching streaming
      # decompressor (presumably helpers defined later in this file).
      if url.endswith('.tar.gz') or url.endswith('.tgz'):
        tar_fd = _Gzip(url_fd)
      elif url.endswith('.tar.bz2'):
        tar_fd = _Bzip2(url_fd)
      elif url.endswith('.tar'):
        tar_fd = _Raw(url_fd)
      else:
        raise ImportError('non-tar file extension: %s' % url)

      try:
        tar = tarfile.TarFile(name = url,
                              mode = 'r',
                              fileobj = tar_fd)
        try:
          for entry in tar:
            mode = entry.mode

            # Some archives record only permission bits; synthesize
            # the file-type bits from the tar entry type flags.
            if (mode & 0170000) == 0:
              if entry.isdir():
                mode |= stat.S_IFDIR
              elif entry.isfile() or entry.islnk(): # hard links as files
                mode |= stat.S_IFREG
              elif entry.issym():
                mode |= stat.S_IFLNK

            if stat.S_ISLNK(mode): # symlink
              # Symlink content is the link target itself.
              data_fd = StringIO.StringIO(entry.linkname)
              data_sz = len(entry.linkname)
            elif stat.S_ISDIR(mode): # directory
              data_fd = StringIO.StringIO('')
              data_sz = 0
            else:
              data_fd = tar.extractfile(entry)
              data_sz = entry.size

            self._UnpackOneFile(mode, data_sz, entry.name, data_fd)
        finally:
          tar.close()
      finally:
        tar_fd.close()
    finally:
      url_fd.close()
| 87 | |||
| 88 | |||
| 89 | class _DecompressStream(object): | ||
| 90 | """file like object to decompress a tar stream | ||
| 91 | """ | ||
| 92 | def __init__(self, fd): | ||
| 93 | self._fd = fd | ||
| 94 | self._pos = 0 | ||
| 95 | self._buf = None | ||
| 96 | |||
| 97 | def tell(self): | ||
| 98 | return self._pos | ||
| 99 | |||
| 100 | def seek(self, offset): | ||
| 101 | d = offset - self._pos | ||
| 102 | if d > 0: | ||
| 103 | self.read(d) | ||
| 104 | elif d == 0: | ||
| 105 | pass | ||
| 106 | else: | ||
| 107 | raise NotImplementedError, 'seek backwards' | ||
| 108 | |||
| 109 | def close(self): | ||
| 110 | self._fd = None | ||
| 111 | |||
| 112 | def read(self, size = -1): | ||
| 113 | if not self._fd: | ||
| 114 | raise EOFError, 'Reached EOF' | ||
| 115 | |||
| 116 | r = [] | ||
| 117 | try: | ||
| 118 | if size >= 0: | ||
| 119 | self._ReadChunk(r, size) | ||
| 120 | else: | ||
| 121 | while True: | ||
| 122 | self._ReadChunk(r, 2048) | ||
| 123 | except EOFError: | ||
| 124 | pass | ||
| 125 | |||
| 126 | if len(r) == 1: | ||
| 127 | r = r[0] | ||
| 128 | else: | ||
| 129 | r = ''.join(r) | ||
| 130 | self._pos += len(r) | ||
| 131 | return r | ||
| 132 | |||
| 133 | def _ReadChunk(self, r, size): | ||
| 134 | b = self._buf | ||
| 135 | try: | ||
| 136 | while size > 0: | ||
| 137 | if b is None or len(b) == 0: | ||
| 138 | b = self._Decompress(self._fd.read(2048)) | ||
| 139 | continue | ||
| 140 | |||
| 141 | use = min(size, len(b)) | ||
| 142 | r.append(b[:use]) | ||
| 143 | b = b[use:] | ||
| 144 | size -= use | ||
| 145 | finally: | ||
| 146 | self._buf = b | ||
| 147 | |||
| 148 | def _Decompress(self, b): | ||
| 149 | raise NotImplementedError, '_Decompress' | ||
| 150 | |||
| 151 | |||
class _Raw(_DecompressStream):
  """Pass-through stream for tar files that were never compressed.
  """
  def _Decompress(self, b):
    # The input already is plain tar data; hand it back untouched.
    return b
| 160 | |||
| 161 | |||
class _Bzip2(_DecompressStream):
  """Decompressing stream for .tar.bz2 input.
  """
  def __init__(self, fd):
    _DecompressStream.__init__(self, fd)
    self._decompressor = bz2.BZ2Decompressor()

  def _Decompress(self, b):
    return self._decompressor.decompress(b)
| 171 | |||
| 172 | |||
# gzip header flag bits (RFC 1952): header CRC16, extra field,
# original file name, and file comment, respectively.
_FHCRC, _FEXTRA, _FNAME, _FCOMMENT = 2, 4, 8, 16
class _Gzip(_DecompressStream):
  """file like object to decompress a .gz stream
  """
  def __init__(self, fd):
    _DecompressStream.__init__(self, fd)
    # Negative wbits: raw deflate data with no zlib header, which
    # is what remains once the gzip wrapper below is stripped.
    self._z = zlib.decompressobj(-zlib.MAX_WBITS)

    # Parse and discard the gzip member header (RFC 1952) so only
    # raw deflate data is left on fd for _Decompress.
    magic = fd.read(2)
    if magic != '\037\213':
      raise IOError, 'Not a gzipped file'

    method = ord(fd.read(1))
    if method != 8:  # 8 = deflate, the only method gzip defines
      raise IOError, 'Unknown compression method'

    flag = ord(fd.read(1))
    fd.read(6)  # skip mtime (4 bytes), extra flags (1), OS (1)

    if flag & _FEXTRA:
      # Extra field: 2-byte little-endian length, then payload.
      xlen = ord(fd.read(1))
      xlen += 256 * ord(fd.read(1))
      fd.read(xlen)
    if flag & _FNAME:
      # NUL-terminated original file name.
      while fd.read(1) != '\0':
        pass
    if flag & _FCOMMENT:
      # NUL-terminated comment.
      while fd.read(1) != '\0':
        pass
    if flag & _FHCRC:
      fd.read(2)  # header CRC16

  def _Decompress(self, b):
    return self._z.decompress(b)
diff --git a/import_zip.py b/import_zip.py new file mode 100644 index 00000000..08aff326 --- /dev/null +++ b/import_zip.py | |||
| @@ -0,0 +1,345 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import stat | ||
| 17 | import struct | ||
| 18 | import zlib | ||
| 19 | import cStringIO | ||
| 20 | |||
| 21 | from import_ext import ImportExternal | ||
| 22 | from error import ImportError | ||
| 23 | |||
class ImportZip(ImportExternal):
  """Streams a zip file from the network directly into a Project's
     Git repository.
  """
  @classmethod
  def CanAccept(cls, url):
    """Can this importer read and unpack the data stored at url?
    """
    if url.endswith('.zip') or url.endswith('.jar'):
      return True
    return False

  def _UnpackFiles(self):
    """Stream each local file record into the Git repository.

    Each entry's data is fully buffered so that a reversible
    CRLF->LF normalization can be attempted before storing.
    """
    url_fd, url = self._OpenUrl()
    try:
      if not self.__class__.CanAccept(url):
        raise ImportError('non-zip file extension: %s' % url)

      zip = _ZipFile(url_fd)
      for entry in zip.FileRecords():
        data = zip.Open(entry).read()
        sz = len(data)

        # Only normalize line endings when the conversion is
        # provably reversible (see _SafeCRLF below).
        if data and _SafeCRLF(data):
          data = data.replace('\r\n', '\n')
          sz = len(data)

        fd = cStringIO.StringIO(data)
        self._UnpackOneFile(entry.mode, sz, entry.name, fd)
        zip.Close(entry)

      # The central directory, which follows all file records,
      # carries the authoritative permission bits; fix up modes
      # after extraction.
      for entry in zip.CentralDirectory():
        self._SetFileMode(entry.name, entry.mode)

      zip.CheckTail()
    finally:
      url_fd.close()
| 61 | |||
| 62 | |||
| 63 | def _SafeCRLF(data): | ||
| 64 | """Is it reasonably safe to perform a CRLF->LF conversion? | ||
| 65 | |||
| 66 | If the stream contains a NUL byte it is likely binary, | ||
| 67 | and thus a CRLF->LF conversion may damage the stream. | ||
| 68 | |||
| 69 | If the only NUL is in the last position of the stream, | ||
| 70 | but it otherwise can do a CRLF<->LF conversion we do | ||
| 71 | the CRLF conversion anyway. At least one source ZIP | ||
| 72 | file has this structure in its source code. | ||
| 73 | |||
| 74 | If every occurrance of a CR and LF is paired up as a | ||
| 75 | CRLF pair then the conversion is safely bi-directional. | ||
| 76 | s/\r\n/\n/g == s/\n/\r\\n/g can convert between them. | ||
| 77 | """ | ||
| 78 | nul = data.find('\0') | ||
| 79 | if 0 <= nul and nul < (len(data) - 1): | ||
| 80 | return False | ||
| 81 | |||
| 82 | n_lf = 0 | ||
| 83 | last = 0 | ||
| 84 | while True: | ||
| 85 | lf = data.find('\n', last) | ||
| 86 | if lf < 0: | ||
| 87 | break | ||
| 88 | if lf == 0 or data[lf - 1] != '\r': | ||
| 89 | return False | ||
| 90 | last = lf + 1 | ||
| 91 | n_lf += 1 | ||
| 92 | return n_lf > 0 | ||
| 93 | |||
class _ZipFile(object):
  """Streaming iterator to parse a zip file on the fly.

  The archive layout is consumed strictly front to back: local
  file records first, then the central directory, then the end
  of central directory record.
  """
  def __init__(self, fd):
    self._fd = _UngetStream(fd)

  def FileRecords(self):
    """Iterate the local file records, in stream order.
    """
    return _FileIter(self._fd)

  def CentralDirectory(self):
    """Iterate the central directory records following the files.
    """
    return _CentIter(self._fd)

  def CheckTail(self):
    """Verify the stream ends with an end-of-central-directory record.
    """
    # 'sig' rather than 'type': don't shadow the builtin type().
    sig_buf = self._fd.read(4)
    sig = struct.unpack('<I', sig_buf)[0]
    if sig != 0x06054b50:  # end of central directory
      raise ImportError('zip record %x unsupported' % sig)

  def Open(self, entry):
    """Return a file like object yielding entry's uncompressed data.
    """
    if entry.is_compressed:
      return _InflateStream(self._fd)
    else:
      if entry.has_trailer:
        # A stored (uncompressed) entry whose size only appears in
        # the trailing data descriptor cannot be read from a
        # forward-only stream.
        raise ImportError('unable to extract streamed zip')
      return _FixedLengthStream(self._fd, entry.uncompressed_size)

  def Close(self, entry):
    """Consume the optional data descriptor that follows the entry.
    """
    if entry.has_trailer:
      sig = struct.unpack('<I', self._fd.read(4))[0]
      if sig == 0x08074b50:
        # Not a formal type marker, but commonly seen in zips
        # as the data descriptor signature.
        #
        struct.unpack('<3I', self._fd.read(12))
      else:
        # No signature for the data descriptor, so the 4 bytes
        # just read were the crc; read the remaining two size
        # fields out of the stream
        #
        self._fd.read(8)
| 133 | |||
| 134 | |||
class _FileIter(object):
  """Iterator over the local file records at the front of the zip.
  """
  def __init__(self, fd):
    self._fd = fd

  def __iter__(self):
    return self

  def next(self):
    fd = self._fd

    # Peek at the 4 byte signature; anything other than a local
    # file header means the file records are done, so push the
    # bytes back for the next parsing stage (central directory).
    type_buf = fd.read(4)
    type = struct.unpack('<I', type_buf)[0]

    if type != 0x04034b50:  # local file header
      fd.unread(type_buf)
      raise StopIteration()

    rec = _FileHeader(fd.read(26))
    rec.name = fd.read(rec.name_len)
    fd.read(rec.extra_len)  # extra field is not used; discard

    # Directories are stored with a trailing '/' on the name.
    if rec.name.endswith('/'):
      rec.name = rec.name[:-1]
      rec.mode = stat.S_IFDIR | 0777
    return rec
| 160 | |||
| 161 | |||
class _FileHeader(object):
  """Information about a single file in the archive.
     0 version needed to extract 2 bytes
     1 general purpose bit flag 2 bytes
     2 compression method 2 bytes
     3 last mod file time 2 bytes
     4 last mod file date 2 bytes
     5 crc-32 4 bytes
     6 compressed size 4 bytes
     7 uncompressed size 4 bytes
     8 file name length 2 bytes
     9 extra field length 2 bytes
  """
  def __init__(self, raw_bin):
    rec = struct.unpack('<5H3I2H', raw_bin)

    # rec[2] is the compression method; only deflate (8) and
    # stored/uncompressed (0) are supported here.
    if rec[2] == 8:
      self.is_compressed = True
    elif rec[2] == 0:
      self.is_compressed = False
    else:
      raise ImportError('unrecognized compression format')

    # Bit 3 of the general purpose flags: sizes and crc were not
    # known when the header was written, so a data descriptor
    # trails the file data.
    if rec[1] & (1 << 3):
      self.has_trailer = True
    else:
      self.has_trailer = False

    self.compressed_size = rec[6]
    self.uncompressed_size = rec[7]
    self.name_len = rec[8]
    self.extra_len = rec[9]
    # Local headers carry no permission bits; assume a plain file
    # until the central directory record says otherwise.
    self.mode = stat.S_IFREG | 0644
| 195 | |||
| 196 | |||
class _CentIter(object):
  """Iterator over the central directory records of the zip.
  """
  def __init__(self, fd):
    self._fd = fd

  def __iter__(self):
    return self

  def next(self):
    fd = self._fd

    # Peek at the 4 byte signature; anything other than a central
    # directory record ends this stage, so push the bytes back for
    # CheckTail to examine.
    type_buf = fd.read(4)
    type = struct.unpack('<I', type_buf)[0]

    if type != 0x02014b50:  # central directory
      fd.unread(type_buf)
      raise StopIteration()

    rec = _CentHeader(fd.read(42))
    rec.name = fd.read(rec.name_len)
    fd.read(rec.extra_len)    # not used; discard
    fd.read(rec.comment_len)  # not used; discard

    # Directories are stored with a trailing '/' on the name.
    if rec.name.endswith('/'):
      rec.name = rec.name[:-1]
      rec.mode = stat.S_IFDIR | 0777
    return rec
| 223 | |||
| 224 | |||
class _CentHeader(object):
  """Information about a single file in the archive.
     0 version made by 2 bytes
     1 version needed to extract 2 bytes
     2 general purpose bit flag 2 bytes
     3 compression method 2 bytes
     4 last mod file time 2 bytes
     5 last mod file date 2 bytes
     6 crc-32 4 bytes
     7 compressed size 4 bytes
     8 uncompressed size 4 bytes
     9 file name length 2 bytes
     10 extra field length 2 bytes
     11 file comment length 2 bytes
     12 disk number start 2 bytes
     13 internal file attributes 2 bytes
     14 external file attributes 4 bytes
     15 relative offset of local header 4 bytes
  """
  def __init__(self, raw_bin):
    rec = struct.unpack('<6H3I5H2I', raw_bin)
    self.name_len = rec[9]
    self.extra_len = rec[10]
    self.comment_len = rec[11]

    # rec[0] is "version made by"; its high byte identifies the
    # host system.  3 = UNIX, whose external attributes store the
    # st_mode permission bits in the high 16 bits of rec[14].
    if (rec[0] & 0xff00) == 0x0300:  # UNIX
      self.mode = rec[14] >> 16
    else:
      self.mode = stat.S_IFREG | 0644
| 254 | |||
| 255 | |||
class _UngetStream(object):
  """File like object to read and rewind a stream.

  unread() pushes bytes back so a later read() returns them first;
  subclasses override _Inflate to transform data as it is read.
  """
  def __init__(self, fd):
    self._fd = fd
    self._buf = None  # bytes pushed back by unread(), served first

  def read(self, size = -1):
    """Read up to size bytes (everything remaining if size < 0).
    """
    r = []
    try:
      if size >= 0:
        self._ReadChunk(r, size)
      else:
        while True:
          self._ReadChunk(r, 2048)
    except EOFError:
      pass

    if len(r) == 1:
      return r[0]
    return ''.join(r)

  def unread(self, buf):
    """Push buf back; the next read() returns it before new data.
    """
    b = self._buf
    if b is None or len(b) == 0:
      self._buf = buf
    else:
      # Pushed-back data goes in front of anything already queued.
      self._buf = buf + b

  def _ReadChunk(self, r, size):
    b = self._buf
    try:
      while size > 0:
        if b is None or len(b) == 0:
          b = self._Inflate(self._fd.read(2048))
          if not b:
            # Source (or the inflater) has no more data.
            raise EOFError()
          continue

        use = min(size, len(b))
        r.append(b[:use])
        b = b[use:]
        size -= use
    finally:
      # Keep whatever was not consumed for the next call.
      self._buf = b

  def _Inflate(self, b):
    # Identity transform; subclasses decompress or bound the data.
    return b
| 304 | |||
| 305 | |||
class _FixedLengthStream(_UngetStream):
  """File like object that yields exactly 'have' bytes of a stream.
  """
  def __init__(self, fd, have):
    _UngetStream.__init__(self, fd)
    self._have = have  # bytes still owed to the caller

  def _Inflate(self, b):
    remaining = self._have
    if remaining == 0:
      # Our slice is exhausted; hand the surplus back so the next
      # parser sees it, and report EOF.
      self._fd.unread(b)
      return None

    if len(b) > remaining:
      # Keep only our share; push the rest back upstream.
      self._fd.unread(b[remaining:])
      b = b[:remaining]
    self._have -= len(b)
    return b
| 324 | |||
| 325 | |||
class _InflateStream(_UngetStream):
  """Inflates the stream as it reads input.
  """
  def __init__(self, fd):
    _UngetStream.__init__(self, fd)
    # Negative wbits: raw deflate data with no zlib header, as
    # stored inside zip entries.
    self._z = zlib.decompressobj(-zlib.MAX_WBITS)

  def _Inflate(self, b):
    z = self._z
    if not z:
      # The deflate stream already ended; push the bytes back for
      # the next parser and signal EOF to _ReadChunk.
      self._fd.unread(b)
      return None

    b = z.decompress(b)
    if z.unconsumed_tail != '':
      self._fd.unread(z.unconsumed_tail)
    elif z.unused_data != '':
      # Bytes past the end of the deflate stream belong to the
      # surrounding zip structure; return them and stop inflating.
      self._fd.unread(z.unused_data)
      self._z = None
    return b
diff --git a/main.py b/main.py new file mode 100755 index 00000000..56092990 --- /dev/null +++ b/main.py | |||
| @@ -0,0 +1,198 @@ | |||
| 1 | #!/bin/sh | ||
| 2 | # | ||
| 3 | # Copyright (C) 2008 The Android Open Source Project | ||
| 4 | # | ||
| 5 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 6 | # you may not use this file except in compliance with the License. | ||
| 7 | # You may obtain a copy of the License at | ||
| 8 | # | ||
| 9 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 10 | # | ||
| 11 | # Unless required by applicable law or agreed to in writing, software | ||
| 12 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 14 | # See the License for the specific language governing permissions and | ||
| 15 | # limitations under the License. | ||
| 16 | |||
# sh/Python polyglot trampoline: /bin/sh (see the #! line above)
# parses the triple-quoted line below as an `exec` of python2.4 on
# this same file, appending the magic marker so the Python side can
# tell it was launched through the shell.  Python instead sees a
# no-op string expression and falls through to the code below.
magic='--calling-python-from-/bin/sh--'
"""exec" python2.4 -E "$0" "$@" """#$magic"
if __name__ == '__main__':
  import sys
  # Strip the marker the sh exec line appended to argv.
  if sys.argv[-1] == '#%s' % magic:
    del sys.argv[-1]
del magic
| 24 | |||
| 25 | import optparse | ||
| 26 | import os | ||
| 27 | import re | ||
| 28 | import sys | ||
| 29 | |||
| 30 | from command import InteractiveCommand, PagedCommand | ||
| 31 | from error import NoSuchProjectError | ||
| 32 | from error import RepoChangedException | ||
| 33 | from manifest import Manifest | ||
| 34 | from pager import RunPager | ||
| 35 | |||
| 36 | from subcmds import all as all_commands | ||
| 37 | |||
# Parser for the global options that may appear before the
# subcommand name, e.g. `repo --paginate sync`.
global_options = optparse.OptionParser(
                 usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
                 )
global_options.add_option('-p', '--paginate',
                          dest='pager', action='store_true',
                          help='display command output in the pager')
global_options.add_option('--no-pager',
                          dest='no_pager', action='store_true',
                          help='disable the pager')
| 47 | |||
class _Repo(object):
  """Dispatches a parsed command line to the matching subcommand.
  """
  def __init__(self, repodir):
    self.repodir = repodir        # path to the .repo/ directory
    self.commands = all_commands  # subcommand name -> Command

  def _Run(self, argv):
    # Split argv into [global options] name [command args]: the
    # first token not starting with '-' is the subcommand name.
    name = None
    glob = []

    for i in xrange(0, len(argv)):
      if not argv[i].startswith('-'):
        name = argv[i]
        if i > 0:
          glob = argv[:i]
        argv = argv[i + 1:]
        break
    if not name:
      # No subcommand given; treat everything seen as global
      # options and fall back to showing help.
      glob = argv
      name = 'help'
      argv = []
    gopts, gargs = global_options.parse_args(glob)

    try:
      cmd = self.commands[name]
    except KeyError:
      print >>sys.stderr,\
            "repo: '%s' is not a repo command.  See 'repo help'."\
            % name
      sys.exit(1)

    cmd.repodir = self.repodir
    cmd.manifest = Manifest(cmd.repodir)

    # Decide whether to page output: -p forces the pager on,
    # otherwise per-command git config decides, otherwise the
    # command type's own default applies.
    if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
      config = cmd.manifest.globalConfig
      if gopts.pager:
        use_pager = True
      else:
        use_pager = config.GetBoolean('pager.%s' % name)
        if use_pager is None:
          use_pager = isinstance(cmd, PagedCommand)
      if use_pager:
        RunPager(config)

    copts, cargs = cmd.OptionParser.parse_args(argv)
    try:
      cmd.Execute(copts, cargs)
    except NoSuchProjectError, e:
      if e.name:
        print >>sys.stderr, 'error: project %s not found' % e.name
      else:
        print >>sys.stderr, 'error: no project in current directory'
      sys.exit(1)
| 101 | |||
| 102 | def _MyWrapperPath(): | ||
| 103 | return os.path.join(os.path.dirname(__file__), 'repo') | ||
| 104 | |||
| 105 | def _CurrentWrapperVersion(): | ||
| 106 | VERSION = None | ||
| 107 | pat = re.compile(r'^VERSION *=') | ||
| 108 | fd = open(_MyWrapperPath()) | ||
| 109 | for line in fd: | ||
| 110 | if pat.match(line): | ||
| 111 | fd.close() | ||
| 112 | exec line | ||
| 113 | return VERSION | ||
| 114 | raise NameError, 'No VERSION in repo script' | ||
| 115 | |||
def _CheckWrapperVersion(ver, repo_path):
  """Compare the wrapper's reported version with the bundled one.

  ver is the --wrapper-version string sent by the wrapper;
  repo_path is where the user's wrapper lives (used in the
  upgrade hint).  Exits when the wrapper is too old to keep
  using; prints a warning when it is merely behind.
  """
  if not repo_path:
    repo_path = '~/bin/repo'

  if not ver:
    print >>sys.stderr, 'no --wrapper-version argument'
    sys.exit(1)

  exp = _CurrentWrapperVersion()
  ver = tuple(map(lambda x: int(x), ver.split('.')))
  if len(ver) == 1:
    # Very old wrappers sent a single number; normalize to (0, n).
    ver = (0, ver[0])

  if exp[0] > ver[0] or ver < (0, 4):
    # Wrapper is a major version behind (or ancient): refuse to run.
    exp_str = '.'.join(map(lambda x: str(x), exp))
    print >>sys.stderr, """
!!! A new repo command (%5s) is available.    !!!
!!! You must upgrade before you can continue: !!!

    cp %s %s
""" % (exp_str, _MyWrapperPath(), repo_path)
    sys.exit(1)

  if exp > ver:
    # Wrapper is only slightly behind: warn but continue.
    exp_str = '.'.join(map(lambda x: str(x), exp))
    print >>sys.stderr, """
... A new repo command (%5s) is available.
... You should upgrade soon:

    cp %s %s
""" % (exp_str, _MyWrapperPath(), repo_path)
| 147 | |||
def _CheckRepoDir(dir):
  """Exit with status 1 unless a --repo-dir path was supplied.
  """
  if not dir:
    print >>sys.stderr, 'no --repo-dir argument'
    sys.exit(1)
| 152 | |||
| 153 | def _PruneOptions(argv, opt): | ||
| 154 | i = 0 | ||
| 155 | while i < len(argv): | ||
| 156 | a = argv[i] | ||
| 157 | if a == '--': | ||
| 158 | break | ||
| 159 | if a.startswith('--'): | ||
| 160 | eq = a.find('=') | ||
| 161 | if eq > 0: | ||
| 162 | a = a[0:eq] | ||
| 163 | if not opt.has_option(a): | ||
| 164 | del argv[i] | ||
| 165 | continue | ||
| 166 | i += 1 | ||
| 167 | |||
def _Main(argv):
  """Parse the wrapper handshake options and dispatch to _Repo.
  """
  opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
  opt.add_option("--repo-dir", dest="repodir",
                 help="path to .repo/")
  opt.add_option("--wrapper-version", dest="wrapper_version",
                 help="version of the wrapper script")
  opt.add_option("--wrapper-path", dest="wrapper_path",
                 help="location of the wrapper script")
  # Wrappers of other versions may pass options this parser does
  # not know about; drop them rather than dying on them.
  _PruneOptions(argv, opt)
  opt, argv = opt.parse_args(argv)

  _CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
  _CheckRepoDir(opt.repodir)

  repo = _Repo(opt.repodir)
  try:
    repo._Run(argv)
  except KeyboardInterrupt:
    sys.exit(1)
  except RepoChangedException:
    # If the repo or manifest changed, re-exec ourselves.
    #
    try:
      os.execv(__file__, sys.argv)
    except OSError, e:
      print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
      print >>sys.stderr, 'fatal: %s' % e
      sys.exit(128)
| 196 | |||
# Normal execution enters here after the sh/python trampoline at
# the top of this file has re-run the script under python.
if __name__ == '__main__':
  _Main(sys.argv[1:])
diff --git a/manifest.py b/manifest.py new file mode 100644 index 00000000..45b0f9a5 --- /dev/null +++ b/manifest.py | |||
| @@ -0,0 +1,338 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | import xml.dom.minidom | ||
| 19 | |||
| 20 | from editor import Editor | ||
| 21 | from git_config import GitConfig, IsId | ||
| 22 | from import_tar import ImportTar | ||
| 23 | from import_zip import ImportZip | ||
| 24 | from project import Project, MetaProject, R_TAGS | ||
| 25 | from remote import Remote | ||
| 26 | from error import ManifestParseError | ||
| 27 | |||
| 28 | MANIFEST_FILE_NAME = 'manifest.xml' | ||
| 29 | |||
class _Default(object):
  """Project defaults within the manifest."""

  # Revision used by projects that do not name one themselves.
  revision = None
  # Remote used by projects that do not name one themselves.
  remote = None
| 35 | |||
| 36 | |||
| 37 | class Manifest(object): | ||
| 38 | """manages the repo configuration file""" | ||
| 39 | |||
  def __init__(self, repodir):
    """Bind to an existing .repo/ directory.

    repodir is the .repo/ path; the directory above it is the
    client working tree.  Parsing of the manifest itself is
    deferred until first use (see _Load).
    """
    self.repodir = os.path.abspath(repodir)
    self.topdir = os.path.dirname(self.repodir)
    self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)

    self.globalConfig = GitConfig.ForUser()
    Editor.globalConfig = self.globalConfig

    self.repoProject = MetaProject(self, 'repo',
      gitdir = os.path.join(repodir, 'repo/.git'),
      worktree = os.path.join(repodir, 'repo'))

    # Prefer the newer bare layout (manifests.git); fall back to
    # the legacy .git-inside-worktree layout only when it is the
    # one that exists.
    wt = os.path.join(repodir, 'manifests')
    gd_new = os.path.join(repodir, 'manifests.git')
    gd_old = os.path.join(wt, '.git')
    if os.path.exists(gd_new) or not os.path.exists(gd_old):
      gd = gd_new
    else:
      gd = gd_old
    self.manifestProject = MetaProject(self, 'manifests',
                                       gitdir = gd,
                                       worktree = wt)

    self._Unload()
| 64 | |||
  def Link(self, name):
    """Update the repo metadata to use a different manifest.

    name is a file inside the manifests project worktree.  The
    candidate is parsed first so a broken manifest cannot replace
    the working symlink; raises ManifestParseError on failure.
    """
    path = os.path.join(self.manifestProject.worktree, name)
    if not os.path.isfile(path):
      raise ManifestParseError('manifest %s not found' % name)

    old = self.manifestFile
    try:
      # Temporarily point at the candidate and force a parse to
      # validate it.
      self.manifestFile = path
      self._Unload()
      self._Load()
    finally:
      self.manifestFile = old

    try:
      if os.path.exists(self.manifestFile):
        os.remove(self.manifestFile)
      os.symlink('manifests/%s' % name, self.manifestFile)
    except OSError, e:
      raise ManifestParseError('cannot link manifest %s' % name)
| 86 | |||
  @property
  def projects(self):
    # Map of project name -> Project, parsed lazily on first use.
    self._Load()
    return self._projects
| 91 | |||
  @property
  def remotes(self):
    # Map of remote name -> Remote, parsed lazily on first use.
    self._Load()
    return self._remotes
| 96 | |||
  @property
  def default(self):
    # The manifest's <default> settings, parsed lazily on first use.
    self._Load()
    return self._default
| 101 | |||
  def _Unload(self):
    # Drop all parsed state; the next _Load() re-reads the file.
    self._loaded = False
    self._projects = {}
    self._remotes = {}
    self._default = None
    self.branch = None
| 108 | |||
  def _Load(self):
    # Parse lazily, and only once per load cycle (see _Unload).
    if not self._loaded:
      self._ParseManifest()
      self._loaded = True
| 113 | |||
  def _ParseManifest(self):
    """Parse the manifest XML into remotes, default and projects.
    """
    root = xml.dom.minidom.parse(self.manifestFile)
    if not root or not root.childNodes:
      raise ManifestParseError, \
            "no root node in %s" % \
            self.manifestFile

    config = root.childNodes[0]
    if config.nodeName != 'manifest':
      raise ManifestParseError, \
            "no <manifest> in %s" % \
            self.manifestFile

    self.branch = config.getAttribute('branch')
    if not self.branch:
      self.branch = 'default'

    # Three passes over the children: remotes first, then the
    # single <default> (which may name a remote), then projects
    # (which may rely on both).
    for node in config.childNodes:
      if node.nodeName == 'remote':
        remote = self._ParseRemote(node)
        if self._remotes.get(remote.name):
          raise ManifestParseError, \
                'duplicate remote %s in %s' % \
                (remote.name, self.manifestFile)
        self._remotes[remote.name] = remote

    for node in config.childNodes:
      if node.nodeName == 'default':
        if self._default is not None:
          raise ManifestParseError, \
                'duplicate default in %s' % \
                (self.manifestFile)
        self._default = self._ParseDefault(node)
    if self._default is None:
      self._default = _Default()

    for node in config.childNodes:
      if node.nodeName == 'project':
        project = self._ParseProject(node)
        if self._projects.get(project.name):
          raise ManifestParseError, \
                'duplicate project %s in %s' % \
                (project.name, self.manifestFile)
        self._projects[project.name] = project
| 158 | |||
| 159 | def _ParseRemote(self, node): | ||
| 160 | """ | ||
| 161 | reads a <remote> element from the manifest file | ||
| 162 | """ | ||
| 163 | name = self._reqatt(node, 'name') | ||
| 164 | fetch = self._reqatt(node, 'fetch') | ||
| 165 | review = node.getAttribute('review') | ||
| 166 | |||
| 167 | r = Remote(name=name, | ||
| 168 | fetch=fetch, | ||
| 169 | review=review) | ||
| 170 | |||
| 171 | for n in node.childNodes: | ||
| 172 | if n.nodeName == 'require': | ||
| 173 | r.requiredCommits.append(self._reqatt(n, 'commit')) | ||
| 174 | |||
| 175 | return r | ||
| 176 | |||
| 177 | def _ParseDefault(self, node): | ||
| 178 | """ | ||
| 179 | reads a <default> element from the manifest file | ||
| 180 | """ | ||
| 181 | d = _Default() | ||
| 182 | d.remote = self._get_remote(node) | ||
| 183 | d.revision = node.getAttribute('revision') | ||
| 184 | return d | ||
| 185 | |||
  def _ParseProject(self, node):
    """
    reads a <project> element from the manifest file
    """
    name = self._reqatt(node, 'name')

    # Remote: project attribute, falling back to <default>.
    remote = self._get_remote(node)
    if remote is None:
      remote = self._default.remote
    if remote is None:
      raise ManifestParseError, \
            "no remote for project %s within %s" % \
            (name, self.manifestFile)

    # Revision: project attribute, falling back to <default>.
    revision = node.getAttribute('revision')
    if not revision:
      revision = self._default.revision
    if not revision:
      raise ManifestParseError, \
            "no revision for project %s within %s" % \
            (name, self.manifestFile)

    # Checkout path defaults to the project name, and must stay
    # relative to the client top directory.
    path = node.getAttribute('path')
    if not path:
      path = name
    if path.startswith('/'):
      raise ManifestParseError, \
            "project %s path cannot be absolute in %s" % \
            (name, self.manifestFile)

    worktree = os.path.join(self.topdir, path)
    gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)

    project = Project(manifest = self,
                      name = name,
                      remote = remote,
                      gitdir = gitdir,
                      worktree = worktree,
                      relpath = path,
                      revision = revision)

    for n in node.childNodes:
      if n.nodeName == 'remote':
        # Per-project remotes must not collide with each other or
        # with the project's primary remote.
        r = self._ParseRemote(n)
        if project.extraRemotes.get(r.name) \
           or project.remote.name == r.name:
          raise ManifestParseError, \
                'duplicate remote %s in project %s in %s' % \
                (r.name, project.name, self.manifestFile)
        project.extraRemotes[r.name] = r
      elif n.nodeName == 'copyfile':
        self._ParseCopyFile(project, n)

    # <import> elements are gathered first, then parent commits
    # are resolved once all snapshot versions are known.
    to_resolve = []
    by_version = {}

    for n in node.childNodes:
      if n.nodeName == 'import':
        self._ParseImport(project, n, to_resolve, by_version)

    for pair in to_resolve:
      sn, pr = pair
      try:
        sn.SetParent(by_version[pr].commit)
      except KeyError:
        raise ManifestParseError, \
              'snapshot %s not in project %s in %s' % \
              (pr, project.name, self.manifestFile)

    return project
| 256 | |||
  def _ParseImport(self, project, import_node, to_resolve, by_version):
    """Parse an <import> element: mirrors, remaps and snapshots.

    Appends (snapshot, prior-version) pairs to to_resolve when a
    snapshot's 'prior' attribute is not a commit id, and records
    each snapshot in by_version keyed by its version string.
    """
    # The first <mirror> url selects which importer class handles
    # this archive format.
    first_url = None
    for node in import_node.childNodes:
      if node.nodeName == 'mirror':
        first_url = self._reqatt(node, 'url')
        break
    if not first_url:
      raise ManifestParseError, \
            'mirror url required for project %s in %s' % \
            (project.name, self.manifestFile)

    imp = None
    for cls in [ImportTar, ImportZip]:
      if cls.CanAccept(first_url):
        imp = cls()
        break
    if not imp:
      raise ManifestParseError, \
            'snapshot %s unsupported for project %s in %s' % \
            (first_url, project.name, self.manifestFile)

    imp.SetProject(project)

    # Configure path remapping rules and register every mirror url.
    for node in import_node.childNodes:
      if node.nodeName == 'remap':
        old = node.getAttribute('strip')
        new = node.getAttribute('insert')
        imp.RemapPath(old, new)

      elif node.nodeName == 'mirror':
        imp.AddUrl(self._reqatt(node, 'url'))

    # Each <snapshot> clones the configured importer for one version.
    for node in import_node.childNodes:
      if node.nodeName == 'snapshot':
        sn = imp.Clone()
        sn.SetVersion(self._reqatt(node, 'version'))
        sn.SetCommit(node.getAttribute('check'))

        pr = node.getAttribute('prior')
        if pr:
          if IsId(pr):
            sn.SetParent(pr)
          else:
            # Version name; resolved later by the caller.
            to_resolve.append((sn, pr))

        rev = R_TAGS + sn.TagName

        if rev in project.snapshots:
          raise ManifestParseError, \
                'duplicate snapshot %s for project %s in %s' % \
                (sn.version, project.name, self.manifestFile)
        project.snapshots[rev] = sn
        by_version[sn.version] = sn
| 310 | |||
| 311 | def _ParseCopyFile(self, project, node): | ||
| 312 | src = self._reqatt(node, 'src') | ||
| 313 | dest = self._reqatt(node, 'dest') | ||
| 314 | # src is project relative, and dest is relative to the top of the tree | ||
| 315 | project.AddCopyFile(src, os.path.join(self.topdir, dest)) | ||
| 316 | |||
  def _get_remote(self, node):
    """Resolve the optional 'remote' attribute of node.

    Returns None when the attribute is absent or empty; raises
    ManifestParseError when it names a remote that was never
    defined in this manifest.
    """
    name = node.getAttribute('remote')
    if not name:
      return None

    v = self._remotes.get(name)
    if not v:
      raise ManifestParseError, \
            "remote %s not defined in %s" % \
            (name, self.manifestFile)
    return v
| 328 | |||
  def _reqatt(self, node, attname):
    """
    reads a required attribute from the node.

    Raises ManifestParseError when the attribute is missing
    or empty.
    """
    v = node.getAttribute(attname)
    if not v:
      raise ManifestParseError, \
            "no %s in <%s> within %s" % \
            (attname, node.nodeName, self.manifestFile)
    return v
diff --git a/pager.py b/pager.py new file mode 100755 index 00000000..320131cd --- /dev/null +++ b/pager.py | |||
| @@ -0,0 +1,84 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import select | ||
| 18 | import sys | ||
| 19 | |||
# Set True in the forked child whose stdout/stderr have been
# redirected into the pager pipe by RunPager().
active = False
| 21 | |||
def RunPager(globalConfig):
  """Route this process's future output through the user's pager.

  The current process becomes the pager; a forked child returns
  from this call with stdout/stderr redirected into the pager's
  pipe, and sets the module-level 'active' flag.  No-op when fd 0
  is not a terminal or when the pager is '' / 'cat'.
  """
  global active

  if not os.isatty(0):
    return
  pager = _SelectPager(globalConfig)
  if pager == '' or pager == 'cat':
    return

  # This process turns into the pager; a child it forks will
  # do the real processing and output back to the pager. This
  # is necessary to keep the pager in control of the tty.
  #
  try:
    r, w = os.pipe()
    pid = os.fork()
    if not pid:
      # Child: write program output into the pipe, then resume
      # the caller's normal work.
      os.dup2(w, 1)
      os.dup2(w, 2)
      os.close(r)
      os.close(w)
      active = True
      return

    # Parent: read the pipe on stdin and exec the pager.
    os.dup2(r, 0)
    os.close(r)
    os.close(w)

    _BecomePager(pager)
  except Exception:
    sys.stderr.write("fatal: cannot start pager '%s'\n" % pager)
    # Bug fix: os.exit() does not exist (AttributeError);
    # sys.exit() terminates with status 255 as intended.
    sys.exit(255)
| 54 | |||
| 55 | def _SelectPager(globalConfig): | ||
| 56 | try: | ||
| 57 | return os.environ['GIT_PAGER'] | ||
| 58 | except KeyError: | ||
| 59 | pass | ||
| 60 | |||
| 61 | pager = globalConfig.GetString('core.pager') | ||
| 62 | if pager: | ||
| 63 | return pager | ||
| 64 | |||
| 65 | try: | ||
| 66 | return os.environ['PAGER'] | ||
| 67 | except KeyError: | ||
| 68 | pass | ||
| 69 | |||
| 70 | return 'less' | ||
| 71 | |||
def _BecomePager(pager):
  """Replace the current process image with the pager command."""
  # Delaying execution of the pager until we have output
  # ready works around a long-standing bug in popularly
  # available versions of 'less', a better 'more'.
  #
  # Block until stdin (the pipe from the child) is readable.
  a, b, c = select.select([0], [], [0])

  # Default options for 'less' when the user hasn't set any.
  os.environ['LESS'] = 'FRSX'

  try:
    os.execvp(pager, [pager])
  except OSError, e:
    # Fall back to the shell so compound pager commands
    # (e.g. 'less -e') still work.
    os.execv('/bin/sh', ['sh', '-c', pager])
diff --git a/project.py b/project.py new file mode 100644 index 00000000..7c0c58f9 --- /dev/null +++ b/project.py | |||
| @@ -0,0 +1,1058 @@ | |||
| 1 | # Copyright (C) 2008 The Android Open Source Project | ||
| 2 | # | ||
| 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 4 | # you may not use this file except in compliance with the License. | ||
| 5 | # You may obtain a copy of the License at | ||
| 6 | # | ||
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 8 | # | ||
| 9 | # Unless required by applicable law or agreed to in writing, software | ||
| 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 12 | # See the License for the specific language governing permissions and | ||
| 13 | # limitations under the License. | ||
| 14 | |||
| 15 | import filecmp | ||
| 16 | import os | ||
| 17 | import re | ||
| 18 | import shutil | ||
| 19 | import stat | ||
| 20 | import sys | ||
| 21 | import urllib2 | ||
| 22 | |||
| 23 | from color import Coloring | ||
| 24 | from git_command import GitCommand | ||
| 25 | from git_config import GitConfig, IsId | ||
| 26 | from gerrit_upload import UploadBundle | ||
| 27 | from error import GitError, ImportError, UploadError | ||
| 28 | from remote import Remote | ||
| 29 | from codereview import proto_client | ||
| 30 | |||
# Git ref namespaces used throughout this module.
HEAD = 'HEAD'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
# Records the last state uploaded for review (see UploadForReview).
R_PUB = 'refs/published/'
# 'm' pseudo-remote namespace; maintained by _InitMRef (not in view).
R_M = 'refs/remotes/m/'
| 36 | |||
| 37 | def _warn(fmt, *args): | ||
| 38 | msg = fmt % args | ||
| 39 | print >>sys.stderr, 'warn: %s' % msg | ||
| 40 | |||
| 41 | def _info(fmt, *args): | ||
| 42 | msg = fmt % args | ||
| 43 | print >>sys.stderr, 'info: %s' % msg | ||
| 44 | |||
def not_rev(r):
  """Return rev r prefixed with '^' (excluded in rev-list syntax)."""
  return '^%s' % r
| 47 | |||
class ReviewableBranch(object):
  """A local topic branch whose commits may be sent for review."""

  # Lazily populated by the commits property.
  _commit_cache = None

  def __init__(self, project, branch, base):
    self.project = project
    self.branch = branch
    self.base = base

  @property
  def name(self):
    return self.branch.name

  @property
  def commits(self):
    """One-line summaries of commits on this branch but not in base."""
    if self._commit_cache is None:
      args = ('--abbrev=8',
              '--abbrev-commit',
              '--pretty=oneline',
              '--reverse',
              '--date-order',
              not_rev(self.base),
              R_HEADS + self.name,
              '--')
      self._commit_cache = self.project.bare_git.rev_list(*args)
    return self._commit_cache

  @property
  def date(self):
    """Commit date string of the branch tip."""
    return self.project.bare_git.log('--pretty=format:%cd',
                                     '-n', '1',
                                     R_HEADS + self.name,
                                     '--')

  def UploadForReview(self):
    self.project.UploadForReview(self.name)

  @property
  def tip_url(self):
    """Gerrit review URL for the commit at the branch tip."""
    me = self.project.GetBranch(self.name)
    tip = self.project.bare_git.rev_parse(R_HEADS + self.name)
    return 'http://%s/r/%s' % (me.remote.review, tip[0:12])
| 90 | |||
| 91 | |||
class StatusColoring(Coloring):
  """Colored printers for the 'repo status' output."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'status')
    # Project and branch headers share the 'header' color slot.
    self.project = self.printer('header', attr = 'bold')
    self.branch = self.printer('header', attr = 'bold')
    self.nobranch = self.printer('nobranch', fg = 'red')

    # Per-path state colors.
    self.added = self.printer('added', fg = 'green')
    self.changed = self.printer('changed', fg = 'red')
    self.untracked = self.printer('untracked', fg = 'red')
| 102 | |||
| 103 | |||
class DiffColoring(Coloring):
  """Colored printers for the 'repo diff' output."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'diff')
    self.project = self.printer('header', attr = 'bold')
| 108 | |||
| 109 | |||
| 110 | class _CopyFile: | ||
| 111 | def __init__(self, src, dest): | ||
| 112 | self.src = src | ||
| 113 | self.dest = dest | ||
| 114 | |||
| 115 | def _Copy(self): | ||
| 116 | src = self.src | ||
| 117 | dest = self.dest | ||
| 118 | # copy file if it does not exist or is out of date | ||
| 119 | if not os.path.exists(dest) or not filecmp.cmp(src, dest): | ||
| 120 | try: | ||
| 121 | # remove existing file first, since it might be read-only | ||
| 122 | if os.path.exists(dest): | ||
| 123 | os.remove(dest) | ||
| 124 | shutil.copy(src, dest) | ||
| 125 | # make the file read-only | ||
| 126 | mode = os.stat(dest)[stat.ST_MODE] | ||
| 127 | mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) | ||
| 128 | os.chmod(dest, mode) | ||
| 129 | except IOError: | ||
| 130 | print >>sys.stderr, \ | ||
| 131 | 'error: Cannot copy file %s to %s' \ | ||
| 132 | % (src, dest) | ||
| 133 | |||
| 134 | |||
| 135 | class Project(object): | ||
  def __init__(self,
               manifest,
               name,
               remote,
               gitdir,
               worktree,
               relpath,
               revision):
    """One project checkout described by the manifest.

    Args:
      manifest: Manifest that owns this project.
      name: unique project name within the manifest.
      remote: primary Remote used to fetch this project.
      gitdir: absolute path of the bare repository.
      worktree: absolute path of the working directory.
      relpath: worktree path relative to the top of the client.
      revision: revision (branch, tag or id) this project tracks.
    """
    self.manifest = manifest
    self.name = name
    self.remote = remote
    self.gitdir = gitdir
    self.worktree = worktree
    self.relpath = relpath
    self.revision = revision
    self.snapshots = {}      # tag ref -> snapshot importer
    self.extraRemotes = {}   # name -> Remote, beyond self.remote
    self.copyfiles = []      # _CopyFile rules applied after sync
    self.config = GitConfig.ForRepository(
                    gitdir = self.gitdir,
                    defaults =  self.manifest.globalConfig)

    # Command wrappers: work tree git vs bare repository git.
    self.work_git = self._GitGetByExec(self, bare=False)
    self.bare_git = self._GitGetByExec(self, bare=True)
| 160 | |||
  @property
  def Exists(self):
    """True when the bare repository directory exists on disk."""
    return os.path.isdir(self.gitdir)
| 164 | |||
| 165 | @property | ||
| 166 | def CurrentBranch(self): | ||
| 167 | """Obtain the name of the currently checked out branch. | ||
| 168 | The branch name omits the 'refs/heads/' prefix. | ||
| 169 | None is returned if the project is on a detached HEAD. | ||
| 170 | """ | ||
| 171 | try: | ||
| 172 | b = self.work_git.GetHead() | ||
| 173 | except GitError: | ||
| 174 | return None | ||
| 175 | if b.startswith(R_HEADS): | ||
| 176 | return b[len(R_HEADS):] | ||
| 177 | return None | ||
| 178 | |||
| 179 | def IsDirty(self, consider_untracked=True): | ||
| 180 | """Is the working directory modified in some way? | ||
| 181 | """ | ||
| 182 | self.work_git.update_index('-q', | ||
| 183 | '--unmerged', | ||
| 184 | '--ignore-missing', | ||
| 185 | '--refresh') | ||
| 186 | if self.work_git.DiffZ('diff-index','-M','--cached',HEAD): | ||
| 187 | return True | ||
| 188 | if self.work_git.DiffZ('diff-files'): | ||
| 189 | return True | ||
| 190 | if consider_untracked and self.work_git.LsOthers(): | ||
| 191 | return True | ||
| 192 | return False | ||
| 193 | |||
  # Cached results of _LoadUserIdentity(); None until first use.
  _userident_name = None
  _userident_email = None
| 196 | |||
  @property
  def UserName(self):
    """Obtain the user's personal name.

    Parsed lazily from 'git var GIT_COMMITTER_IDENT'.
    """
    if self._userident_name is None:
      self._LoadUserIdentity()
    return self._userident_name
| 204 | |||
  @property
  def UserEmail(self):
    """Obtain the user's email address.  This is very likely
    to be their Gerrit login.

    Parsed lazily from 'git var GIT_COMMITTER_IDENT'.
    """
    if self._userident_email is None:
      self._LoadUserIdentity()
    return self._userident_email
| 213 | |||
| 214 | def _LoadUserIdentity(self): | ||
| 215 | u = self.bare_git.var('GIT_COMMITTER_IDENT') | ||
| 216 | m = re.compile("^(.*) <([^>]*)> ").match(u) | ||
| 217 | if m: | ||
| 218 | self._userident_name = m.group(1) | ||
| 219 | self._userident_email = m.group(2) | ||
| 220 | else: | ||
| 221 | self._userident_name = '' | ||
| 222 | self._userident_email = '' | ||
| 223 | |||
  def GetRemote(self, name):
    """Get the configuration for a single remote.

    Looked up in this project's git configuration.
    """
    return self.config.GetRemote(name)
| 228 | |||
  def GetBranch(self, name):
    """Get the configuration for a single branch.

    Looked up in this project's git configuration.
    """
    return self.config.GetBranch(name)
| 233 | |||
| 234 | |||
| 235 | ## Status Display ## | ||
| 236 | |||
  def PrintWorkTreeStatus(self):
    """Prints the status of the repository to stdout.
    """
    if not os.path.isdir(self.worktree):
      print ''
      print 'project %s/' % self.relpath
      print '  missing (run "repo sync")'
      return

    # Refresh stat info so diff-index/diff-files are accurate.
    self.work_git.update_index('-q',
                               '--unmerged',
                               '--ignore-missing',
                               '--refresh')
    di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
    df = self.work_git.DiffZ('diff-files')
    do = self.work_git.LsOthers()
    if not di and not df and not do:
      # Nothing staged, modified or untracked: stay silent.
      return

    out = StatusColoring(self.config)
    out.project('project %-40s', self.relpath + '/')

    branch = self.CurrentBranch
    if branch is None:
      out.nobranch('(*** NO BRANCH ***)')
    else:
      out.branch('branch %s', branch)
    out.nl()

    # Merge the three path sources into one sorted, unique list.
    paths = list()
    paths.extend(di.keys())
    paths.extend(df.keys())
    paths.extend(do)

    paths = list(set(paths))
    paths.sort()

    for p in paths:
      try: i = di[p]
      except KeyError: i = None

      try: f = df[p]
      except KeyError: f = None

      # Two-letter state: index status (upper), worktree (lower).
      if i: i_status = i.status.upper()
      else: i_status = '-'

      if f: f_status = f.status.lower()
      else: f_status = '-'

      if i and i.src_path:
        line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
                                           i.src_path, p, i.level)
      else:
        line = ' %s%s\t%s' % (i_status, f_status, p)

      if i and not f:
        out.added('%s', line)
      elif (i and f) or (not i and f):
        out.changed('%s', line)
      elif not i and not f:
        out.untracked('%s', line)
      else:
        out.write('%s', line)
      out.nl()
| 302 | |||
  def PrintWorkTreeDiff(self):
    """Prints the diff of the work tree relative to HEAD to stdout.
    """
    out = DiffColoring(self.config)
    cmd = ['diff']
    if out.is_on:
      cmd.append('--color')
    cmd.append(HEAD)
    cmd.append('--')
    p = GitCommand(self,
                   cmd,
                   capture_stdout = True,
                   capture_stderr = True)
    has_diff = False
    for line in p.process.stdout:
      # Print the project banner once, before the first diff line.
      if not has_diff:
        out.nl()
        out.project('project %s/' % self.relpath)
        out.nl()
        has_diff = True
      print line[:-1]
    p.Wait()
| 325 | |||
| 326 | |||
| 327 | ## Publish / Upload ## | ||
| 328 | |||
| 329 | def WasPublished(self, branch): | ||
| 330 | """Was the branch published (uploaded) for code review? | ||
| 331 | If so, returns the SHA-1 hash of the last published | ||
| 332 | state for the branch. | ||
| 333 | """ | ||
| 334 | try: | ||
| 335 | return self.bare_git.rev_parse(R_PUB + branch) | ||
| 336 | except GitError: | ||
| 337 | return None | ||
| 338 | |||
| 339 | def CleanPublishedCache(self): | ||
| 340 | """Prunes any stale published refs. | ||
| 341 | """ | ||
| 342 | heads = set() | ||
| 343 | canrm = {} | ||
| 344 | for name, id in self._allrefs.iteritems(): | ||
| 345 | if name.startswith(R_HEADS): | ||
| 346 | heads.add(name) | ||
| 347 | elif name.startswith(R_PUB): | ||
| 348 | canrm[name] = id | ||
| 349 | |||
| 350 | for name, id in canrm.iteritems(): | ||
| 351 | n = name[len(R_PUB):] | ||
| 352 | if R_HEADS + n not in heads: | ||
| 353 | self.bare_git.DeleteRef(name, id) | ||
| 354 | |||
  def GetUploadableBranches(self):
    """List any branches which can be uploaded for review.
    """
    heads = {}
    pubed = {}

    for name, id in self._allrefs.iteritems():
      if name.startswith(R_HEADS):
        heads[name[len(R_HEADS):]] = id
      elif name.startswith(R_PUB):
        pubed[name[len(R_PUB):]] = id

    ready = []
    for branch, id in heads.iteritems():
      # Skip branches whose tip is exactly what was last published.
      if branch in pubed and pubed[branch] == id:
        continue

      # Only branches that track an upstream and actually have
      # unmerged commits are uploadable.
      branch = self.GetBranch(branch)
      base = branch.LocalMerge
      if branch.LocalMerge:
        rb = ReviewableBranch(self, branch, base)
        if rb.commits:
          ready.append(rb)
    return ready
| 379 | |||
  def UploadForReview(self, branch=None):
    """Uploads the named branch for code review.

    Uses the current branch when branch is None.  Raises
    GitError when no branch, tracking config, or review url is
    available, and UploadError when the upload itself fails.
    """
    if branch is None:
      branch = self.CurrentBranch
    if branch is None:
      raise GitError('not currently on a branch')

    branch = self.GetBranch(branch)
    if not branch.LocalMerge:
      raise GitError('branch %s does not track a remote' % branch.name)
    if not branch.remote.review:
      raise GitError('remote %s has no review url' % branch.remote.name)

    dest_branch = branch.merge
    if not dest_branch.startswith(R_HEADS):
      dest_branch = R_HEADS + dest_branch

    # Every ref the remote writes locally is a base; the bundle
    # then carries only commits the server does not already have.
    base_list = []
    for name, id in self._allrefs.iteritems():
      if branch.remote.WritesTo(name):
        base_list.append(not_rev(name))
    if not base_list:
      raise GitError('no base refs, cannot upload %s' % branch.name)

    print >>sys.stderr, ''
    _info("Uploading %s to %s:", branch.name, self.name)
    try:
      UploadBundle(project = self,
                   server = branch.remote.review,
                   email = self.UserEmail,
                   dest_project = self.name,
                   dest_branch = dest_branch,
                   src_branch = R_HEADS + branch.name,
                   bases = base_list)
    except proto_client.ClientLoginError:
      raise UploadError('Login failure')
    except urllib2.HTTPError, e:
      raise UploadError('HTTP error %d' % e.code)

    # Record the published state so later syncs won't rewrite it.
    msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
    self.bare_git.UpdateRef(R_PUB + branch.name,
                            R_HEADS + branch.name,
                            message = msg)
| 424 | |||
| 425 | |||
| 426 | ## Sync ## | ||
| 427 | |||
  def Sync_NetworkHalf(self):
    """Perform only the network IO portion of the sync process.
    Local working directory/branch state is not affected.

    Returns True on success, False when any fetch/import fails.
    """
    if not self.Exists:
      print >>sys.stderr
      print >>sys.stderr, 'Initializing project %s ...' % self.name
      self._InitGitDir()
    self._InitRemote()
    # Extra remotes and snapshot archives are brought in before
    # the primary remote fetch.
    for r in self.extraRemotes.values():
      if not self._RemoteFetch(r.name):
        return False
    if not self._SnapshotDownload():
      return False
    if not self._RemoteFetch():
      return False
    self._InitMRef()
    return True
| 446 | |||
| 447 | def _CopyFiles(self): | ||
| 448 | for file in self.copyfiles: | ||
| 449 | file._Copy() | ||
| 450 | |||
  def Sync_LocalHalf(self):
    """Perform only the local IO portion of the sync process.
    Network access is not required.

    Return:
      True: the sync was successful
      False: the sync requires user input
    """
    self._InitWorkTree()
    self.CleanPublishedCache()

    rem = self.GetRemote(self.remote.name)
    rev = rem.ToLocal(self.revision)
    branch = self.CurrentBranch

    if branch is None:
      # Currently on a detached HEAD.  The user is assumed to
      # not have any local modifications worth worrying about.
      #
      lost = self._revlist(not_rev(rev), HEAD)
      if lost:
        _info("[%s] Discarding %d commits", self.name, len(lost))
      try:
        self._Checkout(rev, quiet=True)
      except GitError:
        return False
      self._CopyFiles()
      return True

    branch = self.GetBranch(branch)
    merge = branch.LocalMerge

    if not merge:
      # The current branch has no tracking configuration.
      # Jump off it to a deatched HEAD.
      #
      _info("[%s] Leaving %s"
            " (does not track any upstream)",
            self.name,
            branch.name)
      try:
        self._Checkout(rev, quiet=True)
      except GitError:
        return False
      self._CopyFiles()
      return True

    upstream_gain = self._revlist(not_rev(HEAD), rev)
    pub = self.WasPublished(branch.name)
    if pub:
      # Punt whenever previously published commits are not
      # yet merged upstream; rewriting them would break review.
      not_merged = self._revlist(not_rev(rev), pub)
      if not_merged:
        if upstream_gain:
          # The user has published this branch and some of those
          # commits are not yet merged upstream.  We do not want
          # to rewrite the published commits so we punt.
          #
          _info("[%s] Branch %s is published,"
                " but is now %d commits behind.",
                self.name, branch.name, len(upstream_gain))
          _info("[%s] Consider merging or rebasing the"
                " unpublished commits.", self.name)
        return True

    if merge == rev:
      # Tracking target unchanged; look up where it used to
      # point via the reflog in case upstream rebased.
      try:
        old_merge = self.bare_git.rev_parse('%s@{1}' % merge)
      except GitError:
        old_merge = merge
    else:
      # The upstream switched on us.  Time to cross our fingers
      # and pray that the old upstream also wasn't in the habit
      # of rebasing itself.
      #
      _info("[%s] Manifest switched from %s to %s",
            self.name, merge, rev)
      old_merge = merge

    if rev == old_merge:
      upstream_lost = []
    else:
      upstream_lost = self._revlist(not_rev(rev), old_merge)

    if not upstream_lost and not upstream_gain:
      # Trivially no changes caused by the upstream.
      #
      return True

    if self.IsDirty(consider_untracked=False):
      _warn('[%s] commit (or discard) uncommitted changes'
            ' before sync', self.name)
      return False

    if upstream_lost:
      # Upstream rebased.  Not everything in HEAD
      # may have been caused by the user.
      #
      _info("[%s] Discarding %d commits removed from upstream",
            self.name, len(upstream_lost))

    # Retarget the branch's tracking config at the new revision.
    branch.remote = rem
    branch.merge = self.revision
    branch.Save()

    # Rebase local work, hard-reset when there is none but the
    # upstream rebased, else simply fast-forward.
    my_changes = self._revlist(not_rev(old_merge), HEAD)
    if my_changes:
      try:
        self._Rebase(upstream = old_merge, onto = rev)
      except GitError:
        return False
    elif upstream_lost:
      try:
        self._ResetHard(rev)
      except GitError:
        return False
    else:
      try:
        self._FastForward(rev)
      except GitError:
        return False

    self._CopyFiles()
    return True
| 574 | |||
  def _SnapshotDownload(self):
    """Import any snapshot archives whose tags are not yet local.

    Returns True on success (or nothing to do); False when an
    import or the follow-up repack fails.
    """
    if self.snapshots:
      have = set(self._allrefs.keys())
      need = []

      # A snapshot is needed when its tag ref is absent locally.
      for tag, sn in self.snapshots.iteritems():
        if tag not in have:
          need.append(sn)

      if need:
        print >>sys.stderr, """
*** Downloading source(s) from a mirror site. ***
*** If the network hangs, kill and restart repo. ***
"""
        for sn in need:
          try:
            sn.Import()
          except ImportError, e:
            print >>sys.stderr, \
                  'error: Cannot import %s: %s' \
                  % (self.name, e)
            return False
        # Consolidate the freshly imported objects into one pack.
        cmd = ['repack', '-a', '-d', '-f', '-l']
        if GitCommand(self, cmd, bare = True).Wait() != 0:
          return False
    return True
| 601 | |||
| 602 | def AddCopyFile(self, src, dest): | ||
| 603 | # dest should already be an absolute path, but src is project relative | ||
| 604 | # make src an absolute path | ||
| 605 | src = os.path.join(self.worktree, src) | ||
| 606 | self.copyfiles.append(_CopyFile(src, dest)) | ||
| 607 | |||
| 608 | |||
| 609 | ## Branch Management ## | ||
| 610 | |||
| 611 | def StartBranch(self, name): | ||
| 612 | """Create a new branch off the manifest's revision. | ||
| 613 | """ | ||
| 614 | branch = self.GetBranch(name) | ||
| 615 | branch.remote = self.GetRemote(self.remote.name) | ||
| 616 | branch.merge = self.revision | ||
| 617 | |||
| 618 | rev = branch.LocalMerge | ||
| 619 | cmd = ['checkout', '-b', branch.name, rev] | ||
| 620 | if GitCommand(self, cmd).Wait() == 0: | ||
| 621 | branch.Save() | ||
| 622 | else: | ||
| 623 | raise GitError('%s checkout %s ' % (self.name, rev)) | ||
| 624 | |||
  def PruneHeads(self):
    """Prune any topic branches already merged into upstream.

    Returns the list of branches (as ReviewableBranch) that
    could NOT be deleted because they still carry work.
    """
    cb = self.CurrentBranch
    kill = []
    # Every local branch except the checked-out one is a candidate.
    for name in self._allrefs.keys():
      if name.startswith(R_HEADS):
        name = name[len(R_HEADS):]
        if cb is None or name != cb:
          kill.append(name)

    # The current branch can also go when it is clean and has no
    # commits on either side of the upstream revision.
    rev = self.GetRemote(self.remote.name).ToLocal(self.revision)
    if cb is not None \
       and not self._revlist(HEAD + '...' + rev) \
       and not self.IsDirty(consider_untracked = False):
      self.work_git.DetachHead(HEAD)
      kill.append(cb)

    deleted = set()
    if kill:
      try:
        old = self.bare_git.GetHead()
      except GitError:
        old = 'refs/heads/please_never_use_this_as_a_branch_name'

      # 'git branch -d' refuses to delete the checked-out branch,
      # so temporarily detach the bare repo's HEAD, then restore it.
      rm_re = re.compile(r"^Deleted branch (.*)\.$")
      try:
        self.bare_git.DetachHead(rev)

        b = ['branch', '-d']
        b.extend(kill)
        b = GitCommand(self, b, bare=True,
                       capture_stdout=True,
                       capture_stderr=True)
        b.Wait()
      finally:
        self.bare_git.SetHead(old)

      # Parse which branches git actually deleted ('-d' skips
      # branches that are not fully merged).
      for line in b.stdout.split("\n"):
        m = rm_re.match(line)
        if m:
          deleted.add(m.group(1))

      if deleted:
        self.CleanPublishedCache()

    if cb and cb not in kill:
      kill.append(cb)
    kill.sort()

    # Whatever survived deletion is reported back to the caller.
    kept = []
    for branch in kill:
      if branch not in deleted:
        branch = self.GetBranch(branch)
        base = branch.LocalMerge
        if not base:
          base = rev
        kept.append(ReviewableBranch(self, branch, base))
    return kept
| 684 | |||
| 685 | |||
| 686 | ## Direct Git Commands ## | ||
| 687 | |||
  def _RemoteFetch(self, name=None):
    """Fetch from the named remote (default: this project's remote).

    Returns True if the fetch completed successfully.
    """
    if not name:
      name = self.remote.name

    # When extra remotes or snapshots may later supply missing
    # objects, filter the expected 'unable to find' noise out of
    # git's stderr instead of showing it to the user.
    hide_errors = False
    if self.extraRemotes or self.snapshots:
      hide_errors = True

    proc = GitCommand(self,
                      ['fetch', name],
                      bare = True,
                      capture_stderr = hide_errors)
    if hide_errors:
      err = proc.process.stderr.fileno()
      buf = ''
      while True:
        b = os.read(err, 256)
        if b:
          buf += b
        while buf:
          r = buf.find('remote: error: unable to find ')
          if r >= 0:
            # Discard the entire line carrying the expected error.
            lf = buf.find('\n')
            if lf < 0:
              break   # line incomplete; wait for more data
            buf = buf[lf + 1:]
            continue

          # Forward progress output (\r terminated) to our stderr.
          cr = buf.find('\r')
          if cr < 0:
            break
          os.write(2, buf[0:cr + 1])
          buf = buf[cr + 1:]
        if not b:
          # EOF: flush whatever is left and stop.
          if buf:
            os.write(2, buf)
          break
    return proc.Wait() == 0
| 726 | |||
| 727 | def _Checkout(self, rev, quiet=False): | ||
| 728 | cmd = ['checkout'] | ||
| 729 | if quiet: | ||
| 730 | cmd.append('-q') | ||
| 731 | cmd.append(rev) | ||
| 732 | cmd.append('--') | ||
| 733 | if GitCommand(self, cmd).Wait() != 0: | ||
| 734 | if self._allrefs: | ||
| 735 | raise GitError('%s checkout %s ' % (self.name, rev)) | ||
| 736 | |||
| 737 | def _ResetHard(self, rev, quiet=True): | ||
| 738 | cmd = ['reset', '--hard'] | ||
| 739 | if quiet: | ||
| 740 | cmd.append('-q') | ||
| 741 | cmd.append(rev) | ||
| 742 | if GitCommand(self, cmd).Wait() != 0: | ||
| 743 | raise GitError('%s reset --hard %s ' % (self.name, rev)) | ||
| 744 | |||
| 745 | def _Rebase(self, upstream, onto = None): | ||
| 746 | cmd = ['rebase', '-i'] | ||
| 747 | if onto is not None: | ||
| 748 | cmd.extend(['--onto', onto]) | ||
| 749 | cmd.append(upstream) | ||
| 750 | if GitCommand(self, cmd, disable_editor=True).Wait() != 0: | ||
| 751 | raise GitError('%s rebase %s ' % (self.name, upstream)) | ||
| 752 | |||
| 753 | def _FastForward(self, head): | ||
| 754 | cmd = ['merge', head] | ||
| 755 | if GitCommand(self, cmd).Wait() != 0: | ||
| 756 | raise GitError('%s merge %s ' % (self.name, head)) | ||
| 757 | |||
  def _InitGitDir(self):
    """Create and configure the bare git repository, if missing."""
    if not os.path.exists(self.gitdir):
      os.makedirs(self.gitdir)
      self.bare_git.init()
      # NOTE(review): presumably SetString(key, None) unsets the
      # core.bare entry `git init` wrote -- confirm in GitConfig.
      self.config.SetString('core.bare', None)

      # Remove the sample hooks git installed by default.
      hooks = self._gitdir_path('hooks')
      for old_hook in os.listdir(hooks):
        os.remove(os.path.join(hooks, old_hook))

      # TODO(sop) install custom repo hooks

      # Copy the user's identity from the manifest project's config.
      m = self.manifest.manifestProject.config
      for key in ['user.name', 'user.email']:
        if m.Has(key, include_defaults = False):
          self.config.SetString(key, m.GetString(key))
| 774 | |||
| 775 | def _InitRemote(self): | ||
| 776 | if self.remote.fetchUrl: | ||
| 777 | remote = self.GetRemote(self.remote.name) | ||
| 778 | |||
| 779 | url = self.remote.fetchUrl | ||
| 780 | while url.endswith('/'): | ||
| 781 | url = url[:-1] | ||
| 782 | url += '/%s.git' % self.name | ||
| 783 | remote.url = url | ||
| 784 | remote.review = self.remote.reviewUrl | ||
| 785 | |||
| 786 | remote.ResetFetch() | ||
| 787 | remote.Save() | ||
| 788 | |||
| 789 | for r in self.extraRemotes.values(): | ||
| 790 | remote = self.GetRemote(r.name) | ||
| 791 | remote.url = r.fetchUrl | ||
| 792 | remote.review = r.reviewUrl | ||
| 793 | remote.ResetFetch() | ||
| 794 | remote.Save() | ||
| 795 | |||
| 796 | def _InitMRef(self): | ||
| 797 | if self.manifest.branch: | ||
| 798 | msg = 'manifest set to %s' % self.revision | ||
| 799 | ref = R_M + self.manifest.branch | ||
| 800 | |||
| 801 | if IsId(self.revision): | ||
| 802 | dst = self.revision + '^0', | ||
| 803 | self.bare_git.UpdateRef(ref, dst, message = msg, detach = True) | ||
| 804 | else: | ||
| 805 | remote = self.GetRemote(self.remote.name) | ||
| 806 | dst = remote.ToLocal(self.revision) | ||
| 807 | self.bare_git.symbolic_ref('-m', msg, ref, dst) | ||
| 808 | |||
  def _InitWorkTree(self):
    """Create the project work tree, sharing state with the bare repo.

    Builds worktree/.git as a directory of symlinks back into gitdir,
    writes HEAD at the manifest revision, then populates the files
    with `git read-tree`.
    """
    dotgit = os.path.join(self.worktree, '.git')
    if not os.path.exists(dotgit):
      os.makedirs(dotgit)

      # Longest common ancestor directory of gitdir and dotgit.
      topdir = os.path.commonprefix([self.gitdir, dotgit])
      if topdir.endswith('/'):
        topdir = topdir[:-1]
      else:
        # commonprefix is character based; back up to a whole
        # directory component.
        topdir = os.path.dirname(topdir)

      # Relative path from dotgit up to topdir and back down into
      # gitdir, so the links survive relocating the whole tree.
      tmpdir = dotgit
      relgit = ''
      while topdir != tmpdir:
        relgit += '../'
        tmpdir = os.path.dirname(tmpdir)
      relgit += self.gitdir[len(topdir) + 1:]

      for name in ['config',
                   'description',
                   'hooks',
                   'info',
                   'logs',
                   'objects',
                   'packed-refs',
                   'refs',
                   'rr-cache',
                   'svn']:
        os.symlink(os.path.join(relgit, name),
                   os.path.join(dotgit, name))

      # Start the work tree detached at the manifest revision.
      rev = self.GetRemote(self.remote.name).ToLocal(self.revision)
      rev = self.bare_git.rev_parse('%s^0' % rev)

      f = open(os.path.join(dotgit, HEAD), 'wb')
      f.write("%s\n" % rev)
      f.close()

      cmd = ['read-tree', '--reset', '-u']
      cmd.append('-v')
      cmd.append('HEAD')
      if GitCommand(self, cmd).Wait() != 0:
        raise GitError("cannot initialize work tree")
| 852 | |||
| 853 | def _gitdir_path(self, path): | ||
| 854 | return os.path.join(self.gitdir, path) | ||
| 855 | |||
| 856 | def _revlist(self, *args): | ||
| 857 | cmd = [] | ||
| 858 | cmd.extend(args) | ||
| 859 | cmd.append('--') | ||
| 860 | return self.work_git.rev_list(*args) | ||
| 861 | |||
  @property
  def _allrefs(self):
    # Mapping of refname -> object id for every ref in the bare repo.
    return self.bare_git.ListRefs()
| 865 | |||
| 866 | class _GitGetByExec(object): | ||
| 867 | def __init__(self, project, bare): | ||
| 868 | self._project = project | ||
| 869 | self._bare = bare | ||
| 870 | |||
| 871 | def ListRefs(self, *args): | ||
| 872 | cmdv = ['for-each-ref', '--format=%(objectname) %(refname)'] | ||
| 873 | cmdv.extend(args) | ||
| 874 | p = GitCommand(self._project, | ||
| 875 | cmdv, | ||
| 876 | bare = self._bare, | ||
| 877 | capture_stdout = True, | ||
| 878 | capture_stderr = True) | ||
| 879 | r = {} | ||
| 880 | for line in p.process.stdout: | ||
| 881 | id, name = line[:-1].split(' ', 2) | ||
| 882 | r[name] = id | ||
| 883 | if p.Wait() != 0: | ||
| 884 | raise GitError('%s for-each-ref %s: %s' % ( | ||
| 885 | self._project.name, | ||
| 886 | str(args), | ||
| 887 | p.stderr)) | ||
| 888 | return r | ||
| 889 | |||
| 890 | def LsOthers(self): | ||
| 891 | p = GitCommand(self._project, | ||
| 892 | ['ls-files', | ||
| 893 | '-z', | ||
| 894 | '--others', | ||
| 895 | '--exclude-standard'], | ||
| 896 | bare = False, | ||
| 897 | capture_stdout = True, | ||
| 898 | capture_stderr = True) | ||
| 899 | if p.Wait() == 0: | ||
| 900 | out = p.stdout | ||
| 901 | if out: | ||
| 902 | return out[:-1].split("\0") | ||
| 903 | return [] | ||
| 904 | |||
| 905 | def DiffZ(self, name, *args): | ||
| 906 | cmd = [name] | ||
| 907 | cmd.append('-z') | ||
| 908 | cmd.extend(args) | ||
| 909 | p = GitCommand(self._project, | ||
| 910 | cmd, | ||
| 911 | bare = False, | ||
| 912 | capture_stdout = True, | ||
| 913 | capture_stderr = True) | ||
| 914 | try: | ||
| 915 | out = p.process.stdout.read() | ||
| 916 | r = {} | ||
| 917 | if out: | ||
| 918 | out = iter(out[:-1].split('\0')) | ||
| 919 | while out: | ||
| 920 | info = out.next() | ||
| 921 | path = out.next() | ||
| 922 | |||
| 923 | class _Info(object): | ||
| 924 | def __init__(self, path, omode, nmode, oid, nid, state): | ||
| 925 | self.path = path | ||
| 926 | self.src_path = None | ||
| 927 | self.old_mode = omode | ||
| 928 | self.new_mode = nmode | ||
| 929 | self.old_id = oid | ||
| 930 | self.new_id = nid | ||
| 931 | |||
| 932 | if len(state) == 1: | ||
| 933 | self.status = state | ||
| 934 | self.level = None | ||
| 935 | else: | ||
| 936 | self.status = state[:1] | ||
| 937 | self.level = state[1:] | ||
| 938 | while self.level.startswith('0'): | ||
| 939 | self.level = self.level[1:] | ||
| 940 | |||
| 941 | info = info[1:].split(' ') | ||
| 942 | info =_Info(path, *info) | ||
| 943 | if info.status in ('R', 'C'): | ||
| 944 | info.src_path = info.path | ||
| 945 | info.path = out.next() | ||
| 946 | r[info.path] = info | ||
| 947 | return r | ||
| 948 | finally: | ||
| 949 | p.Wait() | ||
| 950 | |||
| 951 | def GetHead(self): | ||
| 952 | return self.symbolic_ref(HEAD) | ||
| 953 | |||
| 954 | def SetHead(self, ref, message=None): | ||
| 955 | cmdv = [] | ||
| 956 | if message is not None: | ||
| 957 | cmdv.extend(['-m', message]) | ||
| 958 | cmdv.append(HEAD) | ||
| 959 | cmdv.append(ref) | ||
| 960 | self.symbolic_ref(*cmdv) | ||
| 961 | |||
| 962 | def DetachHead(self, new, message=None): | ||
| 963 | cmdv = ['--no-deref'] | ||
| 964 | if message is not None: | ||
| 965 | cmdv.extend(['-m', message]) | ||
| 966 | cmdv.append(HEAD) | ||
| 967 | cmdv.append(new) | ||
| 968 | self.update_ref(*cmdv) | ||
| 969 | |||
| 970 | def UpdateRef(self, name, new, old=None, | ||
| 971 | message=None, | ||
| 972 | detach=False): | ||
| 973 | cmdv = [] | ||
| 974 | if message is not None: | ||
| 975 | cmdv.extend(['-m', message]) | ||
| 976 | if detach: | ||
| 977 | cmdv.append('--no-deref') | ||
| 978 | cmdv.append(name) | ||
| 979 | cmdv.append(new) | ||
| 980 | if old is not None: | ||
| 981 | cmdv.append(old) | ||
| 982 | self.update_ref(*cmdv) | ||
| 983 | |||
| 984 | def DeleteRef(self, name, old=None): | ||
| 985 | if not old: | ||
| 986 | old = self.rev_parse(name) | ||
| 987 | self.update_ref('-d', name, old) | ||
| 988 | |||
| 989 | def rev_list(self, *args): | ||
| 990 | cmdv = ['rev-list'] | ||
| 991 | cmdv.extend(args) | ||
| 992 | p = GitCommand(self._project, | ||
| 993 | cmdv, | ||
| 994 | bare = self._bare, | ||
| 995 | capture_stdout = True, | ||
| 996 | capture_stderr = True) | ||
| 997 | r = [] | ||
| 998 | for line in p.process.stdout: | ||
| 999 | r.append(line[:-1]) | ||
| 1000 | if p.Wait() != 0: | ||
| 1001 | raise GitError('%s rev-list %s: %s' % ( | ||
| 1002 | self._project.name, | ||
| 1003 | str(args), | ||
| 1004 | p.stderr)) | ||
| 1005 | return r | ||
| 1006 | |||
| 1007 | def __getattr__(self, name): | ||
| 1008 | name = name.replace('_', '-') | ||
| 1009 | def runner(*args): | ||
| 1010 | cmdv = [name] | ||
| 1011 | cmdv.extend(args) | ||
| 1012 | p = GitCommand(self._project, | ||
| 1013 | cmdv, | ||
| 1014 | bare = self._bare, | ||
| 1015 | capture_stdout = True, | ||
| 1016 | capture_stderr = True) | ||
| 1017 | if p.Wait() != 0: | ||
| 1018 | raise GitError('%s %s: %s' % ( | ||
| 1019 | self._project.name, | ||
| 1020 | name, | ||
| 1021 | p.stderr)) | ||
| 1022 | r = p.stdout | ||
| 1023 | if r.endswith('\n') and r.index('\n') == len(r) - 1: | ||
| 1024 | return r[:-1] | ||
| 1025 | return r | ||
| 1026 | return runner | ||
| 1027 | |||
| 1028 | |||
class MetaProject(Project):
  """A special project housed under .repo.
  """
  def __init__(self, manifest, name, gitdir, worktree):
    # Fix: dropped the unused local `repodir = manifest.repodir`.
    Project.__init__(self,
                     manifest = manifest,
                     name = name,
                     gitdir = gitdir,
                     worktree = worktree,
                     remote = Remote('origin'),
                     relpath = '.repo/%s' % name,
                     revision = 'refs/heads/master')

  def PreSync(self):
    """Before syncing, follow the checked out branch's merge spec."""
    if self.Exists:
      cb = self.CurrentBranch
      if cb:
        base = self.GetBranch(cb).merge
        if base:
          self.revision = base

  @property
  def HasChanges(self):
    """Has the remote received new commits not yet checked out?
    """
    rev = self.GetRemote(self.remote.name).ToLocal(self.revision)
    if self._revlist(not_rev(HEAD), rev):
      return True
    return False
diff --git a/remote.py b/remote.py new file mode 100644 index 00000000..27a8f7a7 --- /dev/null +++ b/remote.py | |||
| @@ -0,0 +1,21 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
class Remote(object):
  """A remote git repository, as named by the manifest.

  Carries the fetch URL, the optional code review URL, and the list
  of commits this remote is required to have.
  """
  def __init__(self, name, fetch=None, review=None):
    self.name = name
    self.requiredCommits = []
    self.fetchUrl = fetch
    self.reviewUrl = review
| @@ -0,0 +1,587 @@ | |||
| 1 | #!/bin/sh | ||
| 2 | |||
| 3 | ## repo default configuration | ||
| 4 | ## | ||
| 5 | REPO_URL='git://android.kernel.org/tools/repo.git' | ||
| 6 | REPO_REV='stable' | ||
| 7 | |||
| 8 | # Copyright (C) 2008 Google Inc. | ||
| 9 | # | ||
| 10 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 11 | # you may not use this file except in compliance with the License. | ||
| 12 | # You may obtain a copy of the License at | ||
| 13 | # | ||
| 14 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 15 | # | ||
| 16 | # Unless required by applicable law or agreed to in writing, software | ||
| 17 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 18 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 19 | # See the License for the specific language governing permissions and | ||
| 20 | # limitations under the License. | ||
| 21 | |||
| 22 | magic='--calling-python-from-/bin/sh--' | ||
| 23 | """exec" python2.4 -E "$0" "$@" """#$magic" | ||
| 24 | if __name__ == '__main__': | ||
| 25 | import sys | ||
| 26 | if sys.argv[-1] == '#%s' % magic: | ||
| 27 | del sys.argv[-1] | ||
| 28 | del magic | ||
| 29 | |||
| 30 | # increment this whenever we make important changes to this script | ||
| 31 | VERSION = (1, 4) | ||
| 32 | |||
| 33 | # increment this if the MAINTAINER_KEYS block is modified | ||
| 34 | KEYRING_VERSION = (1,0) | ||
| 35 | MAINTAINER_KEYS = """ | ||
| 36 | |||
| 37 | Repo Maintainer <repo@android.kernel.org> | ||
| 38 | -----BEGIN PGP PUBLIC KEY BLOCK----- | ||
| 39 | Version: GnuPG v1.4.2.2 (GNU/Linux) | ||
| 40 | |||
| 41 | mQGiBEj3ugERBACrLJh/ZPyVSKeClMuznFIrsQ+hpNnmJGw1a9GXKYKk8qHPhAZf | ||
| 42 | WKtrBqAVMNRLhL85oSlekRz98u41H5si5zcuv+IXJDF5MJYcB8f22wAy15lUqPWi | ||
| 43 | VCkk1l8qqLiuW0fo+ZkPY5qOgrvc0HW1SmdH649uNwqCbcKb6CxaTxzhOwCgj3AP | ||
| 44 | xI1WfzLqdJjsm1Nq98L0cLcD/iNsILCuw44PRds3J75YP0pze7YF/6WFMB6QSFGu | ||
| 45 | aUX1FsTTztKNXGms8i5b2l1B8JaLRWq/jOnZzyl1zrUJhkc0JgyZW5oNLGyWGhKD | ||
| 46 | Fxp5YpHuIuMImopWEMFIRQNrvlg+YVK8t3FpdI1RY0LYqha8pPzANhEYgSfoVzOb | ||
| 47 | fbfbA/4ioOrxy8ifSoga7ITyZMA+XbW8bx33WXutO9N7SPKS/AK2JpasSEVLZcON | ||
| 48 | ae5hvAEGVXKxVPDjJBmIc2cOe7kOKSi3OxLzBqrjS2rnjiP4o0ekhZIe4+ocwVOg | ||
| 49 | e0PLlH5avCqihGRhpoqDRsmpzSHzJIxtoeb+GgGEX8KkUsVAhbQpUmVwbyBNYWlu | ||
| 50 | dGFpbmVyIDxyZXBvQGFuZHJvaWQua2VybmVsLm9yZz6IYAQTEQIAIAUCSPe6AQIb | ||
| 51 | AwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEBZTDV6SD1xl1GEAn0x/OKQpy7qI | ||
| 52 | 6G73NJviU0IUMtftAKCFMUhGb/0bZvQ8Rm3QCUpWHyEIu7kEDQRI97ogEBAA2wI6 | ||
| 53 | 5fs9y/rMwD6dkD/vK9v4C9mOn1IL5JCPYMJBVSci+9ED4ChzYvfq7wOcj9qIvaE0 | ||
| 54 | GwCt2ar7Q56me5J+byhSb32Rqsw/r3Vo5cZMH80N4cjesGuSXOGyEWTe4HYoxnHv | ||
| 55 | gF4EKI2LK7xfTUcxMtlyn52sUpkfKsCpUhFvdmbAiJE+jCkQZr1Z8u2KphV79Ou+ | ||
| 56 | P1N5IXY/XWOlq48Qf4MWCYlJFrB07xjUjLKMPDNDnm58L5byDrP/eHysKexpbakL | ||
| 57 | xCmYyfT6DV1SWLblpd2hie0sL3YejdtuBMYMS2rI7Yxb8kGuqkz+9l1qhwJtei94 | ||
| 58 | 5MaretDy/d/JH/pRYkRf7L+ke7dpzrP+aJmcz9P1e6gq4NJsWejaALVASBiioqNf | ||
| 59 | QmtqSVzF1wkR5avZkFHuYvj6V/t1RrOZTXxkSk18KFMJRBZrdHFCWbc5qrVxUB6e | ||
| 60 | N5pja0NFIUCigLBV1c6I2DwiuboMNh18VtJJh+nwWeez/RueN4ig59gRTtkcc0PR | ||
| 61 | 35tX2DR8+xCCFVW/NcJ4PSePYzCuuLvp1vEDHnj41R52Fz51hgddT4rBsp0nL+5I | ||
| 62 | socSOIIezw8T9vVzMY4ArCKFAVu2IVyBcahTfBS8q5EM63mONU6UVJEozfGljiMw | ||
| 63 | xuQ7JwKcw0AUEKTKG7aBgBaTAgT8TOevpvlw91cAAwUP/jRkyVi/0WAb0qlEaq/S | ||
| 64 | ouWxX1faR+vU3b+Y2/DGjtXQMzG0qpetaTHC/AxxHpgt/dCkWI6ljYDnxgPLwG0a | ||
| 65 | Oasm94BjZc6vZwf1opFZUKsjOAAxRxNZyjUJKe4UZVuMTk6zo27Nt3LMnc0FO47v | ||
| 66 | FcOjRyquvgNOS818irVHUf12waDx8gszKxQTTtFxU5/ePB2jZmhP6oXSe4K/LG5T | ||
| 67 | +WBRPDrHiGPhCzJRzm9BP0lTnGCAj3o9W90STZa65RK7IaYpC8TB35JTBEbrrNCp | ||
| 68 | w6lzd74LnNEp5eMlKDnXzUAgAH0yzCQeMl7t33QCdYx2hRs2wtTQSjGfAiNmj/WW | ||
| 69 | Vl5Jn+2jCDnRLenKHwVRFsBX2e0BiRWt/i9Y8fjorLCXVj4z+7yW6DawdLkJorEo | ||
| 70 | p3v5ILwfC7hVx4jHSnOgZ65L9s8EQdVr1ckN9243yta7rNgwfcqb60ILMFF1BRk/ | ||
| 71 | 0V7wCL+68UwwiQDvyMOQuqkysKLSDCLb7BFcyA7j6KG+5hpsREstFX2wK1yKeraz | ||
| 72 | 5xGrFy8tfAaeBMIQ17gvFSp/suc9DYO0ICK2BISzq+F+ZiAKsjMYOBNdH/h0zobQ | ||
| 73 | HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W | ||
| 74 | zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp | ||
| 75 | TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2 | ||
| 76 | =CMiZ | ||
| 77 | -----END PGP PUBLIC KEY BLOCK----- | ||
| 78 | """ | ||
| 79 | |||
| 80 | GIT = 'git' # our git command | ||
| 81 | MIN_GIT_VERSION = (1, 5, 4) # minimum supported git version | ||
| 82 | repodir = '.repo' # name of repo's private directory | ||
| 83 | S_repo = 'repo' # special repo reposiory | ||
| 84 | S_manifests = 'manifests' # special manifest repository | ||
| 85 | REPO_MAIN = S_repo + '/main.py' # main script | ||
| 86 | |||
| 87 | |||
| 88 | import optparse | ||
| 89 | import os | ||
| 90 | import re | ||
| 91 | import readline | ||
| 92 | import subprocess | ||
| 93 | import sys | ||
| 94 | |||
| 95 | home_dot_repo = os.path.expanduser('~/.repoconfig') | ||
| 96 | gpg_dir = os.path.join(home_dot_repo, 'gnupg') | ||
| 97 | |||
# Arguments collected here are forwarded on to the installed repo;
# NOTE(review): consumed outside this region -- confirm in main flow.
extra_args = []
# Parser for the bootstrap `repo init` command.
init_optparse = optparse.OptionParser(usage="repo init -u url [options]")

# Logging
group = init_optparse.add_option_group('Logging options')
group.add_option('-q', '--quiet',
                 dest="quiet", action="store_true", default=False,
                 help="be quiet")

# Manifest
group = init_optparse.add_option_group('Manifest options')
group.add_option('-u', '--manifest-url',
                 dest='manifest_url',
                 help='manifest repository location', metavar='URL')
group.add_option('-b', '--manifest-branch',
                 dest='manifest_branch',
                 help='manifest branch or revision', metavar='REVISION')
group.add_option('-m', '--manifest-name',
                 dest='manifest_name',
                 help='initial manifest file', metavar='NAME.xml')

# Tool
group = init_optparse.add_option_group('Version options')
group.add_option('--repo-url',
                 dest='repo_url',
                 help='repo repository location', metavar='URL')
group.add_option('--repo-branch',
                 dest='repo_branch',
                 help='repo branch or revision', metavar='REVISION')
group.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
| 130 | |||
| 131 | |||
class CloneFailure(Exception):
  """Indicate the remote clone of repo itself failed.

  Raised by the bootstrap helpers and caught by _Init to report an
  overall installation failure.
  """
| 135 | |||
| 136 | |||
def _Init(args):
  """Installs repo by cloning it over the network.

  Clones the repo tool itself into <repodir>/repo, optionally
  verifying the release signature, and checks out the requested
  branch or tagged release.
  """
  opt, args = init_optparse.parse_args(args)
  if args or not opt.manifest_url:
    init_optparse.print_usage()
    sys.exit(1)

  url = opt.repo_url
  if not url:
    url = REPO_URL
  extra_args.append('--repo-url=%s' % url)

  branch = opt.repo_branch
  if not branch:
    branch = REPO_REV
  extra_args.append('--repo-branch=%s' % branch)

  # Normalize a refs/heads/ name; reject any other refs/ namespace.
  if branch.startswith('refs/heads/'):
    branch = branch[len('refs/heads/'):]
  if branch.startswith('refs/'):
    print >>sys.stderr, "fatal: invalid branch name '%s'" % branch
    raise CloneFailure()

  if not os.path.isdir(repodir):
    try:
      os.mkdir(repodir)
    except OSError, e:
      print >>sys.stderr, \
            'fatal: cannot make %s directory: %s' % (
            repodir, e.strerror)
      # Don't raise CloneFailure; that would delete the
      # name. Instead exit immediately.
      #
      sys.exit(1)

  _CheckGitVersion()
  try:
    if _NeedSetupGnuPG():
      can_verify = _SetupGnuPG(opt.quiet)
    else:
      can_verify = True

    if not opt.quiet:
      print >>sys.stderr, 'Getting repo ...'
      print >>sys.stderr, ' from %s' % url

    dst = os.path.abspath(os.path.join(repodir, S_repo))
    _Clone(url, dst, opt.quiet)

    # Prefer a signature-verified tagged release; fall back to the
    # raw tracking branch when verification is unavailable/disabled.
    if can_verify and not opt.no_repo_verify:
      rev = _Verify(dst, branch, opt.quiet)
    else:
      rev = 'refs/remotes/origin/%s^0' % branch

    _Checkout(dst, branch, rev, opt.quiet)
  except CloneFailure:
    if opt.quiet:
      print >>sys.stderr, \
        'fatal: repo init failed; run without --quiet to see why'
    raise
| 198 | |||
| 199 | |||
def _CheckGitVersion():
  """Abort the clone if the installed git is older than MIN_GIT_VERSION."""
  proc = subprocess.Popen([GIT, '--version'], stdout=subprocess.PIPE)
  ver_str = proc.stdout.read().strip()
  proc.stdout.close()

  prefix = 'git version '
  if not ver_str.startswith(prefix):
    print >>sys.stderr, 'error: "%s" unsupported' % ver_str
    raise CloneFailure()

  # Compare only the first three numeric components.
  ver_act = tuple(map(int, ver_str[len(prefix):].strip().split('.')[0:3]))
  if ver_act < MIN_GIT_VERSION:
    need = '.'.join(map(str, MIN_GIT_VERSION))
    print >>sys.stderr, 'fatal: git %s or later required' % need
    raise CloneFailure()
| 216 | |||
| 217 | |||
def _NeedSetupGnuPG():
  """Return True if the maintainer keyring must be (re)imported."""
  if not os.path.isdir(home_dot_repo):
    return True

  kv_path = os.path.join(home_dot_repo, 'keyring-version')
  if not os.path.exists(kv_path):
    return True

  kv = open(kv_path).read()
  if not kv:
    return True

  installed = tuple(map(int, kv.split('.')))
  return installed < KEYRING_VERSION
| 234 | |||
| 235 | |||
def _SetupGnuPG(quiet):
  """Import the maintainer keys into repo's private GnuPG keyring.

  Returns True when signature verification will be possible, False
  when gpg is not installed.
  """
  if not os.path.isdir(home_dot_repo):
    try:
      os.mkdir(home_dot_repo)
    except OSError, e:
      print >>sys.stderr, \
            'fatal: cannot make %s directory: %s' % (
            home_dot_repo, e.strerror)
      sys.exit(1)

  if not os.path.isdir(gpg_dir):
    try:
      # gpg requires its home directory to be private (0700).
      os.mkdir(gpg_dir, 0700)
    except OSError, e:
      print >>sys.stderr, \
            'fatal: cannot make %s directory: %s' % (
            gpg_dir, e.strerror)
      sys.exit(1)

  env = dict(os.environ)
  env['GNUPGHOME'] = gpg_dir

  cmd = ['gpg', '--import']
  try:
    proc = subprocess.Popen(cmd,
                            env = env,
                            stdin = subprocess.PIPE)
  except OSError, e:
    # gpg is optional; without it signature verification is skipped.
    if not quiet:
      print >>sys.stderr, 'warning: gpg (GnuPG) is not available.'
      print >>sys.stderr, 'warning: Installing it is strongly encouraged.'
      print >>sys.stderr
    return False

  proc.stdin.write(MAINTAINER_KEYS)
  proc.stdin.close()

  if proc.wait() != 0:
    print >>sys.stderr, 'fatal: registering repo maintainer keys failed'
    sys.exit(1)


  # Record the keyring version so future runs can skip the import.
  fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
  fd.write('.'.join(map(lambda x: str(x), KEYRING_VERSION)) + '\n')
  fd.close()
  return True
| 282 | |||
| 283 | |||
def _SetConfig(local, name, value):
  """Set a git configuration option to the specified value.
  """
  p = subprocess.Popen([GIT, 'config', name, value], cwd = local)
  if p.wait() != 0:
    raise CloneFailure()
| 290 | |||
| 291 | |||
def _Fetch(local, quiet, *args):
  """Run `git fetch origin` in directory local.

  Extra args are passed through to git fetch.  With quiet, stderr is
  captured and discarded.  Raises CloneFailure on failure.
  """
  cmd = [GIT, 'fetch']
  if quiet:
    cmd.append('--quiet')
    err = subprocess.PIPE
  else:
    err = None
  cmd.extend(args)
  cmd.append('origin')

  proc = subprocess.Popen(cmd, cwd = local, stderr = err)
  if err:
    # Drain stderr so the child cannot block on a full pipe.
    proc.stderr.read()
    proc.stderr.close()
  if proc.wait() != 0:
    raise CloneFailure()
| 308 | |||
| 309 | |||
def _Clone(url, local, quiet):
  """Clones a git repository to a new subdirectory of repodir
  """
  try:
    os.mkdir(local)
  except OSError, e:
    print >>sys.stderr, \
          'fatal: cannot make %s directory: %s' \
          % (local, e.strerror)
    raise CloneFailure()

  # init + config + fetch, rather than `git clone`, so the refspec
  # is under our control and tags are fetched separately.
  cmd = [GIT, 'init', '--quiet']
  try:
    proc = subprocess.Popen(cmd, cwd = local)
  except OSError, e:
    print >>sys.stderr
    print >>sys.stderr, "fatal: '%s' is not available" % GIT
    print >>sys.stderr, 'fatal: %s' % e
    print >>sys.stderr
    print >>sys.stderr, 'Please make sure %s is installed'\
                        ' and in your path.' % GIT
    raise CloneFailure()
  if proc.wait() != 0:
    print >>sys.stderr, 'fatal: could not create %s' % local
    raise CloneFailure()

  _SetConfig(local, 'remote.origin.url', url)
  _SetConfig(local, 'remote.origin.fetch',
                    '+refs/heads/*:refs/remotes/origin/*')
  _Fetch(local, quiet)
  _Fetch(local, quiet, '--tags')
| 341 | |||
| 342 | |||
def _Verify(cwd, branch, quiet):
  """Verify the branch has been signed by a tag.

  Returns the revision expression (tag^0) to check out.  Raises
  CloneFailure if no tag describes the branch or the tag's GPG
  signature cannot be verified.
  """
  cmd = [GIT, 'describe', 'origin/%s' % branch]
  proc = subprocess.Popen(cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          cwd = cwd)
  cur = proc.stdout.read().strip()
  proc.stdout.close()

  proc.stderr.read()
  proc.stderr.close()

  if proc.wait() != 0 or not cur:
    print >>sys.stderr
    print >>sys.stderr,\
      "fatal: branch '%s' has not been signed" \
      % branch
    raise CloneFailure()

  # describe output like v1.0-3-gabcdef0 means the branch is past
  # the tag; use the tagged release itself instead.
  m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
  if m:
    cur = m.group(1)
    if not quiet:
      print >>sys.stderr
      print >>sys.stderr, \
        "info: Ignoring branch '%s'; using tagged release '%s'" \
        % (branch, cur)
      print >>sys.stderr

  # Verify the tag's signature against repo's private keyring.
  env = dict(os.environ)
  env['GNUPGHOME'] = gpg_dir

  cmd = [GIT, 'tag', '-v', cur]
  proc = subprocess.Popen(cmd,
                          stdout = subprocess.PIPE,
                          stderr = subprocess.PIPE,
                          cwd = cwd,
                          env = env)
  out = proc.stdout.read()
  proc.stdout.close()

  err = proc.stderr.read()
  proc.stderr.close()

  if proc.wait() != 0:
    print >>sys.stderr
    print >>sys.stderr, out
    print >>sys.stderr, err
    print >>sys.stderr
    raise CloneFailure()
  return '%s^0' % cur
| 396 | |||
| 397 | |||
def _Checkout(cwd, branch, rev, quiet):
  """Checkout an upstream branch into the repository and track it.
  """
  if subprocess.Popen([GIT, 'update-ref', 'refs/heads/default', rev],
                      cwd = cwd).wait() != 0:
    raise CloneFailure()

  _SetConfig(cwd, 'branch.default.remote', 'origin')
  _SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch)

  if subprocess.Popen([GIT, 'symbolic-ref', 'HEAD', 'refs/heads/default'],
                      cwd = cwd).wait() != 0:
    raise CloneFailure()

  cmd = [GIT, 'read-tree', '--reset', '-u']
  if not quiet:
    cmd.append('-v')
  cmd.append('HEAD')
  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
    raise CloneFailure()
| 418 | |||
| 419 | |||
def _FindRepo():
  """Look for a repo installation, starting at the current directory.

  Walks up toward the filesystem root; returns (path to main.py or
  None, path to the nearest candidate repodir).
  """
  curdir = os.getcwd()        # renamed from 'dir' (shadowed builtin)
  repo = None

  while curdir != '/' and not repo:
    repo = os.path.join(curdir, repodir, REPO_MAIN)
    if not os.path.isfile(repo):
      repo = None
      curdir = os.path.dirname(curdir)
  return (repo, os.path.join(curdir, repodir))
| 432 | |||
| 433 | |||
class _Options:
  """Options understood before repo itself is installed."""
  # True when -h/--help was given.
  help = False
| 436 | |||
| 437 | |||
def _ParseArguments(args):
  """Split argv into (command, bootstrap options, command arguments).

  Options before the first non-option token belong to the wrapper;
  everything after the command is passed through untouched.
  """
  cmd = None
  opt = _Options()
  arg = []

  for i, a in enumerate(args):
    if a in ('-h', '--help'):
      opt.help = True
    elif not a.startswith('-'):
      cmd = a
      arg = args[i + 1:]
      break
  return cmd, opt, arg
| 453 | |||
| 454 | |||
def _Usage():
  """Print the bootstrap usage message and exit with status 1."""
  print >>sys.stderr,\
"""usage: repo COMMAND [ARGS]

repo is not yet installed. Use "repo init" to install it here.

The most commonly used repo commands are:

init Install repo in the current working directory
help Display detailed help on a command

For access to the full online help, install repo ("repo init").
"""
  sys.exit(1)
| 469 | |||
| 470 | |||
| 471 | def _Help(args): | ||
| 472 | if args: | ||
| 473 | if args[0] == 'init': | ||
| 474 | init_optparse.print_help() | ||
| 475 | else: | ||
| 476 | print >>sys.stderr,\ | ||
| 477 | "error: '%s' is not a bootstrap command.\n"\ | ||
| 478 | ' For access to online help, install repo ("repo init").'\ | ||
| 479 | % args[0] | ||
| 480 | else: | ||
| 481 | _Usage() | ||
| 482 | sys.exit(1) | ||
| 483 | |||
| 484 | |||
| 485 | def _NotInstalled(): | ||
| 486 | print >>sys.stderr,\ | ||
| 487 | 'error: repo is not installed. Use "repo init" to install it here.' | ||
| 488 | sys.exit(1) | ||
| 489 | |||
| 490 | |||
| 491 | def _NoCommands(cmd): | ||
| 492 | print >>sys.stderr,\ | ||
| 493 | """error: command '%s' requires repo to be installed first. | ||
| 494 | Use "repo init" to install it here.""" % cmd | ||
| 495 | sys.exit(1) | ||
| 496 | |||
| 497 | |||
| 498 | def _RunSelf(wrapper_path): | ||
| 499 | my_dir = os.path.dirname(wrapper_path) | ||
| 500 | my_main = os.path.join(my_dir, 'main.py') | ||
| 501 | my_git = os.path.join(my_dir, '.git') | ||
| 502 | |||
| 503 | if os.path.isfile(my_main) and os.path.isdir(my_git): | ||
| 504 | for name in ['manifest.py', | ||
| 505 | 'project.py', | ||
| 506 | 'subcmds']: | ||
| 507 | if not os.path.exists(os.path.join(my_dir, name)): | ||
| 508 | return None, None | ||
| 509 | return my_main, my_git | ||
| 510 | return None, None | ||
| 511 | |||
| 512 | |||
def _SetDefaultsTo(gitdir):
  """Point REPO_URL/REPO_REV at an existing local repo checkout.

  Used when the wrapper is executed from inside a repo source tree,
  so "repo init" clones from that tree instead of the default server.
  REPO_REV is taken from the checkout's current branch.

  Exits with status 1 when gitdir has no current branch (detached
  HEAD or not a git repository).  Rewritten from the Python-2-only
  'print >>' form.
  """
  global REPO_URL
  global REPO_REV

  REPO_URL = gitdir
  proc = subprocess.Popen([GIT,
                           '--git-dir=%s' % gitdir,
                           'symbolic-ref',
                           'HEAD'],
                          stdout = subprocess.PIPE,
                          stderr = subprocess.PIPE)
  REPO_REV = proc.stdout.read().strip()
  proc.stdout.close()

  # Drain stderr so the child can never block on a full pipe.
  proc.stderr.read()
  proc.stderr.close()

  if proc.wait() != 0:
    sys.stderr.write('fatal: %s has no current branch\n' % gitdir)
    sys.exit(1)
| 533 | |||
| 534 | |||
def main(orig_args):
  """Bootstrap entry point for the repo wrapper.

  Locates an existing repo installation (installing one first when
  the command is "repo init"), then replaces this process with the
  installed main.py via os.execv().

  Fixes applied: Python-2-only 'except OSError, e' and 'print >>'
  syntax; locals renamed so they no longer shadow this function's
  own name ('main') or the dir() builtin.
  """
  repo_main, rel_repo_dir = _FindRepo()
  cmd, opt, args = _ParseArguments(orig_args)

  wrapper_path = os.path.abspath(__file__)
  my_main, my_git = _RunSelf(wrapper_path)

  if not repo_main:
    if opt.help:
      _Usage()
    if cmd == 'help':
      _Help(args)
    if not cmd:
      _NotInstalled()
    if cmd == 'init':
      if my_git:
        # Running from a source tree: clone from it by default.
        _SetDefaultsTo(my_git)
      try:
        _Init(args)
      except CloneFailure:
        # Remove the partially created .repo directory so a later
        # "repo init" can start from a clean slate.
        for root, dirs, files in os.walk(repodir, topdown=False):
          for name in files:
            os.remove(os.path.join(root, name))
          for name in dirs:
            os.rmdir(os.path.join(root, name))
        os.rmdir(repodir)
        sys.exit(1)
      repo_main, rel_repo_dir = _FindRepo()
    else:
      _NoCommands(cmd)

  if my_main:
    # Prefer the source-tree main.py over the installed copy.
    repo_main = my_main

  ver_str = '.'.join(map(str, VERSION))
  me = [repo_main,
        '--repo-dir=%s' % rel_repo_dir,
        '--wrapper-version=%s' % ver_str,
        '--wrapper-path=%s' % wrapper_path,
        '--']
  me.extend(orig_args)
  me.extend(extra_args)
  try:
    os.execv(repo_main, me)
  except OSError as e:
    sys.stderr.write('fatal: unable to start %s\n' % repo_main)
    sys.stderr.write('fatal: %s\n' % e)
    sys.exit(148)
| 583 | |||
| 584 | |||
| 585 | if __name__ == '__main__': | ||
| 586 | main(sys.argv[1:]) | ||
| 587 | |||
diff --git a/subcmds/__init__.py b/subcmds/__init__.py new file mode 100644 index 00000000..a2286e78 --- /dev/null +++ b/subcmds/__init__.py | |||
| @@ -0,0 +1,49 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
import os

# Registry of command name -> command instance, built by scanning this
# package directory for command modules at import time.
# NOTE: 'all' shadows the all() builtin but is kept — it is this
# package's public API and is referenced by callers.
all = {}

my_dir = os.path.dirname(__file__)
for py in os.listdir(my_dir):
  if py == '__init__.py':
    continue

  if py.endswith('.py'):
    name = py[:-3]

    # Derive the expected class name: 'foo_bar.py' -> 'FooBar'.
    clsn = name.capitalize()
    while clsn.find('_') > 0:
      h = clsn.index('_')
      clsn = clsn[0:h] + clsn[h + 1:].capitalize()

    mod = __import__(__name__,
                     globals(),
                     locals(),
                     ['%s' % name])
    mod = getattr(mod, name)
    try:
      cmd = getattr(mod, clsn)()
    except AttributeError:
      # Was the Python-2-only 'raise SyntaxError, msg' statement.
      raise SyntaxError('%s/%s does not define class %s' % (
                        __name__, py, clsn))

    # Command names use dashes where module names use underscores.
    name = name.replace('_', '-')
    cmd.NAME = name
    all[name] = cmd

if 'help' in all:
  # Give the help command access to the full command table.
  all['help'].commands = all
diff --git a/subcmds/compute_snapshot_check.py b/subcmds/compute_snapshot_check.py new file mode 100644 index 00000000..82db359a --- /dev/null +++ b/subcmds/compute_snapshot_check.py | |||
| @@ -0,0 +1,169 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | import tempfile | ||
| 19 | |||
| 20 | from command import Command | ||
| 21 | from error import GitError, NoSuchProjectError | ||
| 22 | from git_config import IsId | ||
| 23 | from import_tar import ImportTar | ||
| 24 | from import_zip import ImportZip | ||
| 25 | from project import Project | ||
| 26 | from remote import Remote | ||
| 27 | |||
| 28 | def _ToCommit(project, rev): | ||
| 29 | return project.bare_git.rev_parse('--verify', '%s^0' % rev) | ||
| 30 | |||
| 31 | def _Missing(project, rev): | ||
| 32 | return project._revlist('--objects', rev, '--not', '--all') | ||
| 33 | |||
| 34 | |||
class ComputeSnapshotCheck(Command):
  """Import a source snapshot (tar/zip) into a scratch git repository
  and print the resulting commit id — the snapshot's "check" value
  for the manifest.
  """
  common = False
  helpSummary = "Compute the check value for a new snapshot"
  helpUsage = """
%prog -p NAME -v VERSION -s FILE [options]
"""
  helpDescription = """
%prog computes and then displays the proper check value for a
snapshot, so it can be pasted into the manifest file for a project.
"""

  def _Options(self, p):
    # Snapshot description: what to import and where it belongs.
    g = p.add_option_group('Snapshot description options')
    g.add_option('-p', '--project',
                 dest='project', metavar='NAME',
                 help='destination project name')
    g.add_option('-v', '--version',
                 dest='version', metavar='VERSION',
                 help='upstream version/revision identifier')
    g.add_option('-s', '--snapshot',
                 dest='snapshot', metavar='PATH',
                 help='local tarball path')
    g.add_option('--new-project',
                 dest='new_project', action='store_true',
                 help='destinition is a new project')
    # TODO(review): "destinition" in the help string above is a typo
    # for "destination"; left unchanged here (runtime text).
    g.add_option('--keep',
                 dest='keep_git', action='store_true',
                 help='keep the temporary git repository')

    g = p.add_option_group('Base revision grafting options')
    g.add_option('--prior',
                 dest='prior', metavar='COMMIT',
                 help='prior revision checksum')

    g = p.add_option_group('Path mangling options')
    g.add_option('--strip-prefix',
                 dest='strip_prefix', metavar='PREFIX',
                 help='remove prefix from all paths on import')
    g.add_option('--insert-prefix',
                 dest='insert_prefix', metavar='PREFIX',
                 help='insert prefix before all paths on import')


  def _Compute(self, opt):
    """Perform the import and return the scratch Project.

    Creates a throwaway repository under a fresh tempdir (recorded in
    self._tmpdir so Execute's finally block can remove it), imports
    the snapshot via ImportTar/ImportZip, optionally grafts it onto a
    prior commit, and prints "<commit>\\t<version>" to stderr.
    """
    try:
      real_project = self.GetProjects([opt.project])[0]
    except NoSuchProjectError:
      # With --new-project a missing project is only a warning.
      # NOTE(review): real_project stays unbound on this path; it is
      # only referenced later on the not-new_project branch.
      if opt.new_project:
        print >>sys.stderr, \
          "warning: project '%s' does not exist" % opt.project
      else:
        raise NoSuchProjectError(opt.project)

    self._tmpdir = tempfile.mkdtemp()
    project = Project(manifest = self.manifest,
                      name = opt.project,
                      remote = Remote('origin'),
                      gitdir = os.path.join(self._tmpdir, '.git'),
                      worktree = self._tmpdir,
                      relpath = opt.project,
                      revision = 'refs/heads/master')
    project._InitGitDir()

    url = 'file://%s' % os.path.abspath(opt.snapshot)

    # Pick the first importer that understands this snapshot format.
    imp = None
    for cls in [ImportTar, ImportZip]:
      if cls.CanAccept(url):
        imp = cls()
        break
    if not imp:
      print >>sys.stderr, 'error: %s unsupported' % opt.snapshot
      sys.exit(1)

    imp.SetProject(project)
    imp.SetVersion(opt.version)
    imp.AddUrl(url)

    if opt.prior:
      if opt.new_project:
        # No existing history to check against; validate the id only.
        if not IsId(opt.prior):
          print >>sys.stderr, 'error: --prior=%s not valid' % opt.prior
          sys.exit(1)
      else:
        try:
          # Resolve the prior revision and verify it is reachable
          # from the real project's existing refs.
          opt.prior = _ToCommit(real_project, opt.prior)
          missing = _Missing(real_project, opt.prior)
        except GitError, e:
          print >>sys.stderr,\
            'error: --prior=%s not valid\n%s' \
            % (opt.prior, e)
          sys.exit(1)
        if missing:
          print >>sys.stderr,\
            'error: --prior=%s is valid, but is not reachable' \
            % opt.prior
          sys.exit(1)
      imp.SetParent(opt.prior)

    # Path remapping: either prefix option implies remapping, with the
    # unset one treated as the empty string.
    src = opt.strip_prefix
    dst = opt.insert_prefix
    if src or dst:
      if src is None:
        src = ''
      if dst is None:
        dst = ''
      imp.RemapPath(src, dst)
    commitId = imp.Import()

    # The check value: commit id and version, tab separated.
    print >>sys.stderr,"%s\t%s" % (commitId, imp.version)
    return project

  def Execute(self, opt, args):
    """Validate the required options, run the import, then clean up
    the temporary repository unless --keep was given.
    """
    if args \
       or not opt.project \
       or not opt.version \
       or not opt.snapshot:
      self.Usage()

    success = False
    project = None
    try:
      self._tmpdir = None
      project = self._Compute(opt)
    finally:
      if project and opt.keep_git:
        print 'GIT_DIR = %s' % (project.gitdir)
      elif self._tmpdir:
        # Bottom-up walk so each directory is empty before rmdir.
        for root, dirs, files in os.walk(self._tmpdir, topdown=False):
          for name in files:
            os.remove(os.path.join(root, name))
          for name in dirs:
            os.rmdir(os.path.join(root, name))
        os.rmdir(self._tmpdir)
| 169 | |||
diff --git a/subcmds/diff.py b/subcmds/diff.py new file mode 100644 index 00000000..e0247140 --- /dev/null +++ b/subcmds/diff.py | |||
| @@ -0,0 +1,27 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | from command import PagedCommand | ||
| 17 | |||
class Diff(PagedCommand):
  """Show each selected project's uncommitted working-tree changes."""
  common = True
  helpSummary = "Show changes between commit and working tree"
  helpUsage = """
%prog [<project>...]
"""

  def Execute(self, opt, args):
    """Emit the working-tree diff of every selected project."""
    for proj in self.GetProjects(args):
      proj.PrintWorkTreeDiff()
diff --git a/subcmds/forall.py b/subcmds/forall.py new file mode 100644 index 00000000..b22e22a1 --- /dev/null +++ b/subcmds/forall.py | |||
| @@ -0,0 +1,82 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import re | ||
| 17 | import os | ||
| 18 | import sys | ||
| 19 | import subprocess | ||
| 20 | from command import Command | ||
| 21 | |||
class Forall(Command):
  """Run an arbitrary shell command inside every selected project."""
  common = False
  helpSummary = "Run a shell command in each project"
  helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
"""
  helpDescription = """
Executes the same shell command in each project.

Environment
-----------
pwd is the project's working directory.

REPO_PROJECT is set to the unique name of the project.

shell positional arguments ($1, $2, .., $#) are set to any arguments
following <command>.

stdin, stdout, stderr are inherited from the terminal and are
not redirected.
"""

  def _Options(self, p):
    def cmd(option, opt_str, value, parser):
      # Consume every remaining argument as part of the command, so
      # flags after -c go to the command rather than to repo itself.
      setattr(parser.values, option.dest, list(parser.rargs))
      while parser.rargs:
        del parser.rargs[0]
    p.add_option('-c', '--command',
                 help='Command (and arguments) to execute',
                 dest='command',
                 action='callback',
                 callback=cmd)

  def Execute(self, opt, args):
    """Run the command in each project; exit with the last non-zero
    child status seen, if any.
    """
    if not opt.command:
      self.Usage()

    cmd = [opt.command[0]]

    # Plain words can execute directly; anything with shell
    # metacharacters runs through the shell (cmd[0] duplicated so it
    # also becomes $0 for the shell).
    shell = True
    if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
      shell = False

    if shell:
      cmd.append(cmd[0])
    cmd.extend(opt.command[1:])

    rc = 0
    for project in self.GetProjects(args):
      # os.environ.copy() replaces the Python-2-only
      # dict(os.environ.iteritems()) with an identical result.
      env = os.environ.copy()
      env['REPO_PROJECT'] = project.name

      p = subprocess.Popen(cmd,
                           cwd = project.worktree,
                           shell = shell,
                           env = env)
      r = p.wait()
      if r != 0 and r != rc:
        rc = r
    if rc != 0:
      sys.exit(rc)
diff --git a/subcmds/help.py b/subcmds/help.py new file mode 100644 index 00000000..6e0238a0 --- /dev/null +++ b/subcmds/help.py | |||
| @@ -0,0 +1,147 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import sys | ||
| 17 | from formatter import AbstractFormatter, DumbWriter | ||
| 18 | |||
| 19 | from color import Coloring | ||
| 20 | from command import PagedCommand | ||
| 21 | |||
class Help(PagedCommand):
  """Implements 'repo help': per-command detail or command summaries.

  self.commands is injected by subcmds/__init__.py after all command
  modules are loaded.
  """
  common = False
  helpSummary = "Display detailed help on a command"
  helpUsage = """
%prog [--all|command]
"""
  helpDescription = """
Displays detailed usage information about a command.
"""

  def _PrintAllCommands(self):
    # One summary line for every registered command, sorted by name.
    print 'usage: repo COMMAND [ARGS]'
    print """
The complete list of recognized repo commands are:
"""
    commandNames = self.commands.keys()
    commandNames.sort()

    # Size the name column to the longest command name.
    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen

    for name in commandNames:
      command = self.commands[name]
      # Commands without a helpSummary attribute get a blank summary.
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
"""

  def _PrintCommonCommands(self):
    # Like _PrintAllCommands, but limited to commands marked 'common'.
    print 'usage: repo COMMAND [ARGS]'
    print """
The most commonly used repo commands are:
"""
    commandNames = [name
                    for name in self.commands.keys()
                    if self.commands[name].common]
    commandNames.sort()

    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen

    for name in commandNames:
      command = self.commands[name]
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
"""

  def _PrintCommandHelp(self, cmd):
    """Print cmd's optparse usage plus Summary/Description sections."""
    class _Out(Coloring):
      def __init__(self, gc):
        Coloring.__init__(self, gc, 'help')
        self.heading = self.printer('heading', attr='bold')

        # NOTE(review): the 'formatter' module was removed in
        # Python 3.10; this code only runs on older interpreters.
        self.wrap = AbstractFormatter(DumbWriter())

      def _PrintSection(self, heading, bodyAttr):
        # Sections are optional: skip silently when the command does
        # not define the attribute.
        try:
          body = getattr(cmd, bodyAttr)
        except AttributeError:
          return

        self.nl()

        self.heading('%s', heading)
        self.nl()

        # Underline the heading with dashes of matching length.
        self.heading('%s', ''.ljust(len(heading), '-'))
        self.nl()

        me = 'repo %s' % cmd.NAME
        body = body.strip()
        body = body.replace('%prog', me)

        # Indented paragraphs are treated as preformatted text;
        # everything else is re-flowed by the formatter.
        for para in body.split("\n\n"):
          if para.startswith(' '):
            self.write('%s', para)
            self.nl()
            self.nl()
          else:
            self.wrap.add_flowing_data(para)
            self.wrap.end_paragraph(1)
        self.wrap.end_paragraph(0)

    out = _Out(self.manifest.globalConfig)
    cmd.OptionParser.print_help()
    out._PrintSection('Summary', 'helpSummary')
    out._PrintSection('Description', 'helpDescription')

  def _Options(self, p):
    p.add_option('-a', '--all',
                 dest='show_all', action='store_true',
                 help='show the complete list of commands')

  def Execute(self, opt, args):
    # No argument: show a command listing (all or common only).
    if len(args) == 0:
      if opt.show_all:
        self._PrintAllCommands()
      else:
        self._PrintCommonCommands()

    # One argument: detailed help for that command.
    elif len(args) == 1:
      name = args[0]

      try:
        cmd = self.commands[name]
      except KeyError:
        print >>sys.stderr, "repo: '%s' is not a repo command." % name
        sys.exit(1)

      self._PrintCommandHelp(cmd)

    # More than one argument: show help for the help command itself.
    else:
      self._PrintCommandHelp(self)
diff --git a/subcmds/init.py b/subcmds/init.py new file mode 100644 index 00000000..03f358d1 --- /dev/null +++ b/subcmds/init.py | |||
| @@ -0,0 +1,193 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | |||
| 19 | from color import Coloring | ||
| 20 | from command import InteractiveCommand | ||
| 21 | from error import ManifestParseError | ||
| 22 | from remote import Remote | ||
| 23 | from git_command import git, MIN_GIT_VERSION | ||
| 24 | |||
class Init(InteractiveCommand):
  """Implements 'repo init': install the manifest repository under
  .repo/ and perform first-run user/color configuration.
  """
  common = True
  helpSummary = "Initialize repo in the current directory"
  helpUsage = """
%prog [options]
"""
  helpDescription = """
The '%prog' command is run once to install and initialize repo.
The latest repo source code and manifest collection is downloaded
from the server and is installed in the .repo/ directory in the
current working directory.

The optional <manifest> argument can be used to specify an alternate
manifest to be used. If no manifest is specified, the manifest
default.xml will be used.
"""

  def _Options(self, p):
    # Logging
    g = p.add_option_group('Logging options')
    g.add_option('-q', '--quiet',
                 dest="quiet", action="store_true", default=False,
                 help="be quiet")

    # Manifest
    g = p.add_option_group('Manifest options')
    g.add_option('-u', '--manifest-url',
                 dest='manifest_url',
                 help='manifest repository location', metavar='URL')
    g.add_option('-b', '--manifest-branch',
                 dest='manifest_branch',
                 help='manifest branch or revision', metavar='REVISION')
    g.add_option('-m', '--manifest-name',
                 dest='manifest_name', default='default.xml',
                 help='initial manifest file', metavar='NAME.xml')

    # Tool
    g = p.add_option_group('Version options')
    g.add_option('--repo-url',
                 dest='repo_url',
                 help='repo repository location', metavar='URL')
    g.add_option('--repo-branch',
                 dest='repo_branch',
                 help='repo branch or revision', metavar='REVISION')
    g.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')

  def _CheckGitVersion(self):
    """Exit(1) unless the installed git is at least MIN_GIT_VERSION."""
    ver_str = git.version()
    if not ver_str.startswith('git version '):
      print >>sys.stderr, 'error: "%s" unsupported' % ver_str
      sys.exit(1)

    # Compare only the first three numeric version components.
    ver_str = ver_str[len('git version '):].strip()
    ver_act = tuple(map(lambda x: int(x), ver_str.split('.')[0:3]))
    if ver_act < MIN_GIT_VERSION:
      need = '.'.join(map(lambda x: str(x), MIN_GIT_VERSION))
      print >>sys.stderr, 'fatal: git %s or later required' % need
      sys.exit(1)

  def _SyncManifest(self, opt):
    """Create or update the manifest project checkout under .repo/."""
    m = self.manifest.manifestProject

    if not m.Exists:
      # First run: a manifest URL is mandatory.
      if not opt.manifest_url:
        print >>sys.stderr, 'fatal: manifest url (-u) is required.'
        sys.exit(1)

      if not opt.quiet:
        print >>sys.stderr, 'Getting manifest ...'
        print >>sys.stderr, ' from %s' % opt.manifest_url
      m._InitGitDir()

      if opt.manifest_branch:
        m.revision = opt.manifest_branch
      else:
        m.revision = 'refs/heads/master'
    else:
      # Existing checkout: override the branch or re-read it.
      if opt.manifest_branch:
        m.revision = opt.manifest_branch
      else:
        m.PreSync()

    if opt.manifest_url:
      # Redirect the remote at the (possibly new) manifest URL.
      r = m.GetRemote(m.remote.name)
      r.url = opt.manifest_url
      r.ResetFetch()
      r.Save()

    m.Sync_NetworkHalf()
    m.Sync_LocalHalf()
    m.StartBranch('default')

  def _LinkManifest(self, name):
    """Make <name> the active manifest; exit(1) if it cannot parse."""
    if not name:
      print >>sys.stderr, 'fatal: manifest name (-m) is required.'
      sys.exit(1)

    try:
      self.manifest.Link(name)
    except ManifestParseError, e:
      print >>sys.stderr, "fatal: manifest '%s' not available" % name
      print >>sys.stderr, 'fatal: %s' % str(e)
      sys.exit(1)

  def _PromptKey(self, prompt, key, value):
    """Show the current value; store the reply only if it differs."""
    mp = self.manifest.manifestProject

    sys.stdout.write('%-10s [%s]: ' % (prompt, value))
    a = sys.stdin.readline().strip()
    if a != '' and a != value:
      mp.config.SetString(key, a)

  def _ConfigureUser(self):
    # Interactive prompts for user.name / user.email.
    mp = self.manifest.manifestProject

    print ''
    self._PromptKey('Your Name', 'user.name', mp.UserName)
    self._PromptKey('Your Email', 'user.email', mp.UserEmail)

  def _HasColorSet(self, gc):
    """True if the user already configured any git color.* setting."""
    for n in ['ui', 'diff', 'status']:
      if gc.Has('color.%s' % n):
        return True
    return False

  def _ConfigureColor(self):
    # Show a color test pattern and offer to enable color.ui globally;
    # skipped when the user already has any color.* preference.
    gc = self.manifest.globalConfig
    if self._HasColorSet(gc):
      return

    class _Test(Coloring):
      def __init__(self):
        Coloring.__init__(self, gc, 'test color display')
        self._on = True   # force color on for the demo output
    out = _Test()

    print ''
    print "Testing colorized output (for 'repo diff', 'repo status'):"

    for c in ['black','red','green','yellow','blue','magenta','cyan']:
      out.write(' ')
      out.printer(fg=c)(' %-6s ', c)
    out.write(' ')
    out.printer(fg='white', bg='black')(' %s ' % 'white')
    out.nl()

    for c in ['bold','dim','ul','reverse']:
      out.write(' ')
      out.printer(fg='black', attr=c)(' %-6s ', c)
    out.nl()

    sys.stdout.write('Enable color display in this user account (y/n)? ')
    a = sys.stdin.readline().strip().lower()
    if a in ('y', 'yes', 't', 'true', 'on'):
      gc.SetString('color.ui', 'auto')

  def Execute(self, opt, args):
    """Run the init sequence; prompt only when attached to a tty."""
    self._CheckGitVersion()
    self._SyncManifest(opt)
    self._LinkManifest(opt.manifest_name)

    if os.isatty(0) and os.isatty(1):
      self._ConfigureUser()
      self._ConfigureColor()

    print ''
    print 'repo initialized in %s' % self.manifest.topdir
diff --git a/subcmds/prune.py b/subcmds/prune.py new file mode 100644 index 00000000..f412bd48 --- /dev/null +++ b/subcmds/prune.py | |||
| @@ -0,0 +1,59 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | from color import Coloring | ||
| 17 | from command import PagedCommand | ||
| 18 | |||
class Prune(PagedCommand):
  """Delete merged topic branches and report the ones still pending."""
  common = True
  helpSummary = "Prune (delete) already merged topics"
  helpUsage = """
%prog [<project>...]
"""

  def Execute(self, opt, args):
    # 'branches' was named 'all', shadowing the all() builtin.
    branches = []
    for project in self.GetProjects(args):
      branches.extend(project.PruneHeads())

    if not branches:
      return

    class Report(Coloring):
      def __init__(self, config):
        Coloring.__init__(self, config, 'status')
        self.project = self.printer('header', attr='bold')

    out = Report(branches[0].project.config)
    out.project('Pending Branches')
    out.nl()

    project = None

    for branch in branches:
      # Print a project header whenever the project changes.
      if project != branch.project:
        project = branch.project
        out.nl()
        out.project('project %s/' % project.relpath)
        out.nl()

      commits = branch.commits
      date = branch.date
      # '*' marks the currently checked out branch; single-argument
      # print(...) form is valid on both Python 2 and 3.
      print('%s %-33s (%2d commit%s, %s)' % (
            branch.name == project.CurrentBranch and '*' or ' ',
            branch.name,
            len(commits),
            len(commits) != 1 and 's' or ' ',
            date))
diff --git a/subcmds/stage.py b/subcmds/stage.py new file mode 100644 index 00000000..c451cd6d --- /dev/null +++ b/subcmds/stage.py | |||
| @@ -0,0 +1,108 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import sys | ||
| 17 | |||
| 18 | from color import Coloring | ||
| 19 | from command import InteractiveCommand | ||
| 20 | from git_command import GitCommand | ||
| 21 | |||
class _ProjectList(Coloring):
  """Colored output helper for the interactive project chooser."""

  def __init__(self, gc):
    Coloring.__init__(self, gc, 'interactive')
    # Three independent printers: column headings, help/hint text,
    # and the input prompt shown before reading from stdin.
    self.header = self.printer('header', attr='bold')
    self.help = self.printer('help', fg='red', attr='bold')
    self.prompt = self.printer('prompt', fg='blue', attr='bold')
| 28 | |||
class Stage(InteractiveCommand):
  """Interactively stage modified files for the next commit, one
  project at a time, via 'git add --interactive'.
  """

  common = True
  helpSummary = "Stage file(s) for commit"
  helpUsage = """
%prog -i [<project>...]
"""
  helpDescription = """
The '%prog' command stages files to prepare the next commit.
"""

  def _Options(self, p):
    # Only mode supported; without -i, Execute() just shows usage.
    p.add_option('-i', '--interactive',
                 dest='interactive', action='store_true',
                 help='use interactive staging')

  def Execute(self, opt, args):
    if opt.interactive:
      self._Interactive(opt, args)
    else:
      self.Usage()

  def _Interactive(self, opt, args):
    # Only projects with uncommitted modifications are offered.
    # NOTE(review): 'all' shadows the builtin of the same name.
    all = filter(lambda x: x.IsDirty(), self.GetProjects(args))
    if not all:
      print >>sys.stderr,'no projects have uncommitted modifications'
      return

    out = _ProjectList(self.manifest.manifestProject.config)
    # Menu loop: print the dirty projects, read a selection, run
    # interactive add on it, and repeat until the user quits.
    while True:
      out.header(' %-20s %s', 'project', 'path')
      out.nl()

      # Menu entries are 1-based; 0 is reserved for quit below.
      for i in xrange(0, len(all)):
        p = all[i]
        out.write('%3d: %-20s %s', i + 1, p.name, p.relpath + '/')
        out.nl()
      out.nl()

      out.write('%3d: (', 0)
      out.prompt('q')
      out.write('uit)')
      out.nl()

      out.prompt('project> ')
      try:
        a = sys.stdin.readline()
      except KeyboardInterrupt:
        out.nl()
        break
      if a == '':
        # EOF on stdin (readline returns '' only at end of input).
        out.nl()
        break

      a = a.strip()
      if a.lower() in ('q', 'quit', 'exit'):
        break
      if not a:
        # Blank entry: redisplay the menu.
        continue

      # A numeric answer selects by menu index ...
      try:
        a_index = int(a)
      except ValueError:
        a_index = None

      if a_index is not None:
        if a_index == 0:
          break
        if 0 < a_index and a_index <= len(all):
          _AddI(all[a_index - 1])
          continue

      # ... otherwise match the answer against project name or path.
      # Only act on an unambiguous (single) match.
      p = filter(lambda x: x.name == a or x.relpath == a, all)
      if len(p) == 1:
        _AddI(p[0])
        continue
    # Reached after any 'break' above (quit/EOF/interrupt).
    print 'Bye.'
| 105 | |||
def _AddI(project):
  """Run 'git add --interactive' in the given project's work tree and
  wait for it to finish.
  """
  cmd = GitCommand(project, ['add', '--interactive'], bare=False)
  cmd.Wait()
diff --git a/subcmds/start.py b/subcmds/start.py new file mode 100644 index 00000000..4eb3e476 --- /dev/null +++ b/subcmds/start.py | |||
| @@ -0,0 +1,51 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import sys | ||
| 17 | from command import Command | ||
| 18 | from git_command import git | ||
| 19 | |||
class Start(Command):
  """Create a new development branch in one or more projects."""

  common = True
  helpSummary = "Start a new branch for development"
  helpUsage = """
%prog <newbranchname> [<project>...]

This subcommand starts a new branch of development that is automatically
pulled from a remote branch.

It is equivalent to the following git commands:

"git branch --track <newbranchname> m/<codeline>",
or
"git checkout --track -b <newbranchname> m/<codeline>".

All three forms set up the config entries that repo bases some of its
processing on. Use %prog or git branch or checkout with --track to ensure
the configuration data is set up properly.

"""

  def Execute(self, opt, args):
    # The branch name is mandatory; Usage() reports and aborts.
    if not args:
      self.Usage()

    # Validate the name the same way git would ("heads/<name>" must be
    # a well-formed ref) before touching any project.
    branch = args[0]
    if not git.check_ref_format('heads/%s' % branch):
      print >>sys.stderr, "error: '%s' is not a valid name" % branch
      sys.exit(1)

    # Remaining arguments (if any) narrow the set of projects.
    for project in self.GetProjects(args[1:]):
      project.StartBranch(branch)
diff --git a/subcmds/status.py b/subcmds/status.py new file mode 100644 index 00000000..1615b423 --- /dev/null +++ b/subcmds/status.py | |||
| @@ -0,0 +1,27 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | from command import PagedCommand | ||
| 17 | |||
class Status(PagedCommand):
  """Show working tree status for each selected project, paged."""

  common = True
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""

  def Execute(self, opt, args):
    # All formatting is delegated to each project's own status printer.
    for p in self.GetProjects(args):
      p.PrintWorkTreeStatus()
diff --git a/subcmds/sync.py b/subcmds/sync.py new file mode 100644 index 00000000..3eb44edf --- /dev/null +++ b/subcmds/sync.py | |||
| @@ -0,0 +1,150 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import os | ||
| 17 | import re | ||
| 18 | import subprocess | ||
| 19 | import sys | ||
| 20 | |||
| 21 | from git_command import GIT | ||
| 22 | from command import Command | ||
| 23 | from error import RepoChangedException, GitError | ||
| 24 | from project import R_HEADS | ||
| 25 | |||
class Sync(Command):
  """Fetch from the remotes and update local work trees, upgrading
  repo itself and the manifest first when they have changed.
  """

  common = True
  helpSummary = "Update working tree to the latest revision"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest. If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest. If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.

'%prog' will synchronize all projects listed at the command
line. Projects can be specified either by name, or by a relative
or absolute path to the project's local directory. If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.
"""

  def _Options(self, p):
    p.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')

  def _Fetch(self, *projects):
    # Network half: fetch every project; any failure aborts the whole
    # sync.  Returns the set of gitdirs successfully fetched so the
    # caller can detect projects added by a new manifest.
    fetched = set()
    for project in projects:
      if project.Sync_NetworkHalf():
        fetched.add(project.gitdir)
      else:
        print >>sys.stderr, 'error: Cannot fetch %s' % project.name
        sys.exit(1)
    return fetched

  def Execute(self, opt, args):
    rp = self.manifest.repoProject
    rp.PreSync()

    mp = self.manifest.manifestProject
    mp.PreSync()

    # Fetch repo itself, the manifest project, and all work projects
    # in one pass (missing_ok: projects may not exist locally yet).
    all = self.GetProjects(args, missing_ok=True)
    fetched = self._Fetch(rp, mp, *all)

    if rp.HasChanges:
      # A newer repo was fetched.  Unless verification is skipped or
      # fails, update the local half and restart under the new code.
      print >>sys.stderr, 'info: A new version of repo is available'
      print >>sys.stderr, ''
      if opt.no_repo_verify or _VerifyTag(rp):
        if not rp.Sync_LocalHalf():
          sys.exit(1)
        print >>sys.stderr, 'info: Restarting repo with latest version'
        # Caught by the main loop, which re-execs the new repo.
        raise RepoChangedException()
      else:
        print >>sys.stderr, 'warning: Skipped upgrade to unverified version'

    if mp.HasChanges:
      # The manifest changed: apply it, reload, and fetch any projects
      # the new manifest added that the first pass did not cover.
      if not mp.Sync_LocalHalf():
        sys.exit(1)

      self.manifest._Unload()
      all = self.GetProjects(args, missing_ok=True)
      missing = []
      for project in all:
        if project.gitdir not in fetched:
          missing.append(project)
      self._Fetch(*missing)

    # Local half: update every work tree; any failure aborts.
    for project in all:
      if not project.Sync_LocalHalf():
        sys.exit(1)
| 99 | |||
| 100 | |||
def _VerifyTag(project):
  """Verify that the fetched revision of 'project' (repo itself) is an
  exactly-tagged, GPG-signed release.

  Returns True when the update may be trusted (valid signed tag, or no
  local GnuPG keyring to check against), False otherwise.
  """
  gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
  if not os.path.exists(gpg_dir):
    # "repo init" could not set up GnuPG; warn but do not block sync.
    print >>sys.stderr,\
"""warning: GnuPG was not available during last "repo init"
warning: Cannot automatically authenticate repo."""
    return True

  remote = project.GetRemote(project.remote.name)
  ref = remote.ToLocal(project.revision)

  try:
    # Resolve the tracking ref to the nearest annotated tag, if any.
    cur = project.bare_git.describe(ref)
  except GitError:
    cur = None

  if not cur \
     or re.compile(r'^.*-[0-9]+-g[0-9a-f]+$').match(cur):
    # Either no tag reaches the ref, or describe reported commits on
    # top of a tag ("<tag>-<n>-g<sha>"): the head itself is unsigned.
    rev = project.revision
    if rev.startswith(R_HEADS):
      rev = rev[len(R_HEADS):]

    print >>sys.stderr
    print >>sys.stderr,\
      "warning: project '%s' branch '%s' is not signed" \
      % (project.name, rev)
    return False

  env = dict(os.environ)
  env['GIT_DIR'] = project.gitdir
  env['GNUPGHOME'] = gpg_dir

  cmd = [GIT, 'tag', '-v', cur]
  proc = subprocess.Popen(cmd,
                          stdout = subprocess.PIPE,
                          stderr = subprocess.PIPE,
                          env = env)
  # Drain both pipes concurrently.  The previous code read stdout to
  # EOF before touching stderr, which can deadlock once the child
  # fills the stderr pipe buffer while we are still blocked on stdout.
  out, err = proc.communicate()

  if proc.wait() != 0:
    print >>sys.stderr
    print >>sys.stderr, out
    print >>sys.stderr, err
    print >>sys.stderr
    return False
  return True
diff --git a/subcmds/upload.py b/subcmds/upload.py new file mode 100644 index 00000000..ad05050e --- /dev/null +++ b/subcmds/upload.py | |||
| @@ -0,0 +1,180 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2008 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import re | ||
| 17 | import sys | ||
| 18 | |||
| 19 | from command import InteractiveCommand | ||
| 20 | from editor import Editor | ||
| 21 | from error import UploadError | ||
| 22 | |||
def _die(fmt, *args):
  """Report a formatted error on stderr and exit with status 1."""
  sys.stderr.write('error: %s\n' % (fmt % args))
  sys.exit(1)
| 27 | |||
class Upload(InteractiveCommand):
  """Send pending local changes to the Gerrit code review server."""

  common = True
  helpSummary = "Upload changes for code review"
  helpUsage="""
%prog [<project>]...
"""
  helpDescription = """
The '%prog' command is used to send changes to the Gerrit code
review system. It searches for changes in local projects that do
not yet exist in the corresponding remote repository. If multiple
changes are found, '%prog' opens an editor to allow the
user to choose which change to upload. After a successful upload,
repo prints the URL for the change in the Gerrit code review system.

'%prog' searches for uploadable changes in all projects listed
at the command line. Projects can be specified either by name, or
by a relative or absolute path to the project's local directory. If
no projects are specified, '%prog' will search for uploadable
changes in all projects listed in the manifest.
"""

  def _SingleBranch(self, branch):
    # Exactly one branch is pending: show it and ask y/n on stdin.
    project = branch.project
    name = branch.name
    date = branch.date
    # NOTE(review): 'list' shadows the builtin within this method.
    list = branch.commits

    print 'Upload project %s/:' % project.relpath
    print ' branch %s (%2d commit%s, %s):' % (
          name,
          len(list),
          len(list) != 1 and 's' or '',
          date)
    for commit in list:
      print ' %s' % commit

    sys.stdout.write('(y/n)? ')
    answer = sys.stdin.readline().strip()
    # Anything other than an explicit yes aborts the upload.
    if answer in ('y', 'Y', 'yes', '1', 'true', 't'):
      self._UploadAndReport([branch])
    else:
      _die("upload aborted by user")

  def _MultipleBranches(self, pending):
    # Build a commented-out shell-style script listing every pending
    # branch, let the user uncomment the ones to upload in $EDITOR,
    # then parse the result back into branch objects.
    # NOTE(review): 'projects' is keyed by relpath while 'branches' is
    # keyed by project.name — both lookups below match this, but
    # verify the keying stays consistent if either side changes.
    projects = {}
    branches = {}

    script = []
    script.append('# Uncomment the branches to upload:')
    for project, avail in pending:
      script.append('#')
      script.append('# project %s/:' % project.relpath)

      b = {}
      for branch in avail:
        name = branch.name
        date = branch.date
        # NOTE(review): 'list' shadows the builtin within this loop.
        list = branch.commits

        if b:
          # Blank comment line between branches of the same project.
          script.append('#')
        script.append('# branch %s (%2d commit%s, %s):' % (
                      name,
                      len(list),
                      len(list) != 1 and 's' or '',
                      date))
        for commit in list:
          script.append('# %s' % commit)
        b[name] = branch

      projects[project.relpath] = project
      branches[project.name] = b
    script.append('')

    script = Editor.EditString("\n".join(script)).split("\n")

    # A project line may stay commented ('#?'); a branch line counts
    # as selected only once its leading '#' has been removed.
    project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$')
    branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*')

    project = None
    todo = []

    for line in script:
      m = project_re.match(line)
      if m:
        # Track the current project context for subsequent branches.
        name = m.group(1)
        project = projects.get(name)
        if not project:
          _die('project %s not available for upload', name)
        continue

      m = branch_re.match(line)
      if m:
        name = m.group(1)
        if not project:
          # Uncommented branch line appeared before any project line.
          _die('project for branch %s not in script', name)
        branch = branches[project.name].get(name)
        if not branch:
          _die('branch %s not in %s', name, project.relpath)
        todo.append(branch)
    if not todo:
      _die("nothing uncommented for upload")
    self._UploadAndReport(todo)

  def _UploadAndReport(self, todo):
    # Attempt every upload, recording success/failure on each branch
    # object, then print a summary; exit non-zero on any failure.
    have_errors = False
    for branch in todo:
      try:
        branch.UploadForReview()
        branch.uploaded = True
      except UploadError, e:
        branch.error = e
        branch.uploaded = False
        have_errors = True

    print >>sys.stderr, ''
    print >>sys.stderr, '--------------------------------------------'

    if have_errors:
      for branch in todo:
        if not branch.uploaded:
          print >>sys.stderr, '[FAILED] %-15s %-15s (%s)' % (
                 branch.project.relpath + '/', \
                 branch.name, \
                 branch.error)
      print >>sys.stderr, ''

    for branch in todo:
      if branch.uploaded:
        print >>sys.stderr, '[OK ] %-15s %s' % (
               branch.project.relpath + '/',
               branch.name)
        print >>sys.stderr, '%s' % branch.tip_url
    print >>sys.stderr, ''

    if have_errors:
      sys.exit(1)

  def Execute(self, opt, args):
    project_list = self.GetProjects(args)
    pending = []

    # Collect (project, uploadable-branches) pairs.
    for project in project_list:
      avail = project.GetUploadableBranches()
      if avail:
        pending.append((project, avail))

    if not pending:
      print >>sys.stdout, "no branches ready for upload"
    elif len(pending) == 1 and len(pending[0][1]) == 1:
      # Single branch in a single project: simple y/n confirmation.
      self._SingleBranch(pending[0][1][0])
    else:
      # Multiple candidates: let the user pick via an editor script.
      self._MultipleBranches(pending)
