 -rw-r--r--  docs/manifest-format.txt  |   8
 -rw-r--r--  error.py                  |   7
 -rw-r--r--  git_config.py             |  54
 -rwxr-xr-x  main.py                   |  27
 -rw-r--r--  manifest_xml.py           |  46
 -rw-r--r--  project.py                | 146
 -rwxr-xr-x  repo                      |  16
 -rw-r--r--  subcmds/forall.py         | 297
 -rw-r--r--  subcmds/status.py         |   6
 -rw-r--r--  subcmds/sync.py           |  47
 -rw-r--r--  subcmds/upload.py         |   6
 11 files changed, 408 insertions(+), 252 deletions(-)
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index e48b75fe..f187bfaf 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@ following DTD:
 <!ATTLIST remote alias CDATA #IMPLIED>
 <!ATTLIST remote fetch CDATA #REQUIRED>
 <!ATTLIST remote review CDATA #IMPLIED>
+<!ATTLIST remote revision CDATA #IMPLIED>
 
 <!ELEMENT default (EMPTY)>
 <!ATTLIST default remote IDREF #IMPLIED>
@@ -112,6 +113,10 @@ Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`. This attribute is optional;
 if not specified then `repo upload` will not function.
 
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
 Element default
 ---------------
 
@@ -208,7 +213,8 @@ to track for this project. Names can be relative to refs/heads
 (e.g. just "master") or absolute (e.g. "refs/heads/master").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
 
 Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
 When using `repo upload`, changes will be submitted for code
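
The documentation change above defines the lookup order for a project's
revision: the project's own `revision` attribute wins, then the remote's
`revision`, then the default element's. A small self-contained sketch of
that precedence; the manifest snippet and helper below are illustrative
only and are not part of this change:

    from __future__ import print_function
    # Hypothetical manifest; mirrors the documented precedence:
    # project revision > remote revision > default revision.
    import xml.dom.minidom

    MANIFEST = """
    <manifest>
      <remote name="aosp" fetch=".." revision="refs/heads/stable"/>
      <default remote="aosp" revision="master"/>
      <project name="platform/art"/>
      <project name="platform/build" revision="refs/heads/my-build"/>
    </manifest>
    """

    def effective_revision(project, remote, default):
      # Same fallback chain manifest_xml.py uses:
      # node.getAttribute('revision') or remote.revision or the default.
      return (project.getAttribute('revision')
              or remote.getAttribute('revision')
              or default.getAttribute('revision'))

    doc = xml.dom.minidom.parseString(MANIFEST)
    remote = doc.getElementsByTagName('remote')[0]
    default = doc.getElementsByTagName('default')[0]
    for p in doc.getElementsByTagName('project'):
      print(p.getAttribute('name'), effective_revision(p, remote, default))
    # platform/art refs/heads/stable     (remote revision overrides the default)
    # platform/build refs/heads/my-build (project revision wins)
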
diff --git a/error.py b/error.py
--- a/error.py
+++ b/error.py
@@ -24,6 +24,13 @@ class ManifestInvalidRevisionError(Exception):
 class NoManifestException(Exception):
   """The required manifest does not exist.
   """
+  def __init__(self, path, reason):
+    super(NoManifestException, self).__init__()
+    self.path = path
+    self.reason = reason
+
+  def __str__(self):
+    return self.reason
 
 class EditorError(Exception):
   """Unspecified error from the user's text editor.
diff --git a/git_config.py b/git_config.py
index c1a6b55e..a667b3f8 100644
--- a/git_config.py
+++ b/git_config.py
@@ -15,8 +15,8 @@
 
 from __future__ import print_function
 
+import json
 import os
-import pickle
 import re
 import subprocess
 import sys
@@ -80,7 +80,7 @@ class GitConfig(object):
     return cls(configfile = os.path.join(gitdir, 'config'),
                defaults = defaults)
 
-  def __init__(self, configfile, defaults=None, pickleFile=None):
+  def __init__(self, configfile, defaults=None, jsonFile=None):
     self.file = configfile
     self.defaults = defaults
     self._cache_dict = None
@@ -88,12 +88,11 @@ class GitConfig(object):
     self._remotes = {}
     self._branches = {}
 
-    if pickleFile is None:
-      self._pickle = os.path.join(
+    self._json = jsonFile
+    if self._json is None:
+      self._json = os.path.join(
         os.path.dirname(self.file),
-        '.repopickle_' + os.path.basename(self.file))
-    else:
-      self._pickle = pickleFile
+        '.repo_' + os.path.basename(self.file) + '.json')
 
   def Has(self, name, include_defaults = True):
     """Return true if this configuration file has the key.
@@ -248,50 +247,41 @@ class GitConfig(object):
     return self._cache_dict
 
   def _Read(self):
-    d = self._ReadPickle()
+    d = self._ReadJson()
     if d is None:
       d = self._ReadGit()
-      self._SavePickle(d)
+      self._SaveJson(d)
     return d
 
-  def _ReadPickle(self):
+  def _ReadJson(self):
     try:
-      if os.path.getmtime(self._pickle) \
+      if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._pickle)
+        os.remove(self._json)
         return None
     except OSError:
       return None
     try:
-      Trace(': unpickle %s', self.file)
-      fd = open(self._pickle, 'rb')
+      Trace(': parsing %s', self.file)
+      fd = open(self._json)
       try:
-        return pickle.load(fd)
+        return json.load(fd)
       finally:
         fd.close()
-    except EOFError:
-      os.remove(self._pickle)
-      return None
-    except IOError:
-      os.remove(self._pickle)
-      return None
-    except pickle.PickleError:
-      os.remove(self._pickle)
+    except (IOError, ValueError):
+      os.remove(self._json)
       return None
 
-  def _SavePickle(self, cache):
+  def _SaveJson(self, cache):
     try:
-      fd = open(self._pickle, 'wb')
+      fd = open(self._json, 'w')
       try:
-        pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+        json.dump(cache, fd, indent=2)
       finally:
         fd.close()
-    except IOError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
-    except pickle.PickleError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
+    except (IOError, TypeError):
+      if os.path.exists(self._json):
+        os.remove(self._json)
 
   def _ReadGit(self):
     """
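
For readers skimming the git_config.py change: the new code caches the
parsed git config in a `.repo_<name>.json` file next to the config and
throws the cache away whenever the config file's mtime is newer. A minimal
standalone sketch of that mtime-invalidated cache pattern; the function
and file names here are made up for illustration and are not part of repo:

    import json
    import os

    def load_with_cache(path, cache_path, expensive_parse):
      """Return parsed data for path, reusing cache_path unless path is newer."""
      try:
        if os.path.getmtime(cache_path) > os.path.getmtime(path):
          fd = open(cache_path)
          try:
            return json.load(fd)
          finally:
            fd.close()
      except (OSError, IOError, ValueError):
        pass  # missing or corrupt cache: fall through and rebuild it
      data = expensive_parse(path)  # e.g. running `git config --list` and parsing it
      try:
        fd = open(cache_path, 'w')
        try:
          json.dump(data, fd, indent=2)
        finally:
          fd.close()
      except (IOError, TypeError):
        if os.path.exists(cache_path):
          os.remove(cache_path)  # never leave a half-written cache behind
      return data
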
diff --git a/main.py b/main.py
--- a/main.py
+++ b/main.py
@@ -129,8 +129,15 @@ class _Repo(object):
             file=sys.stderr)
       return 1
 
-    copts, cargs = cmd.OptionParser.parse_args(argv)
-    copts = cmd.ReadEnvironmentOptions(copts)
+    try:
+      copts, cargs = cmd.OptionParser.parse_args(argv)
+      copts = cmd.ReadEnvironmentOptions(copts)
+    except NoManifestException as e:
+      print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+        file=sys.stderr)
+      print('error: manifest missing or unreadable -- please run init',
+            file=sys.stderr)
+      return 1
 
     if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
       config = cmd.manifest.globalConfig
@@ -146,15 +153,13 @@ class _Repo(object):
     start = time.time()
     try:
       result = cmd.Execute(copts, cargs)
-    except DownloadError as e:
-      print('error: %s' % str(e), file=sys.stderr)
-      result = 1
-    except ManifestInvalidRevisionError as e:
-      print('error: %s' % str(e), file=sys.stderr)
-      result = 1
-    except NoManifestException as e:
-      print('error: manifest required for this command -- please run init',
-            file=sys.stderr)
+    except (DownloadError, ManifestInvalidRevisionError,
+        NoManifestException) as e:
+      print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+        file=sys.stderr)
+      if isinstance(e, NoManifestException):
+        print('error: manifest missing or unreadable -- please run init',
+              file=sys.stderr)
       result = 1
     except NoSuchProjectError as e:
       if e.name:
diff --git a/manifest_xml.py b/manifest_xml.py
index 3c8fadd6..fdc31778 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -63,12 +63,14 @@ class _XmlRemote(object):
                alias=None,
                fetch=None,
                manifestUrl=None,
-               review=None):
+               review=None,
+               revision=None):
     self.name = name
     self.fetchUrl = fetch
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
+    self.revision = revision
     self.resolvedFetchUrl = self._resolveFetchUrl()
 
   def __eq__(self, other):
@@ -159,6 +161,8 @@ class XmlManifest(object):
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
       e.setAttribute('review', r.reviewUrl)
+    if r.revision is not None:
+      e.setAttribute('revision', r.revision)
 
   def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
     """Write the current manifest out to the given file descriptor.
@@ -240,7 +244,8 @@ class XmlManifest(object):
       if d.remote:
         remoteName = d.remote.remoteAlias or d.remote.name
       if not d.remote or p.remote.name != remoteName:
-        e.setAttribute('remote', p.remote.name)
+        remoteName = p.remote.name
+        e.setAttribute('remote', remoteName)
       if peg_rev:
         if self.IsMirror:
           value = p.bare_git.rev_parse(p.revisionExpr + '^0')
@@ -252,8 +257,10 @@ class XmlManifest(object):
           # isn't our value, and the if the default doesn't already have that
           # covered.
           e.setAttribute('upstream', p.revisionExpr)
-      elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
-        e.setAttribute('revision', p.revisionExpr)
+      else:
+        revision = self.remotes[remoteName].revision or d.revisionExpr
+        if not revision or revision != p.revisionExpr:
+          e.setAttribute('revision', p.revisionExpr)
 
       for c in p.copyfiles:
         ce = doc.createElement('copyfile')
@@ -261,6 +268,12 @@ class XmlManifest(object):
         ce.setAttribute('dest', c.dest)
         e.appendChild(ce)
 
+      for l in p.linkfiles:
+        le = doc.createElement('linkfile')
+        le.setAttribute('src', l.src)
+        le.setAttribute('dest', l.dest)
+        e.appendChild(le)
+
       default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
       egroups = [g for g in p.groups if g not in default_groups]
       if egroups:
@@ -304,7 +317,7 @@ class XmlManifest(object):
   @property
   def projects(self):
     self._Load()
-    return self._paths.values()
+    return list(self._paths.values())
 
   @property
   def remotes(self):
@@ -586,8 +599,11 @@ class XmlManifest(object):
     review = node.getAttribute('review')
     if review == '':
       review = None
+    revision = node.getAttribute('revision')
+    if revision == '':
+      revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review)
+    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
 
   def _ParseDefault(self, node):
     """
@@ -680,7 +696,7 @@ class XmlManifest(object):
       raise ManifestParseError("no remote for project %s within %s" %
            (name, self.manifestFile))
 
-    revisionExpr = node.getAttribute('revision')
+    revisionExpr = node.getAttribute('revision') or remote.revision
     if not revisionExpr:
       revisionExpr = self._default.revisionExpr
       if not revisionExpr:
@@ -765,6 +781,8 @@ class XmlManifest(object):
     for n in node.childNodes:
       if n.nodeName == 'copyfile':
         self._ParseCopyFile(project, n)
+      if n.nodeName == 'linkfile':
+        self._ParseLinkFile(project, n)
       if n.nodeName == 'annotation':
         self._ParseAnnotation(project, n)
       if n.nodeName == 'project':
@@ -814,6 +832,14 @@ class XmlManifest(object):
       # dest is relative to the top of the tree
       project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
 
+  def _ParseLinkFile(self, project, node):
+    src = self._reqatt(node, 'src')
+    dest = self._reqatt(node, 'dest')
+    if not self.IsMirror:
+      # src is project relative;
+      # dest is relative to the top of the tree
+      project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
   def _ParseAnnotation(self, project, node):
     name = self._reqatt(node, 'name')
     value = self._reqatt(node, 'value')
@@ -856,10 +882,8 @@ class XmlManifest(object):
     fromProjects = self.paths
     toProjects = manifest.paths
 
-    fromKeys = fromProjects.keys()
-    fromKeys.sort()
-    toKeys = toProjects.keys()
-    toKeys.sort()
+    fromKeys = sorted(fromProjects.keys())
+    toKeys = sorted(toProjects.keys())
 
     diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
 
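
The new `<linkfile>` element is parsed like `<copyfile>`: `src` is resolved
relative to the project worktree, `dest` relative to the top of the client
tree, and project.py (next file) then maintains a symlink at `dest`
pointing at the absolute `src`. A rough standalone sketch of that
behaviour, with hypothetical paths rather than repo's actual entry points:

    import os

    def link_file(worktree, topdir, src, dest):
      """Roughly what repo does for a <linkfile src=... dest=...> element."""
      abs_src = os.path.join(worktree, src)   # src is project relative
      abs_dest = os.path.join(topdir, dest)   # dest is relative to the tree top
      # Re-link only when missing or pointing somewhere else.
      if not os.path.islink(abs_dest) or os.readlink(abs_dest) != abs_src:
        if os.path.exists(abs_dest):
          os.remove(abs_dest)                 # replace a stale file or link
        else:
          dest_dir = os.path.dirname(abs_dest)
          if not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
        os.symlink(abs_src, abs_dest)

    # e.g. link_file('/src/platform/build', '/src', 'core/root.mk', 'Makefile')
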
diff --git a/project.py b/project.py
--- a/project.py
+++ b/project.py
@@ -231,14 +231,40 @@ class _CopyFile:
       except IOError:
         _error('Cannot copy file %s to %s', src, dest)
 
+class _LinkFile:
+  def __init__(self, src, dest, abssrc, absdest):
+    self.src = src
+    self.dest = dest
+    self.abs_src = abssrc
+    self.abs_dest = absdest
+
+  def _Link(self):
+    src = self.abs_src
+    dest = self.abs_dest
+    # link file if it does not exist or is out of date
+    if not os.path.islink(dest) or os.readlink(dest) != src:
+      try:
+        # remove existing file first, since it might be read-only
+        if os.path.exists(dest):
+          os.remove(dest)
+        else:
+          dest_dir = os.path.dirname(dest)
+          if not os.path.isdir(dest_dir):
+            os.makedirs(dest_dir)
+        os.symlink(src, dest)
+      except IOError:
+        _error('Cannot link file %s to %s', src, dest)
+
 class RemoteSpec(object):
   def __init__(self,
                name,
                url = None,
-               review = None):
+               review = None,
+               revision = None):
     self.name = name
     self.url = url
     self.review = review
+    self.revision = revision
 
 class RepoHook(object):
   """A RepoHook contains information about a script to run as a hook.
@@ -414,7 +440,8 @@ class RepoHook(object):
     # and convert to a HookError w/ just the failing traceback.
     context = {}
     try:
-      execfile(self._script_fullpath, context)
+      exec(compile(open(self._script_fullpath).read(),
+                   self._script_fullpath, 'exec'), context)
     except Exception:
       raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
                       traceback.format_exc(), self._hook_type))
@@ -555,6 +582,7 @@ class Project(object):
 
     self.snapshots = {}
     self.copyfiles = []
+    self.linkfiles = []
    self.annotations = []
     self.config = GitConfig.ForRepository(
         gitdir = self.gitdir,
@@ -1040,7 +1068,7 @@ class Project(object):
       except OSError as e:
         print("warn: Cannot remove archive %s: "
               "%s" % (tarpath, str(e)), file=sys.stderr)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return True
 
     if is_new is None:
@@ -1078,17 +1106,12 @@ class Project(object):
     elif self.manifest.default.sync_c:
       current_branch_only = True
 
-    is_sha1 = False
-    if ID_RE.match(self.revisionExpr) is not None:
-      is_sha1 = True
-    if is_sha1 and self._CheckForSha1():
-      # Don't need to fetch since we already have this revision
-      return True
-
-    if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
-                             current_branch_only=current_branch_only,
-                             no_tags=no_tags):
-      return False
+    has_sha1 = ID_RE.match(self.revisionExpr) and self._CheckForSha1()
+    if (not has_sha1  #Need to fetch since we don't already have this revision
+        and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
+                                  current_branch_only=current_branch_only,
+                                  no_tags=no_tags)):
+      return False
 
     if self.worktree:
       self._InitMRef()
@@ -1103,9 +1126,11 @@ class Project(object):
   def PostRepoUpgrade(self):
     self._InitHooks()
 
-  def _CopyFiles(self):
+  def _CopyAndLinkFiles(self):
     for copyfile in self.copyfiles:
       copyfile._Copy()
+    for linkfile in self.linkfiles:
+      linkfile._Link()
 
   def GetCommitRevisionId(self):
     """Get revisionId of a commit.
@@ -1152,7 +1177,7 @@ class Project(object):
 
     def _doff():
       self._FastForward(revid)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
 
     head = self.work_git.GetHead()
     if head.startswith(R_HEADS):
@@ -1188,7 +1213,7 @@ class Project(object):
       except GitError as e:
        syncbuf.fail(self, e)
        return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
      return
 
    if head == revid:
@@ -1210,7 +1235,7 @@ class Project(object):
      except GitError as e:
        syncbuf.fail(self, e)
        return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
      return
 
    upstream_gain = self._revlist(not_rev(HEAD), revid)
@@ -1283,12 +1308,12 @@ class Project(object):
    if cnt_mine > 0 and self.rebase:
      def _dorebase():
        self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
-        self._CopyFiles()
+        self._CopyAndLinkFiles()
      syncbuf.later2(self, _dorebase)
    elif local_changes:
      try:
        self._ResetHard(revid)
-        self._CopyFiles()
+        self._CopyAndLinkFiles()
      except GitError as e:
        syncbuf.fail(self, e)
        return
@@ -1301,6 +1326,12 @@ class Project(object):
    abssrc = os.path.join(self.worktree, src)
    self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
 
+  def AddLinkFile(self, src, dest, absdest):
+    # dest should already be an absolute path, but src is project relative
+    # make src an absolute path
+    abssrc = os.path.join(self.worktree, src)
+    self.linkfiles.append(_LinkFile(src, dest, abssrc, absdest))
+
   def AddAnnotation(self, name, value, keep):
     self.annotations.append(_Annotation(name, value, keep))
 
@@ -1629,7 +1660,8 @@ class Project(object):
 
       remote = RemoteSpec(self.remote.name,
                           url = url,
-                          review = self.remote.review)
+                          review = self.remote.review,
+                          revision = self.remote.revision)
       subproject = Project(manifest = self.manifest,
                            name = name,
                            remote = remote,
@@ -1674,6 +1706,7 @@ class Project(object):
     if command.Wait() != 0:
       raise GitError('git archive %s: %s' % (self.name, command.stderr))
 
+
   def _RemoteFetch(self, name=None,
                    current_branch_only=False,
                    initial=False,
@@ -1683,11 +1716,17 @@ class Project(object):
 
     is_sha1 = False
     tag_name = None
+    depth = None
+
+    # The depth should not be used when fetching to a mirror because
+    # it will result in a shallow repository that cannot be cloned or
+    # fetched from.
+    if not self.manifest.IsMirror:
+      if self.clone_depth:
+        depth = self.clone_depth
+      else:
+        depth = self.manifest.manifestProject.config.GetString('repo.depth')
 
-    if self.clone_depth:
-      depth = self.clone_depth
-    else:
-      depth = self.manifest.manifestProject.config.GetString('repo.depth')
     if depth:
       current_branch_only = True
 
@@ -1763,26 +1802,37 @@ class Project(object):
     cmd.append('--update-head-ok')
     cmd.append(name)
 
+    # If using depth then we should not get all the tags since they may
+    # be outside of the depth.
+    if no_tags or depth:
+      cmd.append('--no-tags')
+    else:
+      cmd.append('--tags')
+
+    spec = []
     if not current_branch_only:
       # Fetch whole repo
-      # If using depth then we should not get all the tags since they may
-      # be outside of the depth.
-      if no_tags or depth:
-        cmd.append('--no-tags')
-      else:
-        cmd.append('--tags')
-
-      cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
+      spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
     elif tag_name is not None:
-      cmd.append('tag')
-      cmd.append(tag_name)
+      spec.append('tag')
+      spec.append(tag_name)
     else:
       branch = self.revisionExpr
       if is_sha1:
         branch = self.upstream
       if branch.startswith(R_HEADS):
         branch = branch[len(R_HEADS):]
-      cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+      spec.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+    cmd.extend(spec)
+
+    shallowfetch = self.config.GetString('repo.shallowfetch')
+    if shallowfetch and shallowfetch != ' '.join(spec):
+      GitCommand(self, ['fetch', '--unshallow', name] + shallowfetch.split(),
+                 bare=True, ssh_proxy=ssh_proxy).Wait()
+    if depth:
+      self.config.SetString('repo.shallowfetch', ' '.join(spec))
+    else:
+      self.config.SetString('repo.shallowfetch', None)
 
     ok = False
     for _i in range(2):
@@ -1801,7 +1851,7 @@ class Project(object):
       # Ensure that some refs exist. Otherwise, we probably aren't looking
       # at a real git repository and may have a bad url.
       if not self.bare_ref.all:
-        ok = False
+        ok = False
 
     if alt_dir:
       if old_packed != '':
@@ -2147,7 +2197,7 @@ class Project(object):
     symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
     if share_refs:
       # These objects can only be used by a single working tree.
-      symlink_files += ['config', 'packed-refs']
+      symlink_files += ['config', 'packed-refs', 'shallow']
       symlink_dirs += ['logs', 'refs']
     to_symlink = symlink_files + symlink_dirs
 
@@ -2167,6 +2217,14 @@ class Project(object):
         if name in symlink_dirs and not os.path.lexists(src):
           os.makedirs(src)
 
+        # If the source file doesn't exist, ensure the destination
+        # file doesn't either.
+        if name in symlink_files and not os.path.lexists(src):
+          try:
+            os.remove(dst)
+          except OSError:
+            pass
+
         if name in to_symlink:
           os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
         elif copy_all and not os.path.islink(dst):
@@ -2195,7 +2253,7 @@ class Project(object):
       if GitCommand(self, cmd).Wait() != 0:
         raise GitError("cannot initialize work tree")
 
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
 
   def _gitdir_path(self, path):
     return os.path.realpath(os.path.join(self.gitdir, path))
@@ -2287,8 +2345,8 @@ class Project(object):
       out = iter(out[:-1].split('\0'))  # pylint: disable=W1401
       while out:
         try:
-          info = out.next()
-          path = out.next()
+          info = next(out)
+          path = next(out)
         except StopIteration:
           break
 
@@ -2314,7 +2372,7 @@ class Project(object):
         info = _Info(path, *info)
         if info.status in ('R', 'C'):
           info.src_path = info.path
-          info.path = out.next()
+          info.path = next(out)
         r[info.path] = info
       return r
     finally:
@@ -2327,8 +2385,8 @@ class Project(object):
         path = os.path.join(self._project.worktree, '.git', HEAD)
       try:
         fd = open(path, 'rb')
-      except IOError:
-        raise NoManifestException(path)
+      except IOError as e:
+        raise NoManifestException(path, str(e))
       try:
         line = fd.read()
       finally:
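
One behavioural change worth calling out in project.py: when the manifest
pins an exact SHA-1 and that object is already in the local repository,
Sync_NetworkHalf now skips the network fetch entirely (the has_sha1 check
above). The helper it relies on, _CheckForSha1, is not shown in this diff;
the sketch below is only a hedged approximation of such a presence test
using plain git, not repo's actual implementation:

    import os
    import re
    import subprocess

    ID_RE = re.compile(r'^[0-9a-f]{40}$')  # a full 40-character SHA-1

    def have_revision_locally(gitdir, revision_expr):
      """True if revision_expr is a pinned SHA-1 already present in gitdir."""
      if not ID_RE.match(revision_expr):
        return False  # branches and tags still need a fetch
      devnull = open(os.devnull, 'w')
      try:
        # `git cat-file -e <object>` exits 0 when the object exists locally.
        rc = subprocess.call(
          ['git', '--git-dir', gitdir, 'cat-file', '-e',
           revision_expr + '^{commit}'],
          stdout=devnull, stderr=devnull)
      finally:
        devnull.close()
      return rc == 0
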
diff --git a/repo b/repo
--- a/repo
+++ b/repo
@@ -114,6 +114,7 @@ import errno
 import optparse
 import os
 import re
+import shutil
 import stat
 import subprocess
 import sys
@@ -138,10 +139,6 @@ def _print(*objects, **kwargs):
 
 # Python version check
 ver = sys.version_info
-if ver[0] == 3:
-  _print('warning: Python 3 support is currently experimental. YMMV.\n'
-         'Please use Python 2.6 - 2.7 instead.',
-         file=sys.stderr)
 if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
   _print('error: Python version %s unsupported.\n'
          'Please use Python 2.6 - 2.7 instead.'
@@ -741,12 +738,7 @@ def main(orig_args):
     try:
       _Init(args)
     except CloneFailure:
-      for root, dirs, files in os.walk(repodir, topdown=False):
-        for name in files:
-          os.remove(os.path.join(root, name))
-        for name in dirs:
-          os.rmdir(os.path.join(root, name))
-      os.rmdir(repodir)
+      shutil.rmtree(repodir, ignore_errors=True)
       sys.exit(1)
     repo_main, rel_repo_dir = _FindRepo()
   else:
@@ -772,4 +764,8 @@ def main(orig_args):
 
 
 if __name__ == '__main__':
+  if ver[0] == 3:
+    _print('warning: Python 3 support is currently experimental. YMMV.\n'
+           'Please use Python 2.6 - 2.7 instead.',
+           file=sys.stderr)
   main(sys.argv[1:])
diff --git a/subcmds/forall.py b/subcmds/forall.py
index e2a420a9..7771ec16 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -14,7 +14,9 @@
 # limitations under the License.
 
 from __future__ import print_function
+import errno
 import fcntl
+import multiprocessing
 import re
 import os
 import select
@@ -31,6 +33,7 @@ _CAN_COLOR = [
   'log',
 ]
 
+
 class ForallColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'forall')
@@ -87,6 +90,12 @@ revision to a locally executed git command, use REPO_LREV.
 REPO_RREV is the name of the revision from the manifest, exactly
 as written in the manifest.
 
+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
+
 REPO__* are any extra environment variables, specified by the
 "annotation" element under any project element. This can be useful
 for differentiating trees based on user-specific criteria, or simply
@@ -126,9 +135,31 @@ without iterating through the remaining projects.
     g.add_option('-v', '--verbose',
                  dest='verbose', action='store_true',
                  help='Show command error messages')
+    g.add_option('-j', '--jobs',
+                 dest='jobs', action='store', type='int', default=1,
+                 help='number of commands to execute simultaneously')
 
   def WantPager(self, opt):
-    return opt.project_header
+    return opt.project_header and opt.jobs == 1
+
+  def _SerializeProject(self, project):
+    """ Serialize a project._GitGetByExec instance.
+
+    project._GitGetByExec is not pickle-able. Instead of trying to pass it
+    around between processes, make a dict ourselves containing only the
+    attributes that we need.
+
+    """
+    return {
+      'name': project.name,
+      'relpath': project.relpath,
+      'remote_name': project.remote.name,
+      'lrev': project.GetRevisionId(),
+      'rrev': project.revisionExpr,
+      'annotations': dict((a.name, a.value) for a in project.annotations),
+      'gitdir': project.gitdir,
+      'worktree': project.worktree,
+    }
 
   def Execute(self, opt, args):
     if not opt.command:
@@ -167,123 +198,165 @@ without iterating through the remaining projects.
     # pylint: enable=W0631
 
     mirror = self.manifest.IsMirror
-    out = ForallColoring(self.manifest.manifestProject.config)
-    out.redirect(sys.stdout)
-
     rc = 0
-    first = True
 
     if not opt.regex:
       projects = self.GetProjects(args)
     else:
       projects = self.FindProjects(args)
 
-    for project in projects:
-      env = os.environ.copy()
-      def setenv(name, val):
-        if val is None:
-          val = ''
-        env[name] = val.encode()
-
-      setenv('REPO_PROJECT', project.name)
-      setenv('REPO_PATH', project.relpath)
-      setenv('REPO_REMOTE', project.remote.name)
-      setenv('REPO_LREV', project.GetRevisionId())
-      setenv('REPO_RREV', project.revisionExpr)
-      for a in project.annotations:
-        setenv("REPO__%s" % (a.name), a.value)
-
-      if mirror:
-        setenv('GIT_DIR', project.gitdir)
-        cwd = project.gitdir
-      else:
-        cwd = project.worktree
-
-      if not os.path.exists(cwd):
-        if (opt.project_header and opt.verbose) \
-        or not opt.project_header:
-          print('skipping %s/' % project.relpath, file=sys.stderr)
-        continue
-
-      if opt.project_header:
-        stdin = subprocess.PIPE
-        stdout = subprocess.PIPE
-        stderr = subprocess.PIPE
-      else:
-        stdin = None
-        stdout = None
-        stderr = None
-
-      p = subprocess.Popen(cmd,
-                           cwd = cwd,
-                           shell = shell,
-                           env = env,
-                           stdin = stdin,
-                           stdout = stdout,
-                           stderr = stderr)
-
-      if opt.project_header:
-        class sfd(object):
-          def __init__(self, fd, dest):
-            self.fd = fd
-            self.dest = dest
-          def fileno(self):
-            return self.fd.fileno()
-
-        empty = True
-        errbuf = ''
-
-        p.stdin.close()
-        s_in = [sfd(p.stdout, sys.stdout),
-                sfd(p.stderr, sys.stderr)]
-
-        for s in s_in:
-          flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-          fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-        while s_in:
-          in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
-          for s in in_ready:
-            buf = s.fd.read(4096)
-            if not buf:
-              s.fd.close()
-              s_in.remove(s)
-              continue
-
-            if not opt.verbose:
-              if s.fd != p.stdout:
-                errbuf += buf
-                continue
-
-            if empty:
-              if first:
-                first = False
-              else:
-                out.nl()
-
-              if mirror:
-                project_header_path = project.name
-              else:
-                project_header_path = project.relpath
-              out.project('project %s/', project_header_path)
-              out.nl()
-              out.flush()
-              if errbuf:
-                sys.stderr.write(errbuf)
-                sys.stderr.flush()
-                errbuf = ''
-              empty = False
-
-            s.dest.write(buf)
-            s.dest.flush()
-
-      r = p.wait()
-      if r != 0:
-        if r != rc:
-          rc = r
-        if opt.abort_on_errors:
-          print("error: %s: Aborting due to previous error" % project.relpath,
-                file=sys.stderr)
-          sys.exit(r)
+    os.environ['REPO_COUNT'] = str(len(projects))
+
+    pool = multiprocessing.Pool(opt.jobs)
+    try:
+      config = self.manifest.manifestProject.config
+      results_it = pool.imap(
+         DoWorkWrapper,
+         [[mirror, opt, cmd, shell, cnt, config, self._SerializeProject(p)]
+          for cnt, p in enumerate(projects)]
+      )
+      pool.close()
+      for r in results_it:
+        rc = rc or r
+        if r != 0 and opt.abort_on_errors:
+          raise Exception('Aborting due to previous error')
+    except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+      # Catch KeyboardInterrupt raised inside and outside of workers
+      print('Interrupted - terminating the pool')
+      pool.terminate()
+      rc = rc or errno.EINTR
+    except Exception as e:
+      # Catch any other exceptions raised
+      print('Got an error, terminating the pool: %r' % e,
+            file=sys.stderr)
+      pool.terminate()
+      rc = rc or getattr(e, 'errno', 1)
+    finally:
+      pool.join()
     if rc != 0:
       sys.exit(rc)
+
+
+class WorkerKeyboardInterrupt(Exception):
+  """ Keyboard interrupt exception for worker processes. """
+  pass
+
+
+def DoWorkWrapper(args):
+  """ A wrapper around the DoWork() method.
+
+  Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+  ``Exception``-based exception to stop it flooding the console with stacktraces
+  and making the parent hang indefinitely.
+
+  """
+  project = args.pop()
+  try:
+    return DoWork(project, *args)
+  except KeyboardInterrupt:
+    print('%s: Worker interrupted' % project['name'])
+    raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+  env = os.environ.copy()
+  def setenv(name, val):
+    if val is None:
+      val = ''
+    env[name] = val.encode()
+
+  setenv('REPO_PROJECT', project['name'])
+  setenv('REPO_PATH', project['relpath'])
+  setenv('REPO_REMOTE', project['remote_name'])
+  setenv('REPO_LREV', project['lrev'])
+  setenv('REPO_RREV', project['rrev'])
+  setenv('REPO_I', str(cnt + 1))
+  for name in project['annotations']:
+    setenv("REPO__%s" % (name), project['annotations'][name])
+
+  if mirror:
+    setenv('GIT_DIR', project['gitdir'])
+    cwd = project['gitdir']
+  else:
+    cwd = project['worktree']
+
+  if not os.path.exists(cwd):
+    if (opt.project_header and opt.verbose) \
+    or not opt.project_header:
+      print('skipping %s/' % project['relpath'], file=sys.stderr)
+    return
+
+  if opt.project_header:
+    stdin = subprocess.PIPE
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
+  else:
+    stdin = None
+    stdout = None
+    stderr = None
+
+  p = subprocess.Popen(cmd,
+                       cwd=cwd,
+                       shell=shell,
+                       env=env,
+                       stdin=stdin,
+                       stdout=stdout,
+                       stderr=stderr)
+
+  if opt.project_header:
+    out = ForallColoring(config)
+    out.redirect(sys.stdout)
+    class sfd(object):
+      def __init__(self, fd, dest):
+        self.fd = fd
+        self.dest = dest
+      def fileno(self):
+        return self.fd.fileno()
+
+    empty = True
+    errbuf = ''
+
+    p.stdin.close()
+    s_in = [sfd(p.stdout, sys.stdout),
+            sfd(p.stderr, sys.stderr)]
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s.fd.close()
+          s_in.remove(s)
+          continue
+
+        if not opt.verbose:
+          if s.fd != p.stdout:
+            errbuf += buf
+            continue
+
+        if empty and out:
+          if not cnt == 0:
+            out.nl()
+
+          if mirror:
+            project_header_path = project['name']
+          else:
+            project_header_path = project['relpath']
+          out.project('project %s/', project_header_path)
+          out.nl()
+          out.flush()
+          if errbuf:
+            sys.stderr.write(errbuf)
+            sys.stderr.flush()
+            errbuf = ''
+          empty = False
+
+        s.dest.write(buf)
+        s.dest.flush()
+
+  r = p.wait()
+  return r
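
The forall rewrite above fans the per-project work out to a
multiprocessing pool and converts KeyboardInterrupt inside workers into a
custom exception so the parent can shut the pool down cleanly instead of
hanging. A stripped-down sketch of that pattern; do_work here is only a
stand-in for repo's DoWork, not the real thing:

    from __future__ import print_function
    import errno
    import multiprocessing
    import sys

    class WorkerKeyboardInterrupt(Exception):
      """Raised in place of KeyboardInterrupt inside a worker process."""

    def do_work(item):
      # Stand-in for the real per-project command; returns an exit status.
      return 0 if item % 2 == 0 else 1

    def work_wrapper(item):
      try:
        return do_work(item)
      except KeyboardInterrupt:
        raise WorkerKeyboardInterrupt()

    def run(items, jobs=2):
      rc = 0
      pool = multiprocessing.Pool(jobs)
      try:
        results = pool.imap(work_wrapper, items)
        pool.close()
        for r in results:
          rc = rc or r  # keep the first non-zero status
      except (KeyboardInterrupt, WorkerKeyboardInterrupt):
        print('Interrupted - terminating the pool')
        pool.terminate()
        rc = rc or errno.EINTR
      except Exception:
        pool.terminate()  # any other worker failure: stop outstanding jobs
        raise
      finally:
        pool.join()
      return rc

    if __name__ == '__main__':
      sys.exit(run(list(range(10))))
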
diff --git a/subcmds/status.py b/subcmds/status.py
index 41c4429a..b42675e0 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -113,7 +113,7 @@ the following meanings:
     try:
       state = project.PrintWorkTreeStatus(output)
       if state == 'CLEAN':
-        clean_counter.next()
+        next(clean_counter)
     finally:
       sem.release()
 
@@ -141,7 +141,7 @@ the following meanings:
       for project in all_projects:
         state = project.PrintWorkTreeStatus()
         if state == 'CLEAN':
-          counter.next()
+          next(counter)
     else:
       sem = _threading.Semaphore(opt.jobs)
       threads_and_output = []
@@ -164,7 +164,7 @@ the following meanings:
         t.join()
         output.dump(sys.stdout)
         output.close()
-    if len(all_projects) == counter.next():
+    if len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')
 
     if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index a0a68960..6f77310f 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -14,10 +14,10 @@
 # limitations under the License.
 
 from __future__ import print_function
+import json
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import pickle
 import re
 import shutil
 import socket
@@ -760,7 +760,7 @@ class _FetchTimes(object):
   _ALPHA = 0.5
 
   def __init__(self, manifest):
-    self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
     self._times = None
     self._seen = set()
 
@@ -779,22 +779,17 @@ class _FetchTimes(object):
   def _Load(self):
     if self._times is None:
       try:
-        f = open(self._path, 'rb')
-      except IOError:
-        self._times = {}
-        return self._times
-      try:
-        try:
-          self._times = pickle.load(f)
-        except IOError:
-          try:
-            os.remove(self._path)
-          except OSError:
-            pass
-          self._times = {}
-      finally:
-        f.close()
-    return self._times
+        f = open(self._path)
+        try:
+          self._times = json.load(f)
+        finally:
+          f.close()
+      except (IOError, ValueError):
+        try:
+          os.remove(self._path)
+        except OSError:
+          pass
+        self._times = {}
 
   def Save(self):
     if self._times is None:
@@ -808,13 +803,13 @@ class _FetchTimes(object):
       del self._times[name]
 
     try:
-      f = open(self._path, 'wb')
+      f = open(self._path, 'w')
       try:
-        pickle.dump(self._times, f)
-      except (IOError, OSError, pickle.PickleError):
-        try:
-          os.remove(self._path)
-        except OSError:
-          pass
-    finally:
-      f.close()
+        json.dump(self._times, f, indent=2)
+      finally:
+        f.close()
+    except (IOError, TypeError):
+      try:
+        os.remove(self._path)
+      except OSError:
+        pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index e2fa261e..0ee36df1 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -25,10 +25,12 @@ from git_command import GitCommand
 from project import RepoHook
 
 from pyversion import is_python3
+# pylint:disable=W0622
 if not is_python3():
-  # pylint:disable=W0622
   input = raw_input
-  # pylint:enable=W0622
+else:
+  unicode = str
+# pylint:enable=W0622
 
 UNUSUAL_COMMIT_THRESHOLD = 5
 
