Diffstat (limited to 'subcmds')

 subcmds/branches.py    |  20
 subcmds/cherry_pick.py |   1
 subcmds/download.py    |   1
 subcmds/forall.py      | 325
 subcmds/info.py        |   3
 subcmds/init.py        |   6
 subcmds/start.py       |   8
 subcmds/status.py      |  54
 subcmds/sync.py        | 102
 subcmds/upload.py      |  14

 10 files changed, 337 insertions, 197 deletions
diff --git a/subcmds/branches.py b/subcmds/branches.py
index f714c1e8..2902684a 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -47,6 +47,10 @@ class BranchInfo(object):
     return self.current > 0

   @property
+  def IsSplitCurrent(self):
+    return self.current != 0 and self.current != len(self.projects)
+
+  @property
   def IsPublished(self):
     return self.published > 0

@@ -139,10 +143,14 @@ is shown, then the branch appears in all projects.
       if in_cnt < project_cnt:
         fmt = out.write
         paths = []
-        if in_cnt < project_cnt - in_cnt:
+        non_cur_paths = []
+        if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
           in_type = 'in'
           for b in i.projects:
-            paths.append(b.project.relpath)
+            if not i.IsSplitCurrent or b.current:
+              paths.append(b.project.relpath)
+            else:
+              non_cur_paths.append(b.project.relpath)
         else:
           fmt = out.notinproject
           in_type = 'not in'
@@ -154,13 +162,19 @@ is shown, then the branch appears in all projects.
               paths.append(p.relpath)

         s = ' %s %s' % (in_type, ', '.join(paths))
-        if width + 7 + len(s) < 80:
+        if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
+          fmt = out.current if i.IsCurrent else fmt
           fmt(s)
         else:
           fmt(' %s:' % in_type)
+          fmt = out.current if i.IsCurrent else out.write
           for p in paths:
             out.nl()
             fmt(width*' ' + ' %s' % p)
+          fmt = out.write
+          for p in non_cur_paths:
+            out.nl()
+            fmt(width*' ' + ' %s' % p)
       else:
         out.write(' in all projects')
       out.nl()
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py
index 520e4c32..1f7dffdc 100644
--- a/subcmds/cherry_pick.py
+++ b/subcmds/cherry_pick.py
@@ -76,6 +76,7 @@ change id will be added.
                      capture_stdout = True,
                      capture_stderr = True)
       p.stdin.write(new_msg)
+      p.stdin.close()
       if p.Wait() != 0:
         print("error: Failed to update commit message", file=sys.stderr)
         sys.exit(1)
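
The added p.stdin.close() matters because `git commit --amend -F -` reads the commit message from stdin until EOF; without closing the pipe the child can wait forever. A minimal standalone sketch of the same pattern, using plain subprocess rather than repo's GitCommand wrapper (the message text is made up):

    import subprocess

    # Feed a message to `git commit --amend -F -` over a pipe.
    p = subprocess.Popen(['git', 'commit', '--amend', '-F', '-'],
                         stdin=subprocess.PIPE)
    p.stdin.write(b'updated commit message\n')
    p.stdin.close()              # send EOF so git stops reading and can finish
    if p.wait() != 0:
        raise RuntimeError('git commit --amend failed')
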
diff --git a/subcmds/download.py b/subcmds/download.py
index 098d8b43..a029462e 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -93,6 +93,7 @@ makes it available in your project's local working directory.
         except GitError:
           print('[%s] Could not complete the cherry-pick of %s' \
                 % (project.name, dl.commit), file=sys.stderr)
+          sys.exit(1)

       elif opt.revert:
         project._Revert(dl.commit)
diff --git a/subcmds/forall.py b/subcmds/forall.py
index e2a420a9..b93cd6d0 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -14,10 +14,13 @@
 # limitations under the License.

 from __future__ import print_function
+import errno
 import fcntl
+import multiprocessing
 import re
 import os
 import select
+import signal
 import sys
 import subprocess

@@ -31,6 +34,7 @@ _CAN_COLOR = [
   'log',
 ]

+
 class ForallColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'forall')
@@ -87,6 +91,12 @@ revision to a locally executed git command, use REPO_LREV.
 REPO_RREV is the name of the revision from the manifest, exactly
 as written in the manifest.

+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
+
 REPO__* are any extra environment variables, specified by the
 "annotation" element under any project element. This can be useful
 for differentiating trees based on user-specific criteria, or simply
@@ -126,9 +136,35 @@ without iterating through the remaining projects.
     g.add_option('-v', '--verbose',
                  dest='verbose', action='store_true',
                  help='Show command error messages')
+    g.add_option('-j', '--jobs',
+                 dest='jobs', action='store', type='int', default=1,
+                 help='number of commands to execute simultaneously')

   def WantPager(self, opt):
-    return opt.project_header
+    return opt.project_header and opt.jobs == 1
+
+  def _SerializeProject(self, project):
+    """ Serialize a project._GitGetByExec instance.
+
+    project._GitGetByExec is not pickle-able. Instead of trying to pass it
+    around between processes, make a dict ourselves containing only the
+    attributes that we need.
+
+    """
+    if not self.manifest.IsMirror:
+      lrev = project.GetRevisionId()
+    else:
+      lrev = None
+    return {
+      'name': project.name,
+      'relpath': project.relpath,
+      'remote_name': project.remote.name,
+      'lrev': lrev,
+      'rrev': project.revisionExpr,
+      'annotations': dict((a.name, a.value) for a in project.annotations),
+      'gitdir': project.gitdir,
+      'worktree': project.worktree,
+    }

   def Execute(self, opt, args):
     if not opt.command:
@@ -167,123 +203,188 @@ without iterating through the remaining projects.
     # pylint: enable=W0631

     mirror = self.manifest.IsMirror
-    out = ForallColoring(self.manifest.manifestProject.config)
-    out.redirect(sys.stdout)
-
     rc = 0
-    first = True
+
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+      self.manifest.manifestProject.worktree, smart_sync_manifest_name)
+
+    if os.path.isfile(smart_sync_manifest_path):
+      self.manifest.Override(smart_sync_manifest_path)

     if not opt.regex:
       projects = self.GetProjects(args)
     else:
       projects = self.FindProjects(args)

-    for project in projects:
-      env = os.environ.copy()
-      def setenv(name, val):
-        if val is None:
-          val = ''
-        env[name] = val.encode()
-
-      setenv('REPO_PROJECT', project.name)
-      setenv('REPO_PATH', project.relpath)
-      setenv('REPO_REMOTE', project.remote.name)
-      setenv('REPO_LREV', project.GetRevisionId())
-      setenv('REPO_RREV', project.revisionExpr)
-      for a in project.annotations:
-        setenv("REPO__%s" % (a.name), a.value)
-
-      if mirror:
-        setenv('GIT_DIR', project.gitdir)
-        cwd = project.gitdir
-      else:
-        cwd = project.worktree
-
-      if not os.path.exists(cwd):
-        if (opt.project_header and opt.verbose) \
-        or not opt.project_header:
-          print('skipping %s/' % project.relpath, file=sys.stderr)
-        continue
-
-      if opt.project_header:
-        stdin = subprocess.PIPE
-        stdout = subprocess.PIPE
-        stderr = subprocess.PIPE
-      else:
-        stdin = None
-        stdout = None
-        stderr = None
-
-      p = subprocess.Popen(cmd,
-                           cwd = cwd,
-                           shell = shell,
-                           env = env,
-                           stdin = stdin,
-                           stdout = stdout,
-                           stderr = stderr)
-
-      if opt.project_header:
-        class sfd(object):
-          def __init__(self, fd, dest):
-            self.fd = fd
-            self.dest = dest
-          def fileno(self):
-            return self.fd.fileno()
-
-        empty = True
-        errbuf = ''
-
-        p.stdin.close()
-        s_in = [sfd(p.stdout, sys.stdout),
-                sfd(p.stderr, sys.stderr)]
-
-        for s in s_in:
-          flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-          fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-        while s_in:
-          in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
-          for s in in_ready:
-            buf = s.fd.read(4096)
-            if not buf:
-              s.fd.close()
-              s_in.remove(s)
-              continue
-
-            if not opt.verbose:
-              if s.fd != p.stdout:
-                errbuf += buf
-                continue
-
-            if empty:
-              if first:
-                first = False
-              else:
-                out.nl()
-
-              if mirror:
-                project_header_path = project.name
-              else:
-                project_header_path = project.relpath
-              out.project('project %s/', project_header_path)
-              out.nl()
-              out.flush()
-              if errbuf:
-                sys.stderr.write(errbuf)
-                sys.stderr.flush()
-                errbuf = ''
-              empty = False
-
-            s.dest.write(buf)
-            s.dest.flush()
-
-      r = p.wait()
-      if r != 0:
-        if r != rc:
-          rc = r
-        if opt.abort_on_errors:
-          print("error: %s: Aborting due to previous error" % project.relpath,
-                file=sys.stderr)
-          sys.exit(r)
+    os.environ['REPO_COUNT'] = str(len(projects))
+
+    pool = multiprocessing.Pool(opt.jobs, InitWorker)
+    try:
+      config = self.manifest.manifestProject.config
+      results_it = pool.imap(
+         DoWorkWrapper,
+         self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
+      pool.close()
+      for r in results_it:
+        rc = rc or r
+        if r != 0 and opt.abort_on_errors:
+          raise Exception('Aborting due to previous error')
+    except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+      # Catch KeyboardInterrupt raised inside and outside of workers
+      print('Interrupted - terminating the pool')
+      pool.terminate()
+      rc = rc or errno.EINTR
+    except Exception as e:
+      # Catch any other exceptions raised
+      print('Got an error, terminating the pool: %r' % e,
+            file=sys.stderr)
+      pool.terminate()
+      rc = rc or getattr(e, 'errno', 1)
+    finally:
+      pool.join()
     if rc != 0:
       sys.exit(rc)
+
+  def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
+    for cnt, p in enumerate(projects):
+      try:
+        project = self._SerializeProject(p)
+      except Exception as e:
+        print('Project list error: %r' % e,
+              file=sys.stderr)
+        return
+      except KeyboardInterrupt:
+        print('Project list interrupted',
+              file=sys.stderr)
+        return
+      yield [mirror, opt, cmd, shell, cnt, config, project]
+
+class WorkerKeyboardInterrupt(Exception):
+  """ Keyboard interrupt exception for worker processes. """
+  pass
+
+
+def InitWorker():
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+def DoWorkWrapper(args):
+  """ A wrapper around the DoWork() method.
+
+  Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+  ``Exception``-based exception to stop it flooding the console with stacktraces
+  and making the parent hang indefinitely.
+
+  """
+  project = args.pop()
+  try:
+    return DoWork(project, *args)
+  except KeyboardInterrupt:
+    print('%s: Worker interrupted' % project['name'])
+    raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+  env = os.environ.copy()
+  def setenv(name, val):
+    if val is None:
+      val = ''
+    if hasattr(val, 'encode'):
+      val = val.encode()
+    env[name] = val
+
+  setenv('REPO_PROJECT', project['name'])
+  setenv('REPO_PATH', project['relpath'])
+  setenv('REPO_REMOTE', project['remote_name'])
+  setenv('REPO_LREV', project['lrev'])
+  setenv('REPO_RREV', project['rrev'])
+  setenv('REPO_I', str(cnt + 1))
+  for name in project['annotations']:
+    setenv("REPO__%s" % (name), project['annotations'][name])
+
+  if mirror:
+    setenv('GIT_DIR', project['gitdir'])
+    cwd = project['gitdir']
+  else:
+    cwd = project['worktree']
+
+  if not os.path.exists(cwd):
+    if (opt.project_header and opt.verbose) \
+    or not opt.project_header:
+      print('skipping %s/' % project['relpath'], file=sys.stderr)
+    return
+
+  if opt.project_header:
+    stdin = subprocess.PIPE
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
+  else:
+    stdin = None
+    stdout = None
+    stderr = None
+
+  p = subprocess.Popen(cmd,
+                       cwd=cwd,
+                       shell=shell,
+                       env=env,
+                       stdin=stdin,
+                       stdout=stdout,
+                       stderr=stderr)
+
+  if opt.project_header:
+    out = ForallColoring(config)
+    out.redirect(sys.stdout)
+    class sfd(object):
+      def __init__(self, fd, dest):
+        self.fd = fd
+        self.dest = dest
+      def fileno(self):
+        return self.fd.fileno()
+
+    empty = True
+    errbuf = ''
+
+    p.stdin.close()
+    s_in = [sfd(p.stdout, sys.stdout),
+            sfd(p.stderr, sys.stderr)]
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s.fd.close()
+          s_in.remove(s)
+          continue
+
+        if not opt.verbose:
+          if s.fd != p.stdout:
+            errbuf += buf
+            continue
+
+        if empty and out:
+          if not cnt == 0:
+            out.nl()
+
+          if mirror:
+            project_header_path = project['name']
+          else:
+            project_header_path = project['relpath']
+          out.project('project %s/', project_header_path)
+          out.nl()
+          out.flush()
+          if errbuf:
+            sys.stderr.write(errbuf)
+            sys.stderr.flush()
+            errbuf = ''
+          empty = False
+
+        s.dest.write(buf)
+        s.dest.flush()
+
+  r = p.wait()
+  return r
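
For reference, a condensed sketch of the worker-pool shape the new forall code relies on: workers ignore SIGINT via the pool initializer, so only the parent reacts to Ctrl-C and tears the pool down. Function names here are illustrative, not repo's:

    import errno
    import multiprocessing
    import signal

    def init_worker():
        # Workers ignore SIGINT; the parent handles Ctrl-C and terminates the
        # pool, so the console is not flooded with per-worker stack traces.
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def run_one(task):
        return 0  # stand-in for DoWork(); returns a shell-style exit code

    def run_all(tasks, jobs=4):
        pool = multiprocessing.Pool(jobs, init_worker)
        rc = 0
        try:
            for r in pool.imap(run_one, tasks):
                rc = rc or r
            pool.close()
        except KeyboardInterrupt:
            rc = rc or errno.EINTR
            pool.terminate()
        except Exception:
            pool.terminate()
            raise
        finally:
            pool.join()
        return rc

With the new environment variables, something like `repo forall -j 4 -c 'echo "[$REPO_I/$REPO_COUNT] $REPO_PROJECT"'` prints a simple progress prefix for each project.
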
diff --git a/subcmds/info.py b/subcmds/info.py
index d42860ae..ed196e90 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -59,7 +59,8 @@ class Info(PagedCommand):
                     or 'all,-notdefault')

     self.heading("Manifest branch: ")
-    self.headtext(self.manifest.default.revisionExpr)
+    if self.manifest.default.revisionExpr:
+      self.headtext(self.manifest.default.revisionExpr)
     self.out.nl()
     self.heading("Manifest merge branch: ")
     self.headtext(mergeBranch)
diff --git a/subcmds/init.py b/subcmds/init.py
index b1fcb69c..dbb6ddda 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -27,7 +27,7 @@ else:
   import imp
   import urlparse
   urllib = imp.new_module('urllib')
-  urllib.parse = urlparse.urlparse
+  urllib.parse = urlparse

 from color import Coloring
 from command import InteractiveCommand, MirrorSafeCommand
@@ -153,7 +153,7 @@ to update the working directory files.
     # server where this git is located, so let's save that here.
     mirrored_manifest_git = None
     if opt.reference:
-      manifest_git_path = urllib.parse(opt.manifest_url).path[1:]
+      manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:]
       mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
       if not mirrored_manifest_git.endswith(".git"):
         mirrored_manifest_git += ".git"
@@ -233,7 +233,7 @@ to update the working directory files.
         sys.exit(1)

     if opt.manifest_branch:
-      m.MetaBranchSwitch(opt.manifest_branch)
+      m.MetaBranchSwitch()

     syncbuf = SyncBuffer(m.config)
     m.Sync_LocalHalf(syncbuf)
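
The init.py change binds the urlparse module (not the urlparse function) to `urllib.parse`, so the later call reads the same under Python 2 and 3. A simplified version of that compatibility shim; the variable name `urllib_parse` is mine, not repo's:

    try:                      # Python 3
        import urllib.parse as urllib_parse
    except ImportError:       # Python 2
        import urlparse as urllib_parse

    # Strip the leading '/' to get the repository path on the server.
    path = urllib_parse.urlparse('https://android.example.com/platform/manifest').path[1:]
    print(path)               # -> 'platform/manifest'
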
diff --git a/subcmds/start.py b/subcmds/start.py
index 2d723fc2..60ad41e0 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -59,9 +59,13 @@ revision specified in the manifest.
     for project in all_projects:
       pm.update()
       # If the current revision is a specific SHA1 then we can't push back
-      # to it so substitute the manifest default revision instead.
+      # to it; so substitute with dest_branch if defined, or with manifest
+      # default revision instead.
       if IsId(project.revisionExpr):
-        project.revisionExpr = self.manifest.default.revisionExpr
+        if project.dest_branch:
+          project.revisionExpr = project.dest_branch
+        else:
+          project.revisionExpr = self.manifest.default.revisionExpr
       if not project.StartBranch(nb):
         err.append(project)
     pm.end()
diff --git a/subcmds/status.py b/subcmds/status.py
index 41c4429a..38c229b1 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -22,15 +22,8 @@ except ImportError:

 import glob

-from pyversion import is_python3
-if is_python3():
-  import io
-else:
-  import StringIO as io
-
 import itertools
 import os
-import sys

 from color import Coloring

@@ -97,7 +90,7 @@ the following meanings:
                  dest='orphans', action='store_true',
                  help="include objects in working directory outside of repo projects")

-  def _StatusHelper(self, project, clean_counter, sem, output):
+  def _StatusHelper(self, project, clean_counter, sem):
     """Obtains the status for a specific project.

     Obtains the status for a project, redirecting the output to
@@ -111,9 +104,9 @@ the following meanings:
       output: Where to output the status.
     """
     try:
-      state = project.PrintWorkTreeStatus(output)
+      state = project.PrintWorkTreeStatus()
       if state == 'CLEAN':
-        clean_counter.next()
+        next(clean_counter)
     finally:
       sem.release()

@@ -122,16 +115,16 @@ the following meanings:
     status_header = ' --\t'
     for item in dirs:
       if not os.path.isdir(item):
-        outstring.write(''.join([status_header, item]))
+        outstring.append(''.join([status_header, item]))
         continue
       if item in proj_dirs:
         continue
       if item in proj_dirs_parents:
-        self._FindOrphans(glob.glob('%s/.*' % item) + \
-                          glob.glob('%s/*' % item), \
+        self._FindOrphans(glob.glob('%s/.*' % item) +
+                          glob.glob('%s/*' % item),
                           proj_dirs, proj_dirs_parents, outstring)
         continue
-      outstring.write(''.join([status_header, item, '/']))
+      outstring.append(''.join([status_header, item, '/']))

   def Execute(self, opt, args):
     all_projects = self.GetProjects(args)
@@ -141,30 +134,21 @@ the following meanings:
       for project in all_projects:
         state = project.PrintWorkTreeStatus()
         if state == 'CLEAN':
-          counter.next()
+          next(counter)
     else:
       sem = _threading.Semaphore(opt.jobs)
-      threads_and_output = []
+      threads = []
       for project in all_projects:
         sem.acquire()

-        class BufList(io.StringIO):
-          def dump(self, ostream):
-            for entry in self.buflist:
-              ostream.write(entry)
-
-        output = BufList()
-
         t = _threading.Thread(target=self._StatusHelper,
-                              args=(project, counter, sem, output))
-        threads_and_output.append((t, output))
+                              args=(project, counter, sem))
+        threads.append(t)
         t.daemon = True
         t.start()
-      for (t, output) in threads_and_output:
+      for t in threads:
         t.join()
-        output.dump(sys.stdout)
-        output.close()
-    if len(all_projects) == counter.next():
+    if len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')

     if opt.orphans:
@@ -188,23 +172,21 @@ the following meanings:
       try:
         os.chdir(self.manifest.topdir)

-        outstring = io.StringIO()
-        self._FindOrphans(glob.glob('.*') + \
-                          glob.glob('*'), \
+        outstring = []
+        self._FindOrphans(glob.glob('.*') +
+                          glob.glob('*'),
                           proj_dirs, proj_dirs_parents, outstring)

-        if outstring.buflist:
+        if outstring:
           output = StatusColoring(self.manifest.globalConfig)
           output.project('Objects not within a project (orphans)')
           output.nl()
-          for entry in outstring.buflist:
+          for entry in outstring:
             output.untracked(entry)
             output.nl()
         else:
           print('No orphan files or directories')

-        outstring.close()
-
       finally:
         # Restore CWD.
         os.chdir(orig_path)
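
The status rewrite drops the per-thread StringIO buffers: each helper prints directly, a Semaphore caps how many run at once, and the shared itertools.count() is advanced with next() (which works on both Python 2 and 3, unlike .next()). A stripped-down sketch of that shape; is_clean() stands in for PrintWorkTreeStatus():

    import itertools
    import threading

    def is_clean(project):
        return True  # stand-in for project.PrintWorkTreeStatus() == 'CLEAN'

    def status_helper(project, clean_counter, sem):
        try:
            if is_clean(project):
                next(clean_counter)   # advancing the counter marks one clean project
        finally:
            sem.release()             # always give the slot back

    def status_all(projects, jobs=8):
        clean_counter = itertools.count()
        sem = threading.Semaphore(jobs)   # at most `jobs` helpers in flight
        threads = []
        for p in projects:
            sem.acquire()                 # block until a slot frees up
            t = threading.Thread(target=status_helper, args=(p, clean_counter, sem))
            t.daemon = True
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        return next(clean_counter)        # == number of clean projects
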
diff --git a/subcmds/sync.py b/subcmds/sync.py
index b50df099..43d450be 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -14,10 +14,10 @@
 # limitations under the License.

 from __future__ import print_function
+import json
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import pickle
 import re
 import shutil
 import socket
@@ -119,6 +119,11 @@ credentials.
 The -f/--force-broken option can be used to proceed with syncing
 other projects if a project sync fails.

+The --force-sync option can be used to overwrite existing git
+directories if they have previously been linked to a different
+object direcotry. WARNING: This may cause data to be lost since
+refs may be removed when overwriting.
+
 The --no-clone-bundle option disables any attempt to use
 $URL/clone.bundle to bootstrap a new Git repository from a
 resumeable bundle file on a content delivery network. This
@@ -128,6 +133,13 @@ HTTP client or proxy configuration, but the Git binary works.
 The --fetch-submodules option enables fetching Git submodules
 of a project from server.

+The -c/--current-branch option can be used to only fetch objects that
+are on the branch specified by a project's revision.
+
+The --optimized-fetch option can be used to only fetch projects that
+are fixed to a sha1 revision if the sha1 revision does not already
+exist locally.
+
 SSH Connections
 ---------------

@@ -167,6 +179,11 @@ later is required to fix a server side protocol bug.
     p.add_option('-f', '--force-broken',
                  dest='force_broken', action='store_true',
                  help="continue sync even if a project fails to sync")
+    p.add_option('--force-sync',
+                 dest='force_sync', action='store_true',
+                 help="overwrite an existing git directory if it needs to "
+                      "point to a different object directory. WARNING: this "
+                      "may cause loss of data")
     p.add_option('-l', '--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
@@ -203,6 +220,9 @@ later is required to fix a server side protocol bug.
     p.add_option('--no-tags',
                  dest='no_tags', action='store_true',
                  help="don't fetch tags")
+    p.add_option('--optimized-fetch',
+                 dest='optimized_fetch', action='store_true',
+                 help='only fetch projects fixed to sha1 if revision does not exist locally')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -271,8 +291,10 @@ later is required to fix a server side protocol bug.
         success = project.Sync_NetworkHalf(
           quiet=opt.quiet,
           current_branch_only=opt.current_branch_only,
+          force_sync=opt.force_sync,
           clone_bundle=not opt.no_clone_bundle,
-          no_tags=opt.no_tags, archive=self.manifest.IsArchive)
+          no_tags=opt.no_tags, archive=self.manifest.IsArchive,
+          optimized_fetch=opt.optimized_fetch)
         self._fetch_times.Set(project, time.time() - start)

         # Lock around all the rest of the code, since printing, updating a set
@@ -508,6 +530,9 @@ later is required to fix a server side protocol bug.
       self.manifest.Override(opt.manifest_name)

     manifest_name = opt.manifest_name
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+      self.manifest.manifestProject.worktree, smart_sync_manifest_name)

     if opt.smart_sync or opt.smart_tag:
       if not self.manifest.manifest_server:
@@ -560,7 +585,10 @@ later is required to fix a server side protocol bug.
           branch = branch[len(R_HEADS):]

         env = os.environ.copy()
-        if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
+        if 'SYNC_TARGET' in env:
+          target = env['SYNC_TARGET']
+          [success, manifest_str] = server.GetApprovedManifest(branch, target)
+        elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
           target = '%s-%s' % (env['TARGET_PRODUCT'],
                               env['TARGET_BUILD_VARIANT'])
           [success, manifest_str] = server.GetApprovedManifest(branch, target)
@@ -571,17 +599,16 @@ later is required to fix a server side protocol bug.
           [success, manifest_str] = server.GetManifest(opt.smart_tag)

         if success:
-          manifest_name = "smart_sync_override.xml"
-          manifest_path = os.path.join(self.manifest.manifestProject.worktree,
-                                       manifest_name)
+          manifest_name = smart_sync_manifest_name
           try:
-            f = open(manifest_path, 'w')
+            f = open(smart_sync_manifest_path, 'w')
             try:
               f.write(manifest_str)
             finally:
               f.close()
-          except IOError:
-            print('error: cannot write manifest to %s' % manifest_path,
+          except IOError as e:
+            print('error: cannot write manifest to %s:\n%s'
+                  % (smart_sync_manifest_path, e),
                   file=sys.stderr)
             sys.exit(1)
           self._ReloadManifest(manifest_name)
@@ -598,6 +625,13 @@ later is required to fix a server side protocol bug.
               % (self.manifest.manifest_server, e.errcode, e.errmsg),
               file=sys.stderr)
         sys.exit(1)
+    else:  # Not smart sync or smart tag mode
+      if os.path.isfile(smart_sync_manifest_path):
+        try:
+          os.remove(smart_sync_manifest_path)
+        except OSError as e:
+          print('error: failed to remove existing smart sync override manifest: %s' %
+                e, file=sys.stderr)

     rp = self.manifest.repoProject
     rp.PreSync()
@@ -611,7 +645,8 @@ later is required to fix a server side protocol bug.
     if not opt.local_only:
       mp.Sync_NetworkHalf(quiet=opt.quiet,
                           current_branch_only=opt.current_branch_only,
-                          no_tags=opt.no_tags)
+                          no_tags=opt.no_tags,
+                          optimized_fetch=opt.optimized_fetch)

     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
@@ -674,7 +709,7 @@ later is required to fix a server side protocol bug.
     for project in all_projects:
       pm.update()
       if project.worktree:
-        project.Sync_LocalHalf(syncbuf)
+        project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
     pm.end()
     print(file=sys.stderr)
     if not syncbuf.Finish():
@@ -762,7 +797,7 @@ class _FetchTimes(object):
   _ALPHA = 0.5

   def __init__(self, manifest):
-    self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
     self._times = None
     self._seen = set()

@@ -781,22 +816,17 @@ class _FetchTimes(object):
   def _Load(self):
     if self._times is None:
       try:
-        f = open(self._path, 'rb')
-      except IOError:
-        self._times = {}
-        return self._times
-      try:
+        f = open(self._path)
         try:
-          self._times = pickle.load(f)
-        except IOError:
-          try:
-            os.remove(self._path)
-          except OSError:
-            pass
-          self._times = {}
-      finally:
-        f.close()
-    return self._times
+          self._times = json.load(f)
+        finally:
+          f.close()
+      except (IOError, ValueError):
+        try:
+          os.remove(self._path)
+        except OSError:
+          pass
+        self._times = {}

   def Save(self):
     if self._times is None:
@@ -810,13 +840,13 @@ class _FetchTimes(object):
       del self._times[name]

     try:
-      f = open(self._path, 'wb')
+      f = open(self._path, 'w')
       try:
-        pickle.dump(self._times, f)
-      except (IOError, OSError, pickle.PickleError):
-        try:
-          os.remove(self._path)
-        except OSError:
-          pass
-      finally:
-        f.close()
+        json.dump(self._times, f, indent=2)
+      finally:
+        f.close()
+    except (IOError, TypeError):
+      try:
+        os.remove(self._path)
+      except OSError:
+        pass
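
The _FetchTimes change swaps the pickle cache for JSON and flattens the error handling: a missing or corrupt file is simply deleted and replaced with an empty dict. A condensed sketch of the new load/save logic (the real class also keeps the exponentially weighted average controlled by _ALPHA and prunes entries it has not seen):

    import json
    import os

    class FetchTimesSketch(object):
        def __init__(self, path):
            self._path = path       # e.g. <repodir>/.repo_fetchtimes.json
            self._times = None

        def _load(self):
            if self._times is None:
                try:
                    with open(self._path) as f:
                        self._times = json.load(f)
                except (IOError, ValueError):
                    # No cache yet, or an unreadable one: drop it, start over.
                    try:
                        os.remove(self._path)
                    except OSError:
                        pass
                    self._times = {}
            return self._times

        def save(self):
            if self._times is None:
                return
            try:
                with open(self._path, 'w') as f:
                    json.dump(self._times, f, indent=2)
            except (IOError, TypeError):
                try:
                    os.remove(self._path)
                except OSError:
                    pass
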
diff --git a/subcmds/upload.py b/subcmds/upload.py
index e2fa261e..674fc17d 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -25,10 +25,12 @@ from git_command import GitCommand
 from project import RepoHook

 from pyversion import is_python3
+# pylint:disable=W0622
 if not is_python3():
-  # pylint:disable=W0622
   input = raw_input
-  # pylint:enable=W0622
+else:
+  unicode = str
+# pylint:enable=W0622

 UNUSUAL_COMMIT_THRESHOLD = 5

@@ -337,13 +339,17 @@ Gerrit Code Review: http://code.google.com/p/gerrit/
       self._AppendAutoList(branch, people)

       # Check if there are local changes that may have been forgotten
-      if branch.project.HasChanges():
+      changes = branch.project.UncommitedFiles()
+      if changes:
         key = 'review.%s.autoupload' % branch.project.remote.review
         answer = branch.project.config.GetBoolean(key)

         # if they want to auto upload, let's not ask because it could be automated
         if answer is None:
-          sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
+          sys.stdout.write('Uncommitted changes in ' + branch.project.name)
+          sys.stdout.write(' (did you forget to amend?):\n')
+          sys.stdout.write('\n'.join(changes) + '\n')
+          sys.stdout.write('Continue uploading? (y/N) ')
           a = sys.stdin.readline().strip().lower()
           if a not in ('y', 'yes', 't', 'true', 'on'):
             print("skipping upload", file=sys.stderr)
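
The upload prompt now lists the actual uncommitted files (via the new UncommitedFiles() call) instead of a one-line warning, and still honours the review.<remote>.autoupload setting. A minimal sketch of that prompt flow, with made-up helper names:

    import sys

    def confirm_upload(project_name, changes, auto_upload=None):
        # auto_upload mirrors review.<remote>.autoupload: when the key is set,
        # never prompt -- just obey it.
        if auto_upload is not None:
            return auto_upload
        sys.stdout.write('Uncommitted changes in %s (did you forget to amend?):\n'
                         % project_name)
        sys.stdout.write('\n'.join(changes) + '\n')
        sys.stdout.write('Continue uploading? (y/N) ')
        sys.stdout.flush()
        answer = sys.stdin.readline().strip().lower()
        return answer in ('y', 'yes', 't', 'true', 'on')
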
