diff options
Diffstat (limited to 'subcmds/sync.py')
| -rw-r--r-- | subcmds/sync.py | 306 |
1 files changed, 228 insertions, 78 deletions
diff --git a/subcmds/sync.py b/subcmds/sync.py index d41052d7..3211cbb1 100644 --- a/subcmds/sync.py +++ b/subcmds/sync.py | |||
| @@ -12,6 +12,7 @@ | |||
| 12 | # See the License for the specific language governing permissions and | 12 | # See the License for the specific language governing permissions and |
| 13 | # limitations under the License. | 13 | # limitations under the License. |
| 14 | 14 | ||
| 15 | import errno | ||
| 15 | import functools | 16 | import functools |
| 16 | import http.cookiejar as cookielib | 17 | import http.cookiejar as cookielib |
| 17 | import io | 18 | import io |
| @@ -56,6 +57,7 @@ from error import RepoChangedException, GitError, ManifestParseError | |||
| 56 | import platform_utils | 57 | import platform_utils |
| 57 | from project import SyncBuffer | 58 | from project import SyncBuffer |
| 58 | from progress import Progress | 59 | from progress import Progress |
| 60 | import ssh | ||
| 59 | from wrapper import Wrapper | 61 | from wrapper import Wrapper |
| 60 | from manifest_xml import GitcManifest | 62 | from manifest_xml import GitcManifest |
| 61 | 63 | ||
| @@ -64,7 +66,7 @@ _ONE_DAY_S = 24 * 60 * 60 | |||
| 64 | 66 | ||
| 65 | class Sync(Command, MirrorSafeCommand): | 67 | class Sync(Command, MirrorSafeCommand): |
| 66 | jobs = 1 | 68 | jobs = 1 |
| 67 | common = True | 69 | COMMON = True |
| 68 | helpSummary = "Update working tree to the latest revision" | 70 | helpSummary = "Update working tree to the latest revision" |
| 69 | helpUsage = """ | 71 | helpUsage = """ |
| 70 | %prog [<project>...] | 72 | %prog [<project>...] |
| @@ -168,10 +170,11 @@ later is required to fix a server side protocol bug. | |||
| 168 | PARALLEL_JOBS = 1 | 170 | PARALLEL_JOBS = 1 |
| 169 | 171 | ||
| 170 | def _CommonOptions(self, p): | 172 | def _CommonOptions(self, p): |
| 171 | try: | 173 | if self.manifest: |
| 172 | self.PARALLEL_JOBS = self.manifest.default.sync_j | 174 | try: |
| 173 | except ManifestParseError: | 175 | self.PARALLEL_JOBS = self.manifest.default.sync_j |
| 174 | pass | 176 | except ManifestParseError: |
| 177 | pass | ||
| 175 | super()._CommonOptions(p) | 178 | super()._CommonOptions(p) |
| 176 | 179 | ||
| 177 | def _Options(self, p, show_smart=True): | 180 | def _Options(self, p, show_smart=True): |
| @@ -212,6 +215,9 @@ later is required to fix a server side protocol bug. | |||
| 212 | p.add_option('-c', '--current-branch', | 215 | p.add_option('-c', '--current-branch', |
| 213 | dest='current_branch_only', action='store_true', | 216 | dest='current_branch_only', action='store_true', |
| 214 | help='fetch only current branch from server') | 217 | help='fetch only current branch from server') |
| 218 | p.add_option('--no-current-branch', | ||
| 219 | dest='current_branch_only', action='store_false', | ||
| 220 | help='fetch all branches from server') | ||
| 215 | p.add_option('-m', '--manifest-name', | 221 | p.add_option('-m', '--manifest-name', |
| 216 | dest='manifest_name', | 222 | dest='manifest_name', |
| 217 | help='temporary manifest to use for this sync', metavar='NAME.xml') | 223 | help='temporary manifest to use for this sync', metavar='NAME.xml') |
| @@ -230,8 +236,14 @@ later is required to fix a server side protocol bug. | |||
| 230 | help='fetch submodules from server') | 236 | help='fetch submodules from server') |
| 231 | p.add_option('--use-superproject', action='store_true', | 237 | p.add_option('--use-superproject', action='store_true', |
| 232 | help='use the manifest superproject to sync projects') | 238 | help='use the manifest superproject to sync projects') |
| 239 | p.add_option('--no-use-superproject', action='store_false', | ||
| 240 | dest='use_superproject', | ||
| 241 | help='disable use of manifest superprojects') | ||
| 242 | p.add_option('--tags', | ||
| 243 | action='store_false', | ||
| 244 | help='fetch tags') | ||
| 233 | p.add_option('--no-tags', | 245 | p.add_option('--no-tags', |
| 234 | dest='tags', default=True, action='store_false', | 246 | dest='tags', action='store_false', |
| 235 | help="don't fetch tags") | 247 | help="don't fetch tags") |
| 236 | p.add_option('--optimized-fetch', | 248 | p.add_option('--optimized-fetch', |
| 237 | dest='optimized_fetch', action='store_true', | 249 | dest='optimized_fetch', action='store_true', |
| @@ -266,17 +278,11 @@ later is required to fix a server side protocol bug. | |||
| 266 | branch = branch[len(R_HEADS):] | 278 | branch = branch[len(R_HEADS):] |
| 267 | return branch | 279 | return branch |
| 268 | 280 | ||
| 269 | def _UseSuperproject(self, opt): | ||
| 270 | """Returns True if use-superproject option is enabled""" | ||
| 271 | return (opt.use_superproject or | ||
| 272 | self.manifest.manifestProject.config.GetBoolean( | ||
| 273 | 'repo.superproject')) | ||
| 274 | |||
| 275 | def _GetCurrentBranchOnly(self, opt): | 281 | def _GetCurrentBranchOnly(self, opt): |
| 276 | """Returns True if current-branch or use-superproject options are enabled.""" | 282 | """Returns True if current-branch or use-superproject options are enabled.""" |
| 277 | return opt.current_branch_only or self._UseSuperproject(opt) | 283 | return opt.current_branch_only or git_superproject.UseSuperproject(opt, self.manifest) |
| 278 | 284 | ||
| 279 | def _UpdateProjectsRevisionId(self, opt, args): | 285 | def _UpdateProjectsRevisionId(self, opt, args, load_local_manifests, superproject_logging_data): |
| 280 | """Update revisionId of every project with the SHA from superproject. | 286 | """Update revisionId of every project with the SHA from superproject. |
| 281 | 287 | ||
| 282 | This function updates each project's revisionId with SHA from superproject. | 288 | This function updates each project's revisionId with SHA from superproject. |
| @@ -286,22 +292,40 @@ later is required to fix a server side protocol bug. | |||
| 286 | opt: Program options returned from optparse. See _Options(). | 292 | opt: Program options returned from optparse. See _Options(). |
| 287 | args: Arguments to pass to GetProjects. See the GetProjects | 293 | args: Arguments to pass to GetProjects. See the GetProjects |
| 288 | docstring for details. | 294 | docstring for details. |
| 295 | load_local_manifests: Whether to load local manifests. | ||
| 296 | superproject_logging_data: A dictionary of superproject data that is to be logged. | ||
| 289 | 297 | ||
| 290 | Returns: | 298 | Returns: |
| 291 | Returns path to the overriding manifest file. | 299 | Returns path to the overriding manifest file instead of None. |
| 292 | """ | 300 | """ |
| 301 | print_messages = git_superproject.PrintMessages(opt, self.manifest) | ||
| 293 | superproject = git_superproject.Superproject(self.manifest, | 302 | superproject = git_superproject.Superproject(self.manifest, |
| 294 | self.repodir, | 303 | self.repodir, |
| 295 | quiet=opt.quiet) | 304 | self.git_event_log, |
| 305 | quiet=opt.quiet, | ||
| 306 | print_messages=print_messages) | ||
| 307 | if opt.local_only: | ||
| 308 | manifest_path = superproject.manifest_path | ||
| 309 | if manifest_path: | ||
| 310 | self._ReloadManifest(manifest_path, load_local_manifests) | ||
| 311 | return manifest_path | ||
| 312 | |||
| 296 | all_projects = self.GetProjects(args, | 313 | all_projects = self.GetProjects(args, |
| 297 | missing_ok=True, | 314 | missing_ok=True, |
| 298 | submodules_ok=opt.fetch_submodules) | 315 | submodules_ok=opt.fetch_submodules) |
| 299 | manifest_path = superproject.UpdateProjectsRevisionId(all_projects) | 316 | update_result = superproject.UpdateProjectsRevisionId(all_projects) |
| 300 | if not manifest_path: | 317 | manifest_path = update_result.manifest_path |
| 301 | print('error: Update of revsionId from superproject has failed', | 318 | superproject_logging_data['updatedrevisionid'] = bool(manifest_path) |
| 302 | file=sys.stderr) | 319 | if manifest_path: |
| 303 | sys.exit(1) | 320 | self._ReloadManifest(manifest_path, load_local_manifests) |
| 304 | self._ReloadManifest(manifest_path) | 321 | else: |
| 322 | if print_messages: | ||
| 323 | print('warning: Update of revisionId from superproject has failed, ' | ||
| 324 | 'repo sync will not use superproject to fetch the source. ', | ||
| 325 | 'Please resync with the --no-use-superproject option to avoid this repo warning.', | ||
| 326 | file=sys.stderr) | ||
| 327 | if update_result.fatal and opt.use_superproject is not None: | ||
| 328 | sys.exit(1) | ||
| 305 | return manifest_path | 329 | return manifest_path |
| 306 | 330 | ||
| 307 | def _FetchProjectList(self, opt, projects): | 331 | def _FetchProjectList(self, opt, projects): |
| @@ -343,11 +367,12 @@ later is required to fix a server side protocol bug. | |||
| 343 | optimized_fetch=opt.optimized_fetch, | 367 | optimized_fetch=opt.optimized_fetch, |
| 344 | retry_fetches=opt.retry_fetches, | 368 | retry_fetches=opt.retry_fetches, |
| 345 | prune=opt.prune, | 369 | prune=opt.prune, |
| 370 | ssh_proxy=self.ssh_proxy, | ||
| 346 | clone_filter=self.manifest.CloneFilter, | 371 | clone_filter=self.manifest.CloneFilter, |
| 347 | partial_clone_exclude=self.manifest.PartialCloneExclude) | 372 | partial_clone_exclude=self.manifest.PartialCloneExclude) |
| 348 | 373 | ||
| 349 | output = buf.getvalue() | 374 | output = buf.getvalue() |
| 350 | if opt.verbose and output: | 375 | if (opt.verbose or not success) and output: |
| 351 | print('\n' + output.rstrip()) | 376 | print('\n' + output.rstrip()) |
| 352 | 377 | ||
| 353 | if not success: | 378 | if not success: |
| @@ -364,7 +389,11 @@ later is required to fix a server side protocol bug. | |||
| 364 | finish = time.time() | 389 | finish = time.time() |
| 365 | return (success, project, start, finish) | 390 | return (success, project, start, finish) |
| 366 | 391 | ||
| 367 | def _Fetch(self, projects, opt, err_event): | 392 | @classmethod |
| 393 | def _FetchInitChild(cls, ssh_proxy): | ||
| 394 | cls.ssh_proxy = ssh_proxy | ||
| 395 | |||
| 396 | def _Fetch(self, projects, opt, err_event, ssh_proxy): | ||
| 368 | ret = True | 397 | ret = True |
| 369 | 398 | ||
| 370 | jobs = opt.jobs_network if opt.jobs_network else self.jobs | 399 | jobs = opt.jobs_network if opt.jobs_network else self.jobs |
| @@ -394,8 +423,14 @@ later is required to fix a server side protocol bug. | |||
| 394 | break | 423 | break |
| 395 | return ret | 424 | return ret |
| 396 | 425 | ||
| 426 | # We pass the ssh proxy settings via the class. This allows multiprocessing | ||
| 427 | # to pickle it up when spawning children. We can't pass it as an argument | ||
| 428 | # to _FetchProjectList below as multiprocessing is unable to pickle those. | ||
| 429 | Sync.ssh_proxy = None | ||
| 430 | |||
| 397 | # NB: Multiprocessing is heavy, so don't spin it up for one job. | 431 | # NB: Multiprocessing is heavy, so don't spin it up for one job. |
| 398 | if len(projects_list) == 1 or jobs == 1: | 432 | if len(projects_list) == 1 or jobs == 1: |
| 433 | self._FetchInitChild(ssh_proxy) | ||
| 399 | if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list): | 434 | if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list): |
| 400 | ret = False | 435 | ret = False |
| 401 | else: | 436 | else: |
| @@ -413,7 +448,8 @@ later is required to fix a server side protocol bug. | |||
| 413 | else: | 448 | else: |
| 414 | pm.update(inc=0, msg='warming up') | 449 | pm.update(inc=0, msg='warming up') |
| 415 | chunksize = 4 | 450 | chunksize = 4 |
| 416 | with multiprocessing.Pool(jobs) as pool: | 451 | with multiprocessing.Pool( |
| 452 | jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)) as pool: | ||
| 417 | results = pool.imap_unordered( | 453 | results = pool.imap_unordered( |
| 418 | functools.partial(self._FetchProjectList, opt), | 454 | functools.partial(self._FetchProjectList, opt), |
| 419 | projects_list, | 455 | projects_list, |
| @@ -422,6 +458,11 @@ later is required to fix a server side protocol bug. | |||
| 422 | ret = False | 458 | ret = False |
| 423 | pool.close() | 459 | pool.close() |
| 424 | 460 | ||
| 461 | # Cleanup the reference now that we're done with it, and we're going to | ||
| 462 | # release any resources it points to. If we don't, later multiprocessing | ||
| 463 | # usage (e.g. checkouts) will try to pickle and then crash. | ||
| 464 | del Sync.ssh_proxy | ||
| 465 | |||
| 425 | pm.end() | 466 | pm.end() |
| 426 | self._fetch_times.Save() | 467 | self._fetch_times.Save() |
| 427 | 468 | ||
| @@ -430,6 +471,69 @@ later is required to fix a server side protocol bug. | |||
| 430 | 471 | ||
| 431 | return (ret, fetched) | 472 | return (ret, fetched) |
| 432 | 473 | ||
| 474 | def _FetchMain(self, opt, args, all_projects, err_event, manifest_name, | ||
| 475 | load_local_manifests, ssh_proxy): | ||
| 476 | """The main network fetch loop. | ||
| 477 | |||
| 478 | Args: | ||
| 479 | opt: Program options returned from optparse. See _Options(). | ||
| 480 | args: Command line args used to filter out projects. | ||
| 481 | all_projects: List of all projects that should be fetched. | ||
| 482 | err_event: Whether an error was hit while processing. | ||
| 483 | manifest_name: Manifest file to be reloaded. | ||
| 484 | load_local_manifests: Whether to load local manifests. | ||
| 485 | ssh_proxy: SSH manager for clients & masters. | ||
| 486 | |||
| 487 | Returns: | ||
| 488 | List of all projects that should be checked out. | ||
| 489 | """ | ||
| 490 | rp = self.manifest.repoProject | ||
| 491 | |||
| 492 | to_fetch = [] | ||
| 493 | now = time.time() | ||
| 494 | if _ONE_DAY_S <= (now - rp.LastFetch): | ||
| 495 | to_fetch.append(rp) | ||
| 496 | to_fetch.extend(all_projects) | ||
| 497 | to_fetch.sort(key=self._fetch_times.Get, reverse=True) | ||
| 498 | |||
| 499 | success, fetched = self._Fetch(to_fetch, opt, err_event, ssh_proxy) | ||
| 500 | if not success: | ||
| 501 | err_event.set() | ||
| 502 | |||
| 503 | _PostRepoFetch(rp, opt.repo_verify) | ||
| 504 | if opt.network_only: | ||
| 505 | # bail out now; the rest touches the working tree | ||
| 506 | if err_event.is_set(): | ||
| 507 | print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr) | ||
| 508 | sys.exit(1) | ||
| 509 | return | ||
| 510 | |||
| 511 | # Iteratively fetch missing and/or nested unregistered submodules | ||
| 512 | previously_missing_set = set() | ||
| 513 | while True: | ||
| 514 | self._ReloadManifest(manifest_name, load_local_manifests) | ||
| 515 | all_projects = self.GetProjects(args, | ||
| 516 | missing_ok=True, | ||
| 517 | submodules_ok=opt.fetch_submodules) | ||
| 518 | missing = [] | ||
| 519 | for project in all_projects: | ||
| 520 | if project.gitdir not in fetched: | ||
| 521 | missing.append(project) | ||
| 522 | if not missing: | ||
| 523 | break | ||
| 524 | # Stop us from non-stopped fetching actually-missing repos: If set of | ||
| 525 | # missing repos has not been changed from last fetch, we break. | ||
| 526 | missing_set = set(p.name for p in missing) | ||
| 527 | if previously_missing_set == missing_set: | ||
| 528 | break | ||
| 529 | previously_missing_set = missing_set | ||
| 530 | success, new_fetched = self._Fetch(missing, opt, err_event, ssh_proxy) | ||
| 531 | if not success: | ||
| 532 | err_event.set() | ||
| 533 | fetched.update(new_fetched) | ||
| 534 | |||
| 535 | return all_projects | ||
| 536 | |||
| 433 | def _CheckoutOne(self, detach_head, force_sync, project): | 537 | def _CheckoutOne(self, detach_head, force_sync, project): |
| 434 | """Checkout work tree for one project | 538 | """Checkout work tree for one project |
| 435 | 539 | ||
| @@ -564,10 +668,18 @@ later is required to fix a server side protocol bug. | |||
| 564 | t.join() | 668 | t.join() |
| 565 | pm.end() | 669 | pm.end() |
| 566 | 670 | ||
| 567 | def _ReloadManifest(self, manifest_name=None): | 671 | def _ReloadManifest(self, manifest_name=None, load_local_manifests=True): |
| 672 | """Reload the manfiest from the file specified by the |manifest_name|. | ||
| 673 | |||
| 674 | It unloads the manifest if |manifest_name| is None. | ||
| 675 | |||
| 676 | Args: | ||
| 677 | manifest_name: Manifest file to be reloaded. | ||
| 678 | load_local_manifests: Whether to load local manifests. | ||
| 679 | """ | ||
| 568 | if manifest_name: | 680 | if manifest_name: |
| 569 | # Override calls _Unload already | 681 | # Override calls _Unload already |
| 570 | self.manifest.Override(manifest_name) | 682 | self.manifest.Override(manifest_name, load_local_manifests=load_local_manifests) |
| 571 | else: | 683 | else: |
| 572 | self.manifest._Unload() | 684 | self.manifest._Unload() |
| 573 | 685 | ||
| @@ -614,6 +726,56 @@ later is required to fix a server side protocol bug. | |||
| 614 | fd.write('\n') | 726 | fd.write('\n') |
| 615 | return 0 | 727 | return 0 |
| 616 | 728 | ||
| 729 | def UpdateCopyLinkfileList(self): | ||
| 730 | """Save all dests of copyfile and linkfile, and update them if needed. | ||
| 731 | |||
| 732 | Returns: | ||
| 733 | Whether update was successful. | ||
| 734 | """ | ||
| 735 | new_paths = {} | ||
| 736 | new_linkfile_paths = [] | ||
| 737 | new_copyfile_paths = [] | ||
| 738 | for project in self.GetProjects(None, missing_ok=True): | ||
| 739 | new_linkfile_paths.extend(x.dest for x in project.linkfiles) | ||
| 740 | new_copyfile_paths.extend(x.dest for x in project.copyfiles) | ||
| 741 | |||
| 742 | new_paths = { | ||
| 743 | 'linkfile': new_linkfile_paths, | ||
| 744 | 'copyfile': new_copyfile_paths, | ||
| 745 | } | ||
| 746 | |||
| 747 | copylinkfile_name = 'copy-link-files.json' | ||
| 748 | copylinkfile_path = os.path.join(self.manifest.repodir, copylinkfile_name) | ||
| 749 | old_copylinkfile_paths = {} | ||
| 750 | |||
| 751 | if os.path.exists(copylinkfile_path): | ||
| 752 | with open(copylinkfile_path, 'rb') as fp: | ||
| 753 | try: | ||
| 754 | old_copylinkfile_paths = json.load(fp) | ||
| 755 | except: | ||
| 756 | print('error: %s is not a json formatted file.' % | ||
| 757 | copylinkfile_path, file=sys.stderr) | ||
| 758 | platform_utils.remove(copylinkfile_path) | ||
| 759 | return False | ||
| 760 | |||
| 761 | need_remove_files = [] | ||
| 762 | need_remove_files.extend( | ||
| 763 | set(old_copylinkfile_paths.get('linkfile', [])) - | ||
| 764 | set(new_linkfile_paths)) | ||
| 765 | need_remove_files.extend( | ||
| 766 | set(old_copylinkfile_paths.get('copyfile', [])) - | ||
| 767 | set(new_copyfile_paths)) | ||
| 768 | |||
| 769 | for need_remove_file in need_remove_files: | ||
| 770 | # Try to remove the updated copyfile or linkfile. | ||
| 771 | # So, if the file does not exist, nothing needs to be done. | ||
| 772 | platform_utils.remove(need_remove_file, missing_ok=True) | ||
| 773 | |||
| 774 | # Create copy-link-files.json, save dest path of "copyfile" and "linkfile". | ||
| 775 | with open(copylinkfile_path, 'w', encoding='utf-8') as fp: | ||
| 776 | json.dump(new_paths, fp) | ||
| 777 | return True | ||
| 778 | |||
| 617 | def _SmartSyncSetup(self, opt, smart_sync_manifest_path): | 779 | def _SmartSyncSetup(self, opt, smart_sync_manifest_path): |
| 618 | if not self.manifest.manifest_server: | 780 | if not self.manifest.manifest_server: |
| 619 | print('error: cannot smart sync: no manifest server defined in ' | 781 | print('error: cannot smart sync: no manifest server defined in ' |
| @@ -730,7 +892,7 @@ later is required to fix a server side protocol bug. | |||
| 730 | start, time.time(), clean) | 892 | start, time.time(), clean) |
| 731 | if not clean: | 893 | if not clean: |
| 732 | sys.exit(1) | 894 | sys.exit(1) |
| 733 | self._ReloadManifest(opt.manifest_name) | 895 | self._ReloadManifest(manifest_name) |
| 734 | if opt.jobs is None: | 896 | if opt.jobs is None: |
| 735 | self.jobs = self.manifest.default.sync_j | 897 | self.jobs = self.manifest.default.sync_j |
| 736 | 898 | ||
| @@ -779,7 +941,7 @@ later is required to fix a server side protocol bug. | |||
| 779 | print('error: failed to remove existing smart sync override manifest: %s' % | 941 | print('error: failed to remove existing smart sync override manifest: %s' % |
| 780 | e, file=sys.stderr) | 942 | e, file=sys.stderr) |
| 781 | 943 | ||
| 782 | err_event = _threading.Event() | 944 | err_event = multiprocessing.Event() |
| 783 | 945 | ||
| 784 | rp = self.manifest.repoProject | 946 | rp = self.manifest.repoProject |
| 785 | rp.PreSync() | 947 | rp.PreSync() |
| @@ -802,8 +964,16 @@ later is required to fix a server side protocol bug. | |||
| 802 | else: | 964 | else: |
| 803 | self._UpdateManifestProject(opt, mp, manifest_name) | 965 | self._UpdateManifestProject(opt, mp, manifest_name) |
| 804 | 966 | ||
| 805 | if self._UseSuperproject(opt): | 967 | load_local_manifests = not self.manifest.HasLocalManifests |
| 806 | manifest_name = self._UpdateProjectsRevisionId(opt, args) | 968 | use_superproject = git_superproject.UseSuperproject(opt, self.manifest) |
| 969 | superproject_logging_data = { | ||
| 970 | 'superproject': use_superproject, | ||
| 971 | 'haslocalmanifests': bool(self.manifest.HasLocalManifests), | ||
| 972 | 'hassuperprojecttag': bool(self.manifest.superproject), | ||
| 973 | } | ||
| 974 | if use_superproject: | ||
| 975 | manifest_name = self._UpdateProjectsRevisionId( | ||
| 976 | opt, args, load_local_manifests, superproject_logging_data) or opt.manifest_name | ||
| 807 | 977 | ||
| 808 | if self.gitc_manifest: | 978 | if self.gitc_manifest: |
| 809 | gitc_manifest_projects = self.GetProjects(args, | 979 | gitc_manifest_projects = self.GetProjects(args, |
| @@ -849,49 +1019,17 @@ later is required to fix a server side protocol bug. | |||
| 849 | 1019 | ||
| 850 | self._fetch_times = _FetchTimes(self.manifest) | 1020 | self._fetch_times = _FetchTimes(self.manifest) |
| 851 | if not opt.local_only: | 1021 | if not opt.local_only: |
| 852 | to_fetch = [] | 1022 | with multiprocessing.Manager() as manager: |
| 853 | now = time.time() | 1023 | with ssh.ProxyManager(manager) as ssh_proxy: |
| 854 | if _ONE_DAY_S <= (now - rp.LastFetch): | 1024 | # Initialize the socket dir once in the parent. |
| 855 | to_fetch.append(rp) | 1025 | ssh_proxy.sock() |
| 856 | to_fetch.extend(all_projects) | 1026 | all_projects = self._FetchMain(opt, args, all_projects, err_event, |
| 857 | to_fetch.sort(key=self._fetch_times.Get, reverse=True) | 1027 | manifest_name, load_local_manifests, |
| 858 | 1028 | ssh_proxy) | |
| 859 | success, fetched = self._Fetch(to_fetch, opt, err_event) | ||
| 860 | if not success: | ||
| 861 | err_event.set() | ||
| 862 | 1029 | ||
| 863 | _PostRepoFetch(rp, opt.repo_verify) | ||
| 864 | if opt.network_only: | 1030 | if opt.network_only: |
| 865 | # bail out now; the rest touches the working tree | ||
| 866 | if err_event.is_set(): | ||
| 867 | print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr) | ||
| 868 | sys.exit(1) | ||
| 869 | return | 1031 | return |
| 870 | 1032 | ||
| 871 | # Iteratively fetch missing and/or nested unregistered submodules | ||
| 872 | previously_missing_set = set() | ||
| 873 | while True: | ||
| 874 | self._ReloadManifest(manifest_name) | ||
| 875 | all_projects = self.GetProjects(args, | ||
| 876 | missing_ok=True, | ||
| 877 | submodules_ok=opt.fetch_submodules) | ||
| 878 | missing = [] | ||
| 879 | for project in all_projects: | ||
| 880 | if project.gitdir not in fetched: | ||
| 881 | missing.append(project) | ||
| 882 | if not missing: | ||
| 883 | break | ||
| 884 | # Stop us from non-stopped fetching actually-missing repos: If set of | ||
| 885 | # missing repos has not been changed from last fetch, we break. | ||
| 886 | missing_set = set(p.name for p in missing) | ||
| 887 | if previously_missing_set == missing_set: | ||
| 888 | break | ||
| 889 | previously_missing_set = missing_set | ||
| 890 | success, new_fetched = self._Fetch(missing, opt, err_event) | ||
| 891 | if not success: | ||
| 892 | err_event.set() | ||
| 893 | fetched.update(new_fetched) | ||
| 894 | |||
| 895 | # If we saw an error, exit with code 1 so that other scripts can check. | 1033 | # If we saw an error, exit with code 1 so that other scripts can check. |
| 896 | if err_event.is_set(): | 1034 | if err_event.is_set(): |
| 897 | err_network_sync = True | 1035 | err_network_sync = True |
| @@ -914,6 +1052,13 @@ later is required to fix a server side protocol bug. | |||
| 914 | print('\nerror: Local checkouts *not* updated.', file=sys.stderr) | 1052 | print('\nerror: Local checkouts *not* updated.', file=sys.stderr) |
| 915 | sys.exit(1) | 1053 | sys.exit(1) |
| 916 | 1054 | ||
| 1055 | err_update_linkfiles = not self.UpdateCopyLinkfileList() | ||
| 1056 | if err_update_linkfiles: | ||
| 1057 | err_event.set() | ||
| 1058 | if opt.fail_fast: | ||
| 1059 | print('\nerror: Local update copyfile or linkfile failed.', file=sys.stderr) | ||
| 1060 | sys.exit(1) | ||
| 1061 | |||
| 917 | err_results = [] | 1062 | err_results = [] |
| 918 | # NB: We don't exit here because this is the last step. | 1063 | # NB: We don't exit here because this is the last step. |
| 919 | err_checkout = not self._Checkout(all_projects, opt, err_results) | 1064 | err_checkout = not self._Checkout(all_projects, opt, err_results) |
| @@ -932,6 +1077,8 @@ later is required to fix a server side protocol bug. | |||
| 932 | print('error: Downloading network changes failed.', file=sys.stderr) | 1077 | print('error: Downloading network changes failed.', file=sys.stderr) |
| 933 | if err_update_projects: | 1078 | if err_update_projects: |
| 934 | print('error: Updating local project lists failed.', file=sys.stderr) | 1079 | print('error: Updating local project lists failed.', file=sys.stderr) |
| 1080 | if err_update_linkfiles: | ||
| 1081 | print('error: Updating copyfiles or linkfiles failed.', file=sys.stderr) | ||
| 935 | if err_checkout: | 1082 | if err_checkout: |
| 936 | print('error: Checking out local projects failed.', file=sys.stderr) | 1083 | print('error: Checking out local projects failed.', file=sys.stderr) |
| 937 | if err_results: | 1084 | if err_results: |
| @@ -940,6 +1087,15 @@ later is required to fix a server side protocol bug. | |||
| 940 | file=sys.stderr) | 1087 | file=sys.stderr) |
| 941 | sys.exit(1) | 1088 | sys.exit(1) |
| 942 | 1089 | ||
| 1090 | # Log the previous sync analysis state from the config. | ||
| 1091 | self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(), | ||
| 1092 | 'previous_sync_state') | ||
| 1093 | |||
| 1094 | # Update and log with the new sync analysis state. | ||
| 1095 | mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data) | ||
| 1096 | self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(), | ||
| 1097 | 'current_sync_state') | ||
| 1098 | |||
| 943 | if not opt.quiet: | 1099 | if not opt.quiet: |
| 944 | print('repo sync has finished successfully.') | 1100 | print('repo sync has finished successfully.') |
| 945 | 1101 | ||
| @@ -1011,10 +1167,7 @@ class _FetchTimes(object): | |||
| 1011 | with open(self._path) as f: | 1167 | with open(self._path) as f: |
| 1012 | self._times = json.load(f) | 1168 | self._times = json.load(f) |
| 1013 | except (IOError, ValueError): | 1169 | except (IOError, ValueError): |
| 1014 | try: | 1170 | platform_utils.remove(self._path, missing_ok=True) |
| 1015 | platform_utils.remove(self._path) | ||
| 1016 | except OSError: | ||
| 1017 | pass | ||
| 1018 | self._times = {} | 1171 | self._times = {} |
| 1019 | 1172 | ||
| 1020 | def Save(self): | 1173 | def Save(self): |
| @@ -1032,10 +1185,7 @@ class _FetchTimes(object): | |||
| 1032 | with open(self._path, 'w') as f: | 1185 | with open(self._path, 'w') as f: |
| 1033 | json.dump(self._times, f, indent=2) | 1186 | json.dump(self._times, f, indent=2) |
| 1034 | except (IOError, TypeError): | 1187 | except (IOError, TypeError): |
| 1035 | try: | 1188 | platform_utils.remove(self._path, missing_ok=True) |
| 1036 | platform_utils.remove(self._path) | ||
| 1037 | except OSError: | ||
| 1038 | pass | ||
| 1039 | 1189 | ||
| 1040 | # This is a replacement for xmlrpc.client.Transport using urllib2 | 1190 | # This is a replacement for xmlrpc.client.Transport using urllib2 |
| 1041 | # and supporting persistent-http[s]. It cannot change hosts from | 1191 | # and supporting persistent-http[s]. It cannot change hosts from |
