| author | Gavin Mak <gavinmak@google.com> | 2023-03-11 06:46:20 +0000 |
|---|---|---|
| committer | LUCI <gerrit-scoped@luci-project-accounts.iam.gserviceaccount.com> | 2023-03-22 17:46:28 +0000 |
| commit | ea2e330e43c182dc16b0111ebc69ee5a71ee4ce1 (patch) | |
| tree | dc33ba0e56825b3e007d0589891756724725a465 /project.py | |
| parent | 1604cf255f8c1786a23388db6d5277ac7949a24a (diff) | |
| download | git-repo-ea2e330e43c182dc16b0111ebc69ee5a71ee4ce1.tar.gz | |
Format codebase with black and check formatting in CQ
Apply rules set by https://gerrit-review.googlesource.com/c/git-repo/+/362954/ across the codebase and fix any lingering errors caught
by flake8. Also check black formatting in run_tests (and CQ).
Bug: b/267675342
Change-Id: I972d77649dac351150dcfeb1cd1ad0ea2efc1956
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/363474
Reviewed-by: Mike Frysinger <vapier@google.com>
Tested-by: Gavin Mak <gavinmak@google.com>
Commit-Queue: Gavin Mak <gavinmak@google.com>
Diffstat (limited to 'project.py')
| -rw-r--r-- | project.py | 7874 |
1 file changed, 4169 insertions, 3705 deletions
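The commit message notes that black formatting is now verified in run_tests (and the CQ). That hook itself is not part of this excerpt, so the following is only a minimal sketch of such a check, assuming `black` is installed on PATH and using its real `--check` flag via `subprocess`; the function name and structure are illustrative, not the actual run_tests.py code.

```python
# Hypothetical sketch of a formatting gate; not the actual run_tests.py hook.
import subprocess
import sys


def check_black_formatting(paths=(".",)):
    """Return True if every file under |paths| already satisfies black."""
    # `black --check` exits 0 when nothing would change, 1 when files need
    # reformatting, and >1 on internal errors.
    result = subprocess.run(
        ["black", "--check", *paths],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        print(result.stderr or result.stdout, file=sys.stderr)
    return result.returncode == 0


if __name__ == "__main__":
    sys.exit(0 if check_black_formatting() else 1)
```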
| @@ -32,8 +32,13 @@ import urllib.parse | |||
| 32 | from color import Coloring | 32 | from color import Coloring |
| 33 | import fetch | 33 | import fetch |
| 34 | from git_command import GitCommand, git_require | 34 | from git_command import GitCommand, git_require |
| 35 | from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \ | 35 | from git_config import ( |
| 36 | ID_RE | 36 | GitConfig, |
| 37 | IsId, | ||
| 38 | GetSchemeFromUrl, | ||
| 39 | GetUrlCookieFile, | ||
| 40 | ID_RE, | ||
| 41 | ) | ||
| 37 | import git_superproject | 42 | import git_superproject |
| 38 | from git_trace2_event_log import EventLog | 43 | from git_trace2_event_log import EventLog |
| 39 | from error import GitError, UploadError, DownloadError | 44 | from error import GitError, UploadError, DownloadError |
| @@ -47,12 +52,13 @@ from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M | |||
| 47 | 52 | ||
| 48 | 53 | ||
| 49 | class SyncNetworkHalfResult(NamedTuple): | 54 | class SyncNetworkHalfResult(NamedTuple): |
| 50 | """Sync_NetworkHalf return value.""" | 55 | """Sync_NetworkHalf return value.""" |
| 51 | # True if successful. | 56 | |
| 52 | success: bool | 57 | # True if successful. |
| 53 | # Did we query the remote? False when optimized_fetch is True and we have the | 58 | success: bool |
| 54 | # commit already present. | 59 | # Did we query the remote? False when optimized_fetch is True and we have |
| 55 | remote_fetched: bool | 60 | # the commit already present. |
| 61 | remote_fetched: bool | ||
| 56 | 62 | ||
| 57 | 63 | ||
| 58 | # Maximum sleep time allowed during retries. | 64 | # Maximum sleep time allowed during retries. |
| @@ -62,3904 +68,4362 @@ RETRY_JITTER_PERCENT = 0.1 | |||
| 62 | 68 | ||
| 63 | # Whether to use alternates. Switching back and forth is *NOT* supported. | 69 | # Whether to use alternates. Switching back and forth is *NOT* supported. |
| 64 | # TODO(vapier): Remove knob once behavior is verified. | 70 | # TODO(vapier): Remove knob once behavior is verified. |
| 65 | _ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1' | 71 | _ALTERNATES = os.environ.get("REPO_USE_ALTERNATES") == "1" |
| 66 | 72 | ||
| 67 | 73 | ||
| 68 | def _lwrite(path, content): | 74 | def _lwrite(path, content): |
| 69 | lock = '%s.lock' % path | 75 | lock = "%s.lock" % path |
| 70 | 76 | ||
| 71 | # Maintain Unix line endings on all OS's to match git behavior. | 77 | # Maintain Unix line endings on all OS's to match git behavior. |
| 72 | with open(lock, 'w', newline='\n') as fd: | 78 | with open(lock, "w", newline="\n") as fd: |
| 73 | fd.write(content) | 79 | fd.write(content) |
| 74 | 80 | ||
| 75 | try: | 81 | try: |
| 76 | platform_utils.rename(lock, path) | 82 | platform_utils.rename(lock, path) |
| 77 | except OSError: | 83 | except OSError: |
| 78 | platform_utils.remove(lock) | 84 | platform_utils.remove(lock) |
| 79 | raise | 85 | raise |
| 80 | 86 | ||
| 81 | 87 | ||
| 82 | def _error(fmt, *args): | 88 | def _error(fmt, *args): |
| 83 | msg = fmt % args | 89 | msg = fmt % args |
| 84 | print('error: %s' % msg, file=sys.stderr) | 90 | print("error: %s" % msg, file=sys.stderr) |
| 85 | 91 | ||
| 86 | 92 | ||
| 87 | def _warn(fmt, *args): | 93 | def _warn(fmt, *args): |
| 88 | msg = fmt % args | 94 | msg = fmt % args |
| 89 | print('warn: %s' % msg, file=sys.stderr) | 95 | print("warn: %s" % msg, file=sys.stderr) |
| 90 | 96 | ||
| 91 | 97 | ||
| 92 | def not_rev(r): | 98 | def not_rev(r): |
| 93 | return '^' + r | 99 | return "^" + r |
| 94 | 100 | ||
| 95 | 101 | ||
| 96 | def sq(r): | 102 | def sq(r): |
| 97 | return "'" + r.replace("'", "'\''") + "'" | 103 | return "'" + r.replace("'", "'\''") + "'" |
| 98 | 104 | ||
| 99 | 105 | ||
| 100 | _project_hook_list = None | 106 | _project_hook_list = None |
| 101 | 107 | ||
| 102 | 108 | ||
| 103 | def _ProjectHooks(): | 109 | def _ProjectHooks(): |
| 104 | """List the hooks present in the 'hooks' directory. | 110 | """List the hooks present in the 'hooks' directory. |
| 105 | 111 | ||
| 106 | These hooks are project hooks and are copied to the '.git/hooks' directory | 112 | These hooks are project hooks and are copied to the '.git/hooks' directory |
| 107 | of all subprojects. | 113 | of all subprojects. |
| 108 | 114 | ||
| 109 | This function caches the list of hooks (based on the contents of the | 115 | This function caches the list of hooks (based on the contents of the |
| 110 | 'repo/hooks' directory) on the first call. | 116 | 'repo/hooks' directory) on the first call. |
| 111 | 117 | ||
| 112 | Returns: | 118 | Returns: |
| 113 | A list of absolute paths to all of the files in the hooks directory. | 119 | A list of absolute paths to all of the files in the hooks directory. |
| 114 | """ | 120 | """ |
| 115 | global _project_hook_list | 121 | global _project_hook_list |
| 116 | if _project_hook_list is None: | 122 | if _project_hook_list is None: |
| 117 | d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__))) | 123 | d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__))) |
| 118 | d = os.path.join(d, 'hooks') | 124 | d = os.path.join(d, "hooks") |
| 119 | _project_hook_list = [os.path.join(d, x) for x in platform_utils.listdir(d)] | 125 | _project_hook_list = [ |
| 120 | return _project_hook_list | 126 | os.path.join(d, x) for x in platform_utils.listdir(d) |
| 127 | ] | ||
| 128 | return _project_hook_list | ||
| 121 | 129 | ||
| 122 | 130 | ||
| 123 | class DownloadedChange(object): | 131 | class DownloadedChange(object): |
| 124 | _commit_cache = None | 132 | _commit_cache = None |
| 125 | 133 | ||
| 126 | def __init__(self, project, base, change_id, ps_id, commit): | 134 | def __init__(self, project, base, change_id, ps_id, commit): |
| 127 | self.project = project | 135 | self.project = project |
| 128 | self.base = base | 136 | self.base = base |
| 129 | self.change_id = change_id | 137 | self.change_id = change_id |
| 130 | self.ps_id = ps_id | 138 | self.ps_id = ps_id |
| 131 | self.commit = commit | 139 | self.commit = commit |
| 132 | 140 | ||
| 133 | @property | 141 | @property |
| 134 | def commits(self): | 142 | def commits(self): |
| 135 | if self._commit_cache is None: | 143 | if self._commit_cache is None: |
| 136 | self._commit_cache = self.project.bare_git.rev_list('--abbrev=8', | 144 | self._commit_cache = self.project.bare_git.rev_list( |
| 137 | '--abbrev-commit', | 145 | "--abbrev=8", |
| 138 | '--pretty=oneline', | 146 | "--abbrev-commit", |
| 139 | '--reverse', | 147 | "--pretty=oneline", |
| 140 | '--date-order', | 148 | "--reverse", |
| 141 | not_rev(self.base), | 149 | "--date-order", |
| 142 | self.commit, | 150 | not_rev(self.base), |
| 143 | '--') | 151 | self.commit, |
| 144 | return self._commit_cache | 152 | "--", |
| 153 | ) | ||
| 154 | return self._commit_cache | ||
| 145 | 155 | ||
| 146 | 156 | ||
| 147 | class ReviewableBranch(object): | 157 | class ReviewableBranch(object): |
| 148 | _commit_cache = None | 158 | _commit_cache = None |
| 149 | _base_exists = None | 159 | _base_exists = None |
| 150 | 160 | ||
| 151 | def __init__(self, project, branch, base): | 161 | def __init__(self, project, branch, base): |
| 152 | self.project = project | 162 | self.project = project |
| 153 | self.branch = branch | 163 | self.branch = branch |
| 154 | self.base = base | 164 | self.base = base |
| 155 | 165 | ||
| 156 | @property | 166 | @property |
| 157 | def name(self): | 167 | def name(self): |
| 158 | return self.branch.name | 168 | return self.branch.name |
| 159 | 169 | ||
| 160 | @property | 170 | @property |
| 161 | def commits(self): | 171 | def commits(self): |
| 162 | if self._commit_cache is None: | 172 | if self._commit_cache is None: |
| 163 | args = ('--abbrev=8', '--abbrev-commit', '--pretty=oneline', '--reverse', | 173 | args = ( |
| 164 | '--date-order', not_rev(self.base), R_HEADS + self.name, '--') | 174 | "--abbrev=8", |
| 165 | try: | 175 | "--abbrev-commit", |
| 166 | self._commit_cache = self.project.bare_git.rev_list(*args) | 176 | "--pretty=oneline", |
| 167 | except GitError: | 177 | "--reverse", |
| 168 | # We weren't able to probe the commits for this branch. Was it tracking | 178 | "--date-order", |
| 169 | # a branch that no longer exists? If so, return no commits. Otherwise, | 179 | not_rev(self.base), |
| 170 | # rethrow the error as we don't know what's going on. | 180 | R_HEADS + self.name, |
| 171 | if self.base_exists: | 181 | "--", |
| 172 | raise | 182 | ) |
| 173 | 183 | try: | |
| 174 | self._commit_cache = [] | 184 | self._commit_cache = self.project.bare_git.rev_list(*args) |
| 175 | 185 | except GitError: | |
| 176 | return self._commit_cache | 186 | # We weren't able to probe the commits for this branch. Was it |
| 177 | 187 | # tracking a branch that no longer exists? If so, return no | |
| 178 | @property | 188 | # commits. Otherwise, rethrow the error as we don't know what's |
| 179 | def unabbrev_commits(self): | 189 | # going on. |
| 180 | r = dict() | 190 | if self.base_exists: |
| 181 | for commit in self.project.bare_git.rev_list(not_rev(self.base), | 191 | raise |
| 182 | R_HEADS + self.name, | 192 | |
| 183 | '--'): | 193 | self._commit_cache = [] |
| 184 | r[commit[0:8]] = commit | 194 | |
| 185 | return r | 195 | return self._commit_cache |
| 186 | 196 | ||
| 187 | @property | 197 | @property |
| 188 | def date(self): | 198 | def unabbrev_commits(self): |
| 189 | return self.project.bare_git.log('--pretty=format:%cd', | 199 | r = dict() |
| 190 | '-n', '1', | 200 | for commit in self.project.bare_git.rev_list( |
| 191 | R_HEADS + self.name, | 201 | not_rev(self.base), R_HEADS + self.name, "--" |
| 192 | '--') | 202 | ): |
| 193 | 203 | r[commit[0:8]] = commit | |
| 194 | @property | 204 | return r |
| 195 | def base_exists(self): | 205 | |
| 196 | """Whether the branch we're tracking exists. | 206 | @property |
| 197 | 207 | def date(self): | |
| 198 | Normally it should, but sometimes branches we track can get deleted. | 208 | return self.project.bare_git.log( |
| 199 | """ | 209 | "--pretty=format:%cd", "-n", "1", R_HEADS + self.name, "--" |
| 200 | if self._base_exists is None: | 210 | ) |
| 201 | try: | ||
| 202 | self.project.bare_git.rev_parse('--verify', not_rev(self.base)) | ||
| 203 | # If we're still here, the base branch exists. | ||
| 204 | self._base_exists = True | ||
| 205 | except GitError: | ||
| 206 | # If we failed to verify, the base branch doesn't exist. | ||
| 207 | self._base_exists = False | ||
| 208 | |||
| 209 | return self._base_exists | ||
| 210 | |||
| 211 | def UploadForReview(self, people, | ||
| 212 | dryrun=False, | ||
| 213 | auto_topic=False, | ||
| 214 | hashtags=(), | ||
| 215 | labels=(), | ||
| 216 | private=False, | ||
| 217 | notify=None, | ||
| 218 | wip=False, | ||
| 219 | ready=False, | ||
| 220 | dest_branch=None, | ||
| 221 | validate_certs=True, | ||
| 222 | push_options=None): | ||
| 223 | self.project.UploadForReview(branch=self.name, | ||
| 224 | people=people, | ||
| 225 | dryrun=dryrun, | ||
| 226 | auto_topic=auto_topic, | ||
| 227 | hashtags=hashtags, | ||
| 228 | labels=labels, | ||
| 229 | private=private, | ||
| 230 | notify=notify, | ||
| 231 | wip=wip, | ||
| 232 | ready=ready, | ||
| 233 | dest_branch=dest_branch, | ||
| 234 | validate_certs=validate_certs, | ||
| 235 | push_options=push_options) | ||
| 236 | |||
| 237 | def GetPublishedRefs(self): | ||
| 238 | refs = {} | ||
| 239 | output = self.project.bare_git.ls_remote( | ||
| 240 | self.branch.remote.SshReviewUrl(self.project.UserEmail), | ||
| 241 | 'refs/changes/*') | ||
| 242 | for line in output.split('\n'): | ||
| 243 | try: | ||
| 244 | (sha, ref) = line.split() | ||
| 245 | refs[sha] = ref | ||
| 246 | except ValueError: | ||
| 247 | pass | ||
| 248 | |||
| 249 | return refs | ||
| 250 | 211 | ||
| 212 | @property | ||
| 213 | def base_exists(self): | ||
| 214 | """Whether the branch we're tracking exists. | ||
| 251 | 215 | ||
| 252 | class StatusColoring(Coloring): | 216 | Normally it should, but sometimes branches we track can get deleted. |
| 217 | """ | ||
| 218 | if self._base_exists is None: | ||
| 219 | try: | ||
| 220 | self.project.bare_git.rev_parse("--verify", not_rev(self.base)) | ||
| 221 | # If we're still here, the base branch exists. | ||
| 222 | self._base_exists = True | ||
| 223 | except GitError: | ||
| 224 | # If we failed to verify, the base branch doesn't exist. | ||
| 225 | self._base_exists = False | ||
| 226 | |||
| 227 | return self._base_exists | ||
| 228 | |||
| 229 | def UploadForReview( | ||
| 230 | self, | ||
| 231 | people, | ||
| 232 | dryrun=False, | ||
| 233 | auto_topic=False, | ||
| 234 | hashtags=(), | ||
| 235 | labels=(), | ||
| 236 | private=False, | ||
| 237 | notify=None, | ||
| 238 | wip=False, | ||
| 239 | ready=False, | ||
| 240 | dest_branch=None, | ||
| 241 | validate_certs=True, | ||
| 242 | push_options=None, | ||
| 243 | ): | ||
| 244 | self.project.UploadForReview( | ||
| 245 | branch=self.name, | ||
| 246 | people=people, | ||
| 247 | dryrun=dryrun, | ||
| 248 | auto_topic=auto_topic, | ||
| 249 | hashtags=hashtags, | ||
| 250 | labels=labels, | ||
| 251 | private=private, | ||
| 252 | notify=notify, | ||
| 253 | wip=wip, | ||
| 254 | ready=ready, | ||
| 255 | dest_branch=dest_branch, | ||
| 256 | validate_certs=validate_certs, | ||
| 257 | push_options=push_options, | ||
| 258 | ) | ||
| 253 | 259 | ||
| 254 | def __init__(self, config): | 260 | def GetPublishedRefs(self): |
| 255 | super().__init__(config, 'status') | 261 | refs = {} |
| 256 | self.project = self.printer('header', attr='bold') | 262 | output = self.project.bare_git.ls_remote( |
| 257 | self.branch = self.printer('header', attr='bold') | 263 | self.branch.remote.SshReviewUrl(self.project.UserEmail), |
| 258 | self.nobranch = self.printer('nobranch', fg='red') | 264 | "refs/changes/*", |
| 259 | self.important = self.printer('important', fg='red') | 265 | ) |
| 266 | for line in output.split("\n"): | ||
| 267 | try: | ||
| 268 | (sha, ref) = line.split() | ||
| 269 | refs[sha] = ref | ||
| 270 | except ValueError: | ||
| 271 | pass | ||
| 260 | 272 | ||
| 261 | self.added = self.printer('added', fg='green') | 273 | return refs |
| 262 | self.changed = self.printer('changed', fg='red') | ||
| 263 | self.untracked = self.printer('untracked', fg='red') | ||
| 264 | 274 | ||
| 265 | 275 | ||
| 266 | class DiffColoring(Coloring): | 276 | class StatusColoring(Coloring): |
| 277 | def __init__(self, config): | ||
| 278 | super().__init__(config, "status") | ||
| 279 | self.project = self.printer("header", attr="bold") | ||
| 280 | self.branch = self.printer("header", attr="bold") | ||
| 281 | self.nobranch = self.printer("nobranch", fg="red") | ||
| 282 | self.important = self.printer("important", fg="red") | ||
| 267 | 283 | ||
| 268 | def __init__(self, config): | 284 | self.added = self.printer("added", fg="green") |
| 269 | super().__init__(config, 'diff') | 285 | self.changed = self.printer("changed", fg="red") |
| 270 | self.project = self.printer('header', attr='bold') | 286 | self.untracked = self.printer("untracked", fg="red") |
| 271 | self.fail = self.printer('fail', fg='red') | ||
| 272 | 287 | ||
| 273 | 288 | ||
| 274 | class Annotation(object): | 289 | class DiffColoring(Coloring): |
| 290 | def __init__(self, config): | ||
| 291 | super().__init__(config, "diff") | ||
| 292 | self.project = self.printer("header", attr="bold") | ||
| 293 | self.fail = self.printer("fail", fg="red") | ||
| 294 | |||
| 275 | 295 | ||
| 276 | def __init__(self, name, value, keep): | 296 | class Annotation(object): |
| 277 | self.name = name | 297 | def __init__(self, name, value, keep): |
| 278 | self.value = value | 298 | self.name = name |
| 279 | self.keep = keep | 299 | self.value = value |
| 300 | self.keep = keep | ||
| 280 | 301 | ||
| 281 | def __eq__(self, other): | 302 | def __eq__(self, other): |
| 282 | if not isinstance(other, Annotation): | 303 | if not isinstance(other, Annotation): |
| 283 | return False | 304 | return False |
| 284 | return self.__dict__ == other.__dict__ | 305 | return self.__dict__ == other.__dict__ |
| 285 | 306 | ||
| 286 | def __lt__(self, other): | 307 | def __lt__(self, other): |
| 287 | # This exists just so that lists of Annotation objects can be sorted, for | 308 | # This exists just so that lists of Annotation objects can be sorted, |
| 288 | # use in comparisons. | 309 | # for use in comparisons. |
| 289 | if not isinstance(other, Annotation): | 310 | if not isinstance(other, Annotation): |
| 290 | raise ValueError('comparison is not between two Annotation objects') | 311 | raise ValueError("comparison is not between two Annotation objects") |
| 291 | if self.name == other.name: | 312 | if self.name == other.name: |
| 292 | if self.value == other.value: | 313 | if self.value == other.value: |
| 293 | return self.keep < other.keep | 314 | return self.keep < other.keep |
| 294 | return self.value < other.value | 315 | return self.value < other.value |
| 295 | return self.name < other.name | 316 | return self.name < other.name |
| 296 | 317 | ||
| 297 | 318 | ||
| 298 | def _SafeExpandPath(base, subpath, skipfinal=False): | 319 | def _SafeExpandPath(base, subpath, skipfinal=False): |
| 299 | """Make sure |subpath| is completely safe under |base|. | 320 | """Make sure |subpath| is completely safe under |base|. |
| 300 | 321 | ||
| 301 | We make sure no intermediate symlinks are traversed, and that the final path | 322 | We make sure no intermediate symlinks are traversed, and that the final path |
| 302 | is not a special file (e.g. not a socket or fifo). | 323 | is not a special file (e.g. not a socket or fifo). |
| 303 | 324 | ||
| 304 | NB: We rely on a number of paths already being filtered out while parsing the | 325 | NB: We rely on a number of paths already being filtered out while parsing |
| 305 | manifest. See the validation logic in manifest_xml.py for more details. | 326 | the manifest. See the validation logic in manifest_xml.py for more details. |
| 306 | """ | 327 | """ |
| 307 | # Split up the path by its components. We can't use os.path.sep exclusively | 328 | # Split up the path by its components. We can't use os.path.sep exclusively |
| 308 | # as some platforms (like Windows) will convert / to \ and that bypasses all | 329 | # as some platforms (like Windows) will convert / to \ and that bypasses all |
| 309 | # our constructed logic here. Especially since manifest authors only use | 330 | # our constructed logic here. Especially since manifest authors only use |
| 310 | # / in their paths. | 331 | # / in their paths. |
| 311 | resep = re.compile(r'[/%s]' % re.escape(os.path.sep)) | 332 | resep = re.compile(r"[/%s]" % re.escape(os.path.sep)) |
| 312 | components = resep.split(subpath) | 333 | components = resep.split(subpath) |
| 313 | if skipfinal: | 334 | if skipfinal: |
| 314 | # Whether the caller handles the final component itself. | 335 | # Whether the caller handles the final component itself. |
| 315 | finalpart = components.pop() | 336 | finalpart = components.pop() |
| 316 | 337 | ||
| 317 | path = base | 338 | path = base |
| 318 | for part in components: | 339 | for part in components: |
| 319 | if part in {'.', '..'}: | 340 | if part in {".", ".."}: |
| 320 | raise ManifestInvalidPathError( | 341 | raise ManifestInvalidPathError( |
| 321 | '%s: "%s" not allowed in paths' % (subpath, part)) | 342 | '%s: "%s" not allowed in paths' % (subpath, part) |
| 322 | 343 | ) | |
| 323 | path = os.path.join(path, part) | 344 | |
| 324 | if platform_utils.islink(path): | 345 | path = os.path.join(path, part) |
| 325 | raise ManifestInvalidPathError( | 346 | if platform_utils.islink(path): |
| 326 | '%s: traversing symlinks not allow' % (path,)) | 347 | raise ManifestInvalidPathError( |
| 327 | 348 | "%s: traversing symlinks not allow" % (path,) | |
| 328 | if os.path.exists(path): | 349 | ) |
| 329 | if not os.path.isfile(path) and not platform_utils.isdir(path): | 350 | |
| 330 | raise ManifestInvalidPathError( | 351 | if os.path.exists(path): |
| 331 | '%s: only regular files & directories allowed' % (path,)) | 352 | if not os.path.isfile(path) and not platform_utils.isdir(path): |
| 332 | 353 | raise ManifestInvalidPathError( | |
| 333 | if skipfinal: | 354 | "%s: only regular files & directories allowed" % (path,) |
| 334 | path = os.path.join(path, finalpart) | 355 | ) |
| 335 | 356 | ||
| 336 | return path | 357 | if skipfinal: |
| 358 | path = os.path.join(path, finalpart) | ||
| 359 | |||
| 360 | return path | ||
| 337 | 361 | ||
| 338 | 362 | ||
| 339 | class _CopyFile(object): | 363 | class _CopyFile(object): |
| 340 | """Container for <copyfile> manifest element.""" | 364 | """Container for <copyfile> manifest element.""" |
| 341 | 365 | ||
| 342 | def __init__(self, git_worktree, src, topdir, dest): | 366 | def __init__(self, git_worktree, src, topdir, dest): |
| 343 | """Register a <copyfile> request. | 367 | """Register a <copyfile> request. |
| 344 | 368 | ||
| 345 | Args: | 369 | Args: |
| 346 | git_worktree: Absolute path to the git project checkout. | 370 | git_worktree: Absolute path to the git project checkout. |
| 347 | src: Relative path under |git_worktree| of file to read. | 371 | src: Relative path under |git_worktree| of file to read. |
| 348 | topdir: Absolute path to the top of the repo client checkout. | 372 | topdir: Absolute path to the top of the repo client checkout. |
| 349 | dest: Relative path under |topdir| of file to write. | 373 | dest: Relative path under |topdir| of file to write. |
| 350 | """ | 374 | """ |
| 351 | self.git_worktree = git_worktree | 375 | self.git_worktree = git_worktree |
| 352 | self.topdir = topdir | 376 | self.topdir = topdir |
| 353 | self.src = src | 377 | self.src = src |
| 354 | self.dest = dest | 378 | self.dest = dest |
| 355 | 379 | ||
| 356 | def _Copy(self): | 380 | def _Copy(self): |
| 357 | src = _SafeExpandPath(self.git_worktree, self.src) | 381 | src = _SafeExpandPath(self.git_worktree, self.src) |
| 358 | dest = _SafeExpandPath(self.topdir, self.dest) | 382 | dest = _SafeExpandPath(self.topdir, self.dest) |
| 359 | 383 | ||
| 360 | if platform_utils.isdir(src): | 384 | if platform_utils.isdir(src): |
| 361 | raise ManifestInvalidPathError( | 385 | raise ManifestInvalidPathError( |
| 362 | '%s: copying from directory not supported' % (self.src,)) | 386 | "%s: copying from directory not supported" % (self.src,) |
| 363 | if platform_utils.isdir(dest): | 387 | ) |
| 364 | raise ManifestInvalidPathError( | 388 | if platform_utils.isdir(dest): |
| 365 | '%s: copying to directory not allowed' % (self.dest,)) | 389 | raise ManifestInvalidPathError( |
| 366 | 390 | "%s: copying to directory not allowed" % (self.dest,) | |
| 367 | # copy file if it does not exist or is out of date | 391 | ) |
| 368 | if not os.path.exists(dest) or not filecmp.cmp(src, dest): | 392 | |
| 369 | try: | 393 | # Copy file if it does not exist or is out of date. |
| 370 | # remove existing file first, since it might be read-only | 394 | if not os.path.exists(dest) or not filecmp.cmp(src, dest): |
| 371 | if os.path.exists(dest): | 395 | try: |
| 372 | platform_utils.remove(dest) | 396 | # Remove existing file first, since it might be read-only. |
| 373 | else: | 397 | if os.path.exists(dest): |
| 374 | dest_dir = os.path.dirname(dest) | 398 | platform_utils.remove(dest) |
| 375 | if not platform_utils.isdir(dest_dir): | 399 | else: |
| 376 | os.makedirs(dest_dir) | 400 | dest_dir = os.path.dirname(dest) |
| 377 | shutil.copy(src, dest) | 401 | if not platform_utils.isdir(dest_dir): |
| 378 | # make the file read-only | 402 | os.makedirs(dest_dir) |
| 379 | mode = os.stat(dest)[stat.ST_MODE] | 403 | shutil.copy(src, dest) |
| 380 | mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) | 404 | # Make the file read-only. |
| 381 | os.chmod(dest, mode) | 405 | mode = os.stat(dest)[stat.ST_MODE] |
| 382 | except IOError: | 406 | mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) |
| 383 | _error('Cannot copy file %s to %s', src, dest) | 407 | os.chmod(dest, mode) |
| 408 | except IOError: | ||
| 409 | _error("Cannot copy file %s to %s", src, dest) | ||
| 384 | 410 | ||
| 385 | 411 | ||
| 386 | class _LinkFile(object): | 412 | class _LinkFile(object): |
| 387 | """Container for <linkfile> manifest element.""" | 413 | """Container for <linkfile> manifest element.""" |
| 388 | 414 | ||
| 389 | def __init__(self, git_worktree, src, topdir, dest): | 415 | def __init__(self, git_worktree, src, topdir, dest): |
| 390 | """Register a <linkfile> request. | 416 | """Register a <linkfile> request. |
| 391 | 417 | ||
| 392 | Args: | 418 | Args: |
| 393 | git_worktree: Absolute path to the git project checkout. | 419 | git_worktree: Absolute path to the git project checkout. |
| 394 | src: Target of symlink relative to path under |git_worktree|. | 420 | src: Target of symlink relative to path under |git_worktree|. |
| 395 | topdir: Absolute path to the top of the repo client checkout. | 421 | topdir: Absolute path to the top of the repo client checkout. |
| 396 | dest: Relative path under |topdir| of symlink to create. | 422 | dest: Relative path under |topdir| of symlink to create. |
| 397 | """ | 423 | """ |
| 398 | self.git_worktree = git_worktree | 424 | self.git_worktree = git_worktree |
| 399 | self.topdir = topdir | 425 | self.topdir = topdir |
| 400 | self.src = src | 426 | self.src = src |
| 401 | self.dest = dest | 427 | self.dest = dest |
| 402 | 428 | ||
| 403 | def __linkIt(self, relSrc, absDest): | 429 | def __linkIt(self, relSrc, absDest): |
| 404 | # link file if it does not exist or is out of date | 430 | # Link file if it does not exist or is out of date. |
| 405 | if not platform_utils.islink(absDest) or (platform_utils.readlink(absDest) != relSrc): | 431 | if not platform_utils.islink(absDest) or ( |
| 406 | try: | 432 | platform_utils.readlink(absDest) != relSrc |
| 407 | # remove existing file first, since it might be read-only | 433 | ): |
| 408 | if os.path.lexists(absDest): | 434 | try: |
| 409 | platform_utils.remove(absDest) | 435 | # Remove existing file first, since it might be read-only. |
| 436 | if os.path.lexists(absDest): | ||
| 437 | platform_utils.remove(absDest) | ||
| 438 | else: | ||
| 439 | dest_dir = os.path.dirname(absDest) | ||
| 440 | if not platform_utils.isdir(dest_dir): | ||
| 441 | os.makedirs(dest_dir) | ||
| 442 | platform_utils.symlink(relSrc, absDest) | ||
| 443 | except IOError: | ||
| 444 | _error("Cannot link file %s to %s", relSrc, absDest) | ||
| 445 | |||
| 446 | def _Link(self): | ||
| 447 | """Link the self.src & self.dest paths. | ||
| 448 | |||
| 449 | Handles wild cards on the src linking all of the files in the source in | ||
| 450 | to the destination directory. | ||
| 451 | """ | ||
| 452 | # Some people use src="." to create stable links to projects. Let's | ||
| 453 | # allow that but reject all other uses of "." to keep things simple. | ||
| 454 | if self.src == ".": | ||
| 455 | src = self.git_worktree | ||
| 410 | else: | 456 | else: |
| 411 | dest_dir = os.path.dirname(absDest) | 457 | src = _SafeExpandPath(self.git_worktree, self.src) |
| 412 | if not platform_utils.isdir(dest_dir): | 458 | |
| 413 | os.makedirs(dest_dir) | 459 | if not glob.has_magic(src): |
| 414 | platform_utils.symlink(relSrc, absDest) | 460 | # Entity does not contain a wild card so just a simple one to one |
| 415 | except IOError: | 461 | # link operation. |
| 416 | _error('Cannot link file %s to %s', relSrc, absDest) | 462 | dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True) |
| 417 | 463 | # dest & src are absolute paths at this point. Make sure the target | |
| 418 | def _Link(self): | 464 | # of the symlink is relative in the context of the repo client |
| 419 | """Link the self.src & self.dest paths. | 465 | # checkout. |
| 420 | 466 | relpath = os.path.relpath(src, os.path.dirname(dest)) | |
| 421 | Handles wild cards on the src linking all of the files in the source in to | 467 | self.__linkIt(relpath, dest) |
| 422 | the destination directory. | 468 | else: |
| 423 | """ | 469 | dest = _SafeExpandPath(self.topdir, self.dest) |
| 424 | # Some people use src="." to create stable links to projects. Lets allow | 470 | # Entity contains a wild card. |
| 425 | # that but reject all other uses of "." to keep things simple. | 471 | if os.path.exists(dest) and not platform_utils.isdir(dest): |
| 426 | if self.src == '.': | 472 | _error( |
| 427 | src = self.git_worktree | 473 | "Link error: src with wildcard, %s must be a directory", |
| 428 | else: | 474 | dest, |
| 429 | src = _SafeExpandPath(self.git_worktree, self.src) | 475 | ) |
| 430 | 476 | else: | |
| 431 | if not glob.has_magic(src): | 477 | for absSrcFile in glob.glob(src): |
| 432 | # Entity does not contain a wild card so just a simple one to one link operation. | 478 | # Create a releative path from source dir to destination |
| 433 | dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True) | 479 | # dir. |
| 434 | # dest & src are absolute paths at this point. Make sure the target of | 480 | absSrcDir = os.path.dirname(absSrcFile) |
| 435 | # the symlink is relative in the context of the repo client checkout. | 481 | relSrcDir = os.path.relpath(absSrcDir, dest) |
| 436 | relpath = os.path.relpath(src, os.path.dirname(dest)) | 482 | |
| 437 | self.__linkIt(relpath, dest) | 483 | # Get the source file name. |
| 438 | else: | 484 | srcFile = os.path.basename(absSrcFile) |
| 439 | dest = _SafeExpandPath(self.topdir, self.dest) | 485 | |
| 440 | # Entity contains a wild card. | 486 | # Now form the final full paths to srcFile. They will be |
| 441 | if os.path.exists(dest) and not platform_utils.isdir(dest): | 487 | # absolute for the desintaiton and relative for the source. |
| 442 | _error('Link error: src with wildcard, %s must be a directory', dest) | 488 | absDest = os.path.join(dest, srcFile) |
| 443 | else: | 489 | relSrc = os.path.join(relSrcDir, srcFile) |
| 444 | for absSrcFile in glob.glob(src): | 490 | self.__linkIt(relSrc, absDest) |
| 445 | # Create a releative path from source dir to destination dir | ||
| 446 | absSrcDir = os.path.dirname(absSrcFile) | ||
| 447 | relSrcDir = os.path.relpath(absSrcDir, dest) | ||
| 448 | |||
| 449 | # Get the source file name | ||
| 450 | srcFile = os.path.basename(absSrcFile) | ||
| 451 | |||
| 452 | # Now form the final full paths to srcFile. They will be | ||
| 453 | # absolute for the desintaiton and relative for the srouce. | ||
| 454 | absDest = os.path.join(dest, srcFile) | ||
| 455 | relSrc = os.path.join(relSrcDir, srcFile) | ||
| 456 | self.__linkIt(relSrc, absDest) | ||
| 457 | 491 | ||
| 458 | 492 | ||
| 459 | class RemoteSpec(object): | 493 | class RemoteSpec(object): |
| 460 | 494 | def __init__( | |
| 461 | def __init__(self, | 495 | self, |
| 462 | name, | 496 | name, |
| 463 | url=None, | 497 | url=None, |
| 464 | pushUrl=None, | 498 | pushUrl=None, |
| 465 | review=None, | 499 | review=None, |
| 466 | revision=None, | 500 | revision=None, |
| 467 | orig_name=None, | 501 | orig_name=None, |
| 468 | fetchUrl=None): | 502 | fetchUrl=None, |
| 469 | self.name = name | 503 | ): |
| 470 | self.url = url | 504 | self.name = name |
| 471 | self.pushUrl = pushUrl | 505 | self.url = url |
| 472 | self.review = review | 506 | self.pushUrl = pushUrl |
| 473 | self.revision = revision | 507 | self.review = review |
| 474 | self.orig_name = orig_name | 508 | self.revision = revision |
| 475 | self.fetchUrl = fetchUrl | 509 | self.orig_name = orig_name |
| 510 | self.fetchUrl = fetchUrl | ||
| 476 | 511 | ||
| 477 | 512 | ||
| 478 | class Project(object): | 513 | class Project(object): |
| 479 | # These objects can be shared between several working trees. | 514 | # These objects can be shared between several working trees. |
| 480 | @property | 515 | @property |
| 481 | def shareable_dirs(self): | 516 | def shareable_dirs(self): |
| 482 | """Return the shareable directories""" | 517 | """Return the shareable directories""" |
| 483 | if self.UseAlternates: | 518 | if self.UseAlternates: |
| 484 | return ['hooks', 'rr-cache'] | 519 | return ["hooks", "rr-cache"] |
| 485 | else: | 520 | else: |
| 486 | return ['hooks', 'objects', 'rr-cache'] | 521 | return ["hooks", "objects", "rr-cache"] |
| 487 | 522 | ||
| 488 | def __init__(self, | 523 | def __init__( |
| 489 | manifest, | 524 | self, |
| 490 | name, | 525 | manifest, |
| 491 | remote, | 526 | name, |
| 492 | gitdir, | 527 | remote, |
| 493 | objdir, | 528 | gitdir, |
| 494 | worktree, | 529 | objdir, |
| 495 | relpath, | 530 | worktree, |
| 496 | revisionExpr, | 531 | relpath, |
| 497 | revisionId, | 532 | revisionExpr, |
| 498 | rebase=True, | 533 | revisionId, |
| 499 | groups=None, | 534 | rebase=True, |
| 500 | sync_c=False, | 535 | groups=None, |
| 501 | sync_s=False, | 536 | sync_c=False, |
| 502 | sync_tags=True, | 537 | sync_s=False, |
| 503 | clone_depth=None, | 538 | sync_tags=True, |
| 504 | upstream=None, | 539 | clone_depth=None, |
| 505 | parent=None, | 540 | upstream=None, |
| 506 | use_git_worktrees=False, | 541 | parent=None, |
| 507 | is_derived=False, | 542 | use_git_worktrees=False, |
| 508 | dest_branch=None, | 543 | is_derived=False, |
| 509 | optimized_fetch=False, | 544 | dest_branch=None, |
| 510 | retry_fetches=0, | 545 | optimized_fetch=False, |
| 511 | old_revision=None): | 546 | retry_fetches=0, |
| 512 | """Init a Project object. | 547 | old_revision=None, |
| 513 | 548 | ): | |
| 514 | Args: | 549 | """Init a Project object. |
| 515 | manifest: The XmlManifest object. | ||
| 516 | name: The `name` attribute of manifest.xml's project element. | ||
| 517 | remote: RemoteSpec object specifying its remote's properties. | ||
| 518 | gitdir: Absolute path of git directory. | ||
| 519 | objdir: Absolute path of directory to store git objects. | ||
| 520 | worktree: Absolute path of git working tree. | ||
| 521 | relpath: Relative path of git working tree to repo's top directory. | ||
| 522 | revisionExpr: The `revision` attribute of manifest.xml's project element. | ||
| 523 | revisionId: git commit id for checking out. | ||
| 524 | rebase: The `rebase` attribute of manifest.xml's project element. | ||
| 525 | groups: The `groups` attribute of manifest.xml's project element. | ||
| 526 | sync_c: The `sync-c` attribute of manifest.xml's project element. | ||
| 527 | sync_s: The `sync-s` attribute of manifest.xml's project element. | ||
| 528 | sync_tags: The `sync-tags` attribute of manifest.xml's project element. | ||
| 529 | upstream: The `upstream` attribute of manifest.xml's project element. | ||
| 530 | parent: The parent Project object. | ||
| 531 | use_git_worktrees: Whether to use `git worktree` for this project. | ||
| 532 | is_derived: False if the project was explicitly defined in the manifest; | ||
| 533 | True if the project is a discovered submodule. | ||
| 534 | dest_branch: The branch to which to push changes for review by default. | ||
| 535 | optimized_fetch: If True, when a project is set to a sha1 revision, only | ||
| 536 | fetch from the remote if the sha1 is not present locally. | ||
| 537 | retry_fetches: Retry remote fetches n times upon receiving transient error | ||
| 538 | with exponential backoff and jitter. | ||
| 539 | old_revision: saved git commit id for open GITC projects. | ||
| 540 | """ | ||
| 541 | self.client = self.manifest = manifest | ||
| 542 | self.name = name | ||
| 543 | self.remote = remote | ||
| 544 | self.UpdatePaths(relpath, worktree, gitdir, objdir) | ||
| 545 | self.SetRevision(revisionExpr, revisionId=revisionId) | ||
| 546 | |||
| 547 | self.rebase = rebase | ||
| 548 | self.groups = groups | ||
| 549 | self.sync_c = sync_c | ||
| 550 | self.sync_s = sync_s | ||
| 551 | self.sync_tags = sync_tags | ||
| 552 | self.clone_depth = clone_depth | ||
| 553 | self.upstream = upstream | ||
| 554 | self.parent = parent | ||
| 555 | # NB: Do not use this setting in __init__ to change behavior so that the | ||
| 556 | # manifest.git checkout can inspect & change it after instantiating. See | ||
| 557 | # the XmlManifest init code for more info. | ||
| 558 | self.use_git_worktrees = use_git_worktrees | ||
| 559 | self.is_derived = is_derived | ||
| 560 | self.optimized_fetch = optimized_fetch | ||
| 561 | self.retry_fetches = max(0, retry_fetches) | ||
| 562 | self.subprojects = [] | ||
| 563 | |||
| 564 | self.snapshots = {} | ||
| 565 | self.copyfiles = [] | ||
| 566 | self.linkfiles = [] | ||
| 567 | self.annotations = [] | ||
| 568 | self.dest_branch = dest_branch | ||
| 569 | self.old_revision = old_revision | ||
| 570 | |||
| 571 | # This will be filled in if a project is later identified to be the | ||
| 572 | # project containing repo hooks. | ||
| 573 | self.enabled_repo_hooks = [] | ||
| 574 | |||
| 575 | def RelPath(self, local=True): | ||
| 576 | """Return the path for the project relative to a manifest. | ||
| 577 | |||
| 578 | Args: | ||
| 579 | local: a boolean, if True, the path is relative to the local | ||
| 580 | (sub)manifest. If false, the path is relative to the | ||
| 581 | outermost manifest. | ||
| 582 | """ | ||
| 583 | if local: | ||
| 584 | return self.relpath | ||
| 585 | return os.path.join(self.manifest.path_prefix, self.relpath) | ||
| 586 | |||
| 587 | def SetRevision(self, revisionExpr, revisionId=None): | ||
| 588 | """Set revisionId based on revision expression and id""" | ||
| 589 | self.revisionExpr = revisionExpr | ||
| 590 | if revisionId is None and revisionExpr and IsId(revisionExpr): | ||
| 591 | self.revisionId = self.revisionExpr | ||
| 592 | else: | ||
| 593 | self.revisionId = revisionId | ||
| 594 | |||
| 595 | def UpdatePaths(self, relpath, worktree, gitdir, objdir): | ||
| 596 | """Update paths used by this project""" | ||
| 597 | self.gitdir = gitdir.replace('\\', '/') | ||
| 598 | self.objdir = objdir.replace('\\', '/') | ||
| 599 | if worktree: | ||
| 600 | self.worktree = os.path.normpath(worktree).replace('\\', '/') | ||
| 601 | else: | ||
| 602 | self.worktree = None | ||
| 603 | self.relpath = relpath | ||
| 604 | |||
| 605 | self.config = GitConfig.ForRepository(gitdir=self.gitdir, | ||
| 606 | defaults=self.manifest.globalConfig) | ||
| 607 | |||
| 608 | if self.worktree: | ||
| 609 | self.work_git = self._GitGetByExec(self, bare=False, gitdir=self.gitdir) | ||
| 610 | else: | ||
| 611 | self.work_git = None | ||
| 612 | self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir) | ||
| 613 | self.bare_ref = GitRefs(self.gitdir) | ||
| 614 | self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=self.objdir) | ||
| 615 | |||
| 616 | @property | ||
| 617 | def UseAlternates(self): | ||
| 618 | """Whether git alternates are in use. | ||
| 619 | |||
| 620 | This will be removed once migration to alternates is complete. | ||
| 621 | """ | ||
| 622 | return _ALTERNATES or self.manifest.is_multimanifest | ||
| 623 | 550 | ||
| 624 | @property | 551 | Args: |
| 625 | def Derived(self): | 552 | manifest: The XmlManifest object. |
| 626 | return self.is_derived | 553 | name: The `name` attribute of manifest.xml's project element. |
| 554 | remote: RemoteSpec object specifying its remote's properties. | ||
| 555 | gitdir: Absolute path of git directory. | ||
| 556 | objdir: Absolute path of directory to store git objects. | ||
| 557 | worktree: Absolute path of git working tree. | ||
| 558 | relpath: Relative path of git working tree to repo's top directory. | ||
| 559 | revisionExpr: The `revision` attribute of manifest.xml's project | ||
| 560 | element. | ||
| 561 | revisionId: git commit id for checking out. | ||
| 562 | rebase: The `rebase` attribute of manifest.xml's project element. | ||
| 563 | groups: The `groups` attribute of manifest.xml's project element. | ||
| 564 | sync_c: The `sync-c` attribute of manifest.xml's project element. | ||
| 565 | sync_s: The `sync-s` attribute of manifest.xml's project element. | ||
| 566 | sync_tags: The `sync-tags` attribute of manifest.xml's project | ||
| 567 | element. | ||
| 568 | upstream: The `upstream` attribute of manifest.xml's project | ||
| 569 | element. | ||
| 570 | parent: The parent Project object. | ||
| 571 | use_git_worktrees: Whether to use `git worktree` for this project. | ||
| 572 | is_derived: False if the project was explicitly defined in the | ||
| 573 | manifest; True if the project is a discovered submodule. | ||
| 574 | dest_branch: The branch to which to push changes for review by | ||
| 575 | default. | ||
| 576 | optimized_fetch: If True, when a project is set to a sha1 revision, | ||
| 577 | only fetch from the remote if the sha1 is not present locally. | ||
| 578 | retry_fetches: Retry remote fetches n times upon receiving transient | ||
| 579 | error with exponential backoff and jitter. | ||
| 580 | old_revision: saved git commit id for open GITC projects. | ||
| 581 | """ | ||
| 582 | self.client = self.manifest = manifest | ||
| 583 | self.name = name | ||
| 584 | self.remote = remote | ||
| 585 | self.UpdatePaths(relpath, worktree, gitdir, objdir) | ||
| 586 | self.SetRevision(revisionExpr, revisionId=revisionId) | ||
| 587 | |||
| 588 | self.rebase = rebase | ||
| 589 | self.groups = groups | ||
| 590 | self.sync_c = sync_c | ||
| 591 | self.sync_s = sync_s | ||
| 592 | self.sync_tags = sync_tags | ||
| 593 | self.clone_depth = clone_depth | ||
| 594 | self.upstream = upstream | ||
| 595 | self.parent = parent | ||
| 596 | # NB: Do not use this setting in __init__ to change behavior so that the | ||
| 597 | # manifest.git checkout can inspect & change it after instantiating. | ||
| 598 | # See the XmlManifest init code for more info. | ||
| 599 | self.use_git_worktrees = use_git_worktrees | ||
| 600 | self.is_derived = is_derived | ||
| 601 | self.optimized_fetch = optimized_fetch | ||
| 602 | self.retry_fetches = max(0, retry_fetches) | ||
| 603 | self.subprojects = [] | ||
| 604 | |||
| 605 | self.snapshots = {} | ||
| 606 | self.copyfiles = [] | ||
| 607 | self.linkfiles = [] | ||
| 608 | self.annotations = [] | ||
| 609 | self.dest_branch = dest_branch | ||
| 610 | self.old_revision = old_revision | ||
| 611 | |||
| 612 | # This will be filled in if a project is later identified to be the | ||
| 613 | # project containing repo hooks. | ||
| 614 | self.enabled_repo_hooks = [] | ||
| 615 | |||
| 616 | def RelPath(self, local=True): | ||
| 617 | """Return the path for the project relative to a manifest. | ||
| 627 | 618 | ||
| 628 | @property | 619 | Args: |
| 629 | def Exists(self): | 620 | local: a boolean, if True, the path is relative to the local |
| 630 | return platform_utils.isdir(self.gitdir) and platform_utils.isdir(self.objdir) | 621 | (sub)manifest. If false, the path is relative to the outermost |
| 622 | manifest. | ||
| 623 | """ | ||
| 624 | if local: | ||
| 625 | return self.relpath | ||
| 626 | return os.path.join(self.manifest.path_prefix, self.relpath) | ||
| 627 | |||
| 628 | def SetRevision(self, revisionExpr, revisionId=None): | ||
| 629 | """Set revisionId based on revision expression and id""" | ||
| 630 | self.revisionExpr = revisionExpr | ||
| 631 | if revisionId is None and revisionExpr and IsId(revisionExpr): | ||
| 632 | self.revisionId = self.revisionExpr | ||
| 633 | else: | ||
| 634 | self.revisionId = revisionId | ||
| 635 | |||
| 636 | def UpdatePaths(self, relpath, worktree, gitdir, objdir): | ||
| 637 | """Update paths used by this project""" | ||
| 638 | self.gitdir = gitdir.replace("\\", "/") | ||
| 639 | self.objdir = objdir.replace("\\", "/") | ||
| 640 | if worktree: | ||
| 641 | self.worktree = os.path.normpath(worktree).replace("\\", "/") | ||
| 642 | else: | ||
| 643 | self.worktree = None | ||
| 644 | self.relpath = relpath | ||
| 631 | 645 | ||
| 632 | @property | 646 | self.config = GitConfig.ForRepository( |
| 633 | def CurrentBranch(self): | 647 | gitdir=self.gitdir, defaults=self.manifest.globalConfig |
| 634 | """Obtain the name of the currently checked out branch. | 648 | ) |
| 635 | 649 | ||
| 636 | The branch name omits the 'refs/heads/' prefix. | 650 | if self.worktree: |
| 637 | None is returned if the project is on a detached HEAD, or if the work_git is | 651 | self.work_git = self._GitGetByExec( |
| 638 | otheriwse inaccessible (e.g. an incomplete sync). | 652 | self, bare=False, gitdir=self.gitdir |
| 639 | """ | 653 | ) |
| 640 | try: | 654 | else: |
| 641 | b = self.work_git.GetHead() | 655 | self.work_git = None |
| 642 | except NoManifestException: | 656 | self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir) |
| 643 | # If the local checkout is in a bad state, don't barf. Let the callers | 657 | self.bare_ref = GitRefs(self.gitdir) |
| 644 | # process this like the head is unreadable. | 658 | self.bare_objdir = self._GitGetByExec( |
| 645 | return None | 659 | self, bare=True, gitdir=self.objdir |
| 646 | if b.startswith(R_HEADS): | 660 | ) |
| 647 | return b[len(R_HEADS):] | ||
| 648 | return None | ||
| 649 | |||
| 650 | def IsRebaseInProgress(self): | ||
| 651 | return (os.path.exists(self.work_git.GetDotgitPath('rebase-apply')) or | ||
| 652 | os.path.exists(self.work_git.GetDotgitPath('rebase-merge')) or | ||
| 653 | os.path.exists(os.path.join(self.worktree, '.dotest'))) | ||
| 654 | |||
| 655 | def IsDirty(self, consider_untracked=True): | ||
| 656 | """Is the working directory modified in some way? | ||
| 657 | """ | ||
| 658 | self.work_git.update_index('-q', | ||
| 659 | '--unmerged', | ||
| 660 | '--ignore-missing', | ||
| 661 | '--refresh') | ||
| 662 | if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD): | ||
| 663 | return True | ||
| 664 | if self.work_git.DiffZ('diff-files'): | ||
| 665 | return True | ||
| 666 | if consider_untracked and self.UntrackedFiles(): | ||
| 667 | return True | ||
| 668 | return False | ||
| 669 | |||
| 670 | _userident_name = None | ||
| 671 | _userident_email = None | ||
| 672 | |||
| 673 | @property | ||
| 674 | def UserName(self): | ||
| 675 | """Obtain the user's personal name. | ||
| 676 | """ | ||
| 677 | if self._userident_name is None: | ||
| 678 | self._LoadUserIdentity() | ||
| 679 | return self._userident_name | ||
| 680 | |||
| 681 | @property | ||
| 682 | def UserEmail(self): | ||
| 683 | """Obtain the user's email address. This is very likely | ||
| 684 | to be their Gerrit login. | ||
| 685 | """ | ||
| 686 | if self._userident_email is None: | ||
| 687 | self._LoadUserIdentity() | ||
| 688 | return self._userident_email | ||
| 689 | |||
| 690 | def _LoadUserIdentity(self): | ||
| 691 | u = self.bare_git.var('GIT_COMMITTER_IDENT') | ||
| 692 | m = re.compile("^(.*) <([^>]*)> ").match(u) | ||
| 693 | if m: | ||
| 694 | self._userident_name = m.group(1) | ||
| 695 | self._userident_email = m.group(2) | ||
| 696 | else: | ||
| 697 | self._userident_name = '' | ||
| 698 | self._userident_email = '' | ||
| 699 | |||
| 700 | def GetRemote(self, name=None): | ||
| 701 | """Get the configuration for a single remote. | ||
| 702 | |||
| 703 | Defaults to the current project's remote. | ||
| 704 | """ | ||
| 705 | if name is None: | ||
| 706 | name = self.remote.name | ||
| 707 | return self.config.GetRemote(name) | ||
| 708 | 661 | ||
| 709 | def GetBranch(self, name): | 662 | @property |
| 710 | """Get the configuration for a single branch. | 663 | def UseAlternates(self): |
| 711 | """ | 664 | """Whether git alternates are in use. |
| 712 | return self.config.GetBranch(name) | 665 | |
| 666 | This will be removed once migration to alternates is complete. | ||
| 667 | """ | ||
| 668 | return _ALTERNATES or self.manifest.is_multimanifest | ||
| 669 | |||
| 670 | @property | ||
| 671 | def Derived(self): | ||
| 672 | return self.is_derived | ||
| 673 | |||
| 674 | @property | ||
| 675 | def Exists(self): | ||
| 676 | return platform_utils.isdir(self.gitdir) and platform_utils.isdir( | ||
| 677 | self.objdir | ||
| 678 | ) | ||
| 679 | |||
| 680 | @property | ||
| 681 | def CurrentBranch(self): | ||
| 682 | """Obtain the name of the currently checked out branch. | ||
| 683 | |||
| 684 | The branch name omits the 'refs/heads/' prefix. | ||
| 685 | None is returned if the project is on a detached HEAD, or if the | ||
| 686 | work_git is otheriwse inaccessible (e.g. an incomplete sync). | ||
| 687 | """ | ||
| 688 | try: | ||
| 689 | b = self.work_git.GetHead() | ||
| 690 | except NoManifestException: | ||
| 691 | # If the local checkout is in a bad state, don't barf. Let the | ||
| 692 | # callers process this like the head is unreadable. | ||
| 693 | return None | ||
| 694 | if b.startswith(R_HEADS): | ||
| 695 | return b[len(R_HEADS) :] | ||
| 696 | return None | ||
| 697 | |||
| 698 | def IsRebaseInProgress(self): | ||
| 699 | return ( | ||
| 700 | os.path.exists(self.work_git.GetDotgitPath("rebase-apply")) | ||
| 701 | or os.path.exists(self.work_git.GetDotgitPath("rebase-merge")) | ||
| 702 | or os.path.exists(os.path.join(self.worktree, ".dotest")) | ||
| 703 | ) | ||
| 704 | |||
| 705 | def IsDirty(self, consider_untracked=True): | ||
| 706 | """Is the working directory modified in some way?""" | ||
| 707 | self.work_git.update_index( | ||
| 708 | "-q", "--unmerged", "--ignore-missing", "--refresh" | ||
| 709 | ) | ||
| 710 | if self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD): | ||
| 711 | return True | ||
| 712 | if self.work_git.DiffZ("diff-files"): | ||
| 713 | return True | ||
| 714 | if consider_untracked and self.UntrackedFiles(): | ||
| 715 | return True | ||
| 716 | return False | ||
| 717 | |||
| 718 | _userident_name = None | ||
| 719 | _userident_email = None | ||
| 720 | |||
| 721 | @property | ||
| 722 | def UserName(self): | ||
| 723 | """Obtain the user's personal name.""" | ||
| 724 | if self._userident_name is None: | ||
| 725 | self._LoadUserIdentity() | ||
| 726 | return self._userident_name | ||
| 727 | |||
| 728 | @property | ||
| 729 | def UserEmail(self): | ||
| 730 | """Obtain the user's email address. This is very likely | ||
| 731 | to be their Gerrit login. | ||
| 732 | """ | ||
| 733 | if self._userident_email is None: | ||
| 734 | self._LoadUserIdentity() | ||
| 735 | return self._userident_email | ||
| 736 | |||
| 737 | def _LoadUserIdentity(self): | ||
| 738 | u = self.bare_git.var("GIT_COMMITTER_IDENT") | ||
| 739 | m = re.compile("^(.*) <([^>]*)> ").match(u) | ||
| 740 | if m: | ||
| 741 | self._userident_name = m.group(1) | ||
| 742 | self._userident_email = m.group(2) | ||
| 743 | else: | ||
| 744 | self._userident_name = "" | ||
| 745 | self._userident_email = "" | ||
| 746 | |||
| 747 | def GetRemote(self, name=None): | ||
| 748 | """Get the configuration for a single remote. | ||
| 749 | |||
| 750 | Defaults to the current project's remote. | ||
| 751 | """ | ||
| 752 | if name is None: | ||
| 753 | name = self.remote.name | ||
| 754 | return self.config.GetRemote(name) | ||
| 755 | |||
| 756 | def GetBranch(self, name): | ||
| 757 | """Get the configuration for a single branch.""" | ||
| 758 | return self.config.GetBranch(name) | ||
| 759 | |||
| 760 | def GetBranches(self): | ||
| 761 | """Get all existing local branches.""" | ||
| 762 | current = self.CurrentBranch | ||
| 763 | all_refs = self._allrefs | ||
| 764 | heads = {} | ||
| 765 | |||
| 766 | for name, ref_id in all_refs.items(): | ||
| 767 | if name.startswith(R_HEADS): | ||
| 768 | name = name[len(R_HEADS) :] | ||
| 769 | b = self.GetBranch(name) | ||
| 770 | b.current = name == current | ||
| 771 | b.published = None | ||
| 772 | b.revision = ref_id | ||
| 773 | heads[name] = b | ||
| 774 | |||
| 775 | for name, ref_id in all_refs.items(): | ||
| 776 | if name.startswith(R_PUB): | ||
| 777 | name = name[len(R_PUB) :] | ||
| 778 | b = heads.get(name) | ||
| 779 | if b: | ||
| 780 | b.published = ref_id | ||
| 781 | |||
| 782 | return heads | ||
| 783 | |||
| 784 | def MatchesGroups(self, manifest_groups): | ||
| 785 | """Returns true if the manifest groups specified at init should cause | ||
| 786 | this project to be synced. | ||
| 787 | Prefixing a manifest group with "-" inverts the meaning of a group. | ||
| 788 | All projects are implicitly labelled with "all". | ||
| 789 | |||
| 790 | labels are resolved in order. In the example case of | ||
| 791 | project_groups: "all,group1,group2" | ||
| 792 | manifest_groups: "-group1,group2" | ||
| 793 | the project will be matched. | ||
| 794 | |||
| 795 | The special manifest group "default" will match any project that | ||
| 796 | does not have the special project group "notdefault" | ||
| 797 | """ | ||
| 798 | default_groups = self.manifest.default_groups or ["default"] | ||
| 799 | expanded_manifest_groups = manifest_groups or default_groups | ||
| 800 | expanded_project_groups = ["all"] + (self.groups or []) | ||
| 801 | if "notdefault" not in expanded_project_groups: | ||
| 802 | expanded_project_groups += ["default"] | ||
| 713 | 803 | ||
| 714 | def GetBranches(self): | ||
| 715 | """Get all existing local branches. | ||
| 716 | """ | ||
| 717 | current = self.CurrentBranch | ||
| 718 | all_refs = self._allrefs | ||
| 719 | heads = {} | ||
| 720 | |||
| 721 | for name, ref_id in all_refs.items(): | ||
| 722 | if name.startswith(R_HEADS): | ||
| 723 | name = name[len(R_HEADS):] | ||
| 724 | b = self.GetBranch(name) | ||
| 725 | b.current = name == current | ||
| 726 | b.published = None | ||
| 727 | b.revision = ref_id | ||
| 728 | heads[name] = b | ||
| 729 | |||
| 730 | for name, ref_id in all_refs.items(): | ||
| 731 | if name.startswith(R_PUB): | ||
| 732 | name = name[len(R_PUB):] | ||
| 733 | b = heads.get(name) | ||
| 734 | if b: | ||
| 735 | b.published = ref_id | ||
| 736 | |||
| 737 | return heads | ||
| 738 | |||
| 739 | def MatchesGroups(self, manifest_groups): | ||
| 740 | """Returns true if the manifest groups specified at init should cause | ||
| 741 | this project to be synced. | ||
| 742 | Prefixing a manifest group with "-" inverts the meaning of a group. | ||
| 743 | All projects are implicitly labelled with "all". | ||
| 744 | |||
| 745 | labels are resolved in order. In the example case of | ||
| 746 | project_groups: "all,group1,group2" | ||
| 747 | manifest_groups: "-group1,group2" | ||
| 748 | the project will be matched. | ||
| 749 | |||
| 750 | The special manifest group "default" will match any project that | ||
| 751 | does not have the special project group "notdefault" | ||
| 752 | """ | ||
| 753 | default_groups = self.manifest.default_groups or ['default'] | ||
| 754 | expanded_manifest_groups = manifest_groups or default_groups | ||
| 755 | expanded_project_groups = ['all'] + (self.groups or []) | ||
| 756 | if 'notdefault' not in expanded_project_groups: | ||
| 757 | expanded_project_groups += ['default'] | ||
| 758 | |||
| 759 | matched = False | ||
| 760 | for group in expanded_manifest_groups: | ||
| 761 | if group.startswith('-') and group[1:] in expanded_project_groups: | ||
| 762 | matched = False | 804 | matched = False |
| 763 | elif group in expanded_project_groups: | 805 | for group in expanded_manifest_groups: |
| 764 | matched = True | 806 | if group.startswith("-") and group[1:] in expanded_project_groups: |
| 807 | matched = False | ||
| 808 | elif group in expanded_project_groups: | ||
| 809 | matched = True | ||
| 765 | 810 | ||
| 766 | return matched | 811 | return matched |
| 767 | 812 | ||
| 768 | # Status Display ## | 813 | def UncommitedFiles(self, get_all=True): |
| 769 | def UncommitedFiles(self, get_all=True): | 814 | """Returns a list of strings, uncommitted files in the git tree. |
| 770 | """Returns a list of strings, uncommitted files in the git tree. | ||
| 771 | 815 | ||
| 772 | Args: | 816 | Args: |
| 773 | get_all: a boolean, if True - get information about all different | 817 | get_all: a boolean, if True - get information about all different |
| 774 | uncommitted files. If False - return as soon as any kind of | 818 | uncommitted files. If False - return as soon as any kind of |
| 775 | uncommitted files is detected. | 819 | uncommitted files is detected. |
| 776 | """ | 820 | """ |
| 777 | details = [] | 821 | details = [] |
| 778 | self.work_git.update_index('-q', | 822 | self.work_git.update_index( |
| 779 | '--unmerged', | 823 | "-q", "--unmerged", "--ignore-missing", "--refresh" |
| 780 | '--ignore-missing', | 824 | ) |
| 781 | '--refresh') | 825 | if self.IsRebaseInProgress(): |
| 782 | if self.IsRebaseInProgress(): | 826 | details.append("rebase in progress") |
| 783 | details.append("rebase in progress") | 827 | if not get_all: |
| 784 | if not get_all: | 828 | return details |
| 785 | return details | 829 | |
| 830 | changes = self.work_git.DiffZ("diff-index", "--cached", HEAD).keys() | ||
| 831 | if changes: | ||
| 832 | details.extend(changes) | ||
| 833 | if not get_all: | ||
| 834 | return details | ||
| 835 | |||
| 836 | changes = self.work_git.DiffZ("diff-files").keys() | ||
| 837 | if changes: | ||
| 838 | details.extend(changes) | ||
| 839 | if not get_all: | ||
| 840 | return details | ||
| 841 | |||
| 842 | changes = self.UntrackedFiles() | ||
| 843 | if changes: | ||
| 844 | details.extend(changes) | ||
| 786 | 845 | ||
| 787 | changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys() | ||
| 788 | if changes: | ||
| 789 | details.extend(changes) | ||
| 790 | if not get_all: | ||
| 791 | return details | 846 | return details |
| 792 | 847 | ||
| 793 | changes = self.work_git.DiffZ('diff-files').keys() | 848 | def UntrackedFiles(self): |
| 794 | if changes: | 849 | """Returns a list of strings, untracked files in the git tree.""" |
| 795 | details.extend(changes) | 850 | return self.work_git.LsOthers() |
| 796 | if not get_all: | ||
| 797 | return details | ||
| 798 | 851 | ||
| 799 | changes = self.UntrackedFiles() | 852 | def HasChanges(self): |
| 800 | if changes: | 853 | """Returns true if there are uncommitted changes.""" |
| 801 | details.extend(changes) | 854 | return bool(self.UncommitedFiles(get_all=False)) |
| 802 | 855 | ||
| 803 | return details | 856 | def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False): |
| 857 | """Prints the status of the repository to stdout. | ||
| 804 | 858 | ||
| 805 | def UntrackedFiles(self): | 859 | Args: |
| 806 | """Returns a list of strings, untracked files in the git tree.""" | 860 | output_redir: If specified, redirect the output to this object. |
| 807 | return self.work_git.LsOthers() | 861 | quiet: If True then only print the project name. Do not print |
| 862 | the modified files, branch name, etc. | ||
| 863 | local: a boolean, if True, the path is relative to the local | ||
| 864 | (sub)manifest. If false, the path is relative to the outermost | ||
| 865 | manifest. | ||
| 866 | """ | ||
| 867 | if not platform_utils.isdir(self.worktree): | ||
| 868 | if output_redir is None: | ||
| 869 | output_redir = sys.stdout | ||
| 870 | print(file=output_redir) | ||
| 871 | print("project %s/" % self.RelPath(local), file=output_redir) | ||
| 872 | print(' missing (run "repo sync")', file=output_redir) | ||
| 873 | return | ||
| 874 | |||
| 875 | self.work_git.update_index( | ||
| 876 | "-q", "--unmerged", "--ignore-missing", "--refresh" | ||
| 877 | ) | ||
| 878 | rb = self.IsRebaseInProgress() | ||
| 879 | di = self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD) | ||
| 880 | df = self.work_git.DiffZ("diff-files") | ||
| 881 | do = self.work_git.LsOthers() | ||
| 882 | if not rb and not di and not df and not do and not self.CurrentBranch: | ||
| 883 | return "CLEAN" | ||
| 884 | |||
| 885 | out = StatusColoring(self.config) | ||
| 886 | if output_redir is not None: | ||
| 887 | out.redirect(output_redir) | ||
| 888 | out.project("project %-40s", self.RelPath(local) + "/ ") | ||
| 889 | |||
| 890 | if quiet: | ||
| 891 | out.nl() | ||
| 892 | return "DIRTY" | ||
| 893 | |||
| 894 | branch = self.CurrentBranch | ||
| 895 | if branch is None: | ||
| 896 | out.nobranch("(*** NO BRANCH ***)") | ||
| 897 | else: | ||
| 898 | out.branch("branch %s", branch) | ||
| 899 | out.nl() | ||
| 808 | 900 | ||
| 809 | def HasChanges(self): | 901 | if rb: |
| 810 | """Returns true if there are uncommitted changes. | 902 | out.important("prior sync failed; rebase still in progress") |
| 811 | """ | 903 | out.nl() |
| 812 | return bool(self.UncommitedFiles(get_all=False)) | ||
| 813 | |||
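Editorial aside: `UncommitedFiles`/`HasChanges` above layer several git plumbing checks in a fixed order (refresh the index, staged changes via `diff-index --cached HEAD`, unstaged changes via `diff-files`, then untracked files). A rough equivalent for any checkout, using plain `subprocess` instead of repo's `GitCommand` wrapper, could be sketched as below; `has_changes` and the `worktree` parameter are illustrative names, not repo APIs.

```python
import subprocess


def has_changes(worktree):
    """Sketch: detect staged, unstaged, or untracked files in a checkout."""
    def git(*args):
        return subprocess.run(
            ["git", "-C", worktree, *args],
            capture_output=True, text=True, check=False,
        )

    # Refresh stat information so diff-index/diff-files report accurately.
    git("update-index", "-q", "--unmerged", "--ignore-missing", "--refresh")

    # Staged changes (index vs HEAD), then unstaged changes (worktree vs index).
    if git("diff-index", "--quiet", "--cached", "HEAD").returncode:
        return True
    if git("diff-files", "--quiet").returncode:
        return True

    # Untracked files, honoring the usual ignore rules.
    out = git("ls-files", "--others", "--exclude-standard").stdout
    return bool(out.strip())
```

Like the method above, the early checks can short-circuit, which is what the `get_all=False` path exploits.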
| 814 | def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False): | ||
| 815 | """Prints the status of the repository to stdout. | ||
| 816 | |||
| 817 | Args: | ||
| 818 | output_redir: If specified, redirect the output to this object. | ||
| 819 | quiet: If True then only print the project name. Do not print | ||
| 820 | the modified files, branch name, etc. | ||
| 821 | local: a boolean, if True, the path is relative to the local | ||
| 822 | (sub)manifest. If false, the path is relative to the | ||
| 823 | outermost manifest. | ||
| 824 | """ | ||
| 825 | if not platform_utils.isdir(self.worktree): | ||
| 826 | if output_redir is None: | ||
| 827 | output_redir = sys.stdout | ||
| 828 | print(file=output_redir) | ||
| 829 | print('project %s/' % self.RelPath(local), file=output_redir) | ||
| 830 | print(' missing (run "repo sync")', file=output_redir) | ||
| 831 | return | ||
| 832 | |||
| 833 | self.work_git.update_index('-q', | ||
| 834 | '--unmerged', | ||
| 835 | '--ignore-missing', | ||
| 836 | '--refresh') | ||
| 837 | rb = self.IsRebaseInProgress() | ||
| 838 | di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD) | ||
| 839 | df = self.work_git.DiffZ('diff-files') | ||
| 840 | do = self.work_git.LsOthers() | ||
| 841 | if not rb and not di and not df and not do and not self.CurrentBranch: | ||
| 842 | return 'CLEAN' | ||
| 843 | |||
| 844 | out = StatusColoring(self.config) | ||
| 845 | if output_redir is not None: | ||
| 846 | out.redirect(output_redir) | ||
| 847 | out.project('project %-40s', self.RelPath(local) + '/ ') | ||
| 848 | |||
| 849 | if quiet: | ||
| 850 | out.nl() | ||
| 851 | return 'DIRTY' | ||
| 852 | |||
| 853 | branch = self.CurrentBranch | ||
| 854 | if branch is None: | ||
| 855 | out.nobranch('(*** NO BRANCH ***)') | ||
| 856 | else: | ||
| 857 | out.branch('branch %s', branch) | ||
| 858 | out.nl() | ||
| 859 | |||
| 860 | if rb: | ||
| 861 | out.important('prior sync failed; rebase still in progress') | ||
| 862 | out.nl() | ||
| 863 | |||
| 864 | paths = list() | ||
| 865 | paths.extend(di.keys()) | ||
| 866 | paths.extend(df.keys()) | ||
| 867 | paths.extend(do) | ||
| 868 | |||
| 869 | for p in sorted(set(paths)): | ||
| 870 | try: | ||
| 871 | i = di[p] | ||
| 872 | except KeyError: | ||
| 873 | i = None | ||
| 874 | |||
| 875 | try: | ||
| 876 | f = df[p] | ||
| 877 | except KeyError: | ||
| 878 | f = None | ||
| 879 | |||
| 880 | if i: | ||
| 881 | i_status = i.status.upper() | ||
| 882 | else: | ||
| 883 | i_status = '-' | ||
| 884 | |||
| 885 | if f: | ||
| 886 | f_status = f.status.lower() | ||
| 887 | else: | ||
| 888 | f_status = '-' | ||
| 889 | |||
| 890 | if i and i.src_path: | ||
| 891 | line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status, | ||
| 892 | i.src_path, p, i.level) | ||
| 893 | else: | ||
| 894 | line = ' %s%s\t%s' % (i_status, f_status, p) | ||
| 895 | |||
| 896 | if i and not f: | ||
| 897 | out.added('%s', line) | ||
| 898 | elif (i and f) or (not i and f): | ||
| 899 | out.changed('%s', line) | ||
| 900 | elif not i and not f: | ||
| 901 | out.untracked('%s', line) | ||
| 902 | else: | ||
| 903 | out.write('%s', line) | ||
| 904 | out.nl() | ||
| 905 | |||
| 906 | return 'DIRTY' | ||
| 907 | |||
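Editorial aside: the status display above prefixes each path with two characters, an uppercase letter for the index-vs-HEAD state (from `diff-index`) and a lowercase letter for the worktree-vs-index state (from `diff-files`), with "-" standing in when a side has no entry. A small sketch of just that formatting step, with made-up status letters in place of repo's `DiffZ` entries:

```python
def format_status_line(path, staged=None, unstaged=None, renamed_from=None):
    """Sketch of the two-letter status prefix used by the status display."""
    i_status = staged.upper() if staged else "-"      # index vs HEAD
    f_status = unstaged.lower() if unstaged else "-"  # worktree vs index
    if renamed_from:
        return " %s%s\t%s => %s" % (i_status, f_status, renamed_from, path)
    return " %s%s\t%s" % (i_status, f_status, path)


print(format_status_line("README.md", staged="M"))   # " M-" prefix: staged only
print(format_status_line("docs/new.txt"))            # " --" prefix: untracked
print(format_status_line("b.py", staged="R", renamed_from="a.py"))
```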
| 908 | def PrintWorkTreeDiff(self, absolute_paths=False, output_redir=None, | ||
| 909 | local=False): | ||
| 910 | """Prints the status of the repository to stdout. | ||
| 911 | """ | ||
| 912 | out = DiffColoring(self.config) | ||
| 913 | if output_redir: | ||
| 914 | out.redirect(output_redir) | ||
| 915 | cmd = ['diff'] | ||
| 916 | if out.is_on: | ||
| 917 | cmd.append('--color') | ||
| 918 | cmd.append(HEAD) | ||
| 919 | if absolute_paths: | ||
| 920 | cmd.append('--src-prefix=a/%s/' % self.RelPath(local)) | ||
| 921 | cmd.append('--dst-prefix=b/%s/' % self.RelPath(local)) | ||
| 922 | cmd.append('--') | ||
| 923 | try: | ||
| 924 | p = GitCommand(self, | ||
| 925 | cmd, | ||
| 926 | capture_stdout=True, | ||
| 927 | capture_stderr=True) | ||
| 928 | p.Wait() | ||
| 929 | except GitError as e: | ||
| 930 | out.nl() | ||
| 931 | out.project('project %s/' % self.RelPath(local)) | ||
| 932 | out.nl() | ||
| 933 | out.fail('%s', str(e)) | ||
| 934 | out.nl() | ||
| 935 | return False | ||
| 936 | if p.stdout: | ||
| 937 | out.nl() | ||
| 938 | out.project('project %s/' % self.RelPath(local)) | ||
| 939 | out.nl() | ||
| 940 | out.write('%s', p.stdout) | ||
| 941 | return p.Wait() == 0 | ||
| 942 | |||
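Editorial aside: `PrintWorkTreeDiff` rewrites the usual `a/`/`b/` diff prefixes so that a per-project diff reads as paths relative to the outer client checkout. The same effect can be had with stock git options; the project path below is only an example.

```python
import subprocess

# Diff one project against HEAD, labelling paths as if rooted at the top
# of the client checkout (example path: "external/tool").
relpath = "external/tool"
subprocess.run(
    [
        "git", "-C", relpath, "diff", "--color",
        "--src-prefix=a/%s/" % relpath,
        "--dst-prefix=b/%s/" % relpath,
        "HEAD", "--",
    ],
    check=False,
)
```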
| 943 | # Publish / Upload ## | ||
| 944 | def WasPublished(self, branch, all_refs=None): | ||
| 945 | """Was the branch published (uploaded) for code review? | ||
| 946 | If so, returns the SHA-1 hash of the last published | ||
| 947 | state for the branch. | ||
| 948 | """ | ||
| 949 | key = R_PUB + branch | ||
| 950 | if all_refs is None: | ||
| 951 | try: | ||
| 952 | return self.bare_git.rev_parse(key) | ||
| 953 | except GitError: | ||
| 954 | return None | ||
| 955 | else: | ||
| 956 | try: | ||
| 957 | return all_refs[key] | ||
| 958 | except KeyError: | ||
| 959 | return None | ||
| 960 | 904 | ||
| 961 | def CleanPublishedCache(self, all_refs=None): | 905 | paths = list() |
| 962 | """Prunes any stale published refs. | 906 | paths.extend(di.keys()) |
| 963 | """ | 907 | paths.extend(df.keys()) |
| 964 | if all_refs is None: | 908 | paths.extend(do) |
| 965 | all_refs = self._allrefs | ||
| 966 | heads = set() | ||
| 967 | canrm = {} | ||
| 968 | for name, ref_id in all_refs.items(): | ||
| 969 | if name.startswith(R_HEADS): | ||
| 970 | heads.add(name) | ||
| 971 | elif name.startswith(R_PUB): | ||
| 972 | canrm[name] = ref_id | ||
| 973 | |||
| 974 | for name, ref_id in canrm.items(): | ||
| 975 | n = name[len(R_PUB):] | ||
| 976 | if R_HEADS + n not in heads: | ||
| 977 | self.bare_git.DeleteRef(name, ref_id) | ||
| 978 | |||
| 979 | def GetUploadableBranches(self, selected_branch=None): | ||
| 980 | """List any branches which can be uploaded for review. | ||
| 981 | """ | ||
| 982 | heads = {} | ||
| 983 | pubed = {} | ||
| 984 | |||
| 985 | for name, ref_id in self._allrefs.items(): | ||
| 986 | if name.startswith(R_HEADS): | ||
| 987 | heads[name[len(R_HEADS):]] = ref_id | ||
| 988 | elif name.startswith(R_PUB): | ||
| 989 | pubed[name[len(R_PUB):]] = ref_id | ||
| 990 | |||
| 991 | ready = [] | ||
| 992 | for branch, ref_id in heads.items(): | ||
| 993 | if branch in pubed and pubed[branch] == ref_id: | ||
| 994 | continue | ||
| 995 | if selected_branch and branch != selected_branch: | ||
| 996 | continue | ||
| 997 | |||
| 998 | rb = self.GetUploadableBranch(branch) | ||
| 999 | if rb: | ||
| 1000 | ready.append(rb) | ||
| 1001 | return ready | ||
| 1002 | |||
| 1003 | def GetUploadableBranch(self, branch_name): | ||
| 1004 | """Get a single uploadable branch, or None. | ||
| 1005 | """ | ||
| 1006 | branch = self.GetBranch(branch_name) | ||
| 1007 | base = branch.LocalMerge | ||
| 1008 | if branch.LocalMerge: | ||
| 1009 | rb = ReviewableBranch(self, branch, base) | ||
| 1010 | if rb.commits: | ||
| 1011 | return rb | ||
| 1012 | return None | ||
| 1013 | |||
| 1014 | def UploadForReview(self, branch=None, | ||
| 1015 | people=([], []), | ||
| 1016 | dryrun=False, | ||
| 1017 | auto_topic=False, | ||
| 1018 | hashtags=(), | ||
| 1019 | labels=(), | ||
| 1020 | private=False, | ||
| 1021 | notify=None, | ||
| 1022 | wip=False, | ||
| 1023 | ready=False, | ||
| 1024 | dest_branch=None, | ||
| 1025 | validate_certs=True, | ||
| 1026 | push_options=None): | ||
| 1027 | """Uploads the named branch for code review. | ||
| 1028 | """ | ||
| 1029 | if branch is None: | ||
| 1030 | branch = self.CurrentBranch | ||
| 1031 | if branch is None: | ||
| 1032 | raise GitError('not currently on a branch') | ||
| 1033 | |||
| 1034 | branch = self.GetBranch(branch) | ||
| 1035 | if not branch.LocalMerge: | ||
| 1036 | raise GitError('branch %s does not track a remote' % branch.name) | ||
| 1037 | if not branch.remote.review: | ||
| 1038 | raise GitError('remote %s has no review url' % branch.remote.name) | ||
| 1039 | |||
| 1040 | # Basic validity check on label syntax. | ||
| 1041 | for label in labels: | ||
| 1042 | if not re.match(r'^.+[+-][0-9]+$', label): | ||
| 1043 | raise UploadError( | ||
| 1044 | f'invalid label syntax "{label}": labels use forms like ' | ||
| 1045 | 'CodeReview+1 or Verified-1') | ||
| 1046 | |||
| 1047 | if dest_branch is None: | ||
| 1048 | dest_branch = self.dest_branch | ||
| 1049 | if dest_branch is None: | ||
| 1050 | dest_branch = branch.merge | ||
| 1051 | if not dest_branch.startswith(R_HEADS): | ||
| 1052 | dest_branch = R_HEADS + dest_branch | ||
| 1053 | |||
| 1054 | if not branch.remote.projectname: | ||
| 1055 | branch.remote.projectname = self.name | ||
| 1056 | branch.remote.Save() | ||
| 1057 | |||
| 1058 | url = branch.remote.ReviewUrl(self.UserEmail, validate_certs) | ||
| 1059 | if url is None: | ||
| 1060 | raise UploadError('review not configured') | ||
| 1061 | cmd = ['push'] | ||
| 1062 | if dryrun: | ||
| 1063 | cmd.append('-n') | ||
| 1064 | |||
| 1065 | if url.startswith('ssh://'): | ||
| 1066 | cmd.append('--receive-pack=gerrit receive-pack') | ||
| 1067 | |||
| 1068 | for push_option in (push_options or []): | ||
| 1069 | cmd.append('-o') | ||
| 1070 | cmd.append(push_option) | ||
| 1071 | |||
| 1072 | cmd.append(url) | ||
| 1073 | |||
| 1074 | if dest_branch.startswith(R_HEADS): | ||
| 1075 | dest_branch = dest_branch[len(R_HEADS):] | ||
| 1076 | |||
| 1077 | ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch) | ||
| 1078 | opts = [] | ||
| 1079 | if auto_topic: | ||
| 1080 | opts += ['topic=' + branch.name] | ||
| 1081 | opts += ['t=%s' % p for p in hashtags] | ||
| 1082 | # NB: No need to encode labels as they've been validated above. | ||
| 1083 | opts += ['l=%s' % p for p in labels] | ||
| 1084 | |||
| 1085 | opts += ['r=%s' % p for p in people[0]] | ||
| 1086 | opts += ['cc=%s' % p for p in people[1]] | ||
| 1087 | if notify: | ||
| 1088 | opts += ['notify=' + notify] | ||
| 1089 | if private: | ||
| 1090 | opts += ['private'] | ||
| 1091 | if wip: | ||
| 1092 | opts += ['wip'] | ||
| 1093 | if ready: | ||
| 1094 | opts += ['ready'] | ||
| 1095 | if opts: | ||
| 1096 | ref_spec = ref_spec + '%' + ','.join(opts) | ||
| 1097 | cmd.append(ref_spec) | ||
| 1098 | |||
| 1099 | if GitCommand(self, cmd, bare=True).Wait() != 0: | ||
| 1100 | raise UploadError('Upload failed') | ||
| 1101 | |||
| 1102 | if not dryrun: | ||
| 1103 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) | ||
| 1104 | self.bare_git.UpdateRef(R_PUB + branch.name, | ||
| 1105 | R_HEADS + branch.name, | ||
| 1106 | message=msg) | ||
| 1107 | |||
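Editorial aside: the upload path above pushes a single refspec of the form `refs/heads/<branch>:refs/for/<dest>` and appends Gerrit push options after a `%` separator (topic, hashtags `t=`, labels `l=`, reviewers `r=`, CC `cc=`, plus flags such as `wip`/`ready`/`private`). A standalone sketch of that string construction follows; the helper name and the sample branch, reviewer, and topic values are illustrative.

```python
def build_gerrit_refspec(branch, dest_branch, topic=None, reviewers=(),
                         cc=(), hashtags=(), labels=(), wip=False):
    """Sketch: assemble the refs/for/ push refspec used for Gerrit uploads."""
    ref_spec = "refs/heads/%s:refs/for/%s" % (branch, dest_branch)
    opts = []
    if topic:
        opts.append("topic=" + topic)
    opts += ["t=%s" % t for t in hashtags]
    opts += ["l=%s" % label for label in labels]
    opts += ["r=%s" % r for r in reviewers]
    opts += ["cc=%s" % c for c in cc]
    if wip:
        opts.append("wip")
    if opts:
        ref_spec += "%" + ",".join(opts)
    return ref_spec


# e.g. refs/heads/fix-null-deref:refs/for/main%topic=fix-null-deref,r=reviewer@example.com
print(build_gerrit_refspec("fix-null-deref", "main",
                           topic="fix-null-deref",
                           reviewers=["reviewer@example.com"]))
```

The actual upload is then a `git push <review-url> <refspec>`; on success the method records the published state under `refs/published/<branch>` so later syncs know what was uploaded.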
| 1108 | # Sync ## | ||
| 1109 | def _ExtractArchive(self, tarpath, path=None): | ||
| 1110 | """Extract the given tar on its current location | ||
| 1111 | |||
| 1112 | Args: | ||
| 1113 | - tarpath: The path to the actual tar file | ||
| 1114 | 909 | ||
| 1115 | """ | 910 | for p in sorted(set(paths)): |
| 1116 | try: | 911 | try: |
| 1117 | with tarfile.open(tarpath, 'r') as tar: | 912 | i = di[p] |
| 1118 | tar.extractall(path=path) | 913 | except KeyError: |
| 1119 | return True | 914 | i = None |
| 1120 | except (IOError, tarfile.TarError) as e: | ||
| 1121 | _error("Cannot extract archive %s: %s", tarpath, str(e)) | ||
| 1122 | return False | ||
| 1123 | |||
| 1124 | def Sync_NetworkHalf(self, | ||
| 1125 | quiet=False, | ||
| 1126 | verbose=False, | ||
| 1127 | output_redir=None, | ||
| 1128 | is_new=None, | ||
| 1129 | current_branch_only=None, | ||
| 1130 | force_sync=False, | ||
| 1131 | clone_bundle=True, | ||
| 1132 | tags=None, | ||
| 1133 | archive=False, | ||
| 1134 | optimized_fetch=False, | ||
| 1135 | retry_fetches=0, | ||
| 1136 | prune=False, | ||
| 1137 | submodules=False, | ||
| 1138 | ssh_proxy=None, | ||
| 1139 | clone_filter=None, | ||
| 1140 | partial_clone_exclude=set()): | ||
| 1141 | """Perform only the network IO portion of the sync process. | ||
| 1142 | Local working directory/branch state is not affected. | ||
| 1143 | """ | ||
| 1144 | if archive and not isinstance(self, MetaProject): | ||
| 1145 | if self.remote.url.startswith(('http://', 'https://')): | ||
| 1146 | _error("%s: Cannot fetch archives from http/https remotes.", self.name) | ||
| 1147 | return SyncNetworkHalfResult(False, False) | ||
| 1148 | |||
| 1149 | name = self.relpath.replace('\\', '/') | ||
| 1150 | name = name.replace('/', '_') | ||
| 1151 | tarpath = '%s.tar' % name | ||
| 1152 | topdir = self.manifest.topdir | ||
| 1153 | |||
| 1154 | try: | ||
| 1155 | self._FetchArchive(tarpath, cwd=topdir) | ||
| 1156 | except GitError as e: | ||
| 1157 | _error('%s', e) | ||
| 1158 | return SyncNetworkHalfResult(False, False) | ||
| 1159 | |||
| 1160 | # From now on, we only need absolute tarpath | ||
| 1161 | tarpath = os.path.join(topdir, tarpath) | ||
| 1162 | |||
| 1163 | if not self._ExtractArchive(tarpath, path=topdir): | ||
| 1164 | return SyncNetworkHalfResult(False, True) | ||
| 1165 | try: | ||
| 1166 | platform_utils.remove(tarpath) | ||
| 1167 | except OSError as e: | ||
| 1168 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) | ||
| 1169 | self._CopyAndLinkFiles() | ||
| 1170 | return SyncNetworkHalfResult(True, True) | ||
| 1171 | |||
| 1172 | # If the shared object dir already exists, don't try to rebootstrap with a | ||
| 1173 | # clone bundle download. We should have the majority of objects already. | ||
| 1174 | if clone_bundle and os.path.exists(self.objdir): | ||
| 1175 | clone_bundle = False | ||
| 1176 | |||
| 1177 | if self.name in partial_clone_exclude: | ||
| 1178 | clone_bundle = True | ||
| 1179 | clone_filter = None | ||
| 1180 | |||
| 1181 | if is_new is None: | ||
| 1182 | is_new = not self.Exists | ||
| 1183 | if is_new: | ||
| 1184 | self._InitGitDir(force_sync=force_sync, quiet=quiet) | ||
| 1185 | else: | ||
| 1186 | self._UpdateHooks(quiet=quiet) | ||
| 1187 | self._InitRemote() | ||
| 1188 | |||
| 1189 | if self.UseAlternates: | ||
| 1190 | # If gitdir/objects is a symlink, migrate it from the old layout. | ||
| 1191 | gitdir_objects = os.path.join(self.gitdir, 'objects') | ||
| 1192 | if platform_utils.islink(gitdir_objects): | ||
| 1193 | platform_utils.remove(gitdir_objects, missing_ok=True) | ||
| 1194 | gitdir_alt = os.path.join(self.gitdir, 'objects/info/alternates') | ||
| 1195 | if not os.path.exists(gitdir_alt): | ||
| 1196 | os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True) | ||
| 1197 | _lwrite(gitdir_alt, os.path.join( | ||
| 1198 | os.path.relpath(self.objdir, gitdir_objects), 'objects') + '\n') | ||
| 1199 | |||
| 1200 | if is_new: | ||
| 1201 | alt = os.path.join(self.objdir, 'objects/info/alternates') | ||
| 1202 | try: | ||
| 1203 | with open(alt) as fd: | ||
| 1204 | # This works for both absolute and relative alternate directories. | ||
| 1205 | alt_dir = os.path.join(self.objdir, 'objects', fd.readline().rstrip()) | ||
| 1206 | except IOError: | ||
| 1207 | alt_dir = None | ||
| 1208 | else: | ||
| 1209 | alt_dir = None | ||
| 1210 | |||
| 1211 | if (clone_bundle | ||
| 1212 | and alt_dir is None | ||
| 1213 | and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)): | ||
| 1214 | is_new = False | ||
| 1215 | |||
| 1216 | if current_branch_only is None: | ||
| 1217 | if self.sync_c: | ||
| 1218 | current_branch_only = True | ||
| 1219 | elif not self.manifest._loaded: | ||
| 1220 | # Manifest cannot check defaults until it syncs. | ||
| 1221 | current_branch_only = False | ||
| 1222 | elif self.manifest.default.sync_c: | ||
| 1223 | current_branch_only = True | ||
| 1224 | |||
| 1225 | if tags is None: | ||
| 1226 | tags = self.sync_tags | ||
| 1227 | |||
| 1228 | if self.clone_depth: | ||
| 1229 | depth = self.clone_depth | ||
| 1230 | else: | ||
| 1231 | depth = self.manifest.manifestProject.depth | ||
| 1232 | |||
| 1233 | # See if we can skip the network fetch entirely. | ||
| 1234 | remote_fetched = False | ||
| 1235 | if not (optimized_fetch and | ||
| 1236 | (ID_RE.match(self.revisionExpr) and | ||
| 1237 | self._CheckForImmutableRevision())): | ||
| 1238 | remote_fetched = True | ||
| 1239 | if not self._RemoteFetch( | ||
| 1240 | initial=is_new, | ||
| 1241 | quiet=quiet, verbose=verbose, output_redir=output_redir, | ||
| 1242 | alt_dir=alt_dir, current_branch_only=current_branch_only, | ||
| 1243 | tags=tags, prune=prune, depth=depth, | ||
| 1244 | submodules=submodules, force_sync=force_sync, | ||
| 1245 | ssh_proxy=ssh_proxy, | ||
| 1246 | clone_filter=clone_filter, retry_fetches=retry_fetches): | ||
| 1247 | return SyncNetworkHalfResult(False, remote_fetched) | ||
| 1248 | |||
| 1249 | mp = self.manifest.manifestProject | ||
| 1250 | dissociate = mp.dissociate | ||
| 1251 | if dissociate: | ||
| 1252 | alternates_file = os.path.join(self.objdir, 'objects/info/alternates') | ||
| 1253 | if os.path.exists(alternates_file): | ||
| 1254 | cmd = ['repack', '-a', '-d'] | ||
| 1255 | p = GitCommand(self, cmd, bare=True, capture_stdout=bool(output_redir), | ||
| 1256 | merge_output=bool(output_redir)) | ||
| 1257 | if p.stdout and output_redir: | ||
| 1258 | output_redir.write(p.stdout) | ||
| 1259 | if p.Wait() != 0: | ||
| 1260 | return SyncNetworkHalfResult(False, remote_fetched) | ||
| 1261 | platform_utils.remove(alternates_file) | ||
| 1262 | |||
| 1263 | if self.worktree: | ||
| 1264 | self._InitMRef() | ||
| 1265 | else: | ||
| 1266 | self._InitMirrorHead() | ||
| 1267 | platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'), | ||
| 1268 | missing_ok=True) | ||
| 1269 | return SyncNetworkHalfResult(True, remote_fetched) | ||
| 1270 | |||
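Editorial aside: when alternates are enabled, `Sync_NetworkHalf` points a project's private git dir at the shared object store by writing a relative path into `objects/info/alternates`, a standard git mechanism where each line names another object directory git may read from (relative entries are resolved against the `objects/` directory itself). The dissociate path later undoes this by repacking (`git repack -a -d`) and deleting the file. A minimal illustration of producing such a file, with made-up paths and a hypothetical helper name:

```python
import os


def write_alternates(gitdir, shared_gitdir):
    """Sketch: make gitdir borrow objects from shared_gitdir via alternates."""
    gitdir_objects = os.path.join(gitdir, "objects")
    alt = os.path.join(gitdir_objects, "info", "alternates")
    os.makedirs(os.path.dirname(alt), exist_ok=True)
    # git resolves relative alternate entries against the objects/ directory.
    rel = os.path.relpath(
        os.path.join(shared_gitdir, "objects"), gitdir_objects
    )
    with open(alt, "w") as fd:
        fd.write(rel + "\n")


# Example (hypothetical layout):
# write_alternates(".repo/projects/foo.git", ".repo/project-objects/foo.git")
```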
| 1271 | def PostRepoUpgrade(self): | ||
| 1272 | self._InitHooks() | ||
| 1273 | |||
| 1274 | def _CopyAndLinkFiles(self): | ||
| 1275 | if self.client.isGitcClient: | ||
| 1276 | return | ||
| 1277 | for copyfile in self.copyfiles: | ||
| 1278 | copyfile._Copy() | ||
| 1279 | for linkfile in self.linkfiles: | ||
| 1280 | linkfile._Link() | ||
| 1281 | |||
| 1282 | def GetCommitRevisionId(self): | ||
| 1283 | """Get revisionId of a commit. | ||
| 1284 | |||
| 1285 | Use this method instead of GetRevisionId to get the id of the commit rather | ||
| 1286 | than the id of the current git object (for example, a tag) | ||
| 1287 | 915 | ||
| 1288 | """ | 916 | try: |
| 1289 | if not self.revisionExpr.startswith(R_TAGS): | 917 | f = df[p] |
| 1290 | return self.GetRevisionId(self._allrefs) | 918 | except KeyError: |
| 919 | f = None | ||
| 920 | |||
| 921 | if i: | ||
| 922 | i_status = i.status.upper() | ||
| 923 | else: | ||
| 924 | i_status = "-" | ||
| 925 | |||
| 926 | if f: | ||
| 927 | f_status = f.status.lower() | ||
| 928 | else: | ||
| 929 | f_status = "-" | ||
| 930 | |||
| 931 | if i and i.src_path: | ||
| 932 | line = " %s%s\t%s => %s (%s%%)" % ( | ||
| 933 | i_status, | ||
| 934 | f_status, | ||
| 935 | i.src_path, | ||
| 936 | p, | ||
| 937 | i.level, | ||
| 938 | ) | ||
| 939 | else: | ||
| 940 | line = " %s%s\t%s" % (i_status, f_status, p) | ||
| 941 | |||
| 942 | if i and not f: | ||
| 943 | out.added("%s", line) | ||
| 944 | elif (i and f) or (not i and f): | ||
| 945 | out.changed("%s", line) | ||
| 946 | elif not i and not f: | ||
| 947 | out.untracked("%s", line) | ||
| 948 | else: | ||
| 949 | out.write("%s", line) | ||
| 950 | out.nl() | ||
| 951 | |||
| 952 | return "DIRTY" | ||
| 953 | |||
| 954 | def PrintWorkTreeDiff( | ||
| 955 | self, absolute_paths=False, output_redir=None, local=False | ||
| 956 | ): | ||
| 957 | """Prints the status of the repository to stdout.""" | ||
| 958 | out = DiffColoring(self.config) | ||
| 959 | if output_redir: | ||
| 960 | out.redirect(output_redir) | ||
| 961 | cmd = ["diff"] | ||
| 962 | if out.is_on: | ||
| 963 | cmd.append("--color") | ||
| 964 | cmd.append(HEAD) | ||
| 965 | if absolute_paths: | ||
| 966 | cmd.append("--src-prefix=a/%s/" % self.RelPath(local)) | ||
| 967 | cmd.append("--dst-prefix=b/%s/" % self.RelPath(local)) | ||
| 968 | cmd.append("--") | ||
| 969 | try: | ||
| 970 | p = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True) | ||
| 971 | p.Wait() | ||
| 972 | except GitError as e: | ||
| 973 | out.nl() | ||
| 974 | out.project("project %s/" % self.RelPath(local)) | ||
| 975 | out.nl() | ||
| 976 | out.fail("%s", str(e)) | ||
| 977 | out.nl() | ||
| 978 | return False | ||
| 979 | if p.stdout: | ||
| 980 | out.nl() | ||
| 981 | out.project("project %s/" % self.RelPath(local)) | ||
| 982 | out.nl() | ||
| 983 | out.write("%s", p.stdout) | ||
| 984 | return p.Wait() == 0 | ||
| 985 | |||
| 986 | def WasPublished(self, branch, all_refs=None): | ||
| 987 | """Was the branch published (uploaded) for code review? | ||
| 988 | If so, returns the SHA-1 hash of the last published | ||
| 989 | state for the branch. | ||
| 990 | """ | ||
| 991 | key = R_PUB + branch | ||
| 992 | if all_refs is None: | ||
| 993 | try: | ||
| 994 | return self.bare_git.rev_parse(key) | ||
| 995 | except GitError: | ||
| 996 | return None | ||
| 997 | else: | ||
| 998 | try: | ||
| 999 | return all_refs[key] | ||
| 1000 | except KeyError: | ||
| 1001 | return None | ||
| 1002 | |||
| 1003 | def CleanPublishedCache(self, all_refs=None): | ||
| 1004 | """Prunes any stale published refs.""" | ||
| 1005 | if all_refs is None: | ||
| 1006 | all_refs = self._allrefs | ||
| 1007 | heads = set() | ||
| 1008 | canrm = {} | ||
| 1009 | for name, ref_id in all_refs.items(): | ||
| 1010 | if name.startswith(R_HEADS): | ||
| 1011 | heads.add(name) | ||
| 1012 | elif name.startswith(R_PUB): | ||
| 1013 | canrm[name] = ref_id | ||
| 1014 | |||
| 1015 | for name, ref_id in canrm.items(): | ||
| 1016 | n = name[len(R_PUB) :] | ||
| 1017 | if R_HEADS + n not in heads: | ||
| 1018 | self.bare_git.DeleteRef(name, ref_id) | ||
| 1019 | |||
| 1020 | def GetUploadableBranches(self, selected_branch=None): | ||
| 1021 | """List any branches which can be uploaded for review.""" | ||
| 1022 | heads = {} | ||
| 1023 | pubed = {} | ||
| 1024 | |||
| 1025 | for name, ref_id in self._allrefs.items(): | ||
| 1026 | if name.startswith(R_HEADS): | ||
| 1027 | heads[name[len(R_HEADS) :]] = ref_id | ||
| 1028 | elif name.startswith(R_PUB): | ||
| 1029 | pubed[name[len(R_PUB) :]] = ref_id | ||
| 1030 | |||
| 1031 | ready = [] | ||
| 1032 | for branch, ref_id in heads.items(): | ||
| 1033 | if branch in pubed and pubed[branch] == ref_id: | ||
| 1034 | continue | ||
| 1035 | if selected_branch and branch != selected_branch: | ||
| 1036 | continue | ||
| 1037 | |||
| 1038 | rb = self.GetUploadableBranch(branch) | ||
| 1039 | if rb: | ||
| 1040 | ready.append(rb) | ||
| 1041 | return ready | ||
| 1042 | |||
| 1043 | def GetUploadableBranch(self, branch_name): | ||
| 1044 | """Get a single uploadable branch, or None.""" | ||
| 1045 | branch = self.GetBranch(branch_name) | ||
| 1046 | base = branch.LocalMerge | ||
| 1047 | if branch.LocalMerge: | ||
| 1048 | rb = ReviewableBranch(self, branch, base) | ||
| 1049 | if rb.commits: | ||
| 1050 | return rb | ||
| 1051 | return None | ||
| 1291 | 1052 | ||
| 1292 | try: | 1053 | def UploadForReview( |
| 1293 | return self.bare_git.rev_list(self.revisionExpr, '-1')[0] | 1054 | self, |
| 1294 | except GitError: | 1055 | branch=None, |
| 1295 | raise ManifestInvalidRevisionError('revision %s in %s not found' % | 1056 | people=([], []), |
| 1296 | (self.revisionExpr, self.name)) | 1057 | dryrun=False, |
| 1058 | auto_topic=False, | ||
| 1059 | hashtags=(), | ||
| 1060 | labels=(), | ||
| 1061 | private=False, | ||
| 1062 | notify=None, | ||
| 1063 | wip=False, | ||
| 1064 | ready=False, | ||
| 1065 | dest_branch=None, | ||
| 1066 | validate_certs=True, | ||
| 1067 | push_options=None, | ||
| 1068 | ): | ||
| 1069 | """Uploads the named branch for code review.""" | ||
| 1070 | if branch is None: | ||
| 1071 | branch = self.CurrentBranch | ||
| 1072 | if branch is None: | ||
| 1073 | raise GitError("not currently on a branch") | ||
| 1297 | 1074 | ||
| 1298 | def GetRevisionId(self, all_refs=None): | 1075 | branch = self.GetBranch(branch) |
| 1299 | if self.revisionId: | 1076 | if not branch.LocalMerge: |
| 1300 | return self.revisionId | 1077 | raise GitError("branch %s does not track a remote" % branch.name) |
| 1078 | if not branch.remote.review: | ||
| 1079 | raise GitError("remote %s has no review url" % branch.remote.name) | ||
| 1080 | |||
| 1081 | # Basic validity check on label syntax. | ||
| 1082 | for label in labels: | ||
| 1083 | if not re.match(r"^.+[+-][0-9]+$", label): | ||
| 1084 | raise UploadError( | ||
| 1085 | f'invalid label syntax "{label}": labels use forms like ' | ||
| 1086 | "CodeReview+1 or Verified-1" | ||
| 1087 | ) | ||
| 1088 | |||
| 1089 | if dest_branch is None: | ||
| 1090 | dest_branch = self.dest_branch | ||
| 1091 | if dest_branch is None: | ||
| 1092 | dest_branch = branch.merge | ||
| 1093 | if not dest_branch.startswith(R_HEADS): | ||
| 1094 | dest_branch = R_HEADS + dest_branch | ||
| 1095 | |||
| 1096 | if not branch.remote.projectname: | ||
| 1097 | branch.remote.projectname = self.name | ||
| 1098 | branch.remote.Save() | ||
| 1099 | |||
| 1100 | url = branch.remote.ReviewUrl(self.UserEmail, validate_certs) | ||
| 1101 | if url is None: | ||
| 1102 | raise UploadError("review not configured") | ||
| 1103 | cmd = ["push"] | ||
| 1104 | if dryrun: | ||
| 1105 | cmd.append("-n") | ||
| 1106 | |||
| 1107 | if url.startswith("ssh://"): | ||
| 1108 | cmd.append("--receive-pack=gerrit receive-pack") | ||
| 1109 | |||
| 1110 | for push_option in push_options or []: | ||
| 1111 | cmd.append("-o") | ||
| 1112 | cmd.append(push_option) | ||
| 1113 | |||
| 1114 | cmd.append(url) | ||
| 1115 | |||
| 1116 | if dest_branch.startswith(R_HEADS): | ||
| 1117 | dest_branch = dest_branch[len(R_HEADS) :] | ||
| 1118 | |||
| 1119 | ref_spec = "%s:refs/for/%s" % (R_HEADS + branch.name, dest_branch) | ||
| 1120 | opts = [] | ||
| 1121 | if auto_topic: | ||
| 1122 | opts += ["topic=" + branch.name] | ||
| 1123 | opts += ["t=%s" % p for p in hashtags] | ||
| 1124 | # NB: No need to encode labels as they've been validated above. | ||
| 1125 | opts += ["l=%s" % p for p in labels] | ||
| 1126 | |||
| 1127 | opts += ["r=%s" % p for p in people[0]] | ||
| 1128 | opts += ["cc=%s" % p for p in people[1]] | ||
| 1129 | if notify: | ||
| 1130 | opts += ["notify=" + notify] | ||
| 1131 | if private: | ||
| 1132 | opts += ["private"] | ||
| 1133 | if wip: | ||
| 1134 | opts += ["wip"] | ||
| 1135 | if ready: | ||
| 1136 | opts += ["ready"] | ||
| 1137 | if opts: | ||
| 1138 | ref_spec = ref_spec + "%" + ",".join(opts) | ||
| 1139 | cmd.append(ref_spec) | ||
| 1140 | |||
| 1141 | if GitCommand(self, cmd, bare=True).Wait() != 0: | ||
| 1142 | raise UploadError("Upload failed") | ||
| 1143 | |||
| 1144 | if not dryrun: | ||
| 1145 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) | ||
| 1146 | self.bare_git.UpdateRef( | ||
| 1147 | R_PUB + branch.name, R_HEADS + branch.name, message=msg | ||
| 1148 | ) | ||
| 1149 | |||
| 1150 | def _ExtractArchive(self, tarpath, path=None): | ||
| 1151 | """Extract the given tar on its current location | ||
| 1301 | 1152 | ||
| 1302 | rem = self.GetRemote() | 1153 | Args: |
| 1303 | rev = rem.ToLocal(self.revisionExpr) | 1154 | tarpath: The path to the actual tar file |
| 1304 | 1155 | ||
| 1305 | if all_refs is not None and rev in all_refs: | 1156 | """ |
| 1306 | return all_refs[rev] | 1157 | try: |
| 1158 | with tarfile.open(tarpath, "r") as tar: | ||
| 1159 | tar.extractall(path=path) | ||
| 1160 | return True | ||
| 1161 | except (IOError, tarfile.TarError) as e: | ||
| 1162 | _error("Cannot extract archive %s: %s", tarpath, str(e)) | ||
| 1163 | return False | ||
| 1307 | 1164 | ||
| 1308 | try: | 1165 | def Sync_NetworkHalf( |
| 1309 | return self.bare_git.rev_parse('--verify', '%s^0' % rev) | 1166 | self, |
| 1310 | except GitError: | 1167 | quiet=False, |
| 1311 | raise ManifestInvalidRevisionError('revision %s in %s not found' % | 1168 | verbose=False, |
| 1312 | (self.revisionExpr, self.name)) | 1169 | output_redir=None, |
| 1170 | is_new=None, | ||
| 1171 | current_branch_only=None, | ||
| 1172 | force_sync=False, | ||
| 1173 | clone_bundle=True, | ||
| 1174 | tags=None, | ||
| 1175 | archive=False, | ||
| 1176 | optimized_fetch=False, | ||
| 1177 | retry_fetches=0, | ||
| 1178 | prune=False, | ||
| 1179 | submodules=False, | ||
| 1180 | ssh_proxy=None, | ||
| 1181 | clone_filter=None, | ||
| 1182 | partial_clone_exclude=set(), | ||
| 1183 | ): | ||
| 1184 | """Perform only the network IO portion of the sync process. | ||
| 1185 | Local working directory/branch state is not affected. | ||
| 1186 | """ | ||
| 1187 | if archive and not isinstance(self, MetaProject): | ||
| 1188 | if self.remote.url.startswith(("http://", "https://")): | ||
| 1189 | _error( | ||
| 1190 | "%s: Cannot fetch archives from http/https remotes.", | ||
| 1191 | self.name, | ||
| 1192 | ) | ||
| 1193 | return SyncNetworkHalfResult(False, False) | ||
| 1194 | |||
| 1195 | name = self.relpath.replace("\\", "/") | ||
| 1196 | name = name.replace("/", "_") | ||
| 1197 | tarpath = "%s.tar" % name | ||
| 1198 | topdir = self.manifest.topdir | ||
| 1313 | 1199 | ||
| 1314 | def SetRevisionId(self, revisionId): | 1200 | try: |
| 1315 | if self.revisionExpr: | 1201 | self._FetchArchive(tarpath, cwd=topdir) |
| 1316 | self.upstream = self.revisionExpr | 1202 | except GitError as e: |
| 1203 | _error("%s", e) | ||
| 1204 | return SyncNetworkHalfResult(False, False) | ||
| 1317 | 1205 | ||
| 1318 | self.revisionId = revisionId | 1206 | # From now on, we only need absolute tarpath. |
| 1207 | tarpath = os.path.join(topdir, tarpath) | ||
| 1319 | 1208 | ||
| 1320 | def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): | 1209 | if not self._ExtractArchive(tarpath, path=topdir): |
| 1321 | """Perform only the local IO portion of the sync process. | 1210 | return SyncNetworkHalfResult(False, True) |
| 1322 | Network access is not required. | 1211 | try: |
| 1323 | """ | 1212 | platform_utils.remove(tarpath) |
| 1324 | if not os.path.exists(self.gitdir): | 1213 | except OSError as e: |
| 1325 | syncbuf.fail(self, | 1214 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) |
| 1326 | 'Cannot checkout %s due to missing network sync; Run ' | 1215 | self._CopyAndLinkFiles() |
| 1327 | '`repo sync -n %s` first.' % | 1216 | return SyncNetworkHalfResult(True, True) |
| 1328 | (self.name, self.name)) | 1217 | |
| 1329 | return | 1218 | # If the shared object dir already exists, don't try to rebootstrap with |
| 1330 | 1219 | # a clone bundle download. We should have the majority of objects | |
| 1331 | self._InitWorkTree(force_sync=force_sync, submodules=submodules) | 1220 | # already. |
| 1332 | all_refs = self.bare_ref.all | 1221 | if clone_bundle and os.path.exists(self.objdir): |
| 1333 | self.CleanPublishedCache(all_refs) | 1222 | clone_bundle = False |
| 1334 | revid = self.GetRevisionId(all_refs) | 1223 | |
| 1335 | 1224 | if self.name in partial_clone_exclude: | |
| 1336 | # Special case the root of the repo client checkout. Make sure it doesn't | 1225 | clone_bundle = True |
| 1337 | # contain files being checked out to dirs we don't allow. | 1226 | clone_filter = None |
| 1338 | if self.relpath == '.': | 1227 | |
| 1339 | PROTECTED_PATHS = {'.repo'} | 1228 | if is_new is None: |
| 1340 | paths = set(self.work_git.ls_tree('-z', '--name-only', '--', revid).split('\0')) | 1229 | is_new = not self.Exists |
| 1341 | bad_paths = paths & PROTECTED_PATHS | 1230 | if is_new: |
| 1342 | if bad_paths: | 1231 | self._InitGitDir(force_sync=force_sync, quiet=quiet) |
| 1343 | syncbuf.fail(self, | 1232 | else: |
| 1344 | 'Refusing to checkout project that writes to protected ' | 1233 | self._UpdateHooks(quiet=quiet) |
| 1345 | 'paths: %s' % (', '.join(bad_paths),)) | 1234 | self._InitRemote() |
| 1346 | return | 1235 | |
| 1347 | 1236 | if self.UseAlternates: | |
| 1348 | def _doff(): | 1237 | # If gitdir/objects is a symlink, migrate it from the old layout. |
| 1349 | self._FastForward(revid) | 1238 | gitdir_objects = os.path.join(self.gitdir, "objects") |
| 1350 | self._CopyAndLinkFiles() | 1239 | if platform_utils.islink(gitdir_objects): |
| 1351 | 1240 | platform_utils.remove(gitdir_objects, missing_ok=True) | |
| 1352 | def _dosubmodules(): | 1241 | gitdir_alt = os.path.join(self.gitdir, "objects/info/alternates") |
| 1353 | self._SyncSubmodules(quiet=True) | 1242 | if not os.path.exists(gitdir_alt): |
| 1354 | 1243 | os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True) | |
| 1355 | head = self.work_git.GetHead() | 1244 | _lwrite( |
| 1356 | if head.startswith(R_HEADS): | 1245 | gitdir_alt, |
| 1357 | branch = head[len(R_HEADS):] | 1246 | os.path.join( |
| 1358 | try: | 1247 | os.path.relpath(self.objdir, gitdir_objects), "objects" |
| 1359 | head = all_refs[head] | 1248 | ) |
| 1360 | except KeyError: | 1249 | + "\n", |
| 1361 | head = None | 1250 | ) |
| 1362 | else: | ||
| 1363 | branch = None | ||
| 1364 | |||
| 1365 | if branch is None or syncbuf.detach_head: | ||
| 1366 | # Currently on a detached HEAD. The user is assumed to | ||
| 1367 | # not have any local modifications worth worrying about. | ||
| 1368 | # | ||
| 1369 | if self.IsRebaseInProgress(): | ||
| 1370 | syncbuf.fail(self, _PriorSyncFailedError()) | ||
| 1371 | return | ||
| 1372 | |||
| 1373 | if head == revid: | ||
| 1374 | # No changes; don't do anything further. | ||
| 1375 | # Except if the head needs to be detached | ||
| 1376 | # | ||
| 1377 | if not syncbuf.detach_head: | ||
| 1378 | # The copy/linkfile config may have changed. | ||
| 1379 | self._CopyAndLinkFiles() | ||
| 1380 | return | ||
| 1381 | else: | ||
| 1382 | lost = self._revlist(not_rev(revid), HEAD) | ||
| 1383 | if lost: | ||
| 1384 | syncbuf.info(self, "discarding %d commits", len(lost)) | ||
| 1385 | |||
| 1386 | try: | ||
| 1387 | self._Checkout(revid, quiet=True) | ||
| 1388 | if submodules: | ||
| 1389 | self._SyncSubmodules(quiet=True) | ||
| 1390 | except GitError as e: | ||
| 1391 | syncbuf.fail(self, e) | ||
| 1392 | return | ||
| 1393 | self._CopyAndLinkFiles() | ||
| 1394 | return | ||
| 1395 | |||
| 1396 | if head == revid: | ||
| 1397 | # No changes; don't do anything further. | ||
| 1398 | # | ||
| 1399 | # The copy/linkfile config may have changed. | ||
| 1400 | self._CopyAndLinkFiles() | ||
| 1401 | return | ||
| 1402 | |||
| 1403 | branch = self.GetBranch(branch) | ||
| 1404 | |||
| 1405 | if not branch.LocalMerge: | ||
| 1406 | # The current branch has no tracking configuration. | ||
| 1407 | # Jump off it to a detached HEAD. | ||
| 1408 | # | ||
| 1409 | syncbuf.info(self, | ||
| 1410 | "leaving %s; does not track upstream", | ||
| 1411 | branch.name) | ||
| 1412 | try: | ||
| 1413 | self._Checkout(revid, quiet=True) | ||
| 1414 | if submodules: | ||
| 1415 | self._SyncSubmodules(quiet=True) | ||
| 1416 | except GitError as e: | ||
| 1417 | syncbuf.fail(self, e) | ||
| 1418 | return | ||
| 1419 | self._CopyAndLinkFiles() | ||
| 1420 | return | ||
| 1421 | 1251 | ||
| 1422 | upstream_gain = self._revlist(not_rev(HEAD), revid) | 1252 | if is_new: |
| 1253 | alt = os.path.join(self.objdir, "objects/info/alternates") | ||
| 1254 | try: | ||
| 1255 | with open(alt) as fd: | ||
| 1256 | # This works for both absolute and relative alternate | ||
| 1257 | # directories. | ||
| 1258 | alt_dir = os.path.join( | ||
| 1259 | self.objdir, "objects", fd.readline().rstrip() | ||
| 1260 | ) | ||
| 1261 | except IOError: | ||
| 1262 | alt_dir = None | ||
| 1263 | else: | ||
| 1264 | alt_dir = None | ||
| 1423 | 1265 | ||
| 1424 | # See if we can perform a fast forward merge. This can happen if our | 1266 | if ( |
| 1425 | # branch isn't in the exact same state as we last published. | 1267 | clone_bundle |
| 1426 | try: | 1268 | and alt_dir is None |
| 1427 | self.work_git.merge_base('--is-ancestor', HEAD, revid) | 1269 | and self._ApplyCloneBundle( |
| 1428 | # Skip the published logic. | 1270 | initial=is_new, quiet=quiet, verbose=verbose |
| 1429 | pub = False | 1271 | ) |
| 1430 | except GitError: | 1272 | ): |
| 1431 | pub = self.WasPublished(branch.name, all_refs) | 1273 | is_new = False |
| 1432 | 1274 | ||
| 1433 | if pub: | 1275 | if current_branch_only is None: |
| 1434 | not_merged = self._revlist(not_rev(revid), pub) | 1276 | if self.sync_c: |
| 1435 | if not_merged: | 1277 | current_branch_only = True |
| 1436 | if upstream_gain: | 1278 | elif not self.manifest._loaded: |
| 1437 | # The user has published this branch and some of those | 1279 | # Manifest cannot check defaults until it syncs. |
| 1438 | # commits are not yet merged upstream. We do not want | 1280 | current_branch_only = False |
| 1439 | # to rewrite the published commits so we punt. | 1281 | elif self.manifest.default.sync_c: |
| 1440 | # | 1282 | current_branch_only = True |
| 1441 | syncbuf.fail(self, | 1283 | |
| 1442 | "branch %s is published (but not merged) and is now " | 1284 | if tags is None: |
| 1443 | "%d commits behind" % (branch.name, len(upstream_gain))) | 1285 | tags = self.sync_tags |
| 1444 | return | 1286 | |
| 1445 | elif pub == head: | 1287 | if self.clone_depth: |
| 1446 | # All published commits are merged, and thus we are a | 1288 | depth = self.clone_depth |
| 1447 | # strict subset. We can fast-forward safely. | 1289 | else: |
| 1448 | # | 1290 | depth = self.manifest.manifestProject.depth |
| 1449 | syncbuf.later1(self, _doff) | 1291 | |
| 1450 | if submodules: | 1292 | # See if we can skip the network fetch entirely. |
| 1451 | syncbuf.later1(self, _dosubmodules) | 1293 | remote_fetched = False |
| 1452 | return | 1294 | if not ( |
| 1453 | 1295 | optimized_fetch | |
| 1454 | # Examine the local commits not in the remote. Find the | 1296 | and ( |
| 1455 | # last one attributed to this user, if any. | 1297 | ID_RE.match(self.revisionExpr) |
| 1456 | # | 1298 | and self._CheckForImmutableRevision() |
| 1457 | local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce') | 1299 | ) |
| 1458 | last_mine = None | 1300 | ): |
| 1459 | cnt_mine = 0 | 1301 | remote_fetched = True |
| 1460 | for commit in local_changes: | 1302 | if not self._RemoteFetch( |
| 1461 | commit_id, committer_email = commit.split(' ', 1) | 1303 | initial=is_new, |
| 1462 | if committer_email == self.UserEmail: | 1304 | quiet=quiet, |
| 1463 | last_mine = commit_id | 1305 | verbose=verbose, |
| 1464 | cnt_mine += 1 | 1306 | output_redir=output_redir, |
| 1465 | 1307 | alt_dir=alt_dir, | |
| 1466 | if not upstream_gain and cnt_mine == len(local_changes): | 1308 | current_branch_only=current_branch_only, |
| 1467 | # The copy/linkfile config may have changed. | 1309 | tags=tags, |
| 1468 | self._CopyAndLinkFiles() | 1310 | prune=prune, |
| 1469 | return | 1311 | depth=depth, |
| 1470 | 1312 | submodules=submodules, | |
| 1471 | if self.IsDirty(consider_untracked=False): | 1313 | force_sync=force_sync, |
| 1472 | syncbuf.fail(self, _DirtyError()) | 1314 | ssh_proxy=ssh_proxy, |
| 1473 | return | 1315 | clone_filter=clone_filter, |
| 1474 | 1316 | retry_fetches=retry_fetches, | |
| 1475 | # If the upstream switched on us, warn the user. | 1317 | ): |
| 1476 | # | 1318 | return SyncNetworkHalfResult(False, remote_fetched) |
| 1477 | if branch.merge != self.revisionExpr: | ||
| 1478 | if branch.merge and self.revisionExpr: | ||
| 1479 | syncbuf.info(self, | ||
| 1480 | 'manifest switched %s...%s', | ||
| 1481 | branch.merge, | ||
| 1482 | self.revisionExpr) | ||
| 1483 | elif branch.merge: | ||
| 1484 | syncbuf.info(self, | ||
| 1485 | 'manifest no longer tracks %s', | ||
| 1486 | branch.merge) | ||
| 1487 | |||
| 1488 | if cnt_mine < len(local_changes): | ||
| 1489 | # Upstream rebased. Not everything in HEAD | ||
| 1490 | # was created by this user. | ||
| 1491 | # | ||
| 1492 | syncbuf.info(self, | ||
| 1493 | "discarding %d commits removed from upstream", | ||
| 1494 | len(local_changes) - cnt_mine) | ||
| 1495 | |||
| 1496 | branch.remote = self.GetRemote() | ||
| 1497 | if not ID_RE.match(self.revisionExpr): | ||
| 1498 | # in case of manifest sync the revisionExpr might be a SHA1 | ||
| 1499 | branch.merge = self.revisionExpr | ||
| 1500 | if not branch.merge.startswith('refs/'): | ||
| 1501 | branch.merge = R_HEADS + branch.merge | ||
| 1502 | branch.Save() | ||
| 1503 | |||
| 1504 | if cnt_mine > 0 and self.rebase: | ||
| 1505 | def _docopyandlink(): | ||
| 1506 | self._CopyAndLinkFiles() | ||
| 1507 | |||
| 1508 | def _dorebase(): | ||
| 1509 | self._Rebase(upstream='%s^1' % last_mine, onto=revid) | ||
| 1510 | syncbuf.later2(self, _dorebase) | ||
| 1511 | if submodules: | ||
| 1512 | syncbuf.later2(self, _dosubmodules) | ||
| 1513 | syncbuf.later2(self, _docopyandlink) | ||
| 1514 | elif local_changes: | ||
| 1515 | try: | ||
| 1516 | self._ResetHard(revid) | ||
| 1517 | if submodules: | ||
| 1518 | self._SyncSubmodules(quiet=True) | ||
| 1519 | self._CopyAndLinkFiles() | ||
| 1520 | except GitError as e: | ||
| 1521 | syncbuf.fail(self, e) | ||
| 1522 | return | ||
| 1523 | else: | ||
| 1524 | syncbuf.later1(self, _doff) | ||
| 1525 | if submodules: | ||
| 1526 | syncbuf.later1(self, _dosubmodules) | ||
| 1527 | |||
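Editorial aside: `Sync_LocalHalf` above chooses between fast-forward, rebase, and hard reset partly by asking whether HEAD is already an ancestor of the target revision; `git merge-base --is-ancestor` exits 0 exactly in that case. A standalone sketch of that check (helper name and parameters are illustrative):

```python
import subprocess


def can_fast_forward(worktree, target_rev):
    """Sketch: True if HEAD can be fast-forwarded to target_rev."""
    result = subprocess.run(
        ["git", "-C", worktree,
         "merge-base", "--is-ancestor", "HEAD", target_rev],
        check=False,
    )
    # Exit status 0 means HEAD is an ancestor of (or equal to) target_rev,
    # so moving to target_rev only adds commits and cannot lose local work.
    return result.returncode == 0
```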
| 1528 | def AddCopyFile(self, src, dest, topdir): | ||
| 1529 | """Mark |src| for copying to |dest| (relative to |topdir|). | ||
| 1530 | |||
| 1531 | No filesystem changes occur here. Actual copying happens later on. | ||
| 1532 | |||
| 1533 | Paths should have basic validation run on them before being queued. | ||
| 1534 | Further checking will be handled when the actual copy happens. | ||
| 1535 | """ | ||
| 1536 | self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest)) | ||
| 1537 | 1319 | ||
| 1538 | def AddLinkFile(self, src, dest, topdir): | 1320 | mp = self.manifest.manifestProject |
| 1539 | """Mark |dest| to create a symlink (relative to |topdir|) pointing to |src|. | 1321 | dissociate = mp.dissociate |
| 1322 | if dissociate: | ||
| 1323 | alternates_file = os.path.join( | ||
| 1324 | self.objdir, "objects/info/alternates" | ||
| 1325 | ) | ||
| 1326 | if os.path.exists(alternates_file): | ||
| 1327 | cmd = ["repack", "-a", "-d"] | ||
| 1328 | p = GitCommand( | ||
| 1329 | self, | ||
| 1330 | cmd, | ||
| 1331 | bare=True, | ||
| 1332 | capture_stdout=bool(output_redir), | ||
| 1333 | merge_output=bool(output_redir), | ||
| 1334 | ) | ||
| 1335 | if p.stdout and output_redir: | ||
| 1336 | output_redir.write(p.stdout) | ||
| 1337 | if p.Wait() != 0: | ||
| 1338 | return SyncNetworkHalfResult(False, remote_fetched) | ||
| 1339 | platform_utils.remove(alternates_file) | ||
| 1340 | |||
| 1341 | if self.worktree: | ||
| 1342 | self._InitMRef() | ||
| 1343 | else: | ||
| 1344 | self._InitMirrorHead() | ||
| 1345 | platform_utils.remove( | ||
| 1346 | os.path.join(self.gitdir, "FETCH_HEAD"), missing_ok=True | ||
| 1347 | ) | ||
| 1348 | return SyncNetworkHalfResult(True, remote_fetched) | ||
| 1540 | 1349 | ||
| 1541 | No filesystem changes occur here. Actual linking happens later on. | 1350 | def PostRepoUpgrade(self): |
| 1351 | self._InitHooks() | ||
| 1542 | 1352 | ||
| 1543 | Paths should have basic validation run on them before being queued. | 1353 | def _CopyAndLinkFiles(self): |
| 1544 | Further checking will be handled when the actual link happens. | 1354 | if self.client.isGitcClient: |
| 1545 | """ | 1355 | return |
| 1546 | self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest)) | 1356 | for copyfile in self.copyfiles: |
| 1357 | copyfile._Copy() | ||
| 1358 | for linkfile in self.linkfiles: | ||
| 1359 | linkfile._Link() | ||
| 1547 | 1360 | ||
| 1548 | def AddAnnotation(self, name, value, keep): | 1361 | def GetCommitRevisionId(self): |
| 1549 | self.annotations.append(Annotation(name, value, keep)) | 1362 | """Get revisionId of a commit. |
| 1550 | 1363 | ||
| 1551 | def DownloadPatchSet(self, change_id, patch_id): | 1364 | Use this method instead of GetRevisionId to get the id of the commit |
| 1552 | """Download a single patch set of a single change to FETCH_HEAD. | 1365 | rather than the id of the current git object (for example, a tag) |
| 1553 | """ | ||
| 1554 | remote = self.GetRemote() | ||
| 1555 | |||
| 1556 | cmd = ['fetch', remote.name] | ||
| 1557 | cmd.append('refs/changes/%2.2d/%d/%d' | ||
| 1558 | % (change_id % 100, change_id, patch_id)) | ||
| 1559 | if GitCommand(self, cmd, bare=True).Wait() != 0: | ||
| 1560 | return None | ||
| 1561 | return DownloadedChange(self, | ||
| 1562 | self.GetRevisionId(), | ||
| 1563 | change_id, | ||
| 1564 | patch_id, | ||
| 1565 | self.bare_git.rev_parse('FETCH_HEAD')) | ||
| 1566 | |||
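Editorial aside: `DownloadPatchSet` fetches one Gerrit patch set through the server's `refs/changes/` namespace, whose first component is the change number modulo 100, zero-padded to two digits. A small sketch of composing that ref; the change and patch-set numbers are example values.

```python
def change_ref(change_id, patch_id):
    """Sketch: Gerrit ref for one patch set, e.g. refs/changes/74/363474/1."""
    return "refs/changes/%2.2d/%d/%d" % (change_id % 100, change_id, patch_id)


assert change_ref(363474, 1) == "refs/changes/74/363474/1"
# Fetching that ref makes the commit available at FETCH_HEAD:
#   git fetch <remote> refs/changes/74/363474/1
```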
| 1567 | def DeleteWorktree(self, quiet=False, force=False): | ||
| 1568 | """Delete the source checkout and any other housekeeping tasks. | ||
| 1569 | |||
| 1570 | This currently leaves behind the internal .repo/ cache state. This helps | ||
| 1571 | when switching branches or manifest changes get reverted as we don't have | ||
| 1572 | to redownload all the git objects. But we should do some GC at some point. | ||
| 1573 | |||
| 1574 | Args: | ||
| 1575 | quiet: Whether to hide normal messages. | ||
| 1576 | force: Always delete tree even if dirty. | ||
| 1577 | 1366 | ||
| 1578 | Returns: | 1367 | """ |
| 1579 | True if the worktree was completely cleaned out. | 1368 | if not self.revisionExpr.startswith(R_TAGS): |
| 1580 | """ | 1369 | return self.GetRevisionId(self._allrefs) |
| 1581 | if self.IsDirty(): | ||
| 1582 | if force: | ||
| 1583 | print('warning: %s: Removing dirty project: uncommitted changes lost.' % | ||
| 1584 | (self.RelPath(local=False),), file=sys.stderr) | ||
| 1585 | else: | ||
| 1586 | print('error: %s: Cannot remove project: uncommitted changes are ' | ||
| 1587 | 'present.\n' % (self.RelPath(local=False),), file=sys.stderr) | ||
| 1588 | return False | ||
| 1589 | 1370 | ||
| 1590 | if not quiet: | ||
| 1591 | print('%s: Deleting obsolete checkout.' % (self.RelPath(local=False),)) | ||
| 1592 | |||
| 1593 | # Unlock and delink from the main worktree. We don't use git's worktree | ||
| 1594 | # remove because it will recursively delete projects -- we handle that | ||
| 1595 | # ourselves below. https://crbug.com/git/48 | ||
| 1596 | if self.use_git_worktrees: | ||
| 1597 | needle = platform_utils.realpath(self.gitdir) | ||
| 1598 | # Find the git worktree commondir under .repo/worktrees/. | ||
| 1599 | output = self.bare_git.worktree('list', '--porcelain').splitlines()[0] | ||
| 1600 | assert output.startswith('worktree '), output | ||
| 1601 | commondir = output[9:] | ||
| 1602 | # Walk each of the git worktrees to see where they point. | ||
| 1603 | configs = os.path.join(commondir, 'worktrees') | ||
| 1604 | for name in os.listdir(configs): | ||
| 1605 | gitdir = os.path.join(configs, name, 'gitdir') | ||
| 1606 | with open(gitdir) as fp: | ||
| 1607 | relpath = fp.read().strip() | ||
| 1608 | # Resolve the checkout path and see if it matches this project. | ||
| 1609 | fullpath = platform_utils.realpath(os.path.join(configs, name, relpath)) | ||
| 1610 | if fullpath == needle: | ||
| 1611 | platform_utils.rmtree(os.path.join(configs, name)) | ||
| 1612 | |||
| 1613 | # Delete the .git directory first, so we're less likely to have a partially | ||
| 1614 | # working git repository around. There shouldn't be any git projects here, | ||
| 1615 | # so rmtree works. | ||
| 1616 | |||
| 1617 | # Try to remove plain files first in case of git worktrees. If this fails | ||
| 1618 | # for any reason, we'll fall back to rmtree, and that'll display errors if | ||
| 1619 | # it can't remove things either. | ||
| 1620 | try: | ||
| 1621 | platform_utils.remove(self.gitdir) | ||
| 1622 | except OSError: | ||
| 1623 | pass | ||
| 1624 | try: | ||
| 1625 | platform_utils.rmtree(self.gitdir) | ||
| 1626 | except OSError as e: | ||
| 1627 | if e.errno != errno.ENOENT: | ||
| 1628 | print('error: %s: %s' % (self.gitdir, e), file=sys.stderr) | ||
| 1629 | print('error: %s: Failed to delete obsolete checkout; remove manually, ' | ||
| 1630 | 'then run `repo sync -l`.' % (self.RelPath(local=False),), | ||
| 1631 | file=sys.stderr) | ||
| 1632 | return False | ||
| 1633 | |||
| 1634 | # Delete everything under the worktree, except for directories that contain | ||
| 1635 | # another git project. | ||
| 1636 | dirs_to_remove = [] | ||
| 1637 | failed = False | ||
| 1638 | for root, dirs, files in platform_utils.walk(self.worktree): | ||
| 1639 | for f in files: | ||
| 1640 | path = os.path.join(root, f) | ||
| 1641 | try: | ||
| 1642 | platform_utils.remove(path) | ||
| 1643 | except OSError as e: | ||
| 1644 | if e.errno != errno.ENOENT: | ||
| 1645 | print('error: %s: Failed to remove: %s' % (path, e), file=sys.stderr) | ||
| 1646 | failed = True | ||
| 1647 | dirs[:] = [d for d in dirs | ||
| 1648 | if not os.path.lexists(os.path.join(root, d, '.git'))] | ||
| 1649 | dirs_to_remove += [os.path.join(root, d) for d in dirs | ||
| 1650 | if os.path.join(root, d) not in dirs_to_remove] | ||
| 1651 | for d in reversed(dirs_to_remove): | ||
| 1652 | if platform_utils.islink(d): | ||
| 1653 | try: | 1371 | try: |
| 1654 | platform_utils.remove(d) | 1372 | return self.bare_git.rev_list(self.revisionExpr, "-1")[0] |
| 1655 | except OSError as e: | 1373 | except GitError: |
| 1656 | if e.errno != errno.ENOENT: | 1374 | raise ManifestInvalidRevisionError( |
| 1657 | print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr) | 1375 | "revision %s in %s not found" % (self.revisionExpr, self.name) |
| 1658 | failed = True | 1376 | ) |
| 1659 | elif not platform_utils.listdir(d): | ||
| 1660 | try: | ||
| 1661 | platform_utils.rmdir(d) | ||
| 1662 | except OSError as e: | ||
| 1663 | if e.errno != errno.ENOENT: | ||
| 1664 | print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr) | ||
| 1665 | failed = True | ||
| 1666 | if failed: | ||
| 1667 | print('error: %s: Failed to delete obsolete checkout.' % (self.RelPath(local=False),), | ||
| 1668 | file=sys.stderr) | ||
| 1669 | print(' Remove manually, then run `repo sync -l`.', file=sys.stderr) | ||
| 1670 | return False | ||
| 1671 | |||
| 1672 | # Try deleting parent dirs if they are empty. | ||
| 1673 | path = self.worktree | ||
| 1674 | while path != self.manifest.topdir: | ||
| 1675 | try: | ||
| 1676 | platform_utils.rmdir(path) | ||
| 1677 | except OSError as e: | ||
| 1678 | if e.errno != errno.ENOENT: | ||
| 1679 | break | ||
| 1680 | path = os.path.dirname(path) | ||
| 1681 | |||
| 1682 | return True | ||
| 1683 | |||
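Editorial aside: `DeleteWorktree` removes a checkout file by file but deliberately refuses to descend into subdirectories that hold another git project (anything with a `.git` entry), then removes the now-empty directories deepest-first. A condensed sketch of that walk-and-prune pattern with plain `os` calls; unlike the method above it simply raises on failure instead of collecting errors, and it skips the symlink special-casing.

```python
import os


def remove_checkout(worktree):
    """Sketch: delete files under worktree but keep nested git checkouts."""
    dirs_to_remove = []
    for root, dirs, files in os.walk(worktree):
        for name in files:
            os.remove(os.path.join(root, name))
        # Dropping these entries stops os.walk from descending into nested
        # projects, which keep their own checkouts.
        dirs[:] = [
            d for d in dirs
            if not os.path.lexists(os.path.join(root, d, ".git"))
        ]
        dirs_to_remove += [os.path.join(root, d) for d in dirs]
    # Children were appended after their parents, so reversed() removes
    # the deepest surviving directories first, if they are now empty.
    for path in reversed(dirs_to_remove):
        if not os.path.islink(path) and not os.listdir(path):
            os.rmdir(path)
```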
| 1684 | # Branch Management ## | ||
| 1685 | def StartBranch(self, name, branch_merge='', revision=None): | ||
| 1686 | """Create a new branch off the manifest's revision. | ||
| 1687 | """ | ||
| 1688 | if not branch_merge: | ||
| 1689 | branch_merge = self.revisionExpr | ||
| 1690 | head = self.work_git.GetHead() | ||
| 1691 | if head == (R_HEADS + name): | ||
| 1692 | return True | ||
| 1693 | |||
| 1694 | all_refs = self.bare_ref.all | ||
| 1695 | if R_HEADS + name in all_refs: | ||
| 1696 | return GitCommand(self, ['checkout', '-q', name, '--']).Wait() == 0 | ||
| 1697 | |||
| 1698 | branch = self.GetBranch(name) | ||
| 1699 | branch.remote = self.GetRemote() | ||
| 1700 | branch.merge = branch_merge | ||
| 1701 | if not branch.merge.startswith('refs/') and not ID_RE.match(branch_merge): | ||
| 1702 | branch.merge = R_HEADS + branch_merge | ||
| 1703 | |||
| 1704 | if revision is None: | ||
| 1705 | revid = self.GetRevisionId(all_refs) | ||
| 1706 | else: | ||
| 1707 | revid = self.work_git.rev_parse(revision) | ||
| 1708 | |||
| 1709 | if head.startswith(R_HEADS): | ||
| 1710 | try: | ||
| 1711 | head = all_refs[head] | ||
| 1712 | except KeyError: | ||
| 1713 | head = None | ||
| 1714 | if revid and head and revid == head: | ||
| 1715 | ref = R_HEADS + name | ||
| 1716 | self.work_git.update_ref(ref, revid) | ||
| 1717 | self.work_git.symbolic_ref(HEAD, ref) | ||
| 1718 | branch.Save() | ||
| 1719 | return True | ||
| 1720 | |||
| 1721 | if GitCommand(self, ['checkout', '-q', '-b', branch.name, revid]).Wait() == 0: | ||
| 1722 | branch.Save() | ||
| 1723 | return True | ||
| 1724 | return False | ||
| 1725 | |||
| 1726 | def CheckoutBranch(self, name): | ||
| 1727 | """Checkout a local topic branch. | ||
| 1728 | 1377 | ||
| 1729 | Args: | 1378 | def GetRevisionId(self, all_refs=None): |
| 1730 | name: The name of the branch to checkout. | 1379 | if self.revisionId: |
| 1380 | return self.revisionId | ||
| 1731 | 1381 | ||
| 1732 | Returns: | 1382 | rem = self.GetRemote() |
| 1733 | True if the checkout succeeded; False if it didn't; None if the branch | 1383 | rev = rem.ToLocal(self.revisionExpr) |
| 1734 | didn't exist. | ||
| 1735 | """ | ||
| 1736 | rev = R_HEADS + name | ||
| 1737 | head = self.work_git.GetHead() | ||
| 1738 | if head == rev: | ||
| 1739 | # Already on the branch | ||
| 1740 | # | ||
| 1741 | return True | ||
| 1742 | |||
| 1743 | all_refs = self.bare_ref.all | ||
| 1744 | try: | ||
| 1745 | revid = all_refs[rev] | ||
| 1746 | except KeyError: | ||
| 1747 | # Branch does not exist in this project | ||
| 1748 | # | ||
| 1749 | return None | ||
| 1750 | |||
| 1751 | if head.startswith(R_HEADS): | ||
| 1752 | try: | ||
| 1753 | head = all_refs[head] | ||
| 1754 | except KeyError: | ||
| 1755 | head = None | ||
| 1756 | |||
| 1757 | if head == revid: | ||
| 1758 | # Same revision; just update HEAD to point to the new | ||
| 1759 | # target branch, but otherwise take no other action. | ||
| 1760 | # | ||
| 1761 | _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), | ||
| 1762 | 'ref: %s%s\n' % (R_HEADS, name)) | ||
| 1763 | return True | ||
| 1764 | |||
| 1765 | return GitCommand(self, | ||
| 1766 | ['checkout', name, '--'], | ||
| 1767 | capture_stdout=True, | ||
| 1768 | capture_stderr=True).Wait() == 0 | ||
| 1769 | |||
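CheckoutBranch deliberately distinguishes "branch missing" (None) from "checkout failed" (False). A hypothetical caller, shown only to illustrate the three-way contract from the docstring (none of these names are actual project.py callers):

```python
def checkout_or_report(project, name):
    """Illustrative wrapper around the tri-state CheckoutBranch() result."""
    result = project.CheckoutBranch(name)
    if result is None:
        print("branch %s does not exist" % name)
    elif result:
        print("switched to %s" % name)
    else:
        print("checkout of %s failed; see git output" % name)
    return bool(result)
```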
| 1770 | def AbandonBranch(self, name): | ||
| 1771 | """Destroy a local topic branch. | ||
| 1772 | |||
| 1773 | Args: | ||
| 1774 | name: The name of the branch to abandon. | ||
| 1775 | 1384 | ||
| 1776 | Returns: | 1385 | if all_refs is not None and rev in all_refs: |
| 1777 | True if the abandon succeeded; False if it didn't; None if the branch | 1386 | return all_refs[rev] |
| 1778 | didn't exist. | ||
| 1779 | """ | ||
| 1780 | rev = R_HEADS + name | ||
| 1781 | all_refs = self.bare_ref.all | ||
| 1782 | if rev not in all_refs: | ||
| 1783 | # Doesn't exist | ||
| 1784 | return None | ||
| 1785 | |||
| 1786 | head = self.work_git.GetHead() | ||
| 1787 | if head == rev: | ||
| 1788 | # We can't destroy the branch while we are sitting | ||
| 1789 | # on it. Switch to a detached HEAD. | ||
| 1790 | # | ||
| 1791 | head = all_refs[head] | ||
| 1792 | |||
| 1793 | revid = self.GetRevisionId(all_refs) | ||
| 1794 | if head == revid: | ||
| 1795 | _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), '%s\n' % revid) | ||
| 1796 | else: | ||
| 1797 | self._Checkout(revid, quiet=True) | ||
| 1798 | |||
| 1799 | return GitCommand(self, | ||
| 1800 | ['branch', '-D', name], | ||
| 1801 | capture_stdout=True, | ||
| 1802 | capture_stderr=True).Wait() == 0 | ||
| 1803 | |||
| 1804 | def PruneHeads(self): | ||
| 1805 | """Prune any topic branches already merged into upstream. | ||
| 1806 | """ | ||
| 1807 | cb = self.CurrentBranch | ||
| 1808 | kill = [] | ||
| 1809 | left = self._allrefs | ||
| 1810 | for name in left.keys(): | ||
| 1811 | if name.startswith(R_HEADS): | ||
| 1812 | name = name[len(R_HEADS):] | ||
| 1813 | if cb is None or name != cb: | ||
| 1814 | kill.append(name) | ||
| 1815 | |||
| 1816 | # Minor optimization: If there's nothing to prune, then don't try to read | ||
| 1817 | # any project state. | ||
| 1818 | if not kill and not cb: | ||
| 1819 | return [] | ||
| 1820 | |||
| 1821 | rev = self.GetRevisionId(left) | ||
| 1822 | if cb is not None \ | ||
| 1823 | and not self._revlist(HEAD + '...' + rev) \ | ||
| 1824 | and not self.IsDirty(consider_untracked=False): | ||
| 1825 | self.work_git.DetachHead(HEAD) | ||
| 1826 | kill.append(cb) | ||
| 1827 | |||
| 1828 | if kill: | ||
| 1829 | old = self.bare_git.GetHead() | ||
| 1830 | |||
| 1831 | try: | ||
| 1832 | self.bare_git.DetachHead(rev) | ||
| 1833 | |||
| 1834 | b = ['branch', '-d'] | ||
| 1835 | b.extend(kill) | ||
| 1836 | b = GitCommand(self, b, bare=True, | ||
| 1837 | capture_stdout=True, | ||
| 1838 | capture_stderr=True) | ||
| 1839 | b.Wait() | ||
| 1840 | finally: | ||
| 1841 | if ID_RE.match(old): | ||
| 1842 | self.bare_git.DetachHead(old) | ||
| 1843 | else: | ||
| 1844 | self.bare_git.SetHead(old) | ||
| 1845 | left = self._allrefs | ||
| 1846 | 1387 | ||
| 1847 | for branch in kill: | 1388 | try: |
| 1848 | if (R_HEADS + branch) not in left: | 1389 | return self.bare_git.rev_parse("--verify", "%s^0" % rev) |
| 1849 | self.CleanPublishedCache() | 1390 | except GitError: |
| 1850 | break | 1391 | raise ManifestInvalidRevisionError( |
| 1392 | "revision %s in %s not found" % (self.revisionExpr, self.name) | ||
| 1393 | ) | ||
| 1394 | |||
| 1395 | def SetRevisionId(self, revisionId): | ||
| 1396 | if self.revisionExpr: | ||
| 1397 | self.upstream = self.revisionExpr | ||
| 1398 | |||
| 1399 | self.revisionId = revisionId | ||
| 1400 | |||
| 1401 | def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): | ||
| 1402 | """Perform only the local IO portion of the sync process. | ||
| 1403 | |||
| 1404 | Network access is not required. | ||
| 1405 | """ | ||
| 1406 | if not os.path.exists(self.gitdir): | ||
| 1407 | syncbuf.fail( | ||
| 1408 | self, | ||
| 1409 | "Cannot checkout %s due to missing network sync; Run " | ||
| 1410 | "`repo sync -n %s` first." % (self.name, self.name), | ||
| 1411 | ) | ||
| 1412 | return | ||
| 1413 | |||
| 1414 | self._InitWorkTree(force_sync=force_sync, submodules=submodules) | ||
| 1415 | all_refs = self.bare_ref.all | ||
| 1416 | self.CleanPublishedCache(all_refs) | ||
| 1417 | revid = self.GetRevisionId(all_refs) | ||
| 1418 | |||
| 1419 | # Special case the root of the repo client checkout. Make sure it | ||
| 1420 | # doesn't contain files being checked out to dirs we don't allow. | ||
| 1421 | if self.relpath == ".": | ||
| 1422 | PROTECTED_PATHS = {".repo"} | ||
| 1423 | paths = set( | ||
| 1424 | self.work_git.ls_tree("-z", "--name-only", "--", revid).split( | ||
| 1425 | "\0" | ||
| 1426 | ) | ||
| 1427 | ) | ||
| 1428 | bad_paths = paths & PROTECTED_PATHS | ||
| 1429 | if bad_paths: | ||
| 1430 | syncbuf.fail( | ||
| 1431 | self, | ||
| 1432 | "Refusing to checkout project that writes to protected " | ||
| 1433 | "paths: %s" % (", ".join(bad_paths),), | ||
| 1434 | ) | ||
| 1435 | return | ||
| 1436 | |||
| 1437 | def _doff(): | ||
| 1438 | self._FastForward(revid) | ||
| 1439 | self._CopyAndLinkFiles() | ||
| 1440 | |||
| 1441 | def _dosubmodules(): | ||
| 1442 | self._SyncSubmodules(quiet=True) | ||
| 1443 | |||
| 1444 | head = self.work_git.GetHead() | ||
| 1445 | if head.startswith(R_HEADS): | ||
| 1446 | branch = head[len(R_HEADS) :] | ||
| 1447 | try: | ||
| 1448 | head = all_refs[head] | ||
| 1449 | except KeyError: | ||
| 1450 | head = None | ||
| 1451 | else: | ||
| 1452 | branch = None | ||
| 1453 | |||
| 1454 | if branch is None or syncbuf.detach_head: | ||
| 1455 | # Currently on a detached HEAD. The user is assumed to | ||
| 1456 | # not have any local modifications worth worrying about. | ||
| 1457 | if self.IsRebaseInProgress(): | ||
| 1458 | syncbuf.fail(self, _PriorSyncFailedError()) | ||
| 1459 | return | ||
| 1460 | |||
| 1461 | if head == revid: | ||
| 1462 | # No changes; don't do anything further. | ||
| 1463 | # Except if the head needs to be detached. | ||
| 1464 | if not syncbuf.detach_head: | ||
| 1465 | # The copy/linkfile config may have changed. | ||
| 1466 | self._CopyAndLinkFiles() | ||
| 1467 | return | ||
| 1468 | else: | ||
| 1469 | lost = self._revlist(not_rev(revid), HEAD) | ||
| 1470 | if lost: | ||
| 1471 | syncbuf.info(self, "discarding %d commits", len(lost)) | ||
| 1851 | 1472 | ||
| 1852 | if cb and cb not in kill: | 1473 | try: |
| 1853 | kill.append(cb) | 1474 | self._Checkout(revid, quiet=True) |
| 1854 | kill.sort() | 1475 | if submodules: |
| 1476 | self._SyncSubmodules(quiet=True) | ||
| 1477 | except GitError as e: | ||
| 1478 | syncbuf.fail(self, e) | ||
| 1479 | return | ||
| 1480 | self._CopyAndLinkFiles() | ||
| 1481 | return | ||
| 1482 | |||
| 1483 | if head == revid: | ||
| 1484 | # No changes; don't do anything further. | ||
| 1485 | # | ||
| 1486 | # The copy/linkfile config may have changed. | ||
| 1487 | self._CopyAndLinkFiles() | ||
| 1488 | return | ||
| 1855 | 1489 | ||
| 1856 | kept = [] | ||
| 1857 | for branch in kill: | ||
| 1858 | if R_HEADS + branch in left: | ||
| 1859 | branch = self.GetBranch(branch) | 1490 | branch = self.GetBranch(branch) |
| 1860 | base = branch.LocalMerge | 1491 | |
| 1861 | if not base: | 1492 | if not branch.LocalMerge: |
| 1862 | base = rev | 1493 | # The current branch has no tracking configuration. |
| 1863 | kept.append(ReviewableBranch(self, branch, base)) | 1494 | # Jump off it to a detached HEAD. |
| 1864 | return kept | 1495 | syncbuf.info( |
| 1865 | 1496 | self, "leaving %s; does not track upstream", branch.name | |
| 1866 | # Submodule Management ## | 1497 | ) |
| 1867 | def GetRegisteredSubprojects(self): | 1498 | try: |
| 1868 | result = [] | 1499 | self._Checkout(revid, quiet=True) |
| 1869 | 1500 | if submodules: | |
| 1870 | def rec(subprojects): | 1501 | self._SyncSubmodules(quiet=True) |
| 1871 | if not subprojects: | 1502 | except GitError as e: |
| 1872 | return | 1503 | syncbuf.fail(self, e) |
| 1873 | result.extend(subprojects) | 1504 | return |
| 1874 | for p in subprojects: | 1505 | self._CopyAndLinkFiles() |
| 1875 | rec(p.subprojects) | 1506 | return |
| 1876 | rec(self.subprojects) | 1507 | |
| 1877 | return result | 1508 | upstream_gain = self._revlist(not_rev(HEAD), revid) |
| 1878 | 1509 | ||
| 1879 | def _GetSubmodules(self): | 1510 | # See if we can perform a fast forward merge. This can happen if our |
| 1880 | # Unfortunately we cannot call `git submodule status --recursive` here | 1511 | # branch isn't in the exact same state as we last published. |
| 1881 | # because the working tree might not exist yet, and it cannot be used | ||
| 1882 | # without a working tree in its current implementation. | ||
| 1883 | |||
| 1884 | def get_submodules(gitdir, rev): | ||
| 1885 | # Parse .gitmodules for submodule sub_paths and sub_urls | ||
| 1886 | sub_paths, sub_urls = parse_gitmodules(gitdir, rev) | ||
| 1887 | if not sub_paths: | ||
| 1888 | return [] | ||
| 1889 | # Run `git ls-tree` to read the SHAs of the submodule objects, which | ||
| 1890 | # happen to be the revisions of the submodule repositories. | ||
| 1891 | sub_revs = git_ls_tree(gitdir, rev, sub_paths) | ||
| 1892 | submodules = [] | ||
| 1893 | for sub_path, sub_url in zip(sub_paths, sub_urls): | ||
| 1894 | try: | 1512 | try: |
| 1895 | sub_rev = sub_revs[sub_path] | 1513 | self.work_git.merge_base("--is-ancestor", HEAD, revid) |
| 1896 | except KeyError: | 1514 | # Skip the published logic. |
| 1897 | # Ignore non-existent submodules | 1515 | pub = False |
| 1898 | continue | 1516 | except GitError: |
| 1899 | submodules.append((sub_rev, sub_path, sub_url)) | 1517 | pub = self.WasPublished(branch.name, all_refs) |
| 1900 | return submodules | 1518 | |
| 1901 | 1519 | if pub: | |
| 1902 | re_path = re.compile(r'^submodule\.(.+)\.path=(.*)$') | 1520 | not_merged = self._revlist(not_rev(revid), pub) |
| 1903 | re_url = re.compile(r'^submodule\.(.+)\.url=(.*)$') | 1521 | if not_merged: |
| 1904 | 1522 | if upstream_gain: | |
| 1905 | def parse_gitmodules(gitdir, rev): | 1523 | # The user has published this branch and some of those |
| 1906 | cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev] | 1524 | # commits are not yet merged upstream. We do not want |
| 1907 | try: | 1525 | # to rewrite the published commits so we punt. |
| 1908 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, | 1526 | syncbuf.fail( |
| 1909 | bare=True, gitdir=gitdir) | 1527 | self, |
| 1910 | except GitError: | 1528 | "branch %s is published (but not merged) and is now " |
| 1911 | return [], [] | 1529 | "%d commits behind" % (branch.name, len(upstream_gain)), |
| 1912 | if p.Wait() != 0: | 1530 | ) |
| 1913 | return [], [] | 1531 | return |
| 1914 | 1532 | elif pub == head: | |
| 1915 | gitmodules_lines = [] | 1533 | # All published commits are merged, and thus we are a |
| 1916 | fd, temp_gitmodules_path = tempfile.mkstemp() | 1534 | # strict subset. We can fast-forward safely. |
| 1917 | try: | 1535 | syncbuf.later1(self, _doff) |
| 1918 | os.write(fd, p.stdout.encode('utf-8')) | 1536 | if submodules: |
| 1919 | os.close(fd) | 1537 | syncbuf.later1(self, _dosubmodules) |
| 1920 | cmd = ['config', '--file', temp_gitmodules_path, '--list'] | 1538 | return |
| 1921 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, | 1539 | |
| 1922 | bare=True, gitdir=gitdir) | 1540 | # Examine the local commits not in the remote. Find the |
| 1923 | if p.Wait() != 0: | 1541 | # last one attributed to this user, if any. |
| 1924 | return [], [] | 1542 | local_changes = self._revlist(not_rev(revid), HEAD, format="%H %ce") |
| 1925 | gitmodules_lines = p.stdout.split('\n') | 1543 | last_mine = None |
| 1926 | except GitError: | 1544 | cnt_mine = 0 |
| 1927 | return [], [] | 1545 | for commit in local_changes: |
| 1928 | finally: | 1546 | commit_id, committer_email = commit.split(" ", 1) |
| 1929 | platform_utils.remove(temp_gitmodules_path) | 1547 | if committer_email == self.UserEmail: |
| 1930 | 1548 | last_mine = commit_id | |
| 1931 | names = set() | 1549 | cnt_mine += 1 |
| 1932 | paths = {} | 1550 | |
| 1933 | urls = {} | 1551 | if not upstream_gain and cnt_mine == len(local_changes): |
| 1934 | for line in gitmodules_lines: | 1552 | # The copy/linkfile config may have changed. |
| 1935 | if not line: | 1553 | self._CopyAndLinkFiles() |
| 1936 | continue | 1554 | return |
| 1937 | m = re_path.match(line) | 1555 | |
| 1938 | if m: | 1556 | if self.IsDirty(consider_untracked=False): |
| 1939 | names.add(m.group(1)) | 1557 | syncbuf.fail(self, _DirtyError()) |
| 1940 | paths[m.group(1)] = m.group(2) | 1558 | return |
| 1941 | continue | 1559 | |
| 1942 | m = re_url.match(line) | 1560 | # If the upstream switched on us, warn the user. |
| 1943 | if m: | 1561 | if branch.merge != self.revisionExpr: |
| 1944 | names.add(m.group(1)) | 1562 | if branch.merge and self.revisionExpr: |
| 1945 | urls[m.group(1)] = m.group(2) | 1563 | syncbuf.info( |
| 1946 | continue | 1564 | self, |
| 1947 | names = sorted(names) | 1565 | "manifest switched %s...%s", |
| 1948 | return ([paths.get(name, '') for name in names], | 1566 | branch.merge, |
| 1949 | [urls.get(name, '') for name in names]) | 1567 | self.revisionExpr, |
| 1950 | 1568 | ) | |
| 1951 | def git_ls_tree(gitdir, rev, paths): | 1569 | elif branch.merge: |
| 1952 | cmd = ['ls-tree', rev, '--'] | 1570 | syncbuf.info(self, "manifest no longer tracks %s", branch.merge) |
| 1953 | cmd.extend(paths) | 1571 | |
| 1954 | try: | 1572 | if cnt_mine < len(local_changes): |
| 1955 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, | 1573 | # Upstream rebased. Not everything in HEAD was created by this user. |
| 1956 | bare=True, gitdir=gitdir) | 1574 | syncbuf.info( |
| 1957 | except GitError: | 1575 | self, |
| 1958 | return [] | 1576 | "discarding %d commits removed from upstream", |
| 1959 | if p.Wait() != 0: | 1577 | len(local_changes) - cnt_mine, |
| 1960 | return [] | 1578 | ) |
| 1961 | objects = {} | 1579 | |
| 1962 | for line in p.stdout.split('\n'): | 1580 | branch.remote = self.GetRemote() |
| 1963 | if not line.strip(): | 1581 | if not ID_RE.match(self.revisionExpr): |
| 1964 | continue | 1582 | # In case of manifest sync the revisionExpr might be a SHA1. |
| 1965 | object_rev, object_path = line.split()[2:4] | 1583 | branch.merge = self.revisionExpr |
| 1966 | objects[object_path] = object_rev | 1584 | if not branch.merge.startswith("refs/"): |
| 1967 | return objects | 1585 | branch.merge = R_HEADS + branch.merge |
| 1586 | branch.Save() | ||
| 1587 | |||
| 1588 | if cnt_mine > 0 and self.rebase: | ||
| 1589 | |||
| 1590 | def _docopyandlink(): | ||
| 1591 | self._CopyAndLinkFiles() | ||
| 1592 | |||
| 1593 | def _dorebase(): | ||
| 1594 | self._Rebase(upstream="%s^1" % last_mine, onto=revid) | ||
| 1595 | |||
| 1596 | syncbuf.later2(self, _dorebase) | ||
| 1597 | if submodules: | ||
| 1598 | syncbuf.later2(self, _dosubmodules) | ||
| 1599 | syncbuf.later2(self, _docopyandlink) | ||
| 1600 | elif local_changes: | ||
| 1601 | try: | ||
| 1602 | self._ResetHard(revid) | ||
| 1603 | if submodules: | ||
| 1604 | self._SyncSubmodules(quiet=True) | ||
| 1605 | self._CopyAndLinkFiles() | ||
| 1606 | except GitError as e: | ||
| 1607 | syncbuf.fail(self, e) | ||
| 1608 | return | ||
| 1609 | else: | ||
| 1610 | syncbuf.later1(self, _doff) | ||
| 1611 | if submodules: | ||
| 1612 | syncbuf.later1(self, _dosubmodules) | ||
| 1968 | 1613 | ||
| 1969 | try: | 1614 | def AddCopyFile(self, src, dest, topdir): |
| 1970 | rev = self.GetRevisionId() | 1615 | """Mark |src| for copying to |dest| (relative to |topdir|). |
| 1971 | except GitError: | ||
| 1972 | return [] | ||
| 1973 | return get_submodules(self.gitdir, rev) | ||
| 1974 | |||
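parse_gitmodules() above ultimately consumes `git config --file <tmpfile> --list` output, one `submodule.<name>.path=...` or `submodule.<name>.url=...` entry per line. A small self-contained sketch of that parsing step, using made-up submodule names and URLs:

```python
import re

# Example `git config --file .gitmodules --list` output (values invented).
sample = """\
submodule.third_party/foo.path=third_party/foo
submodule.third_party/foo.url=../foo.git
submodule.bar.path=bar
submodule.bar.url=https://example.com/bar.git
"""

re_path = re.compile(r"^submodule\.(.+)\.path=(.*)$")
re_url = re.compile(r"^submodule\.(.+)\.url=(.*)$")

paths, urls = {}, {}
for line in sample.splitlines():
    m = re_path.match(line)
    if m:
        paths[m.group(1)] = m.group(2)
        continue
    m = re_url.match(line)
    if m:
        urls[m.group(1)] = m.group(2)

for name in sorted(paths):
    print(name, "->", paths[name], urls.get(name, ""))
```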
| 1975 | def GetDerivedSubprojects(self): | ||
| 1976 | result = [] | ||
| 1977 | if not self.Exists: | ||
| 1978 | # If git repo does not exist yet, querying its submodules will | ||
| 1979 | # mess up its states; so return here. | ||
| 1980 | return result | ||
| 1981 | for rev, path, url in self._GetSubmodules(): | ||
| 1982 | name = self.manifest.GetSubprojectName(self, path) | ||
| 1983 | relpath, worktree, gitdir, objdir = \ | ||
| 1984 | self.manifest.GetSubprojectPaths(self, name, path) | ||
| 1985 | project = self.manifest.paths.get(relpath) | ||
| 1986 | if project: | ||
| 1987 | result.extend(project.GetDerivedSubprojects()) | ||
| 1988 | continue | ||
| 1989 | |||
| 1990 | if url.startswith('..'): | ||
| 1991 | url = urllib.parse.urljoin("%s/" % self.remote.url, url) | ||
| 1992 | remote = RemoteSpec(self.remote.name, | ||
| 1993 | url=url, | ||
| 1994 | pushUrl=self.remote.pushUrl, | ||
| 1995 | review=self.remote.review, | ||
| 1996 | revision=self.remote.revision) | ||
| 1997 | subproject = Project(manifest=self.manifest, | ||
| 1998 | name=name, | ||
| 1999 | remote=remote, | ||
| 2000 | gitdir=gitdir, | ||
| 2001 | objdir=objdir, | ||
| 2002 | worktree=worktree, | ||
| 2003 | relpath=relpath, | ||
| 2004 | revisionExpr=rev, | ||
| 2005 | revisionId=rev, | ||
| 2006 | rebase=self.rebase, | ||
| 2007 | groups=self.groups, | ||
| 2008 | sync_c=self.sync_c, | ||
| 2009 | sync_s=self.sync_s, | ||
| 2010 | sync_tags=self.sync_tags, | ||
| 2011 | parent=self, | ||
| 2012 | is_derived=True) | ||
| 2013 | result.append(subproject) | ||
| 2014 | result.extend(subproject.GetDerivedSubprojects()) | ||
| 2015 | return result | ||
| 2016 | |||
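GetDerivedSubprojects() resolves relative submodule URLs (those starting with "..") against the parent project's remote URL plus a trailing slash, via urllib.parse.urljoin. A quick illustration with made-up URLs:

```python
import urllib.parse

# Invented example: a parent project URL and a "../" style submodule URL.
parent_url = "https://example.com/platform/manifest"
sub_url = "../other/repo.git"

print(urllib.parse.urljoin("%s/" % parent_url, sub_url))
# -> https://example.com/platform/other/repo.git
```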
| 2017 | # Direct Git Commands ## | ||
| 2018 | def EnableRepositoryExtension(self, key, value='true', version=1): | ||
| 2019 | """Enable git repository extension |key| with |value|. | ||
| 2020 | |||
| 2021 | Args: | ||
| 2022 | key: The extension to enable. Omit the "extensions." prefix. | ||
| 2023 | value: The value to use for the extension. | ||
| 2024 | version: The minimum git repository version needed. | ||
| 2025 | """ | ||
| 2026 | # Make sure the git repo version is new enough already. | ||
| 2027 | found_version = self.config.GetInt('core.repositoryFormatVersion') | ||
| 2028 | if found_version is None: | ||
| 2029 | found_version = 0 | ||
| 2030 | if found_version < version: | ||
| 2031 | self.config.SetString('core.repositoryFormatVersion', str(version)) | ||
| 2032 | 1616 | ||
| 2033 | # Enable the extension! | 1617 | No filesystem changes occur here. Actual copying happens later on. |
| 2034 | self.config.SetString('extensions.%s' % (key,), value) | ||
| 2035 | 1618 | ||
| 2036 | def ResolveRemoteHead(self, name=None): | 1619 | Paths should have basic validation run on them before being queued. |
| 2037 | """Find out what the default branch (HEAD) points to. | 1620 | Further checking will be handled when the actual copy happens. |
| 1621 | """ | ||
| 1622 | self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest)) | ||
| 2038 | 1623 | ||
| 2039 | Normally this points to refs/heads/master, but projects are moving to main. | 1624 | def AddLinkFile(self, src, dest, topdir): |
| 2040 | Support whatever the server uses rather than hardcoding "master" ourselves. | 1625 | """Mark |dest| to create a symlink (relative to |topdir|) pointing to |
| 2041 | """ | 1626 | |src|. |
| 2042 | if name is None: | ||
| 2043 | name = self.remote.name | ||
| 2044 | 1627 | ||
| 2045 | # The output will look like (NB: tabs are separators): | 1628 | No filesystem changes occur here. Actual linking happens later on. |
| 2046 | # ref: refs/heads/master HEAD | ||
| 2047 | # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD | ||
| 2048 | output = self.bare_git.ls_remote('-q', '--symref', '--exit-code', name, 'HEAD') | ||
| 2049 | 1629 | ||
| 2050 | for line in output.splitlines(): | 1630 | Paths should have basic validation run on them before being queued. |
| 2051 | lhs, rhs = line.split('\t', 1) | 1631 | Further checking will be handled when the actual link happens. |
| 2052 | if rhs == 'HEAD' and lhs.startswith('ref:'): | 1632 | """ |
| 2053 | return lhs[4:].strip() | 1633 | self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest)) |
| 2054 | 1634 | ||
| 2055 | return None | 1635 | def AddAnnotation(self, name, value, keep): |
| 1636 | self.annotations.append(Annotation(name, value, keep)) | ||
| 2056 | 1637 | ||
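The loop in ResolveRemoteHead() above keys off the `ref:` line that `git ls-remote -q --symref --exit-code <remote> HEAD` prints. The sketch below runs the same parsing against the sample output quoted in the comment; it is illustrative only and not part of project.py:

```python
def default_branch(ls_remote_output):
    """Return the symref target from `git ls-remote --symref <remote> HEAD`."""
    for line in ls_remote_output.splitlines():
        lhs, rhs = line.split("\t", 1)
        if rhs == "HEAD" and lhs.startswith("ref:"):
            return lhs[4:].strip()
    return None

# Columns are tab separated, matching the comment above.
sample = (
    "ref: refs/heads/master\tHEAD\n"
    "5f6803b100bb3cd0f534e96e88c91373e8ed1c44\tHEAD\n"
)
print(default_branch(sample))  # -> refs/heads/master
```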
| 2057 | def _CheckForImmutableRevision(self): | 1638 | def DownloadPatchSet(self, change_id, patch_id): |
| 2058 | try: | 1639 | """Download a single patch set of a single change to FETCH_HEAD.""" |
| 2059 | # If the revision (sha or tag) is not present, then the following function | 1640 | remote = self.GetRemote() |
| 2060 | # throws an error. | ||
| 2061 | self.bare_git.rev_list('-1', '--missing=allow-any', | ||
| 2062 | '%s^0' % self.revisionExpr, '--') | ||
| 2063 | if self.upstream: | ||
| 2064 | rev = self.GetRemote().ToLocal(self.upstream) | ||
| 2065 | self.bare_git.rev_list('-1', '--missing=allow-any', | ||
| 2066 | '%s^0' % rev, '--') | ||
| 2067 | self.bare_git.merge_base('--is-ancestor', self.revisionExpr, rev) | ||
| 2068 | return True | ||
| 2069 | except GitError: | ||
| 2070 | # There is no such persistent revision. We have to fetch it. | ||
| 2071 | return False | ||
| 2072 | |||
| 2073 | def _FetchArchive(self, tarpath, cwd=None): | ||
| 2074 | cmd = ['archive', '-v', '-o', tarpath] | ||
| 2075 | cmd.append('--remote=%s' % self.remote.url) | ||
| 2076 | cmd.append('--prefix=%s/' % self.RelPath(local=False)) | ||
| 2077 | cmd.append(self.revisionExpr) | ||
| 2078 | |||
| 2079 | command = GitCommand(self, cmd, cwd=cwd, | ||
| 2080 | capture_stdout=True, | ||
| 2081 | capture_stderr=True) | ||
| 2082 | |||
| 2083 | if command.Wait() != 0: | ||
| 2084 | raise GitError('git archive %s: %s' % (self.name, command.stderr)) | ||
| 2085 | |||
| 2086 | def _RemoteFetch(self, name=None, | ||
| 2087 | current_branch_only=False, | ||
| 2088 | initial=False, | ||
| 2089 | quiet=False, | ||
| 2090 | verbose=False, | ||
| 2091 | output_redir=None, | ||
| 2092 | alt_dir=None, | ||
| 2093 | tags=True, | ||
| 2094 | prune=False, | ||
| 2095 | depth=None, | ||
| 2096 | submodules=False, | ||
| 2097 | ssh_proxy=None, | ||
| 2098 | force_sync=False, | ||
| 2099 | clone_filter=None, | ||
| 2100 | retry_fetches=2, | ||
| 2101 | retry_sleep_initial_sec=4.0, | ||
| 2102 | retry_exp_factor=2.0): | ||
| 2103 | is_sha1 = False | ||
| 2104 | tag_name = None | ||
| 2105 | # The depth should not be used when fetching to a mirror because | ||
| 2106 | # it will result in a shallow repository that cannot be cloned or | ||
| 2107 | # fetched from. | ||
| 2108 | # The repo project should also never be synced with partial depth. | ||
| 2109 | if self.manifest.IsMirror or self.relpath == '.repo/repo': | ||
| 2110 | depth = None | ||
| 2111 | |||
| 2112 | if depth: | ||
| 2113 | current_branch_only = True | ||
| 2114 | |||
| 2115 | if ID_RE.match(self.revisionExpr) is not None: | ||
| 2116 | is_sha1 = True | ||
| 2117 | |||
| 2118 | if current_branch_only: | ||
| 2119 | if self.revisionExpr.startswith(R_TAGS): | ||
| 2120 | # This is a tag and its commit id should never change. | ||
| 2121 | tag_name = self.revisionExpr[len(R_TAGS):] | ||
| 2122 | elif self.upstream and self.upstream.startswith(R_TAGS): | ||
| 2123 | # This is a tag and its commit id should never change. | ||
| 2124 | tag_name = self.upstream[len(R_TAGS):] | ||
| 2125 | |||
| 2126 | if is_sha1 or tag_name is not None: | ||
| 2127 | if self._CheckForImmutableRevision(): | ||
| 2128 | if verbose: | ||
| 2129 | print('Skipped fetching project %s (already have persistent ref)' | ||
| 2130 | % self.name) | ||
| 2131 | return True | ||
| 2132 | if is_sha1 and not depth: | ||
| 2133 | # When syncing a specific commit and --depth is not set: | ||
| 2134 | # * if upstream is explicitly specified and is not a sha1, fetch only | ||
| 2135 | # upstream as users expect only upstream to be fetched. | ||
| 2136 | # Note: The commit might not be in upstream, in which case the sync | ||
| 2137 | # will fail. | ||
| 2138 | # * otherwise, fetch all branches to make sure we end up with the | ||
| 2139 | # specific commit. | ||
| 2140 | if self.upstream: | ||
| 2141 | current_branch_only = not ID_RE.match(self.upstream) | ||
| 2142 | else: | ||
| 2143 | current_branch_only = False | ||
| 2144 | 1641 | ||
| 2145 | if not name: | 1642 | cmd = ["fetch", remote.name] |
| 2146 | name = self.remote.name | 1643 | cmd.append( |
| 1644 | "refs/changes/%2.2d/%d/%d" % (change_id % 100, change_id, patch_id) | ||
| 1645 | ) | ||
| 1646 | if GitCommand(self, cmd, bare=True).Wait() != 0: | ||
| 1647 | return None | ||
| 1648 | return DownloadedChange( | ||
| 1649 | self, | ||
| 1650 | self.GetRevisionId(), | ||
| 1651 | change_id, | ||
| 1652 | patch_id, | ||
| 1653 | self.bare_git.rev_parse("FETCH_HEAD"), | ||
| 1654 | ) | ||
| 2147 | 1655 | ||
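The ref that DownloadPatchSet() fetches follows Gerrit's refs/changes layout: the last two digits of the change number, then the change number, then the patch set. A tiny sketch of the same format string:

```python
def change_ref(change_id, patch_id):
    """Build the Gerrit ref fetched by DownloadPatchSet() above."""
    return "refs/changes/%2.2d/%d/%d" % (change_id % 100, change_id, patch_id)

print(change_ref(12345, 6))  # refs/changes/45/12345/6
print(change_ref(7, 1))      # refs/changes/07/7/1
```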
| 2148 | remote = self.GetRemote(name) | 1656 | def DeleteWorktree(self, quiet=False, force=False): |
| 2149 | if not remote.PreConnectFetch(ssh_proxy): | 1657 | """Delete the source checkout and any other housekeeping tasks. |
| 2150 | ssh_proxy = None | ||
| 2151 | 1658 | ||
| 2152 | if initial: | 1659 | This currently leaves behind the internal .repo/ cache state. This |
| 2153 | if alt_dir and 'objects' == os.path.basename(alt_dir): | 1660 | helps when switching branches or manifest changes get reverted as we |
| 2154 | ref_dir = os.path.dirname(alt_dir) | 1661 | don't have to redownload all the git objects. But we should do some GC |
| 2155 | packed_refs = os.path.join(self.gitdir, 'packed-refs') | 1662 | at some point. |
| 2156 | 1663 | ||
| 2157 | all_refs = self.bare_ref.all | 1664 | Args: |
| 2158 | ids = set(all_refs.values()) | 1665 | quiet: Whether to hide normal messages. |
| 2159 | tmp = set() | 1666 | force: Always delete tree even if dirty. |
| 2160 | 1667 | ||
| 2161 | for r, ref_id in GitRefs(ref_dir).all.items(): | 1668 | Returns: |
| 2162 | if r not in all_refs: | 1669 | True if the worktree was completely cleaned out. |
| 2163 | if r.startswith(R_TAGS) or remote.WritesTo(r): | 1670 | """ |
| 2164 | all_refs[r] = ref_id | 1671 | if self.IsDirty(): |
| 2165 | ids.add(ref_id) | 1672 | if force: |
| 2166 | continue | 1673 | print( |
| 2167 | 1674 | "warning: %s: Removing dirty project: uncommitted changes " | |
| 2168 | if ref_id in ids: | 1675 | "lost." % (self.RelPath(local=False),), |
| 2169 | continue | 1676 | file=sys.stderr, |
| 2170 | 1677 | ) | |
| 2171 | r = 'refs/_alt/%s' % ref_id | 1678 | else: |
| 2172 | all_refs[r] = ref_id | 1679 | print( |
| 2173 | ids.add(ref_id) | 1680 | "error: %s: Cannot remove project: uncommitted changes are " |
| 2174 | tmp.add(r) | 1681 | "present.\n" % (self.RelPath(local=False),), |
| 2175 | 1682 | file=sys.stderr, | |
| 2176 | tmp_packed_lines = [] | 1683 | ) |
| 2177 | old_packed_lines = [] | 1684 | return False |
| 2178 | 1685 | ||
| 2179 | for r in sorted(all_refs): | 1686 | if not quiet: |
| 2180 | line = '%s %s\n' % (all_refs[r], r) | 1687 | print( |
| 2181 | tmp_packed_lines.append(line) | 1688 | "%s: Deleting obsolete checkout." % (self.RelPath(local=False),) |
| 2182 | if r not in tmp: | 1689 | ) |
| 2183 | old_packed_lines.append(line) | 1690 | |
| 2184 | 1691 | # Unlock and delink from the main worktree. We don't use git's worktree | |
| 2185 | tmp_packed = ''.join(tmp_packed_lines) | 1692 | # remove because it will recursively delete projects -- we handle that |
| 2186 | old_packed = ''.join(old_packed_lines) | 1693 | # ourselves below. https://crbug.com/git/48 |
| 2187 | _lwrite(packed_refs, tmp_packed) | 1694 | if self.use_git_worktrees: |
| 2188 | else: | 1695 | needle = platform_utils.realpath(self.gitdir) |
| 2189 | alt_dir = None | 1696 | # Find the git worktree commondir under .repo/worktrees/. |
| 2190 | 1697 | output = self.bare_git.worktree("list", "--porcelain").splitlines()[ | |
| 2191 | cmd = ['fetch'] | 1698 | 0 |
| 2192 | 1699 | ] | |
| 2193 | if clone_filter: | 1700 | assert output.startswith("worktree "), output |
| 2194 | git_require((2, 19, 0), fail=True, msg='partial clones') | 1701 | commondir = output[9:] |
| 2195 | cmd.append('--filter=%s' % clone_filter) | 1702 | # Walk each of the git worktrees to see where they point. |
| 2196 | self.EnableRepositoryExtension('partialclone', self.remote.name) | 1703 | configs = os.path.join(commondir, "worktrees") |
| 2197 | 1704 | for name in os.listdir(configs): | |
| 2198 | if depth: | 1705 | gitdir = os.path.join(configs, name, "gitdir") |
| 2199 | cmd.append('--depth=%s' % depth) | 1706 | with open(gitdir) as fp: |
| 2200 | else: | 1707 | relpath = fp.read().strip() |
| 2201 | # If this repo has shallow objects, then we don't know which refs have | 1708 | # Resolve the checkout path and see if it matches this project. |
| 2202 | # shallow objects or not. Tell git to unshallow all fetched refs. Don't | 1709 | fullpath = platform_utils.realpath( |
| 2203 | # do this with projects that don't have shallow objects, since it is less | 1710 | os.path.join(configs, name, relpath) |
| 2204 | # efficient. | 1711 | ) |
| 2205 | if os.path.exists(os.path.join(self.gitdir, 'shallow')): | 1712 | if fullpath == needle: |
| 2206 | cmd.append('--depth=2147483647') | 1713 | platform_utils.rmtree(os.path.join(configs, name)) |
| 2207 | 1714 | ||
| 2208 | if not verbose: | 1715 | # Delete the .git directory first, so we're less likely to have a |
| 2209 | cmd.append('--quiet') | 1716 | # partially working git repository around. There shouldn't be any git |
| 2210 | if not quiet and sys.stdout.isatty(): | 1717 | # projects here, so rmtree works. |
| 2211 | cmd.append('--progress') | 1718 | |
| 2212 | if not self.worktree: | 1719 | # Try to remove plain files first in case of git worktrees. If this |
| 2213 | cmd.append('--update-head-ok') | 1720 | # fails for any reason, we'll fall back to rmtree, and that'll display |
| 2214 | cmd.append(name) | 1721 | # errors if it can't remove things either. |
| 2215 | |||
| 2216 | if force_sync: | ||
| 2217 | cmd.append('--force') | ||
| 2218 | |||
| 2219 | if prune: | ||
| 2220 | cmd.append('--prune') | ||
| 2221 | |||
| 2222 | # Always pass something for --recurse-submodules, git with GIT_DIR behaves | ||
| 2223 | # incorrectly when not given `--recurse-submodules=no`. (b/218891912) | ||
| 2224 | cmd.append(f'--recurse-submodules={"on-demand" if submodules else "no"}') | ||
| 2225 | |||
| 2226 | spec = [] | ||
| 2227 | if not current_branch_only: | ||
| 2228 | # Fetch whole repo | ||
| 2229 | spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*'))) | ||
| 2230 | elif tag_name is not None: | ||
| 2231 | spec.append('tag') | ||
| 2232 | spec.append(tag_name) | ||
| 2233 | |||
| 2234 | if self.manifest.IsMirror and not current_branch_only: | ||
| 2235 | branch = None | ||
| 2236 | else: | ||
| 2237 | branch = self.revisionExpr | ||
| 2238 | if (not self.manifest.IsMirror and is_sha1 and depth | ||
| 2239 | and git_require((1, 8, 3))): | ||
| 2240 | # Shallow checkout of a specific commit, fetch from that commit and not | ||
| 2241 | # the heads only as the commit might be deeper in the history. | ||
| 2242 | spec.append(branch) | ||
| 2243 | if self.upstream: | ||
| 2244 | spec.append(self.upstream) | ||
| 2245 | else: | ||
| 2246 | if is_sha1: | ||
| 2247 | branch = self.upstream | ||
| 2248 | if branch is not None and branch.strip(): | ||
| 2249 | if not branch.startswith('refs/'): | ||
| 2250 | branch = R_HEADS + branch | ||
| 2251 | spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch))) | ||
| 2252 | |||
| 2253 | # If mirroring repo and we cannot deduce the tag or branch to fetch, fetch | ||
| 2254 | # whole repo. | ||
| 2255 | if self.manifest.IsMirror and not spec: | ||
| 2256 | spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*'))) | ||
| 2257 | |||
| 2258 | # If using depth then we should not get all the tags since they may | ||
| 2259 | # be outside of the depth. | ||
| 2260 | if not tags or depth: | ||
| 2261 | cmd.append('--no-tags') | ||
| 2262 | else: | ||
| 2263 | cmd.append('--tags') | ||
| 2264 | spec.append(str((u'+refs/tags/*:') + remote.ToLocal('refs/tags/*'))) | ||
| 2265 | |||
| 2266 | cmd.extend(spec) | ||
| 2267 | |||
| 2268 | # At least one retry minimum due to git remote prune. | ||
| 2269 | retry_fetches = max(retry_fetches, 2) | ||
| 2270 | retry_cur_sleep = retry_sleep_initial_sec | ||
| 2271 | ok = prune_tried = False | ||
| 2272 | for try_n in range(retry_fetches): | ||
| 2273 | gitcmd = GitCommand( | ||
| 2274 | self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects'), | ||
| 2275 | ssh_proxy=ssh_proxy, | ||
| 2276 | merge_output=True, capture_stdout=quiet or bool(output_redir)) | ||
| 2277 | if gitcmd.stdout and not quiet and output_redir: | ||
| 2278 | output_redir.write(gitcmd.stdout) | ||
| 2279 | ret = gitcmd.Wait() | ||
| 2280 | if ret == 0: | ||
| 2281 | ok = True | ||
| 2282 | break | ||
| 2283 | |||
| 2284 | # Retry later due to HTTP 429 Too Many Requests. | ||
| 2285 | elif (gitcmd.stdout and | ||
| 2286 | 'error:' in gitcmd.stdout and | ||
| 2287 | 'HTTP 429' in gitcmd.stdout): | ||
| 2288 | # Fallthru to sleep+retry logic at the bottom. | ||
| 2289 | pass | ||
| 2290 | |||
| 2291 | # Try to prune remote branches once in case there are conflicts. | ||
| 2292 | # For example, if the remote had refs/heads/upstream, but deleted that and | ||
| 2293 | # now has refs/heads/upstream/foo. | ||
| 2294 | elif (gitcmd.stdout and | ||
| 2295 | 'error:' in gitcmd.stdout and | ||
| 2296 | 'git remote prune' in gitcmd.stdout and | ||
| 2297 | not prune_tried): | ||
| 2298 | prune_tried = True | ||
| 2299 | prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True, | ||
| 2300 | ssh_proxy=ssh_proxy) | ||
| 2301 | ret = prunecmd.Wait() | ||
| 2302 | if ret: | ||
| 2303 | break | ||
| 2304 | print('retrying fetch after pruning remote branches', file=output_redir) | ||
| 2305 | # Continue right away so we don't sleep as we shouldn't need to. | ||
| 2306 | continue | ||
| 2307 | elif current_branch_only and is_sha1 and ret == 128: | ||
| 2308 | # Exit code 128 means "couldn't find the ref you asked for"; if we're | ||
| 2309 | # in sha1 mode, we just tried sync'ing from the upstream field; it | ||
| 2310 | # doesn't exist, thus abort the optimization attempt and do a full sync. | ||
| 2311 | break | ||
| 2312 | elif ret < 0: | ||
| 2313 | # Git died with a signal, exit immediately | ||
| 2314 | break | ||
| 2315 | |||
| 2316 | # Figure out how long to sleep before the next attempt, if there is one. | ||
| 2317 | if not verbose and gitcmd.stdout: | ||
| 2318 | print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir) | ||
| 2319 | if try_n < retry_fetches - 1: | ||
| 2320 | print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep), | ||
| 2321 | file=output_redir) | ||
| 2322 | time.sleep(retry_cur_sleep) | ||
| 2323 | retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep, | ||
| 2324 | MAXIMUM_RETRY_SLEEP_SEC) | ||
| 2325 | retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT, | ||
| 2326 | RETRY_JITTER_PERCENT)) | ||
| 2327 | |||
| 2328 | if initial: | ||
| 2329 | if alt_dir: | ||
| 2330 | if old_packed != '': | ||
| 2331 | _lwrite(packed_refs, old_packed) | ||
| 2332 | else: | ||
| 2333 | platform_utils.remove(packed_refs) | ||
| 2334 | self.bare_git.pack_refs('--all', '--prune') | ||
| 2335 | |||
| 2336 | if is_sha1 and current_branch_only: | ||
| 2337 | # We just synced the given upstream branch; verify we | ||
| 2338 | # got what we wanted, else trigger a second run of all | ||
| 2339 | # refs. | ||
| 2340 | if not self._CheckForImmutableRevision(): | ||
| 2341 | # Sync the current branch only with depth set to None. | ||
| 2342 | # We always pass depth=None down to avoid infinite recursion. | ||
| 2343 | return self._RemoteFetch( | ||
| 2344 | name=name, quiet=quiet, verbose=verbose, output_redir=output_redir, | ||
| 2345 | current_branch_only=current_branch_only and depth, | ||
| 2346 | initial=False, alt_dir=alt_dir, | ||
| 2347 | depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter) | ||
| 2348 | |||
| 2349 | return ok | ||
| 2350 | |||
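Between failed fetch attempts, _RemoteFetch() sleeps for an exponentially growing interval, capped at MAXIMUM_RETRY_SLEEP_SEC and randomized by RETRY_JITTER_PERCENT. A standalone sketch of that schedule; the cap value below is a placeholder for illustration, not necessarily what the module defines:

```python
import random

MAXIMUM_RETRY_SLEEP_SEC = 3600.0  # placeholder cap, assumed for this sketch
RETRY_JITTER_PERCENT = 0.1

def fetch_retry_delays(retry_fetches=4, initial=4.0, factor=2.0):
    """Return the sleep sequence the retry loop above would use."""
    delays = []
    sleep = initial
    for _ in range(retry_fetches - 1):  # no sleep after the final attempt
        delays.append(sleep)
        sleep = min(factor * sleep, MAXIMUM_RETRY_SLEEP_SEC)
        sleep *= 1 - random.uniform(-RETRY_JITTER_PERCENT, RETRY_JITTER_PERCENT)
    return delays

print(fetch_retry_delays())  # e.g. [4.0, 8.3, 15.6], depending on jitter
```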
| 2351 | def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False): | ||
| 2352 | if initial and (self.manifest.manifestProject.depth or self.clone_depth): | ||
| 2353 | return False | ||
| 2354 | |||
| 2355 | remote = self.GetRemote() | ||
| 2356 | bundle_url = remote.url + '/clone.bundle' | ||
| 2357 | bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url) | ||
| 2358 | if GetSchemeFromUrl(bundle_url) not in ('http', 'https', | ||
| 2359 | 'persistent-http', | ||
| 2360 | 'persistent-https'): | ||
| 2361 | return False | ||
| 2362 | |||
| 2363 | bundle_dst = os.path.join(self.gitdir, 'clone.bundle') | ||
| 2364 | bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp') | ||
| 2365 | |||
| 2366 | exist_dst = os.path.exists(bundle_dst) | ||
| 2367 | exist_tmp = os.path.exists(bundle_tmp) | ||
| 2368 | |||
| 2369 | if not initial and not exist_dst and not exist_tmp: | ||
| 2370 | return False | ||
| 2371 | |||
| 2372 | if not exist_dst: | ||
| 2373 | exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet, | ||
| 2374 | verbose) | ||
| 2375 | if not exist_dst: | ||
| 2376 | return False | ||
| 2377 | |||
| 2378 | cmd = ['fetch'] | ||
| 2379 | if not verbose: | ||
| 2380 | cmd.append('--quiet') | ||
| 2381 | if not quiet and sys.stdout.isatty(): | ||
| 2382 | cmd.append('--progress') | ||
| 2383 | if not self.worktree: | ||
| 2384 | cmd.append('--update-head-ok') | ||
| 2385 | cmd.append(bundle_dst) | ||
| 2386 | for f in remote.fetch: | ||
| 2387 | cmd.append(str(f)) | ||
| 2388 | cmd.append('+refs/tags/*:refs/tags/*') | ||
| 2389 | |||
| 2390 | ok = GitCommand( | ||
| 2391 | self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects')).Wait() == 0 | ||
| 2392 | platform_utils.remove(bundle_dst, missing_ok=True) | ||
| 2393 | platform_utils.remove(bundle_tmp, missing_ok=True) | ||
| 2394 | return ok | ||
| 2395 | |||
| 2396 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose): | ||
| 2397 | platform_utils.remove(dstPath, missing_ok=True) | ||
| 2398 | |||
| 2399 | cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location'] | ||
| 2400 | if quiet: | ||
| 2401 | cmd += ['--silent', '--show-error'] | ||
| 2402 | if os.path.exists(tmpPath): | ||
| 2403 | size = os.stat(tmpPath).st_size | ||
| 2404 | if size >= 1024: | ||
| 2405 | cmd += ['--continue-at', '%d' % (size,)] | ||
| 2406 | else: | ||
| 2407 | platform_utils.remove(tmpPath) | ||
| 2408 | with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy): | ||
| 2409 | if cookiefile: | ||
| 2410 | cmd += ['--cookie', cookiefile] | ||
| 2411 | if proxy: | ||
| 2412 | cmd += ['--proxy', proxy] | ||
| 2413 | elif 'http_proxy' in os.environ and 'darwin' == sys.platform: | ||
| 2414 | cmd += ['--proxy', os.environ['http_proxy']] | ||
| 2415 | if srcUrl.startswith('persistent-https'): | ||
| 2416 | srcUrl = 'http' + srcUrl[len('persistent-https'):] | ||
| 2417 | elif srcUrl.startswith('persistent-http'): | ||
| 2418 | srcUrl = 'http' + srcUrl[len('persistent-http'):] | ||
| 2419 | cmd += [srcUrl] | ||
| 2420 | |||
| 2421 | proc = None | ||
| 2422 | with Trace('Fetching bundle: %s', ' '.join(cmd)): | ||
| 2423 | if verbose: | ||
| 2424 | print('%s: Downloading bundle: %s' % (self.name, srcUrl)) | ||
| 2425 | stdout = None if verbose else subprocess.PIPE | ||
| 2426 | stderr = None if verbose else subprocess.STDOUT | ||
| 2427 | try: | 1722 | try: |
| 2428 | proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr) | 1723 | platform_utils.remove(self.gitdir) |
| 2429 | except OSError: | 1724 | except OSError: |
| 2430 | return False | 1725 | pass |
| 2431 | 1726 | try: | |
| 2432 | (output, _) = proc.communicate() | 1727 | platform_utils.rmtree(self.gitdir) |
| 2433 | curlret = proc.returncode | 1728 | except OSError as e: |
| 2434 | 1729 | if e.errno != errno.ENOENT: | |
| 2435 | if curlret == 22: | 1730 | print("error: %s: %s" % (self.gitdir, e), file=sys.stderr) |
| 2436 | # From curl man page: | 1731 | print( |
| 2437 | # 22: HTTP page not retrieved. The requested url was not found or | 1732 | "error: %s: Failed to delete obsolete checkout; remove " |
| 2438 | # returned another error with the HTTP error code being 400 or above. | 1733 | "manually, then run `repo sync -l`." |
| 2439 | # This return code only appears if -f, --fail is used. | 1734 | % (self.RelPath(local=False),), |
| 2440 | if verbose: | 1735 | file=sys.stderr, |
| 2441 | print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name) | 1736 | ) |
| 2442 | if output: | 1737 | return False |
| 2443 | print('Curl output:\n%s' % output) | 1738 | |
| 2444 | return False | 1739 | # Delete everything under the worktree, except for directories that |
| 2445 | elif curlret and not verbose and output: | 1740 | # contain another git project. |
| 2446 | print('%s' % output, file=sys.stderr) | 1741 | dirs_to_remove = [] |
| 1742 | failed = False | ||
| 1743 | for root, dirs, files in platform_utils.walk(self.worktree): | ||
| 1744 | for f in files: | ||
| 1745 | path = os.path.join(root, f) | ||
| 1746 | try: | ||
| 1747 | platform_utils.remove(path) | ||
| 1748 | except OSError as e: | ||
| 1749 | if e.errno != errno.ENOENT: | ||
| 1750 | print( | ||
| 1751 | "error: %s: Failed to remove: %s" % (path, e), | ||
| 1752 | file=sys.stderr, | ||
| 1753 | ) | ||
| 1754 | failed = True | ||
| 1755 | dirs[:] = [ | ||
| 1756 | d | ||
| 1757 | for d in dirs | ||
| 1758 | if not os.path.lexists(os.path.join(root, d, ".git")) | ||
| 1759 | ] | ||
| 1760 | dirs_to_remove += [ | ||
| 1761 | os.path.join(root, d) | ||
| 1762 | for d in dirs | ||
| 1763 | if os.path.join(root, d) not in dirs_to_remove | ||
| 1764 | ] | ||
| 1765 | for d in reversed(dirs_to_remove): | ||
| 1766 | if platform_utils.islink(d): | ||
| 1767 | try: | ||
| 1768 | platform_utils.remove(d) | ||
| 1769 | except OSError as e: | ||
| 1770 | if e.errno != errno.ENOENT: | ||
| 1771 | print( | ||
| 1772 | "error: %s: Failed to remove: %s" % (d, e), | ||
| 1773 | file=sys.stderr, | ||
| 1774 | ) | ||
| 1775 | failed = True | ||
| 1776 | elif not platform_utils.listdir(d): | ||
| 1777 | try: | ||
| 1778 | platform_utils.rmdir(d) | ||
| 1779 | except OSError as e: | ||
| 1780 | if e.errno != errno.ENOENT: | ||
| 1781 | print( | ||
| 1782 | "error: %s: Failed to remove: %s" % (d, e), | ||
| 1783 | file=sys.stderr, | ||
| 1784 | ) | ||
| 1785 | failed = True | ||
| 1786 | if failed: | ||
| 1787 | print( | ||
| 1788 | "error: %s: Failed to delete obsolete checkout." | ||
| 1789 | % (self.RelPath(local=False),), | ||
| 1790 | file=sys.stderr, | ||
| 1791 | ) | ||
| 1792 | print( | ||
| 1793 | " Remove manually, then run `repo sync -l`.", | ||
| 1794 | file=sys.stderr, | ||
| 1795 | ) | ||
| 1796 | return False | ||
| 1797 | |||
| 1798 | # Try deleting parent dirs if they are empty. | ||
| 1799 | path = self.worktree | ||
| 1800 | while path != self.manifest.topdir: | ||
| 1801 | try: | ||
| 1802 | platform_utils.rmdir(path) | ||
| 1803 | except OSError as e: | ||
| 1804 | if e.errno != errno.ENOENT: | ||
| 1805 | break | ||
| 1806 | path = os.path.dirname(path) | ||
| 2447 | 1807 | ||
| 2448 | if os.path.exists(tmpPath): | ||
| 2449 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): | ||
| 2450 | platform_utils.rename(tmpPath, dstPath) | ||
| 2451 | return True | 1808 | return True |
| 2452 | else: | ||
| 2453 | platform_utils.remove(tmpPath) | ||
| 2454 | return False | ||
| 2455 | else: | ||
| 2456 | return False | ||
| 2457 | 1809 | ||
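_FetchBundle() above shells out to curl and resumes a partially downloaded clone.bundle when at least 1 KiB is already on disk. A simplified sketch of how that command line is assembled (cookie, proxy, and persistent-http URL handling are left out here):

```python
import os

def bundle_curl_cmd(src_url, tmp_path, quiet=False):
    """Sketch of the resumable curl invocation built by _FetchBundle() above."""
    cmd = ["curl", "--fail", "--output", tmp_path, "--netrc", "--location"]
    if quiet:
        cmd += ["--silent", "--show-error"]
    if os.path.exists(tmp_path):
        size = os.stat(tmp_path).st_size
        if size >= 1024:
            # Resume the partial download instead of starting over.
            cmd += ["--continue-at", "%d" % size]
        else:
            # Too small to be worth resuming; refetch from scratch.
            os.remove(tmp_path)
    return cmd + [src_url]

print(bundle_curl_cmd("https://example.com/clone.bundle", "/tmp/clone.bundle.tmp"))
```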
| 2458 | def _IsValidBundle(self, path, quiet): | 1810 | def StartBranch(self, name, branch_merge="", revision=None): |
| 2459 | try: | 1811 | """Create a new branch off the manifest's revision.""" |
| 2460 | with open(path, 'rb') as f: | 1812 | if not branch_merge: |
| 2461 | if f.read(16) == b'# v2 git bundle\n': | 1813 | branch_merge = self.revisionExpr |
| 2462 | return True | 1814 | head = self.work_git.GetHead() |
| 1815 | if head == (R_HEADS + name): | ||
| 1816 | return True | ||
| 1817 | |||
| 1818 | all_refs = self.bare_ref.all | ||
| 1819 | if R_HEADS + name in all_refs: | ||
| 1820 | return GitCommand(self, ["checkout", "-q", name, "--"]).Wait() == 0 | ||
| 1821 | |||
| 1822 | branch = self.GetBranch(name) | ||
| 1823 | branch.remote = self.GetRemote() | ||
| 1824 | branch.merge = branch_merge | ||
| 1825 | if not branch.merge.startswith("refs/") and not ID_RE.match( | ||
| 1826 | branch_merge | ||
| 1827 | ): | ||
| 1828 | branch.merge = R_HEADS + branch_merge | ||
| 1829 | |||
| 1830 | if revision is None: | ||
| 1831 | revid = self.GetRevisionId(all_refs) | ||
| 2463 | else: | 1832 | else: |
| 2464 | if not quiet: | 1833 | revid = self.work_git.rev_parse(revision) |
| 2465 | print("Invalid clone.bundle file; ignoring.", file=sys.stderr) | 1834 | |
| 2466 | return False | 1835 | if head.startswith(R_HEADS): |
| 2467 | except OSError: | 1836 | try: |
| 2468 | return False | 1837 | head = all_refs[head] |
| 2469 | 1838 | except KeyError: | |
| 2470 | def _Checkout(self, rev, quiet=False): | 1839 | head = None |
| 2471 | cmd = ['checkout'] | 1840 | if revid and head and revid == head: |
| 2472 | if quiet: | 1841 | ref = R_HEADS + name |
| 2473 | cmd.append('-q') | 1842 | self.work_git.update_ref(ref, revid) |
| 2474 | cmd.append(rev) | 1843 | self.work_git.symbolic_ref(HEAD, ref) |
| 2475 | cmd.append('--') | 1844 | branch.Save() |
| 2476 | if GitCommand(self, cmd).Wait() != 0: | 1845 | return True |
| 2477 | if self._allrefs: | 1846 | |
| 2478 | raise GitError('%s checkout %s ' % (self.name, rev)) | 1847 | if ( |
| 2479 | 1848 | GitCommand( | |
| 2480 | def _CherryPick(self, rev, ffonly=False, record_origin=False): | 1849 | self, ["checkout", "-q", "-b", branch.name, revid] |
| 2481 | cmd = ['cherry-pick'] | 1850 | ).Wait() |
| 2482 | if ffonly: | 1851 | == 0 |
| 2483 | cmd.append('--ff') | 1852 | ): |
| 2484 | if record_origin: | 1853 | branch.Save() |
| 2485 | cmd.append('-x') | 1854 | return True |
| 2486 | cmd.append(rev) | 1855 | return False |
| 2487 | cmd.append('--') | ||
| 2488 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2489 | if self._allrefs: | ||
| 2490 | raise GitError('%s cherry-pick %s ' % (self.name, rev)) | ||
| 2491 | |||
| 2492 | def _LsRemote(self, refs): | ||
| 2493 | cmd = ['ls-remote', self.remote.name, refs] | ||
| 2494 | p = GitCommand(self, cmd, capture_stdout=True) | ||
| 2495 | if p.Wait() == 0: | ||
| 2496 | return p.stdout | ||
| 2497 | return None | ||
| 2498 | |||
| 2499 | def _Revert(self, rev): | ||
| 2500 | cmd = ['revert'] | ||
| 2501 | cmd.append('--no-edit') | ||
| 2502 | cmd.append(rev) | ||
| 2503 | cmd.append('--') | ||
| 2504 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2505 | if self._allrefs: | ||
| 2506 | raise GitError('%s revert %s ' % (self.name, rev)) | ||
| 2507 | |||
| 2508 | def _ResetHard(self, rev, quiet=True): | ||
| 2509 | cmd = ['reset', '--hard'] | ||
| 2510 | if quiet: | ||
| 2511 | cmd.append('-q') | ||
| 2512 | cmd.append(rev) | ||
| 2513 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2514 | raise GitError('%s reset --hard %s ' % (self.name, rev)) | ||
| 2515 | |||
| 2516 | def _SyncSubmodules(self, quiet=True): | ||
| 2517 | cmd = ['submodule', 'update', '--init', '--recursive'] | ||
| 2518 | if quiet: | ||
| 2519 | cmd.append('-q') | ||
| 2520 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2521 | raise GitError('%s submodule update --init --recursive ' % self.name) | ||
| 2522 | |||
| 2523 | def _Rebase(self, upstream, onto=None): | ||
| 2524 | cmd = ['rebase'] | ||
| 2525 | if onto is not None: | ||
| 2526 | cmd.extend(['--onto', onto]) | ||
| 2527 | cmd.append(upstream) | ||
| 2528 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2529 | raise GitError('%s rebase %s ' % (self.name, upstream)) | ||
| 2530 | |||
| 2531 | def _FastForward(self, head, ffonly=False): | ||
| 2532 | cmd = ['merge', '--no-stat', head] | ||
| 2533 | if ffonly: | ||
| 2534 | cmd.append("--ff-only") | ||
| 2535 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2536 | raise GitError('%s merge %s ' % (self.name, head)) | ||
| 2537 | |||
| 2538 | def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False): | ||
| 2539 | init_git_dir = not os.path.exists(self.gitdir) | ||
| 2540 | init_obj_dir = not os.path.exists(self.objdir) | ||
| 2541 | try: | ||
| 2542 | # Initialize the bare repository, which contains all of the objects. | ||
| 2543 | if init_obj_dir: | ||
| 2544 | os.makedirs(self.objdir) | ||
| 2545 | self.bare_objdir.init() | ||
| 2546 | 1856 | ||
| 2547 | self._UpdateHooks(quiet=quiet) | 1857 | def CheckoutBranch(self, name): |
| 1858 | """Checkout a local topic branch. | ||
| 2548 | 1859 | ||
| 2549 | if self.use_git_worktrees: | 1860 | Args: |
| 2550 | # Enable per-worktree config file support if possible. This is more a | 1861 | name: The name of the branch to checkout. |
| 2551 | # nice-to-have feature for users rather than a hard requirement. | 1862 | |
| 2552 | if git_require((2, 20, 0)): | 1863 | Returns: |
| 2553 | self.EnableRepositoryExtension('worktreeConfig') | 1864 | True if the checkout succeeded; False if it didn't; None if the |
| 2554 | 1865 | branch didn't exist. | |
| 2555 | # If we have a separate directory to hold refs, initialize it as well. | 1866 | """ |
| 2556 | if self.objdir != self.gitdir: | 1867 | rev = R_HEADS + name |
| 2557 | if init_git_dir: | 1868 | head = self.work_git.GetHead() |
| 2558 | os.makedirs(self.gitdir) | 1869 | if head == rev: |
| 2559 | 1870 | # Already on the branch. | |
| 2560 | if init_obj_dir or init_git_dir: | 1871 | return True |
| 2561 | self._ReferenceGitDir(self.objdir, self.gitdir, copy_all=True) | 1872 | |
| 1873 | all_refs = self.bare_ref.all | ||
| 2562 | try: | 1874 | try: |
| 2563 | self._CheckDirReference(self.objdir, self.gitdir) | 1875 | revid = all_refs[rev] |
| 2564 | except GitError as e: | 1876 | except KeyError: |
| 2565 | if force_sync: | 1877 | # Branch does not exist in this project. |
| 2566 | print("Retrying clone after deleting %s" % | 1878 | return None |
| 2567 | self.gitdir, file=sys.stderr) | 1879 | |
| 1880 | if head.startswith(R_HEADS): | ||
| 2568 | try: | 1881 | try: |
| 2569 | platform_utils.rmtree(platform_utils.realpath(self.gitdir)) | 1882 | head = all_refs[head] |
| 2570 | if self.worktree and os.path.exists(platform_utils.realpath | 1883 | except KeyError: |
| 2571 | (self.worktree)): | 1884 | head = None |
| 2572 | platform_utils.rmtree(platform_utils.realpath(self.worktree)) | 1885 | |
| 2573 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False, | 1886 | if head == revid: |
| 2574 | quiet=quiet) | 1887 | # Same revision; just update HEAD to point to the new |
| 2575 | except Exception: | 1888 | # target branch, but otherwise take no other action. |
| 2576 | raise e | 1889 | _lwrite( |
| 2577 | raise e | 1890 | self.work_git.GetDotgitPath(subpath=HEAD), |
| 2578 | 1891 | "ref: %s%s\n" % (R_HEADS, name), | |
| 2579 | if init_git_dir: | 1892 | ) |
| 2580 | mp = self.manifest.manifestProject | 1893 | return True |
| 2581 | ref_dir = mp.reference or '' | 1894 | |
| 2582 | 1895 | return ( | |
| 2583 | def _expanded_ref_dirs(): | 1896 | GitCommand( |
| 2584 | """Iterate through the possible git reference directory paths.""" | 1897 | self, |
| 2585 | name = self.name + '.git' | 1898 | ["checkout", name, "--"], |
| 2586 | yield mirror_git or os.path.join(ref_dir, name) | 1899 | capture_stdout=True, |
| 2587 | for prefix in '', self.remote.name: | 1900 | capture_stderr=True, |
| 2588 | yield os.path.join(ref_dir, '.repo', 'project-objects', prefix, name) | 1901 | ).Wait() |
| 2589 | yield os.path.join(ref_dir, '.repo', 'worktrees', prefix, name) | 1902 | == 0 |
| 2590 | 1903 | ) | |
| 2591 | if ref_dir or mirror_git: | ||
| 2592 | found_ref_dir = None | ||
| 2593 | for path in _expanded_ref_dirs(): | ||
| 2594 | if os.path.exists(path): | ||
| 2595 | found_ref_dir = path | ||
| 2596 | break | ||
| 2597 | ref_dir = found_ref_dir | ||
| 2598 | |||
| 2599 | if ref_dir: | ||
| 2600 | if not os.path.isabs(ref_dir): | ||
| 2601 | # The alternate directory is relative to the object database. | ||
| 2602 | ref_dir = os.path.relpath(ref_dir, | ||
| 2603 | os.path.join(self.objdir, 'objects')) | ||
| 2604 | _lwrite(os.path.join(self.objdir, 'objects/info/alternates'), | ||
| 2605 | os.path.join(ref_dir, 'objects') + '\n') | ||
| 2606 | |||
| 2607 | m = self.manifest.manifestProject.config | ||
| 2608 | for key in ['user.name', 'user.email']: | ||
| 2609 | if m.Has(key, include_defaults=False): | ||
| 2610 | self.config.SetString(key, m.GetString(key)) | ||
| 2611 | if not self.manifest.EnableGitLfs: | ||
| 2612 | self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f') | ||
| 2613 | self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip') | ||
| 2614 | self.config.SetBoolean('core.bare', True if self.manifest.IsMirror else None) | ||
| 2615 | except Exception: | ||
| 2616 | if init_obj_dir and os.path.exists(self.objdir): | ||
| 2617 | platform_utils.rmtree(self.objdir) | ||
| 2618 | if init_git_dir and os.path.exists(self.gitdir): | ||
| 2619 | platform_utils.rmtree(self.gitdir) | ||
| 2620 | raise | ||
| 2621 | |||
| 2622 | def _UpdateHooks(self, quiet=False): | ||
| 2623 | if os.path.exists(self.objdir): | ||
| 2624 | self._InitHooks(quiet=quiet) | ||
| 2625 | |||
| 2626 | def _InitHooks(self, quiet=False): | ||
| 2627 | hooks = platform_utils.realpath(os.path.join(self.objdir, 'hooks')) | ||
| 2628 | if not os.path.exists(hooks): | ||
| 2629 | os.makedirs(hooks) | ||
| 2630 | |||
| 2631 | # Delete sample hooks. They're noise. | ||
| 2632 | for hook in glob.glob(os.path.join(hooks, '*.sample')): | ||
| 2633 | try: | ||
| 2634 | platform_utils.remove(hook, missing_ok=True) | ||
| 2635 | except PermissionError: | ||
| 2636 | pass | ||
| 2637 | |||
| 2638 | for stock_hook in _ProjectHooks(): | ||
| 2639 | name = os.path.basename(stock_hook) | ||
| 2640 | |||
| 2641 | if name in ('commit-msg',) and not self.remote.review \ | ||
| 2642 | and self is not self.manifest.manifestProject: | ||
| 2643 | # Don't install a Gerrit Code Review hook if this | ||
| 2644 | # project does not appear to use it for reviews. | ||
| 2645 | # | ||
| 2646 | # Since the manifest project is one of those, but is also | ||
| 2647 | # managed through Gerrit, it's excluded. | ||
| 2648 | continue | ||
| 2649 | |||
| 2650 | dst = os.path.join(hooks, name) | ||
| 2651 | if platform_utils.islink(dst): | ||
| 2652 | continue | ||
| 2653 | if os.path.exists(dst): | ||
| 2654 | # If the files are the same, we'll leave it alone. We create symlinks | ||
| 2655 | # below by default but fall back to hardlinks if the OS blocks them. | ||
| 2656 | # So if we're here, it's probably because we made a hardlink below. | ||
| 2657 | if not filecmp.cmp(stock_hook, dst, shallow=False): | ||
| 2658 | if not quiet: | ||
| 2659 | _warn("%s: Not replacing locally modified %s hook", | ||
| 2660 | self.RelPath(local=False), name) | ||
| 2661 | continue | ||
| 2662 | try: | ||
| 2663 | platform_utils.symlink( | ||
| 2664 | os.path.relpath(stock_hook, os.path.dirname(dst)), dst) | ||
| 2665 | except OSError as e: | ||
| 2666 | if e.errno == errno.EPERM: | ||
| 2667 | try: | ||
| 2668 | os.link(stock_hook, dst) | ||
| 2669 | except OSError: | ||
| 2670 | raise GitError(self._get_symlink_error_message()) | ||
| 2671 | else: | ||
| 2672 | raise | ||
| 2673 | |||
| 2674 | def _InitRemote(self): | ||
| 2675 | if self.remote.url: | ||
| 2676 | remote = self.GetRemote() | ||
| 2677 | remote.url = self.remote.url | ||
| 2678 | remote.pushUrl = self.remote.pushUrl | ||
| 2679 | remote.review = self.remote.review | ||
| 2680 | remote.projectname = self.name | ||
| 2681 | |||
| 2682 | if self.worktree: | ||
| 2683 | remote.ResetFetch(mirror=False) | ||
| 2684 | else: | ||
| 2685 | remote.ResetFetch(mirror=True) | ||
| 2686 | remote.Save() | ||
| 2687 | |||
| 2688 | def _InitMRef(self): | ||
| 2689 | """Initialize the pseudo m/<manifest branch> ref.""" | ||
| 2690 | if self.manifest.branch: | ||
| 2691 | if self.use_git_worktrees: | ||
| 2692 | # Set up the m/ space to point to the worktree-specific ref space. | ||
| 2693 | # We'll update the worktree-specific ref space on each checkout. | ||
| 2694 | ref = R_M + self.manifest.branch | ||
| 2695 | if not self.bare_ref.symref(ref): | ||
| 2696 | self.bare_git.symbolic_ref( | ||
| 2697 | '-m', 'redirecting to worktree scope', | ||
| 2698 | ref, R_WORKTREE_M + self.manifest.branch) | ||
| 2699 | |||
| 2700 | # We can't update this ref with git worktrees until it exists. | ||
| 2701 | # We'll wait until the initial checkout to set it. | ||
| 2702 | if not os.path.exists(self.worktree): | ||
| 2703 | return | ||
| 2704 | |||
| 2705 | base = R_WORKTREE_M | ||
| 2706 | active_git = self.work_git | ||
| 2707 | |||
| 2708 | self._InitAnyMRef(HEAD, self.bare_git, detach=True) | ||
| 2709 | else: | ||
| 2710 | base = R_M | ||
| 2711 | active_git = self.bare_git | ||
| 2712 | |||
| 2713 | self._InitAnyMRef(base + self.manifest.branch, active_git) | ||
| 2714 | |||
| 2715 | def _InitMirrorHead(self): | ||
| 2716 | self._InitAnyMRef(HEAD, self.bare_git) | ||
| 2717 | |||
| 2718 | def _InitAnyMRef(self, ref, active_git, detach=False): | ||
| 2719 | """Initialize |ref| in |active_git| to the value in the manifest. | ||
| 2720 | |||
| 2721 | This points |ref| to the <project> setting in the manifest. | ||
| 2722 | |||
| 2723 | Args: | ||
| 2724 | ref: The branch to update. | ||
| 2725 | active_git: The git repository to make updates in. | ||
| 2726 | detach: Whether to update the target of symbolic refs, or overwrite the ref | ||
| 2727 | directly (and thus make it non-symbolic). | ||
| 2728 | """ | ||
| 2729 | cur = self.bare_ref.symref(ref) | ||
| 2730 | |||
| 2731 | if self.revisionId: | ||
| 2732 | if cur != '' or self.bare_ref.get(ref) != self.revisionId: | ||
| 2733 | msg = 'manifest set to %s' % self.revisionId | ||
| 2734 | dst = self.revisionId + '^0' | ||
| 2735 | active_git.UpdateRef(ref, dst, message=msg, detach=True) | ||
| 2736 | else: | ||
| 2737 | remote = self.GetRemote() | ||
| 2738 | dst = remote.ToLocal(self.revisionExpr) | ||
| 2739 | if cur != dst: | ||
| 2740 | msg = 'manifest set to %s' % self.revisionExpr | ||
| 2741 | if detach: | ||
| 2742 | active_git.UpdateRef(ref, dst, message=msg, detach=True) | ||
| 2743 | else: | ||
| 2744 | active_git.symbolic_ref('-m', msg, ref, dst) | ||
| 2745 | |||
| 2746 | def _CheckDirReference(self, srcdir, destdir): | ||
| 2747 | # Git worktrees don't use symlinks to share at all. | ||
| 2748 | if self.use_git_worktrees: | ||
| 2749 | return | ||
| 2750 | |||
| 2751 | for name in self.shareable_dirs: | ||
| 2752 | # Try to self-heal a bit in simple cases. | ||
| 2753 | dst_path = os.path.join(destdir, name) | ||
| 2754 | src_path = os.path.join(srcdir, name) | ||
| 2755 | |||
| 2756 | dst = platform_utils.realpath(dst_path) | ||
| 2757 | if os.path.lexists(dst): | ||
| 2758 | src = platform_utils.realpath(src_path) | ||
| 2759 | # Fail if the links are pointing to the wrong place | ||
| 2760 | if src != dst: | ||
| 2761 | _error('%s is different in %s vs %s', name, destdir, srcdir) | ||
| 2762 | raise GitError('--force-sync not enabled; cannot overwrite a local ' | ||
| 2763 | 'work tree. If you\'re comfortable with the ' | ||
| 2764 | 'possibility of losing the work tree\'s git metadata,' | ||
| 2765 | ' use `repo sync --force-sync {0}` to ' | ||
| 2766 | 'proceed.'.format(self.RelPath(local=False))) | ||
| 2767 | |||
| 2768 | def _ReferenceGitDir(self, gitdir, dotgit, copy_all): | ||
| 2769 | """Update |dotgit| to reference |gitdir|, using symlinks where possible. | ||
| 2770 | |||
| 2771 | Args: | ||
| 2772 | gitdir: The bare git repository. Must already be initialized. | ||
| 2773 | dotgit: The repository you would like to initialize. | ||
| 2774 | copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|. | ||
| 2775 | This saves you the effort of initializing |dotgit| yourself. | ||
| 2776 | """ | ||
| 2777 | symlink_dirs = self.shareable_dirs[:] | ||
| 2778 | to_symlink = symlink_dirs | ||
| 2779 | |||
| 2780 | to_copy = [] | ||
| 2781 | if copy_all: | ||
| 2782 | to_copy = platform_utils.listdir(gitdir) | ||
| 2783 | |||
| 2784 | dotgit = platform_utils.realpath(dotgit) | ||
| 2785 | for name in set(to_copy).union(to_symlink): | ||
| 2786 | try: | ||
| 2787 | src = platform_utils.realpath(os.path.join(gitdir, name)) | ||
| 2788 | dst = os.path.join(dotgit, name) | ||
| 2789 | |||
| 2790 | if os.path.lexists(dst): | ||
| 2791 | continue | ||
| 2792 | |||
| 2793 | # If the source dir doesn't exist, create an empty dir. | ||
| 2794 | if name in symlink_dirs and not os.path.lexists(src): | ||
| 2795 | os.makedirs(src) | ||
| 2796 | |||
| 2797 | if name in to_symlink: | ||
| 2798 | platform_utils.symlink( | ||
| 2799 | os.path.relpath(src, os.path.dirname(dst)), dst) | ||
| 2800 | elif copy_all and not platform_utils.islink(dst): | ||
| 2801 | if platform_utils.isdir(src): | ||
| 2802 | shutil.copytree(src, dst) | ||
| 2803 | elif os.path.isfile(src): | ||
| 2804 | shutil.copy(src, dst) | ||
| 2805 | |||
| 2806 | except OSError as e: | ||
| 2807 | if e.errno == errno.EPERM: | ||
| 2808 | raise DownloadError(self._get_symlink_error_message()) | ||
| 2809 | else: | ||
| 2810 | raise | ||
| 2811 | |||
| 2812 | def _InitGitWorktree(self): | ||
| 2813 | """Init the project using git worktrees.""" | ||
| 2814 | self.bare_git.worktree('prune') | ||
| 2815 | self.bare_git.worktree('add', '-ff', '--checkout', '--detach', '--lock', | ||
| 2816 | self.worktree, self.GetRevisionId()) | ||
| 2817 | |||
| 2818 | # Rewrite the internal state files to use relative paths between the | ||
| 2819 | # checkouts & worktrees. | ||
| 2820 | dotgit = os.path.join(self.worktree, '.git') | ||
| 2821 | with open(dotgit, 'r') as fp: | ||
| 2822 | # Figure out the checkout->worktree path. | ||
| 2823 | setting = fp.read() | ||
| 2824 | assert setting.startswith('gitdir:') | ||
| 2825 | git_worktree_path = setting.split(':', 1)[1].strip() | ||
| 2826 | # Some platforms (e.g. Windows) won't let us update dotgit in situ because | ||
| 2827 | # of file permissions. Delete it and recreate it from scratch to avoid that. | ||
| 2828 | platform_utils.remove(dotgit) | ||
| 2829 | # Use relative path from checkout->worktree & maintain Unix line endings | ||
| 2830 | # on all OS's to match git behavior. | ||
| 2831 | with open(dotgit, 'w', newline='\n') as fp: | ||
| 2832 | print('gitdir:', os.path.relpath(git_worktree_path, self.worktree), | ||
| 2833 | file=fp) | ||
| 2834 | # Use relative path from worktree->checkout & maintain Unix line endings | ||
| 2835 | # on all OS's to match git behavior. | ||
| 2836 | with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp: | ||
| 2837 | print(os.path.relpath(dotgit, git_worktree_path), file=fp) | ||
| 2838 | |||
| 2839 | self._InitMRef() | ||
| 2840 | |||
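`_InitGitWorktree` above rewrites the two pointer files that `git worktree add` leaves behind (the `.git` file in the worktree and the `gitdir` file in the per-worktree metadata directory) so they reference each other by relative path, with Unix line endings on every OS. A rough sketch of just that rewrite, assuming both paths already exist:

```python
import os

def relink_worktree(worktree, git_worktree_path):
    """Rewrite the worktree<->gitdir pointers as relative paths."""
    dotgit = os.path.join(worktree, ".git")
    # checkout -> worktree pointer: a one-line ".git" file.
    with open(dotgit, "w", newline="\n") as fp:
        print("gitdir:", os.path.relpath(git_worktree_path, worktree), file=fp)
    # worktree metadata -> checkout back-pointer.
    with open(os.path.join(git_worktree_path, "gitdir"), "w", newline="\n") as fp:
        print(os.path.relpath(dotgit, git_worktree_path), file=fp)
```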
| 2841 | def _InitWorkTree(self, force_sync=False, submodules=False): | ||
| 2842 | """Setup the worktree .git path. | ||
| 2843 | |||
| 2844 | This is the user-visible path like src/foo/.git/. | ||
| 2845 | |||
| 2846 | With non-git-worktrees, this will be a symlink to the .repo/projects/ path. | ||
| 2847 | With git-worktrees, this will be a .git file using "gitdir: ..." syntax. | ||
| 2848 | |||
| 2849 | Older checkouts had .git/ directories. If we see that, migrate it. | ||
| 2850 | |||
| 2851 | This also handles changes in the manifest. Maybe this project was backed | ||
| 2852 | by "foo/bar" on the server, but now it's "new/foo/bar". We have to update | ||
| 2853 | the path we point to under .repo/projects/ to match. | ||
| 2854 | """ | ||
| 2855 | dotgit = os.path.join(self.worktree, '.git') | ||
| 2856 | |||
| 2857 | # If using an old layout style (a directory), migrate it. | ||
| 2858 | if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit): | ||
| 2859 | self._MigrateOldWorkTreeGitDir(dotgit) | ||
| 2860 | |||
| 2861 | init_dotgit = not os.path.exists(dotgit) | ||
| 2862 | if self.use_git_worktrees: | ||
| 2863 | if init_dotgit: | ||
| 2864 | self._InitGitWorktree() | ||
| 2865 | self._CopyAndLinkFiles() | ||
| 2866 | else: | ||
| 2867 | if not init_dotgit: | ||
| 2868 | # See if the project has changed. | ||
| 2869 | if platform_utils.realpath(self.gitdir) != platform_utils.realpath(dotgit): | ||
| 2870 | platform_utils.remove(dotgit) | ||
| 2871 | |||
| 2872 | if init_dotgit or not os.path.exists(dotgit): | ||
| 2873 | os.makedirs(self.worktree, exist_ok=True) | ||
| 2874 | platform_utils.symlink(os.path.relpath(self.gitdir, self.worktree), dotgit) | ||
| 2875 | |||
| 2876 | if init_dotgit: | ||
| 2877 | _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId()) | ||
| 2878 | |||
| 2879 | # Finish checking out the worktree. | ||
| 2880 | cmd = ['read-tree', '--reset', '-u', '-v', HEAD] | ||
| 2881 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2882 | raise GitError('Cannot initialize work tree for ' + self.name) | ||
| 2883 | 1904 | ||
| 2884 | if submodules: | 1905 | def AbandonBranch(self, name): |
| 2885 | self._SyncSubmodules(quiet=True) | 1906 | """Destroy a local topic branch. |
| 2886 | self._CopyAndLinkFiles() | ||
| 2887 | 1907 | ||
| 2888 | @classmethod | 1908 | Args: |
| 2889 | def _MigrateOldWorkTreeGitDir(cls, dotgit): | 1909 | name: The name of the branch to abandon. |
| 2890 | """Migrate the old worktree .git/ dir style to a symlink. | ||
| 2891 | 1910 | ||
| 2892 | This logic specifically only uses state from |dotgit| to figure out where to | 1911 | Returns: |
| 2893 | move content and not |self|. This way if the backing project also changed | 1912 | True if the abandon succeeded; False if it didn't; None if the |
| 2894 | places, we only do the .git/ dir to .git symlink migration here. The path | 1913 | branch didn't exist. |
| 2895 | updates will happen independently. | 1914 | """ |
| 2896 | """ | 1915 | rev = R_HEADS + name |
| 2897 | # Figure out where in .repo/projects/ it's pointing to. | 1916 | all_refs = self.bare_ref.all |
| 2898 | if not os.path.islink(os.path.join(dotgit, 'refs')): | 1917 | if rev not in all_refs: |
| 2899 | raise GitError(f'{dotgit}: unsupported checkout state') | 1918 | # Doesn't exist |
| 2900 | gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, 'refs'))) | 1919 | return None |
| 2901 | 1920 | ||
| 2902 | # Remove known symlink paths that exist in .repo/projects/. | 1921 | head = self.work_git.GetHead() |
| 2903 | KNOWN_LINKS = { | 1922 | if head == rev: |
| 2904 | 'config', 'description', 'hooks', 'info', 'logs', 'objects', | 1923 | # We can't destroy the branch while we are sitting |
| 2905 | 'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn', | 1924 | # on it. Switch to a detached HEAD. |
| 2906 | } | 1925 | head = all_refs[head] |
| 2907 | # Paths that we know will be in both, but are safe to clobber in .repo/projects/. | 1926 | |
| 2908 | SAFE_TO_CLOBBER = { | 1927 | revid = self.GetRevisionId(all_refs) |
| 2909 | 'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'gc.log', 'gitk.cache', 'index', | 1928 | if head == revid: |
| 2910 | 'ORIG_HEAD', | 1929 | _lwrite( |
| 2911 | } | 1930 | self.work_git.GetDotgitPath(subpath=HEAD), "%s\n" % revid |
| 2912 | 1931 | ) | |
| 2913 | # First see if we'd succeed before starting the migration. | 1932 | else: |
| 2914 | unknown_paths = [] | 1933 | self._Checkout(revid, quiet=True) |
| 2915 | for name in platform_utils.listdir(dotgit): | 1934 | |
| 2916 | # Ignore all temporary/backup names. These are common with vim & emacs. | 1935 | return ( |
| 2917 | if name.endswith('~') or (name[0] == '#' and name[-1] == '#'): | 1936 | GitCommand( |
| 2918 | continue | 1937 | self, |
| 2919 | 1938 | ["branch", "-D", name], | |
| 2920 | dotgit_path = os.path.join(dotgit, name) | 1939 | capture_stdout=True, |
| 2921 | if name in KNOWN_LINKS: | 1940 | capture_stderr=True, |
| 2922 | if not platform_utils.islink(dotgit_path): | 1941 | ).Wait() |
| 2923 | unknown_paths.append(f'{dotgit_path}: should be a symlink') | 1942 | == 0 |
| 2924 | else: | 1943 | ) |
| 2925 | gitdir_path = os.path.join(gitdir, name) | ||
| 2926 | if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path): | ||
| 2927 | unknown_paths.append(f'{dotgit_path}: unknown file; please file a bug') | ||
| 2928 | if unknown_paths: | ||
| 2929 | raise GitError('Aborting migration: ' + '\n'.join(unknown_paths)) | ||
| 2930 | |||
| 2931 | # Now walk the paths and sync the .git/ to .repo/projects/. | ||
| 2932 | for name in platform_utils.listdir(dotgit): | ||
| 2933 | dotgit_path = os.path.join(dotgit, name) | ||
| 2934 | |||
| 2935 | # Ignore all temporary/backup names. These are common with vim & emacs. | ||
| 2936 | if name.endswith('~') or (name[0] == '#' and name[-1] == '#'): | ||
| 2937 | platform_utils.remove(dotgit_path) | ||
| 2938 | elif name in KNOWN_LINKS: | ||
| 2939 | platform_utils.remove(dotgit_path) | ||
| 2940 | else: | ||
| 2941 | gitdir_path = os.path.join(gitdir, name) | ||
| 2942 | platform_utils.remove(gitdir_path, missing_ok=True) | ||
| 2943 | platform_utils.rename(dotgit_path, gitdir_path) | ||
| 2944 | |||
| 2945 | # Now that the dir should be empty, clear it out, and symlink it over. | ||
| 2946 | platform_utils.rmdir(dotgit) | ||
| 2947 | platform_utils.symlink(os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit) | ||
| 2948 | |||
| 2949 | def _get_symlink_error_message(self): | ||
| 2950 | if platform_utils.isWindows(): | ||
| 2951 | return ('Unable to create symbolic link. Please re-run the command as ' | ||
| 2952 | 'Administrator, or see ' | ||
| 2953 | 'https://github.com/git-for-windows/git/wiki/Symbolic-Links ' | ||
| 2954 | 'for other options.') | ||
| 2955 | return 'filesystem must support symlinks' | ||
| 2956 | |||
| 2957 | def _revlist(self, *args, **kw): | ||
| 2958 | a = [] | ||
| 2959 | a.extend(args) | ||
| 2960 | a.append('--') | ||
| 2961 | return self.work_git.rev_list(*a, **kw) | ||
| 2962 | |||
| 2963 | @property | ||
| 2964 | def _allrefs(self): | ||
| 2965 | return self.bare_ref.all | ||
| 2966 | |||
| 2967 | def _getLogs(self, rev1, rev2, oneline=False, color=True, pretty_format=None): | ||
| 2968 | """Get logs between two revisions of this project.""" | ||
| 2969 | comp = '..' | ||
| 2970 | if rev1: | ||
| 2971 | revs = [rev1] | ||
| 2972 | if rev2: | ||
| 2973 | revs.extend([comp, rev2]) | ||
| 2974 | cmd = ['log', ''.join(revs)] | ||
| 2975 | out = DiffColoring(self.config) | ||
| 2976 | if out.is_on and color: | ||
| 2977 | cmd.append('--color') | ||
| 2978 | if pretty_format is not None: | ||
| 2979 | cmd.append('--pretty=format:%s' % pretty_format) | ||
| 2980 | if oneline: | ||
| 2981 | cmd.append('--oneline') | ||
| 2982 | |||
| 2983 | try: | ||
| 2984 | log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True) | ||
| 2985 | if log.Wait() == 0: | ||
| 2986 | return log.stdout | ||
| 2987 | except GitError: | ||
| 2988 | # The worktree may not exist (e.g. if groups changed). In that case, | ||
| 2989 | # try in the gitdir instead. | ||
| 2990 | if not os.path.exists(self.worktree): | ||
| 2991 | return self.bare_git.log(*cmd[1:]) | ||
| 2992 | else: | ||
| 2993 | raise | ||
| 2994 | return None | ||
| 2995 | |||
| 2996 | def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True, | ||
| 2997 | pretty_format=None): | ||
| 2998 | """Get the list of logs from this revision to given revisionId""" | ||
| 2999 | logs = {} | ||
| 3000 | selfId = self.GetRevisionId(self._allrefs) | ||
| 3001 | toId = toProject.GetRevisionId(toProject._allrefs) | ||
| 3002 | |||
| 3003 | logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color, | ||
| 3004 | pretty_format=pretty_format) | ||
| 3005 | logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color, | ||
| 3006 | pretty_format=pretty_format) | ||
| 3007 | return logs | ||
| 3008 | |||
| 3009 | class _GitGetByExec(object): | ||
| 3010 | |||
| 3011 | def __init__(self, project, bare, gitdir): | ||
| 3012 | self._project = project | ||
| 3013 | self._bare = bare | ||
| 3014 | self._gitdir = gitdir | ||
| 3015 | |||
| 3016 | # __getstate__ and __setstate__ are required for pickling because __getattr__ exists. | ||
| 3017 | def __getstate__(self): | ||
| 3018 | return (self._project, self._bare, self._gitdir) | ||
| 3019 | |||
| 3020 | def __setstate__(self, state): | ||
| 3021 | self._project, self._bare, self._gitdir = state | ||
| 3022 | |||
| 3023 | def LsOthers(self): | ||
| 3024 | p = GitCommand(self._project, | ||
| 3025 | ['ls-files', | ||
| 3026 | '-z', | ||
| 3027 | '--others', | ||
| 3028 | '--exclude-standard'], | ||
| 3029 | bare=False, | ||
| 3030 | gitdir=self._gitdir, | ||
| 3031 | capture_stdout=True, | ||
| 3032 | capture_stderr=True) | ||
| 3033 | if p.Wait() == 0: | ||
| 3034 | out = p.stdout | ||
| 3035 | if out: | ||
| 3036 | # Backslash is not anomalous | ||
| 3037 | return out[:-1].split('\0') | ||
| 3038 | return [] | ||
| 3039 | |||
| 3040 | def DiffZ(self, name, *args): | ||
| 3041 | cmd = [name] | ||
| 3042 | cmd.append('-z') | ||
| 3043 | cmd.append('--ignore-submodules') | ||
| 3044 | cmd.extend(args) | ||
| 3045 | p = GitCommand(self._project, | ||
| 3046 | cmd, | ||
| 3047 | gitdir=self._gitdir, | ||
| 3048 | bare=False, | ||
| 3049 | capture_stdout=True, | ||
| 3050 | capture_stderr=True) | ||
| 3051 | p.Wait() | ||
| 3052 | r = {} | ||
| 3053 | out = p.stdout | ||
| 3054 | if out: | ||
| 3055 | out = iter(out[:-1].split('\0')) | ||
| 3056 | while out: | ||
| 3057 | try: | ||
| 3058 | info = next(out) | ||
| 3059 | path = next(out) | ||
| 3060 | except StopIteration: | ||
| 3061 | break | ||
| 3062 | |||
| 3063 | class _Info(object): | ||
| 3064 | |||
| 3065 | def __init__(self, path, omode, nmode, oid, nid, state): | ||
| 3066 | self.path = path | ||
| 3067 | self.src_path = None | ||
| 3068 | self.old_mode = omode | ||
| 3069 | self.new_mode = nmode | ||
| 3070 | self.old_id = oid | ||
| 3071 | self.new_id = nid | ||
| 3072 | |||
| 3073 | if len(state) == 1: | ||
| 3074 | self.status = state | ||
| 3075 | self.level = None | ||
| 3076 | else: | ||
| 3077 | self.status = state[:1] | ||
| 3078 | self.level = state[1:] | ||
| 3079 | while self.level.startswith('0'): | ||
| 3080 | self.level = self.level[1:] | ||
| 3081 | |||
| 3082 | info = info[1:].split(' ') | ||
| 3083 | info = _Info(path, *info) | ||
| 3084 | if info.status in ('R', 'C'): | ||
| 3085 | info.src_path = info.path | ||
| 3086 | info.path = next(out) | ||
| 3087 | r[info.path] = info | ||
| 3088 | return r | ||
| 3089 | |||
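`DiffZ` above walks the NUL-separated records of a `git diff -z --raw`-style listing: each record is a metadata field starting with `:` followed by one path, or by two paths for renames and copies. A small illustration of that framing on canned output (the sample data below is made up):

```python
sample = (
    ":100644 100644 abc123 def456 M\0README.md\0"
    ":100644 100644 abc123 def456 R100\0old.py\0new.py\0"
)

fields = iter(sample[:-1].split("\0"))
for info in fields:
    omode, nmode, oid, nid, state = info[1:].split(" ")
    path = next(fields)
    if state[:1] in ("R", "C"):
        # Renames/copies carry two paths: source then destination.
        src, path = path, next(fields)
        print(f"{state[:1]} {src} -> {path}")
    else:
        print(f"{state[:1]} {path}")
```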
| 3090 | def GetDotgitPath(self, subpath=None): | ||
| 3091 | """Return the full path to the .git dir. | ||
| 3092 | |||
| 3093 | As a convenience, append |subpath| if provided. | ||
| 3094 | """ | ||
| 3095 | if self._bare: | ||
| 3096 | dotgit = self._gitdir | ||
| 3097 | else: | ||
| 3098 | dotgit = os.path.join(self._project.worktree, '.git') | ||
| 3099 | if os.path.isfile(dotgit): | ||
| 3100 | # Git worktrees use a "gitdir:" syntax to point to the scratch space. | ||
| 3101 | with open(dotgit) as fp: | ||
| 3102 | setting = fp.read() | ||
| 3103 | assert setting.startswith('gitdir:') | ||
| 3104 | gitdir = setting.split(':', 1)[1].strip() | ||
| 3105 | dotgit = os.path.normpath(os.path.join(self._project.worktree, gitdir)) | ||
| 3106 | |||
| 3107 | return dotgit if subpath is None else os.path.join(dotgit, subpath) | ||
| 3108 | |||
| 3109 | def GetHead(self): | ||
| 3110 | """Return the ref that HEAD points to.""" | ||
| 3111 | path = self.GetDotgitPath(subpath=HEAD) | ||
| 3112 | try: | ||
| 3113 | with open(path) as fd: | ||
| 3114 | line = fd.readline() | ||
| 3115 | except IOError as e: | ||
| 3116 | raise NoManifestException(path, str(e)) | ||
| 3117 | try: | ||
| 3118 | line = line.decode() | ||
| 3119 | except AttributeError: | ||
| 3120 | pass | ||
| 3121 | if line.startswith('ref: '): | ||
| 3122 | return line[5:-1] | ||
| 3123 | return line[:-1] | ||
| 3124 | |||
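`GetHead` above resolves HEAD by reading the file directly instead of spawning git: a symbolic HEAD is stored as `ref: refs/heads/<branch>`, a detached HEAD as a bare commit id. The same parse in isolation (sample values are illustrative):

```python
def parse_head(line):
    """Return the ref name or commit id from the first line of a HEAD file."""
    line = line.rstrip("\n")
    if line.startswith("ref: "):
        return line[len("ref: "):]
    return line

assert parse_head("ref: refs/heads/main\n") == "refs/heads/main"
assert parse_head("5f6803b100bb3cd0f534e96e88c91373e8ed1c44\n").startswith("5f68")
```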
| 3125 | def SetHead(self, ref, message=None): | ||
| 3126 | cmdv = [] | ||
| 3127 | if message is not None: | ||
| 3128 | cmdv.extend(['-m', message]) | ||
| 3129 | cmdv.append(HEAD) | ||
| 3130 | cmdv.append(ref) | ||
| 3131 | self.symbolic_ref(*cmdv) | ||
| 3132 | |||
| 3133 | def DetachHead(self, new, message=None): | ||
| 3134 | cmdv = ['--no-deref'] | ||
| 3135 | if message is not None: | ||
| 3136 | cmdv.extend(['-m', message]) | ||
| 3137 | cmdv.append(HEAD) | ||
| 3138 | cmdv.append(new) | ||
| 3139 | self.update_ref(*cmdv) | ||
| 3140 | |||
| 3141 | def UpdateRef(self, name, new, old=None, | ||
| 3142 | message=None, | ||
| 3143 | detach=False): | ||
| 3144 | cmdv = [] | ||
| 3145 | if message is not None: | ||
| 3146 | cmdv.extend(['-m', message]) | ||
| 3147 | if detach: | ||
| 3148 | cmdv.append('--no-deref') | ||
| 3149 | cmdv.append(name) | ||
| 3150 | cmdv.append(new) | ||
| 3151 | if old is not None: | ||
| 3152 | cmdv.append(old) | ||
| 3153 | self.update_ref(*cmdv) | ||
| 3154 | |||
| 3155 | def DeleteRef(self, name, old=None): | ||
| 3156 | if not old: | ||
| 3157 | old = self.rev_parse(name) | ||
| 3158 | self.update_ref('-d', name, old) | ||
| 3159 | self._project.bare_ref.deleted(name) | ||
| 3160 | |||
| 3161 | def rev_list(self, *args, **kw): | ||
| 3162 | if 'format' in kw: | ||
| 3163 | cmdv = ['log', '--pretty=format:%s' % kw['format']] | ||
| 3164 | else: | ||
| 3165 | cmdv = ['rev-list'] | ||
| 3166 | cmdv.extend(args) | ||
| 3167 | p = GitCommand(self._project, | ||
| 3168 | cmdv, | ||
| 3169 | bare=self._bare, | ||
| 3170 | gitdir=self._gitdir, | ||
| 3171 | capture_stdout=True, | ||
| 3172 | capture_stderr=True) | ||
| 3173 | if p.Wait() != 0: | ||
| 3174 | raise GitError('%s rev-list %s: %s' % | ||
| 3175 | (self._project.name, str(args), p.stderr)) | ||
| 3176 | return p.stdout.splitlines() | ||
| 3177 | |||
| 3178 | def __getattr__(self, name): | ||
| 3179 | """Allow arbitrary git commands using pythonic syntax. | ||
| 3180 | |||
| 3181 | This allows you to do things like: | ||
| 3182 | git_obj.rev_parse('HEAD') | ||
| 3183 | |||
| 3184 | Since we don't have a 'rev_parse' method defined, the __getattr__ will | ||
| 3185 | run. We'll replace the '_' with a '-' and try to run a git command. | ||
| 3186 | Any other positional arguments will be passed to the git command, and the | ||
| 3187 | following keyword arguments are supported: | ||
| 3188 | config: An optional dict of git config options to be passed with '-c'. | ||
| 3189 | |||
| 3190 | Args: | ||
| 3191 | name: The name of the git command to call. Any '_' characters will | ||
| 3192 | be replaced with '-'. | ||
| 3193 | |||
| 3194 | Returns: | ||
| 3195 | A callable object that will try to call git with the named command. | ||
| 3196 | """ | ||
| 3197 | name = name.replace('_', '-') | ||
| 3198 | |||
| 3199 | def runner(*args, **kwargs): | ||
| 3200 | cmdv = [] | ||
| 3201 | config = kwargs.pop('config', None) | ||
| 3202 | for k in kwargs: | ||
| 3203 | raise TypeError('%s() got an unexpected keyword argument %r' | ||
| 3204 | % (name, k)) | ||
| 3205 | if config is not None: | ||
| 3206 | for k, v in config.items(): | ||
| 3207 | cmdv.append('-c') | ||
| 3208 | cmdv.append('%s=%s' % (k, v)) | ||
| 3209 | cmdv.append(name) | ||
| 3210 | cmdv.extend(args) | ||
| 3211 | p = GitCommand(self._project, | ||
| 3212 | cmdv, | ||
| 3213 | bare=self._bare, | ||
| 3214 | gitdir=self._gitdir, | ||
| 3215 | capture_stdout=True, | ||
| 3216 | capture_stderr=True) | ||
| 3217 | if p.Wait() != 0: | ||
| 3218 | raise GitError('%s %s: %s' % | ||
| 3219 | (self._project.name, name, p.stderr)) | ||
| 3220 | r = p.stdout | ||
| 3221 | if r.endswith('\n') and r.index('\n') == len(r) - 1: | ||
| 3222 | return r[:-1] | ||
| 3223 | return r | ||
| 3224 | return runner | ||
| 3225 | 1944 | ||
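The `__getattr__` hook above turns any unknown attribute into a git subcommand: underscores become dashes, positional arguments are passed through, and an optional `config` dict is expanded into `-c key=value` flags. A stripped-down sketch of the same pattern that shells out directly rather than going through `GitCommand` (the class and repo path here are hypothetical):

```python
import subprocess

class Git:
    def __init__(self, cwd):
        self._cwd = cwd

    def __getattr__(self, name):
        cmd_name = name.replace("_", "-")

        def runner(*args, config=None):
            cmdv = ["git"]
            for k, v in (config or {}).items():
                cmdv += ["-c", f"{k}={v}"]
            cmdv.append(cmd_name)
            cmdv.extend(args)
            out = subprocess.run(cmdv, cwd=self._cwd, check=True,
                                 capture_output=True, text=True).stdout
            # Match the wrapper above: strip a single trailing newline.
            return out[:-1] if out.endswith("\n") and out.count("\n") == 1 else out

        return runner

# git = Git("/path/to/repo")
# git.rev_parse("HEAD")                        # runs: git rev-parse HEAD
# git.log("-1", config={"core.pager": "cat"})  # runs: git -c core.pager=cat log -1
```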
| 1945 | def PruneHeads(self): | ||
| 1946 | """Prune any topic branches already merged into upstream.""" | ||
| 1947 | cb = self.CurrentBranch | ||
| 1948 | kill = [] | ||
| 1949 | left = self._allrefs | ||
| 1950 | for name in left.keys(): | ||
| 1951 | if name.startswith(R_HEADS): | ||
| 1952 | name = name[len(R_HEADS) :] | ||
| 1953 | if cb is None or name != cb: | ||
| 1954 | kill.append(name) | ||
| 1955 | |||
| 1956 | # Minor optimization: If there's nothing to prune, then don't try to | ||
| 1957 | # read any project state. | ||
| 1958 | if not kill and not cb: | ||
| 1959 | return [] | ||
| 1960 | |||
| 1961 | rev = self.GetRevisionId(left) | ||
| 1962 | if ( | ||
| 1963 | cb is not None | ||
| 1964 | and not self._revlist(HEAD + "..." + rev) | ||
| 1965 | and not self.IsDirty(consider_untracked=False) | ||
| 1966 | ): | ||
| 1967 | self.work_git.DetachHead(HEAD) | ||
| 1968 | kill.append(cb) | ||
| 1969 | |||
| 1970 | if kill: | ||
| 1971 | old = self.bare_git.GetHead() | ||
| 3226 | 1972 | ||
| 3227 | class _PriorSyncFailedError(Exception): | 1973 | try: |
| 1974 | self.bare_git.DetachHead(rev) | ||
| 1975 | |||
| 1976 | b = ["branch", "-d"] | ||
| 1977 | b.extend(kill) | ||
| 1978 | b = GitCommand( | ||
| 1979 | self, b, bare=True, capture_stdout=True, capture_stderr=True | ||
| 1980 | ) | ||
| 1981 | b.Wait() | ||
| 1982 | finally: | ||
| 1983 | if ID_RE.match(old): | ||
| 1984 | self.bare_git.DetachHead(old) | ||
| 1985 | else: | ||
| 1986 | self.bare_git.SetHead(old) | ||
| 1987 | left = self._allrefs | ||
| 1988 | |||
| 1989 | for branch in kill: | ||
| 1990 | if (R_HEADS + branch) not in left: | ||
| 1991 | self.CleanPublishedCache() | ||
| 1992 | break | ||
| 1993 | |||
| 1994 | if cb and cb not in kill: | ||
| 1995 | kill.append(cb) | ||
| 1996 | kill.sort() | ||
| 1997 | |||
| 1998 | kept = [] | ||
| 1999 | for branch in kill: | ||
| 2000 | if R_HEADS + branch in left: | ||
| 2001 | branch = self.GetBranch(branch) | ||
| 2002 | base = branch.LocalMerge | ||
| 2003 | if not base: | ||
| 2004 | base = rev | ||
| 2005 | kept.append(ReviewableBranch(self, branch, base)) | ||
| 2006 | return kept | ||
| 2007 | |||
| 2008 | def GetRegisteredSubprojects(self): | ||
| 2009 | result = [] | ||
| 2010 | |||
| 2011 | def rec(subprojects): | ||
| 2012 | if not subprojects: | ||
| 2013 | return | ||
| 2014 | result.extend(subprojects) | ||
| 2015 | for p in subprojects: | ||
| 2016 | rec(p.subprojects) | ||
| 2017 | |||
| 2018 | rec(self.subprojects) | ||
| 2019 | return result | ||
| 2020 | |||
| 2021 | def _GetSubmodules(self): | ||
| 2022 | # Unfortunately we cannot call `git submodule status --recursive` here | ||
| 2023 | # because the working tree might not exist yet, and it cannot be used | ||
| 2024 | # without a working tree in its current implementation. | ||
| 2025 | |||
| 2026 | def get_submodules(gitdir, rev): | ||
| 2027 | # Parse .gitmodules for submodule sub_paths and sub_urls. | ||
| 2028 | sub_paths, sub_urls = parse_gitmodules(gitdir, rev) | ||
| 2029 | if not sub_paths: | ||
| 2030 | return [] | ||
| 2031 | # Run `git ls-tree` to read the SHAs of the submodule objects, which | ||
| 2032 | # happen to be the revisions of the submodule repositories. | ||
| 2033 | sub_revs = git_ls_tree(gitdir, rev, sub_paths) | ||
| 2034 | submodules = [] | ||
| 2035 | for sub_path, sub_url in zip(sub_paths, sub_urls): | ||
| 2036 | try: | ||
| 2037 | sub_rev = sub_revs[sub_path] | ||
| 2038 | except KeyError: | ||
| 2039 | # Ignore nonexistent submodules. | ||
| 2040 | continue | ||
| 2041 | submodules.append((sub_rev, sub_path, sub_url)) | ||
| 2042 | return submodules | ||
| 2043 | |||
| 2044 | re_path = re.compile(r"^submodule\.(.+)\.path=(.*)$") | ||
| 2045 | re_url = re.compile(r"^submodule\.(.+)\.url=(.*)$") | ||
| 2046 | |||
| 2047 | def parse_gitmodules(gitdir, rev): | ||
| 2048 | cmd = ["cat-file", "blob", "%s:.gitmodules" % rev] | ||
| 2049 | try: | ||
| 2050 | p = GitCommand( | ||
| 2051 | None, | ||
| 2052 | cmd, | ||
| 2053 | capture_stdout=True, | ||
| 2054 | capture_stderr=True, | ||
| 2055 | bare=True, | ||
| 2056 | gitdir=gitdir, | ||
| 2057 | ) | ||
| 2058 | except GitError: | ||
| 2059 | return [], [] | ||
| 2060 | if p.Wait() != 0: | ||
| 2061 | return [], [] | ||
| 2062 | |||
| 2063 | gitmodules_lines = [] | ||
| 2064 | fd, temp_gitmodules_path = tempfile.mkstemp() | ||
| 2065 | try: | ||
| 2066 | os.write(fd, p.stdout.encode("utf-8")) | ||
| 2067 | os.close(fd) | ||
| 2068 | cmd = ["config", "--file", temp_gitmodules_path, "--list"] | ||
| 2069 | p = GitCommand( | ||
| 2070 | None, | ||
| 2071 | cmd, | ||
| 2072 | capture_stdout=True, | ||
| 2073 | capture_stderr=True, | ||
| 2074 | bare=True, | ||
| 2075 | gitdir=gitdir, | ||
| 2076 | ) | ||
| 2077 | if p.Wait() != 0: | ||
| 2078 | return [], [] | ||
| 2079 | gitmodules_lines = p.stdout.split("\n") | ||
| 2080 | except GitError: | ||
| 2081 | return [], [] | ||
| 2082 | finally: | ||
| 2083 | platform_utils.remove(temp_gitmodules_path) | ||
| 2084 | |||
| 2085 | names = set() | ||
| 2086 | paths = {} | ||
| 2087 | urls = {} | ||
| 2088 | for line in gitmodules_lines: | ||
| 2089 | if not line: | ||
| 2090 | continue | ||
| 2091 | m = re_path.match(line) | ||
| 2092 | if m: | ||
| 2093 | names.add(m.group(1)) | ||
| 2094 | paths[m.group(1)] = m.group(2) | ||
| 2095 | continue | ||
| 2096 | m = re_url.match(line) | ||
| 2097 | if m: | ||
| 2098 | names.add(m.group(1)) | ||
| 2099 | urls[m.group(1)] = m.group(2) | ||
| 2100 | continue | ||
| 2101 | names = sorted(names) | ||
| 2102 | return ( | ||
| 2103 | [paths.get(name, "") for name in names], | ||
| 2104 | [urls.get(name, "") for name in names], | ||
| 2105 | ) | ||
| 2106 | |||
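`parse_gitmodules` above feeds the `.gitmodules` blob through `git config --file ... --list` and then pairs up `submodule.<name>.path` and `submodule.<name>.url` entries with the two regexes. The pairing step can be exercised on canned output (the sample lines are illustrative, not from a real repository):

```python
import re

re_path = re.compile(r"^submodule\.(.+)\.path=(.*)$")
re_url = re.compile(r"^submodule\.(.+)\.url=(.*)$")

lines = [
    "submodule.third_party/foo.path=third_party/foo",
    "submodule.third_party/foo.url=https://example.com/foo.git",
]

paths, urls = {}, {}
for line in lines:
    m = re_path.match(line)
    if m:
        paths[m.group(1)] = m.group(2)
        continue
    m = re_url.match(line)
    if m:
        urls[m.group(1)] = m.group(2)

for name in sorted(set(paths) | set(urls)):
    print(name, paths.get(name, ""), urls.get(name, ""))
```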
| 2107 | def git_ls_tree(gitdir, rev, paths): | ||
| 2108 | cmd = ["ls-tree", rev, "--"] | ||
| 2109 | cmd.extend(paths) | ||
| 2110 | try: | ||
| 2111 | p = GitCommand( | ||
| 2112 | None, | ||
| 2113 | cmd, | ||
| 2114 | capture_stdout=True, | ||
| 2115 | capture_stderr=True, | ||
| 2116 | bare=True, | ||
| 2117 | gitdir=gitdir, | ||
| 2118 | ) | ||
| 2119 | except GitError: | ||
| 2120 | return [] | ||
| 2121 | if p.Wait() != 0: | ||
| 2122 | return [] | ||
| 2123 | objects = {} | ||
| 2124 | for line in p.stdout.split("\n"): | ||
| 2125 | if not line.strip(): | ||
| 2126 | continue | ||
| 2127 | object_rev, object_path = line.split()[2:4] | ||
| 2128 | objects[object_path] = object_rev | ||
| 2129 | return objects | ||
| 3228 | 2130 | ||
| 3229 | def __str__(self): | 2131 | try: |
| 3230 | return 'prior sync failed; rebase still in progress' | 2132 | rev = self.GetRevisionId() |
| 2133 | except GitError: | ||
| 2134 | return [] | ||
| 2135 | return get_submodules(self.gitdir, rev) | ||
| 2136 | |||
| 2137 | def GetDerivedSubprojects(self): | ||
| 2138 | result = [] | ||
| 2139 | if not self.Exists: | ||
| 2140 | # If the git repo does not exist yet, querying its submodules will | ||
| 2141 | # mess up its state, so return here. | ||
| 2142 | return result | ||
| 2143 | for rev, path, url in self._GetSubmodules(): | ||
| 2144 | name = self.manifest.GetSubprojectName(self, path) | ||
| 2145 | ( | ||
| 2146 | relpath, | ||
| 2147 | worktree, | ||
| 2148 | gitdir, | ||
| 2149 | objdir, | ||
| 2150 | ) = self.manifest.GetSubprojectPaths(self, name, path) | ||
| 2151 | project = self.manifest.paths.get(relpath) | ||
| 2152 | if project: | ||
| 2153 | result.extend(project.GetDerivedSubprojects()) | ||
| 2154 | continue | ||
| 2155 | |||
| 2156 | if url.startswith(".."): | ||
| 2157 | url = urllib.parse.urljoin("%s/" % self.remote.url, url) | ||
| 2158 | remote = RemoteSpec( | ||
| 2159 | self.remote.name, | ||
| 2160 | url=url, | ||
| 2161 | pushUrl=self.remote.pushUrl, | ||
| 2162 | review=self.remote.review, | ||
| 2163 | revision=self.remote.revision, | ||
| 2164 | ) | ||
| 2165 | subproject = Project( | ||
| 2166 | manifest=self.manifest, | ||
| 2167 | name=name, | ||
| 2168 | remote=remote, | ||
| 2169 | gitdir=gitdir, | ||
| 2170 | objdir=objdir, | ||
| 2171 | worktree=worktree, | ||
| 2172 | relpath=relpath, | ||
| 2173 | revisionExpr=rev, | ||
| 2174 | revisionId=rev, | ||
| 2175 | rebase=self.rebase, | ||
| 2176 | groups=self.groups, | ||
| 2177 | sync_c=self.sync_c, | ||
| 2178 | sync_s=self.sync_s, | ||
| 2179 | sync_tags=self.sync_tags, | ||
| 2180 | parent=self, | ||
| 2181 | is_derived=True, | ||
| 2182 | ) | ||
| 2183 | result.append(subproject) | ||
| 2184 | result.extend(subproject.GetDerivedSubprojects()) | ||
| 2185 | return result | ||
| 2186 | |||
| 2187 | def EnableRepositoryExtension(self, key, value="true", version=1): | ||
| 2188 | """Enable git repository extension |key| with |value|. | ||
| 3231 | 2189 | ||
| 2190 | Args: | ||
| 2191 | key: The extension to enable. Omit the "extensions." prefix. | ||
| 2192 | value: The value to use for the extension. | ||
| 2193 | version: The minimum git repository version needed. | ||
| 2194 | """ | ||
| 2195 | # Make sure the git repo version is new enough already. | ||
| 2196 | found_version = self.config.GetInt("core.repositoryFormatVersion") | ||
| 2197 | if found_version is None: | ||
| 2198 | found_version = 0 | ||
| 2199 | if found_version < version: | ||
| 2200 | self.config.SetString("core.repositoryFormatVersion", str(version)) | ||
| 2201 | |||
| 2202 | # Enable the extension! | ||
| 2203 | self.config.SetString("extensions.%s" % (key,), value) | ||
| 2204 | |||
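`EnableRepositoryExtension` above only ever raises `core.repositoryFormatVersion` (it never lowers it) before setting the `extensions.*` key, so the extension is honored even by git versions that gate `extensions.*` on the format version. Expressed with plain `git config` calls instead of the repo's `GitConfig` wrapper, the effect is roughly this sketch:

```python
import subprocess

def enable_extension(gitdir, key, value="true", version=1):
    """Bump repositoryFormatVersion if needed, then set extensions.<key>."""
    def get(name):
        p = subprocess.run(["git", "--git-dir", gitdir, "config", "--get", name],
                           capture_output=True, text=True)
        return p.stdout.strip() or None

    def set_(name, val):
        subprocess.run(["git", "--git-dir", gitdir, "config", name, val], check=True)

    current = int(get("core.repositoryFormatVersion") or 0)
    if current < version:
        set_("core.repositoryFormatVersion", str(version))
    set_(f"extensions.{key}", value)

# e.g. enable_extension("/path/to/proj.git", "worktreeConfig"), mirroring the
# EnableRepositoryExtension("worktreeConfig") call site later in this file.
```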
| 2205 | def ResolveRemoteHead(self, name=None): | ||
| 2206 | """Find out what the default branch (HEAD) points to. | ||
| 2207 | |||
| 2208 | Normally this points to refs/heads/master, but projects are moving to | ||
| 2209 | main. Support whatever the server uses rather than hardcoding "master" | ||
| 2210 | ourselves. | ||
| 2211 | """ | ||
| 2212 | if name is None: | ||
| 2213 | name = self.remote.name | ||
| 2214 | |||
| 2215 | # The output will look like (NB: tabs are separators): | ||
| 2216 | # ref: refs/heads/master HEAD | ||
| 2217 | # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD | ||
| 2218 | output = self.bare_git.ls_remote( | ||
| 2219 | "-q", "--symref", "--exit-code", name, "HEAD" | ||
| 2220 | ) | ||
| 3232 | 2221 | ||
| 3233 | class _DirtyError(Exception): | 2222 | for line in output.splitlines(): |
| 2223 | lhs, rhs = line.split("\t", 1) | ||
| 2224 | if rhs == "HEAD" and lhs.startswith("ref:"): | ||
| 2225 | return lhs[4:].strip() | ||
| 3234 | 2226 | ||
| 3235 | def __str__(self): | 2227 | return None |
| 3236 | return 'contains uncommitted changes' | ||
| 3237 | 2228 | ||
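`ResolveRemoteHead` above parses the tab-separated output of `git ls-remote --symref <remote> HEAD`, keeping the `ref: ...` entry that names the remote's default branch. The parse on its own, using canned output that mirrors the comment above (not live data):

```python
output = (
    "ref: refs/heads/main\tHEAD\n"
    "5f6803b100bb3cd0f534e96e88c91373e8ed1c44\tHEAD\n"
)

default_branch = None
for line in output.splitlines():
    lhs, rhs = line.split("\t", 1)
    if rhs == "HEAD" and lhs.startswith("ref:"):
        default_branch = lhs[len("ref:"):].strip()
        break

print(default_branch)  # refs/heads/main
```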
| 2229 | def _CheckForImmutableRevision(self): | ||
| 2230 | try: | ||
| 2231 | # If the revision (sha or tag) is not present, then the following | ||
| 2232 | # function throws an error. | ||
| 2233 | self.bare_git.rev_list( | ||
| 2234 | "-1", "--missing=allow-any", "%s^0" % self.revisionExpr, "--" | ||
| 2235 | ) | ||
| 2236 | if self.upstream: | ||
| 2237 | rev = self.GetRemote().ToLocal(self.upstream) | ||
| 2238 | self.bare_git.rev_list( | ||
| 2239 | "-1", "--missing=allow-any", "%s^0" % rev, "--" | ||
| 2240 | ) | ||
| 2241 | self.bare_git.merge_base( | ||
| 2242 | "--is-ancestor", self.revisionExpr, rev | ||
| 2243 | ) | ||
| 2244 | return True | ||
| 2245 | except GitError: | ||
| 2246 | # There is no such persistent revision. We have to fetch it. | ||
| 2247 | return False | ||
| 3238 | 2248 | ||
| 3239 | class _InfoMessage(object): | 2249 | def _FetchArchive(self, tarpath, cwd=None): |
| 2250 | cmd = ["archive", "-v", "-o", tarpath] | ||
| 2251 | cmd.append("--remote=%s" % self.remote.url) | ||
| 2252 | cmd.append("--prefix=%s/" % self.RelPath(local=False)) | ||
| 2253 | cmd.append(self.revisionExpr) | ||
| 3240 | 2254 | ||
| 3241 | def __init__(self, project, text): | 2255 | command = GitCommand( |
| 3242 | self.project = project | 2256 | self, cmd, cwd=cwd, capture_stdout=True, capture_stderr=True |
| 3243 | self.text = text | 2257 | ) |
| 3244 | 2258 | ||
| 3245 | def Print(self, syncbuf): | 2259 | if command.Wait() != 0: |
| 3246 | syncbuf.out.info('%s/: %s', self.project.RelPath(local=False), self.text) | 2260 | raise GitError("git archive %s: %s" % (self.name, command.stderr)) |
| 3247 | syncbuf.out.nl() | 2261 | |
| 2262 | def _RemoteFetch( | ||
| 2263 | self, | ||
| 2264 | name=None, | ||
| 2265 | current_branch_only=False, | ||
| 2266 | initial=False, | ||
| 2267 | quiet=False, | ||
| 2268 | verbose=False, | ||
| 2269 | output_redir=None, | ||
| 2270 | alt_dir=None, | ||
| 2271 | tags=True, | ||
| 2272 | prune=False, | ||
| 2273 | depth=None, | ||
| 2274 | submodules=False, | ||
| 2275 | ssh_proxy=None, | ||
| 2276 | force_sync=False, | ||
| 2277 | clone_filter=None, | ||
| 2278 | retry_fetches=2, | ||
| 2279 | retry_sleep_initial_sec=4.0, | ||
| 2280 | retry_exp_factor=2.0, | ||
| 2281 | ): | ||
| 2282 | is_sha1 = False | ||
| 2283 | tag_name = None | ||
| 2284 | # The depth should not be used when fetching to a mirror because | ||
| 2285 | # it will result in a shallow repository that cannot be cloned or | ||
| 2286 | # fetched from. | ||
| 2287 | # The repo project should also never be synced with partial depth. | ||
| 2288 | if self.manifest.IsMirror or self.relpath == ".repo/repo": | ||
| 2289 | depth = None | ||
| 2290 | |||
| 2291 | if depth: | ||
| 2292 | current_branch_only = True | ||
| 2293 | |||
| 2294 | if ID_RE.match(self.revisionExpr) is not None: | ||
| 2295 | is_sha1 = True | ||
| 2296 | |||
| 2297 | if current_branch_only: | ||
| 2298 | if self.revisionExpr.startswith(R_TAGS): | ||
| 2299 | # This is a tag and its commit id should never change. | ||
| 2300 | tag_name = self.revisionExpr[len(R_TAGS) :] | ||
| 2301 | elif self.upstream and self.upstream.startswith(R_TAGS): | ||
| 2302 | # This is a tag and its commit id should never change. | ||
| 2303 | tag_name = self.upstream[len(R_TAGS) :] | ||
| 2304 | |||
| 2305 | if is_sha1 or tag_name is not None: | ||
| 2306 | if self._CheckForImmutableRevision(): | ||
| 2307 | if verbose: | ||
| 2308 | print( | ||
| 2309 | "Skipped fetching project %s (already have " | ||
| 2310 | "persistent ref)" % self.name | ||
| 2311 | ) | ||
| 2312 | return True | ||
| 2313 | if is_sha1 and not depth: | ||
| 2314 | # When syncing a specific commit and --depth is not set: | ||
| 2315 | # * if upstream is explicitly specified and is not a sha1, fetch | ||
| 2316 | # only upstream as users expect only upstream to be fetched. | ||
| 2317 | # Note: The commit might not be in upstream in which case the | ||
| 2318 | # sync will fail. | ||
| 2319 | # * otherwise, fetch all branches to make sure we end up with | ||
| 2320 | # the specific commit. | ||
| 2321 | if self.upstream: | ||
| 2322 | current_branch_only = not ID_RE.match(self.upstream) | ||
| 2323 | else: | ||
| 2324 | current_branch_only = False | ||
| 2325 | |||
| 2326 | if not name: | ||
| 2327 | name = self.remote.name | ||
| 2328 | |||
| 2329 | remote = self.GetRemote(name) | ||
| 2330 | if not remote.PreConnectFetch(ssh_proxy): | ||
| 2331 | ssh_proxy = None | ||
| 2332 | |||
| 2333 | if initial: | ||
| 2334 | if alt_dir and "objects" == os.path.basename(alt_dir): | ||
| 2335 | ref_dir = os.path.dirname(alt_dir) | ||
| 2336 | packed_refs = os.path.join(self.gitdir, "packed-refs") | ||
| 2337 | |||
| 2338 | all_refs = self.bare_ref.all | ||
| 2339 | ids = set(all_refs.values()) | ||
| 2340 | tmp = set() | ||
| 2341 | |||
| 2342 | for r, ref_id in GitRefs(ref_dir).all.items(): | ||
| 2343 | if r not in all_refs: | ||
| 2344 | if r.startswith(R_TAGS) or remote.WritesTo(r): | ||
| 2345 | all_refs[r] = ref_id | ||
| 2346 | ids.add(ref_id) | ||
| 2347 | continue | ||
| 2348 | |||
| 2349 | if ref_id in ids: | ||
| 2350 | continue | ||
| 2351 | |||
| 2352 | r = "refs/_alt/%s" % ref_id | ||
| 2353 | all_refs[r] = ref_id | ||
| 2354 | ids.add(ref_id) | ||
| 2355 | tmp.add(r) | ||
| 2356 | |||
| 2357 | tmp_packed_lines = [] | ||
| 2358 | old_packed_lines = [] | ||
| 2359 | |||
| 2360 | for r in sorted(all_refs): | ||
| 2361 | line = "%s %s\n" % (all_refs[r], r) | ||
| 2362 | tmp_packed_lines.append(line) | ||
| 2363 | if r not in tmp: | ||
| 2364 | old_packed_lines.append(line) | ||
| 2365 | |||
| 2366 | tmp_packed = "".join(tmp_packed_lines) | ||
| 2367 | old_packed = "".join(old_packed_lines) | ||
| 2368 | _lwrite(packed_refs, tmp_packed) | ||
| 2369 | else: | ||
| 2370 | alt_dir = None | ||
| 2371 | |||
| 2372 | cmd = ["fetch"] | ||
| 2373 | |||
| 2374 | if clone_filter: | ||
| 2375 | git_require((2, 19, 0), fail=True, msg="partial clones") | ||
| 2376 | cmd.append("--filter=%s" % clone_filter) | ||
| 2377 | self.EnableRepositoryExtension("partialclone", self.remote.name) | ||
| 2378 | |||
| 2379 | if depth: | ||
| 2380 | cmd.append("--depth=%s" % depth) | ||
| 2381 | else: | ||
| 2382 | # If this repo has shallow objects, then we don't know which refs | ||
| 2383 | # have shallow objects or not. Tell git to unshallow all fetched | ||
| 2384 | # refs. Don't do this with projects that don't have shallow | ||
| 2385 | # objects, since it is less efficient. | ||
| 2386 | if os.path.exists(os.path.join(self.gitdir, "shallow")): | ||
| 2387 | cmd.append("--depth=2147483647") | ||
| 2388 | |||
| 2389 | if not verbose: | ||
| 2390 | cmd.append("--quiet") | ||
| 2391 | if not quiet and sys.stdout.isatty(): | ||
| 2392 | cmd.append("--progress") | ||
| 2393 | if not self.worktree: | ||
| 2394 | cmd.append("--update-head-ok") | ||
| 2395 | cmd.append(name) | ||
| 2396 | |||
| 2397 | if force_sync: | ||
| 2398 | cmd.append("--force") | ||
| 2399 | |||
| 2400 | if prune: | ||
| 2401 | cmd.append("--prune") | ||
| 2402 | |||
| 2403 | # Always pass something for --recurse-submodules; git with GIT_DIR | ||
| 2404 | # behaves incorrectly when not given `--recurse-submodules=no`. | ||
| 2405 | # (b/218891912) | ||
| 2406 | cmd.append( | ||
| 2407 | f'--recurse-submodules={"on-demand" if submodules else "no"}' | ||
| 2408 | ) | ||
| 3248 | 2409 | ||
| 2410 | spec = [] | ||
| 2411 | if not current_branch_only: | ||
| 2412 | # Fetch whole repo. | ||
| 2413 | spec.append( | ||
| 2414 | str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*")) | ||
| 2415 | ) | ||
| 2416 | elif tag_name is not None: | ||
| 2417 | spec.append("tag") | ||
| 2418 | spec.append(tag_name) | ||
| 2419 | |||
| 2420 | if self.manifest.IsMirror and not current_branch_only: | ||
| 2421 | branch = None | ||
| 2422 | else: | ||
| 2423 | branch = self.revisionExpr | ||
| 2424 | if ( | ||
| 2425 | not self.manifest.IsMirror | ||
| 2426 | and is_sha1 | ||
| 2427 | and depth | ||
| 2428 | and git_require((1, 8, 3)) | ||
| 2429 | ): | ||
| 2430 | # Shallow checkout of a specific commit, fetch from that commit and | ||
| 2431 | # not the heads only as the commit might be deeper in the history. | ||
| 2432 | spec.append(branch) | ||
| 2433 | if self.upstream: | ||
| 2434 | spec.append(self.upstream) | ||
| 2435 | else: | ||
| 2436 | if is_sha1: | ||
| 2437 | branch = self.upstream | ||
| 2438 | if branch is not None and branch.strip(): | ||
| 2439 | if not branch.startswith("refs/"): | ||
| 2440 | branch = R_HEADS + branch | ||
| 2441 | spec.append(str(("+%s:" % branch) + remote.ToLocal(branch))) | ||
| 2442 | |||
| 2443 | # If mirroring repo and we cannot deduce the tag or branch to fetch, | ||
| 2444 | # fetch whole repo. | ||
| 2445 | if self.manifest.IsMirror and not spec: | ||
| 2446 | spec.append( | ||
| 2447 | str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*")) | ||
| 2448 | ) | ||
| 2449 | |||
| 2450 | # If using depth, then we should not get all the tags since they may | ||
| 2451 | # be outside of the depth. | ||
| 2452 | if not tags or depth: | ||
| 2453 | cmd.append("--no-tags") | ||
| 2454 | else: | ||
| 2455 | cmd.append("--tags") | ||
| 2456 | spec.append(str(("+refs/tags/*:") + remote.ToLocal("refs/tags/*"))) | ||
| 2457 | |||
| 2458 | cmd.extend(spec) | ||
| 2459 | |||
| 2460 | # At least one retry minimum due to git remote prune. | ||
| 2461 | retry_fetches = max(retry_fetches, 2) | ||
| 2462 | retry_cur_sleep = retry_sleep_initial_sec | ||
| 2463 | ok = prune_tried = False | ||
| 2464 | for try_n in range(retry_fetches): | ||
| 2465 | gitcmd = GitCommand( | ||
| 2466 | self, | ||
| 2467 | cmd, | ||
| 2468 | bare=True, | ||
| 2469 | objdir=os.path.join(self.objdir, "objects"), | ||
| 2470 | ssh_proxy=ssh_proxy, | ||
| 2471 | merge_output=True, | ||
| 2472 | capture_stdout=quiet or bool(output_redir), | ||
| 2473 | ) | ||
| 2474 | if gitcmd.stdout and not quiet and output_redir: | ||
| 2475 | output_redir.write(gitcmd.stdout) | ||
| 2476 | ret = gitcmd.Wait() | ||
| 2477 | if ret == 0: | ||
| 2478 | ok = True | ||
| 2479 | break | ||
| 2480 | |||
| 2481 | # Retry later due to HTTP 429 Too Many Requests. | ||
| 2482 | elif ( | ||
| 2483 | gitcmd.stdout | ||
| 2484 | and "error:" in gitcmd.stdout | ||
| 2485 | and "HTTP 429" in gitcmd.stdout | ||
| 2486 | ): | ||
| 2487 | # Fallthru to sleep+retry logic at the bottom. | ||
| 2488 | pass | ||
| 2489 | |||
| 2490 | # Try to prune remote branches once in case there are conflicts. | ||
| 2491 | # For example, if the remote had refs/heads/upstream, but deleted | ||
| 2492 | # that and now has refs/heads/upstream/foo. | ||
| 2493 | elif ( | ||
| 2494 | gitcmd.stdout | ||
| 2495 | and "error:" in gitcmd.stdout | ||
| 2496 | and "git remote prune" in gitcmd.stdout | ||
| 2497 | and not prune_tried | ||
| 2498 | ): | ||
| 2499 | prune_tried = True | ||
| 2500 | prunecmd = GitCommand( | ||
| 2501 | self, | ||
| 2502 | ["remote", "prune", name], | ||
| 2503 | bare=True, | ||
| 2504 | ssh_proxy=ssh_proxy, | ||
| 2505 | ) | ||
| 2506 | ret = prunecmd.Wait() | ||
| 2507 | if ret: | ||
| 2508 | break | ||
| 2509 | print( | ||
| 2510 | "retrying fetch after pruning remote branches", | ||
| 2511 | file=output_redir, | ||
| 2512 | ) | ||
| 2513 | # Continue right away so we don't sleep as we shouldn't need to. | ||
| 2514 | continue | ||
| 2515 | elif current_branch_only and is_sha1 and ret == 128: | ||
| 2516 | # Exit code 128 means "couldn't find the ref you asked for"; if | ||
| 2517 | # we're in sha1 mode, we just tried sync'ing from the upstream | ||
| 2518 | # field; it doesn't exist, thus abort the optimization attempt | ||
| 2519 | # and do a full sync. | ||
| 2520 | break | ||
| 2521 | elif ret < 0: | ||
| 2522 | # Git died with a signal, exit immediately. | ||
| 2523 | break | ||
| 2524 | |||
| 2525 | # Figure out how long to sleep before the next attempt, if there is | ||
| 2526 | # one. | ||
| 2527 | if not verbose and gitcmd.stdout: | ||
| 2528 | print( | ||
| 2529 | "\n%s:\n%s" % (self.name, gitcmd.stdout), | ||
| 2530 | end="", | ||
| 2531 | file=output_redir, | ||
| 2532 | ) | ||
| 2533 | if try_n < retry_fetches - 1: | ||
| 2534 | print( | ||
| 2535 | "%s: sleeping %s seconds before retrying" | ||
| 2536 | % (self.name, retry_cur_sleep), | ||
| 2537 | file=output_redir, | ||
| 2538 | ) | ||
| 2539 | time.sleep(retry_cur_sleep) | ||
| 2540 | retry_cur_sleep = min( | ||
| 2541 | retry_exp_factor * retry_cur_sleep, MAXIMUM_RETRY_SLEEP_SEC | ||
| 2542 | ) | ||
| 2543 | retry_cur_sleep *= 1 - random.uniform( | ||
| 2544 | -RETRY_JITTER_PERCENT, RETRY_JITTER_PERCENT | ||
| 2545 | ) | ||
| 2546 | |||
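The retry loop above backs off exponentially between fetch attempts, caps the delay, and adds ±10% jitter so many projects syncing in parallel don't all retry in lockstep. The schedule in isolation (the cap below is illustrative; the real values come from the module constants near the top of this file and the `retry_*` parameters above):

```python
import random

MAXIMUM_RETRY_SLEEP_SEC = 3600.0  # illustrative cap; see the module constants
RETRY_JITTER_PERCENT = 0.1

def backoff_schedule(attempts, initial=4.0, factor=2.0):
    """Return the sleep durations a repeatedly failing fetch would see."""
    sleep, delays = initial, []
    for _ in range(attempts):
        delays.append(sleep)
        sleep = min(factor * sleep, MAXIMUM_RETRY_SLEEP_SEC)
        # Spread retries out by up to +/-10%.
        sleep *= 1 - random.uniform(-RETRY_JITTER_PERCENT, RETRY_JITTER_PERCENT)
    return delays

print(backoff_schedule(4))  # e.g. [4.0, 8.3, 15.7, 33.0]
```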
| 2547 | if initial: | ||
| 2548 | if alt_dir: | ||
| 2549 | if old_packed != "": | ||
| 2550 | _lwrite(packed_refs, old_packed) | ||
| 2551 | else: | ||
| 2552 | platform_utils.remove(packed_refs) | ||
| 2553 | self.bare_git.pack_refs("--all", "--prune") | ||
| 2554 | |||
| 2555 | if is_sha1 and current_branch_only: | ||
| 2556 | # We just synced the given upstream branch; verify we | ||
| 2557 | # got what we wanted, else trigger a second run of all | ||
| 2558 | # refs. | ||
| 2559 | if not self._CheckForImmutableRevision(): | ||
| 2560 | # Sync the current branch only with depth set to None. | ||
| 2561 | # We always pass depth=None down to avoid infinite recursion. | ||
| 2562 | return self._RemoteFetch( | ||
| 2563 | name=name, | ||
| 2564 | quiet=quiet, | ||
| 2565 | verbose=verbose, | ||
| 2566 | output_redir=output_redir, | ||
| 2567 | current_branch_only=current_branch_only and depth, | ||
| 2568 | initial=False, | ||
| 2569 | alt_dir=alt_dir, | ||
| 2570 | depth=None, | ||
| 2571 | ssh_proxy=ssh_proxy, | ||
| 2572 | clone_filter=clone_filter, | ||
| 2573 | ) | ||
| 2574 | |||
| 2575 | return ok | ||
| 2576 | |||
| 2577 | def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False): | ||
| 2578 | if initial and ( | ||
| 2579 | self.manifest.manifestProject.depth or self.clone_depth | ||
| 2580 | ): | ||
| 2581 | return False | ||
| 3249 | 2582 | ||
| 3250 | class _Failure(object): | 2583 | remote = self.GetRemote() |
| 2584 | bundle_url = remote.url + "/clone.bundle" | ||
| 2585 | bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url) | ||
| 2586 | if GetSchemeFromUrl(bundle_url) not in ( | ||
| 2587 | "http", | ||
| 2588 | "https", | ||
| 2589 | "persistent-http", | ||
| 2590 | "persistent-https", | ||
| 2591 | ): | ||
| 2592 | return False | ||
| 3251 | 2593 | ||
| 3252 | def __init__(self, project, why): | 2594 | bundle_dst = os.path.join(self.gitdir, "clone.bundle") |
| 3253 | self.project = project | 2595 | bundle_tmp = os.path.join(self.gitdir, "clone.bundle.tmp") |
| 3254 | self.why = why | ||
| 3255 | 2596 | ||
| 3256 | def Print(self, syncbuf): | 2597 | exist_dst = os.path.exists(bundle_dst) |
| 3257 | syncbuf.out.fail('error: %s/: %s', | 2598 | exist_tmp = os.path.exists(bundle_tmp) |
| 3258 | self.project.RelPath(local=False), | ||
| 3259 | str(self.why)) | ||
| 3260 | syncbuf.out.nl() | ||
| 3261 | 2599 | ||
| 2600 | if not initial and not exist_dst and not exist_tmp: | ||
| 2601 | return False | ||
| 3262 | 2602 | ||
| 3263 | class _Later(object): | 2603 | if not exist_dst: |
| 2604 | exist_dst = self._FetchBundle( | ||
| 2605 | bundle_url, bundle_tmp, bundle_dst, quiet, verbose | ||
| 2606 | ) | ||
| 2607 | if not exist_dst: | ||
| 2608 | return False | ||
| 3264 | 2609 | ||
| 3265 | def __init__(self, project, action): | 2610 | cmd = ["fetch"] |
| 3266 | self.project = project | 2611 | if not verbose: |
| 3267 | self.action = action | 2612 | cmd.append("--quiet") |
| 2613 | if not quiet and sys.stdout.isatty(): | ||
| 2614 | cmd.append("--progress") | ||
| 2615 | if not self.worktree: | ||
| 2616 | cmd.append("--update-head-ok") | ||
| 2617 | cmd.append(bundle_dst) | ||
| 2618 | for f in remote.fetch: | ||
| 2619 | cmd.append(str(f)) | ||
| 2620 | cmd.append("+refs/tags/*:refs/tags/*") | ||
| 2621 | |||
| 2622 | ok = ( | ||
| 2623 | GitCommand( | ||
| 2624 | self, | ||
| 2625 | cmd, | ||
| 2626 | bare=True, | ||
| 2627 | objdir=os.path.join(self.objdir, "objects"), | ||
| 2628 | ).Wait() | ||
| 2629 | == 0 | ||
| 2630 | ) | ||
| 2631 | platform_utils.remove(bundle_dst, missing_ok=True) | ||
| 2632 | platform_utils.remove(bundle_tmp, missing_ok=True) | ||
| 2633 | return ok | ||
| 2634 | |||
| 2635 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose): | ||
| 2636 | platform_utils.remove(dstPath, missing_ok=True) | ||
| 2637 | |||
| 2638 | cmd = ["curl", "--fail", "--output", tmpPath, "--netrc", "--location"] | ||
| 2639 | if quiet: | ||
| 2640 | cmd += ["--silent", "--show-error"] | ||
| 2641 | if os.path.exists(tmpPath): | ||
| 2642 | size = os.stat(tmpPath).st_size | ||
| 2643 | if size >= 1024: | ||
| 2644 | cmd += ["--continue-at", "%d" % (size,)] | ||
| 2645 | else: | ||
| 2646 | platform_utils.remove(tmpPath) | ||
| 2647 | with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy): | ||
| 2648 | if cookiefile: | ||
| 2649 | cmd += ["--cookie", cookiefile] | ||
| 2650 | if proxy: | ||
| 2651 | cmd += ["--proxy", proxy] | ||
| 2652 | elif "http_proxy" in os.environ and "darwin" == sys.platform: | ||
| 2653 | cmd += ["--proxy", os.environ["http_proxy"]] | ||
| 2654 | if srcUrl.startswith("persistent-https"): | ||
| 2655 | srcUrl = "http" + srcUrl[len("persistent-https") :] | ||
| 2656 | elif srcUrl.startswith("persistent-http"): | ||
| 2657 | srcUrl = "http" + srcUrl[len("persistent-http") :] | ||
| 2658 | cmd += [srcUrl] | ||
| 2659 | |||
| 2660 | proc = None | ||
| 2661 | with Trace("Fetching bundle: %s", " ".join(cmd)): | ||
| 2662 | if verbose: | ||
| 2663 | print("%s: Downloading bundle: %s" % (self.name, srcUrl)) | ||
| 2664 | stdout = None if verbose else subprocess.PIPE | ||
| 2665 | stderr = None if verbose else subprocess.STDOUT | ||
| 2666 | try: | ||
| 2667 | proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr) | ||
| 2668 | except OSError: | ||
| 2669 | return False | ||
| 2670 | |||
| 2671 | (output, _) = proc.communicate() | ||
| 2672 | curlret = proc.returncode | ||
| 2673 | |||
| 2674 | if curlret == 22: | ||
| 2675 | # From curl man page: | ||
| 2676 | # 22: HTTP page not retrieved. The requested url was not found | ||
| 2677 | # or returned another error with the HTTP error code being 400 | ||
| 2678 | # or above. This return code only appears if -f, --fail is used. | ||
| 2679 | if verbose: | ||
| 2680 | print( | ||
| 2681 | "%s: Unable to retrieve clone.bundle; ignoring." | ||
| 2682 | % self.name | ||
| 2683 | ) | ||
| 2684 | if output: | ||
| 2685 | print("Curl output:\n%s" % output) | ||
| 2686 | return False | ||
| 2687 | elif curlret and not verbose and output: | ||
| 2688 | print("%s" % output, file=sys.stderr) | ||
| 2689 | |||
| 2690 | if os.path.exists(tmpPath): | ||
| 2691 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): | ||
| 2692 | platform_utils.rename(tmpPath, dstPath) | ||
| 2693 | return True | ||
| 2694 | else: | ||
| 2695 | platform_utils.remove(tmpPath) | ||
| 2696 | return False | ||
| 2697 | else: | ||
| 2698 | return False | ||
| 3268 | 2699 | ||
| 3269 | def Run(self, syncbuf): | 2700 | def _IsValidBundle(self, path, quiet): |
| 3270 | out = syncbuf.out | 2701 | try: |
| 3271 | out.project('project %s/', self.project.RelPath(local=False)) | 2702 | with open(path, "rb") as f: |
| 3272 | out.nl() | 2703 | if f.read(16) == b"# v2 git bundle\n": |
| 3273 | try: | 2704 | return True |
| 3274 | self.action() | 2705 | else: |
| 3275 | out.nl() | 2706 | if not quiet: |
| 3276 | return True | 2707 | print( |
| 3277 | except GitError: | 2708 | "Invalid clone.bundle file; ignoring.", |
| 3278 | out.nl() | 2709 | file=sys.stderr, |
| 3279 | return False | 2710 | ) |
| 2711 | return False | ||
| 2712 | except OSError: | ||
| 2713 | return False | ||
| 3280 | 2714 | ||
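`_IsValidBundle` above accepts a downloaded clone.bundle only when its first 16 bytes are the v2 bundle signature (`# v2 git bundle\n`); anything else, such as an HTML error page or a truncated download, is discarded. A self-contained check of the same signature against a hypothetical path:

```python
BUNDLE_V2_SIGNATURE = b"# v2 git bundle\n"  # exactly 16 bytes

def is_valid_bundle(path):
    """Return True if |path| begins with the git v2 bundle signature."""
    try:
        with open(path, "rb") as f:
            return f.read(len(BUNDLE_V2_SIGNATURE)) == BUNDLE_V2_SIGNATURE
    except OSError:
        return False

# is_valid_bundle("/tmp/clone.bundle")
```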
| 2715 | def _Checkout(self, rev, quiet=False): | ||
| 2716 | cmd = ["checkout"] | ||
| 2717 | if quiet: | ||
| 2718 | cmd.append("-q") | ||
| 2719 | cmd.append(rev) | ||
| 2720 | cmd.append("--") | ||
| 2721 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2722 | if self._allrefs: | ||
| 2723 | raise GitError("%s checkout %s " % (self.name, rev)) | ||
| 2724 | |||
| 2725 | def _CherryPick(self, rev, ffonly=False, record_origin=False): | ||
| 2726 | cmd = ["cherry-pick"] | ||
| 2727 | if ffonly: | ||
| 2728 | cmd.append("--ff") | ||
| 2729 | if record_origin: | ||
| 2730 | cmd.append("-x") | ||
| 2731 | cmd.append(rev) | ||
| 2732 | cmd.append("--") | ||
| 2733 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2734 | if self._allrefs: | ||
| 2735 | raise GitError("%s cherry-pick %s " % (self.name, rev)) | ||
| 2736 | |||
| 2737 | def _LsRemote(self, refs): | ||
| 2738 | cmd = ["ls-remote", self.remote.name, refs] | ||
| 2739 | p = GitCommand(self, cmd, capture_stdout=True) | ||
| 2740 | if p.Wait() == 0: | ||
| 2741 | return p.stdout | ||
| 2742 | return None | ||
| 3281 | 2743 | ||
| 3282 | class _SyncColoring(Coloring): | 2744 | def _Revert(self, rev): |
| 2745 | cmd = ["revert"] | ||
| 2746 | cmd.append("--no-edit") | ||
| 2747 | cmd.append(rev) | ||
| 2748 | cmd.append("--") | ||
| 2749 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2750 | if self._allrefs: | ||
| 2751 | raise GitError("%s revert %s " % (self.name, rev)) | ||
| 2752 | |||
| 2753 | def _ResetHard(self, rev, quiet=True): | ||
| 2754 | cmd = ["reset", "--hard"] | ||
| 2755 | if quiet: | ||
| 2756 | cmd.append("-q") | ||
| 2757 | cmd.append(rev) | ||
| 2758 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2759 | raise GitError("%s reset --hard %s " % (self.name, rev)) | ||
| 3283 | 2760 | ||
| 3284 | def __init__(self, config): | 2761 | def _SyncSubmodules(self, quiet=True): |
| 3285 | super().__init__(config, 'reposync') | 2762 | cmd = ["submodule", "update", "--init", "--recursive"] |
| 3286 | self.project = self.printer('header', attr='bold') | 2763 | if quiet: |
| 3287 | self.info = self.printer('info') | 2764 | cmd.append("-q") |
| 3288 | self.fail = self.printer('fail', fg='red') | 2765 | if GitCommand(self, cmd).Wait() != 0: |
| 2766 | raise GitError( | ||
| 2767 | "%s submodule update --init --recursive " % self.name | ||
| 2768 | ) | ||
| 2769 | |||
| 2770 | def _Rebase(self, upstream, onto=None): | ||
| 2771 | cmd = ["rebase"] | ||
| 2772 | if onto is not None: | ||
| 2773 | cmd.extend(["--onto", onto]) | ||
| 2774 | cmd.append(upstream) | ||
| 2775 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2776 | raise GitError("%s rebase %s " % (self.name, upstream)) | ||
| 3289 | 2777 | ||
| 2778 | def _FastForward(self, head, ffonly=False): | ||
| 2779 | cmd = ["merge", "--no-stat", head] | ||
| 2780 | if ffonly: | ||
| 2781 | cmd.append("--ff-only") | ||
| 2782 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2783 | raise GitError("%s merge %s " % (self.name, head)) | ||
| 3290 | 2784 | ||
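
_Checkout, _CherryPick, _Revert, _ResetHard, _SyncSubmodules, _Rebase, and _FastForward all follow one pattern: build an argv list, run it through GitCommand, and raise GitError on a non-zero exit. A rough standalone equivalent of that pattern using plain subprocess (function name and error type are illustrative, not repo's API):

```python
import subprocess

def run_git(worktree, *argv):
    """Run a git subcommand in |worktree| and raise on failure."""
    result = subprocess.run(["git", "-C", worktree, *argv])
    if result.returncode != 0:
        raise RuntimeError("git %s failed in %s" % (" ".join(argv), worktree))

# e.g. run_git("/path/to/checkout", "rebase", "--onto", "main", "feature")
```
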
| 3291 | class SyncBuffer(object): | 2785 | def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False): |
| 2786 | init_git_dir = not os.path.exists(self.gitdir) | ||
| 2787 | init_obj_dir = not os.path.exists(self.objdir) | ||
| 2788 | try: | ||
| 2789 | # Initialize the bare repository, which contains all of the objects. | ||
| 2790 | if init_obj_dir: | ||
| 2791 | os.makedirs(self.objdir) | ||
| 2792 | self.bare_objdir.init() | ||
| 2793 | |||
| 2794 | self._UpdateHooks(quiet=quiet) | ||
| 2795 | |||
| 2796 | if self.use_git_worktrees: | ||
| 2797 | # Enable per-worktree config file support if possible. This | ||
| 2798 | # is more a nice-to-have feature for users rather than a | ||
| 2799 | # hard requirement. | ||
| 2800 | if git_require((2, 20, 0)): | ||
| 2801 | self.EnableRepositoryExtension("worktreeConfig") | ||
| 2802 | |||
| 2803 | # If we have a separate directory to hold refs, initialize it as | ||
| 2804 | # well. | ||
| 2805 | if self.objdir != self.gitdir: | ||
| 2806 | if init_git_dir: | ||
| 2807 | os.makedirs(self.gitdir) | ||
| 2808 | |||
| 2809 | if init_obj_dir or init_git_dir: | ||
| 2810 | self._ReferenceGitDir( | ||
| 2811 | self.objdir, self.gitdir, copy_all=True | ||
| 2812 | ) | ||
| 2813 | try: | ||
| 2814 | self._CheckDirReference(self.objdir, self.gitdir) | ||
| 2815 | except GitError as e: | ||
| 2816 | if force_sync: | ||
| 2817 | print( | ||
| 2818 | "Retrying clone after deleting %s" % self.gitdir, | ||
| 2819 | file=sys.stderr, | ||
| 2820 | ) | ||
| 2821 | try: | ||
| 2822 | platform_utils.rmtree( | ||
| 2823 | platform_utils.realpath(self.gitdir) | ||
| 2824 | ) | ||
| 2825 | if self.worktree and os.path.exists( | ||
| 2826 | platform_utils.realpath(self.worktree) | ||
| 2827 | ): | ||
| 2828 | platform_utils.rmtree( | ||
| 2829 | platform_utils.realpath(self.worktree) | ||
| 2830 | ) | ||
| 2831 | return self._InitGitDir( | ||
| 2832 | mirror_git=mirror_git, | ||
| 2833 | force_sync=False, | ||
| 2834 | quiet=quiet, | ||
| 2835 | ) | ||
| 2836 | except Exception: | ||
| 2837 | raise e | ||
| 2838 | raise e | ||
| 2839 | |||
| 2840 | if init_git_dir: | ||
| 2841 | mp = self.manifest.manifestProject | ||
| 2842 | ref_dir = mp.reference or "" | ||
| 2843 | |||
| 2844 | def _expanded_ref_dirs(): | ||
| 2845 | """Iterate through possible git reference dir paths.""" | ||
| 2846 | name = self.name + ".git" | ||
| 2847 | yield mirror_git or os.path.join(ref_dir, name) | ||
| 2848 | for prefix in "", self.remote.name: | ||
| 2849 | yield os.path.join( | ||
| 2850 | ref_dir, ".repo", "project-objects", prefix, name | ||
| 2851 | ) | ||
| 2852 | yield os.path.join( | ||
| 2853 | ref_dir, ".repo", "worktrees", prefix, name | ||
| 2854 | ) | ||
| 2855 | |||
| 2856 | if ref_dir or mirror_git: | ||
| 2857 | found_ref_dir = None | ||
| 2858 | for path in _expanded_ref_dirs(): | ||
| 2859 | if os.path.exists(path): | ||
| 2860 | found_ref_dir = path | ||
| 2861 | break | ||
| 2862 | ref_dir = found_ref_dir | ||
| 2863 | |||
| 2864 | if ref_dir: | ||
| 2865 | if not os.path.isabs(ref_dir): | ||
| 2866 | # The alternate directory is relative to the object | ||
| 2867 | # database. | ||
| 2868 | ref_dir = os.path.relpath( | ||
| 2869 | ref_dir, os.path.join(self.objdir, "objects") | ||
| 2870 | ) | ||
| 2871 | _lwrite( | ||
| 2872 | os.path.join( | ||
| 2873 | self.objdir, "objects/info/alternates" | ||
| 2874 | ), | ||
| 2875 | os.path.join(ref_dir, "objects") + "\n", | ||
| 2876 | ) | ||
| 2877 | |||
| 2878 | m = self.manifest.manifestProject.config | ||
| 2879 | for key in ["user.name", "user.email"]: | ||
| 2880 | if m.Has(key, include_defaults=False): | ||
| 2881 | self.config.SetString(key, m.GetString(key)) | ||
| 2882 | if not self.manifest.EnableGitLfs: | ||
| 2883 | self.config.SetString( | ||
| 2884 | "filter.lfs.smudge", "git-lfs smudge --skip -- %f" | ||
| 2885 | ) | ||
| 2886 | self.config.SetString( | ||
| 2887 | "filter.lfs.process", "git-lfs filter-process --skip" | ||
| 2888 | ) | ||
| 2889 | self.config.SetBoolean( | ||
| 2890 | "core.bare", True if self.manifest.IsMirror else None | ||
| 2891 | ) | ||
| 2892 | except Exception: | ||
| 2893 | if init_obj_dir and os.path.exists(self.objdir): | ||
| 2894 | platform_utils.rmtree(self.objdir) | ||
| 2895 | if init_git_dir and os.path.exists(self.gitdir): | ||
| 2896 | platform_utils.rmtree(self.gitdir) | ||
| 2897 | raise | ||
| 2898 | |||
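
When a --reference checkout is found, _InitGitDir records it in objects/info/alternates so git can resolve missing objects from the reference instead of fetching them. A simplified sketch of that wiring (paths are examples; the real code additionally searches several .repo/ layouts before settling on a reference directory):

```python
import os

def add_alternate(objdir, ref_objdir):
    """Point |objdir| at |ref_objdir| via git's alternates mechanism."""
    alternates = os.path.join(objdir, "objects", "info", "alternates")
    os.makedirs(os.path.dirname(alternates), exist_ok=True)
    # Relative entries are resolved against the objects/ directory itself.
    rel = os.path.relpath(ref_objdir, os.path.join(objdir, "objects"))
    with open(alternates, "a") as fp:
        fp.write(rel + "\n")
```
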
| 2899 | def _UpdateHooks(self, quiet=False): | ||
| 2900 | if os.path.exists(self.objdir): | ||
| 2901 | self._InitHooks(quiet=quiet) | ||
| 2902 | |||
| 2903 | def _InitHooks(self, quiet=False): | ||
| 2904 | hooks = platform_utils.realpath(os.path.join(self.objdir, "hooks")) | ||
| 2905 | if not os.path.exists(hooks): | ||
| 2906 | os.makedirs(hooks) | ||
| 2907 | |||
| 2908 | # Delete sample hooks. They're noise. | ||
| 2909 | for hook in glob.glob(os.path.join(hooks, "*.sample")): | ||
| 2910 | try: | ||
| 2911 | platform_utils.remove(hook, missing_ok=True) | ||
| 2912 | except PermissionError: | ||
| 2913 | pass | ||
| 2914 | |||
| 2915 | for stock_hook in _ProjectHooks(): | ||
| 2916 | name = os.path.basename(stock_hook) | ||
| 2917 | |||
| 2918 | if ( | ||
| 2919 | name in ("commit-msg",) | ||
| 2920 | and not self.remote.review | ||
| 2921 | and self is not self.manifest.manifestProject | ||
| 2922 | ): | ||
| 2923 | # Don't install a Gerrit Code Review hook if this | ||
| 2924 | # project does not appear to use it for reviews. | ||
| 2925 | # | ||
| 2926 | # Since the manifest project is one of those, but also | ||
| 2927 | # managed through gerrit, it's excluded. | ||
| 2928 | continue | ||
| 2929 | |||
| 2930 | dst = os.path.join(hooks, name) | ||
| 2931 | if platform_utils.islink(dst): | ||
| 2932 | continue | ||
| 2933 | if os.path.exists(dst): | ||
| 2934 | # If the files are the same, we'll leave it alone. We create | ||
| 2935 | # symlinks below by default but fallback to hardlinks if the OS | ||
| 2936 | # blocks them. So if we're here, it's probably because we made a | ||
| 2937 | # hardlink below. | ||
| 2938 | if not filecmp.cmp(stock_hook, dst, shallow=False): | ||
| 2939 | if not quiet: | ||
| 2940 | _warn( | ||
| 2941 | "%s: Not replacing locally modified %s hook", | ||
| 2942 | self.RelPath(local=False), | ||
| 2943 | name, | ||
| 2944 | ) | ||
| 2945 | continue | ||
| 2946 | try: | ||
| 2947 | platform_utils.symlink( | ||
| 2948 | os.path.relpath(stock_hook, os.path.dirname(dst)), dst | ||
| 2949 | ) | ||
| 2950 | except OSError as e: | ||
| 2951 | if e.errno == errno.EPERM: | ||
| 2952 | try: | ||
| 2953 | os.link(stock_hook, dst) | ||
| 2954 | except OSError: | ||
| 2955 | raise GitError(self._get_symlink_error_message()) | ||
| 2956 | else: | ||
| 2957 | raise | ||
| 2958 | |||
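
Hook installation prefers a relative symlink and falls back to a hard link when the OS refuses symlinks with EPERM (e.g. unprivileged Windows users). A compact sketch of that fallback (hypothetical helper; the real code also skips locally modified hooks and reports a friendlier error):

```python
import errno
import os

def install_hook(src, dst):
    try:
        os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise
        # Symlinks blocked by the OS/filesystem; try a hard link instead.
        os.link(src, dst)
```
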
| 2959 | def _InitRemote(self): | ||
| 2960 | if self.remote.url: | ||
| 2961 | remote = self.GetRemote() | ||
| 2962 | remote.url = self.remote.url | ||
| 2963 | remote.pushUrl = self.remote.pushUrl | ||
| 2964 | remote.review = self.remote.review | ||
| 2965 | remote.projectname = self.name | ||
| 2966 | |||
| 2967 | if self.worktree: | ||
| 2968 | remote.ResetFetch(mirror=False) | ||
| 2969 | else: | ||
| 2970 | remote.ResetFetch(mirror=True) | ||
| 2971 | remote.Save() | ||
| 2972 | |||
| 2973 | def _InitMRef(self): | ||
| 2974 | """Initialize the pseudo m/<manifest branch> ref.""" | ||
| 2975 | if self.manifest.branch: | ||
| 2976 | if self.use_git_worktrees: | ||
| 2977 | # Set up the m/ space to point to the worktree-specific ref | ||
| 2978 | # space. We'll update the worktree-specific ref space on each | ||
| 2979 | # checkout. | ||
| 2980 | ref = R_M + self.manifest.branch | ||
| 2981 | if not self.bare_ref.symref(ref): | ||
| 2982 | self.bare_git.symbolic_ref( | ||
| 2983 | "-m", | ||
| 2984 | "redirecting to worktree scope", | ||
| 2985 | ref, | ||
| 2986 | R_WORKTREE_M + self.manifest.branch, | ||
| 2987 | ) | ||
| 2988 | |||
| 2989 | # We can't update this ref with git worktrees until it exists. | ||
| 2990 | # We'll wait until the initial checkout to set it. | ||
| 2991 | if not os.path.exists(self.worktree): | ||
| 2992 | return | ||
| 2993 | |||
| 2994 | base = R_WORKTREE_M | ||
| 2995 | active_git = self.work_git | ||
| 2996 | |||
| 2997 | self._InitAnyMRef(HEAD, self.bare_git, detach=True) | ||
| 2998 | else: | ||
| 2999 | base = R_M | ||
| 3000 | active_git = self.bare_git | ||
| 3001 | |||
| 3002 | self._InitAnyMRef(base + self.manifest.branch, active_git) | ||
| 3003 | |||
| 3004 | def _InitMirrorHead(self): | ||
| 3005 | self._InitAnyMRef(HEAD, self.bare_git) | ||
| 3006 | |||
| 3007 | def _InitAnyMRef(self, ref, active_git, detach=False): | ||
| 3008 | """Initialize |ref| in |active_git| to the value in the manifest. | ||
| 3009 | |||
| 3010 | This points |ref| to the <project> setting in the manifest. | ||
| 3011 | |||
| 3012 | Args: | ||
| 3013 | ref: The branch to update. | ||
| 3014 | active_git: The git repository to make updates in. | ||
| 3015 | detach: Whether to update target of symbolic refs, or overwrite the | ||
| 3016 | ref directly (and thus make it non-symbolic). | ||
| 3017 | """ | ||
| 3018 | cur = self.bare_ref.symref(ref) | ||
| 3019 | |||
| 3020 | if self.revisionId: | ||
| 3021 | if cur != "" or self.bare_ref.get(ref) != self.revisionId: | ||
| 3022 | msg = "manifest set to %s" % self.revisionId | ||
| 3023 | dst = self.revisionId + "^0" | ||
| 3024 | active_git.UpdateRef(ref, dst, message=msg, detach=True) | ||
| 3025 | else: | ||
| 3026 | remote = self.GetRemote() | ||
| 3027 | dst = remote.ToLocal(self.revisionExpr) | ||
| 3028 | if cur != dst: | ||
| 3029 | msg = "manifest set to %s" % self.revisionExpr | ||
| 3030 | if detach: | ||
| 3031 | active_git.UpdateRef(ref, dst, message=msg, detach=True) | ||
| 3032 | else: | ||
| 3033 | active_git.symbolic_ref("-m", msg, ref, dst) | ||
| 3034 | |||
| 3035 | def _CheckDirReference(self, srcdir, destdir): | ||
| 3036 | # Git worktrees don't use symlinks to share at all. | ||
| 3037 | if self.use_git_worktrees: | ||
| 3038 | return | ||
| 3039 | |||
| 3040 | for name in self.shareable_dirs: | ||
| 3041 | # Try to self-heal a bit in simple cases. | ||
| 3042 | dst_path = os.path.join(destdir, name) | ||
| 3043 | src_path = os.path.join(srcdir, name) | ||
| 3044 | |||
| 3045 | dst = platform_utils.realpath(dst_path) | ||
| 3046 | if os.path.lexists(dst): | ||
| 3047 | src = platform_utils.realpath(src_path) | ||
| 3048 | # Fail if the links are pointing to the wrong place. | ||
| 3049 | if src != dst: | ||
| 3050 | _error("%s is different in %s vs %s", name, destdir, srcdir) | ||
| 3051 | raise GitError( | ||
| 3052 | "--force-sync not enabled; cannot overwrite a local " | ||
| 3053 | "work tree. If you're comfortable with the " | ||
| 3054 | "possibility of losing the work tree's git metadata," | ||
| 3055 | " use `repo sync --force-sync {0}` to " | ||
| 3056 | "proceed.".format(self.RelPath(local=False)) | ||
| 3057 | ) | ||
| 3058 | |||
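
_CheckDirReference only trusts a shared directory when the entry in the destination resolves to the same place as the source; anything else is treated as a conflicting local tree and aborts unless --force-sync is used. The comparison boils down to (hypothetical helper):

```python
import os

def points_at(link_path, expected_dir):
    """True if |link_path| ultimately resolves to |expected_dir|."""
    return os.path.realpath(link_path) == os.path.realpath(expected_dir)
```
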
| 3059 | def _ReferenceGitDir(self, gitdir, dotgit, copy_all): | ||
| 3060 | """Update |dotgit| to reference |gitdir|, using symlinks where possible. | ||
| 3061 | |||
| 3062 | Args: | ||
| 3063 | gitdir: The bare git repository. Must already be initialized. | ||
| 3064 | dotgit: The repository you would like to initialize. | ||
| 3065 | copy_all: If true, copy all remaining files from |gitdir| -> | ||
| 3066 | |dotgit|. This saves you the effort of initializing |dotgit| | ||
| 3067 | yourself. | ||
| 3068 | """ | ||
| 3069 | symlink_dirs = self.shareable_dirs[:] | ||
| 3070 | to_symlink = symlink_dirs | ||
| 3071 | |||
| 3072 | to_copy = [] | ||
| 3073 | if copy_all: | ||
| 3074 | to_copy = platform_utils.listdir(gitdir) | ||
| 3075 | |||
| 3076 | dotgit = platform_utils.realpath(dotgit) | ||
| 3077 | for name in set(to_copy).union(to_symlink): | ||
| 3078 | try: | ||
| 3079 | src = platform_utils.realpath(os.path.join(gitdir, name)) | ||
| 3080 | dst = os.path.join(dotgit, name) | ||
| 3081 | |||
| 3082 | if os.path.lexists(dst): | ||
| 3083 | continue | ||
| 3084 | |||
| 3085 | # If the source dir doesn't exist, create an empty dir. | ||
| 3086 | if name in symlink_dirs and not os.path.lexists(src): | ||
| 3087 | os.makedirs(src) | ||
| 3088 | |||
| 3089 | if name in to_symlink: | ||
| 3090 | platform_utils.symlink( | ||
| 3091 | os.path.relpath(src, os.path.dirname(dst)), dst | ||
| 3092 | ) | ||
| 3093 | elif copy_all and not platform_utils.islink(dst): | ||
| 3094 | if platform_utils.isdir(src): | ||
| 3095 | shutil.copytree(src, dst) | ||
| 3096 | elif os.path.isfile(src): | ||
| 3097 | shutil.copy(src, dst) | ||
| 3098 | |||
| 3099 | except OSError as e: | ||
| 3100 | if e.errno == errno.EPERM: | ||
| 3101 | raise DownloadError(self._get_symlink_error_message()) | ||
| 3102 | else: | ||
| 3103 | raise | ||
| 3104 | |||
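
_ReferenceGitDir shares the bare repository with the per-project gitdir by creating relative symlinks for the shareable directories and copying everything else. A stripped-down sketch of the symlinking step (the directory list is an illustrative subset, not repo's actual shareable_dirs):

```python
import os

SHAREABLE_DIRS = ["hooks", "rr-cache"]  # illustrative subset

def link_shared_dirs(gitdir, dotgit):
    for name in SHAREABLE_DIRS:
        src = os.path.join(gitdir, name)
        dst = os.path.join(dotgit, name)
        if os.path.lexists(dst):
            continue
        os.makedirs(src, exist_ok=True)  # ensure the shared side exists
        os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
```
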
| 3105 | def _InitGitWorktree(self): | ||
| 3106 | """Init the project using git worktrees.""" | ||
| 3107 | self.bare_git.worktree("prune") | ||
| 3108 | self.bare_git.worktree( | ||
| 3109 | "add", | ||
| 3110 | "-ff", | ||
| 3111 | "--checkout", | ||
| 3112 | "--detach", | ||
| 3113 | "--lock", | ||
| 3114 | self.worktree, | ||
| 3115 | self.GetRevisionId(), | ||
| 3116 | ) | ||
| 3292 | 3117 | ||
| 3293 | def __init__(self, config, detach_head=False): | 3118 | # Rewrite the internal state files to use relative paths between the |
| 3294 | self._messages = [] | 3119 | # checkouts & worktrees. |
| 3295 | self._failures = [] | 3120 | dotgit = os.path.join(self.worktree, ".git") |
| 3296 | self._later_queue1 = [] | 3121 | with open(dotgit, "r") as fp: |
| 3297 | self._later_queue2 = [] | 3122 | # Figure out the checkout->worktree path. |
| 3123 | setting = fp.read() | ||
| 3124 | assert setting.startswith("gitdir:") | ||
| 3125 | git_worktree_path = setting.split(":", 1)[1].strip() | ||
| 3126 | # Some platforms (e.g. Windows) won't let us update dotgit in situ | ||
| 3127 | # because of file permissions. Delete it and recreate it from scratch | ||
| 3128 | # to avoid. | ||
| 3129 | platform_utils.remove(dotgit) | ||
| 3130 | # Use relative path from checkout->worktree & maintain Unix line endings | ||
| 3131 | # on all OS's to match git behavior. | ||
| 3132 | with open(dotgit, "w", newline="\n") as fp: | ||
| 3133 | print( | ||
| 3134 | "gitdir:", | ||
| 3135 | os.path.relpath(git_worktree_path, self.worktree), | ||
| 3136 | file=fp, | ||
| 3137 | ) | ||
| 3138 | # Use relative path from worktree->checkout & maintain Unix line endings | ||
| 3139 | # on all OS's to match git behavior. | ||
| 3140 | with open( | ||
| 3141 | os.path.join(git_worktree_path, "gitdir"), "w", newline="\n" | ||
| 3142 | ) as fp: | ||
| 3143 | print(os.path.relpath(dotgit, git_worktree_path), file=fp) | ||
| 3144 | |||
| 3145 | self._InitMRef() | ||
| 3146 | |||
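
`git worktree add` writes an absolute path into the checkout's .git file; _InitGitWorktree then rewrites both the .git file and the worktree's gitdir file as relative paths so the checkout stays relocatable. The core of that rewrite, as a standalone sketch (error handling omitted):

```python
import os

def relativize_gitdir_pointer(worktree):
    dotgit = os.path.join(worktree, ".git")
    with open(dotgit) as fp:
        gitdir = fp.read().split(":", 1)[1].strip()
    # Keep Unix line endings so git sees the same bytes on every OS.
    with open(dotgit, "w", newline="\n") as fp:
        fp.write("gitdir: %s\n" % os.path.relpath(gitdir, worktree))
```
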
| 3147 | def _InitWorkTree(self, force_sync=False, submodules=False): | ||
| 3148 | """Setup the worktree .git path. | ||
| 3149 | |||
| 3150 | This is the user-visible path like src/foo/.git/. | ||
| 3151 | |||
| 3152 | With non-git-worktrees, this will be a symlink to the .repo/projects/ | ||
| 3153 | path. With git-worktrees, this will be a .git file using "gitdir: ..." | ||
| 3154 | syntax. | ||
| 3155 | |||
| 3156 | Older checkouts had .git/ directories. If we see that, migrate it. | ||
| 3157 | |||
| 3158 | This also handles changes in the manifest. Maybe this project was | ||
| 3159 | backed by "foo/bar" on the server, but now it's "new/foo/bar". We have | ||
| 3160 | to update the path we point to under .repo/projects/ to match. | ||
| 3161 | """ | ||
| 3162 | dotgit = os.path.join(self.worktree, ".git") | ||
| 3163 | |||
| 3164 | # If using an old layout style (a directory), migrate it. | ||
| 3165 | if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit): | ||
| 3166 | self._MigrateOldWorkTreeGitDir(dotgit) | ||
| 3167 | |||
| 3168 | init_dotgit = not os.path.exists(dotgit) | ||
| 3169 | if self.use_git_worktrees: | ||
| 3170 | if init_dotgit: | ||
| 3171 | self._InitGitWorktree() | ||
| 3172 | self._CopyAndLinkFiles() | ||
| 3173 | else: | ||
| 3174 | if not init_dotgit: | ||
| 3175 | # See if the project has changed. | ||
| 3176 | if platform_utils.realpath( | ||
| 3177 | self.gitdir | ||
| 3178 | ) != platform_utils.realpath(dotgit): | ||
| 3179 | platform_utils.remove(dotgit) | ||
| 3180 | |||
| 3181 | if init_dotgit or not os.path.exists(dotgit): | ||
| 3182 | os.makedirs(self.worktree, exist_ok=True) | ||
| 3183 | platform_utils.symlink( | ||
| 3184 | os.path.relpath(self.gitdir, self.worktree), dotgit | ||
| 3185 | ) | ||
| 3186 | |||
| 3187 | if init_dotgit: | ||
| 3188 | _lwrite( | ||
| 3189 | os.path.join(dotgit, HEAD), "%s\n" % self.GetRevisionId() | ||
| 3190 | ) | ||
| 3191 | |||
| 3192 | # Finish checking out the worktree. | ||
| 3193 | cmd = ["read-tree", "--reset", "-u", "-v", HEAD] | ||
| 3194 | if GitCommand(self, cmd).Wait() != 0: | ||
| 3195 | raise GitError( | ||
| 3196 | "Cannot initialize work tree for " + self.name | ||
| 3197 | ) | ||
| 3198 | |||
| 3199 | if submodules: | ||
| 3200 | self._SyncSubmodules(quiet=True) | ||
| 3201 | self._CopyAndLinkFiles() | ||
| 3202 | |||
| 3203 | @classmethod | ||
| 3204 | def _MigrateOldWorkTreeGitDir(cls, dotgit): | ||
| 3205 | """Migrate the old worktree .git/ dir style to a symlink. | ||
| 3206 | |||
| 3207 | This logic specifically only uses state from |dotgit| to figure out | ||
| 3208 | where to move content and not |self|. This way if the backing project | ||
| 3209 | also changed places, we only do the .git/ dir to .git symlink migration | ||
| 3210 | here. The path updates will happen independently. | ||
| 3211 | """ | ||
| 3212 | # Figure out where in .repo/projects/ it's pointing to. | ||
| 3213 | if not os.path.islink(os.path.join(dotgit, "refs")): | ||
| 3214 | raise GitError(f"{dotgit}: unsupported checkout state") | ||
| 3215 | gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, "refs"))) | ||
| 3216 | |||
| 3217 | # Remove known symlink paths that exist in .repo/projects/. | ||
| 3218 | KNOWN_LINKS = { | ||
| 3219 | "config", | ||
| 3220 | "description", | ||
| 3221 | "hooks", | ||
| 3222 | "info", | ||
| 3223 | "logs", | ||
| 3224 | "objects", | ||
| 3225 | "packed-refs", | ||
| 3226 | "refs", | ||
| 3227 | "rr-cache", | ||
| 3228 | "shallow", | ||
| 3229 | "svn", | ||
| 3230 | } | ||
| 3231 | # Paths that we know will be in both, but are safe to clobber in | ||
| 3232 | # .repo/projects/. | ||
| 3233 | SAFE_TO_CLOBBER = { | ||
| 3234 | "COMMIT_EDITMSG", | ||
| 3235 | "FETCH_HEAD", | ||
| 3236 | "HEAD", | ||
| 3237 | "gc.log", | ||
| 3238 | "gitk.cache", | ||
| 3239 | "index", | ||
| 3240 | "ORIG_HEAD", | ||
| 3241 | } | ||
| 3242 | |||
| 3243 | # First see if we'd succeed before starting the migration. | ||
| 3244 | unknown_paths = [] | ||
| 3245 | for name in platform_utils.listdir(dotgit): | ||
| 3246 | # Ignore all temporary/backup names. These are common with vim & | ||
| 3247 | # emacs. | ||
| 3248 | if name.endswith("~") or (name[0] == "#" and name[-1] == "#"): | ||
| 3249 | continue | ||
| 3250 | |||
| 3251 | dotgit_path = os.path.join(dotgit, name) | ||
| 3252 | if name in KNOWN_LINKS: | ||
| 3253 | if not platform_utils.islink(dotgit_path): | ||
| 3254 | unknown_paths.append(f"{dotgit_path}: should be a symlink") | ||
| 3255 | else: | ||
| 3256 | gitdir_path = os.path.join(gitdir, name) | ||
| 3257 | if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path): | ||
| 3258 | unknown_paths.append( | ||
| 3259 | f"{dotgit_path}: unknown file; please file a bug" | ||
| 3260 | ) | ||
| 3261 | if unknown_paths: | ||
| 3262 | raise GitError("Aborting migration: " + "\n".join(unknown_paths)) | ||
| 3263 | |||
| 3264 | # Now walk the paths and sync the .git/ to .repo/projects/. | ||
| 3265 | for name in platform_utils.listdir(dotgit): | ||
| 3266 | dotgit_path = os.path.join(dotgit, name) | ||
| 3267 | |||
| 3268 | # Ignore all temporary/backup names. These are common with vim & | ||
| 3269 | # emacs. | ||
| 3270 | if name.endswith("~") or (name[0] == "#" and name[-1] == "#"): | ||
| 3271 | platform_utils.remove(dotgit_path) | ||
| 3272 | elif name in KNOWN_LINKS: | ||
| 3273 | platform_utils.remove(dotgit_path) | ||
| 3274 | else: | ||
| 3275 | gitdir_path = os.path.join(gitdir, name) | ||
| 3276 | platform_utils.remove(gitdir_path, missing_ok=True) | ||
| 3277 | platform_utils.rename(dotgit_path, gitdir_path) | ||
| 3278 | |||
| 3279 | # Now that the dir should be empty, clear it out, and symlink it over. | ||
| 3280 | platform_utils.rmdir(dotgit) | ||
| 3281 | platform_utils.symlink( | ||
| 3282 | os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit | ||
| 3283 | ) | ||
| 3298 | 3284 | ||
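
The migration above deliberately runs in two passes: first verify that every entry under .git/ is either a known symlink or safe to clobber, and only then start moving files, so an unexpected entry aborts before anything is touched. A compressed sketch of that check-then-act pattern (all names are placeholders):

```python
def migrate(entries, is_known, is_safe, move):
    problems = [e for e in entries if not (is_known(e) or is_safe(e))]
    if problems:
        raise RuntimeError("aborting migration: %r" % problems)
    for entry in entries:
        move(entry)  # only reached once every entry has been vetted
```
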
| 3299 | self.out = _SyncColoring(config) | 3285 | def _get_symlink_error_message(self): |
| 3300 | self.out.redirect(sys.stderr) | 3286 | if platform_utils.isWindows(): |
| 3287 | return ( | ||
| 3288 | "Unable to create symbolic link. Please re-run the command as " | ||
| 3289 | "Administrator, or see " | ||
| 3290 | "https://github.com/git-for-windows/git/wiki/Symbolic-Links " | ||
| 3291 | "for other options." | ||
| 3292 | ) | ||
| 3293 | return "filesystem must support symlinks" | ||
| 3294 | |||
| 3295 | def _revlist(self, *args, **kw): | ||
| 3296 | a = [] | ||
| 3297 | a.extend(args) | ||
| 3298 | a.append("--") | ||
| 3299 | return self.work_git.rev_list(*a, **kw) | ||
| 3300 | |||
| 3301 | @property | ||
| 3302 | def _allrefs(self): | ||
| 3303 | return self.bare_ref.all | ||
| 3304 | |||
| 3305 | def _getLogs( | ||
| 3306 | self, rev1, rev2, oneline=False, color=True, pretty_format=None | ||
| 3307 | ): | ||
| 3308 | """Get logs between two revisions of this project.""" | ||
| 3309 | comp = ".." | ||
| 3310 | if rev1: | ||
| 3311 | revs = [rev1] | ||
| 3312 | if rev2: | ||
| 3313 | revs.extend([comp, rev2]) | ||
| 3314 | cmd = ["log", "".join(revs)] | ||
| 3315 | out = DiffColoring(self.config) | ||
| 3316 | if out.is_on and color: | ||
| 3317 | cmd.append("--color") | ||
| 3318 | if pretty_format is not None: | ||
| 3319 | cmd.append("--pretty=format:%s" % pretty_format) | ||
| 3320 | if oneline: | ||
| 3321 | cmd.append("--oneline") | ||
| 3301 | 3322 | ||
| 3302 | self.detach_head = detach_head | 3323 | try: |
| 3303 | self.clean = True | 3324 | log = GitCommand( |
| 3304 | self.recent_clean = True | 3325 | self, cmd, capture_stdout=True, capture_stderr=True |
| 3326 | ) | ||
| 3327 | if log.Wait() == 0: | ||
| 3328 | return log.stdout | ||
| 3329 | except GitError: | ||
| 3330 | # worktree may not exist if groups changed for example. In that | ||
| 3331 | # case, try in gitdir instead. | ||
| 3332 | if not os.path.exists(self.worktree): | ||
| 3333 | return self.bare_git.log(*cmd[1:]) | ||
| 3334 | else: | ||
| 3335 | raise | ||
| 3336 | return None | ||
| 3305 | 3337 | ||
| 3306 | def info(self, project, fmt, *args): | 3338 | def getAddedAndRemovedLogs( |
| 3307 | self._messages.append(_InfoMessage(project, fmt % args)) | 3339 | self, toProject, oneline=False, color=True, pretty_format=None |
| 3340 | ): | ||
| 3341 | """Get the list of logs from this revision to given revisionId""" | ||
| 3342 | logs = {} | ||
| 3343 | selfId = self.GetRevisionId(self._allrefs) | ||
| 3344 | toId = toProject.GetRevisionId(toProject._allrefs) | ||
| 3345 | |||
| 3346 | logs["added"] = self._getLogs( | ||
| 3347 | selfId, | ||
| 3348 | toId, | ||
| 3349 | oneline=oneline, | ||
| 3350 | color=color, | ||
| 3351 | pretty_format=pretty_format, | ||
| 3352 | ) | ||
| 3353 | logs["removed"] = self._getLogs( | ||
| 3354 | toId, | ||
| 3355 | selfId, | ||
| 3356 | oneline=oneline, | ||
| 3357 | color=color, | ||
| 3358 | pretty_format=pretty_format, | ||
| 3359 | ) | ||
| 3360 | return logs | ||
| 3361 | |||
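
getAddedAndRemovedLogs compares the two revisions in both directions: commits reachable from the target but not from this project are "added", and the reverse range is "removed". A standalone approximation using plain `git log` (repository path and revisions are placeholders):

```python
import subprocess

def added_and_removed(repo, old_rev, new_rev):
    def log_range(a, b):
        return subprocess.run(
            ["git", "-C", repo, "log", "--oneline", "%s..%s" % (a, b)],
            capture_output=True, text=True, check=True,
        ).stdout
    return {
        "added": log_range(old_rev, new_rev),    # in new_rev, not old_rev
        "removed": log_range(new_rev, old_rev),  # in old_rev, not new_rev
    }
```
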
| 3362 | class _GitGetByExec(object): | ||
| 3363 | def __init__(self, project, bare, gitdir): | ||
| 3364 | self._project = project | ||
| 3365 | self._bare = bare | ||
| 3366 | self._gitdir = gitdir | ||
| 3367 | |||
| 3368 | # __getstate__ and __setstate__ are required for pickling because | ||
| 3369 | # __getattr__ exists. | ||
| 3370 | def __getstate__(self): | ||
| 3371 | return (self._project, self._bare, self._gitdir) | ||
| 3372 | |||
| 3373 | def __setstate__(self, state): | ||
| 3374 | self._project, self._bare, self._gitdir = state | ||
| 3375 | |||
| 3376 | def LsOthers(self): | ||
| 3377 | p = GitCommand( | ||
| 3378 | self._project, | ||
| 3379 | ["ls-files", "-z", "--others", "--exclude-standard"], | ||
| 3380 | bare=False, | ||
| 3381 | gitdir=self._gitdir, | ||
| 3382 | capture_stdout=True, | ||
| 3383 | capture_stderr=True, | ||
| 3384 | ) | ||
| 3385 | if p.Wait() == 0: | ||
| 3386 | out = p.stdout | ||
| 3387 | if out: | ||
| 3388 | # Backslash is not anomalous. | ||
| 3389 | return out[:-1].split("\0") | ||
| 3390 | return [] | ||
| 3391 | |||
| 3392 | def DiffZ(self, name, *args): | ||
| 3393 | cmd = [name] | ||
| 3394 | cmd.append("-z") | ||
| 3395 | cmd.append("--ignore-submodules") | ||
| 3396 | cmd.extend(args) | ||
| 3397 | p = GitCommand( | ||
| 3398 | self._project, | ||
| 3399 | cmd, | ||
| 3400 | gitdir=self._gitdir, | ||
| 3401 | bare=False, | ||
| 3402 | capture_stdout=True, | ||
| 3403 | capture_stderr=True, | ||
| 3404 | ) | ||
| 3405 | p.Wait() | ||
| 3406 | r = {} | ||
| 3407 | out = p.stdout | ||
| 3408 | if out: | ||
| 3409 | out = iter(out[:-1].split("\0")) | ||
| 3410 | while out: | ||
| 3411 | try: | ||
| 3412 | info = next(out) | ||
| 3413 | path = next(out) | ||
| 3414 | except StopIteration: | ||
| 3415 | break | ||
| 3416 | |||
| 3417 | class _Info(object): | ||
| 3418 | def __init__(self, path, omode, nmode, oid, nid, state): | ||
| 3419 | self.path = path | ||
| 3420 | self.src_path = None | ||
| 3421 | self.old_mode = omode | ||
| 3422 | self.new_mode = nmode | ||
| 3423 | self.old_id = oid | ||
| 3424 | self.new_id = nid | ||
| 3425 | |||
| 3426 | if len(state) == 1: | ||
| 3427 | self.status = state | ||
| 3428 | self.level = None | ||
| 3429 | else: | ||
| 3430 | self.status = state[:1] | ||
| 3431 | self.level = state[1:] | ||
| 3432 | while self.level.startswith("0"): | ||
| 3433 | self.level = self.level[1:] | ||
| 3434 | |||
| 3435 | info = info[1:].split(" ") | ||
| 3436 | info = _Info(path, *info) | ||
| 3437 | if info.status in ("R", "C"): | ||
| 3438 | info.src_path = info.path | ||
| 3439 | info.path = next(out) | ||
| 3440 | r[info.path] = info | ||
| 3441 | return r | ||
| 3442 | |||
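
DiffZ walks NUL-separated raw diff output where each record is a ":<modes> <ids> <status>" header followed by the path (and a second path for renames/copies). A tiny sketch of parsing one such record (the sample string is made up):

```python
raw = ":100644 100644 1111111 2222222 M\0README.md\0"
fields = iter(raw[:-1].split("\0"))
for info in fields:
    path = next(fields)
    old_mode, new_mode, old_id, new_id, status = info[1:].split(" ")
    print(path, status)  # -> README.md M
```
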
| 3443 | def GetDotgitPath(self, subpath=None): | ||
| 3444 | """Return the full path to the .git dir. | ||
| 3445 | |||
| 3446 | As a convenience, append |subpath| if provided. | ||
| 3447 | """ | ||
| 3448 | if self._bare: | ||
| 3449 | dotgit = self._gitdir | ||
| 3450 | else: | ||
| 3451 | dotgit = os.path.join(self._project.worktree, ".git") | ||
| 3452 | if os.path.isfile(dotgit): | ||
| 3453 | # Git worktrees use a "gitdir:" syntax to point to the | ||
| 3454 | # scratch space. | ||
| 3455 | with open(dotgit) as fp: | ||
| 3456 | setting = fp.read() | ||
| 3457 | assert setting.startswith("gitdir:") | ||
| 3458 | gitdir = setting.split(":", 1)[1].strip() | ||
| 3459 | dotgit = os.path.normpath( | ||
| 3460 | os.path.join(self._project.worktree, gitdir) | ||
| 3461 | ) | ||
| 3462 | |||
| 3463 | return dotgit if subpath is None else os.path.join(dotgit, subpath) | ||
| 3464 | |||
| 3465 | def GetHead(self): | ||
| 3466 | """Return the ref that HEAD points to.""" | ||
| 3467 | path = self.GetDotgitPath(subpath=HEAD) | ||
| 3468 | try: | ||
| 3469 | with open(path) as fd: | ||
| 3470 | line = fd.readline() | ||
| 3471 | except IOError as e: | ||
| 3472 | raise NoManifestException(path, str(e)) | ||
| 3473 | try: | ||
| 3474 | line = line.decode() | ||
| 3475 | except AttributeError: | ||
| 3476 | pass | ||
| 3477 | if line.startswith("ref: "): | ||
| 3478 | return line[5:-1] | ||
| 3479 | return line[:-1] | ||
| 3480 | |||
| 3481 | def SetHead(self, ref, message=None): | ||
| 3482 | cmdv = [] | ||
| 3483 | if message is not None: | ||
| 3484 | cmdv.extend(["-m", message]) | ||
| 3485 | cmdv.append(HEAD) | ||
| 3486 | cmdv.append(ref) | ||
| 3487 | self.symbolic_ref(*cmdv) | ||
| 3488 | |||
| 3489 | def DetachHead(self, new, message=None): | ||
| 3490 | cmdv = ["--no-deref"] | ||
| 3491 | if message is not None: | ||
| 3492 | cmdv.extend(["-m", message]) | ||
| 3493 | cmdv.append(HEAD) | ||
| 3494 | cmdv.append(new) | ||
| 3495 | self.update_ref(*cmdv) | ||
| 3496 | |||
| 3497 | def UpdateRef(self, name, new, old=None, message=None, detach=False): | ||
| 3498 | cmdv = [] | ||
| 3499 | if message is not None: | ||
| 3500 | cmdv.extend(["-m", message]) | ||
| 3501 | if detach: | ||
| 3502 | cmdv.append("--no-deref") | ||
| 3503 | cmdv.append(name) | ||
| 3504 | cmdv.append(new) | ||
| 3505 | if old is not None: | ||
| 3506 | cmdv.append(old) | ||
| 3507 | self.update_ref(*cmdv) | ||
| 3508 | |||
| 3509 | def DeleteRef(self, name, old=None): | ||
| 3510 | if not old: | ||
| 3511 | old = self.rev_parse(name) | ||
| 3512 | self.update_ref("-d", name, old) | ||
| 3513 | self._project.bare_ref.deleted(name) | ||
| 3514 | |||
| 3515 | def rev_list(self, *args, **kw): | ||
| 3516 | if "format" in kw: | ||
| 3517 | cmdv = ["log", "--pretty=format:%s" % kw["format"]] | ||
| 3518 | else: | ||
| 3519 | cmdv = ["rev-list"] | ||
| 3520 | cmdv.extend(args) | ||
| 3521 | p = GitCommand( | ||
| 3522 | self._project, | ||
| 3523 | cmdv, | ||
| 3524 | bare=self._bare, | ||
| 3525 | gitdir=self._gitdir, | ||
| 3526 | capture_stdout=True, | ||
| 3527 | capture_stderr=True, | ||
| 3528 | ) | ||
| 3529 | if p.Wait() != 0: | ||
| 3530 | raise GitError( | ||
| 3531 | "%s rev-list %s: %s" | ||
| 3532 | % (self._project.name, str(args), p.stderr) | ||
| 3533 | ) | ||
| 3534 | return p.stdout.splitlines() | ||
| 3535 | |||
| 3536 | def __getattr__(self, name): | ||
| 3537 | """Allow arbitrary git commands using pythonic syntax. | ||
| 3538 | |||
| 3539 | This allows you to do things like: | ||
| 3540 | git_obj.rev_parse('HEAD') | ||
| 3541 | |||
| 3542 | Since we don't have a 'rev_parse' method defined, the __getattr__ | ||
| 3543 | will run. We'll replace the '_' with a '-' and try to run a git | ||
| 3544 | command. Any other positional arguments will be passed to the git | ||
| 3545 | command, and the following keyword arguments are supported: | ||
| 3546 | config: An optional dict of git config options to be passed with | ||
| 3547 | '-c'. | ||
| 3548 | |||
| 3549 | Args: | ||
| 3550 | name: The name of the git command to call. Any '_' characters | ||
| 3551 | will be replaced with '-'. | ||
| 3552 | |||
| 3553 | Returns: | ||
| 3554 | A callable object that will try to call git with the named | ||
| 3555 | command. | ||
| 3556 | """ | ||
| 3557 | name = name.replace("_", "-") | ||
| 3558 | |||
| 3559 | def runner(*args, **kwargs): | ||
| 3560 | cmdv = [] | ||
| 3561 | config = kwargs.pop("config", None) | ||
| 3562 | for k in kwargs: | ||
| 3563 | raise TypeError( | ||
| 3564 | "%s() got an unexpected keyword argument %r" % (name, k) | ||
| 3565 | ) | ||
| 3566 | if config is not None: | ||
| 3567 | for k, v in config.items(): | ||
| 3568 | cmdv.append("-c") | ||
| 3569 | cmdv.append("%s=%s" % (k, v)) | ||
| 3570 | cmdv.append(name) | ||
| 3571 | cmdv.extend(args) | ||
| 3572 | p = GitCommand( | ||
| 3573 | self._project, | ||
| 3574 | cmdv, | ||
| 3575 | bare=self._bare, | ||
| 3576 | gitdir=self._gitdir, | ||
| 3577 | capture_stdout=True, | ||
| 3578 | capture_stderr=True, | ||
| 3579 | ) | ||
| 3580 | if p.Wait() != 0: | ||
| 3581 | raise GitError( | ||
| 3582 | "%s %s: %s" % (self._project.name, name, p.stderr) | ||
| 3583 | ) | ||
| 3584 | r = p.stdout | ||
| 3585 | if r.endswith("\n") and r.index("\n") == len(r) - 1: | ||
| 3586 | return r[:-1] | ||
| 3587 | return r | ||
| 3588 | |||
| 3589 | return runner | ||
| 3308 | 3590 | ||
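
As the docstring above notes, _GitGetByExec turns attribute access into git subcommands, mapping "_" to "-" and turning the optional `config` keyword into `-c key=value` flags. A usage sketch, assuming an existing Project instance (the variable name and values are hypothetical):

```python
def example(project):
    # Roughly equivalent to: git rev-parse HEAD
    head = project.bare_git.rev_parse("HEAD")
    # Roughly equivalent to: git -c core.abbrev=12 describe --tags HEAD
    return head, project.work_git.describe(
        "--tags", "HEAD", config={"core.abbrev": "12"}
    )
```
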
| 3309 | def fail(self, project, err=None): | ||
| 3310 | self._failures.append(_Failure(project, err)) | ||
| 3311 | self._MarkUnclean() | ||
| 3312 | 3591 | ||
| 3313 | def later1(self, project, what): | 3592 | class _PriorSyncFailedError(Exception): |
| 3314 | self._later_queue1.append(_Later(project, what)) | 3593 | def __str__(self): |
| 3594 | return "prior sync failed; rebase still in progress" | ||
| 3315 | 3595 | ||
| 3316 | def later2(self, project, what): | ||
| 3317 | self._later_queue2.append(_Later(project, what)) | ||
| 3318 | 3596 | ||
| 3319 | def Finish(self): | 3597 | class _DirtyError(Exception): |
| 3320 | self._PrintMessages() | 3598 | def __str__(self): |
| 3321 | self._RunLater() | 3599 | return "contains uncommitted changes" |
| 3322 | self._PrintMessages() | ||
| 3323 | return self.clean | ||
| 3324 | 3600 | ||
| 3325 | def Recently(self): | ||
| 3326 | recent_clean = self.recent_clean | ||
| 3327 | self.recent_clean = True | ||
| 3328 | return recent_clean | ||
| 3329 | 3601 | ||
| 3330 | def _MarkUnclean(self): | 3602 | class _InfoMessage(object): |
| 3331 | self.clean = False | 3603 | def __init__(self, project, text): |
| 3332 | self.recent_clean = False | 3604 | self.project = project |
| 3605 | self.text = text | ||
| 3333 | 3606 | ||
| 3334 | def _RunLater(self): | 3607 | def Print(self, syncbuf): |
| 3335 | for q in ['_later_queue1', '_later_queue2']: | 3608 | syncbuf.out.info( |
| 3336 | if not self._RunQueue(q): | 3609 | "%s/: %s", self.project.RelPath(local=False), self.text |
| 3337 | return | 3610 | ) |
| 3611 | syncbuf.out.nl() | ||
| 3338 | 3612 | ||
| 3339 | def _RunQueue(self, queue): | ||
| 3340 | for m in getattr(self, queue): | ||
| 3341 | if not m.Run(self): | ||
| 3342 | self._MarkUnclean() | ||
| 3343 | return False | ||
| 3344 | setattr(self, queue, []) | ||
| 3345 | return True | ||
| 3346 | 3613 | ||
| 3347 | def _PrintMessages(self): | 3614 | class _Failure(object): |
| 3348 | if self._messages or self._failures: | 3615 | def __init__(self, project, why): |
| 3349 | if os.isatty(2): | 3616 | self.project = project |
| 3350 | self.out.write(progress.CSI_ERASE_LINE) | 3617 | self.why = why |
| 3351 | self.out.write('\r') | ||
| 3352 | 3618 | ||
| 3353 | for m in self._messages: | 3619 | def Print(self, syncbuf): |
| 3354 | m.Print(self) | 3620 | syncbuf.out.fail( |
| 3355 | for m in self._failures: | 3621 | "error: %s/: %s", self.project.RelPath(local=False), str(self.why) |
| 3356 | m.Print(self) | 3622 | ) |
| 3623 | syncbuf.out.nl() | ||
| 3357 | 3624 | ||
| 3358 | self._messages = [] | ||
| 3359 | self._failures = [] | ||
| 3360 | 3625 | ||
| 3626 | class _Later(object): | ||
| 3627 | def __init__(self, project, action): | ||
| 3628 | self.project = project | ||
| 3629 | self.action = action | ||
| 3630 | |||
| 3631 | def Run(self, syncbuf): | ||
| 3632 | out = syncbuf.out | ||
| 3633 | out.project("project %s/", self.project.RelPath(local=False)) | ||
| 3634 | out.nl() | ||
| 3635 | try: | ||
| 3636 | self.action() | ||
| 3637 | out.nl() | ||
| 3638 | return True | ||
| 3639 | except GitError: | ||
| 3640 | out.nl() | ||
| 3641 | return False | ||
| 3361 | 3642 | ||
| 3362 | class MetaProject(Project): | ||
| 3363 | """A special project housed under .repo.""" | ||
| 3364 | |||
| 3365 | def __init__(self, manifest, name, gitdir, worktree): | ||
| 3366 | Project.__init__(self, | ||
| 3367 | manifest=manifest, | ||
| 3368 | name=name, | ||
| 3369 | gitdir=gitdir, | ||
| 3370 | objdir=gitdir, | ||
| 3371 | worktree=worktree, | ||
| 3372 | remote=RemoteSpec('origin'), | ||
| 3373 | relpath='.repo/%s' % name, | ||
| 3374 | revisionExpr='refs/heads/master', | ||
| 3375 | revisionId=None, | ||
| 3376 | groups=None) | ||
| 3377 | |||
| 3378 | def PreSync(self): | ||
| 3379 | if self.Exists: | ||
| 3380 | cb = self.CurrentBranch | ||
| 3381 | if cb: | ||
| 3382 | base = self.GetBranch(cb).merge | ||
| 3383 | if base: | ||
| 3384 | self.revisionExpr = base | ||
| 3385 | self.revisionId = None | ||
| 3386 | |||
| 3387 | @property | ||
| 3388 | def HasChanges(self): | ||
| 3389 | """Has the remote received new commits not yet checked out?""" | ||
| 3390 | if not self.remote or not self.revisionExpr: | ||
| 3391 | return False | ||
| 3392 | |||
| 3393 | all_refs = self.bare_ref.all | ||
| 3394 | revid = self.GetRevisionId(all_refs) | ||
| 3395 | head = self.work_git.GetHead() | ||
| 3396 | if head.startswith(R_HEADS): | ||
| 3397 | try: | ||
| 3398 | head = all_refs[head] | ||
| 3399 | except KeyError: | ||
| 3400 | head = None | ||
| 3401 | |||
| 3402 | if revid == head: | ||
| 3403 | return False | ||
| 3404 | elif self._revlist(not_rev(HEAD), revid): | ||
| 3405 | return True | ||
| 3406 | return False | ||
| 3407 | 3643 | ||
| 3644 | class _SyncColoring(Coloring): | ||
| 3645 | def __init__(self, config): | ||
| 3646 | super().__init__(config, "reposync") | ||
| 3647 | self.project = self.printer("header", attr="bold") | ||
| 3648 | self.info = self.printer("info") | ||
| 3649 | self.fail = self.printer("fail", fg="red") | ||
| 3408 | 3650 | ||
| 3409 | class RepoProject(MetaProject): | ||
| 3410 | """The MetaProject for repo itself.""" | ||
| 3411 | 3651 | ||
| 3412 | @property | 3652 | class SyncBuffer(object): |
| 3413 | def LastFetch(self): | 3653 | def __init__(self, config, detach_head=False): |
| 3414 | try: | 3654 | self._messages = [] |
| 3415 | fh = os.path.join(self.gitdir, 'FETCH_HEAD') | 3655 | self._failures = [] |
| 3416 | return os.path.getmtime(fh) | 3656 | self._later_queue1 = [] |
| 3417 | except OSError: | 3657 | self._later_queue2 = [] |
| 3418 | return 0 | ||
| 3419 | 3658 | ||
| 3659 | self.out = _SyncColoring(config) | ||
| 3660 | self.out.redirect(sys.stderr) | ||
| 3420 | 3661 | ||
| 3421 | class ManifestProject(MetaProject): | 3662 | self.detach_head = detach_head |
| 3422 | """The MetaProject for manifests.""" | 3663 | self.clean = True |
| 3423 | 3664 | self.recent_clean = True | |
| 3424 | def MetaBranchSwitch(self, submodules=False): | ||
| 3425 | """Prepare for manifest branch switch.""" | ||
| 3426 | |||
| 3427 | # detach and delete manifest branch, allowing a new | ||
| 3428 | # branch to take over | ||
| 3429 | syncbuf = SyncBuffer(self.config, detach_head=True) | ||
| 3430 | self.Sync_LocalHalf(syncbuf, submodules=submodules) | ||
| 3431 | syncbuf.Finish() | ||
| 3432 | |||
| 3433 | return GitCommand(self, | ||
| 3434 | ['update-ref', '-d', 'refs/heads/default'], | ||
| 3435 | capture_stdout=True, | ||
| 3436 | capture_stderr=True).Wait() == 0 | ||
| 3437 | |||
| 3438 | @property | ||
| 3439 | def standalone_manifest_url(self): | ||
| 3440 | """The URL of the standalone manifest, or None.""" | ||
| 3441 | return self.config.GetString('manifest.standalone') | ||
| 3442 | |||
| 3443 | @property | ||
| 3444 | def manifest_groups(self): | ||
| 3445 | """The manifest groups string.""" | ||
| 3446 | return self.config.GetString('manifest.groups') | ||
| 3447 | |||
| 3448 | @property | ||
| 3449 | def reference(self): | ||
| 3450 | """The --reference for this manifest.""" | ||
| 3451 | return self.config.GetString('repo.reference') | ||
| 3452 | |||
| 3453 | @property | ||
| 3454 | def dissociate(self): | ||
| 3455 | """Whether to dissociate.""" | ||
| 3456 | return self.config.GetBoolean('repo.dissociate') | ||
| 3457 | |||
| 3458 | @property | ||
| 3459 | def archive(self): | ||
| 3460 | """Whether we use archive.""" | ||
| 3461 | return self.config.GetBoolean('repo.archive') | ||
| 3462 | |||
| 3463 | @property | ||
| 3464 | def mirror(self): | ||
| 3465 | """Whether we use mirror.""" | ||
| 3466 | return self.config.GetBoolean('repo.mirror') | ||
| 3467 | |||
| 3468 | @property | ||
| 3469 | def use_worktree(self): | ||
| 3470 | """Whether we use worktree.""" | ||
| 3471 | return self.config.GetBoolean('repo.worktree') | ||
| 3472 | |||
| 3473 | @property | ||
| 3474 | def clone_bundle(self): | ||
| 3475 | """Whether we use clone_bundle.""" | ||
| 3476 | return self.config.GetBoolean('repo.clonebundle') | ||
| 3477 | |||
| 3478 | @property | ||
| 3479 | def submodules(self): | ||
| 3480 | """Whether we use submodules.""" | ||
| 3481 | return self.config.GetBoolean('repo.submodules') | ||
| 3482 | |||
| 3483 | @property | ||
| 3484 | def git_lfs(self): | ||
| 3485 | """Whether we use git_lfs.""" | ||
| 3486 | return self.config.GetBoolean('repo.git-lfs') | ||
| 3487 | |||
| 3488 | @property | ||
| 3489 | def use_superproject(self): | ||
| 3490 | """Whether we use superproject.""" | ||
| 3491 | return self.config.GetBoolean('repo.superproject') | ||
| 3492 | |||
| 3493 | @property | ||
| 3494 | def partial_clone(self): | ||
| 3495 | """Whether this is a partial clone.""" | ||
| 3496 | return self.config.GetBoolean('repo.partialclone') | ||
| 3497 | |||
| 3498 | @property | ||
| 3499 | def depth(self): | ||
| 3500 | """Partial clone depth.""" | ||
| 3501 | return self.config.GetString('repo.depth') | ||
| 3502 | |||
| 3503 | @property | ||
| 3504 | def clone_filter(self): | ||
| 3505 | """The clone filter.""" | ||
| 3506 | return self.config.GetString('repo.clonefilter') | ||
| 3507 | |||
| 3508 | @property | ||
| 3509 | def partial_clone_exclude(self): | ||
| 3510 | """Partial clone exclude string""" | ||
| 3511 | return self.config.GetString('repo.partialcloneexclude') | ||
| 3512 | |||
| 3513 | @property | ||
| 3514 | def manifest_platform(self): | ||
| 3515 | """The --platform argument from `repo init`.""" | ||
| 3516 | return self.config.GetString('manifest.platform') | ||
| 3517 | |||
| 3518 | @property | ||
| 3519 | def _platform_name(self): | ||
| 3520 | """Return the name of the platform.""" | ||
| 3521 | return platform.system().lower() | ||
| 3522 | |||
| 3523 | def SyncWithPossibleInit(self, submanifest, verbose=False, | ||
| 3524 | current_branch_only=False, tags='', git_event_log=None): | ||
| 3525 | """Sync a manifestProject, possibly for the first time. | ||
| 3526 | |||
| 3527 | Call Sync() with arguments from the most recent `repo init`. If this is a | ||
| 3528 | new sub manifest, then inherit options from the parent's manifestProject. | ||
| 3529 | |||
| 3530 | This is used by subcmds.Sync() to do an initial download of new sub | ||
| 3531 | manifests. | ||
| 3532 | |||
| 3533 | Args: | ||
| 3534 | submanifest: an XmlSubmanifest, the submanifest to re-sync. | ||
| 3535 | verbose: a boolean, whether to show all output, rather than only errors. | ||
| 3536 | current_branch_only: a boolean, whether to only fetch the current manifest | ||
| 3537 | branch from the server. | ||
| 3538 | tags: a boolean, whether to fetch tags. | ||
| 3539 | git_event_log: an EventLog, for git tracing. | ||
| 3540 | """ | ||
| 3541 | # TODO(lamontjones): when refactoring sync (and init?) consider how to | ||
| 3542 | # better get the init options that we should use for new submanifests that | ||
| 3543 | # are added when syncing an existing workspace. | ||
| 3544 | git_event_log = git_event_log or EventLog() | ||
| 3545 | spec = submanifest.ToSubmanifestSpec() | ||
| 3546 | # Use the init options from the existing manifestProject, or the parent if | ||
| 3547 | # it doesn't exist. | ||
| 3548 | # | ||
| 3549 | # Today, we only support changing manifest_groups on the sub-manifest, with | ||
| 3550 | # no supported-for-the-user way to change the other arguments from those | ||
| 3551 | # specified by the outermost manifest. | ||
| 3552 | # | ||
| 3553 | # TODO(lamontjones): determine which of these should come from the outermost | ||
| 3554 | # manifest and which should come from the parent manifest. | ||
| 3555 | mp = self if self.Exists else submanifest.parent.manifestProject | ||
| 3556 | return self.Sync( | ||
| 3557 | manifest_url=spec.manifestUrl, | ||
| 3558 | manifest_branch=spec.revision, | ||
| 3559 | standalone_manifest=mp.standalone_manifest_url, | ||
| 3560 | groups=mp.manifest_groups, | ||
| 3561 | platform=mp.manifest_platform, | ||
| 3562 | mirror=mp.mirror, | ||
| 3563 | dissociate=mp.dissociate, | ||
| 3564 | reference=mp.reference, | ||
| 3565 | worktree=mp.use_worktree, | ||
| 3566 | submodules=mp.submodules, | ||
| 3567 | archive=mp.archive, | ||
| 3568 | partial_clone=mp.partial_clone, | ||
| 3569 | clone_filter=mp.clone_filter, | ||
| 3570 | partial_clone_exclude=mp.partial_clone_exclude, | ||
| 3571 | clone_bundle=mp.clone_bundle, | ||
| 3572 | git_lfs=mp.git_lfs, | ||
| 3573 | use_superproject=mp.use_superproject, | ||
| 3574 | verbose=verbose, | ||
| 3575 | current_branch_only=current_branch_only, | ||
| 3576 | tags=tags, | ||
| 3577 | depth=mp.depth, | ||
| 3578 | git_event_log=git_event_log, | ||
| 3579 | manifest_name=spec.manifestName, | ||
| 3580 | this_manifest_only=True, | ||
| 3581 | outer_manifest=False, | ||
| 3582 | ) | ||
| 3583 | |||
| 3584 | def Sync(self, _kwargs_only=(), manifest_url='', manifest_branch=None, | ||
| 3585 | standalone_manifest=False, groups='', mirror=False, reference='', | ||
| 3586 | dissociate=False, worktree=False, submodules=False, archive=False, | ||
| 3587 | partial_clone=None, depth=None, clone_filter='blob:none', | ||
| 3588 | partial_clone_exclude=None, clone_bundle=None, git_lfs=None, | ||
| 3589 | use_superproject=None, verbose=False, current_branch_only=False, | ||
| 3590 | git_event_log=None, platform='', manifest_name='default.xml', | ||
| 3591 | tags='', this_manifest_only=False, outer_manifest=True): | ||
| 3592 | """Sync the manifest and all submanifests. | ||
| 3593 | |||
| 3594 | Args: | ||
| 3595 | manifest_url: a string, the URL of the manifest project. | ||
| 3596 | manifest_branch: a string, the manifest branch to use. | ||
| 3597 | standalone_manifest: a boolean, whether to store the manifest as a static | ||
| 3598 | file. | ||
| 3599 | groups: a string, restricts the checkout to projects with the specified | ||
| 3600 | groups. | ||
| 3601 | mirror: a boolean, whether to create a mirror of the remote repository. | ||
| 3602 | reference: a string, location of a repo instance to use as a reference. | ||
| 3603 | dissociate: a boolean, whether to dissociate from reference mirrors after | ||
| 3604 | clone. | ||
| 3605 | worktree: a boolean, whether to use git-worktree to manage projects. | ||
| 3606 | submodules: a boolean, whether to sync submodules associated with the | ||
| 3607 | manifest project. | ||
| 3608 | archive: a boolean, whether to checkout each project as an archive. See | ||
| 3609 | git-archive. | ||
| 3610 | partial_clone: a boolean, whether to perform a partial clone. | ||
| 3611 | depth: an int, how deep of a shallow clone to create. | ||
| 3612 | clone_filter: a string, filter to use with partial_clone. | ||
| 3613 | partial_clone_exclude: a string, comma-delimited list of project names | ||
| 3614 | to exclude from partial clone. | ||
| 3615 | clone_bundle: a boolean, whether to enable /clone.bundle on HTTP/HTTPS. | ||
| 3616 | git_lfs: a boolean, whether to enable git LFS support. | ||
| 3617 | use_superproject: a boolean, whether to use the manifest superproject to | ||
| 3618 | sync projects. | ||
| 3619 | verbose: a boolean, whether to show all output, rather than only errors. | ||
| 3620 | current_branch_only: a boolean, whether to only fetch the current manifest | ||
| 3621 | branch from the server. | ||
| 3622 | platform: a string, restrict the checkout to projects with the specified | ||
| 3623 | platform group. | ||
| 3624 | git_event_log: an EventLog, for git tracing. | ||
| 3625 | tags: a boolean, whether to fetch tags. | ||
| 3626 | manifest_name: a string, the name of the manifest file to use. | ||
| 3627 | this_manifest_only: a boolean, whether to only operate on the current sub | ||
| 3628 | manifest. | ||
| 3629 | outer_manifest: a boolean, whether to start at the outermost manifest. | ||
| 3630 | 3665 | ||
| 3631 | Returns: | 3666 | def info(self, project, fmt, *args): |
| 3632 | a boolean, whether the sync was successful. | 3667 | self._messages.append(_InfoMessage(project, fmt % args)) |
| 3633 | """ | ||
| 3634 | assert _kwargs_only == (), 'Sync only accepts keyword arguments.' | ||
| 3635 | |||
| 3636 | groups = groups or self.manifest.GetDefaultGroupsStr(with_platform=False) | ||
| 3637 | platform = platform or 'auto' | ||
| 3638 | git_event_log = git_event_log or EventLog() | ||
| 3639 | if outer_manifest and self.manifest.is_submanifest: | ||
| 3640 | # In a multi-manifest checkout, use the outer manifest unless we are told | ||
| 3641 | # not to. | ||
| 3642 | return self.client.outer_manifest.manifestProject.Sync( | ||
| 3643 | manifest_url=manifest_url, | ||
| 3644 | manifest_branch=manifest_branch, | ||
| 3645 | standalone_manifest=standalone_manifest, | ||
| 3646 | groups=groups, | ||
| 3647 | platform=platform, | ||
| 3648 | mirror=mirror, | ||
| 3649 | dissociate=dissociate, | ||
| 3650 | reference=reference, | ||
| 3651 | worktree=worktree, | ||
| 3652 | submodules=submodules, | ||
| 3653 | archive=archive, | ||
| 3654 | partial_clone=partial_clone, | ||
| 3655 | clone_filter=clone_filter, | ||
| 3656 | partial_clone_exclude=partial_clone_exclude, | ||
| 3657 | clone_bundle=clone_bundle, | ||
| 3658 | git_lfs=git_lfs, | ||
| 3659 | use_superproject=use_superproject, | ||
| 3660 | verbose=verbose, | ||
| 3661 | current_branch_only=current_branch_only, | ||
| 3662 | tags=tags, | ||
| 3663 | depth=depth, | ||
| 3664 | git_event_log=git_event_log, | ||
| 3665 | manifest_name=manifest_name, | ||
| 3666 | this_manifest_only=this_manifest_only, | ||
| 3667 | outer_manifest=False) | ||
| 3668 | |||
| 3669 | # If repo has already been initialized, we take -u with the absence of | ||
| 3670 | # --standalone-manifest to mean "transition to a standard repo set up", | ||
| 3671 | # which necessitates starting fresh. | ||
| 3672 | # If --standalone-manifest is set, we always tear everything down and start | ||
| 3673 | # anew. | ||
| 3674 | if self.Exists: | ||
| 3675 | was_standalone_manifest = self.config.GetString('manifest.standalone') | ||
| 3676 | if was_standalone_manifest and not manifest_url: | ||
| 3677 | print('fatal: repo was initialized with a standalone manifest, ' | ||
| 3678 | 'cannot be re-initialized without --manifest-url/-u') | ||
| 3679 | return False | ||
| 3680 | 3668 | ||
| 3681 | if standalone_manifest or (was_standalone_manifest and manifest_url): | 3669 | def fail(self, project, err=None): |
| 3682 | self.config.ClearCache() | 3670 | self._failures.append(_Failure(project, err)) |
| 3683 | if self.gitdir and os.path.exists(self.gitdir): | 3671 | self._MarkUnclean() |
| 3684 | platform_utils.rmtree(self.gitdir) | ||
| 3685 | if self.worktree and os.path.exists(self.worktree): | ||
| 3686 | platform_utils.rmtree(self.worktree) | ||
| 3687 | |||
| 3688 | is_new = not self.Exists | ||
| 3689 | if is_new: | ||
| 3690 | if not manifest_url: | ||
| 3691 | print('fatal: manifest url is required.', file=sys.stderr) | ||
| 3692 | return False | ||
| 3693 | 3672 | ||
| 3694 | if verbose: | 3673 | def later1(self, project, what): |
| 3695 | print('Downloading manifest from %s' % | 3674 | self._later_queue1.append(_Later(project, what)) |
| 3696 | (GitConfig.ForUser().UrlInsteadOf(manifest_url),), | 3675 | |
| 3697 | file=sys.stderr) | 3676 | def later2(self, project, what): |
| 3698 | 3677 | self._later_queue2.append(_Later(project, what)) | |
| 3699 | # The manifest project object doesn't keep track of the path on the | 3678 | |
| 3700 | # server where this git is located, so let's save that here. | 3679 | def Finish(self): |
| 3701 | mirrored_manifest_git = None | 3680 | self._PrintMessages() |
| 3702 | if reference: | 3681 | self._RunLater() |
| 3703 | manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:] | 3682 | self._PrintMessages() |
| 3704 | mirrored_manifest_git = os.path.join(reference, manifest_git_path) | 3683 | return self.clean |
| 3705 | if not mirrored_manifest_git.endswith(".git"): | 3684 | |
| 3706 | mirrored_manifest_git += ".git" | 3685 | def Recently(self): |
| 3707 | if not os.path.exists(mirrored_manifest_git): | 3686 | recent_clean = self.recent_clean |
| 3708 | mirrored_manifest_git = os.path.join(reference, | 3687 | self.recent_clean = True |
| 3709 | '.repo/manifests.git') | 3688 | return recent_clean |
| 3710 | 3689 | ||
| 3711 | self._InitGitDir(mirror_git=mirrored_manifest_git) | 3690 | def _MarkUnclean(self): |
| 3712 | 3691 | self.clean = False | |
| 3713 | # If standalone_manifest is set, mark the project as "standalone" -- we'll | 3692 | self.recent_clean = False |
| 3714 | # still do much of the manifests.git set up, but will avoid actual syncs to | 3693 | |
| 3715 | # a remote. | 3694 | def _RunLater(self): |
| 3716 | if standalone_manifest: | 3695 | for q in ["_later_queue1", "_later_queue2"]: |
| 3717 | self.config.SetString('manifest.standalone', manifest_url) | 3696 | if not self._RunQueue(q): |
| 3718 | elif not manifest_url and not manifest_branch: | 3697 | return |
| 3719 | # If -u is set and --standalone-manifest is not, then we're not in | 3698 | |
| 3720 | # standalone mode. Otherwise, use config to infer what we were in the last | 3699 | def _RunQueue(self, queue): |
| 3721 | # init. | 3700 | for m in getattr(self, queue): |
| 3722 | standalone_manifest = bool(self.config.GetString('manifest.standalone')) | 3701 | if not m.Run(self): |
| 3723 | if not standalone_manifest: | 3702 | self._MarkUnclean() |
| 3724 | self.config.SetString('manifest.standalone', None) | 3703 | return False |
| 3725 | 3704 | setattr(self, queue, []) | |
| 3726 | self._ConfigureDepth(depth) | 3705 | return True |
| 3727 | 3706 | ||
| 3728 | # Set the remote URL before the remote branch as we might need it below. | 3707 | def _PrintMessages(self): |
| 3729 | if manifest_url: | 3708 | if self._messages or self._failures: |
| 3730 | r = self.GetRemote() | 3709 | if os.isatty(2): |
| 3731 | r.url = manifest_url | 3710 | self.out.write(progress.CSI_ERASE_LINE) |
| 3732 | r.ResetFetch() | 3711 | self.out.write("\r") |
| 3733 | r.Save() | 3712 | |
| 3734 | 3713 | for m in self._messages: | |
| 3735 | if not standalone_manifest: | 3714 | m.Print(self) |
| 3736 | if manifest_branch: | 3715 | for m in self._failures: |
| 3737 | if manifest_branch == 'HEAD': | 3716 | m.Print(self) |
| 3738 | manifest_branch = self.ResolveRemoteHead() | 3717 | |
| 3739 | if manifest_branch is None: | 3718 | self._messages = [] |
| 3740 | print('fatal: unable to resolve HEAD', file=sys.stderr) | 3719 | self._failures = [] |
| 3720 | |||
| 3721 | |||
| 3722 | class MetaProject(Project): | ||
| 3723 | """A special project housed under .repo.""" | ||
| 3724 | |||
| 3725 | def __init__(self, manifest, name, gitdir, worktree): | ||
| 3726 | Project.__init__( | ||
| 3727 | self, | ||
| 3728 | manifest=manifest, | ||
| 3729 | name=name, | ||
| 3730 | gitdir=gitdir, | ||
| 3731 | objdir=gitdir, | ||
| 3732 | worktree=worktree, | ||
| 3733 | remote=RemoteSpec("origin"), | ||
| 3734 | relpath=".repo/%s" % name, | ||
| 3735 | revisionExpr="refs/heads/master", | ||
| 3736 | revisionId=None, | ||
| 3737 | groups=None, | ||
| 3738 | ) | ||
| 3739 | |||
| 3740 | def PreSync(self): | ||
| 3741 | if self.Exists: | ||
| 3742 | cb = self.CurrentBranch | ||
| 3743 | if cb: | ||
| 3744 | base = self.GetBranch(cb).merge | ||
| 3745 | if base: | ||
| 3746 | self.revisionExpr = base | ||
| 3747 | self.revisionId = None | ||
| 3748 | |||
| 3749 | @property | ||
| 3750 | def HasChanges(self): | ||
| 3751 | """Has the remote received new commits not yet checked out?""" | ||
| 3752 | if not self.remote or not self.revisionExpr: | ||
| 3741 | return False | 3753 | return False |
| 3742 | self.revisionExpr = manifest_branch | ||
| 3743 | else: | ||
| 3744 | if is_new: | ||
| 3745 | default_branch = self.ResolveRemoteHead() | ||
| 3746 | if default_branch is None: | ||
| 3747 | # If the remote doesn't have HEAD configured, default to master. | ||
| 3748 | default_branch = 'refs/heads/master' | ||
| 3749 | self.revisionExpr = default_branch | ||
| 3750 | else: | ||
| 3751 | self.PreSync() | ||
| 3752 | |||
| 3753 | groups = re.split(r'[,\s]+', groups or '') | ||
| 3754 | all_platforms = ['linux', 'darwin', 'windows'] | ||
| 3755 | platformize = lambda x: 'platform-' + x | ||
| 3756 | if platform == 'auto': | ||
| 3757 | if not mirror and not self.mirror: | ||
| 3758 | groups.append(platformize(self._platform_name)) | ||
| 3759 | elif platform == 'all': | ||
| 3760 | groups.extend(map(platformize, all_platforms)) | ||
| 3761 | elif platform in all_platforms: | ||
| 3762 | groups.append(platformize(platform)) | ||
| 3763 | elif platform != 'none': | ||
| 3764 | print('fatal: invalid platform flag', file=sys.stderr) | ||
| 3765 | return False | ||
| 3766 | self.config.SetString('manifest.platform', platform) | ||
| 3767 | |||
| 3768 | groups = [x for x in groups if x] | ||
| 3769 | groupstr = ','.join(groups) | ||
| 3770 | if platform == 'auto' and groupstr == self.manifest.GetDefaultGroupsStr(): | ||
| 3771 | groupstr = None | ||
| 3772 | self.config.SetString('manifest.groups', groupstr) | ||
| 3773 | |||
| 3774 | if reference: | ||
| 3775 | self.config.SetString('repo.reference', reference) | ||
| 3776 | |||
| 3777 | if dissociate: | ||
| 3778 | self.config.SetBoolean('repo.dissociate', dissociate) | ||
| 3779 | |||
| 3780 | if worktree: | ||
| 3781 | if mirror: | ||
| 3782 | print('fatal: --mirror and --worktree are incompatible', | ||
| 3783 | file=sys.stderr) | ||
| 3784 | return False | ||
| 3785 | if submodules: | ||
| 3786 | print('fatal: --submodules and --worktree are incompatible', | ||
| 3787 | file=sys.stderr) | ||
| 3788 | return False | ||
| 3789 | self.config.SetBoolean('repo.worktree', worktree) | ||
| 3790 | if is_new: | ||
| 3791 | self.use_git_worktrees = True | ||
| 3792 | print('warning: --worktree is experimental!', file=sys.stderr) | ||
| 3793 | |||
| 3794 | if archive: | ||
| 3795 | if is_new: | ||
| 3796 | self.config.SetBoolean('repo.archive', archive) | ||
| 3797 | else: | ||
| 3798 | print('fatal: --archive is only supported when initializing a new ' | ||
| 3799 | 'workspace.', file=sys.stderr) | ||
| 3800 | print('Either delete the .repo folder in this workspace, or initialize ' | ||
| 3801 | 'in another location.', file=sys.stderr) | ||
| 3802 | return False | ||
| 3803 | 3754 | ||
| 3804 | if mirror: | 3755 | all_refs = self.bare_ref.all |
| 3805 | if is_new: | 3756 | revid = self.GetRevisionId(all_refs) |
| 3806 | self.config.SetBoolean('repo.mirror', mirror) | 3757 | head = self.work_git.GetHead() |
| 3807 | else: | 3758 | if head.startswith(R_HEADS): |
| 3808 | print('fatal: --mirror is only supported when initializing a new ' | 3759 | try: |
| 3809 | 'workspace.', file=sys.stderr) | 3760 | head = all_refs[head] |
| 3810 | print('Either delete the .repo folder in this workspace, or initialize ' | 3761 | except KeyError: |
| 3811 | 'in another location.', file=sys.stderr) | 3762 | head = None |
| 3812 | return False | ||
| 3813 | 3763 | ||
| 3814 | if partial_clone is not None: | 3764 | if revid == head: |
| 3815 | if mirror: | 3765 | return False |
| 3816 | print('fatal: --mirror and --partial-clone are mutually exclusive', | 3766 | elif self._revlist(not_rev(HEAD), revid): |
| 3817 | file=sys.stderr) | 3767 | return True |
| 3818 | return False | ||
| 3819 | self.config.SetBoolean('repo.partialclone', partial_clone) | ||
| 3820 | if clone_filter: | ||
| 3821 | self.config.SetString('repo.clonefilter', clone_filter) | ||
| 3822 | elif self.partial_clone: | ||
| 3823 | clone_filter = self.clone_filter | ||
| 3824 | else: | ||
| 3825 | clone_filter = None | ||
| 3826 | |||
| 3827 | if partial_clone_exclude is not None: | ||
| 3828 | self.config.SetString('repo.partialcloneexclude', partial_clone_exclude) | ||
| 3829 | |||
| 3830 | if clone_bundle is None: | ||
| 3831 | clone_bundle = False if partial_clone else True | ||
| 3832 | else: | ||
| 3833 | self.config.SetBoolean('repo.clonebundle', clone_bundle) | ||
| 3834 | |||
| 3835 | if submodules: | ||
| 3836 | self.config.SetBoolean('repo.submodules', submodules) | ||
| 3837 | |||
| 3838 | if git_lfs is not None: | ||
| 3839 | if git_lfs: | ||
| 3840 | git_require((2, 17, 0), fail=True, msg='Git LFS support') | ||
| 3841 | |||
| 3842 | self.config.SetBoolean('repo.git-lfs', git_lfs) | ||
| 3843 | if not is_new: | ||
| 3844 | print('warning: Changing --git-lfs settings will only affect new project checkouts.\n' | ||
| 3845 | ' Existing projects will require manual updates.\n', file=sys.stderr) | ||
| 3846 | |||
| 3847 | if use_superproject is not None: | ||
| 3848 | self.config.SetBoolean('repo.superproject', use_superproject) | ||
| 3849 | |||
| 3850 | if not standalone_manifest: | ||
| 3851 | success = self.Sync_NetworkHalf( | ||
| 3852 | is_new=is_new, quiet=not verbose, verbose=verbose, | ||
| 3853 | clone_bundle=clone_bundle, current_branch_only=current_branch_only, | ||
| 3854 | tags=tags, submodules=submodules, clone_filter=clone_filter, | ||
| 3855 | partial_clone_exclude=self.manifest.PartialCloneExclude).success | ||
| 3856 | if not success: | ||
| 3857 | r = self.GetRemote() | ||
| 3858 | print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr) | ||
| 3859 | |||
| 3860 | # Better delete the manifest git dir if we created it; otherwise next | ||
| 3861 | # time (when user fixes problems) we won't go through the "is_new" logic. | ||
| 3862 | if is_new: | ||
| 3863 | platform_utils.rmtree(self.gitdir) | ||
| 3864 | return False | 3768 | return False |
| 3865 | 3769 | ||
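For readers following the new `HasChanges` logic above, here is a hedged, standalone equivalent expressed with plain git commands rather than the project's helpers (`_revlist`, `bare_ref`); the gitdir path and revision are assumed inputs, not values from this diff:

```python
import subprocess

def has_changes(gitdir, revid):
    """Rough equivalent of HasChanges: is revid not yet reachable from HEAD?"""
    head = subprocess.run(
        ["git", "--git-dir", gitdir, "rev-parse", "HEAD"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    if revid == head:
        return False
    # `git rev-list ^HEAD <revid>` lists commits in revid's history that
    # HEAD does not have; any output means the remote has moved ahead.
    out = subprocess.run(
        ["git", "--git-dir", gitdir, "rev-list", "^HEAD", revid],
        capture_output=True, text=True, check=True,
    ).stdout
    return bool(out.strip())
```

This mirrors the `revid == head` short-circuit and the `_revlist(not_rev(HEAD), revid)` test, under the assumption that HEAD resolves to a commit.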
| 3866 | if manifest_branch: | ||
| 3867 | self.MetaBranchSwitch(submodules=submodules) | ||
| 3868 | 3770 | ||
| 3869 | syncbuf = SyncBuffer(self.config) | 3771 | class RepoProject(MetaProject): |
| 3870 | self.Sync_LocalHalf(syncbuf, submodules=submodules) | 3772 | """The MetaProject for repo itself.""" |
| 3871 | syncbuf.Finish() | ||
| 3872 | 3773 | ||
| 3873 | if is_new or self.CurrentBranch is None: | 3774 | @property |
| 3874 | if not self.StartBranch('default'): | 3775 | def LastFetch(self): |
| 3875 | print('fatal: cannot create default in manifest', file=sys.stderr) | 3776 | try: |
| 3876 | return False | 3777 | fh = os.path.join(self.gitdir, "FETCH_HEAD") |
| 3778 | return os.path.getmtime(fh) | ||
| 3779 | except OSError: | ||
| 3780 | return 0 | ||
| 3877 | 3781 | ||
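The `LastFetch` property above simply reports the mtime of `FETCH_HEAD` (or 0 if repo has never been fetched). A hedged sketch of how a caller might use it to throttle self-update fetches; the one-day threshold is an assumption chosen for illustration, not a value taken from this diff:

```python
import time

ONE_DAY_S = 24 * 60 * 60  # assumed threshold, purely illustrative

def should_refetch(repo_project, now=None):
    """True if the repo metaproject has not been fetched within a day."""
    now = time.time() if now is None else now
    return (now - repo_project.LastFetch) > ONE_DAY_S
```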
| 3878 | if not manifest_name: | ||
| 3879 | print('fatal: manifest name (-m) is required.', file=sys.stderr) | ||
| 3880 | return False | ||
| 3881 | 3782 | ||
| 3882 | elif is_new: | 3783 | class ManifestProject(MetaProject): |
| 3883 | # This is a new standalone manifest. | 3784 | """The MetaProject for manifests.""" |
| 3884 | manifest_name = 'default.xml' | 3785 | |
| 3885 | manifest_data = fetch.fetch_file(manifest_url, verbose=verbose) | 3786 | def MetaBranchSwitch(self, submodules=False): |
| 3886 | dest = os.path.join(self.worktree, manifest_name) | 3787 | """Prepare for manifest branch switch.""" |
| 3887 | os.makedirs(os.path.dirname(dest), exist_ok=True) | 3788 | |
| 3888 | with open(dest, 'wb') as f: | 3789 | # detach and delete manifest branch, allowing a new |
| 3889 | f.write(manifest_data) | 3790 | # branch to take over |
| 3791 | syncbuf = SyncBuffer(self.config, detach_head=True) | ||
| 3792 | self.Sync_LocalHalf(syncbuf, submodules=submodules) | ||
| 3793 | syncbuf.Finish() | ||
| 3794 | |||
| 3795 | return ( | ||
| 3796 | GitCommand( | ||
| 3797 | self, | ||
| 3798 | ["update-ref", "-d", "refs/heads/default"], | ||
| 3799 | capture_stdout=True, | ||
| 3800 | capture_stderr=True, | ||
| 3801 | ).Wait() | ||
| 3802 | == 0 | ||
| 3803 | ) | ||
| 3804 | |||
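The tail of `MetaBranchSwitch` above boils down to a single git operation. A hedged command-line equivalent, with a placeholder gitdir standing in for the manifests project checkout, is:

```python
import subprocess

# Placeholder path; in a real checkout this is the manifests project gitdir.
gitdir = "/path/to/checkout/.repo/manifests.git"

# Delete the local "default" branch ref so it can be recreated later on the
# newly selected manifest branch.
ok = subprocess.run(
    ["git", "--git-dir", gitdir, "update-ref", "-d", "refs/heads/default"]
).returncode == 0
```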
| 3805 | @property | ||
| 3806 | def standalone_manifest_url(self): | ||
| 3807 | """The URL of the standalone manifest, or None.""" | ||
| 3808 | return self.config.GetString("manifest.standalone") | ||
| 3809 | |||
| 3810 | @property | ||
| 3811 | def manifest_groups(self): | ||
| 3812 | """The manifest groups string.""" | ||
| 3813 | return self.config.GetString("manifest.groups") | ||
| 3814 | |||
| 3815 | @property | ||
| 3816 | def reference(self): | ||
| 3817 | """The --reference for this manifest.""" | ||
| 3818 | return self.config.GetString("repo.reference") | ||
| 3819 | |||
| 3820 | @property | ||
| 3821 | def dissociate(self): | ||
| 3822 | """Whether to dissociate.""" | ||
| 3823 | return self.config.GetBoolean("repo.dissociate") | ||
| 3824 | |||
| 3825 | @property | ||
| 3826 | def archive(self): | ||
| 3827 | """Whether we use archive.""" | ||
| 3828 | return self.config.GetBoolean("repo.archive") | ||
| 3829 | |||
| 3830 | @property | ||
| 3831 | def mirror(self): | ||
| 3832 | """Whether we use mirror.""" | ||
| 3833 | return self.config.GetBoolean("repo.mirror") | ||
| 3834 | |||
| 3835 | @property | ||
| 3836 | def use_worktree(self): | ||
| 3837 | """Whether we use worktree.""" | ||
| 3838 | return self.config.GetBoolean("repo.worktree") | ||
| 3839 | |||
| 3840 | @property | ||
| 3841 | def clone_bundle(self): | ||
| 3842 | """Whether we use clone_bundle.""" | ||
| 3843 | return self.config.GetBoolean("repo.clonebundle") | ||
| 3844 | |||
| 3845 | @property | ||
| 3846 | def submodules(self): | ||
| 3847 | """Whether we use submodules.""" | ||
| 3848 | return self.config.GetBoolean("repo.submodules") | ||
| 3849 | |||
| 3850 | @property | ||
| 3851 | def git_lfs(self): | ||
| 3852 | """Whether we use git_lfs.""" | ||
| 3853 | return self.config.GetBoolean("repo.git-lfs") | ||
| 3854 | |||
| 3855 | @property | ||
| 3856 | def use_superproject(self): | ||
| 3857 | """Whether we use superproject.""" | ||
| 3858 | return self.config.GetBoolean("repo.superproject") | ||
| 3859 | |||
| 3860 | @property | ||
| 3861 | def partial_clone(self): | ||
| 3862 | """Whether this is a partial clone.""" | ||
| 3863 | return self.config.GetBoolean("repo.partialclone") | ||
| 3864 | |||
| 3865 | @property | ||
| 3866 | def depth(self): | ||
| 3867 | """Partial clone depth.""" | ||
| 3868 | return self.config.GetString("repo.depth") | ||
| 3869 | |||
| 3870 | @property | ||
| 3871 | def clone_filter(self): | ||
| 3872 | """The clone filter.""" | ||
| 3873 | return self.config.GetString("repo.clonefilter") | ||
| 3874 | |||
| 3875 | @property | ||
| 3876 | def partial_clone_exclude(self): | ||
| 3877 | """Partial clone exclude string""" | ||
| 3878 | return self.config.GetString("repo.partialcloneexclude") | ||
| 3879 | |||
| 3880 | @property | ||
| 3881 | def manifest_platform(self): | ||
| 3882 | """The --platform argument from `repo init`.""" | ||
| 3883 | return self.config.GetString("manifest.platform") | ||
| 3884 | |||
| 3885 | @property | ||
| 3886 | def _platform_name(self): | ||
| 3887 | """Return the name of the platform.""" | ||
| 3888 | return platform.system().lower() | ||
| 3889 | |||
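The properties above are thin read-back wrappers around keys that `repo init` persists in the manifest project's git config (`repo.*` and `manifest.*`). A hedged sketch of inspecting those keys directly with git, assuming a conventional checkout layout (the path is a placeholder):

```python
import subprocess

# Assumed location of the manifests project config in a typical checkout.
CONFIG_FILE = "/path/to/checkout/.repo/manifests.git/config"  # placeholder

def read_init_option(key):
    """Read one persisted `repo init` option, e.g. 'manifest.groups'."""
    result = subprocess.run(
        ["git", "config", "--file", CONFIG_FILE, "--get", key],
        capture_output=True, text=True,
    )
    return result.stdout.strip() or None

# Examples of keys set in Sync() below:
# read_init_option("repo.mirror"), read_init_option("manifest.platform")
```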
| 3890 | def SyncWithPossibleInit( | ||
| 3891 | self, | ||
| 3892 | submanifest, | ||
| 3893 | verbose=False, | ||
| 3894 | current_branch_only=False, | ||
| 3895 | tags="", | ||
| 3896 | git_event_log=None, | ||
| 3897 | ): | ||
| 3898 | """Sync a manifestProject, possibly for the first time. | ||
| 3899 | |||
| 3900 | Call Sync() with arguments from the most recent `repo init`. If this is | ||
| 3901 | a new sub manifest, then inherit options from the parent's | ||
| 3902 | manifestProject. | ||
| 3903 | |||
| 3904 | This is used by subcmds.Sync() to do an initial download of new sub | ||
| 3905 | manifests. | ||
| 3890 | 3906 | ||
| 3891 | try: | 3907 | Args: |
| 3892 | self.manifest.Link(manifest_name) | 3908 | submanifest: an XmlSubmanifest, the submanifest to re-sync. |
| 3893 | except ManifestParseError as e: | 3909 | verbose: a boolean, whether to show all output, rather than only |
| 3894 | print("fatal: manifest '%s' not available" % manifest_name, | 3910 | errors. |
| 3895 | file=sys.stderr) | 3911 | current_branch_only: a boolean, whether to only fetch the current |
| 3896 | print('fatal: %s' % str(e), file=sys.stderr) | 3912 | manifest branch from the server. |
| 3897 | return False | 3913 | tags: a boolean, whether to fetch tags. |
| 3898 | 3914 | git_event_log: an EventLog, for git tracing. | |
| 3899 | if not this_manifest_only: | 3915 | """ |
| 3900 | for submanifest in self.manifest.submanifests.values(): | 3916 | # TODO(lamontjones): when refactoring sync (and init?) consider how to |
| 3917 | # better get the init options that we should use for new submanifests | ||
| 3918 | # that are added when syncing an existing workspace. | ||
| 3919 | git_event_log = git_event_log or EventLog() | ||
| 3901 | spec = submanifest.ToSubmanifestSpec() | 3920 | spec = submanifest.ToSubmanifestSpec() |
| 3902 | submanifest.repo_client.manifestProject.Sync( | 3921 | # Use the init options from the existing manifestProject, or the parent |
| 3922 | # if it doesn't exist. | ||
| 3923 | # | ||
| 3924 | # Today, we only support changing manifest_groups on the sub-manifest, | ||
| 3925 | # with no supported-for-the-user way to change the other arguments from | ||
| 3926 | # those specified by the outermost manifest. | ||
| 3927 | # | ||
| 3928 | # TODO(lamontjones): determine which of these should come from the | ||
| 3929 | # outermost manifest and which should come from the parent manifest. | ||
| 3930 | mp = self if self.Exists else submanifest.parent.manifestProject | ||
| 3931 | return self.Sync( | ||
| 3903 | manifest_url=spec.manifestUrl, | 3932 | manifest_url=spec.manifestUrl, |
| 3904 | manifest_branch=spec.revision, | 3933 | manifest_branch=spec.revision, |
| 3905 | standalone_manifest=standalone_manifest, | 3934 | standalone_manifest=mp.standalone_manifest_url, |
| 3906 | groups=self.manifest_groups, | 3935 | groups=mp.manifest_groups, |
| 3907 | platform=platform, | 3936 | platform=mp.manifest_platform, |
| 3908 | mirror=mirror, | 3937 | mirror=mp.mirror, |
| 3909 | dissociate=dissociate, | 3938 | dissociate=mp.dissociate, |
| 3910 | reference=reference, | 3939 | reference=mp.reference, |
| 3911 | worktree=worktree, | 3940 | worktree=mp.use_worktree, |
| 3912 | submodules=submodules, | 3941 | submodules=mp.submodules, |
| 3913 | archive=archive, | 3942 | archive=mp.archive, |
| 3914 | partial_clone=partial_clone, | 3943 | partial_clone=mp.partial_clone, |
| 3915 | clone_filter=clone_filter, | 3944 | clone_filter=mp.clone_filter, |
| 3916 | partial_clone_exclude=partial_clone_exclude, | 3945 | partial_clone_exclude=mp.partial_clone_exclude, |
| 3917 | clone_bundle=clone_bundle, | 3946 | clone_bundle=mp.clone_bundle, |
| 3918 | git_lfs=git_lfs, | 3947 | git_lfs=mp.git_lfs, |
| 3919 | use_superproject=use_superproject, | 3948 | use_superproject=mp.use_superproject, |
| 3920 | verbose=verbose, | 3949 | verbose=verbose, |
| 3921 | current_branch_only=current_branch_only, | 3950 | current_branch_only=current_branch_only, |
| 3922 | tags=tags, | 3951 | tags=tags, |
| 3923 | depth=depth, | 3952 | depth=mp.depth, |
| 3924 | git_event_log=git_event_log, | 3953 | git_event_log=git_event_log, |
| 3925 | manifest_name=spec.manifestName, | 3954 | manifest_name=spec.manifestName, |
| 3926 | this_manifest_only=False, | 3955 | this_manifest_only=True, |
| 3927 | outer_manifest=False, | 3956 | outer_manifest=False, |
| 3928 | ) | 3957 | ) |
| 3929 | 3958 | ||
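A hedged sketch of the call pattern the docstring describes (`subcmds.Sync()` walking newly added submanifests); `outer_manifest` and `logger` are assumed to be supplied by the caller and are not defined in this diff:

```python
# For each submanifest in an existing checkout, (re)sync its manifest
# project, inheriting `repo init` options from the parent when it is new.
for submanifest in outer_manifest.submanifests.values():
    ok = submanifest.repo_client.manifestProject.SyncWithPossibleInit(
        submanifest,
        verbose=True,
        current_branch_only=False,
        tags="",
        git_event_log=logger,
    )
    if not ok:
        spec = submanifest.ToSubmanifestSpec()
        print(f"error: failed to sync submanifest {spec.manifestName}")
```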
| 3930 | # Lastly, if the manifest has a <superproject> then have the superproject | 3959 | def Sync( |
| 3931 | # sync it (if it will be used). | 3960 | self, |
| 3932 | if git_superproject.UseSuperproject(use_superproject, self.manifest): | 3961 | _kwargs_only=(), |
| 3933 | sync_result = self.manifest.superproject.Sync(git_event_log) | 3962 | manifest_url="", |
| 3934 | if not sync_result.success: | 3963 | manifest_branch=None, |
| 3935 | submanifest = '' | 3964 | standalone_manifest=False, |
| 3936 | if self.manifest.path_prefix: | 3965 | groups="", |
| 3937 | submanifest = f'for {self.manifest.path_prefix} ' | 3966 | mirror=False, |
| 3938 | print(f'warning: git update of superproject {submanifest}failed, repo ' | 3967 | reference="", |
| 3939 | 'sync will not use superproject to fetch source; while this ' | 3968 | dissociate=False, |
| 3940 | 'error is not fatal, and you can continue to run repo sync, ' | 3969 | worktree=False, |
| 3941 | 'please run repo init with the --no-use-superproject option to ' | 3970 | submodules=False, |
| 3942 | 'stop seeing this warning', file=sys.stderr) | 3971 | archive=False, |
| 3943 | if sync_result.fatal and use_superproject is not None: | 3972 | partial_clone=None, |
| 3944 | return False | 3973 | depth=None, |
| 3945 | 3974 | clone_filter="blob:none", | |
| 3946 | return True | 3975 | partial_clone_exclude=None, |
| 3947 | 3976 | clone_bundle=None, | |
| 3948 | def _ConfigureDepth(self, depth): | 3977 | git_lfs=None, |
| 3949 | """Configure the depth we'll sync down. | 3978 | use_superproject=None, |
| 3950 | 3979 | verbose=False, | |
| 3951 | Args: | 3980 | current_branch_only=False, |
| 3952 | depth: an int, how deep of a partial clone to create. | 3981 | git_event_log=None, |
| 3953 | """ | 3982 | platform="", |
| 3954 | # Opt.depth will be non-None if user actually passed --depth to repo init. | 3983 | manifest_name="default.xml", |
| 3955 | if depth is not None: | 3984 | tags="", |
| 3956 | if depth > 0: | 3985 | this_manifest_only=False, |
| 3957 | # Positive values will set the depth. | 3986 | outer_manifest=True, |
| 3958 | depth = str(depth) | 3987 | ): |
| 3959 | else: | 3988 | """Sync the manifest and all submanifests. |
| 3960 | # Negative numbers will clear the depth; passing None to SetString | 3989 | |
| 3961 | # will do that. | 3990 | Args: |
| 3962 | depth = None | 3991 | manifest_url: a string, the URL of the manifest project. |
| 3963 | 3992 | manifest_branch: a string, the manifest branch to use. | |
| 3964 | # We store the depth in the main manifest project. | 3993 | standalone_manifest: a boolean, whether to store the manifest as a |
| 3965 | self.config.SetString('repo.depth', depth) | 3994 | static file. |
| 3995 | groups: a string, restricts the checkout to projects with the | ||
| 3996 | specified groups. | ||
| 3997 | mirror: a boolean, whether to create a mirror of the remote | ||
| 3998 | repository. | ||
| 3999 | reference: a string, location of a repo instance to use as a | ||
| 4000 | reference. | ||
| 4001 | dissociate: a boolean, whether to dissociate from reference mirrors | ||
| 4002 | after clone. | ||
| 4003 | worktree: a boolean, whether to use git-worktree to manage projects. | ||
| 4004 | submodules: a boolean, whether to sync submodules associated with the | ||
| 4005 | manifest project. | ||
| 4006 | archive: a boolean, whether to checkout each project as an archive. | ||
| 4007 | See git-archive. | ||
| 4008 | partial_clone: a boolean, whether to perform a partial clone. | ||
| 4009 | depth: an int, how deep of a shallow clone to create. | ||
| 4010 | clone_filter: a string, filter to use with partial_clone. | ||
| 4011 | partial_clone_exclude: a string, comma-delimited list of project | ||
| 4012 | names to exclude from partial clone. | ||
| 4013 | clone_bundle: a boolean, whether to enable /clone.bundle on | ||
| 4014 | HTTP/HTTPS. | ||
| 4015 | git_lfs: a boolean, whether to enable git LFS support. | ||
| 4016 | use_superproject: a boolean, whether to use the manifest | ||
| 4017 | superproject to sync projects. | ||
| 4018 | verbose: a boolean, whether to show all output, rather than only | ||
| 4019 | errors. | ||
| 4020 | current_branch_only: a boolean, whether to only fetch the current | ||
| 4021 | manifest branch from the server. | ||
| 4022 | platform: a string, restrict the checkout to projects with the | ||
| 4023 | specified platform group. | ||
| 4024 | git_event_log: an EventLog, for git tracing. | ||
| 4025 | tags: a boolean, whether to fetch tags. | ||
| 4026 | manifest_name: a string, the name of the manifest file to use. | ||
| 4027 | this_manifest_only: a boolean, whether to only operate on the | ||
| 4028 | current sub manifest. | ||
| 4029 | outer_manifest: a boolean, whether to start at the outermost | ||
| 4030 | manifest. | ||
| 4031 | |||
| 4032 | Returns: | ||
| 4033 | a boolean, whether the sync was successful. | ||
| 4034 | """ | ||
| 4035 | assert _kwargs_only == (), "Sync only accepts keyword arguments." | ||
| 4036 | |||
| 4037 | groups = groups or self.manifest.GetDefaultGroupsStr( | ||
| 4038 | with_platform=False | ||
| 4039 | ) | ||
| 4040 | platform = platform or "auto" | ||
| 4041 | git_event_log = git_event_log or EventLog() | ||
| 4042 | if outer_manifest and self.manifest.is_submanifest: | ||
| 4043 | # In a multi-manifest checkout, use the outer manifest unless we are | ||
| 4044 | # told not to. | ||
| 4045 | return self.client.outer_manifest.manifestProject.Sync( | ||
| 4046 | manifest_url=manifest_url, | ||
| 4047 | manifest_branch=manifest_branch, | ||
| 4048 | standalone_manifest=standalone_manifest, | ||
| 4049 | groups=groups, | ||
| 4050 | platform=platform, | ||
| 4051 | mirror=mirror, | ||
| 4052 | dissociate=dissociate, | ||
| 4053 | reference=reference, | ||
| 4054 | worktree=worktree, | ||
| 4055 | submodules=submodules, | ||
| 4056 | archive=archive, | ||
| 4057 | partial_clone=partial_clone, | ||
| 4058 | clone_filter=clone_filter, | ||
| 4059 | partial_clone_exclude=partial_clone_exclude, | ||
| 4060 | clone_bundle=clone_bundle, | ||
| 4061 | git_lfs=git_lfs, | ||
| 4062 | use_superproject=use_superproject, | ||
| 4063 | verbose=verbose, | ||
| 4064 | current_branch_only=current_branch_only, | ||
| 4065 | tags=tags, | ||
| 4066 | depth=depth, | ||
| 4067 | git_event_log=git_event_log, | ||
| 4068 | manifest_name=manifest_name, | ||
| 4069 | this_manifest_only=this_manifest_only, | ||
| 4070 | outer_manifest=False, | ||
| 4071 | ) | ||
| 4072 | |||
| 4073 | # If repo has already been initialized, we take -u with the absence of | ||
| 4074 | # --standalone-manifest to mean "transition to a standard repo set up", | ||
| 4075 | # which necessitates starting fresh. | ||
| 4076 | # If --standalone-manifest is set, we always tear everything down and | ||
| 4077 | # start anew. | ||
| 4078 | if self.Exists: | ||
| 4079 | was_standalone_manifest = self.config.GetString( | ||
| 4080 | "manifest.standalone" | ||
| 4081 | ) | ||
| 4082 | if was_standalone_manifest and not manifest_url: | ||
| 4083 | print( | ||
| 4084 | "fatal: repo was initialized with a standlone manifest, " | ||
| 4085 | "cannot be re-initialized without --manifest-url/-u" | ||
| 4086 | ) | ||
| 4087 | return False | ||
| 4088 | |||
| 4089 | if standalone_manifest or ( | ||
| 4090 | was_standalone_manifest and manifest_url | ||
| 4091 | ): | ||
| 4092 | self.config.ClearCache() | ||
| 4093 | if self.gitdir and os.path.exists(self.gitdir): | ||
| 4094 | platform_utils.rmtree(self.gitdir) | ||
| 4095 | if self.worktree and os.path.exists(self.worktree): | ||
| 4096 | platform_utils.rmtree(self.worktree) | ||
| 4097 | |||
| 4098 | is_new = not self.Exists | ||
| 4099 | if is_new: | ||
| 4100 | if not manifest_url: | ||
| 4101 | print("fatal: manifest url is required.", file=sys.stderr) | ||
| 4102 | return False | ||
| 4103 | |||
| 4104 | if verbose: | ||
| 4105 | print( | ||
| 4106 | "Downloading manifest from %s" | ||
| 4107 | % (GitConfig.ForUser().UrlInsteadOf(manifest_url),), | ||
| 4108 | file=sys.stderr, | ||
| 4109 | ) | ||
| 4110 | |||
| 4111 | # The manifest project object doesn't keep track of the path on the | ||
| 4112 | # server where this git is located, so let's save that here. | ||
| 4113 | mirrored_manifest_git = None | ||
| 4114 | if reference: | ||
| 4115 | manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:] | ||
| 4116 | mirrored_manifest_git = os.path.join( | ||
| 4117 | reference, manifest_git_path | ||
| 4118 | ) | ||
| 4119 | if not mirrored_manifest_git.endswith(".git"): | ||
| 4120 | mirrored_manifest_git += ".git" | ||
| 4121 | if not os.path.exists(mirrored_manifest_git): | ||
| 4122 | mirrored_manifest_git = os.path.join( | ||
| 4123 | reference, ".repo/manifests.git" | ||
| 4124 | ) | ||
| 4125 | |||
| 4126 | self._InitGitDir(mirror_git=mirrored_manifest_git) | ||
| 4127 | |||
| 4128 | # If standalone_manifest is set, mark the project as "standalone" -- | ||
| 4129 | # we'll still do much of the manifests.git set up, but will avoid actual | ||
| 4130 | # syncs to a remote. | ||
| 4131 | if standalone_manifest: | ||
| 4132 | self.config.SetString("manifest.standalone", manifest_url) | ||
| 4133 | elif not manifest_url and not manifest_branch: | ||
| 4134 | # If -u is set and --standalone-manifest is not, then we're not in | ||
| 4135 | # standalone mode. Otherwise, use config to infer what we were in | ||
| 4136 | # the last init. | ||
| 4137 | standalone_manifest = bool( | ||
| 4138 | self.config.GetString("manifest.standalone") | ||
| 4139 | ) | ||
| 4140 | if not standalone_manifest: | ||
| 4141 | self.config.SetString("manifest.standalone", None) | ||
| 4142 | |||
| 4143 | self._ConfigureDepth(depth) | ||
| 4144 | |||
| 4145 | # Set the remote URL before the remote branch as we might need it below. | ||
| 4146 | if manifest_url: | ||
| 4147 | r = self.GetRemote() | ||
| 4148 | r.url = manifest_url | ||
| 4149 | r.ResetFetch() | ||
| 4150 | r.Save() | ||
| 4151 | |||
| 4152 | if not standalone_manifest: | ||
| 4153 | if manifest_branch: | ||
| 4154 | if manifest_branch == "HEAD": | ||
| 4155 | manifest_branch = self.ResolveRemoteHead() | ||
| 4156 | if manifest_branch is None: | ||
| 4157 | print("fatal: unable to resolve HEAD", file=sys.stderr) | ||
| 4158 | return False | ||
| 4159 | self.revisionExpr = manifest_branch | ||
| 4160 | else: | ||
| 4161 | if is_new: | ||
| 4162 | default_branch = self.ResolveRemoteHead() | ||
| 4163 | if default_branch is None: | ||
| 4164 | # If the remote doesn't have HEAD configured, default to | ||
| 4165 | # master. | ||
| 4166 | default_branch = "refs/heads/master" | ||
| 4167 | self.revisionExpr = default_branch | ||
| 4168 | else: | ||
| 4169 | self.PreSync() | ||
| 4170 | |||
| 4171 | groups = re.split(r"[,\s]+", groups or "") | ||
| 4172 | all_platforms = ["linux", "darwin", "windows"] | ||
| 4173 | platformize = lambda x: "platform-" + x | ||
| 4174 | if platform == "auto": | ||
| 4175 | if not mirror and not self.mirror: | ||
| 4176 | groups.append(platformize(self._platform_name)) | ||
| 4177 | elif platform == "all": | ||
| 4178 | groups.extend(map(platformize, all_platforms)) | ||
| 4179 | elif platform in all_platforms: | ||
| 4180 | groups.append(platformize(platform)) | ||
| 4181 | elif platform != "none": | ||
| 4182 | print("fatal: invalid platform flag", file=sys.stderr) | ||
| 4183 | return False | ||
| 4184 | self.config.SetString("manifest.platform", platform) | ||
| 4185 | |||
| 4186 | groups = [x for x in groups if x] | ||
| 4187 | groupstr = ",".join(groups) | ||
| 4188 | if ( | ||
| 4189 | platform == "auto" | ||
| 4190 | and groupstr == self.manifest.GetDefaultGroupsStr() | ||
| 4191 | ): | ||
| 4192 | groupstr = None | ||
| 4193 | self.config.SetString("manifest.groups", groupstr) | ||
| 4194 | |||
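To make the group handling just above concrete, a small self-contained rendition with assumed inputs (not values read from a real checkout): the requested groups are split on commas/whitespace, a `platform-<os>` group is appended for `platform="auto"`, and empty entries are dropped before the string is stored.

```python
import re

groups = "default,tools"      # what the user passed to `repo init -g`
platform_name = "linux"       # platform.system().lower() on this host

parts = re.split(r"[,\s]+", groups or "")
parts.append("platform-" + platform_name)   # the platform == "auto" case
groupstr = ",".join(x for x in parts if x)  # drop empty entries
print(groupstr)  # default,tools,platform-linux
```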
| 4195 | if reference: | ||
| 4196 | self.config.SetString("repo.reference", reference) | ||
| 4197 | |||
| 4198 | if dissociate: | ||
| 4199 | self.config.SetBoolean("repo.dissociate", dissociate) | ||
| 4200 | |||
| 4201 | if worktree: | ||
| 4202 | if mirror: | ||
| 4203 | print( | ||
| 4204 | "fatal: --mirror and --worktree are incompatible", | ||
| 4205 | file=sys.stderr, | ||
| 4206 | ) | ||
| 4207 | return False | ||
| 4208 | if submodules: | ||
| 4209 | print( | ||
| 4210 | "fatal: --submodules and --worktree are incompatible", | ||
| 4211 | file=sys.stderr, | ||
| 4212 | ) | ||
| 4213 | return False | ||
| 4214 | self.config.SetBoolean("repo.worktree", worktree) | ||
| 4215 | if is_new: | ||
| 4216 | self.use_git_worktrees = True | ||
| 4217 | print("warning: --worktree is experimental!", file=sys.stderr) | ||
| 4218 | |||
| 4219 | if archive: | ||
| 4220 | if is_new: | ||
| 4221 | self.config.SetBoolean("repo.archive", archive) | ||
| 4222 | else: | ||
| 4223 | print( | ||
| 4224 | "fatal: --archive is only supported when initializing a " | ||
| 4225 | "new workspace.", | ||
| 4226 | file=sys.stderr, | ||
| 4227 | ) | ||
| 4228 | print( | ||
| 4229 | "Either delete the .repo folder in this workspace, or " | ||
| 4230 | "initialize in another location.", | ||
| 4231 | file=sys.stderr, | ||
| 4232 | ) | ||
| 4233 | return False | ||
| 4234 | |||
| 4235 | if mirror: | ||
| 4236 | if is_new: | ||
| 4237 | self.config.SetBoolean("repo.mirror", mirror) | ||
| 4238 | else: | ||
| 4239 | print( | ||
| 4240 | "fatal: --mirror is only supported when initializing a new " | ||
| 4241 | "workspace.", | ||
| 4242 | file=sys.stderr, | ||
| 4243 | ) | ||
| 4244 | print( | ||
| 4245 | "Either delete the .repo folder in this workspace, or " | ||
| 4246 | "initialize in another location.", | ||
| 4247 | file=sys.stderr, | ||
| 4248 | ) | ||
| 4249 | return False | ||
| 4250 | |||
| 4251 | if partial_clone is not None: | ||
| 4252 | if mirror: | ||
| 4253 | print( | ||
| 4254 | "fatal: --mirror and --partial-clone are mutually " | ||
| 4255 | "exclusive", | ||
| 4256 | file=sys.stderr, | ||
| 4257 | ) | ||
| 4258 | return False | ||
| 4259 | self.config.SetBoolean("repo.partialclone", partial_clone) | ||
| 4260 | if clone_filter: | ||
| 4261 | self.config.SetString("repo.clonefilter", clone_filter) | ||
| 4262 | elif self.partial_clone: | ||
| 4263 | clone_filter = self.clone_filter | ||
| 4264 | else: | ||
| 4265 | clone_filter = None | ||
| 4266 | |||
| 4267 | if partial_clone_exclude is not None: | ||
| 4268 | self.config.SetString( | ||
| 4269 | "repo.partialcloneexclude", partial_clone_exclude | ||
| 4270 | ) | ||
| 4271 | |||
| 4272 | if clone_bundle is None: | ||
| 4273 | clone_bundle = False if partial_clone else True | ||
| 4274 | else: | ||
| 4275 | self.config.SetBoolean("repo.clonebundle", clone_bundle) | ||
| 4276 | |||
| 4277 | if submodules: | ||
| 4278 | self.config.SetBoolean("repo.submodules", submodules) | ||
| 4279 | |||
| 4280 | if git_lfs is not None: | ||
| 4281 | if git_lfs: | ||
| 4282 | git_require((2, 17, 0), fail=True, msg="Git LFS support") | ||
| 4283 | |||
| 4284 | self.config.SetBoolean("repo.git-lfs", git_lfs) | ||
| 4285 | if not is_new: | ||
| 4286 | print( | ||
| 4287 | "warning: Changing --git-lfs settings will only affect new " | ||
| 4288 | "project checkouts.\n" | ||
| 4289 | " Existing projects will require manual updates.\n", | ||
| 4290 | file=sys.stderr, | ||
| 4291 | ) | ||
| 4292 | |||
| 4293 | if use_superproject is not None: | ||
| 4294 | self.config.SetBoolean("repo.superproject", use_superproject) | ||
| 4295 | |||
| 4296 | if not standalone_manifest: | ||
| 4297 | success = self.Sync_NetworkHalf( | ||
| 4298 | is_new=is_new, | ||
| 4299 | quiet=not verbose, | ||
| 4300 | verbose=verbose, | ||
| 4301 | clone_bundle=clone_bundle, | ||
| 4302 | current_branch_only=current_branch_only, | ||
| 4303 | tags=tags, | ||
| 4304 | submodules=submodules, | ||
| 4305 | clone_filter=clone_filter, | ||
| 4306 | partial_clone_exclude=self.manifest.PartialCloneExclude, | ||
| 4307 | ).success | ||
| 4308 | if not success: | ||
| 4309 | r = self.GetRemote() | ||
| 4310 | print( | ||
| 4311 | "fatal: cannot obtain manifest %s" % r.url, file=sys.stderr | ||
| 4312 | ) | ||
| 4313 | |||
| 4314 | # Better delete the manifest git dir if we created it; otherwise | ||
| 4315 | # next time (when user fixes problems) we won't go through the | ||
| 4316 | # "is_new" logic. | ||
| 4317 | if is_new: | ||
| 4318 | platform_utils.rmtree(self.gitdir) | ||
| 4319 | return False | ||
| 4320 | |||
| 4321 | if manifest_branch: | ||
| 4322 | self.MetaBranchSwitch(submodules=submodules) | ||
| 4323 | |||
| 4324 | syncbuf = SyncBuffer(self.config) | ||
| 4325 | self.Sync_LocalHalf(syncbuf, submodules=submodules) | ||
| 4326 | syncbuf.Finish() | ||
| 4327 | |||
| 4328 | if is_new or self.CurrentBranch is None: | ||
| 4329 | if not self.StartBranch("default"): | ||
| 4330 | print( | ||
| 4331 | "fatal: cannot create default in manifest", | ||
| 4332 | file=sys.stderr, | ||
| 4333 | ) | ||
| 4334 | return False | ||
| 4335 | |||
| 4336 | if not manifest_name: | ||
| 4337 | print("fatal: manifest name (-m) is required.", file=sys.stderr) | ||
| 4338 | return False | ||
| 4339 | |||
| 4340 | elif is_new: | ||
| 4341 | # This is a new standalone manifest. | ||
| 4342 | manifest_name = "default.xml" | ||
| 4343 | manifest_data = fetch.fetch_file(manifest_url, verbose=verbose) | ||
| 4344 | dest = os.path.join(self.worktree, manifest_name) | ||
| 4345 | os.makedirs(os.path.dirname(dest), exist_ok=True) | ||
| 4346 | with open(dest, "wb") as f: | ||
| 4347 | f.write(manifest_data) | ||
| 4348 | |||
| 4349 | try: | ||
| 4350 | self.manifest.Link(manifest_name) | ||
| 4351 | except ManifestParseError as e: | ||
| 4352 | print( | ||
| 4353 | "fatal: manifest '%s' not available" % manifest_name, | ||
| 4354 | file=sys.stderr, | ||
| 4355 | ) | ||
| 4356 | print("fatal: %s" % str(e), file=sys.stderr) | ||
| 4357 | return False | ||
| 4358 | |||
| 4359 | if not this_manifest_only: | ||
| 4360 | for submanifest in self.manifest.submanifests.values(): | ||
| 4361 | spec = submanifest.ToSubmanifestSpec() | ||
| 4362 | submanifest.repo_client.manifestProject.Sync( | ||
| 4363 | manifest_url=spec.manifestUrl, | ||
| 4364 | manifest_branch=spec.revision, | ||
| 4365 | standalone_manifest=standalone_manifest, | ||
| 4366 | groups=self.manifest_groups, | ||
| 4367 | platform=platform, | ||
| 4368 | mirror=mirror, | ||
| 4369 | dissociate=dissociate, | ||
| 4370 | reference=reference, | ||
| 4371 | worktree=worktree, | ||
| 4372 | submodules=submodules, | ||
| 4373 | archive=archive, | ||
| 4374 | partial_clone=partial_clone, | ||
| 4375 | clone_filter=clone_filter, | ||
| 4376 | partial_clone_exclude=partial_clone_exclude, | ||
| 4377 | clone_bundle=clone_bundle, | ||
| 4378 | git_lfs=git_lfs, | ||
| 4379 | use_superproject=use_superproject, | ||
| 4380 | verbose=verbose, | ||
| 4381 | current_branch_only=current_branch_only, | ||
| 4382 | tags=tags, | ||
| 4383 | depth=depth, | ||
| 4384 | git_event_log=git_event_log, | ||
| 4385 | manifest_name=spec.manifestName, | ||
| 4386 | this_manifest_only=False, | ||
| 4387 | outer_manifest=False, | ||
| 4388 | ) | ||
| 4389 | |||
| 4390 | # Lastly, if the manifest has a <superproject> then have the | ||
| 4391 | # superproject sync it (if it will be used). | ||
| 4392 | if git_superproject.UseSuperproject(use_superproject, self.manifest): | ||
| 4393 | sync_result = self.manifest.superproject.Sync(git_event_log) | ||
| 4394 | if not sync_result.success: | ||
| 4395 | submanifest = "" | ||
| 4396 | if self.manifest.path_prefix: | ||
| 4397 | submanifest = f"for {self.manifest.path_prefix} " | ||
| 4398 | print( | ||
| 4399 | f"warning: git update of superproject {submanifest}failed, " | ||
| 4400 | "repo sync will not use superproject to fetch source; " | ||
| 4401 | "while this error is not fatal, and you can continue to " | ||
| 4402 | "run repo sync, please run repo init with the " | ||
| 4403 | "--no-use-superproject option to stop seeing this warning", | ||
| 4404 | file=sys.stderr, | ||
| 4405 | ) | ||
| 4406 | if sync_result.fatal and use_superproject is not None: | ||
| 4407 | return False | ||
| 4408 | |||
| 4409 | return True | ||
| 4410 | |||
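A hedged sketch of how a caller such as `repo init` might drive the `Sync` method above; `manifest_project` and the `opt.*` attribute names are assumptions standing in for the real command-line plumbing, but the keyword arguments are the ones documented in the signature:

```python
import sys

success = manifest_project.Sync(
    manifest_url=opt.manifest_url,
    manifest_branch=opt.manifest_branch,
    groups=opt.groups,
    platform=opt.platform,
    mirror=opt.mirror,
    depth=opt.depth,
    clone_filter=opt.clone_filter,
    use_superproject=opt.use_superproject,
    verbose=opt.verbose,
    manifest_name=opt.manifest_name,
)
if not success:
    sys.exit(1)
```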
| 4411 | def _ConfigureDepth(self, depth): | ||
| 4412 | """Configure the depth we'll sync down. | ||
| 4413 | |||
| 4414 | Args: | ||
| 4415 | depth: an int, how deep of a partial clone to create. | ||
| 4416 | """ | ||
| 4417 | # Opt.depth will be non-None if user actually passed --depth to repo | ||
| 4418 | # init. | ||
| 4419 | if depth is not None: | ||
| 4420 | if depth > 0: | ||
| 4421 | # Positive values will set the depth. | ||
| 4422 | depth = str(depth) | ||
| 4423 | else: | ||
| 4424 | # Negative numbers will clear the depth; passing None to | ||
| 4425 | # SetString will do that. | ||
| 4426 | depth = None | ||
| 4427 | |||
| 4428 | # We store the depth in the main manifest project. | ||
| 4429 | self.config.SetString("repo.depth", depth) | ||
