Diffstat (limited to 'bitbake/lib/bb/fetch2/git.py')
-rw-r--r-- | bitbake/lib/bb/fetch2/git.py | 459 |
1 file changed, 259 insertions, 200 deletions
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..14ec45a3f6 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -9,15 +9,6 @@ Supported SRC_URI options are: | |||
9 | - branch | 9 | - branch |
10 | The git branch to retrieve from. The default is "master" | 10 | The git branch to retrieve from. The default is "master" |
11 | 11 | ||
12 | This option also supports multiple branch fetching, with branches | ||
13 | separated by commas. In multiple branches case, the name option | ||
14 | must have the same number of names to match the branches, which is | ||
15 | used to specify the SRC_REV for the branch | ||
16 | e.g: | ||
17 | SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY" | ||
18 | SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx" | ||
19 | SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY" | ||
20 | |||
21 | - tag | 12 | - tag |
22 | The git tag to retrieve. The default is "master" | 13 | The git tag to retrieve. The default is "master" |
23 | 14 | ||
@@ -81,6 +72,7 @@ import shlex | |||
81 | import shutil | 72 | import shutil |
82 | import subprocess | 73 | import subprocess |
83 | import tempfile | 74 | import tempfile |
75 | import urllib | ||
84 | import bb | 76 | import bb |
85 | import bb.progress | 77 | import bb.progress |
86 | from contextlib import contextmanager | 78 | from contextlib import contextmanager |
@@ -190,14 +182,11 @@ class Git(FetchMethod): | |||
190 | ud.bareclone = ud.parm.get("bareclone","0") == "1" | 182 | ud.bareclone = ud.parm.get("bareclone","0") == "1" |
191 | if ud.bareclone: | 183 | if ud.bareclone: |
192 | ud.nocheckout = 1 | 184 | ud.nocheckout = 1 |
193 | 185 | ||
194 | ud.unresolvedrev = {} | 186 | ud.unresolvedrev = "" |
195 | branches = ud.parm.get("branch", "").split(',') | 187 | ud.branch = ud.parm.get("branch", "") |
196 | if branches == [""] and not ud.nobranch: | 188 | if not ud.branch and not ud.nobranch: |
197 | bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url) | 189 | raise bb.fetch2.ParameterError("The url does not set any branch parameter or set nobranch=1.", ud.url) |
198 | branches = ["master"] | ||
199 | if len(branches) != len(ud.names): | ||
200 | raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) | ||
201 | 190 | ||
202 | ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" | 191 | ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" |
203 | 192 | ||
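The hunk above replaces the old per-name branch bookkeeping with a single ud.branch and turns a missing branch= parameter into a hard error unless nobranch=1 is given. A minimal standalone sketch of that check, using a plain dict and ValueError in place of the real FetchData and bb.fetch2.ParameterError:

    def validate_branch(parm, url):
        """Sketch: require a branch parameter unless nobranch=1 is set."""
        nobranch = parm.get("nobranch", "0") == "1"
        branch = parm.get("branch", "")
        if not branch and not nobranch:
            raise ValueError("URL %s sets neither a branch parameter nor nobranch=1" % url)
        return branch

    # A url with branch=main is accepted; one without any branch raises.
    print(validate_branch({"branch": "main"}, "git://host/repo;protocol=https"))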
@@ -207,8 +196,11 @@ class Git(FetchMethod): | |||
207 | if ud.bareclone: | 196 | if ud.bareclone: |
208 | ud.cloneflags += " --mirror" | 197 | ud.cloneflags += " --mirror" |
209 | 198 | ||
199 | ud.shallow_skip_fast = False | ||
210 | ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" | 200 | ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" |
211 | ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() | 201 | ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() |
202 | if 'tag' in ud.parm: | ||
203 | ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag']) | ||
212 | 204 | ||
213 | depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") | 205 | depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") |
214 | if depth_default is not None: | 206 | if depth_default is not None: |
@@ -225,32 +217,27 @@ class Git(FetchMethod): | |||
225 | 217 | ||
226 | revs_default = d.getVar("BB_GIT_SHALLOW_REVS") | 218 | revs_default = d.getVar("BB_GIT_SHALLOW_REVS") |
227 | ud.shallow_revs = [] | 219 | ud.shallow_revs = [] |
228 | ud.branches = {} | 220 | |
229 | for pos, name in enumerate(ud.names): | 221 | ud.unresolvedrev = ud.branch |
230 | branch = branches[pos] | 222 | |
231 | ud.branches[name] = branch | 223 | shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name) |
232 | ud.unresolvedrev[name] = branch | 224 | if shallow_depth is not None: |
233 | 225 | try: | |
234 | shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) | 226 | shallow_depth = int(shallow_depth or 0) |
235 | if shallow_depth is not None: | 227 | except ValueError: |
236 | try: | 228 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth)) |
237 | shallow_depth = int(shallow_depth or 0) | 229 | else: |
238 | except ValueError: | 230 | if shallow_depth < 0: |
239 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) | 231 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth)) |
240 | else: | 232 | ud.shallow_depths[ud.name] = shallow_depth |
241 | if shallow_depth < 0: | 233 | |
242 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) | 234 | revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name) |
243 | ud.shallow_depths[name] = shallow_depth | 235 | if revs is not None: |
244 | 236 | ud.shallow_revs.extend(revs.split()) | |
245 | revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) | 237 | elif revs_default is not None: |
246 | if revs is not None: | 238 | ud.shallow_revs.extend(revs_default.split()) |
247 | ud.shallow_revs.extend(revs.split()) | 239 | |
248 | elif revs_default is not None: | 240 | if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0: |
249 | ud.shallow_revs.extend(revs_default.split()) | ||
250 | |||
251 | if (ud.shallow and | ||
252 | not ud.shallow_revs and | ||
253 | all(ud.shallow_depths[n] == 0 for n in ud.names)): | ||
254 | # Shallow disabled for this URL | 241 | # Shallow disabled for this URL |
255 | ud.shallow = False | 242 | ud.shallow = False |
256 | 243 | ||
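With only one name per URL, the per-branch loops collapse into a single lookup keyed by ud.name. The depth rules stay the same: an unset variable falls back to the default, anything else must parse as a non-negative integer, and an all-zero depth with no shallow revs disables shallow mode for the URL. A small sketch of just the depth validation, with plain values standing in for d.getVar:

    def parse_shallow_depth(value, name, default=0):
        """Sketch: validate a BB_GIT_SHALLOW_DEPTH_<name> value. None means the
        variable is unset (use the default); anything else must be a non-negative int."""
        if value is None:
            return default
        try:
            depth = int(value or 0)   # an empty string counts as 0
        except ValueError:
            raise ValueError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, value))
        if depth < 0:
            raise ValueError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, value))
        return depth

    print(parse_shallow_depth("5", "default"))    # -> 5
    print(parse_shallow_depth(None, "default"))   # -> 0, i.e. shallow not requested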
@@ -259,10 +246,9 @@ class Git(FetchMethod): | |||
259 | # rev of this repository. This will get resolved into a revision | 246 | # rev of this repository. This will get resolved into a revision |
260 | # later. If an actual revision happens to have also been provided | 247 | # later. If an actual revision happens to have also been provided |
261 | # then this setting will be overridden. | 248 | # then this setting will be overridden. |
262 | for name in ud.names: | 249 | ud.unresolvedrev = 'HEAD' |
263 | ud.unresolvedrev[name] = 'HEAD' | ||
264 | 250 | ||
265 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all" | 251 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin" |
266 | 252 | ||
267 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" | 253 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" |
268 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable | 254 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable |
@@ -270,12 +256,11 @@ class Git(FetchMethod): | |||
270 | 256 | ||
271 | ud.setup_revisions(d) | 257 | ud.setup_revisions(d) |
272 | 258 | ||
273 | for name in ud.names: | 259 | # Ensure any revision that doesn't look like a SHA-1 is translated into one |
274 | # Ensure any revision that doesn't look like a SHA-1 is translated into one | 260 | if not sha1_re.match(ud.revision or ''): |
275 | if not sha1_re.match(ud.revisions[name] or ''): | 261 | if ud.revision: |
276 | if ud.revisions[name]: | 262 | ud.unresolvedrev = ud.revision |
277 | ud.unresolvedrev[name] = ud.revisions[name] | 263 | ud.revision = self.latest_revision(ud, d, ud.name) |
278 | ud.revisions[name] = self.latest_revision(ud, d, name) | ||
279 | 264 | ||
280 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) | 265 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) |
281 | if gitsrcname.startswith('.'): | 266 | if gitsrcname.startswith('.'): |
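After setup_revisions(), any revision that is not already a full 40-character SHA-1 is kept as the single unresolvedrev and resolved later. A sketch of that classification, assuming a pattern equivalent to the module's sha1_re and a placeholder resolver standing in for latest_revision():

    import re

    # Approximately the pattern the fetcher uses for a full SHA-1 object name.
    sha1_re = re.compile(r'^[0-9a-f]{40}$')

    def resolve_revision(revision, resolver):
        """Sketch: pass pinned SHA-1s through unchanged; otherwise remember the
        name as the unresolved rev and ask a resolver (ls-remote) for the SHA-1."""
        if revision and sha1_re.match(revision):
            return revision, None
        unresolvedrev = revision or "HEAD"
        return resolver(unresolvedrev), unresolvedrev

    fake_resolver = lambda ref: "0123456789abcdef0123456789abcdef01234567"
    print(resolve_revision("v1.2", fake_resolver))        # tag name gets resolved
    print(resolve_revision("0123456789abcdef0123456789abcdef01234567", fake_resolver))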
@@ -286,8 +271,7 @@ class Git(FetchMethod): | |||
286 | # upstream repo in the future, the mirror will remain intact and still | 271 | # upstream repo in the future, the mirror will remain intact and still |
287 | # contain the revision | 272 | # contain the revision |
288 | if ud.rebaseable: | 273 | if ud.rebaseable: |
289 | for name in ud.names: | 274 | gitsrcname = gitsrcname + '_' + ud.revision |
290 | gitsrcname = gitsrcname + '_' + ud.revisions[name] | ||
291 | 275 | ||
292 | dl_dir = d.getVar("DL_DIR") | 276 | dl_dir = d.getVar("DL_DIR") |
293 | gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") | 277 | gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") |
@@ -305,15 +289,14 @@ class Git(FetchMethod): | |||
305 | if ud.shallow_revs: | 289 | if ud.shallow_revs: |
306 | tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) | 290 | tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) |
307 | 291 | ||
308 | for name, revision in sorted(ud.revisions.items()): | 292 | tarballname = "%s_%s" % (tarballname, ud.revision[:7]) |
309 | tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) | 293 | depth = ud.shallow_depths[ud.name] |
310 | depth = ud.shallow_depths[name] | 294 | if depth: |
311 | if depth: | 295 | tarballname = "%s-%s" % (tarballname, depth) |
312 | tarballname = "%s-%s" % (tarballname, depth) | ||
313 | 296 | ||
314 | shallow_refs = [] | 297 | shallow_refs = [] |
315 | if not ud.nobranch: | 298 | if not ud.nobranch: |
316 | shallow_refs.extend(ud.branches.values()) | 299 | shallow_refs.append(ud.branch) |
317 | if ud.shallow_extra_refs: | 300 | if ud.shallow_extra_refs: |
318 | shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) | 301 | shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) |
319 | if shallow_refs: | 302 | if shallow_refs: |
@@ -338,18 +321,19 @@ class Git(FetchMethod): | |||
338 | return True | 321 | return True |
339 | if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): | 322 | if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): |
340 | return True | 323 | return True |
341 | for name in ud.names: | 324 | if not self._contains_ref(ud, d, ud.name, ud.clonedir): |
342 | if not self._contains_ref(ud, d, name, ud.clonedir): | 325 | return True |
343 | return True | ||
344 | return False | 326 | return False |
345 | 327 | ||
346 | def lfs_need_update(self, ud, d): | 328 | def lfs_need_update(self, ud, d): |
329 | if not self._need_lfs(ud): | ||
330 | return False | ||
331 | |||
347 | if self.clonedir_need_update(ud, d): | 332 | if self.clonedir_need_update(ud, d): |
348 | return True | 333 | return True |
349 | 334 | ||
350 | for name in ud.names: | 335 | if not self._lfs_objects_downloaded(ud, d, ud.clonedir): |
351 | if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir): | 336 | return True |
352 | return True | ||
353 | return False | 337 | return False |
354 | 338 | ||
355 | def clonedir_need_shallow_revs(self, ud, d): | 339 | def clonedir_need_shallow_revs(self, ud, d): |
@@ -366,6 +350,13 @@ class Git(FetchMethod): | |||
366 | def tarball_need_update(self, ud): | 350 | def tarball_need_update(self, ud): |
367 | return ud.write_tarballs and not os.path.exists(ud.fullmirror) | 351 | return ud.write_tarballs and not os.path.exists(ud.fullmirror) |
368 | 352 | ||
353 | def update_mirror_links(self, ud, origud): | ||
354 | super().update_mirror_links(ud, origud) | ||
355 | # When using shallow mode, add a symlink to the original fullshallow | ||
356 | # path to ensure a valid symlink even in the `PREMIRRORS` case | ||
357 | if ud.shallow and not os.path.exists(origud.fullshallow): | ||
358 | self.ensure_symlink(ud.localpath, origud.fullshallow) | ||
359 | |||
369 | def try_premirror(self, ud, d): | 360 | def try_premirror(self, ud, d): |
370 | # If we don't do this, updating an existing checkout with only premirrors | 361 | # If we don't do this, updating an existing checkout with only premirrors |
371 | # is not possible | 362 | # is not possible |
@@ -446,6 +437,24 @@ class Git(FetchMethod): | |||
446 | if ud.proto.lower() != 'file': | 437 | if ud.proto.lower() != 'file': |
447 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) | 438 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) |
448 | progresshandler = GitProgressHandler(d) | 439 | progresshandler = GitProgressHandler(d) |
440 | |||
441 | # Try creating a fast initial shallow clone | ||
442 | # Enabling ud.shallow_skip_fast will skip this | ||
443 | # If the Git error "Server does not allow request for unadvertised object" | ||
444 | # occurs, shallow_skip_fast is enabled automatically. | ||
445 | # This may happen if the Git server does not allow the request | ||
446 | # or if the Git client has issues with this functionality. | ||
447 | if ud.shallow and not ud.shallow_skip_fast: | ||
448 | try: | ||
449 | self.clone_shallow_with_tarball(ud, d) | ||
450 | # When the shallow clone has succeeded, use the shallow tarball | ||
451 | ud.localpath = ud.fullshallow | ||
452 | return | ||
453 | except: | ||
454 | logger.warning("Creating fast initial shallow clone failed, try initial regular clone now.") | ||
455 | |||
456 | # When skipping fast initial shallow or the fast initial shallow clone failed: | ||
457 | # Try again with an initial regular clone | ||
449 | runfetchcmd(clone_cmd, d, log=progresshandler) | 458 | runfetchcmd(clone_cmd, d, log=progresshandler) |
450 | 459 | ||
451 | # Update the checkout if needed | 460 | # Update the checkout if needed |
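The download path now attempts a fast initial shallow clone and, if that fails (for example with "Server does not allow request for unadvertised object"), falls back to the regular full clone. Reduced to its control flow, the pattern is roughly the sketch below; clone_shallow, clone_full and the logger are placeholders rather than the real fetcher methods:

    import logging

    logger = logging.getLogger("fetch.git")

    def fetch_repo(shallow, skip_fast, clone_shallow, clone_full):
        """Sketch: prefer the fast shallow clone, but never let its failure abort
        the fetch; fall back to a regular full clone instead."""
        if shallow and not skip_fast:
            try:
                clone_shallow()
                return "shallow"    # the shallow tarball becomes the local path
            except Exception:
                logger.warning("Fast initial shallow clone failed, "
                               "falling back to a regular clone.")
        clone_full()
        return "full"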
@@ -473,9 +482,8 @@ class Git(FetchMethod): | |||
473 | if exc.errno != errno.ENOENT: | 482 | if exc.errno != errno.ENOENT: |
474 | raise | 483 | raise |
475 | 484 | ||
476 | for name in ud.names: | 485 | if not self._contains_ref(ud, d, ud.name, ud.clonedir): |
477 | if not self._contains_ref(ud, d, name, ud.clonedir): | 486 | raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch)) |
478 | raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name])) | ||
479 | 487 | ||
480 | if ud.shallow and ud.write_shallow_tarballs: | 488 | if ud.shallow and ud.write_shallow_tarballs: |
481 | missing_rev = self.clonedir_need_shallow_revs(ud, d) | 489 | missing_rev = self.clonedir_need_shallow_revs(ud, d) |
@@ -483,128 +491,168 @@ class Git(FetchMethod): | |||
483 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) | 491 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) |
484 | 492 | ||
485 | if self.lfs_need_update(ud, d): | 493 | if self.lfs_need_update(ud, d): |
486 | # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching | 494 | self.lfs_fetch(ud, d, ud.clonedir, ud.revision) |
487 | # of all LFS blobs needed at the srcrev. | ||
488 | # | ||
489 | # It would be nice to just do this inline here by running 'git-lfs fetch' | ||
490 | # on the bare clonedir, but that operation requires a working copy on some | ||
491 | # releases of Git LFS. | ||
492 | with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir: | ||
493 | # Do the checkout. This implicitly involves a Git LFS fetch. | ||
494 | Git.unpack(self, ud, tmpdir, d) | ||
495 | |||
496 | # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into | ||
497 | # the bare clonedir. | ||
498 | # | ||
499 | # As this procedure is invoked repeatedly on incremental fetches as | ||
500 | # a recipe's SRCREV is bumped throughout its lifetime, this will | ||
501 | # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs | ||
502 | # corresponding to all the blobs reachable from the different revs | ||
503 | # fetched across time. | ||
504 | # | ||
505 | # Only do this if the unpack resulted in a .git/lfs directory being | ||
506 | # created; this only happens if at least one blob needed to be | ||
507 | # downloaded. | ||
508 | if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")): | ||
509 | runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir) | ||
510 | |||
511 | def build_mirror_data(self, ud, d): | ||
512 | 495 | ||
513 | # Create as a temp file and move atomically into position to avoid races | 496 | def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None): |
514 | @contextmanager | 497 | """Helper method for fetching Git LFS data""" |
515 | def create_atomic(filename): | 498 | try: |
516 | fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) | 499 | if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision): |
517 | try: | 500 | self._ensure_git_lfs(d, ud) |
518 | yield tfile | 501 | |
519 | umask = os.umask(0o666) | 502 | # Using worktree with the revision because .lfsconfig may exist |
520 | os.umask(umask) | 503 | worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision) |
521 | os.chmod(tfile, (0o666 & ~umask)) | 504 | runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir) |
522 | os.rename(tfile, filename) | 505 | lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "") |
523 | finally: | 506 | runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt")) |
524 | os.close(fd) | 507 | worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd |
508 | runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir) | ||
509 | except: | ||
510 | logger.warning("Fetching LFS did not succeed.") | ||
511 | |||
512 | @contextmanager | ||
513 | def create_atomic(self, filename): | ||
514 | """Create as a temp file and move atomically into position to avoid races""" | ||
515 | fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) | ||
516 | try: | ||
517 | yield tfile | ||
518 | umask = os.umask(0o666) | ||
519 | os.umask(umask) | ||
520 | os.chmod(tfile, (0o666 & ~umask)) | ||
521 | os.rename(tfile, filename) | ||
522 | finally: | ||
523 | os.close(fd) | ||
525 | 524 | ||
525 | def build_mirror_data(self, ud, d): | ||
526 | if ud.shallow and ud.write_shallow_tarballs: | 526 | if ud.shallow and ud.write_shallow_tarballs: |
527 | if not os.path.exists(ud.fullshallow): | 527 | if not os.path.exists(ud.fullshallow): |
528 | if os.path.islink(ud.fullshallow): | 528 | if os.path.islink(ud.fullshallow): |
529 | os.unlink(ud.fullshallow) | 529 | os.unlink(ud.fullshallow) |
530 | tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | 530 | self.clone_shallow_with_tarball(ud, d) |
531 | shallowclone = os.path.join(tempdir, 'git') | ||
532 | try: | ||
533 | self.clone_shallow_local(ud, shallowclone, d) | ||
534 | |||
535 | logger.info("Creating tarball of git repository") | ||
536 | with create_atomic(ud.fullshallow) as tfile: | ||
537 | runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone) | ||
538 | runfetchcmd("touch %s.done" % ud.fullshallow, d) | ||
539 | finally: | ||
540 | bb.utils.remove(tempdir, recurse=True) | ||
541 | elif ud.write_tarballs and not os.path.exists(ud.fullmirror): | 531 | elif ud.write_tarballs and not os.path.exists(ud.fullmirror): |
542 | if os.path.islink(ud.fullmirror): | 532 | if os.path.islink(ud.fullmirror): |
543 | os.unlink(ud.fullmirror) | 533 | os.unlink(ud.fullmirror) |
544 | 534 | ||
545 | logger.info("Creating tarball of git repository") | 535 | logger.info("Creating tarball of git repository") |
546 | with create_atomic(ud.fullmirror) as tfile: | 536 | with self.create_atomic(ud.fullmirror) as tfile: |
547 | mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, | 537 | mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, |
548 | quiet=True, workdir=ud.clonedir) | 538 | quiet=True, workdir=ud.clonedir) |
549 | runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." | 539 | runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." |
550 | % (tfile, mtime), d, workdir=ud.clonedir) | 540 | % (tfile, mtime), d, workdir=ud.clonedir) |
551 | runfetchcmd("touch %s.done" % ud.fullmirror, d) | 541 | runfetchcmd("touch %s.done" % ud.fullmirror, d) |
552 | 542 | ||
543 | def clone_shallow_with_tarball(self, ud, d): | ||
544 | ret = False | ||
545 | tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | ||
546 | shallowclone = os.path.join(tempdir, 'git') | ||
547 | try: | ||
548 | try: | ||
549 | self.clone_shallow_local(ud, shallowclone, d) | ||
550 | except: | ||
551 | logger.warning("Fast shallow clone failed, try to skip fast mode now.") | ||
552 | bb.utils.remove(tempdir, recurse=True) | ||
553 | os.mkdir(tempdir) | ||
554 | ud.shallow_skip_fast = True | ||
555 | self.clone_shallow_local(ud, shallowclone, d) | ||
556 | logger.info("Creating tarball of git repository") | ||
557 | with self.create_atomic(ud.fullshallow) as tfile: | ||
558 | runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone) | ||
559 | runfetchcmd("touch %s.done" % ud.fullshallow, d) | ||
560 | ret = True | ||
561 | finally: | ||
562 | bb.utils.remove(tempdir, recurse=True) | ||
563 | |||
564 | return ret | ||
565 | |||
553 | def clone_shallow_local(self, ud, dest, d): | 566 | def clone_shallow_local(self, ud, dest, d): |
554 | """Clone the repo and make it shallow. | 567 | """ |
568 | Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default): | ||
569 | - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev | ||
570 | - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev | ||
571 | """ | ||
555 | 572 | ||
556 | The upstream url of the new clone isn't set at this time, as it'll be | 573 | progresshandler = GitProgressHandler(d) |
557 | set correctly when unpacked.""" | 574 | repourl = self._get_repo_url(ud) |
558 | runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) | 575 | bb.utils.mkdirhier(dest) |
576 | init_cmd = "%s init -q" % ud.basecmd | ||
577 | if ud.bareclone: | ||
578 | init_cmd += " --bare" | ||
579 | runfetchcmd(init_cmd, d, workdir=dest) | ||
580 | # Use repourl when creating a fast initial shallow clone | ||
581 | # Prefer already existing full bare clones if available | ||
582 | if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir): | ||
583 | remote = shlex.quote(repourl) | ||
584 | else: | ||
585 | remote = ud.clonedir | ||
586 | runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest) | ||
559 | 587 | ||
560 | to_parse, shallow_branches = [], [] | 588 | # Check the histories which should be excluded |
561 | for name in ud.names: | 589 | shallow_exclude = '' |
562 | revision = ud.revisions[name] | 590 | for revision in ud.shallow_revs: |
563 | depth = ud.shallow_depths[name] | 591 | shallow_exclude += " --shallow-exclude=%s" % revision |
564 | if depth: | ||
565 | to_parse.append('%s~%d^{}' % (revision, depth - 1)) | ||
566 | 592 | ||
567 | # For nobranch, we need a ref, otherwise the commits will be | 593 | revision = ud.revision |
568 | # removed, and for non-nobranch, we truncate the branch to our | 594 | depth = ud.shallow_depths[ud.name] |
569 | # srcrev, to avoid keeping unnecessary history beyond that. | ||
570 | branch = ud.branches[name] | ||
571 | if ud.nobranch: | ||
572 | ref = "refs/shallow/%s" % name | ||
573 | elif ud.bareclone: | ||
574 | ref = "refs/heads/%s" % branch | ||
575 | else: | ||
576 | ref = "refs/remotes/origin/%s" % branch | ||
577 | 595 | ||
578 | shallow_branches.append(ref) | 596 | # The --depth and --shallow-exclude can't be used together |
579 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) | 597 | if depth and shallow_exclude: |
598 | raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.") | ||
599 | |||
600 | # For nobranch, we need a ref, otherwise the commits will be | ||
601 | # removed, and for non-nobranch, we truncate the branch to our | ||
602 | # srcrev, to avoid keeping unnecessary history beyond that. | ||
603 | branch = ud.branch | ||
604 | if ud.nobranch: | ||
605 | ref = "refs/shallow/%s" % ud.name | ||
606 | elif ud.bareclone: | ||
607 | ref = "refs/heads/%s" % branch | ||
608 | else: | ||
609 | ref = "refs/remotes/origin/%s" % branch | ||
610 | |||
611 | fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision) | ||
612 | if depth: | ||
613 | fetch_cmd += " --depth %s" % depth | ||
614 | |||
615 | if shallow_exclude: | ||
616 | fetch_cmd += shallow_exclude | ||
580 | 617 | ||
581 | # Map srcrev+depths to revisions | 618 | # Advertise the revision for lower version git such as 2.25.1: |
582 | parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) | 619 | # error: Server does not allow request for unadvertised object. |
620 | # The ud.clonedir is a local temporary dir, will be removed when | ||
621 | # fetch is done, so we can do anything on it. | ||
622 | adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision) | ||
623 | if ud.shallow_skip_fast: | ||
624 | runfetchcmd(adv_cmd, d, workdir=ud.clonedir) | ||
583 | 625 | ||
584 | # Resolve specified revisions | 626 | runfetchcmd(fetch_cmd, d, workdir=dest) |
585 | parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) | 627 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) |
586 | shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() | 628 | # Fetch Git LFS data |
629 | self.lfs_fetch(ud, d, dest, ud.revision) | ||
587 | 630 | ||
588 | # Apply extra ref wildcards | 631 | # Apply extra ref wildcards |
589 | all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, | 632 | all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \ |
590 | d, workdir=dest).splitlines() | 633 | d, workdir=dest).splitlines() |
634 | all_refs = [] | ||
635 | for line in all_refs_remote: | ||
636 | all_refs.append(line.split()[-1]) | ||
637 | extra_refs = [] | ||
591 | for r in ud.shallow_extra_refs: | 638 | for r in ud.shallow_extra_refs: |
592 | if not ud.bareclone: | 639 | if not ud.bareclone: |
593 | r = r.replace('refs/heads/', 'refs/remotes/origin/') | 640 | r = r.replace('refs/heads/', 'refs/remotes/origin/') |
594 | 641 | ||
595 | if '*' in r: | 642 | if '*' in r: |
596 | matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) | 643 | matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) |
597 | shallow_branches.extend(matches) | 644 | extra_refs.extend(matches) |
598 | else: | 645 | else: |
599 | shallow_branches.append(r) | 646 | extra_refs.append(r) |
600 | 647 | ||
601 | # Make the repository shallow | 648 | for ref in extra_refs: |
602 | shallow_cmd = [self.make_shallow_path, '-s'] | 649 | ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '') |
603 | for b in shallow_branches: | 650 | runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest) |
604 | shallow_cmd.append('-r') | 651 | revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest) |
605 | shallow_cmd.append(b) | 652 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) |
606 | shallow_cmd.extend(shallow_revisions) | 653 | |
607 | runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest) | 654 | # The url is local ud.clonedir, set it to upstream one |
655 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest) | ||
608 | 656 | ||
609 | def unpack(self, ud, destdir, d): | 657 | def unpack(self, ud, destdir, d): |
610 | """ unpack the downloaded src to destdir""" | 658 | """ unpack the downloaded src to destdir""" |
@@ -612,7 +660,7 @@ class Git(FetchMethod): | |||
612 | subdir = ud.parm.get("subdir") | 660 | subdir = ud.parm.get("subdir") |
613 | subpath = ud.parm.get("subpath") | 661 | subpath = ud.parm.get("subpath") |
614 | readpathspec = "" | 662 | readpathspec = "" |
615 | def_destsuffix = "git/" | 663 | def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/" |
616 | 664 | ||
617 | if subpath: | 665 | if subpath: |
618 | readpathspec = ":%s" % subpath | 666 | readpathspec = ":%s" % subpath |
@@ -664,30 +712,43 @@ class Git(FetchMethod): | |||
664 | if not source_found: | 712 | if not source_found: |
665 | raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) | 713 | raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) |
666 | 714 | ||
715 | # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag | ||
716 | # matches the revision | ||
717 | if 'tag' in ud.parm and sha1_re.match(ud.revision): | ||
718 | output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir) | ||
719 | output = output.strip() | ||
720 | if output != ud.revision: | ||
721 | # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list | ||
722 | # If it resolves to the same thing there isn't a problem. | ||
723 | output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir) | ||
724 | output2 = output2.strip() | ||
725 | if output != output2: | ||
726 | raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url) | ||
727 | |||
667 | repourl = self._get_repo_url(ud) | 728 | repourl = self._get_repo_url(ud) |
668 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) | 729 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) |
669 | 730 | ||
670 | if self._contains_lfs(ud, d, destdir): | 731 | if self._contains_lfs(ud, d, destdir): |
671 | if need_lfs and not self._find_git_lfs(d): | 732 | if not need_lfs: |
672 | raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl)) | ||
673 | elif not need_lfs: | ||
674 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) | 733 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) |
675 | else: | 734 | else: |
735 | self._ensure_git_lfs(d, ud) | ||
736 | |||
676 | runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) | 737 | runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) |
677 | 738 | ||
678 | if not ud.nocheckout: | 739 | if not ud.nocheckout: |
679 | if subpath: | 740 | if subpath: |
680 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, | 741 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d, |
681 | workdir=destdir) | 742 | workdir=destdir) |
682 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) | 743 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) |
683 | elif not ud.nobranch: | 744 | elif not ud.nobranch: |
684 | branchname = ud.branches[ud.names[0]] | 745 | branchname = ud.branch |
685 | runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ | 746 | runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ |
686 | ud.revisions[ud.names[0]]), d, workdir=destdir) | 747 | ud.revision), d, workdir=destdir) |
687 | runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ | 748 | runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ |
688 | branchname), d, workdir=destdir) | 749 | branchname), d, workdir=destdir) |
689 | else: | 750 | else: |
690 | runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir) | 751 | runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir) |
691 | 752 | ||
692 | return True | 753 | return True |
693 | 754 | ||
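unpack() now cross-checks a tag= parameter against a pinned SRCREV: the tag is peeled with "git rev-list -n 1", and if that differs from SRCREV the SRCREV itself is peeled too, so an annotated tag whose object hash differs from the commit it points at is still accepted. A standalone sketch of the same check via subprocess, with an illustrative repository path:

    import subprocess

    def peel(repo, ref):
        """Return the commit a ref ultimately points to (handles annotated tags)."""
        out = subprocess.run(["git", "-C", repo, "rev-list", "-n", "1", ref],
                             check=True, capture_output=True, text=True)
        return out.stdout.strip()

    def check_tag_matches_srcrev(repo, tag, srcrev):
        """Sketch: fail when the tag and the pinned revision name different commits."""
        tag_commit = peel(repo, tag)
        if tag_commit != srcrev and tag_commit != peel(repo, srcrev):
            raise RuntimeError("Tag '%s' resolved to %s, which does not match SRCREV %s"
                               % (tag, tag_commit, srcrev))

    # Usage: check_tag_matches_srcrev("/path/to/checkout", "v1.0", "0123...abcd")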
@@ -701,8 +762,13 @@ class Git(FetchMethod): | |||
701 | clonedir = os.path.realpath(ud.localpath) | 762 | clonedir = os.path.realpath(ud.localpath) |
702 | to_remove.append(clonedir) | 763 | to_remove.append(clonedir) |
703 | 764 | ||
765 | # Remove shallow mirror tarball | ||
766 | if ud.shallow: | ||
767 | to_remove.append(ud.fullshallow) | ||
768 | to_remove.append(ud.fullshallow + ".done") | ||
769 | |||
704 | for r in to_remove: | 770 | for r in to_remove: |
705 | if os.path.exists(r): | 771 | if os.path.exists(r) or os.path.islink(r): |
706 | bb.note('Removing %s' % r) | 772 | bb.note('Removing %s' % r) |
707 | bb.utils.remove(r, True) | 773 | bb.utils.remove(r, True) |
708 | 774 | ||
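clean() additionally queues the shallow tarball and its .done stamp, and the existence test gains os.path.islink() so a dangling symlink (such as one created by update_mirror_links) is still removed; os.path.exists() alone reports False for broken links. A small illustration of that difference:

    import os
    import tempfile

    def remove_if_present(path):
        """Sketch: remove regular paths and dangling symlinks alike."""
        if os.path.exists(path) or os.path.islink(path):
            print("Removing %s" % path)
            os.remove(path)   # the real code calls bb.utils.remove(path, True)

    tmp = tempfile.mkdtemp()
    link = os.path.join(tmp, "repo.tar.gz")
    os.symlink(os.path.join(tmp, "missing-target"), link)   # a broken symlink
    print(os.path.exists(link), os.path.islink(link))       # False True
    remove_if_present(link)                                  # still removed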
@@ -713,10 +779,10 @@ class Git(FetchMethod): | |||
713 | cmd = "" | 779 | cmd = "" |
714 | if ud.nobranch: | 780 | if ud.nobranch: |
715 | cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( | 781 | cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( |
716 | ud.basecmd, ud.revisions[name]) | 782 | ud.basecmd, ud.revision) |
717 | else: | 783 | else: |
718 | cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( | 784 | cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( |
719 | ud.basecmd, ud.revisions[name], ud.branches[name]) | 785 | ud.basecmd, ud.revision, ud.branch) |
720 | try: | 786 | try: |
721 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) | 787 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) |
722 | except bb.fetch2.FetchError: | 788 | except bb.fetch2.FetchError: |
@@ -725,19 +791,21 @@ class Git(FetchMethod): | |||
725 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) | 791 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) |
726 | return output.split()[0] != "0" | 792 | return output.split()[0] != "0" |
727 | 793 | ||
728 | def _lfs_objects_downloaded(self, ud, d, name, wd): | 794 | def _lfs_objects_downloaded(self, ud, d, wd): |
729 | """ | 795 | """ |
730 | Verifies whether the LFS objects for requested revisions have already been downloaded | 796 | Verifies whether the LFS objects for requested revisions have already been downloaded |
731 | """ | 797 | """ |
732 | # Bail out early if this repository doesn't use LFS | 798 | # Bail out early if this repository doesn't use LFS |
733 | if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd): | 799 | if not self._contains_lfs(ud, d, wd): |
734 | return True | 800 | return True |
735 | 801 | ||
802 | self._ensure_git_lfs(d, ud) | ||
803 | |||
736 | # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file | 804 | # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file |
737 | # existence. | 805 | # existence. |
738 | # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git | 806 | # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git |
739 | cmd = "%s lfs ls-files -l %s" \ | 807 | cmd = "%s lfs ls-files -l %s" \ |
740 | % (ud.basecmd, ud.revisions[name]) | 808 | % (ud.basecmd, ud.revision) |
741 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() | 809 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() |
742 | # Do not do any further matching if no objects are managed by LFS | 810 | # Do not do any further matching if no objects are managed by LFS |
743 | if not output: | 811 | if not output: |
@@ -761,18 +829,8 @@ class Git(FetchMethod): | |||
761 | """ | 829 | """ |
762 | Check if the repository has 'lfs' (large file) content | 830 | Check if the repository has 'lfs' (large file) content |
763 | """ | 831 | """ |
764 | |||
765 | if ud.nobranch: | ||
766 | # If no branch is specified, use the current git commit | ||
767 | refname = self._build_revision(ud, d, ud.names[0]) | ||
768 | elif wd == ud.clonedir: | ||
769 | # The bare clonedir doesn't use the remote names; it has the branch immediately. | ||
770 | refname = ud.branches[ud.names[0]] | ||
771 | else: | ||
772 | refname = "origin/%s" % ud.branches[ud.names[0]] | ||
773 | |||
774 | cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( | 832 | cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( |
775 | ud.basecmd, refname) | 833 | ud.basecmd, ud.revision) |
776 | 834 | ||
777 | try: | 835 | try: |
778 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) | 836 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) |
@@ -782,12 +840,14 @@ class Git(FetchMethod): | |||
782 | pass | 840 | pass |
783 | return False | 841 | return False |
784 | 842 | ||
785 | def _find_git_lfs(self, d): | 843 | def _ensure_git_lfs(self, d, ud): |
786 | """ | 844 | """ |
787 | Return True if git-lfs can be found, False otherwise. | 845 | Ensures that git-lfs is available, raising a FetchError if it isn't. |
788 | """ | 846 | """ |
789 | import shutil | 847 | if shutil.which("git-lfs", path=d.getVar('PATH')) is None: |
790 | return shutil.which("git-lfs", path=d.getVar('PATH')) is not None | 848 | raise bb.fetch2.FetchError( |
849 | "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 " | ||
850 | "to ignore it)" % self._get_repo_url(ud)) | ||
791 | 851 | ||
792 | def _get_repo_url(self, ud): | 852 | def _get_repo_url(self, ud): |
793 | """ | 853 | """ |
@@ -795,21 +855,21 @@ class Git(FetchMethod): | |||
795 | """ | 855 | """ |
796 | # Note that we do not support passwords directly in the git urls. There are several | 856 | # Note that we do not support passwords directly in the git urls. There are several |
797 | # reasons. SRC_URI can be written out to things like buildhistory and people don't | 857 | # reasons. SRC_URI can be written out to things like buildhistory and people don't |
798 | # want to leak passwords like that. Its also all too easy to share metadata without | 858 | # want to leak passwords like that. Its also all too easy to share metadata without |
799 | # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as | 859 | # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as |
800 | # alternatives so we will not take patches adding password support here. | 860 | # alternatives so we will not take patches adding password support here. |
801 | if ud.user: | 861 | if ud.user: |
802 | username = ud.user + '@' | 862 | username = ud.user + '@' |
803 | else: | 863 | else: |
804 | username = "" | 864 | username = "" |
805 | return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) | 865 | return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path)) |
806 | 866 | ||
807 | def _revision_key(self, ud, d, name): | 867 | def _revision_key(self, ud, d, name): |
808 | """ | 868 | """ |
809 | Return a unique key for the url | 869 | Return a unique key for the url |
810 | """ | 870 | """ |
811 | # Collapse adjacent slashes | 871 | # Collapse adjacent slashes |
812 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] | 872 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev |
813 | 873 | ||
814 | def _lsremote(self, ud, d, search): | 874 | def _lsremote(self, ud, d, search): |
815 | """ | 875 | """ |
@@ -842,26 +902,26 @@ class Git(FetchMethod): | |||
842 | Compute the HEAD revision for the url | 902 | Compute the HEAD revision for the url |
843 | """ | 903 | """ |
844 | if not d.getVar("__BBSRCREV_SEEN"): | 904 | if not d.getVar("__BBSRCREV_SEEN"): |
845 | raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path)) | 905 | raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path)) |
846 | 906 | ||
847 | # Ensure we mark as not cached | 907 | # Ensure we mark as not cached |
848 | bb.fetch2.mark_recipe_nocache(d) | 908 | bb.fetch2.mark_recipe_nocache(d) |
849 | 909 | ||
850 | output = self._lsremote(ud, d, "") | 910 | output = self._lsremote(ud, d, "") |
851 | # Tags of the form ^{} may not work, need to fallback to other form | 911 | # Tags of the form ^{} may not work, need to fallback to other form |
852 | if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: | 912 | if ud.unresolvedrev[:5] == "refs/" or ud.usehead: |
853 | head = ud.unresolvedrev[name] | 913 | head = ud.unresolvedrev |
854 | tag = ud.unresolvedrev[name] | 914 | tag = ud.unresolvedrev |
855 | else: | 915 | else: |
856 | head = "refs/heads/%s" % ud.unresolvedrev[name] | 916 | head = "refs/heads/%s" % ud.unresolvedrev |
857 | tag = "refs/tags/%s" % ud.unresolvedrev[name] | 917 | tag = "refs/tags/%s" % ud.unresolvedrev |
858 | for s in [head, tag + "^{}", tag]: | 918 | for s in [head, tag + "^{}", tag]: |
859 | for l in output.strip().split('\n'): | 919 | for l in output.strip().split('\n'): |
860 | sha1, ref = l.split() | 920 | sha1, ref = l.split() |
861 | if s == ref: | 921 | if s == ref: |
862 | return sha1 | 922 | return sha1 |
863 | raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ | 923 | raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ |
864 | (ud.unresolvedrev[name], ud.host+ud.path)) | 924 | (ud.unresolvedrev, ud.host+ud.path)) |
865 | 925 | ||
866 | def latest_versionstring(self, ud, d): | 926 | def latest_versionstring(self, ud, d): |
867 | """ | 927 | """ |
@@ -912,23 +972,22 @@ class Git(FetchMethod): | |||
912 | return pupver | 972 | return pupver |
913 | 973 | ||
914 | def _build_revision(self, ud, d, name): | 974 | def _build_revision(self, ud, d, name): |
915 | return ud.revisions[name] | 975 | return ud.revision |
916 | 976 | ||
917 | def gitpkgv_revision(self, ud, d, name): | 977 | def gitpkgv_revision(self, ud, d, name): |
918 | """ | 978 | """ |
919 | Return a sortable revision number by counting commits in the history | 979 | Return a sortable revision number by counting commits in the history |
920 | Based on gitpkgv.bbclass in meta-openembedded | 980 | Based on gitpkgv.bbclass in meta-openembedded |
921 | """ | 981 | """ |
922 | rev = self._build_revision(ud, d, name) | 982 | rev = ud.revision |
923 | localpath = ud.localpath | 983 | localpath = ud.localpath |
924 | rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) | 984 | rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) |
925 | if not os.path.exists(localpath): | 985 | if not os.path.exists(localpath): |
926 | commits = None | 986 | commits = None |
927 | else: | 987 | else: |
928 | if not os.path.exists(rev_file) or not os.path.getsize(rev_file): | 988 | if not os.path.exists(rev_file) or not os.path.getsize(rev_file): |
929 | from pipes import quote | ||
930 | commits = bb.fetch2.runfetchcmd( | 989 | commits = bb.fetch2.runfetchcmd( |
931 | "git rev-list %s -- | wc -l" % quote(rev), | 990 | "git rev-list %s -- | wc -l" % shlex.quote(rev), |
932 | d, quiet=True).strip().lstrip('0') | 991 | d, quiet=True).strip().lstrip('0') |
933 | if commits: | 992 | if commits: |
934 | open(rev_file, "w").write("%d\n" % int(commits)) | 993 | open(rev_file, "w").write("%d\n" % int(commits)) |
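gitpkgv_revision() counts the commits reachable from the revision, quoting it with shlex.quote now that the deprecated pipes import is gone, and caches the count in an oe-gitpkgv_<rev> file. A standalone sketch of that count-and-cache idea; the paths are illustrative:

    import os
    import shlex
    import subprocess

    def commit_count(clonedir, rev, cache_dir):
        """Sketch: count commits leading up to 'rev', caching the result so
        repeated queries do not rerun git rev-list."""
        cache = os.path.join(cache_dir, "oe-gitpkgv_" + rev)
        if os.path.exists(cache) and os.path.getsize(cache):
            with open(cache) as f:
                return int(f.read().strip())
        cmd = "git -C %s rev-list %s -- | wc -l" % (shlex.quote(clonedir), shlex.quote(rev))
        count = int(subprocess.check_output(cmd, shell=True, text=True).strip())
        with open(cache, "w") as f:
            f.write("%d\n" % count)
        return count

    # Usage: commit_count("/path/to/git2/clone", "0123...abcd", "/path/to/git2/clone")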