Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 561 |
1 file changed, 388 insertions, 173 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections | |||
23 | import subprocess | 23 | import subprocess |
24 | import pickle | 24 | import pickle |
25 | import errno | 25 | import errno |
26 | import bb.persist_data, bb.utils | 26 | import bb.utils |
27 | import bb.checksum | 27 | import bb.checksum |
28 | import bb.process | 28 | import bb.process |
29 | import bb.event | 29 | import bb.event |
30 | 30 | ||
31 | __version__ = "2" | 31 | __version__ = "2" |
32 | _checksum_cache = bb.checksum.FileChecksumCache() | 32 | _checksum_cache = bb.checksum.FileChecksumCache() |
33 | _revisions_cache = bb.checksum.RevisionsCache() | ||
33 | 34 | ||
34 | logger = logging.getLogger("BitBake.Fetcher") | 35 | logger = logging.getLogger("BitBake.Fetcher") |
35 | 36 | ||
36 | CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] | 37 | CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ] |
37 | SHOWN_CHECKSUM_LIST = ["sha256"] | 38 | SHOWN_CHECKSUM_LIST = ["sha256"] |
38 | 39 | ||
39 | class BBFetchException(Exception): | 40 | class BBFetchException(Exception): |
@@ -113,7 +114,7 @@ class MissingParameterError(BBFetchException): | |||
113 | self.args = (missing, url) | 114 | self.args = (missing, url) |
114 | 115 | ||
115 | class ParameterError(BBFetchException): | 116 | class ParameterError(BBFetchException): |
116 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" | 117 | """Exception raised when a url cannot be processed due to invalid parameters.""" |
117 | def __init__(self, message, url): | 118 | def __init__(self, message, url): |
118 | msg = "URL: '%s' has invalid parameters. %s" % (url, message) | 119 | msg = "URL: '%s' has invalid parameters. %s" % (url, message) |
119 | self.url = url | 120 | self.url = url |
@@ -182,7 +183,7 @@ class URI(object): | |||
182 | Some notes about relative URIs: while it's specified that | 183 | Some notes about relative URIs: while it's specified that |
183 | a URI beginning with <scheme>:// should either be directly | 184 | a URI beginning with <scheme>:// should either be directly |
184 | followed by a hostname or a /, the old URI handling of the | 185 | followed by a hostname or a /, the old URI handling of the |
185 | fetch2 library did not comform to this. Therefore, this URI | 186 | fetch2 library did not conform to this. Therefore, this URI |
186 | class has some kludges to make sure that URIs are parsed in | 187 | class has some kludges to make sure that URIs are parsed in |
187 | a way comforming to bitbake's current usage. This URI class | 188 | a way comforming to bitbake's current usage. This URI class |
188 | supports the following: | 189 | supports the following: |
@@ -199,7 +200,7 @@ class URI(object): | |||
199 | file://hostname/absolute/path.diff (would be IETF compliant) | 200 | file://hostname/absolute/path.diff (would be IETF compliant) |
200 | 201 | ||
201 | Note that the last case only applies to a list of | 202 | Note that the last case only applies to a list of |
202 | "whitelisted" schemes (currently only file://), that requires | 203 | explicitly allowed schemes (currently only file://), that requires |
203 | its URIs to not have a network location. | 204 | its URIs to not have a network location. |
204 | """ | 205 | """ |
205 | 206 | ||
@@ -237,7 +238,7 @@ class URI(object): | |||
237 | # to RFC compliant URL format. E.g.: | 238 | # to RFC compliant URL format. E.g.: |
238 | # file://foo.diff -> file:foo.diff | 239 | # file://foo.diff -> file:foo.diff |
239 | if urlp.scheme in self._netloc_forbidden: | 240 | if urlp.scheme in self._netloc_forbidden: |
240 | uri = re.sub("(?<=:)//(?!/)", "", uri, 1) | 241 | uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1) |
241 | reparse = 1 | 242 | reparse = 1 |
242 | 243 | ||
243 | if reparse: | 244 | if reparse: |
@@ -290,12 +291,12 @@ class URI(object): | |||
290 | 291 | ||
291 | def _param_str_split(self, string, elmdelim, kvdelim="="): | 292 | def _param_str_split(self, string, elmdelim, kvdelim="="): |
292 | ret = collections.OrderedDict() | 293 | ret = collections.OrderedDict() |
293 | for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: | 294 | for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]: |
294 | ret[k] = v | 295 | ret[k] = v |
295 | return ret | 296 | return ret |
296 | 297 | ||
297 | def _param_str_join(self, dict_, elmdelim, kvdelim="="): | 298 | def _param_str_join(self, dict_, elmdelim, kvdelim="="): |
298 | return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) | 299 | return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()]) |
299 | 300 | ||
300 | @property | 301 | @property |
301 | def hostport(self): | 302 | def hostport(self): |
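For orientation (not part of the patch): _param_str_split() previously failed to unpack a parameter with no "=", so value-less parameters can now round-trip through URI. A minimal sketch, assuming bb is importable (e.g. bitbake/lib on sys.path) and using illustrative parameter strings:

    from bb.fetch2 import URI

    u = URI()
    params = u._param_str_split("protocol=https;nobranch", ";")
    # params == OrderedDict([('protocol', 'https'), ('nobranch', None)])
    print(u._param_str_join(params, ";"))
    # expected: protocol=https;nobranch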
@@ -352,6 +353,14 @@ def decodeurl(url): | |||
352 | user, password, parameters). | 353 | user, password, parameters). |
353 | """ | 354 | """ |
354 | 355 | ||
356 | uri = URI(url) | ||
357 | path = uri.path if uri.path else "/" | ||
358 | return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params | ||
359 | |||
360 | def decodemirrorurl(url): | ||
361 | """Decodes a mirror URL into the tokens (scheme, network location, path, | ||
362 | user, password, parameters). | ||
363 | """ | ||
355 | m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) | 364 | m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) |
356 | if not m: | 365 | if not m: |
357 | raise MalformedUrl(url) | 366 | raise MalformedUrl(url) |
@@ -370,6 +379,9 @@ def decodeurl(url): | |||
370 | elif type.lower() == 'file': | 379 | elif type.lower() == 'file': |
371 | host = "" | 380 | host = "" |
372 | path = location | 381 | path = location |
382 | if user: | ||
383 | path = user + '@' + path | ||
384 | user = "" | ||
373 | else: | 385 | else: |
374 | host = location | 386 | host = location |
375 | path = "/" | 387 | path = "/" |
@@ -388,7 +400,7 @@ def decodeurl(url): | |||
388 | if s: | 400 | if s: |
389 | if not '=' in s: | 401 | if not '=' in s: |
390 | raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) | 402 | raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) |
391 | s1, s2 = s.split('=') | 403 | s1, s2 = s.split('=', 1) |
392 | p[s1] = s2 | 404 | p[s1] = s2 |
393 | 405 | ||
394 | return type, host, urllib.parse.unquote(path), user, pswd, p | 406 | return type, host, urllib.parse.unquote(path), user, pswd, p |
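A quick, illustrative sketch of what the URI-backed decodeurl() returns (decodemirrorurl() keeps the previous regex-based tokenisation, which uri_replace() below relies on for mirror specifications); the URL is an assumption, not from the patch:

    from bb.fetch2 import decodeurl

    scheme, host, path, user, pswd, params = decodeurl(
        "git://git.example.com/repo.git;protocol=https;branch=main")
    # scheme == "git", host == "git.example.com", path == "/repo.git"
    # params == {"protocol": "https", "branch": "main"} (an OrderedDict); user/pswd empty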
@@ -402,34 +414,37 @@ def encodeurl(decoded): | |||
402 | 414 | ||
403 | if not type: | 415 | if not type: |
404 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) | 416 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) |
405 | url = '%s://' % type | 417 | uri = URI() |
418 | uri.scheme = type | ||
406 | if user and type != "file": | 419 | if user and type != "file": |
407 | url += "%s" % user | 420 | uri.username = user |
408 | if pswd: | 421 | if pswd: |
409 | url += ":%s" % pswd | 422 | uri.password = pswd |
410 | url += "@" | ||
411 | if host and type != "file": | 423 | if host and type != "file": |
412 | url += "%s" % host | 424 | uri.hostname = host |
413 | if path: | 425 | if path: |
414 | # Standardise path to ensure comparisons work | 426 | # Standardise path to ensure comparisons work |
415 | while '//' in path: | 427 | while '//' in path: |
416 | path = path.replace("//", "/") | 428 | path = path.replace("//", "/") |
417 | url += "%s" % urllib.parse.quote(path) | 429 | uri.path = path |
430 | if type == "file": | ||
431 | # Use old not IETF compliant style | ||
432 | uri.relative = False | ||
418 | if p: | 433 | if p: |
419 | for parm in p: | 434 | uri.params = p |
420 | url += ";%s=%s" % (parm, p[parm]) | ||
421 | 435 | ||
422 | return url | 436 | return str(uri) |
423 | 437 | ||
424 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | 438 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): |
425 | if not ud.url or not uri_find or not uri_replace: | 439 | if not ud.url or not uri_find or not uri_replace: |
426 | logger.error("uri_replace: passed an undefined value, not replacing") | 440 | logger.error("uri_replace: passed an undefined value, not replacing") |
427 | return None | 441 | return None |
428 | uri_decoded = list(decodeurl(ud.url)) | 442 | uri_decoded = list(decodemirrorurl(ud.url)) |
429 | uri_find_decoded = list(decodeurl(uri_find)) | 443 | uri_find_decoded = list(decodemirrorurl(uri_find)) |
430 | uri_replace_decoded = list(decodeurl(uri_replace)) | 444 | uri_replace_decoded = list(decodemirrorurl(uri_replace)) |
431 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) | 445 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) |
432 | result_decoded = ['', '', '', '', '', {}] | 446 | result_decoded = ['', '', '', '', '', {}] |
447 | # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params | ||
433 | for loc, i in enumerate(uri_find_decoded): | 448 | for loc, i in enumerate(uri_find_decoded): |
434 | result_decoded[loc] = uri_decoded[loc] | 449 | result_decoded[loc] = uri_decoded[loc] |
435 | regexp = i | 450 | regexp = i |
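Stepping back to the encodeurl() rewrite earlier in this hunk: it now rebuilds the URL through the same URI class, so decode/encode is expected to round-trip for ordinary URLs. A hedged sketch with an illustrative URL (exact quoting of unusual characters may differ):

    from bb.fetch2 import decodeurl, encodeurl

    url = "https://example.com/downloads/foo-1.0.tar.gz;name=foo"
    print(encodeurl(decodeurl(url)))
    # expected to print the original url back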
@@ -449,6 +464,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
449 | for l in replacements: | 464 | for l in replacements: |
450 | uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) | 465 | uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) |
451 | result_decoded[loc][k] = uri_replace_decoded[loc][k] | 466 | result_decoded[loc][k] = uri_replace_decoded[loc][k] |
467 | elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]: | ||
468 | # User/password in the replacement is just a straight replacement | ||
469 | result_decoded[loc] = uri_replace_decoded[loc] | ||
452 | elif (re.match(regexp, uri_decoded[loc])): | 470 | elif (re.match(regexp, uri_decoded[loc])): |
453 | if not uri_replace_decoded[loc]: | 471 | if not uri_replace_decoded[loc]: |
454 | result_decoded[loc] = "" | 472 | result_decoded[loc] = "" |
@@ -456,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
456 | for k in replacements: | 474 | for k in replacements: |
457 | uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) | 475 | uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) |
458 | #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) | 476 | #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) |
459 | result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) | 477 | result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1) |
460 | if loc == 2: | 478 | if loc == 2: |
461 | # Handle path manipulations | 479 | # Handle path manipulations |
462 | basename = None | 480 | basename = None |
@@ -465,10 +483,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
465 | basename = os.path.basename(mirrortarball) | 483 | basename = os.path.basename(mirrortarball) |
466 | # Kill parameters, they make no sense for mirror tarballs | 484 | # Kill parameters, they make no sense for mirror tarballs |
467 | uri_decoded[5] = {} | 485 | uri_decoded[5] = {} |
486 | uri_find_decoded[5] = {} | ||
468 | elif ud.localpath and ud.method.supports_checksum(ud): | 487 | elif ud.localpath and ud.method.supports_checksum(ud): |
469 | basename = os.path.basename(ud.localpath) | 488 | basename = os.path.basename(ud.localpath) |
470 | if basename and not result_decoded[loc].endswith(basename): | 489 | if basename: |
471 | result_decoded[loc] = os.path.join(result_decoded[loc], basename) | 490 | uri_basename = os.path.basename(uri_decoded[loc]) |
491 | # Prefix with a slash as a sentinel in case | ||
492 | # result_decoded[loc] does not contain one. | ||
493 | path = "/" + result_decoded[loc] | ||
494 | if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename): | ||
495 | result_decoded[loc] = path[1:-len(uri_basename)] + basename | ||
496 | elif not path.endswith("/" + basename): | ||
497 | result_decoded[loc] = os.path.join(path[1:], basename) | ||
472 | else: | 498 | else: |
473 | return None | 499 | return None |
474 | result = encodeurl(result_decoded) | 500 | result = encodeurl(result_decoded) |
@@ -481,18 +507,23 @@ methods = [] | |||
481 | urldata_cache = {} | 507 | urldata_cache = {} |
482 | saved_headrevs = {} | 508 | saved_headrevs = {} |
483 | 509 | ||
484 | def fetcher_init(d): | 510 | def fetcher_init(d, servercontext=True): |
485 | """ | 511 | """ |
486 | Called to initialize the fetchers once the configuration data is known. | 512 | Called to initialize the fetchers once the configuration data is known. |
487 | Calls before this must not hit the cache. | 513 | Calls before this must not hit the cache. |
488 | """ | 514 | """ |
489 | 515 | ||
490 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | 516 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) |
517 | _revisions_cache.init_cache(d.getVar("BB_CACHEDIR")) | ||
518 | |||
519 | if not servercontext: | ||
520 | return | ||
521 | |||
491 | try: | 522 | try: |
492 | # fetcher_init is called multiple times, so make sure we only save the | 523 | # fetcher_init is called multiple times, so make sure we only save the |
493 | # revs the first time it is called. | 524 | # revs the first time it is called. |
494 | if not bb.fetch2.saved_headrevs: | 525 | if not bb.fetch2.saved_headrevs: |
495 | bb.fetch2.saved_headrevs = dict(revs) | 526 | bb.fetch2.saved_headrevs = _revisions_cache.get_revs() |
496 | except: | 527 | except: |
497 | pass | 528 | pass |
498 | 529 | ||
@@ -502,11 +533,10 @@ def fetcher_init(d): | |||
502 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | 533 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
503 | elif srcrev_policy == "clear": | 534 | elif srcrev_policy == "clear": |
504 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | 535 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) |
505 | revs.clear() | 536 | _revisions_cache.clear_cache() |
506 | else: | 537 | else: |
507 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | 538 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
508 | 539 | ||
509 | _checksum_cache.init_cache(d) | ||
510 | 540 | ||
511 | for m in methods: | 541 | for m in methods: |
512 | if hasattr(m, "init"): | 542 | if hasattr(m, "init"): |
@@ -514,9 +544,11 @@ def fetcher_init(d): | |||
514 | 544 | ||
515 | def fetcher_parse_save(): | 545 | def fetcher_parse_save(): |
516 | _checksum_cache.save_extras() | 546 | _checksum_cache.save_extras() |
547 | _revisions_cache.save_extras() | ||
517 | 548 | ||
518 | def fetcher_parse_done(): | 549 | def fetcher_parse_done(): |
519 | _checksum_cache.save_merge() | 550 | _checksum_cache.save_merge() |
551 | _revisions_cache.save_merge() | ||
520 | 552 | ||
521 | def fetcher_compare_revisions(d): | 553 | def fetcher_compare_revisions(d): |
522 | """ | 554 | """ |
@@ -524,7 +556,7 @@ def fetcher_compare_revisions(d): | |||
524 | when bitbake was started and return true if they have changed. | 556 | when bitbake was started and return true if they have changed. |
525 | """ | 557 | """ |
526 | 558 | ||
527 | headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) | 559 | headrevs = _revisions_cache.get_revs() |
528 | return headrevs != bb.fetch2.saved_headrevs | 560 | return headrevs != bb.fetch2.saved_headrevs |
529 | 561 | ||
530 | def mirror_from_string(data): | 562 | def mirror_from_string(data): |
@@ -534,7 +566,7 @@ def mirror_from_string(data): | |||
534 | bb.warn('Invalid mirror data %s, should have paired members.' % data) | 566 | bb.warn('Invalid mirror data %s, should have paired members.' % data) |
535 | return list(zip(*[iter(mirrors)]*2)) | 567 | return list(zip(*[iter(mirrors)]*2)) |
536 | 568 | ||
537 | def verify_checksum(ud, d, precomputed={}): | 569 | def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True): |
538 | """ | 570 | """ |
539 | verify the MD5 and SHA256 checksum for downloaded src | 571 | verify the MD5 and SHA256 checksum for downloaded src |
540 | 572 | ||
@@ -548,20 +580,25 @@ def verify_checksum(ud, d, precomputed={}): | |||
548 | file against those in the recipe each time, rather than only after | 580 | file against those in the recipe each time, rather than only after |
549 | downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. | 581 | downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. |
550 | """ | 582 | """ |
551 | |||
552 | if ud.ignore_checksums or not ud.method.supports_checksum(ud): | 583 | if ud.ignore_checksums or not ud.method.supports_checksum(ud): |
553 | return {} | 584 | return {} |
554 | 585 | ||
586 | if localpath is None: | ||
587 | localpath = ud.localpath | ||
588 | |||
555 | def compute_checksum_info(checksum_id): | 589 | def compute_checksum_info(checksum_id): |
556 | checksum_name = getattr(ud, "%s_name" % checksum_id) | 590 | checksum_name = getattr(ud, "%s_name" % checksum_id) |
557 | 591 | ||
558 | if checksum_id in precomputed: | 592 | if checksum_id in precomputed: |
559 | checksum_data = precomputed[checksum_id] | 593 | checksum_data = precomputed[checksum_id] |
560 | else: | 594 | else: |
561 | checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) | 595 | checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath) |
562 | 596 | ||
563 | checksum_expected = getattr(ud, "%s_expected" % checksum_id) | 597 | checksum_expected = getattr(ud, "%s_expected" % checksum_id) |
564 | 598 | ||
599 | if checksum_expected == '': | ||
600 | checksum_expected = None | ||
601 | |||
565 | return { | 602 | return { |
566 | "id": checksum_id, | 603 | "id": checksum_id, |
567 | "name": checksum_name, | 604 | "name": checksum_name, |
@@ -581,17 +618,13 @@ def verify_checksum(ud, d, precomputed={}): | |||
581 | checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] | 618 | checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] |
582 | 619 | ||
583 | # If no checksum has been provided | 620 | # If no checksum has been provided |
584 | if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): | 621 | if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): |
585 | messages = [] | 622 | messages = [] |
586 | strict = d.getVar("BB_STRICT_CHECKSUM") or "0" | 623 | strict = d.getVar("BB_STRICT_CHECKSUM") or "0" |
587 | 624 | ||
588 | # If strict checking enabled and neither sum defined, raise error | 625 | # If strict checking enabled and neither sum defined, raise error |
589 | if strict == "1": | 626 | if strict == "1": |
590 | messages.append("No checksum specified for '%s', please add at " \ | 627 | raise NoChecksumError("\n".join(checksum_lines)) |
591 | "least one to the recipe:" % ud.localpath) | ||
592 | messages.extend(checksum_lines) | ||
593 | logger.error("\n".join(messages)) | ||
594 | raise NoChecksumError("Missing SRC_URI checksum", ud.url) | ||
595 | 628 | ||
596 | bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) | 629 | bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) |
597 | 630 | ||
@@ -612,8 +645,8 @@ def verify_checksum(ud, d, precomputed={}): | |||
612 | 645 | ||
613 | for ci in checksum_infos: | 646 | for ci in checksum_infos: |
614 | if ci["expected"] and ci["expected"] != ci["data"]: | 647 | if ci["expected"] and ci["expected"] != ci["data"]: |
615 | messages.append("File: '%s' has %s checksum %s when %s was " \ | 648 | messages.append("File: '%s' has %s checksum '%s' when '%s' was " \ |
616 | "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) | 649 | "expected" % (localpath, ci["id"], ci["data"], ci["expected"])) |
617 | bad_checksum = ci["data"] | 650 | bad_checksum = ci["data"] |
618 | 651 | ||
619 | if bad_checksum: | 652 | if bad_checksum: |
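For context, an illustrative call (not from the patch) showing the two new keyword arguments: localpath lets a caller verify a file other than ud.localpath, and fatal_nochecksum=False skips the fatal missing-checksum handling entirely:

    # ud: an existing FetchData; d: the datastore; candidate: some downloaded file path
    checksums = verify_checksum(ud, d, localpath=candidate, fatal_nochecksum=False)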
@@ -731,13 +764,16 @@ def subprocess_setup(): | |||
731 | # SIGPIPE errors are known issues with gzip/bash | 764 | # SIGPIPE errors are known issues with gzip/bash |
732 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) | 765 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) |
733 | 766 | ||
734 | def get_autorev(d): | 767 | def mark_recipe_nocache(d): |
735 | # only not cache src rev in autorev case | ||
736 | if d.getVar('BB_SRCREV_POLICY') != "cache": | 768 | if d.getVar('BB_SRCREV_POLICY') != "cache": |
737 | d.setVar('BB_DONT_CACHE', '1') | 769 | d.setVar('BB_DONT_CACHE', '1') |
770 | |||
771 | def get_autorev(d): | ||
772 | mark_recipe_nocache(d) | ||
773 | d.setVar("__BBAUTOREV_SEEN", True) | ||
738 | return "AUTOINC" | 774 | return "AUTOINC" |
739 | 775 | ||
740 | def get_srcrev(d, method_name='sortable_revision'): | 776 | def _get_srcrev(d, method_name='sortable_revision'): |
741 | """ | 777 | """ |
742 | Return the revision string, usually for use in the version string (PV) of the current package | 778 | Return the revision string, usually for use in the version string (PV) of the current package |
743 | Most packages usually only have one SCM so we just pass on the call. | 779 | Most packages usually only have one SCM so we just pass on the call. |
@@ -751,23 +787,34 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
751 | that fetcher provides a method with the given name and the same signature as sortable_revision. | 787 | that fetcher provides a method with the given name and the same signature as sortable_revision. |
752 | """ | 788 | """ |
753 | 789 | ||
790 | d.setVar("__BBSRCREV_SEEN", "1") | ||
791 | recursion = d.getVar("__BBINSRCREV") | ||
792 | if recursion: | ||
793 | raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI") | ||
794 | d.setVar("__BBINSRCREV", True) | ||
795 | |||
754 | scms = [] | 796 | scms = [] |
797 | revs = [] | ||
755 | fetcher = Fetch(d.getVar('SRC_URI').split(), d) | 798 | fetcher = Fetch(d.getVar('SRC_URI').split(), d) |
756 | urldata = fetcher.ud | 799 | urldata = fetcher.ud |
757 | for u in urldata: | 800 | for u in urldata: |
758 | if urldata[u].method.supports_srcrev(): | 801 | if urldata[u].method.supports_srcrev(): |
759 | scms.append(u) | 802 | scms.append(u) |
760 | 803 | ||
761 | if len(scms) == 0: | 804 | if not scms: |
762 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") | 805 | d.delVar("__BBINSRCREV") |
806 | return "", revs | ||
763 | 807 | ||
764 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: | 808 | |
765 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) | 809 | if len(scms) == 1: |
810 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name) | ||
811 | revs.append(rev) | ||
766 | if len(rev) > 10: | 812 | if len(rev) > 10: |
767 | rev = rev[:10] | 813 | rev = rev[:10] |
814 | d.delVar("__BBINSRCREV") | ||
768 | if autoinc: | 815 | if autoinc: |
769 | return "AUTOINC+" + rev | 816 | return "AUTOINC+" + rev, revs |
770 | return rev | 817 | return rev, revs |
771 | 818 | ||
772 | # | 819 | # |
773 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT | 820 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT |
@@ -781,12 +828,12 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
781 | seenautoinc = False | 828 | seenautoinc = False |
782 | for scm in scms: | 829 | for scm in scms: |
783 | ud = urldata[scm] | 830 | ud = urldata[scm] |
784 | for name in ud.names: | 831 | autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name) |
785 | autoinc, rev = getattr(ud.method, method_name)(ud, d, name) | 832 | revs.append(rev) |
786 | seenautoinc = seenautoinc or autoinc | 833 | seenautoinc = seenautoinc or autoinc |
787 | if len(rev) > 10: | 834 | if len(rev) > 10: |
788 | rev = rev[:10] | 835 | rev = rev[:10] |
789 | name_to_rev[name] = rev | 836 | name_to_rev[ud.name] = rev |
790 | # Replace names by revisions in the SRCREV_FORMAT string. The approach used | 837 | # Replace names by revisions in the SRCREV_FORMAT string. The approach used |
791 | # here can handle names being prefixes of other names and names appearing | 838 | # here can handle names being prefixes of other names and names appearing |
792 | # as substrings in revisions (in which case the name should not be | 839 | # as substrings in revisions (in which case the name should not be |
@@ -799,12 +846,71 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
799 | if seenautoinc: | 846 | if seenautoinc: |
800 | format = "AUTOINC+" + format | 847 | format = "AUTOINC+" + format |
801 | 848 | ||
802 | return format | 849 | d.delVar("__BBINSRCREV") |
850 | return format, revs | ||
851 | |||
852 | def get_hashvalue(d, method_name='sortable_revision'): | ||
853 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
854 | return " ".join(revs) | ||
855 | |||
856 | def get_pkgv_string(d, method_name='sortable_revision'): | ||
857 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
858 | return pkgv | ||
859 | |||
860 | def get_srcrev(d, method_name='sortable_revision'): | ||
861 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
862 | if not pkgv: | ||
863 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") | ||
864 | return pkgv | ||
803 | 865 | ||
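In short, the refactor leaves three thin wrappers around _get_srcrev(); a sketch of how they differ, assuming a BitBake datastore d:

    pv_rev = get_srcrev(d)       # e.g. "AUTOINC+1234567890" or a plain revision; raises if SRC_URI has no SCM
    pkgv = get_pkgv_string(d)    # same string, but empty instead of an error when there is no SCM
    hashdata = get_hashvalue(d)  # space-separated, untruncated revisions, for hash/signature data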
804 | def localpath(url, d): | 866 | def localpath(url, d): |
805 | fetcher = bb.fetch2.Fetch([url], d) | 867 | fetcher = bb.fetch2.Fetch([url], d) |
806 | return fetcher.localpath(url) | 868 | return fetcher.localpath(url) |
807 | 869 | ||
870 | # Need to export PATH as binary could be in metadata paths | ||
871 | # rather than host provided | ||
872 | # Also include some other variables. | ||
873 | FETCH_EXPORT_VARS = ['HOME', 'PATH', | ||
874 | 'HTTP_PROXY', 'http_proxy', | ||
875 | 'HTTPS_PROXY', 'https_proxy', | ||
876 | 'FTP_PROXY', 'ftp_proxy', | ||
877 | 'FTPS_PROXY', 'ftps_proxy', | ||
878 | 'NO_PROXY', 'no_proxy', | ||
879 | 'ALL_PROXY', 'all_proxy', | ||
880 | 'GIT_PROXY_COMMAND', | ||
881 | 'GIT_SSH', | ||
882 | 'GIT_SSH_COMMAND', | ||
883 | 'GIT_SSL_CAINFO', | ||
884 | 'GIT_SMART_HTTP', | ||
885 | 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', | ||
886 | 'SOCKS5_USER', 'SOCKS5_PASSWD', | ||
887 | 'DBUS_SESSION_BUS_ADDRESS', | ||
888 | 'P4CONFIG', | ||
889 | 'SSL_CERT_FILE', | ||
890 | 'NODE_EXTRA_CA_CERTS', | ||
891 | 'AWS_PROFILE', | ||
892 | 'AWS_ACCESS_KEY_ID', | ||
893 | 'AWS_SECRET_ACCESS_KEY', | ||
894 | 'AWS_ROLE_ARN', | ||
895 | 'AWS_WEB_IDENTITY_TOKEN_FILE', | ||
896 | 'AWS_DEFAULT_REGION', | ||
897 | 'AWS_SESSION_TOKEN', | ||
898 | 'GIT_CACHE_PATH', | ||
899 | 'REMOTE_CONTAINERS_IPC', | ||
900 | 'GITHUB_TOKEN', | ||
901 | 'SSL_CERT_DIR'] | ||
902 | |||
903 | def get_fetcher_environment(d): | ||
904 | newenv = {} | ||
905 | origenv = d.getVar("BB_ORIGENV") | ||
906 | for name in bb.fetch2.FETCH_EXPORT_VARS: | ||
907 | value = d.getVar(name) | ||
908 | if not value and origenv: | ||
909 | value = origenv.getVar(name) | ||
910 | if value: | ||
911 | newenv[name] = value | ||
912 | return newenv | ||
913 | |||
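A hedged usage sketch for the new helper (the variable list and get_fetcher_environment() are from the patch; the wrapper below is illustrative):

    import subprocess
    import bb.fetch2

    def clone_with_fetcher_env(d, repo, destdir):
        # Pick up proxies, SSH agent settings, tokens etc. from the datastore/original environment
        env = bb.fetch2.get_fetcher_environment(d)
        return subprocess.run(["git", "clone", repo, destdir], env=env, check=True)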
808 | def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | 914 | def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): |
809 | """ | 915 | """ |
810 | Run cmd returning the command output | 916 | Run cmd returning the command output |
@@ -813,25 +919,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | |||
813 | Optionally remove the files/directories listed in cleanup upon failure | 919 | Optionally remove the files/directories listed in cleanup upon failure |
814 | """ | 920 | """ |
815 | 921 | ||
816 | # Need to export PATH as binary could be in metadata paths | 922 | exportvars = FETCH_EXPORT_VARS |
817 | # rather than host provided | ||
818 | # Also include some other variables. | ||
819 | # FIXME: Should really include all export varaiables? | ||
820 | exportvars = ['HOME', 'PATH', | ||
821 | 'HTTP_PROXY', 'http_proxy', | ||
822 | 'HTTPS_PROXY', 'https_proxy', | ||
823 | 'FTP_PROXY', 'ftp_proxy', | ||
824 | 'FTPS_PROXY', 'ftps_proxy', | ||
825 | 'NO_PROXY', 'no_proxy', | ||
826 | 'ALL_PROXY', 'all_proxy', | ||
827 | 'GIT_PROXY_COMMAND', | ||
828 | 'GIT_SSH', | ||
829 | 'GIT_SSL_CAINFO', | ||
830 | 'GIT_SMART_HTTP', | ||
831 | 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', | ||
832 | 'SOCKS5_USER', 'SOCKS5_PASSWD', | ||
833 | 'DBUS_SESSION_BUS_ADDRESS', | ||
834 | 'P4CONFIG'] | ||
835 | 923 | ||
836 | if not cleanup: | 924 | if not cleanup: |
837 | cleanup = [] | 925 | cleanup = [] |
@@ -868,14 +956,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | |||
868 | (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) | 956 | (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) |
869 | success = True | 957 | success = True |
870 | except bb.process.NotFoundError as e: | 958 | except bb.process.NotFoundError as e: |
871 | error_message = "Fetch command %s" % (e.command) | 959 | error_message = "Fetch command %s not found" % (e.command) |
872 | except bb.process.ExecutionError as e: | 960 | except bb.process.ExecutionError as e: |
873 | if e.stdout: | 961 | if e.stdout: |
874 | output = "output:\n%s\n%s" % (e.stdout, e.stderr) | 962 | output = "output:\n%s\n%s" % (e.stdout, e.stderr) |
875 | elif e.stderr: | 963 | elif e.stderr: |
876 | output = "output:\n%s" % e.stderr | 964 | output = "output:\n%s" % e.stderr |
877 | else: | 965 | else: |
878 | output = "no output" | 966 | if log: |
967 | output = "see logfile for output" | ||
968 | else: | ||
969 | output = "no output" | ||
879 | error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) | 970 | error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) |
880 | except bb.process.CmdError as e: | 971 | except bb.process.CmdError as e: |
881 | error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) | 972 | error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) |
@@ -937,6 +1028,7 @@ def build_mirroruris(origud, mirrors, ld): | |||
937 | 1028 | ||
938 | try: | 1029 | try: |
939 | newud = FetchData(newuri, ld) | 1030 | newud = FetchData(newuri, ld) |
1031 | newud.ignore_checksums = True | ||
940 | newud.setup_localpath(ld) | 1032 | newud.setup_localpath(ld) |
941 | except bb.fetch2.BBFetchException as e: | 1033 | except bb.fetch2.BBFetchException as e: |
942 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) | 1034 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) |
@@ -1000,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1000 | # If that tarball is a local file:// we need to provide a symlink to it | 1092 | # If that tarball is a local file:// we need to provide a symlink to it |
1001 | dldir = ld.getVar("DL_DIR") | 1093 | dldir = ld.getVar("DL_DIR") |
1002 | 1094 | ||
1095 | if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")): | ||
1096 | ld = ld.createCopy() | ||
1097 | ld.setVar("BB_NO_NETWORK", "1") | ||
1098 | |||
1003 | if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): | 1099 | if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): |
1004 | # Create donestamp in old format to avoid triggering a re-download | 1100 | # Create donestamp in old format to avoid triggering a re-download |
1005 | if ud.donestamp: | 1101 | if ud.donestamp: |
@@ -1021,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1021 | origud.method.build_mirror_data(origud, ld) | 1117 | origud.method.build_mirror_data(origud, ld) |
1022 | return origud.localpath | 1118 | return origud.localpath |
1023 | # Otherwise the result is a local file:// and we symlink to it | 1119 | # Otherwise the result is a local file:// and we symlink to it |
1024 | ensure_symlink(ud.localpath, origud.localpath) | 1120 | # This may also be a link to a shallow archive |
1121 | # When using shallow mode, add a symlink to the original fullshallow | ||
1122 | # path to ensure a valid symlink even in the `PREMIRRORS` case | ||
1123 | origud.method.update_mirror_links(ud, origud) | ||
1025 | update_stamp(origud, ld) | 1124 | update_stamp(origud, ld) |
1026 | return ud.localpath | 1125 | return ud.localpath |
1027 | 1126 | ||
@@ -1046,7 +1145,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1046 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) | 1145 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) |
1047 | logger.debug(str(e)) | 1146 | logger.debug(str(e)) |
1048 | try: | 1147 | try: |
1049 | ud.method.clean(ud, ld) | 1148 | if ud.method.cleanup_upon_failure(): |
1149 | ud.method.clean(ud, ld) | ||
1050 | except UnboundLocalError: | 1150 | except UnboundLocalError: |
1051 | pass | 1151 | pass |
1052 | return False | 1152 | return False |
@@ -1054,23 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1054 | if ud.lockfile and ud.lockfile != origud.lockfile: | 1154 | if ud.lockfile and ud.lockfile != origud.lockfile: |
1055 | bb.utils.unlockfile(lf) | 1155 | bb.utils.unlockfile(lf) |
1056 | 1156 | ||
1057 | |||
1058 | def ensure_symlink(target, link_name): | ||
1059 | if not os.path.exists(link_name): | ||
1060 | if os.path.islink(link_name): | ||
1061 | # Broken symbolic link | ||
1062 | os.unlink(link_name) | ||
1063 | |||
1064 | # In case this is executing without any file locks held (as is | ||
1065 | # the case for file:// URLs), two tasks may end up here at the | ||
1066 | # same time, in which case we do not want the second task to | ||
1067 | # fail when the link has already been created by the first task. | ||
1068 | try: | ||
1069 | os.symlink(target, link_name) | ||
1070 | except FileExistsError: | ||
1071 | pass | ||
1072 | |||
1073 | |||
1074 | def try_mirrors(fetch, d, origud, mirrors, check = False): | 1157 | def try_mirrors(fetch, d, origud, mirrors, check = False): |
1075 | """ | 1158 | """ |
1076 | Try to use a mirrored version of the sources. | 1159 | Try to use a mirrored version of the sources. |
@@ -1099,7 +1182,7 @@ def trusted_network(d, url): | |||
1099 | if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): | 1182 | if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): |
1100 | return True | 1183 | return True |
1101 | 1184 | ||
1102 | pkgname = d.expand(d.getVar('PN', False)) | 1185 | pkgname = d.getVar('PN') |
1103 | trusted_hosts = None | 1186 | trusted_hosts = None |
1104 | if pkgname: | 1187 | if pkgname: |
1105 | trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) | 1188 | trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) |
@@ -1140,11 +1223,11 @@ def srcrev_internal_helper(ud, d, name): | |||
1140 | pn = d.getVar("PN") | 1223 | pn = d.getVar("PN") |
1141 | attempts = [] | 1224 | attempts = [] |
1142 | if name != '' and pn: | 1225 | if name != '' and pn: |
1143 | attempts.append("SRCREV_%s_pn-%s" % (name, pn)) | 1226 | attempts.append("SRCREV_%s:pn-%s" % (name, pn)) |
1144 | if name != '': | 1227 | if name != '': |
1145 | attempts.append("SRCREV_%s" % name) | 1228 | attempts.append("SRCREV_%s" % name) |
1146 | if pn: | 1229 | if pn: |
1147 | attempts.append("SRCREV_pn-%s" % pn) | 1230 | attempts.append("SRCREV:pn-%s" % pn) |
1148 | attempts.append("SRCREV") | 1231 | attempts.append("SRCREV") |
1149 | 1232 | ||
1150 | for a in attempts: | 1233 | for a in attempts: |
@@ -1152,23 +1235,21 @@ def srcrev_internal_helper(ud, d, name): | |||
1152 | if srcrev and srcrev != "INVALID": | 1235 | if srcrev and srcrev != "INVALID": |
1153 | break | 1236 | break |
1154 | 1237 | ||
1155 | if 'rev' in ud.parm and 'tag' in ud.parm: | 1238 | if 'rev' in ud.parm: |
1156 | raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) | 1239 | parmrev = ud.parm['rev'] |
1157 | |||
1158 | if 'rev' in ud.parm or 'tag' in ud.parm: | ||
1159 | if 'rev' in ud.parm: | ||
1160 | parmrev = ud.parm['rev'] | ||
1161 | else: | ||
1162 | parmrev = ud.parm['tag'] | ||
1163 | if srcrev == "INVALID" or not srcrev: | 1240 | if srcrev == "INVALID" or not srcrev: |
1164 | return parmrev | 1241 | return parmrev |
1165 | if srcrev != parmrev: | 1242 | if srcrev != parmrev: |
1166 | raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) | 1243 | raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) |
1167 | return parmrev | 1244 | return parmrev |
1168 | 1245 | ||
1246 | if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev): | ||
1247 | return ud.parm['tag'] | ||
1248 | |||
1169 | if srcrev == "INVALID" or not srcrev: | 1249 | if srcrev == "INVALID" or not srcrev: |
1170 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) | 1250 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) |
1171 | if srcrev == "AUTOINC": | 1251 | if srcrev == "AUTOINC": |
1252 | d.setVar("__BBAUTOREV_ACTED_UPON", True) | ||
1172 | srcrev = ud.method.latest_revision(ud, d, name) | 1253 | srcrev = ud.method.latest_revision(ud, d, name) |
1173 | 1254 | ||
1174 | return srcrev | 1255 | return srcrev |
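To make the override-style change concrete: for a url with name "meta" in a recipe "foo" (illustrative names), the variables are now tried in this order:

    # 1. SRCREV_meta:pn-foo
    # 2. SRCREV_meta
    # 3. SRCREV:pn-foo
    # 4. SRCREV
    # A ;rev= URL parameter is returned when SRCREV is unset or "INVALID" and must agree
    # with SRCREV when both are set; a ;tag= parameter is now only a fallback for the
    # unset/"INVALID" case.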
@@ -1180,23 +1261,21 @@ def get_checksum_file_list(d): | |||
1180 | SRC_URI as a space-separated string | 1261 | SRC_URI as a space-separated string |
1181 | """ | 1262 | """ |
1182 | fetch = Fetch([], d, cache = False, localonly = True) | 1263 | fetch = Fetch([], d, cache = False, localonly = True) |
1183 | |||
1184 | dl_dir = d.getVar('DL_DIR') | ||
1185 | filelist = [] | 1264 | filelist = [] |
1186 | for u in fetch.urls: | 1265 | for u in fetch.urls: |
1187 | ud = fetch.ud[u] | 1266 | ud = fetch.ud[u] |
1188 | |||
1189 | if ud and isinstance(ud.method, local.Local): | 1267 | if ud and isinstance(ud.method, local.Local): |
1190 | paths = ud.method.localpaths(ud, d) | 1268 | found = False |
1269 | paths = ud.method.localfile_searchpaths(ud, d) | ||
1191 | for f in paths: | 1270 | for f in paths: |
1192 | pth = ud.decodedurl | 1271 | pth = ud.path |
1193 | if f.startswith(dl_dir): | 1272 | if os.path.exists(f): |
1194 | # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else | 1273 | found = True |
1195 | if os.path.exists(f): | ||
1196 | bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f))) | ||
1197 | else: | ||
1198 | bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f))) | ||
1199 | filelist.append(f + ":" + str(os.path.exists(f))) | 1274 | filelist.append(f + ":" + str(os.path.exists(f))) |
1275 | if not found: | ||
1276 | bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found" | ||
1277 | "\nThe following paths were searched:" | ||
1278 | "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths))) | ||
1200 | 1279 | ||
1201 | return " ".join(filelist) | 1280 | return " ".join(filelist) |
1202 | 1281 | ||
@@ -1234,28 +1313,28 @@ class FetchData(object): | |||
1234 | self.setup = False | 1313 | self.setup = False |
1235 | 1314 | ||
1236 | def configure_checksum(checksum_id): | 1315 | def configure_checksum(checksum_id): |
1316 | checksum_plain_name = "%ssum" % checksum_id | ||
1237 | if "name" in self.parm: | 1317 | if "name" in self.parm: |
1238 | checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) | 1318 | checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) |
1239 | else: | 1319 | else: |
1240 | checksum_name = "%ssum" % checksum_id | 1320 | checksum_name = checksum_plain_name |
1241 | |||
1242 | setattr(self, "%s_name" % checksum_id, checksum_name) | ||
1243 | 1321 | ||
1244 | if checksum_name in self.parm: | 1322 | if checksum_name in self.parm: |
1245 | checksum_expected = self.parm[checksum_name] | 1323 | checksum_expected = self.parm[checksum_name] |
1246 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: | 1324 | elif checksum_plain_name in self.parm: |
1325 | checksum_expected = self.parm[checksum_plain_name] | ||
1326 | checksum_name = checksum_plain_name | ||
1327 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]: | ||
1247 | checksum_expected = None | 1328 | checksum_expected = None |
1248 | else: | 1329 | else: |
1249 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) | 1330 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) |
1250 | 1331 | ||
1332 | setattr(self, "%s_name" % checksum_id, checksum_name) | ||
1251 | setattr(self, "%s_expected" % checksum_id, checksum_expected) | 1333 | setattr(self, "%s_expected" % checksum_id, checksum_expected) |
1252 | 1334 | ||
1253 | for checksum_id in CHECKSUM_LIST: | 1335 | self.name = self.parm.get("name",'default') |
1254 | configure_checksum(checksum_id) | 1336 | if "," in self.name: |
1255 | 1337 | raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url) | |
1256 | self.ignore_checksums = False | ||
1257 | |||
1258 | self.names = self.parm.get("name",'default').split(',') | ||
1259 | 1338 | ||
1260 | self.method = None | 1339 | self.method = None |
1261 | for m in methods: | 1340 | for m in methods: |
@@ -1276,6 +1355,11 @@ class FetchData(object): | |||
1276 | if hasattr(self.method, "urldata_init"): | 1355 | if hasattr(self.method, "urldata_init"): |
1277 | self.method.urldata_init(self, d) | 1356 | self.method.urldata_init(self, d) |
1278 | 1357 | ||
1358 | for checksum_id in CHECKSUM_LIST: | ||
1359 | configure_checksum(checksum_id) | ||
1360 | |||
1361 | self.ignore_checksums = False | ||
1362 | |||
1279 | if "localpath" in self.parm: | 1363 | if "localpath" in self.parm: |
1280 | # if user sets localpath for file, use it instead. | 1364 | # if user sets localpath for file, use it instead. |
1281 | self.localpath = self.parm["localpath"] | 1365 | self.localpath = self.parm["localpath"] |
@@ -1302,13 +1386,7 @@ class FetchData(object): | |||
1302 | self.lockfile = basepath + '.lock' | 1386 | self.lockfile = basepath + '.lock' |
1303 | 1387 | ||
1304 | def setup_revisions(self, d): | 1388 | def setup_revisions(self, d): |
1305 | self.revisions = {} | 1389 | self.revision = srcrev_internal_helper(self, d, self.name) |
1306 | for name in self.names: | ||
1307 | self.revisions[name] = srcrev_internal_helper(self, d, name) | ||
1308 | |||
1309 | # add compatibility code for non name specified case | ||
1310 | if len(self.names) == 1: | ||
1311 | self.revision = self.revisions[self.names[0]] | ||
1312 | 1390 | ||
1313 | def setup_localpath(self, d): | 1391 | def setup_localpath(self, d): |
1314 | if not self.localpath: | 1392 | if not self.localpath: |
@@ -1355,6 +1433,9 @@ class FetchMethod(object): | |||
1355 | Is localpath something that can be represented by a checksum? | 1433 | Is localpath something that can be represented by a checksum? |
1356 | """ | 1434 | """ |
1357 | 1435 | ||
1436 | # We cannot compute checksums for None | ||
1437 | if urldata.localpath is None: | ||
1438 | return False | ||
1358 | # We cannot compute checksums for directories | 1439 | # We cannot compute checksums for directories |
1359 | if os.path.isdir(urldata.localpath): | 1440 | if os.path.isdir(urldata.localpath): |
1360 | return False | 1441 | return False |
@@ -1367,6 +1448,12 @@ class FetchMethod(object): | |||
1367 | """ | 1448 | """ |
1368 | return False | 1449 | return False |
1369 | 1450 | ||
1451 | def cleanup_upon_failure(self): | ||
1452 | """ | ||
1453 | When a fetch fails, should clean() be called? | ||
1454 | """ | ||
1455 | return True | ||
1456 | |||
1370 | def verify_donestamp(self, ud, d): | 1457 | def verify_donestamp(self, ud, d): |
1371 | """ | 1458 | """ |
1372 | Verify the donestamp file | 1459 | Verify the donestamp file |
@@ -1427,37 +1514,40 @@ class FetchMethod(object): | |||
1427 | (file, urldata.parm.get('unpack'))) | 1514 | (file, urldata.parm.get('unpack'))) |
1428 | 1515 | ||
1429 | base, ext = os.path.splitext(file) | 1516 | base, ext = os.path.splitext(file) |
1430 | if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: | 1517 | if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']: |
1431 | efile = os.path.join(rootdir, os.path.basename(base)) | 1518 | efile = os.path.join(rootdir, os.path.basename(base)) |
1432 | else: | 1519 | else: |
1433 | efile = file | 1520 | efile = file |
1434 | cmd = None | 1521 | cmd = None |
1435 | 1522 | ||
1436 | if unpack: | 1523 | if unpack: |
1524 | tar_cmd = 'tar --extract --no-same-owner' | ||
1525 | if 'striplevel' in urldata.parm: | ||
1526 | tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel'] | ||
1437 | if file.endswith('.tar'): | 1527 | if file.endswith('.tar'): |
1438 | cmd = 'tar x --no-same-owner -f %s' % file | 1528 | cmd = '%s -f %s' % (tar_cmd, file) |
1439 | elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): | 1529 | elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): |
1440 | cmd = 'tar xz --no-same-owner -f %s' % file | 1530 | cmd = '%s -z -f %s' % (tar_cmd, file) |
1441 | elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): | 1531 | elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): |
1442 | cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file | 1532 | cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd) |
1443 | elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): | 1533 | elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): |
1444 | cmd = 'gzip -dc %s > %s' % (file, efile) | 1534 | cmd = 'gzip -dc %s > %s' % (file, efile) |
1445 | elif file.endswith('.bz2'): | 1535 | elif file.endswith('.bz2'): |
1446 | cmd = 'bzip2 -dc %s > %s' % (file, efile) | 1536 | cmd = 'bzip2 -dc %s > %s' % (file, efile) |
1447 | elif file.endswith('.txz') or file.endswith('.tar.xz'): | 1537 | elif file.endswith('.txz') or file.endswith('.tar.xz'): |
1448 | cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file | 1538 | cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd) |
1449 | elif file.endswith('.xz'): | 1539 | elif file.endswith('.xz'): |
1450 | cmd = 'xz -dc %s > %s' % (file, efile) | 1540 | cmd = 'xz -dc %s > %s' % (file, efile) |
1451 | elif file.endswith('.tar.lz'): | 1541 | elif file.endswith('.tar.lz'): |
1452 | cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file | 1542 | cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd) |
1453 | elif file.endswith('.lz'): | 1543 | elif file.endswith('.lz'): |
1454 | cmd = 'lzip -dc %s > %s' % (file, efile) | 1544 | cmd = 'lzip -dc %s > %s' % (file, efile) |
1455 | elif file.endswith('.tar.7z'): | 1545 | elif file.endswith('.tar.7z'): |
1456 | cmd = '7z x -so %s | tar x --no-same-owner -f -' % file | 1546 | cmd = '7z x -so %s | %s -f -' % (file, tar_cmd) |
1457 | elif file.endswith('.7z'): | 1547 | elif file.endswith('.7z'): |
1458 | cmd = '7za x -y %s 1>/dev/null' % file | 1548 | cmd = '7za x -y %s 1>/dev/null' % file |
1459 | elif file.endswith('.tzst') or file.endswith('.tar.zst'): | 1549 | elif file.endswith('.tzst') or file.endswith('.tar.zst'): |
1460 | cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file | 1550 | cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd) |
1461 | elif file.endswith('.zst'): | 1551 | elif file.endswith('.zst'): |
1462 | cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) | 1552 | cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) |
1463 | elif file.endswith('.zip') or file.endswith('.jar'): | 1553 | elif file.endswith('.zip') or file.endswith('.jar'): |
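For context, the new striplevel handling means a hypothetical SRC_URI entry like the one below would be unpacked with tar --strip-components (gzip-compressed tar case shown):

    # SRC_URI = "https://example.com/releases/foo-1.0.tar.gz;striplevel=1"
    # resulting unpack command:
    #   tar --extract --no-same-owner --strip-components=1 -z -f .../foo-1.0.tar.gz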
@@ -1483,14 +1573,14 @@ class FetchMethod(object): | |||
1483 | datafile = None | 1573 | datafile = None |
1484 | if output: | 1574 | if output: |
1485 | for line in output.decode().splitlines(): | 1575 | for line in output.decode().splitlines(): |
1486 | if line.startswith('data.tar.'): | 1576 | if line.startswith('data.tar.') or line == 'data.tar': |
1487 | datafile = line | 1577 | datafile = line |
1488 | break | 1578 | break |
1489 | else: | 1579 | else: |
1490 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) | 1580 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url) |
1491 | else: | 1581 | else: |
1492 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) | 1582 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) |
1493 | cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) | 1583 | cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) |
1494 | 1584 | ||
1495 | # If 'subdir' param exists, create a dir and use it as destination for unpack cmd | 1585 | # If 'subdir' param exists, create a dir and use it as destination for unpack cmd |
1496 | if 'subdir' in urldata.parm: | 1586 | if 'subdir' in urldata.parm: |
@@ -1506,6 +1596,7 @@ class FetchMethod(object): | |||
1506 | unpackdir = rootdir | 1596 | unpackdir = rootdir |
1507 | 1597 | ||
1508 | if not unpack or not cmd: | 1598 | if not unpack or not cmd: |
1599 | urldata.unpack_tracer.unpack("file-copy", unpackdir) | ||
1509 | # If file == dest, then avoid any copies, as we already put the file into dest! | 1600 | # If file == dest, then avoid any copies, as we already put the file into dest! |
1510 | dest = os.path.join(unpackdir, os.path.basename(file)) | 1601 | dest = os.path.join(unpackdir, os.path.basename(file)) |
1511 | if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): | 1602 | if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): |
@@ -1519,7 +1610,9 @@ class FetchMethod(object): | |||
1519 | if urlpath.find("/") != -1: | 1610 | if urlpath.find("/") != -1: |
1520 | destdir = urlpath.rsplit("/", 1)[0] + '/' | 1611 | destdir = urlpath.rsplit("/", 1)[0] + '/' |
1521 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) | 1612 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) |
1522 | cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) | 1613 | cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir) |
1614 | else: | ||
1615 | urldata.unpack_tracer.unpack("archive-extract", unpackdir) | ||
1523 | 1616 | ||
1524 | if not cmd: | 1617 | if not cmd: |
1525 | return | 1618 | return |
@@ -1546,6 +1639,28 @@ class FetchMethod(object): | |||
1546 | """ | 1639 | """ |
1547 | bb.utils.remove(urldata.localpath) | 1640 | bb.utils.remove(urldata.localpath) |
1548 | 1641 | ||
1642 | def ensure_symlink(self, target, link_name): | ||
1643 | if not os.path.exists(link_name): | ||
1644 | dirname = os.path.dirname(link_name) | ||
1645 | bb.utils.mkdirhier(dirname) | ||
1646 | if os.path.islink(link_name): | ||
1647 | # Broken symbolic link | ||
1648 | os.unlink(link_name) | ||
1649 | |||
1650 | # In case this is executing without any file locks held (as is | ||
1651 | # the case for file:// URLs), two tasks may end up here at the | ||
1652 | # same time, in which case we do not want the second task to | ||
1653 | # fail when the link has already been created by the first task. | ||
1654 | try: | ||
1655 | os.symlink(target, link_name) | ||
1656 | except FileExistsError: | ||
1657 | pass | ||
1658 | |||
1659 | def update_mirror_links(self, ud, origud): | ||
1660 | # For local file:// results, create a symlink to them | ||
1661 | # This may also be a link to a shallow archive | ||
1662 | self.ensure_symlink(ud.localpath, origud.localpath) | ||
1663 | |||
1549 | def try_premirror(self, urldata, d): | 1664 | def try_premirror(self, urldata, d): |
1550 | """ | 1665 | """ |
1551 | Should premirrors be used? | 1666 | Should premirrors be used? |
@@ -1573,13 +1688,13 @@ class FetchMethod(object): | |||
1573 | if not hasattr(self, "_latest_revision"): | 1688 | if not hasattr(self, "_latest_revision"): |
1574 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) | 1689 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) |
1575 | 1690 | ||
1576 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | ||
1577 | key = self.generate_revision_key(ud, d, name) | 1691 | key = self.generate_revision_key(ud, d, name) |
1578 | try: | 1692 | |
1579 | return revs[key] | 1693 | rev = _revisions_cache.get_rev(key) |
1580 | except KeyError: | 1694 | if rev is None: |
1581 | revs[key] = rev = self._latest_revision(ud, d, name) | 1695 | rev = self._latest_revision(ud, d, name) |
1582 | return rev | 1696 | _revisions_cache.set_rev(key, rev) |
1697 | return rev | ||
1583 | 1698 | ||
1584 | def sortable_revision(self, ud, d, name): | 1699 | def sortable_revision(self, ud, d, name): |
1585 | latest_rev = self._build_revision(ud, d, name) | 1700 | latest_rev = self._build_revision(ud, d, name) |
@@ -1611,12 +1726,61 @@ class FetchMethod(object): | |||
1611 | """ | 1726 | """ |
1612 | return [] | 1727 | return [] |
1613 | 1728 | ||
1729 | |||
1730 | class DummyUnpackTracer(object): | ||
1731 | """ | ||
1732 | Abstract API definition for a class that traces unpacked source files back | ||
1733 | to their respective upstream SRC_URI entries, for software composition | ||
1734 | analysis, license compliance and detailed SBOM generation purposes. | ||
1735 | User may load their own unpack tracer class (instead of the dummy | ||
1736 | one) by setting the BB_UNPACK_TRACER_CLASS config parameter. | ||
1737 | """ | ||
1738 | def start(self, unpackdir, urldata_dict, d): | ||
1739 | """ | ||
1740 | Start tracing the core Fetch.unpack process, using an index to map | ||
1741 | unpacked files to each SRC_URI entry. | ||
1742 | This method is called by Fetch.unpack and it may receive nested calls by | ||
1743 | gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit | ||
1744 | URLs and by recursively calling Fetch.unpack from new (nested) Fetch | ||
1745 | instances. | ||
1746 | """ | ||
1747 | return | ||
1748 | def start_url(self, url): | ||
1749 | """Start tracing url unpack process. | ||
1750 | This method is called by Fetch.unpack before the fetcher-specific unpack | ||
1751 | method starts, and it may receive nested calls by gitsm and npmsw | ||
1752 | fetchers. | ||
1753 | """ | ||
1754 | return | ||
1755 | def unpack(self, unpack_type, destdir): | ||
1756 | """ | ||
1757 | Set unpack_type and destdir for current url. | ||
1758 | This method is called by the fetcher-specific unpack method after url | ||
1759 | tracing started. | ||
1760 | """ | ||
1761 | return | ||
1762 | def finish_url(self, url): | ||
1763 | """Finish tracing url unpack process and update the file index. | ||
1764 | This method is called by Fetch.unpack after the fetcher-specific unpack | ||
1765 | method finished its job, and it may receive nested calls by gitsm | ||
1766 | and npmsw fetchers. | ||
1767 | """ | ||
1768 | return | ||
1769 | def complete(self): | ||
1770 | """ | ||
1771 | Finish tracing the Fetch.unpack process, and check if all nested | ||
1772 | Fetch.unpack calls (if any) have been completed; if so, save collected | ||
1773 | metadata. | ||
1774 | """ | ||
1775 | return | ||
1776 | |||
1777 | |||
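As a rough illustration of the interface defined above, a user-supplied tracer could record which files each SRC_URI entry produced. This is only a sketch against the documented hooks; the class name, file walking and JSON output are invented, not part of this change:

    import json
    import os

    class FileListUnpackTracer(object):
        # Hypothetical tracer: maps each url to the files unpacked for it.
        def start(self, unpackdir, urldata_dict, d):
            self.unpackdir = unpackdir
            self.files = {}                     # url -> list of unpacked paths

        def start_url(self, url):
            self.destdir = None

        def unpack(self, unpack_type, destdir):
            self.destdir = destdir

        def finish_url(self, url):
            found = []
            if self.destdir:
                for root, _, names in os.walk(self.destdir):
                    found += [os.path.join(root, n) for n in names]
            self.files[url] = found

        def complete(self):
            # Write the collected index; the output path is arbitrary here.
            with open(os.path.join(self.unpackdir, "unpack-trace.json"), "w") as f:
                json.dump(self.files, f, indent=2)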
1614 | class Fetch(object): | 1778 | class Fetch(object): |
1615 | def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): | 1779 | def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): |
1616 | if localonly and cache: | 1780 | if localonly and cache: |
1617 | raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") | 1781 | raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") |
1618 | 1782 | ||
1619 | if len(urls) == 0: | 1783 | if not urls: |
1620 | urls = d.getVar("SRC_URI").split() | 1784 | urls = d.getVar("SRC_URI").split() |
1621 | self.urls = urls | 1785 | self.urls = urls |
1622 | self.d = d | 1786 | self.d = d |
@@ -1631,10 +1795,30 @@ class Fetch(object): | |||
1631 | if key in urldata_cache: | 1795 | if key in urldata_cache: |
1632 | self.ud = urldata_cache[key] | 1796 | self.ud = urldata_cache[key] |
1633 | 1797 | ||
1798 | # the unpack_tracer object needs to be made available to possible nested | ||
1799 | # Fetch instances (when those are created by gitsm and npmsw fetchers) | ||
1800 | # so we set it as a global variable | ||
1801 | global unpack_tracer | ||
1802 | try: | ||
1803 | unpack_tracer | ||
1804 | except NameError: | ||
1805 | class_path = d.getVar("BB_UNPACK_TRACER_CLASS") | ||
1806 | if class_path: | ||
1807 | # use user-defined unpack tracer class | ||
1808 | import importlib | ||
1809 | module_name, _, class_name = class_path.rpartition(".") | ||
1810 | module = importlib.import_module(module_name) | ||
1811 | class_ = getattr(module, class_name) | ||
1812 | unpack_tracer = class_() | ||
1813 | else: | ||
1814 | # fall back to the dummy/abstract class | ||
1815 | unpack_tracer = DummyUnpackTracer() | ||
1816 | |||
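Wiring in a tracer like the one sketched earlier is then purely a configuration matter: rpartition(".") above splits the value into an importable module path and a class name, so the module has to be reachable on bitbake's Python path. The names below are placeholders:

    # e.g. in local.conf (module and class names are hypothetical)
    BB_UNPACK_TRACER_CLASS = "mylayer.tracer.FileListUnpackTracer"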
1634 | for url in urls: | 1817 | for url in urls: |
1635 | if url not in self.ud: | 1818 | if url not in self.ud: |
1636 | try: | 1819 | try: |
1637 | self.ud[url] = FetchData(url, d, localonly) | 1820 | self.ud[url] = FetchData(url, d, localonly) |
1821 | self.ud[url].unpack_tracer = unpack_tracer | ||
1638 | except NonLocalMethod: | 1822 | except NonLocalMethod: |
1639 | if localonly: | 1823 | if localonly: |
1640 | self.ud[url] = None | 1824 | self.ud[url] = None |
@@ -1648,7 +1832,7 @@ class Fetch(object): | |||
1648 | self.ud[url] = FetchData(url, self.d) | 1832 | self.ud[url] = FetchData(url, self.d) |
1649 | 1833 | ||
1650 | self.ud[url].setup_localpath(self.d) | 1834 | self.ud[url].setup_localpath(self.d) |
1651 | return self.d.expand(self.ud[url].localpath) | 1835 | return self.ud[url].localpath |
1652 | 1836 | ||
1653 | def localpaths(self): | 1837 | def localpaths(self): |
1654 | """ | 1838 | """ |
@@ -1673,6 +1857,7 @@ class Fetch(object): | |||
1673 | network = self.d.getVar("BB_NO_NETWORK") | 1857 | network = self.d.getVar("BB_NO_NETWORK") |
1674 | premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) | 1858 | premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) |
1675 | 1859 | ||
1860 | checksum_missing_messages = [] | ||
1676 | for u in urls: | 1861 | for u in urls: |
1677 | ud = self.ud[u] | 1862 | ud = self.ud[u] |
1678 | ud.setup_localpath(self.d) | 1863 | ud.setup_localpath(self.d) |
@@ -1684,7 +1869,6 @@ class Fetch(object): | |||
1684 | 1869 | ||
1685 | try: | 1870 | try: |
1686 | self.d.setVar("BB_NO_NETWORK", network) | 1871 | self.d.setVar("BB_NO_NETWORK", network) |
1687 | |||
1688 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): | 1872 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): |
1689 | done = True | 1873 | done = True |
1690 | elif m.try_premirror(ud, self.d): | 1874 | elif m.try_premirror(ud, self.d): |
@@ -1701,23 +1885,28 @@ class Fetch(object): | |||
1701 | logger.debug(str(e)) | 1885 | logger.debug(str(e)) |
1702 | done = False | 1886 | done = False |
1703 | 1887 | ||
1888 | d = self.d | ||
1704 | if premirroronly: | 1889 | if premirroronly: |
1705 | self.d.setVar("BB_NO_NETWORK", "1") | 1890 | # Only disable the network in a copy |
1891 | d = bb.data.createCopy(self.d) | ||
1892 | d.setVar("BB_NO_NETWORK", "1") | ||
1706 | 1893 | ||
1707 | firsterr = None | 1894 | firsterr = None |
1708 | verified_stamp = m.verify_donestamp(ud, self.d) | 1895 | verified_stamp = False |
1709 | if not done and (not verified_stamp or m.need_update(ud, self.d)): | 1896 | if done: |
1897 | verified_stamp = m.verify_donestamp(ud, d) | ||
1898 | if not done and (not verified_stamp or m.need_update(ud, d)): | ||
1710 | try: | 1899 | try: |
1711 | if not trusted_network(self.d, ud.url): | 1900 | if not trusted_network(d, ud.url): |
1712 | raise UntrustedUrl(ud.url) | 1901 | raise UntrustedUrl(ud.url) |
1713 | logger.debug("Trying Upstream") | 1902 | logger.debug("Trying Upstream") |
1714 | m.download(ud, self.d) | 1903 | m.download(ud, d) |
1715 | if hasattr(m, "build_mirror_data"): | 1904 | if hasattr(m, "build_mirror_data"): |
1716 | m.build_mirror_data(ud, self.d) | 1905 | m.build_mirror_data(ud, d) |
1717 | done = True | 1906 | done = True |
1718 | # early checksum verify, so that if the checksum mismatched, | 1907 | # early checksum verify, so that if the checksum mismatched, |
1719 | # the fetcher still has a chance to fetch from a mirror | 1908 | # the fetcher still has a chance to fetch from a mirror |
1720 | m.update_donestamp(ud, self.d) | 1909 | m.update_donestamp(ud, d) |
1721 | 1910 | ||
1722 | except bb.fetch2.NetworkAccess: | 1911 | except bb.fetch2.NetworkAccess: |
1723 | raise | 1912 | raise |
@@ -1735,18 +1924,18 @@ class Fetch(object): | |||
1735 | logger.debug(str(e)) | 1924 | logger.debug(str(e)) |
1736 | firsterr = e | 1925 | firsterr = e |
1737 | # Remove any incomplete fetch | 1926 | # Remove any incomplete fetch |
1738 | if not verified_stamp: | 1927 | if not verified_stamp and m.cleanup_upon_failure(): |
1739 | m.clean(ud, self.d) | 1928 | m.clean(ud, d) |
1740 | logger.debug("Trying MIRRORS") | 1929 | logger.debug("Trying MIRRORS") |
1741 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) | 1930 | mirrors = mirror_from_string(d.getVar('MIRRORS')) |
1742 | done = m.try_mirrors(self, ud, self.d, mirrors) | 1931 | done = m.try_mirrors(self, ud, d, mirrors) |
1743 | 1932 | ||
1744 | if not done or not m.done(ud, self.d): | 1933 | if not done or not m.done(ud, d): |
1745 | if firsterr: | 1934 | if firsterr: |
1746 | logger.error(str(firsterr)) | 1935 | logger.error(str(firsterr)) |
1747 | raise FetchError("Unable to fetch URL from any source.", u) | 1936 | raise FetchError("Unable to fetch URL from any source.", u) |
1748 | 1937 | ||
1749 | m.update_donestamp(ud, self.d) | 1938 | m.update_donestamp(ud, d) |
1750 | 1939 | ||
1751 | except IOError as e: | 1940 | except IOError as e: |
1752 | if e.errno in [errno.ESTALE]: | 1941 | if e.errno in [errno.ESTALE]: |
@@ -1754,17 +1943,28 @@ class Fetch(object): | |||
1754 | raise ChecksumError("Stale Error Detected") | 1943 | raise ChecksumError("Stale Error Detected") |
1755 | 1944 | ||
1756 | except BBFetchException as e: | 1945 | except BBFetchException as e: |
1757 | if isinstance(e, ChecksumError): | 1946 | if isinstance(e, NoChecksumError): |
1947 | (message, _) = e.args | ||
1948 | checksum_missing_messages.append(message) | ||
1949 | continue | ||
1950 | elif isinstance(e, ChecksumError): | ||
1758 | logger.error("Checksum failure fetching %s" % u) | 1951 | logger.error("Checksum failure fetching %s" % u) |
1759 | raise | 1952 | raise |
1760 | 1953 | ||
1761 | finally: | 1954 | finally: |
1762 | if ud.lockfile: | 1955 | if ud.lockfile: |
1763 | bb.utils.unlockfile(lf) | 1956 | bb.utils.unlockfile(lf) |
1957 | if checksum_missing_messages: | ||
1958 | logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages)) | ||
1959 | raise BBFetchException("There were some missing checksums in the recipe") | ||
1764 | 1960 | ||
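The collected messages point at SRC_URI entries that lack checksums; in a recipe those are normally declared alongside the URL, along these lines (URL and digest are placeholders):

    SRC_URI = "https://example.org/foo-1.0.tar.gz"
    SRC_URI[sha256sum] = "<sha256 hexdigest of foo-1.0.tar.gz>"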
1765 | def checkstatus(self, urls=None): | 1961 | def checkstatus(self, urls=None): |
1766 | """ | 1962 | """ |
1767 | Check all urls exist upstream | 1963 | Check all URLs exist upstream. |
1964 | |||
1965 | Returns None if the URLs exist, raises FetchError if the check wasn't | ||
1966 | successful but there wasn't an error (such as file not found), and | ||
1967 | raises other exceptions in error cases. | ||
1768 | """ | 1968 | """ |
1769 | 1969 | ||
1770 | if not urls: | 1970 | if not urls: |
@@ -1787,7 +1987,7 @@ class Fetch(object): | |||
1787 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) | 1987 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) |
1788 | 1988 | ||
1789 | if not ret: | 1989 | if not ret: |
1790 | raise FetchError("URL %s doesn't work" % u, u) | 1990 | raise FetchError("URL doesn't work", u) |
1791 | 1991 | ||
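A rough usage sketch of the semantics documented for checkstatus(), assuming a datastore d is in scope (the URL and helper name are placeholders):

    import bb.fetch2

    def src_uri_reachable(d, url="https://example.org/foo-1.0.tar.gz"):
        fetcher = bb.fetch2.Fetch([url], d)
        try:
            fetcher.checkstatus()        # returns None when every URL exists
            return True
        except bb.fetch2.FetchError:
            return False                 # check failed without a hard error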
1792 | def unpack(self, root, urls=None): | 1992 | def unpack(self, root, urls=None): |
1793 | """ | 1993 | """ |
@@ -1797,6 +1997,8 @@ class Fetch(object): | |||
1797 | if not urls: | 1997 | if not urls: |
1798 | urls = self.urls | 1998 | urls = self.urls |
1799 | 1999 | ||
2000 | unpack_tracer.start(root, self.ud, self.d) | ||
2001 | |||
1800 | for u in urls: | 2002 | for u in urls: |
1801 | ud = self.ud[u] | 2003 | ud = self.ud[u] |
1802 | ud.setup_localpath(self.d) | 2004 | ud.setup_localpath(self.d) |
@@ -1804,11 +2006,15 @@ class Fetch(object): | |||
1804 | if ud.lockfile: | 2006 | if ud.lockfile: |
1805 | lf = bb.utils.lockfile(ud.lockfile) | 2007 | lf = bb.utils.lockfile(ud.lockfile) |
1806 | 2008 | ||
2009 | unpack_tracer.start_url(u) | ||
1807 | ud.method.unpack(ud, root, self.d) | 2010 | ud.method.unpack(ud, root, self.d) |
2011 | unpack_tracer.finish_url(u) | ||
1808 | 2012 | ||
1809 | if ud.lockfile: | 2013 | if ud.lockfile: |
1810 | bb.utils.unlockfile(lf) | 2014 | bb.utils.unlockfile(lf) |
1811 | 2015 | ||
2016 | unpack_tracer.complete() | ||
2017 | |||
1812 | def clean(self, urls=None): | 2018 | def clean(self, urls=None): |
1813 | """ | 2019 | """ |
1814 | Clean files that the fetcher gets or places | 2020 | Clean files that the fetcher gets or places |
@@ -1908,6 +2114,10 @@ from . import repo | |||
1908 | from . import clearcase | 2114 | from . import clearcase |
1909 | from . import npm | 2115 | from . import npm |
1910 | from . import npmsw | 2116 | from . import npmsw |
2117 | from . import az | ||
2118 | from . import crate | ||
2119 | from . import gcp | ||
2120 | from . import gomod | ||
1911 | 2121 | ||
1912 | methods.append(local.Local()) | 2122 | methods.append(local.Local()) |
1913 | methods.append(wget.Wget()) | 2123 | methods.append(wget.Wget()) |
@@ -1927,3 +2137,8 @@ methods.append(repo.Repo()) | |||
1927 | methods.append(clearcase.ClearCase()) | 2137 | methods.append(clearcase.ClearCase()) |
1928 | methods.append(npm.Npm()) | 2138 | methods.append(npm.Npm()) |
1929 | methods.append(npmsw.NpmShrinkWrap()) | 2139 | methods.append(npmsw.NpmShrinkWrap()) |
2140 | methods.append(az.Az()) | ||
2141 | methods.append(crate.Crate()) | ||
2142 | methods.append(gcp.GCP()) | ||
2143 | methods.append(gomod.GoMod()) | ||
2144 | methods.append(gomod.GoModGit()) | ||
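For orientation, the crate and gomod fetchers registered above are addressed from SRC_URI with their own schemes; the entries below are illustrative placeholders (crate name, module path and version are invented, and the exact parameter forms should be checked against the fetcher modules), while the az and gcp fetchers are used analogously with az:// and gcp:// URLs:

    SRC_URI += "crate://crates.io/glob/0.3.1"
    SRC_URI += "gomod://golang.org/x/net;version=v0.9.0"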