Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
 bitbake/lib/bb/fetch2/__init__.py | 365
 1 file changed, 274 insertions(+), 91 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..5bf2c4b8cf 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -113,7 +113,7 @@ class MissingParameterError(BBFetchException):
         self.args = (missing, url)

 class ParameterError(BBFetchException):
-    """Exception raised when a url cannot be proccessed due to invalid parameters."""
+    """Exception raised when a url cannot be processed due to invalid parameters."""
     def __init__(self, message, url):
         msg = "URL: '%s' has invalid parameters. %s" % (url, message)
         self.url = url
@@ -182,7 +182,7 @@ class URI(object):
     Some notes about relative URIs: while it's specified that
     a URI beginning with <scheme>:// should either be directly
     followed by a hostname or a /, the old URI handling of the
-    fetch2 library did not comform to this. Therefore, this URI
+    fetch2 library did not conform to this. Therefore, this URI
     class has some kludges to make sure that URIs are parsed in
     a way comforming to bitbake's current usage. This URI class
     supports the following:
@@ -199,7 +199,7 @@ class URI(object):
     file://hostname/absolute/path.diff (would be IETF compliant)

     Note that the last case only applies to a list of
-    "whitelisted" schemes (currently only file://), that requires
+    explicitly allowed schemes (currently only file://), that requires
     its URIs to not have a network location.
     """
205 205
@@ -290,12 +290,12 @@ class URI(object):

     def _param_str_split(self, string, elmdelim, kvdelim="="):
         ret = collections.OrderedDict()
-        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
+        for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
             ret[k] = v
         return ret

     def _param_str_join(self, dict_, elmdelim, kvdelim="="):
-        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+        return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])

     @property
     def hostport(self):
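
Note: the two _param_str_* helpers now round-trip URL parameters that carry no
value. A minimal sketch of the effect, assuming a BitBake environment where this
module is importable (the example URL is illustrative):

    from bb.fetch2 import URI

    uri = URI("http://example.com/pkg.tar.gz;foo=bar;flag")
    print(uri.params)   # OrderedDict([('foo', 'bar'), ('flag', None)])
    print(str(uri))     # re-joins without a dangling '=':
                        # http://example.com/pkg.tar.gz;foo=bar;flag
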
@@ -388,7 +388,7 @@ def decodeurl(url):
             if s:
                 if not '=' in s:
                     raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
-                s1, s2 = s.split('=')
+                s1, s2 = s.split('=', 1)
                 p[s1] = s2

     return type, host, urllib.parse.unquote(path), user, pswd, p
@@ -402,24 +402,24 @@ def encodeurl(decoded):

     if not type:
         raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
-    url = '%s://' % type
+    url = ['%s://' % type]
     if user and type != "file":
-        url += "%s" % user
+        url.append("%s" % user)
         if pswd:
-            url += ":%s" % pswd
-        url += "@"
+            url.append(":%s" % pswd)
+        url.append("@")
     if host and type != "file":
-        url += "%s" % host
+        url.append("%s" % host)
     if path:
         # Standardise path to ensure comparisons work
         while '//' in path:
             path = path.replace("//", "/")
-        url += "%s" % urllib.parse.quote(path)
+        url.append("%s" % urllib.parse.quote(path))
     if p:
         for parm in p:
-            url += ";%s=%s" % (parm, p[parm])
+            url.append(";%s=%s" % (parm, p[parm]))

-    return url
+    return "".join(url)

 def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     if not ud.url or not uri_find or not uri_replace:
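
Note: the split('=', 1) fix and the list-based encodeurl matter when a parameter
value itself contains '='; previously s.split('=') raised ValueError on such
URLs. A round-trip sketch under the same assumptions as above:

    from bb.fetch2 import decodeurl, encodeurl

    url = "https://example.com/f.tar.gz;downloadfilename=f.tar.gz?id=42"
    scheme, host, path, user, pswd, params = decodeurl(url)
    # everything after the first '=' stays in the value
    assert params["downloadfilename"] == "f.tar.gz?id=42"
    # encodeurl appends fragments to a list and joins once at the end
    assert encodeurl((scheme, host, path, user, pswd, params)) == url
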
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_replace_decoded = list(decodeurl(uri_replace))
     logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
+    # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
         regexp = i
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 for l in replacements:
                     uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                 result_decoded[loc][k] = uri_replace_decoded[loc][k]
+        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
+            # User/password in the replacement is just a straight replacement
+            result_decoded[loc] = uri_replace_decoded[loc]
         elif (re.match(regexp, uri_decoded[loc])):
             if not uri_replace_decoded[loc]:
                 result_decoded[loc] = ""
@@ -465,10 +469,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 basename = os.path.basename(mirrortarball)
                 # Kill parameters, they make no sense for mirror tarballs
                 uri_decoded[5] = {}
+                uri_find_decoded[5] = {}
             elif ud.localpath and ud.method.supports_checksum(ud):
                 basename = os.path.basename(ud.localpath)
-            if basename and not result_decoded[loc].endswith(basename):
-                result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+            if basename:
+                uri_basename = os.path.basename(uri_decoded[loc])
+                # Prefix with a slash as a sentinel in case
+                # result_decoded[loc] does not contain one.
+                path = "/" + result_decoded[loc]
+                if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
+                    result_decoded[loc] = path[1:-len(uri_basename)] + basename
+                elif not path.endswith("/" + basename):
+                    result_decoded[loc] = os.path.join(path[1:], basename)
         else:
             return None
     result = encodeurl(result_decoded)
@@ -506,7 +518,7 @@ def fetcher_init(d):
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

-    _checksum_cache.init_cache(d)
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))

     for m in methods:
         if hasattr(m, "init"):
@@ -534,7 +546,7 @@ def mirror_from_string(data):
         bb.warn('Invalid mirror data %s, should have paired members.' % data)
     return list(zip(*[iter(mirrors)]*2))

-def verify_checksum(ud, d, precomputed={}):
+def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
     """
     verify the MD5 and SHA256 checksum for downloaded src

@@ -548,20 +560,25 @@ def verify_checksum(ud, d, precomputed={}):
     file against those in the recipe each time, rather than only after
     downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
     """
-
     if ud.ignore_checksums or not ud.method.supports_checksum(ud):
         return {}

+    if localpath is None:
+        localpath = ud.localpath
+
     def compute_checksum_info(checksum_id):
         checksum_name = getattr(ud, "%s_name" % checksum_id)

         if checksum_id in precomputed:
             checksum_data = precomputed[checksum_id]
         else:
-            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)
+            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)

         checksum_expected = getattr(ud, "%s_expected" % checksum_id)

+        if checksum_expected == '':
+            checksum_expected = None
+
         return {
             "id": checksum_id,
             "name": checksum_name,
@@ -581,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}):
         checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]

     # If no checksum has been provided
-    if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
+    if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
         messages = []
         strict = d.getVar("BB_STRICT_CHECKSUM") or "0"

         # If strict checking enabled and neither sum defined, raise error
         if strict == "1":
-            messages.append("No checksum specified for '%s', please add at " \
-                            "least one to the recipe:" % ud.localpath)
-            messages.extend(checksum_lines)
-            logger.error("\n".join(messages))
-            raise NoChecksumError("Missing SRC_URI checksum", ud.url)
+            raise NoChecksumError("\n".join(checksum_lines))

         bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)

@@ -612,8 +625,8 @@ def verify_checksum(ud, d, precomputed={}):

     for ci in checksum_infos:
         if ci["expected"] and ci["expected"] != ci["data"]:
-            messages.append("File: '%s' has %s checksum %s when %s was " \
-                            "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
+            messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
+                            "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
             bad_checksum = ci["data"]

     if bad_checksum:
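
Note: the two new keyword arguments let a caller checksum a candidate file other
than ud.localpath and defer the missing-checksum failure. A hedged usage sketch
(ud and d come from an active fetch; the path is hypothetical):

    import bb.fetch2

    checksums = bb.fetch2.verify_checksum(
        ud, d,
        localpath="/downloads/candidate/pkg-1.0.tar.gz",  # hypothetical path
        fatal_nochecksum=False,  # fire MissingChecksumEvent instead of raising
    )
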
@@ -731,13 +744,16 @@ def subprocess_setup():
     # SIGPIPE errors are known issues with gzip/bash
     signal.signal(signal.SIGPIPE, signal.SIG_DFL)

-def get_autorev(d):
-    # only not cache src rev in autorev case
+def mark_recipe_nocache(d):
     if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
+
+def get_autorev(d):
+    mark_recipe_nocache(d)
+    d.setVar("__BBAUTOREV_SEEN", True)
     return "AUTOINC"

-def get_srcrev(d, method_name='sortable_revision'):
+def _get_srcrev(d, method_name='sortable_revision'):
     """
     Return the revision string, usually for use in the version string (PV) of the current package
     Most packages usually only have one SCM so we just pass on the call.
@@ -751,23 +767,34 @@ def get_srcrev(d, method_name='sortable_revision'):
     that fetcher provides a method with the given name and the same signature as sortable_revision.
     """

+    d.setVar("__BBSRCREV_SEEN", "1")
+    recursion = d.getVar("__BBINSRCREV")
+    if recursion:
+        raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
+    d.setVar("__BBINSRCREV", True)
+
     scms = []
+    revs = []
     fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
             scms.append(u)

-    if len(scms) == 0:
-        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+    if not scms:
+        d.delVar("__BBINSRCREV")
+        return "", revs
+

     if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
         autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+        revs.append(rev)
         if len(rev) > 10:
             rev = rev[:10]
+        d.delVar("__BBINSRCREV")
         if autoinc:
-            return "AUTOINC+" + rev
-        return rev
+            return "AUTOINC+" + rev, revs
+        return rev, revs

     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -783,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'):
         ud = urldata[scm]
         for name in ud.names:
             autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+            revs.append(rev)
             seenautoinc = seenautoinc or autoinc
             if len(rev) > 10:
                 rev = rev[:10]
@@ -799,12 +827,70 @@ def get_srcrev(d, method_name='sortable_revision'):
     if seenautoinc:
         format = "AUTOINC+" + format

-    return format
+    d.delVar("__BBINSRCREV")
+    return format, revs
+
+def get_hashvalue(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    return " ".join(revs)
+
+def get_pkgv_string(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    return pkgv
+
+def get_srcrev(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    if not pkgv:
+        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+    return pkgv

 def localpath(url, d):
     fetcher = bb.fetch2.Fetch([url], d)
     return fetcher.localpath(url)

+# Need to export PATH as binary could be in metadata paths
+# rather than host provided
+# Also include some other variables.
+FETCH_EXPORT_VARS = ['HOME', 'PATH',
+                     'HTTP_PROXY', 'http_proxy',
+                     'HTTPS_PROXY', 'https_proxy',
+                     'FTP_PROXY', 'ftp_proxy',
+                     'FTPS_PROXY', 'ftps_proxy',
+                     'NO_PROXY', 'no_proxy',
+                     'ALL_PROXY', 'all_proxy',
+                     'GIT_PROXY_COMMAND',
+                     'GIT_SSH',
+                     'GIT_SSH_COMMAND',
+                     'GIT_SSL_CAINFO',
+                     'GIT_SMART_HTTP',
+                     'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+                     'SOCKS5_USER', 'SOCKS5_PASSWD',
+                     'DBUS_SESSION_BUS_ADDRESS',
+                     'P4CONFIG',
+                     'SSL_CERT_FILE',
+                     'NODE_EXTRA_CA_CERTS',
+                     'AWS_PROFILE',
+                     'AWS_ACCESS_KEY_ID',
+                     'AWS_SECRET_ACCESS_KEY',
+                     'AWS_ROLE_ARN',
+                     'AWS_WEB_IDENTITY_TOKEN_FILE',
+                     'AWS_DEFAULT_REGION',
+                     'AWS_SESSION_TOKEN',
+                     'GIT_CACHE_PATH',
+                     'REMOTE_CONTAINERS_IPC',
+                     'SSL_CERT_DIR']
+
+def get_fetcher_environment(d):
+    newenv = {}
+    origenv = d.getVar("BB_ORIGENV")
+    for name in bb.fetch2.FETCH_EXPORT_VARS:
+        value = d.getVar(name)
+        if not value and origenv:
+            value = origenv.getVar(name)
+        if value:
+            newenv[name] = value
+    return newenv
+
 def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     """
     Run cmd returning the command output
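
Note: get_srcrev is now a thin wrapper over _get_srcrev, which also returns the
raw revision list; get_hashvalue feeds task signatures and get_pkgv_string feeds
PV. A sketch of how the three entry points relate, given a datastore 'd':

    import bb.fetch2

    pv = bb.fetch2.get_pkgv_string(d)   # e.g. "AUTOINC+1a2b3c4d5e", or "" with no SCM
    sig = bb.fetch2.get_hashvalue(d)    # full revisions, space-separated
    rev = bb.fetch2.get_srcrev(d)       # raises FetchError if SRC_URI has no SCM
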
@@ -813,25 +899,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     Optionally remove the files/directories listed in cleanup upon failure
     """

-    # Need to export PATH as binary could be in metadata paths
-    # rather than host provided
-    # Also include some other variables.
-    # FIXME: Should really include all export varaiables?
-    exportvars = ['HOME', 'PATH',
-                  'HTTP_PROXY', 'http_proxy',
-                  'HTTPS_PROXY', 'https_proxy',
-                  'FTP_PROXY', 'ftp_proxy',
-                  'FTPS_PROXY', 'ftps_proxy',
-                  'NO_PROXY', 'no_proxy',
-                  'ALL_PROXY', 'all_proxy',
-                  'GIT_PROXY_COMMAND',
-                  'GIT_SSH',
-                  'GIT_SSL_CAINFO',
-                  'GIT_SMART_HTTP',
-                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
-                  'SOCKS5_USER', 'SOCKS5_PASSWD',
-                  'DBUS_SESSION_BUS_ADDRESS',
-                  'P4CONFIG']
+    exportvars = FETCH_EXPORT_VARS

     if not cleanup:
         cleanup = []
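
Note: runfetchcmd now shares its export list with get_fetcher_environment, so
external callers can reproduce the fetcher's environment. A sketch (the
subprocess call is illustrative, not part of this change):

    import subprocess
    import bb.fetch2

    # proxy/ssh/AWS settings from the datastore 'd', falling back to BB_ORIGENV
    env = bb.fetch2.get_fetcher_environment(d)
    subprocess.run(["git", "ls-remote", "https://example.com/repo.git"],
                   env=env, check=True)
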
@@ -868,14 +936,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
         success = True
     except bb.process.NotFoundError as e:
-        error_message = "Fetch command %s" % (e.command)
+        error_message = "Fetch command %s not found" % (e.command)
     except bb.process.ExecutionError as e:
         if e.stdout:
             output = "output:\n%s\n%s" % (e.stdout, e.stderr)
         elif e.stderr:
             output = "output:\n%s" % e.stderr
         else:
-            output = "no output"
+            if log:
+                output = "see logfile for output"
+            else:
+                output = "no output"
         error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
     except bb.process.CmdError as e:
         error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -937,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld):

         try:
             newud = FetchData(newuri, ld)
+            newud.ignore_checksums = True
             newud.setup_localpath(ld)
         except bb.fetch2.BBFetchException as e:
             logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1046,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
         logger.debug(str(e))
         try:
-            ud.method.clean(ud, ld)
+            if ud.method.cleanup_upon_failure():
+                ud.method.clean(ud, ld)
         except UnboundLocalError:
             pass
         return False
@@ -1057,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):

 def ensure_symlink(target, link_name):
     if not os.path.exists(link_name):
+        dirname = os.path.dirname(link_name)
+        bb.utils.mkdirhier(dirname)
         if os.path.islink(link_name):
             # Broken symbolic link
             os.unlink(link_name)
@@ -1140,11 +1215,11 @@ def srcrev_internal_helper(ud, d, name):
     pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
-        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+        attempts.append("SRCREV_%s:pn-%s" % (name, pn))
     if name != '':
         attempts.append("SRCREV_%s" % name)
     if pn:
-        attempts.append("SRCREV_pn-%s" % pn)
+        attempts.append("SRCREV:pn-%s" % pn)
     attempts.append("SRCREV")

     for a in attempts:
@@ -1169,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name):
     if srcrev == "INVALID" or not srcrev:
         raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
     if srcrev == "AUTOINC":
+        d.setVar("__BBAUTOREV_ACTED_UPON", True)
         srcrev = ud.method.latest_revision(ud, d, name)

     return srcrev
@@ -1180,23 +1256,21 @@ def get_checksum_file_list(d):
     SRC_URI as a space-separated string
     """
     fetch = Fetch([], d, cache = False, localonly = True)
-
-    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
-
         if ud and isinstance(ud.method, local.Local):
-            paths = ud.method.localpaths(ud, d)
+            found = False
+            paths = ud.method.localfile_searchpaths(ud, d)
             for f in paths:
                 pth = ud.decodedurl
-                if f.startswith(dl_dir):
-                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
-                    if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
-                    else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+                if os.path.exists(f):
+                    found = True
                 filelist.append(f + ":" + str(os.path.exists(f)))
+            if not found:
+                bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
+                         "\nThe following paths were searched:"
+                         "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))

     return " ".join(filelist)

@@ -1243,18 +1317,13 @@ class FetchData(object):

             if checksum_name in self.parm:
                 checksum_expected = self.parm[checksum_name]
-            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
                 checksum_expected = None
             else:
                 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)

             setattr(self, "%s_expected" % checksum_id, checksum_expected)

-        for checksum_id in CHECKSUM_LIST:
-            configure_checksum(checksum_id)
-
-        self.ignore_checksums = False
-
         self.names = self.parm.get("name",'default').split(',')

         self.method = None
@@ -1276,6 +1345,11 @@ class FetchData(object):
         if hasattr(self.method, "urldata_init"):
             self.method.urldata_init(self, d)

+        for checksum_id in CHECKSUM_LIST:
+            configure_checksum(checksum_id)
+
+        self.ignore_checksums = False
+
         if "localpath" in self.parm:
             # if user sets localpath for file, use it instead.
             self.localpath = self.parm["localpath"]
@@ -1355,6 +1429,9 @@ class FetchMethod(object):
         Is localpath something that can be represented by a checksum?
         """

+        # We cannot compute checksums for None
+        if urldata.localpath is None:
+            return False
         # We cannot compute checksums for directories
         if os.path.isdir(urldata.localpath):
             return False
@@ -1367,6 +1444,12 @@ class FetchMethod(object):
         """
         return False

+    def cleanup_upon_failure(self):
+        """
+        When a fetch fails, should clean() be called?
+        """
+        return True
+
     def verify_donestamp(self, ud, d):
         """
         Verify the donestamp file
@@ -1434,30 +1517,33 @@ class FetchMethod(object):
         cmd = None

         if unpack:
+            tar_cmd = 'tar --extract --no-same-owner'
+            if 'striplevel' in urldata.parm:
+                tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
             if file.endswith('.tar'):
-                cmd = 'tar x --no-same-owner -f %s' % file
+                cmd = '%s -f %s' % (tar_cmd, file)
             elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-                cmd = 'tar xz --no-same-owner -f %s' % file
+                cmd = '%s -z -f %s' % (tar_cmd, file)
             elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                 cmd = 'gzip -dc %s > %s' % (file, efile)
             elif file.endswith('.bz2'):
                 cmd = 'bzip2 -dc %s > %s' % (file, efile)
             elif file.endswith('.txz') or file.endswith('.tar.xz'):
-                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.xz'):
                 cmd = 'xz -dc %s > %s' % (file, efile)
             elif file.endswith('.tar.lz'):
-                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.lz'):
                 cmd = 'lzip -dc %s > %s' % (file, efile)
             elif file.endswith('.tar.7z'):
-                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+                cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.7z'):
                 cmd = '7za x -y %s 1>/dev/null' % file
             elif file.endswith('.tzst') or file.endswith('.tar.zst'):
-                cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
+                cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.zst'):
                 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
             elif file.endswith('.zip') or file.endswith('.jar'):
@@ -1490,7 +1576,7 @@ class FetchMethod(object):
                     raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                 else:
                     raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
-            cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
+            cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)

         # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
         if 'subdir' in urldata.parm:
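
Note: with the tar invocation centralized in tar_cmd, a striplevel URL parameter
now applies to every tar-based branch above. A sketch of the resulting command
(URL and level are illustrative):

    # SRC_URI entry using the new parameter, set on a datastore 'd':
    d.setVar("SRC_URI", "https://example.com/pkg-1.0.tar.gz;striplevel=1")

    # For that entry the unpack step builds, per the logic above:
    #   tar --extract --no-same-owner --strip-components=1 -z -f pkg-1.0.tar.gz
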
@@ -1506,6 +1592,7 @@ class FetchMethod(object):
             unpackdir = rootdir

         if not unpack or not cmd:
+            urldata.unpack_tracer.unpack("file-copy", unpackdir)
             # If file == dest, then avoid any copies, as we already put the file into dest!
             dest = os.path.join(unpackdir, os.path.basename(file))
             if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1520,6 +1607,8 @@ class FetchMethod(object):
                     destdir = urlpath.rsplit("/", 1)[0] + '/'
                     bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+        else:
+            urldata.unpack_tracer.unpack("archive-extract", unpackdir)

         if not cmd:
             return
@@ -1611,12 +1700,61 @@ class FetchMethod(object):
         """
         return []

+
+class DummyUnpackTracer(object):
+    """
+    Abstract API definition for a class that traces unpacked source files back
+    to their respective upstream SRC_URI entries, for software composition
+    analysis, license compliance and detailed SBOM generation purposes.
+    User may load their own unpack tracer class (instead of the dummy
+    one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
+    """
+    def start(self, unpackdir, urldata_dict, d):
+        """
+        Start tracing the core Fetch.unpack process, using an index to map
+        unpacked files to each SRC_URI entry.
+        This method is called by Fetch.unpack and it may receive nested calls by
+        gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit
+        URLs and by recursively calling Fetch.unpack from new (nested) Fetch
+        instances.
+        """
+        return
+    def start_url(self, url):
+        """Start tracing url unpack process.
+        This method is called by Fetch.unpack before the fetcher-specific unpack
+        method starts, and it may receive nested calls by gitsm and npmsw
+        fetchers.
+        """
+        return
+    def unpack(self, unpack_type, destdir):
+        """
+        Set unpack_type and destdir for current url.
+        This method is called by the fetcher-specific unpack method after url
+        tracing started.
+        """
+        return
+    def finish_url(self, url):
+        """Finish tracing url unpack process and update the file index.
+        This method is called by Fetch.unpack after the fetcher-specific unpack
+        method finished its job, and it may receive nested calls by gitsm
+        and npmsw fetchers.
+        """
+        return
+    def complete(self):
+        """
+        Finish tracing the Fetch.unpack process, and check if all nested
+        Fecth.unpack calls (if any) have been completed; if so, save collected
+        metadata.
+        """
+        return
+
+
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
         if localonly and cache:
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

-        if len(urls) == 0:
+        if not urls:
             urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
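
Note: DummyUnpackTracer doubles as the interface contract for user-supplied
tracers. A minimal sketch of a custom tracer, assuming it lives in an importable
module named mytracer (the module name and logging are illustrative):

    # mytracer.py -- hypothetical module on BitBake's Python path
    import bb
    from bb.fetch2 import DummyUnpackTracer

    class LoggingUnpackTracer(DummyUnpackTracer):
        """Log which SRC_URI entry each unpack destination came from."""
        def start_url(self, url):
            self.current_url = url
        def unpack(self, unpack_type, destdir):
            bb.note("unpacked %s (%s) into %s"
                    % (self.current_url, unpack_type, destdir))

    # selected in a conf file via:
    #   BB_UNPACK_TRACER_CLASS = "mytracer.LoggingUnpackTracer"
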
@@ -1631,10 +1769,30 @@ class Fetch(object):
         if key in urldata_cache:
             self.ud = urldata_cache[key]

+        # the unpack_tracer object needs to be made available to possible nested
+        # Fetch instances (when those are created by gitsm and npmsw fetchers)
+        # so we set it as a global variable
+        global unpack_tracer
+        try:
+            unpack_tracer
+        except NameError:
+            class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
+            if class_path:
+                # use user-defined unpack tracer class
+                import importlib
+                module_name, _, class_name = class_path.rpartition(".")
+                module = importlib.import_module(module_name)
+                class_ = getattr(module, class_name)
+                unpack_tracer = class_()
+            else:
+                # fall back to the dummy/abstract class
+                unpack_tracer = DummyUnpackTracer()
+
         for url in urls:
             if url not in self.ud:
                 try:
                     self.ud[url] = FetchData(url, d, localonly)
+                    self.ud[url].unpack_tracer = unpack_tracer
                 except NonLocalMethod:
                     if localonly:
                         self.ud[url] = None
@@ -1673,6 +1831,7 @@ class Fetch(object):
         network = self.d.getVar("BB_NO_NETWORK")
         premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))

+        checksum_missing_messages = []
         for u in urls:
             ud = self.ud[u]
             ud.setup_localpath(self.d)
@@ -1684,7 +1843,6 @@ class Fetch(object):

             try:
                 self.d.setVar("BB_NO_NETWORK", network)
-
                 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     done = True
                 elif m.try_premirror(ud, self.d):
@@ -1705,7 +1863,9 @@ class Fetch(object):
                        self.d.setVar("BB_NO_NETWORK", "1")

                firsterr = None
-               verified_stamp = m.verify_donestamp(ud, self.d)
+               verified_stamp = False
+               if done:
+                   verified_stamp = m.verify_donestamp(ud, self.d)
                if not done and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
@@ -1735,7 +1895,7 @@ class Fetch(object):
                        logger.debug(str(e))
                        firsterr = e
                        # Remove any incomplete fetch
-                       if not verified_stamp:
+                       if not verified_stamp and m.cleanup_upon_failure():
                            m.clean(ud, self.d)
                        logger.debug("Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
@@ -1754,17 +1914,28 @@ class Fetch(object):
                    raise ChecksumError("Stale Error Detected")

            except BBFetchException as e:
-               if isinstance(e, ChecksumError):
+               if isinstance(e, NoChecksumError):
+                   (message, _) = e.args
+                   checksum_missing_messages.append(message)
+                   continue
+               elif isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)
+        if checksum_missing_messages:
+            logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages))
+            raise BBFetchException("There was some missing checksums in the recipe")

    def checkstatus(self, urls=None):
        """
-       Check all urls exist upstream
+       Check all URLs exist upstream.
+
+       Returns None if the URLs exist, raises FetchError if the check wasn't
+       successful but there wasn't an error (such as file not found), and
+       raises other exceptions in error cases.
        """

        if not urls:
@@ -1787,7 +1958,7 @@ class Fetch(object):
                ret = m.try_mirrors(self, ud, self.d, mirrors, True)

            if not ret:
-               raise FetchError("URL %s doesn't work" % u, u)
+               raise FetchError("URL doesn't work", u)

    def unpack(self, root, urls=None):
        """
@@ -1797,6 +1968,8 @@ class Fetch(object):
        if not urls:
            urls = self.urls

+       unpack_tracer.start(root, self.ud, self.d)
+
        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
@@ -1804,11 +1977,15 @@ class Fetch(object):
            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

+           unpack_tracer.start_url(u)
            ud.method.unpack(ud, root, self.d)
+           unpack_tracer.finish_url(u)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

+       unpack_tracer.complete()
+
    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
@@ -1908,6 +2085,9 @@ from . import repo
 from . import clearcase
 from . import npm
 from . import npmsw
+from . import az
+from . import crate
+from . import gcp

 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1927,3 +2107,6 @@ methods.append(repo.Repo())
 methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
 methods.append(npmsw.NpmShrinkWrap())
+methods.append(az.Az())
+methods.append(crate.Crate())
+methods.append(gcp.GCP())
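
Note: the three new modules register extra URL schemes with the fetcher. A
hedged sketch of SRC_URI entries they are meant to handle (the exact scheme
layouts are assumed from the az, crate and gcp modules, which are not shown in
this diff):

    # illustrative entries only; the fetcher modules are authoritative
    d.setVar("SRC_URI", " ".join([
        "crate://crates.io/glob/0.3.1",              # crates.io crate
        "gs://my-bucket/downloads/pkg-1.0.tar.gz",   # Google Cloud Storage
        "az://example.blob.core.windows.net/container/pkg-1.0.tar.gz",  # Azure
    ]))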