Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 224
1 file changed, 128 insertions(+), 96 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections
 import subprocess
 import pickle
 import errno
-import bb.persist_data, bb.utils
+import bb.utils
 import bb.checksum
 import bb.process
 import bb.event
 
 __version__ = "2"
 _checksum_cache = bb.checksum.FileChecksumCache()
+_revisions_cache = bb.checksum.RevisionsCache()
 
 logger = logging.getLogger("BitBake.Fetcher")
 
-CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
+CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
 SHOWN_CHECKSUM_LIST = ["sha256"]
 
 class BBFetchException(Exception):
@@ -237,7 +238,7 @@ class URI(object):
         # to RFC compliant URL format. E.g.:
         # file://foo.diff -> file:foo.diff
         if urlp.scheme in self._netloc_forbidden:
-            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+            uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
             reparse = 1
 
         if reparse:
@@ -352,6 +353,14 @@ def decodeurl(url):
     user, password, parameters).
     """
 
+    uri = URI(url)
+    path = uri.path if uri.path else "/"
+    return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
+
+def decodemirrorurl(url):
+    """Decodes a mirror URL into the tokens (scheme, network location, path,
+    user, password, parameters).
+    """
     m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
     if not m:
         raise MalformedUrl(url)
@@ -370,6 +379,9 @@ def decodeurl(url):
     elif type.lower() == 'file':
         host = ""
         path = location
+        if user:
+            path = user + '@' + path
+            user = ""
     else:
         host = location
         path = "/"
@@ -402,32 +414,34 @@ def encodeurl(decoded):
 
     if not type:
         raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
-    url = ['%s://' % type]
+    uri = URI()
+    uri.scheme = type
     if user and type != "file":
-        url.append("%s" % user)
+        uri.username = user
         if pswd:
-            url.append(":%s" % pswd)
-        url.append("@")
+            uri.password = pswd
     if host and type != "file":
-        url.append("%s" % host)
+        uri.hostname = host
     if path:
         # Standardise path to ensure comparisons work
         while '//' in path:
             path = path.replace("//", "/")
-        url.append("%s" % urllib.parse.quote(path))
+        uri.path = path
+        if type == "file":
+            # Use old not IETF compliant style
+            uri.relative = False
     if p:
-        for parm in p:
-            url.append(";%s=%s" % (parm, p[parm]))
+        uri.params = p
 
-    return "".join(url)
+    return str(uri)
 
 def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     if not ud.url or not uri_find or not uri_replace:
         logger.error("uri_replace: passed an undefined value, not replacing")
         return None
-    uri_decoded = list(decodeurl(ud.url))
-    uri_find_decoded = list(decodeurl(uri_find))
-    uri_replace_decoded = list(decodeurl(uri_replace))
+    uri_decoded = list(decodemirrorurl(ud.url))
+    uri_find_decoded = list(decodemirrorurl(uri_find))
+    uri_replace_decoded = list(decodemirrorurl(uri_replace))
     logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
     # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
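Note: a minimal usage sketch, not part of the patch, showing how the reworked decodeurl()/encodeurl() pair is meant to round-trip a URL. The example URL is made up, and the exact values returned for absent fields depend on the URI class internals.

    from bb.fetch2 import decodeurl, encodeurl

    scheme, host, path, user, pswd, params = decodeurl(
        "git://git.example.com/repo.git;protocol=https;branch=main")
    # scheme == "git", host == "git.example.com", path == "/repo.git",
    # params == {"protocol": "https", "branch": "main"}
    roundtrip = encodeurl((scheme, host, path, user, pswd, params))

uri_replace() now goes through decodemirrorurl() instead, which keeps any user@ prefix attached to file:// paths so that mirror expressions keep matching.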
@@ -460,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 for k in replacements:
                     uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
-                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
+                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
             if loc == 2:
                 # Handle path manipulations
                 basename = None
@@ -493,18 +507,23 @@ methods = []
 urldata_cache = {}
 saved_headrevs = {}
 
-def fetcher_init(d):
+def fetcher_init(d, servercontext=True):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
 
-    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+    _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
+
+    if not servercontext:
+        return
+
     try:
         # fetcher_init is called multiple times, so make sure we only save the
         # revs the first time it is called.
         if not bb.fetch2.saved_headrevs:
-            bb.fetch2.saved_headrevs = dict(revs)
+            bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
     except:
         pass
 
@@ -514,11 +533,10 @@ def fetcher_init(d):
         logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
         logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs.clear()
+        _revisions_cache.clear_cache()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
 
-    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
@@ -526,9 +544,11 @@ def fetcher_init(d):
 
 def fetcher_parse_save():
     _checksum_cache.save_extras()
+    _revisions_cache.save_extras()
 
 def fetcher_parse_done():
     _checksum_cache.save_merge()
+    _revisions_cache.save_merge()
 
 def fetcher_compare_revisions(d):
     """
@@ -536,7 +556,7 @@ def fetcher_compare_revisions(d):
     when bitbake was started and return true if they have changed.
     """
 
-    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
+    headrevs = _revisions_cache.get_revs()
     return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
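Note: a minimal sketch, not from the patch, of the new persistence flow, assuming only the RevisionsCache calls visible above (init_cache, get_revs, clear_cache): head revisions now live in bb.checksum.RevisionsCache rather than in bb.persist_data's BB_URI_HEADREVS domain.

    import bb.checksum

    cache = bb.checksum.RevisionsCache()
    cache.init_cache("/path/to/cachedir")   # normally d.getVar("BB_CACHEDIR")
    saved_headrevs = cache.get_revs()       # snapshot taken in fetcher_init()
    # ... fetch tasks update the cache while BitBake runs ...
    revisions_changed = cache.get_revs() != saved_headrevs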
@@ -786,8 +806,8 @@ def _get_srcrev(d, method_name='sortable_revision'):
         return "", revs
 
 
-    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
-        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+    if len(scms) == 1:
+        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
         revs.append(rev)
         if len(rev) > 10:
             rev = rev[:10]
@@ -808,13 +828,12 @@ def _get_srcrev(d, method_name='sortable_revision'):
     seenautoinc = False
     for scm in scms:
         ud = urldata[scm]
-        for name in ud.names:
-            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
-            revs.append(rev)
-            seenautoinc = seenautoinc or autoinc
-            if len(rev) > 10:
-                rev = rev[:10]
-            name_to_rev[name] = rev
+        autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
+        revs.append(rev)
+        seenautoinc = seenautoinc or autoinc
+        if len(rev) > 10:
+            rev = rev[:10]
+        name_to_rev[ud.name] = rev
     # Replace names by revisions in the SRCREV_FORMAT string. The approach used
     # here can handle names being prefixes of other names and names appearing
     # as substrings in revisions (in which case the name should not be
@@ -878,6 +897,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
                      'AWS_SESSION_TOKEN',
                      'GIT_CACHE_PATH',
                      'REMOTE_CONTAINERS_IPC',
+                     'GITHUB_TOKEN',
                      'SSL_CERT_DIR']
 
 def get_fetcher_environment(d):
@@ -1072,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         # If that tarball is a local file:// we need to provide a symlink to it
         dldir = ld.getVar("DL_DIR")
 
+        if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
+            ld = ld.createCopy()
+            ld.setVar("BB_NO_NETWORK", "1")
+
         if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
             if ud.donestamp:
@@ -1093,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
                     origud.method.build_mirror_data(origud, ld)
             return origud.localpath
         # Otherwise the result is a local file:// and we symlink to it
-        ensure_symlink(ud.localpath, origud.localpath)
+        # This may also be a link to a shallow archive
+        # When using shallow mode, add a symlink to the original fullshallow
+        # path to ensure a valid symlink even in the `PREMIRRORS` case
+        origud.method.update_mirror_links(ud, origud)
         update_stamp(origud, ld)
         return ud.localpath
 
@@ -1127,25 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         if ud.lockfile and ud.lockfile != origud.lockfile:
             bb.utils.unlockfile(lf)
 
-
-def ensure_symlink(target, link_name):
-    if not os.path.exists(link_name):
-        dirname = os.path.dirname(link_name)
-        bb.utils.mkdirhier(dirname)
-        if os.path.islink(link_name):
-            # Broken symbolic link
-            os.unlink(link_name)
-
-        # In case this is executing without any file locks held (as is
-        # the case for file:// URLs), two tasks may end up here at the
-        # same time, in which case we do not want the second task to
-        # fail when the link has already been created by the first task.
-        try:
-            os.symlink(target, link_name)
-        except FileExistsError:
-            pass
-
-
 def try_mirrors(fetch, d, origud, mirrors, check = False):
     """
     Try to use a mirrored version of the sources.
@@ -1174,7 +1182,7 @@ def trusted_network(d, url):
     if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
         return True
 
-    pkgname = d.expand(d.getVar('PN', False))
+    pkgname = d.getVar('PN')
     trusted_hosts = None
     if pkgname:
         trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
@@ -1227,20 +1235,17 @@ def srcrev_internal_helper(ud, d, name):
         if srcrev and srcrev != "INVALID":
             break
 
-    if 'rev' in ud.parm and 'tag' in ud.parm:
-        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
-
-    if 'rev' in ud.parm or 'tag' in ud.parm:
-        if 'rev' in ud.parm:
-            parmrev = ud.parm['rev']
-        else:
-            parmrev = ud.parm['tag']
+    if 'rev' in ud.parm:
+        parmrev = ud.parm['rev']
         if srcrev == "INVALID" or not srcrev:
             return parmrev
         if srcrev != parmrev:
             raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
         return parmrev
 
+    if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
+        return ud.parm['tag']
+
     if srcrev == "INVALID" or not srcrev:
         raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
     if srcrev == "AUTOINC":
@@ -1263,7 +1268,7 @@ def get_checksum_file_list(d):
             found = False
             paths = ud.method.localfile_searchpaths(ud, d)
             for f in paths:
-                pth = ud.decodedurl
+                pth = ud.path
                 if os.path.exists(f):
                     found = True
                 filelist.append(f + ":" + str(os.path.exists(f)))
@@ -1308,23 +1313,28 @@ class FetchData(object):
         self.setup = False
 
         def configure_checksum(checksum_id):
+            checksum_plain_name = "%ssum" % checksum_id
             if "name" in self.parm:
                 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
             else:
-                checksum_name = "%ssum" % checksum_id
-
-            setattr(self, "%s_name" % checksum_id, checksum_name)
+                checksum_name = checksum_plain_name
 
             if checksum_name in self.parm:
                 checksum_expected = self.parm[checksum_name]
-            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
+            elif checksum_plain_name in self.parm:
+                checksum_expected = self.parm[checksum_plain_name]
+                checksum_name = checksum_plain_name
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
                 checksum_expected = None
             else:
                 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
 
+            setattr(self, "%s_name" % checksum_id, checksum_name)
             setattr(self, "%s_expected" % checksum_id, checksum_expected)
 
-        self.names = self.parm.get("name",'default').split(',')
+        self.name = self.parm.get("name",'default')
+        if "," in self.name:
+            raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
 
         self.method = None
         for m in methods:
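Note: a condensed sketch, not the code itself, of how configure_checksum() now resolves checksum parameters for a single-name URL: a name-qualified parameter such as ;myname.sha256sum= still wins, and the plain ;sha256sum= form becomes the fallback before the SRC_URI varflags are consulted.

    def resolve_checksum(parm, checksum_id="sha256"):
        plain = "%ssum" % checksum_id                   # e.g. "sha256sum"
        named = "%s.%s" % (parm["name"], plain) if "name" in parm else plain
        if named in parm:
            return named, parm[named]
        if plain in parm:
            return plain, parm[plain]
        return named, None                              # caller falls back to SRC_URI flags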
@@ -1376,13 +1386,7 @@ class FetchData(object):
                 self.lockfile = basepath + '.lock'
 
     def setup_revisions(self, d):
-        self.revisions = {}
-        for name in self.names:
-            self.revisions[name] = srcrev_internal_helper(self, d, name)
-
-        # add compatibility code for non name specified case
-        if len(self.names) == 1:
-            self.revision = self.revisions[self.names[0]]
+        self.revision = srcrev_internal_helper(self, d, self.name)
 
     def setup_localpath(self, d):
         if not self.localpath:
@@ -1510,7 +1514,7 @@ class FetchMethod(object):
                               (file, urldata.parm.get('unpack')))
 
         base, ext = os.path.splitext(file)
-        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
             efile = os.path.join(rootdir, os.path.basename(base))
         else:
             efile = file
@@ -1569,11 +1573,11 @@ class FetchMethod(object):
                 datafile = None
                 if output:
                     for line in output.decode().splitlines():
-                        if line.startswith('data.tar.'):
+                        if line.startswith('data.tar.') or line == 'data.tar':
                             datafile = line
                             break
                     else:
-                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
+                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
                 else:
                     raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
@@ -1606,7 +1610,7 @@ class FetchMethod(object):
                     if urlpath.find("/") != -1:
                         destdir = urlpath.rsplit("/", 1)[0] + '/'
                         bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
-                cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+                cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
         else:
             urldata.unpack_tracer.unpack("archive-extract", unpackdir)
 
@@ -1635,6 +1639,28 @@ class FetchMethod(object):
         """
         bb.utils.remove(urldata.localpath)
 
+    def ensure_symlink(self, target, link_name):
+        if not os.path.exists(link_name):
+            dirname = os.path.dirname(link_name)
+            bb.utils.mkdirhier(dirname)
+            if os.path.islink(link_name):
+                # Broken symbolic link
+                os.unlink(link_name)
+
+            # In case this is executing without any file locks held (as is
+            # the case for file:// URLs), two tasks may end up here at the
+            # same time, in which case we do not want the second task to
+            # fail when the link has already been created by the first task.
+            try:
+                os.symlink(target, link_name)
+            except FileExistsError:
+                pass
+
+    def update_mirror_links(self, ud, origud):
+        # For local file:// results, create a symlink to them
+        # This may also be a link to a shallow archive
+        self.ensure_symlink(ud.localpath, origud.localpath)
+
     def try_premirror(self, urldata, d):
         """
         Should premirrors be used?
@@ -1662,13 +1688,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+
+        rev = _revisions_cache.get_rev(key)
+        if rev is None:
+            rev = self._latest_revision(ud, d, name)
+            _revisions_cache.set_rev(key, rev)
+        return rev
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)
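Note: the lookup-or-compute pattern introduced above, as a standalone sketch that assumes only the get_rev()/set_rev() API shown in this patch; compute_latest stands in for the fetcher's _latest_revision() call.

    def cached_latest_revision(cache, key, compute_latest):
        rev = cache.get_rev(key)
        if rev is None:
            rev = compute_latest()
            cache.set_rev(key, rev)
        return rev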
@@ -1806,7 +1832,7 @@ class Fetch(object):
             self.ud[url] = FetchData(url, self.d)
 
         self.ud[url].setup_localpath(self.d)
-        return self.d.expand(self.ud[url].localpath)
+        return self.ud[url].localpath
 
     def localpaths(self):
         """
@@ -1859,25 +1885,28 @@ class Fetch(object):
                             logger.debug(str(e))
                            done = False
 
+                d = self.d
                 if premirroronly:
-                    self.d.setVar("BB_NO_NETWORK", "1")
+                    # Only disable the network in a copy
+                    d = bb.data.createCopy(self.d)
+                    d.setVar("BB_NO_NETWORK", "1")
 
                 firsterr = None
                 verified_stamp = False
                 if done:
-                    verified_stamp = m.verify_donestamp(ud, self.d)
-                if not done and (not verified_stamp or m.need_update(ud, self.d)):
+                    verified_stamp = m.verify_donestamp(ud, d)
+                if not done and (not verified_stamp or m.need_update(ud, d)):
                     try:
-                        if not trusted_network(self.d, ud.url):
+                        if not trusted_network(d, ud.url):
                             raise UntrustedUrl(ud.url)
                         logger.debug("Trying Upstream")
-                        m.download(ud, self.d)
+                        m.download(ud, d)
                         if hasattr(m, "build_mirror_data"):
-                            m.build_mirror_data(ud, self.d)
+                            m.build_mirror_data(ud, d)
                         done = True
                         # early checksum verify, so that if checksum mismatched,
                         # fetcher still have chance to fetch from mirror
-                        m.update_donestamp(ud, self.d)
+                        m.update_donestamp(ud, d)
 
                     except bb.fetch2.NetworkAccess:
                         raise
@@ -1896,17 +1925,17 @@ class Fetch(object):
                     firsterr = e
                     # Remove any incomplete fetch
                     if not verified_stamp and m.cleanup_upon_failure():
-                        m.clean(ud, self.d)
+                        m.clean(ud, d)
                     logger.debug("Trying MIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
-                    done = m.try_mirrors(self, ud, self.d, mirrors)
+                    mirrors = mirror_from_string(d.getVar('MIRRORS'))
+                    done = m.try_mirrors(self, ud, d, mirrors)
 
-                if not done or not m.done(ud, self.d):
+                if not done or not m.done(ud, d):
                     if firsterr:
                         logger.error(str(firsterr))
                     raise FetchError("Unable to fetch URL from any source.", u)
 
-                m.update_donestamp(ud, self.d)
+                m.update_donestamp(ud, d)
 
             except IOError as e:
                 if e.errno in [errno.ESTALE]:
@@ -2088,6 +2117,7 @@ from . import npmsw
 from . import az
 from . import crate
 from . import gcp
+from . import gomod
 
 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -2110,3 +2140,5 @@ methods.append(npmsw.NpmShrinkWrap())
 methods.append(az.Az())
 methods.append(crate.Crate())
 methods.append(gcp.GCP())
+methods.append(gomod.GoMod())
+methods.append(gomod.GoModGit())