author     Joshua Lock <joshua.g.lock@intel.com>                 2016-11-25 15:28:08 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2016-11-30 15:48:09 +0000
commit     1fce7ecbbb004a5ad82da3eef79cfd52b276708d (patch)
tree       dc19c8ecb8e0b04ba5eafd27a7679bb55585a868 /bitbake/lib/bb/fetch2/__init__.py
parent     1d0c124cdf0282b8d139063409e40982f0ec9888 (diff)
download   poky-1fce7ecbbb004a5ad82da3eef79cfd52b276708d.tar.gz
bitbake: bitbake: remove True option to getVar calls
getVar() now expands by default, so remove the True option from
getVar() calls with a regex search and replace.
The search was made with the following regex: getVar ?\(( ?[^,()]*), True\)
(Bitbake rev: 3b45c479de8640f92dd1d9f147b02e1eecfaadc8)
Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
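For illustration, a minimal sketch of that substitution using Python's re module (a hypothetical standalone script, not part of this commit; the same pattern works equally well in sed or an editor):

    import re

    # Pattern from the commit message: capture the first argument of a
    # two-argument getVar(..., True) call so the trailing True can be dropped.
    PATTERN = re.compile(r"getVar ?\(( ?[^,()]*), True\)")

    def drop_true(source):
        # Rewrite d.getVar('FOO', True) -> d.getVar('FOO'); equivalent
        # now that getVar() expands by default.
        return PATTERN.sub(r"getVar(\1)", source)

    print(drop_true("val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))"))
    # -> val = d.getVar(var) or (origenv and origenv.getVar(var))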
Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 64
1 file changed, 32 insertions(+), 32 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5c76b22529..ced43630ea 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -491,7 +491,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -572,7 +572,7 @@ def verify_checksum(ud, d, precomputed={}):
 
     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -718,7 +718,7 @@ def subprocess_setup():
 
 def get_autorev(d):
     # only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"
 
@@ -737,7 +737,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     """
 
     scms = []
-    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -757,7 +757,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
 
@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
 
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
@@ -860,7 +860,7 @@ def check_network_access(d, info = "", url = None):
     """
     log remote network access, and error if BB_NO_NETWORK is set
     """
-    if d.getVar("BB_NO_NETWORK", True) == "1":
+    if d.getVar("BB_NO_NETWORK") == "1":
         raise NetworkAccess(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -958,7 +958,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
 
         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -1032,14 +1032,14 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
 
     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
 
     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1071,7 @@ def srcrev_internal_helper(ud, d, name):
     """
 
     srcrev = None
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
         attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1082,7 @@ def srcrev_internal_helper(ud, d, name):
     attempts.append("SRCREV")
 
     for a in attempts:
-        srcrev = d.getVar(a, True)
+        srcrev = d.getVar(a)
         if srcrev and srcrev != "INVALID":
             break
 
@@ -1115,7 +1115,7 @@ def get_checksum_file_list(d):
     """
     fetch = Fetch([], d, cache = False, localonly = True)
 
-    dl_dir = d.getVar('DL_DIR', True)
+    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
@@ -1129,9 +1129,9 @@ def get_checksum_file_list(d):
                 if f.startswith(dl_dir):
                     # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                     if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                     else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                 filelist.append(f + ":" + str(os.path.exists(f)))
 
     return " ".join(filelist)
@@ -1204,7 +1204,7 @@ class FetchData(object):
             raise NonLocalMethod()
 
         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)
 
         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1217,7 @@ class FetchData(object):
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)
 
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
 
         if not self.needdonestamp:
             return
@@ -1257,12 +1257,12 @@ class FetchData(object):
         if "srcdate" in self.parm:
             return self.parm['srcdate']
 
-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")
 
         if pn:
-            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
 
-        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+        return d.getVar("SRCDATE") or d.getVar("DATE")
 
 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -1282,7 +1282,7 @@ class FetchMethod(object):
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(d.getVar("DL_DIR", True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
 
     def supports_checksum(self, urldata):
         """
@@ -1450,7 +1450,7 @@ class FetchMethod(object):
         if not cmd:
             return
 
-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1507,7 @@ class FetchMethod(object):
 
     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")
 
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1515,14 @@ class Fetch(object):
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
 
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache
 
-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]
 
@@ -1565,8 +1565,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls
 
-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
 
         for u in urls:
             ud = self.ud[u]
@@ -1584,7 +1584,7 @@ class Fetch(object):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)
 
                 if premirroronly:
@@ -1624,7 +1624,7 @@ class Fetch(object):
                     if not verified_stamp:
                         m.clean(ud, self.d)
                     logger.debug(1, "Trying MIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors)
 
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1657,7 +1657,7 @@ class Fetch(object):
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1665,7 +1665,7 @@ class Fetch(object):
                     ret = m.checkstatus(self, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     ret = try_mirrors(self, self.d, ud, mirrors, True)
 
             if not ret: