author     Joshua Lock <joshua.g.lock@intel.com>                2016-11-25 15:28:08 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-11-30 15:48:09 +0000
commit     1fce7ecbbb004a5ad82da3eef79cfd52b276708d (patch)
tree       dc19c8ecb8e0b04ba5eafd27a7679bb55585a868 /bitbake/lib/bb/fetch2
parent     1d0c124cdf0282b8d139063409e40982f0ec9888 (diff)
download   poky-1fce7ecbbb004a5ad82da3eef79cfd52b276708d.tar.gz
bitbake: bitbake: remove True option to getVar calls
getVar() now expands by default, so remove the True option from
getVar() calls with a regex search and replace.

Search made with the following regex: getVar ?\(( ?[^,()]*), True\)

(Bitbake rev: 3b45c479de8640f92dd1d9f147b02e1eecfaadc8)

Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
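For reference, a minimal sketch of the kind of search-and-replace described in the message above (a hypothetical helper, not the exact command used for this commit), applying the stated regex with Python's re module:

    #!/usr/bin/env python3
    # Hypothetical helper illustrating the conversion: rewrite
    # d.getVar(X, True) call sites to d.getVar(X) using the regex
    # quoted in the commit message.
    import re
    import sys

    # Regex from the commit message; group 1 captures the first argument.
    GETVAR_TRUE = re.compile(r'getVar ?\(( ?[^,()]*), True\)')

    def convert(path):
        with open(path) as src:
            text = src.read()
        converted = GETVAR_TRUE.sub(r'getVar(\1)', text)
        if converted != text:
            with open(path, 'w') as dst:
                dst.write(converted)

    if __name__ == '__main__':
        # e.g. python3 convert.py bitbake/lib/bb/fetch2/*.py
        for path in sys.argv[1:]:
            convert(path)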
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py    64
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py   10
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py         14
-rw-r--r--  bitbake/lib/bb/fetch2/git.py         12
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py          10
-rw-r--r--  bitbake/lib/bb/fetch2/local.py       12
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py         20
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py         10
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py     8
-rw-r--r--  bitbake/lib/bb/fetch2/repo.py         4
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py         2
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py          4
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py          2
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py        12
14 files changed, 92 insertions, 92 deletions
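The hunks below are mechanical: each call site simply drops the explicit True (expand) argument. A minimal sketch of the equivalence the conversion relies on, assuming bitbake's lib/ directory is on sys.path; the FOO/BAR variables are made up purely for illustration:

    # Hedged sketch, not from the patch: bb.data.init() returns a fresh datastore.
    import bb.data

    d = bb.data.init()
    d.setVar('FOO', 'abc')
    d.setVar('BAR', '${FOO}/def')

    # After this change, expansion is the default, so these calls are equivalent.
    assert d.getVar('BAR', True) == d.getVar('BAR') == 'abc/def'
    # Retrieving the unexpanded value still requires an explicit False.
    assert d.getVar('BAR', False) == '${FOO}/def'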
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5c76b22529..ced43630ea 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -491,7 +491,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -572,7 +572,7 @@ def verify_checksum(ud, d, precomputed={}):

     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -718,7 +718,7 @@ def subprocess_setup():

 def get_autorev(d):
     # only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"

@@ -737,7 +737,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     """

     scms = []
-    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -757,7 +757,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):

     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

@@ -860,7 +860,7 @@ def check_network_access(d, info = "", url = None):
     """
     log remote network access, and error if BB_NO_NETWORK is set
     """
-    if d.getVar("BB_NO_NETWORK", True) == "1":
+    if d.getVar("BB_NO_NETWORK") == "1":
         raise NetworkAccess(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -958,7 +958,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):

         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -1032,14 +1032,14 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True

     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')

     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1071,7 @@ def srcrev_internal_helper(ud, d, name):
     """

     srcrev = None
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
         attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1082,7 @@ def srcrev_internal_helper(ud, d, name):
     attempts.append("SRCREV")

     for a in attempts:
-        srcrev = d.getVar(a, True)
+        srcrev = d.getVar(a)
         if srcrev and srcrev != "INVALID":
             break

@@ -1115,7 +1115,7 @@ def get_checksum_file_list(d):
     """
     fetch = Fetch([], d, cache = False, localonly = True)

-    dl_dir = d.getVar('DL_DIR', True)
+    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
@@ -1129,9 +1129,9 @@ def get_checksum_file_list(d):
                 if f.startswith(dl_dir):
                     # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                     if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                     else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                 filelist.append(f + ":" + str(os.path.exists(f)))

     return " ".join(filelist)
@@ -1204,7 +1204,7 @@ class FetchData(object):
             raise NonLocalMethod()

         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)

         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1217,7 @@ class FetchData(object):
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)

-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")

         if not self.needdonestamp:
             return
@@ -1257,12 +1257,12 @@ class FetchData(object):
         if "srcdate" in self.parm:
             return self.parm['srcdate']

-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")

         if pn:
-            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

-        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+        return d.getVar("SRCDATE") or d.getVar("DATE")

 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -1282,7 +1282,7 @@ class FetchMethod(object):
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(d.getVar("DL_DIR", True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)

     def supports_checksum(self, urldata):
         """
@@ -1450,7 +1450,7 @@ class FetchMethod(object):
         if not cmd:
             return

-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1507,7 @@ class FetchMethod(object):

     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")

 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1515,14 @@ class Fetch(object):
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache

-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]

@@ -1565,8 +1565,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls

-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")

         for u in urls:
             ud = self.ud[u]
@@ -1584,7 +1584,7 @@ class Fetch(object):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)

                 if premirroronly:
@@ -1624,7 +1624,7 @@ class Fetch(object):
                     if not verified_stamp:
                         m.clean(ud, self.d)
                     logger.debug(1, "Trying MIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors)

                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1657,7 +1657,7 @@ class Fetch(object):
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1665,7 +1665,7 @@ class Fetch(object):
                     ret = m.checkstatus(self, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     ret = try_mirrors(self, self.d, ud, mirrors, True)

             if not ret:
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index d35b2dabeb..8df8f53d2f 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,13 +108,13 @@ class ClearCase(FetchMethod):
         else:
             ud.module = ""

-        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+        ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

-        if d.getVar("SRCREV", True) == "INVALID":
+        if d.getVar("SRCREV") == "INVALID":
             raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

         ud.label = d.getVar("SRCREV", False)
-        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
+        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")

         ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

@@ -124,7 +124,7 @@ class ClearCase(FetchMethod):

         ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
         ud.csname = "%s-config-spec" % (ud.identifier)
-        ud.ccasedir = os.path.join(d.getVar("DL_DIR", True), ud.type)
+        ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
         ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
         ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
         ud.localfile = "%s.tar.gz" % (ud.identifier)
@@ -144,7 +144,7 @@ class ClearCase(FetchMethod):
         self.debug("configspecfile = %s" % ud.configspecfile)
         self.debug("localfile = %s" % ud.localfile)

-        ud.localfile = os.path.join(d.getVar("DL_DIR", True), ud.localfile)
+        ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)

     def _build_ccase_command(self, ud, command):
         """
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index 5ff70ba921..64c50c2165 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -87,10 +87,10 @@ class Cvs(FetchMethod):
             cvsroot = ud.path
         else:
             cvsroot = ":" + method
-        cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
+        cvsproxyhost = d.getVar('CVS_PROXY_HOST')
         if cvsproxyhost:
             cvsroot += ";proxy=" + cvsproxyhost
-        cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
+        cvsproxyport = d.getVar('CVS_PROXY_PORT')
         if cvsproxyport:
             cvsroot += ";proxyport=" + cvsproxyport
         cvsroot += ":" + ud.user
@@ -110,7 +110,7 @@ class Cvs(FetchMethod):
         if ud.tag:
             options.append("-r %s" % ud.tag)

-        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
+        cvsbasecmd = d.getVar("FETCHCMD_cvs")
         cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
         cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)

@@ -120,8 +120,8 @@ class Cvs(FetchMethod):

         # create module directory
         logger.debug(2, "Fetch: checking for module directory")
-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
         moddir = os.path.join(pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -164,8 +164,8 @@ class Cvs(FetchMethod):
     def clean(self, ud, d):
         """ Clean CVS Files and tarballs """

-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar("CVSDIR"), pkg)

         bb.utils.remove(pkgdir, True)
         bb.utils.remove(ud.localpath)
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 6b618345c3..cb9fa3fb1a 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -182,9 +182,9 @@ class Git(FetchMethod):
         if ud.usehead:
             ud.unresolvedrev['default'] = 'HEAD'

-        ud.basecmd = d.getVar("FETCHCMD_git", True) or "git -c core.fsyncobjectfiles=0"
+        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

-        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0") or ud.rebaseable
+        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable

         ud.setup_revisons(d)

@@ -207,8 +207,8 @@ class Git(FetchMethod):
         for name in ud.names:
             gitsrcname = gitsrcname + '_' + ud.revisions[name]
         ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
-        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
+        gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/")
         ud.clonedir = os.path.join(gitdir, gitsrcname)

         ud.localfile = ud.clonedir
@@ -229,7 +229,7 @@ class Git(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
             return True
         if os.path.exists(ud.clonedir):
             return False
@@ -418,7 +418,7 @@ class Git(FetchMethod):
         """
         pupver = ('', '')

-        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
         try:
             output = self._lsremote(ud, d, "refs/tags/*")
         except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index a1419aade5..ee5b2dd6f3 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -78,15 +78,15 @@ class Hg(FetchMethod):
         hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                                   ud.host, ud.path.replace('/', '.'))
         ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)

-        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
+        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
         ud.pkgdir = os.path.join(hgdir, hgsrcname)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
         ud.localfile = ud.moddir
-        ud.basecmd = d.getVar("FETCHCMD_hg", True) or "/usr/bin/env hg"
+        ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg"

-        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
+        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS")

     def need_update(self, ud, d):
         revTag = ud.parm.get('rev', 'tip')
@@ -99,7 +99,7 @@ class Hg(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
             return True
         if os.path.exists(ud.moddir):
             return False
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 5adfd5d448..83778883e0 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -63,13 +63,13 @@ class Local(FetchMethod):
         newpath = path
         if path[0] == "/":
             return [path]
-        filespath = d.getVar('FILESPATH', True)
+        filespath = d.getVar('FILESPATH')
         if filespath:
             logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
             searched.extend(hist)
         if not newpath:
-            filesdir = d.getVar('FILESDIR', True)
+            filesdir = d.getVar('FILESDIR')
             if filesdir:
                 logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                 newpath = os.path.join(filesdir, path)
@@ -81,7 +81,7 @@ class Local(FetchMethod):
             logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
             return searched
         if not os.path.exists(newpath):
-            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
+            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
             logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             searched.append(dldirfile)
@@ -100,13 +100,13 @@ class Local(FetchMethod):
         # no need to fetch local files, we'll deal with them in place.
         if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
             locations = []
-            filespath = d.getVar('FILESPATH', True)
+            filespath = d.getVar('FILESPATH')
             if filespath:
                 locations = filespath.split(":")
-            filesdir = d.getVar('FILESDIR', True)
+            filesdir = d.getVar('FILESDIR')
             if filesdir:
                 locations.append(filesdir)
-            locations.append(d.getVar("DL_DIR", True))
+            locations.append(d.getVar("DL_DIR"))

             msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
             raise FetchError(msg)
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 66ab075b1c..cbeb8ff889 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -87,12 +87,12 @@ class Npm(FetchMethod):
         bb.utils.mkdirhier(ud.pkgdatadir)
         ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

-        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
         ud.prefixdir = prefixdir

-        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0")
+        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
         ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)

     def need_update(self, ud, d):
         if os.path.exists(ud.localpath):
@@ -102,7 +102,7 @@ class Npm(FetchMethod):
     def _runwget(self, ud, d, command, quiet):
         logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command)
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
         runfetchcmd(command, d, quiet, workdir=dldir)

     def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
@@ -116,7 +116,7 @@ class Npm(FetchMethod):
         # Change to subdir before executing command
         if not os.path.exists(destdir):
             os.makedirs(destdir)
-        path = d.getVar('PATH', True)
+        path = d.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, destdir))
@@ -132,7 +132,7 @@ class Npm(FetchMethod):


     def unpack(self, ud, destdir, d):
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
         depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
         with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
             workobj = json.load(datafile)
@@ -251,12 +251,12 @@ class Npm(FetchMethod):
         lockdown = {}

         if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
-            dest = d.getVar("DL_DIR", True)
+            dest = d.getVar("DL_DIR")
             bb.utils.mkdirhier(dest)
             runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
             return

-        shwrf = d.getVar('NPM_SHRINKWRAP', True)
+        shwrf = d.getVar('NPM_SHRINKWRAP')
         logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
         if shwrf:
             try:
@@ -266,7 +266,7 @@ class Npm(FetchMethod):
                 raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
         elif not ud.ignore_checksums:
             logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
-        lckdf = d.getVar('NPM_LOCKDOWN', True)
+        lckdf = d.getVar('NPM_LOCKDOWN')
         logger.debug(2, "NPM lockdown file is %s" % lckdf)
         if lckdf:
             try:
@@ -292,7 +292,7 @@ class Npm(FetchMethod):
             if os.path.islink(ud.fullmirror):
                 os.unlink(ud.fullmirror)

-            dldir = d.getVar("DL_DIR", True)
+            dldir = d.getVar("DL_DIR")
             logger.info("Creating tarball of npm data")
             runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
                         workdir=dldir)
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index 295abf953b..26f88e1f46 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -34,7 +34,7 @@ class Osc(FetchMethod):

         # Create paths to osc checkouts
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
+        ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
         ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

         if 'rev' in ud.parm:
@@ -84,7 +84,7 @@ class Osc(FetchMethod):

         logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

-        if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
+        if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
@@ -112,7 +112,7 @@ class Osc(FetchMethod):
         Generate a .oscrc to be used for this run.
         """

-        config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
+        config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
         if (os.path.exists(config_path)):
             os.remove(config_path)

@@ -121,8 +121,8 @@ class Osc(FetchMethod):
             f.write("apisrv = %s\n" % ud.host)
             f.write("scheme = http\n")
             f.write("su-wrapper = su -c\n")
-            f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
-            f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
+            f.write("build-root = %s\n" % d.getVar('WORKDIR'))
+            f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
             f.write("extra-pkgs = gzip\n")
             f.write("\n")
             f.write("[%s]\n" % ud.host)
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index 50cb479096..be73ca0518 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -44,13 +44,13 @@ class Perforce(FetchMethod):
         provided by the env, use it. If P4PORT is specified by the recipe, use
         its values, which may override the settings in P4CONFIG.
         """
-        ud.basecmd = d.getVar('FETCHCMD_p4', True)
+        ud.basecmd = d.getVar('FETCHCMD_p4')
         if not ud.basecmd:
             ud.basecmd = "/usr/bin/env p4"

-        ud.dldir = d.getVar('P4DIR', True)
+        ud.dldir = d.getVar('P4DIR')
         if not ud.dldir:
-            ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')
+            ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')

         path = ud.url.split('://')[1]
         path = path.split(';')[0]
@@ -62,7 +62,7 @@ class Perforce(FetchMethod):
         ud.path = path

         ud.usingp4config = False
-        p4port = d.getVar('P4PORT', True)
+        p4port = d.getVar('P4PORT')

         if p4port:
             logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
index bfd4ae16aa..24dcebb0cb 100644
--- a/bitbake/lib/bb/fetch2/repo.py
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -56,12 +56,12 @@ class Repo(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""

-        if os.access(os.path.join(d.getVar("DL_DIR", True), ud.localfile), os.R_OK):
+        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
             logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return

         gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
-        repodir = d.getVar("REPODIR", True) or os.path.join(d.getVar("DL_DIR", True), "repo")
+        repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
         codir = os.path.join(repodir, gitsrcname, ud.manifest)

         if ud.user:
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index 48ddfc176a..da857dd9ff 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -104,7 +104,7 @@ class SFTP(FetchMethod):
             port = '-P %d' % urlo.port
             urlo.port = None

-        dldir = d.getVar('DL_DIR', True)
+        dldir = d.getVar('DL_DIR')
         lpath = os.path.join(dldir, ud.localfile)

         user = ''
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 56f9b7eb35..e668b0d4b7 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -87,11 +87,11 @@ class SSH(FetchMethod):
         m = __pattern__.match(urldata.url)
         path = m.group('path')
         host = m.group('host')
-        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
+        urldata.localpath = os.path.join(d.getVar('DL_DIR'),
                 os.path.basename(os.path.normpath(path)))

     def download(self, urldata, d):
-        dldir = d.getVar('DL_DIR', True)
+        dldir = d.getVar('DL_DIR')

         m = __pattern__.match(urldata.url)
         path = m.group('path')
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 6ca79d35d7..b568c72049 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -50,7 +50,7 @@ class Svn(FetchMethod):
         if not "module" in ud.parm:
             raise MissingParameterError('module', ud.url)

-        ud.basecmd = d.getVar('FETCHCMD_svn', True)
+        ud.basecmd = d.getVar('FETCHCMD_svn')

         ud.module = ud.parm["module"]

diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index ecb946aa81..4ba63df0a8 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -88,7 +88,7 @@ class Wget(FetchMethod):
         if not ud.localfile:
             ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)

-        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"

     def _runwget(self, ud, d, command, quiet):

@@ -104,7 +104,7 @@ class Wget(FetchMethod):
         fetchcmd = self.basecmd

         if 'downloadfilename' in ud.parm:
-            dldir = d.getVar("DL_DIR", True)
+            dldir = d.getVar("DL_DIR")
             bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
             fetchcmd += " -O " + dldir + os.sep + ud.localfile

@@ -543,7 +543,7 @@ class Wget(FetchMethod):
         self.suffix_regex_comp = re.compile(psuffix_regex)

         # compile regex, can be specific by package or generic regex
-        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
+        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
         if pn_regex:
             package_custom_regex_comp = re.compile(pn_regex)
         else:
@@ -564,7 +564,7 @@ class Wget(FetchMethod):
         sanity check to ensure same name and type.
         """
         package = ud.path.split("/")[-1]
-        current_version = ['', d.getVar('PV', True), '']
+        current_version = ['', d.getVar('PV'), '']

         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", package):
@@ -579,7 +579,7 @@ class Wget(FetchMethod):
         bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

         uri = ""
-        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
+        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
         if not regex_uri:
             path = ud.path.split(package)[0]

@@ -588,7 +588,7 @@ class Wget(FetchMethod):
             dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
             m = dirver_regex.search(path)
             if m:
-                pn = d.getVar('PN', True)
+                pn = d.getVar('PN')
                 dirver = m.group('dirver')

                 dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))