path: root/meta/classes/base.bbclass
author     Richard Purdie <richard.purdie@linuxfoundation.org>  2011-11-09 15:00:01 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2011-11-10 11:51:19 +0000
commit     c8dee9b92dfd545852ecac8dc2adfc95ac02e957 (patch)
tree       5f1b86954646a0f3bb914407994388a6a4346769 /meta/classes/base.bbclass
parent     5d3860f4a8abb8e95442b04f8b84a333af362fcd (diff)
download   poky-c8dee9b92dfd545852ecac8dc2adfc95ac02e957.tar.gz
Convert to use direct access to the data store (instead of bb.data.*Var*())
This is the result of running the following over the metadata:

    sed \
     -e 's:bb.data.\(setVar([^,()]*,[^,()]*\), *\([^ )]*\) *):\2.\1):g' \
     -e 's:bb.data.\(setVarFlag([^,()]*,[^,()]*,[^,()]*\), *\([^) ]*\) *):\2.\1):g' \
     -e 's:bb.data.\(getVar([^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
     -e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
     -e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^() ]*\) *):\2.\1):g' \
     -e 's:bb.data.\(getVar([^,()]*\), *\([^) ]*\) *):\2.\1):g' \
     -i `grep -ril bb.data *`

(From OE-Core rev: b22831fd63164c4db9c0b72934d7d734a6585251)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
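The conversion is purely mechanical: each bb.data module helper that took the datastore d as an argument becomes a method call on the datastore object itself, with the remaining arguments preserved in order. A minimal before/after sketch (FOO and BAR are illustrative variable names, not taken from this patch; the snippet only runs inside BitBake, where the bb module and a datastore d are available):

    # Old style: module-level helpers with the datastore passed explicitly
    value = bb.data.getVar('FOO', d, True)           # True = return the expanded value
    bb.data.setVar('BAR', value, d)
    bb.data.setVarFlag('do_install', 'fakeroot', 1, d)

    # New style: the same operations as methods on the datastore object
    value = d.getVar('FOO', True)
    d.setVar('BAR', value)
    d.setVarFlag('do_install', 'fakeroot', 1)

Note that the sed expressions only match simple argument lists, so calls whose value argument contains commas or parentheses (for example the setVarFlag('do_install', 'depends', " ".join(deps), d) lines in the diff below) and bb.data functions that are not plain accessors (createCopy, update_data, expand, inherits_class) are left untouched.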
Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r--  meta/classes/base.bbclass  114
1 files changed, 57 insertions, 57 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index f5397446dd..f3f798f9bf 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -60,9 +60,9 @@ def base_dep_prepend(d):
     # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
     # we need that built is the responsibility of the patch function / class, not
     # the application.
-    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
-        if (bb.data.getVar('HOST_SYS', d, 1) !=
-            bb.data.getVar('BUILD_SYS', d, 1)):
+    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
+        if (d.getVar('HOST_SYS', 1) !=
+            d.getVar('BUILD_SYS', 1)):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
 
@@ -73,13 +73,13 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(bb.data.getVar('FILE', d, True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
 
 addtask fetch
 do_fetch[dirs] = "${DL_DIR}"
 python base_do_fetch() {
 
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return
 
@@ -96,14 +96,14 @@ python base_do_fetch() {
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 python base_do_unpack() {
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return
 
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    rootdir = bb.data.getVar('WORKDIR', localdata, True)
+    rootdir = localdata.getVar('WORKDIR', True)
 
     try:
         fetcher = bb.fetch2.Fetch(src_uri, localdata)
@@ -118,7 +118,7 @@ def generate_git_config(e):
     from bb import data
 
     if data.getVar('GIT_CORE_CONFIG', e.data, True):
-        gitconfig_path = bb.data.getVar('GIT_CONFIG', e.data, True)
+        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
         proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True)
 
         bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data))
@@ -207,11 +207,11 @@ python base_eventhandler() {
     name = getName(e)
 
     if name.startswith("BuildStarted"):
-        bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
+        e.data.setVar( 'BB_VERSION', bb.__version__)
         statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU']
-        statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
+        statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars]
 
-        layers = (bb.data.getVar("BBLAYERS", e.data, 1) or "").split()
+        layers = (e.data.getVar("BBLAYERS", 1) or "").split()
         layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
             base_get_metadata_git_branch(i, None).strip(), \
             base_get_metadata_git_revision(i, None)) \
@@ -237,7 +237,7 @@ python base_eventhandler() {
     needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
     pesteruser = []
     for v in needed_vars:
-        val = bb.data.getVar(v, e.data, 1)
+        val = e.data.getVar(v, 1)
         if not val or val == 'INVALID':
             pesteruser.append(v)
     if pesteruser:
@@ -330,23 +330,23 @@ python () {
         appendVar('EXTRA_OECONF', extraconf)
 
     # If PRINC is set, try and increase the PR value by the amount specified
-    princ = bb.data.getVar('PRINC', d, True)
+    princ = d.getVar('PRINC', True)
     if princ:
-        pr = bb.data.getVar('PR', d, True)
+        pr = d.getVar('PR', True)
         pr_prefix = re.search("\D+",pr)
         prval = re.search("\d+",pr)
         if pr_prefix is None or prval is None:
             bb.error("Unable to analyse format of PR variable: %s" % pr)
         nval = int(prval.group(0)) + int(princ)
         pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
-        bb.data.setVar('PR', pr, d)
+        d.setVar('PR', pr)
 
-    pn = bb.data.getVar('PN', d, 1)
-    license = bb.data.getVar('LICENSE', d, True)
+    pn = d.getVar('PN', 1)
+    license = d.getVar('LICENSE', True)
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
-    commercial_license = " %s " % bb.data.getVar('COMMERCIAL_LICENSE', d, 1)
+    commercial_license = " %s " % d.getVar('COMMERCIAL_LICENSE', 1)
     import re
     pnr = "[ \t]%s[ \t]" % pn.replace('+', "\+")
     if commercial_license and re.search(pnr, commercial_license):
@@ -356,86 +356,86 @@ python () {
     # If we're building a target package we need to use fakeroot (pseudo)
     # in order to capture permissions, owners, groups and special files
     if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
-        bb.data.setVarFlag('do_configure', 'umask', 022, d)
-        bb.data.setVarFlag('do_compile', 'umask', 022, d)
-        deps = (bb.data.getVarFlag('do_install', 'depends', d) or "").split()
+        d.setVarFlag('do_configure', 'umask', 022)
+        d.setVarFlag('do_compile', 'umask', 022)
+        deps = (d.getVarFlag('do_install', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
         bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d)
-        bb.data.setVarFlag('do_install', 'fakeroot', 1, d)
-        bb.data.setVarFlag('do_install', 'umask', 022, d)
-        deps = (bb.data.getVarFlag('do_package', 'depends', d) or "").split()
+        d.setVarFlag('do_install', 'fakeroot', 1)
+        d.setVarFlag('do_install', 'umask', 022)
+        deps = (d.getVarFlag('do_package', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
         bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d)
-        bb.data.setVarFlag('do_package', 'fakeroot', 1, d)
-        bb.data.setVarFlag('do_package', 'umask', 022, d)
-        bb.data.setVarFlag('do_package_setscene', 'fakeroot', 1, d)
-    source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
+        d.setVarFlag('do_package', 'fakeroot', 1)
+        d.setVarFlag('do_package', 'umask', 022)
+        d.setVarFlag('do_package_setscene', 'fakeroot', 1)
+    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
     if not source_mirror_fetch:
-        need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
+        need_host = d.getVar('COMPATIBLE_HOST', 1)
         if need_host:
             import re
-            this_host = bb.data.getVar('HOST_SYS', d, 1)
+            this_host = d.getVar('HOST_SYS', 1)
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
 
-        need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
+        need_machine = d.getVar('COMPATIBLE_MACHINE', 1)
         if need_machine:
             import re
-            this_machine = bb.data.getVar('MACHINE', d, 1)
+            this_machine = d.getVar('MACHINE', 1)
             if this_machine and not re.match(need_machine, this_machine):
-                this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1)
+                this_soc_family = d.getVar('SOC_FAMILY', 1)
                 if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
                     raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
 
 
-        dont_want_license = bb.data.getVar('INCOMPATIBLE_LICENSE', d, 1)
+        dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1)
         if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"):
-            hosttools_whitelist = (bb.data.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
-            lgplv2_whitelist = (bb.data.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
-            dont_want_whitelist = (bb.data.getVar('WHITELIST_%s' % dont_want_license, d, 1) or "").split()
+            hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split()
+            lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split()
+            dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split()
             if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist:
 
                 import re
-                this_license = bb.data.getVar('LICENSE', d, 1)
+                this_license = d.getVar('LICENSE', 1)
                 if this_license and re.search(dont_want_license, this_license):
                     bb.note("SKIPPING %s because it's %s" % (pn, this_license))
                     raise bb.parse.SkipPackage("incompatible with license %s" % this_license)
 
     # Git packages should DEPEND on git-native
-    srcuri = bb.data.getVar('SRC_URI', d, 1)
+    srcuri = d.getVar('SRC_URI', 1)
     if "git://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
         depends = depends + " git-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
 
     # Mercurial packages should DEPEND on mercurial-native
     elif "hg://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
         depends = depends + " mercurial-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
 
     # OSC packages should DEPEND on osc-native
     elif "osc://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
         depends = depends + " osc-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
 
     # *.xz should depends on xz-native for unpacking
     # Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future
     if '.xz' in srcuri:
-        depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+        depends = d.getVarFlag('do_unpack', 'depends') or ""
         depends = depends + " xz-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_unpack', 'depends', depends, d)
+        d.setVarFlag('do_unpack', 'depends', depends)
 
     # unzip-native should already be staged before unpacking ZIP recipes
     if ".zip" in srcuri:
-        depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+        depends = d.getVarFlag('do_unpack', 'depends') or ""
         depends = depends + " unzip-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_unpack', 'depends', depends, d)
+        d.setVarFlag('do_unpack', 'depends', depends)
 
     # 'multimachine' handling
-    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
-    pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
+    mach_arch = d.getVar('MACHINE_ARCH', 1)
+    pkg_arch = d.getVar('PACKAGE_ARCH', 1)
 
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -445,7 +445,7 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
     #
-    override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', 1)
     if override != '0':
         paths = []
         for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
@@ -461,18 +461,18 @@ python () {
                 for mp in paths:
                     if local.startswith(mp):
                         #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
-                        bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                         return
 
-    packages = bb.data.getVar('PACKAGES', d, 1).split()
+    packages = d.getVar('PACKAGES', 1).split()
     for pkg in packages:
-        pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1)
 
         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
-            bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
             bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
 }
 
@@ -483,7 +483,7 @@ python do_cleansstate() {
 
 addtask cleanall after do_cleansstate
 python do_cleanall() {
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return
 