author     Richard Purdie <richard.purdie@linuxfoundation.org>   2012-03-03 10:59:25 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2012-03-05 10:22:56 -0800
commit     06f2f8ce0a3093973ca54b48f542f8485b666079 (patch)
tree       dbcfa5c491eb2e5d237aa539cb7c6e77dc07dd6f /meta/classes
parent     d01dadfb87cfd2284b3e849d35a35fe5df0239c4 (diff)
download   poky-06f2f8ce0a3093973ca54b48f542f8485b666079.tar.gz
meta: Convert getVar/getVarFlag(xxx, 1) -> (xxx, True)
Using "1" with getVar is bad coding style and "True" is preferred. This patch is a sed over the meta directory of the form: sed \ -e 's:\(\.getVar([^,()]*, \)1 *):\1True):g' \ -e 's:\(\.getVarFlag([^,()]*, [^,()]*, \)1 *):\1True):g' \ -i `grep -ril getVar *` Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/autotools.bbclass               |  6
-rw-r--r--  meta/classes/base.bbclass                    | 42
-rw-r--r--  meta/classes/copyleft_compliance.bbclass     |  4
-rw-r--r--  meta/classes/cpan-base.bbclass               |  2
-rw-r--r--  meta/classes/cpan_build.bbclass              |  4
-rw-r--r--  meta/classes/debian.bbclass                  | 10
-rw-r--r--  meta/classes/distrodata.bbclass              |  2
-rw-r--r--  meta/classes/distutils-base.bbclass          |  2
-rw-r--r--  meta/classes/distutils-native-base.bbclass   |  2
-rw-r--r--  meta/classes/gconf.bbclass                   | 12
-rw-r--r--  meta/classes/gtk-icon-cache.bbclass          | 16
-rw-r--r--  meta/classes/image.bbclass                   |  8
-rw-r--r--  meta/classes/imagetest-qemu.bbclass          | 32
-rw-r--r--  meta/classes/kernel-arch.bbclass             |  6
-rw-r--r--  meta/classes/kernel.bbclass                  | 46
-rw-r--r--  meta/classes/libc-common.bbclass             |  6
-rw-r--r--  meta/classes/license.bbclass                 |  2
-rw-r--r--  meta/classes/metadata_scm.bbclass            |  2
-rw-r--r--  meta/classes/package.bbclass                 |  2
-rw-r--r--  meta/classes/package_ipk.bbclass             | 42
-rw-r--r--  meta/classes/package_rpm.bbclass             |  2
-rw-r--r--  meta/classes/package_tar.bbclass             | 18
-rw-r--r--  meta/classes/packagedata.bbclass             |  4
-rw-r--r--  meta/classes/pkg_distribute.bbclass          |  2
-rw-r--r--  meta/classes/pkg_metainfo.bbclass            | 12
-rw-r--r--  meta/classes/populate_sdk_deb.bbclass        |  4
-rw-r--r--  meta/classes/qemu.bbclass                    |  2
-rw-r--r--  meta/classes/qt4e.bbclass                    |  2
-rw-r--r--  meta/classes/recipe_sanity.bbclass           | 30
-rw-r--r--  meta/classes/siteconfig.bbclass              |  2
-rw-r--r--  meta/classes/siteinfo.bbclass                |  2
-rw-r--r--  meta/classes/sourcepkg.bbclass               |  8
-rw-r--r--  meta/classes/src_distribute.bbclass          | 12
-rw-r--r--  meta/classes/syslinux.bbclass                | 28
-rw-r--r--  meta/classes/task.bbclass                    |  2
-rw-r--r--  meta/classes/update-alternatives.bbclass     | 14
-rw-r--r--  meta/classes/update-rc.d.bbclass             | 20
-rw-r--r--  meta/classes/utility-tasks.bbclass           |  2
-rw-r--r--  meta/classes/utils.bbclass                   |  2
39 files changed, 208 insertions, 208 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index a8d2b5f32e..66eba9fad0 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
         return ''

-    pn = d.getVar('PN', 1)
+    pn = d.getVar('PN', True)
     deps = ''

     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d):
         deps += 'libtool-native '
         if not bb.data.inherits_class('native', d) \
                 and not bb.data.inherits_class('cross', d) \
-                and not d.getVar('INHIBIT_DEFAULT_DEPS', 1):
+                and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
             deps += 'libtool-cross '

     return deps + 'gnu-config-native '
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index cab56deb39..48e4a28d83 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -60,8 +60,8 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS'):
-        if (d.getVar('HOST_SYS', 1) !=
-            d.getVar('BUILD_SYS', 1)):
+        if (d.getVar('HOST_SYS', True) !=
+            d.getVar('BUILD_SYS', True)):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps

@@ -203,7 +203,7 @@ def preferred_ml_updates(d):


 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", 1) or "").split()
+    layers = (d.getVar("BBLAYERS", True) or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -233,7 +233,7 @@ python base_eventhandler() {
     if name.startswith("BuildStarted"):
         e.data.setVar( 'BB_VERSION', bb.__version__)
         statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU']
-        statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars]
+        statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, True) or '') for i in statusvars]

         statuslines += get_layers_branch_rev(e.data)
         statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
@@ -242,7 +242,7 @@ python base_eventhandler() {
         needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
         pesteruser = []
         for v in needed_vars:
-            val = e.data.getVar(v, 1)
+            val = e.data.getVar(v, True)
             if not val or val == 'INVALID':
                 pesteruser.append(v)
         if pesteruser:
@@ -344,7 +344,7 @@ python () {
             pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
             d.setVar('PR', pr)

-    pn = d.getVar('PN', 1)
+    pn = d.getVar('PN', True)
     license = d.getVar('LICENSE', True)
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
@@ -370,36 +370,36 @@ python () {
         d.setVarFlag('do_package_setscene', 'fakeroot', 1)
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', 1)
+        need_host = d.getVar('COMPATIBLE_HOST', True)
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', 1)
+            this_host = d.getVar('HOST_SYS', True)
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

-        need_machine = d.getVar('COMPATIBLE_MACHINE', 1)
+        need_machine = d.getVar('COMPATIBLE_MACHINE', True)
         if need_machine:
             import re
-            this_machine = d.getVar('MACHINE', 1)
+            this_machine = d.getVar('MACHINE', True)
             if this_machine and not re.match(need_machine, this_machine):
-                this_soc_family = d.getVar('SOC_FAMILY', 1)
+                this_soc_family = d.getVar('SOC_FAMILY', True)
                 if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
                     raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % this_machine)


-        dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1)
+        dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', True)
         if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"):
-            hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split()
-            lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split()
-            dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split()
+            hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, True) or "").split()
+            lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, True) or "").split()
+            dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, True) or "").split()
             if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist:

-                this_license = d.getVar('LICENSE', 1)
+                this_license = d.getVar('LICENSE', True)
                 if incompatible_license(d,dont_want_license):
                     bb.note("SKIPPING %s because it's %s" % (pn, this_license))
                     raise bb.parse.SkipPackage("incompatible with license %s" % this_license)

-        srcuri = d.getVar('SRC_URI', 1)
+        srcuri = d.getVar('SRC_URI', True)
         # Svn packages should DEPEND on subversion-native
         if "svn://" in srcuri:
             d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
@@ -426,8 +426,8 @@ python () {
             d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', 1)
-    pkg_arch = d.getVar('PACKAGE_ARCH', 1)
+    mach_arch = d.getVar('MACHINE_ARCH', True)
+    pkg_arch = d.getVar('PACKAGE_ARCH', True)

     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -458,9 +458,9 @@ python () {
         d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
         return

-    packages = d.getVar('PACKAGES', 1).split()
+    packages = d.getVar('PACKAGES', True).split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)

         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass
index 2eb9dedd24..4082e7e15d 100644
--- a/meta/classes/copyleft_compliance.bbclass
+++ b/meta/classes/copyleft_compliance.bbclass
@@ -69,8 +69,8 @@ python do_prepare_copyleft_sources () {
     else:
         bb.debug(1, 'copyleft: %s is included' % p)

-    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', 1)
-    src_uri = d.getVar('SRC_URI', 1).split()
+    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True)
+    src_uri = d.getVar('SRC_URI', True).split()
     fetch = bb.fetch2.Fetch(src_uri, d)
     ud = fetch.ud

diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index 79582ca76c..6cb1fefc29 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
@@ -28,7 +28,7 @@ def get_perl_version(d):

 # Determine where the library directories are
 def perl_get_libdirs(d):
-    libdir = d.getVar('libdir', 1)
+    libdir = d.getVar('libdir', True)
     if is_target(d) == "no":
         libdir += '/perl-native'
     libdir += '/perl'
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass
index 981332c4fa..36ffc56b85 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes/cpan_build.bbclass
@@ -10,9 +10,9 @@ inherit cpan-base
 # libmodule-build-perl)
 #
 def cpan_build_dep_prepend(d):
-    if d.getVar('CPAN_BUILD_DEPS', 1):
+    if d.getVar('CPAN_BUILD_DEPS', True):
         return ''
-    pn = d.getVar('PN', 1)
+    pn = d.getVar('PN', True)
     if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']:
         return ''
     return 'libmodule-build-perl-native '
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index 025abcfad0..3637e2ebe7 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -22,8 +22,8 @@ python () {
 python debian_package_name_hook () {
     import glob, copy, stat, errno, re

-    pkgdest = d.getVar('PKGDEST', 1)
-    packages = d.getVar('PACKAGES', 1)
+    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES', True)
     bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
     lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
     so_re = re.compile("lib.*\.so")
@@ -60,7 +60,7 @@ python debian_package_name_hook () {
                 for f in files:
                     if so_re.match(f):
                         fp = os.path.join(root, f)
-                        cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null"
+                        cmd = (d.getVar('BUILD_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
                         fd = os.popen(cmd)
                         lines = fd.readlines()
                         fd.close()
@@ -74,7 +74,7 @@ python debian_package_name_hook () {
         if len(sonames) == 1:
             soname = sonames[0]
         elif len(sonames) > 1:
-            lead = d.getVar('LEAD_SONAME', 1)
+            lead = d.getVar('LEAD_SONAME', True)
             if lead:
                 r = re.compile(lead)
                 filtered = []
@@ -117,7 +117,7 @@ python debian_package_name_hook () {
     # and later
     # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
     # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
-    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True):
+    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
         auto_libname(packages, pkg)
 }

diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index ff5b836871..aba4bd7fa6 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -372,7 +372,7 @@ python do_checkpkg() {

         f.close()
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = d.getVar('LOG_DIR', 1)
+            logpath = d.getVar('LOG_DIR', True)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index e7d0bb8071..6d18e08f14 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}"
+DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', True) == '')]}"
 RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}"

 inherit distutils-common-base
diff --git a/meta/classes/distutils-native-base.bbclass b/meta/classes/distutils-native-base.bbclass
index 47367d796b..ceda512e39 100644
--- a/meta/classes/distutils-native-base.bbclass
+++ b/meta/classes/distutils-native-base.bbclass
@@ -1,3 +1,3 @@
-DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}"
+DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', True) == '')]}"

 inherit distutils-common-base
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index 7bfa871bd2..095d04b1b8 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -32,8 +32,8 @@ done

 python populate_packages_append () {
     import re
-    packages = d.getVar('PACKAGES', 1).split()
-    pkgdest = d.getVar('PKGDEST', 1)
+    packages = d.getVar('PACKAGES', True).split()
+    pkgdest = d.getVar('PKGDEST', True)

     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -46,15 +46,15 @@ python populate_packages_append () {
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
             d.setVar('SCHEMA_FILES', " ".join(schemas))
-            postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('gconf_postinst', 1)
+            postinst += d.getVar('gconf_postinst', True)
             d.setVar('pkg_postinst_%s' % pkg, postinst)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1)
+            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
             if not prerm:
                 prerm = '#!/bin/sh\n'
-            prerm += d.getVar('gconf_prerm', 1)
+            prerm += d.getVar('gconf_prerm', True)
             d.setVar('pkg_prerm_%s' % pkg, prerm)
             rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
             rdepends += " gconf"
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass
index 0204fd3fec..60e3401f4b 100644
--- a/meta/classes/gtk-icon-cache.bbclass
+++ b/meta/classes/gtk-icon-cache.bbclass
@@ -28,31 +28,31 @@ done
 }

 python populate_packages_append () {
-    packages = d.getVar('PACKAGES', 1).split()
-    pkgdest = d.getVar('PKGDEST', 1)
+    packages = d.getVar('PACKAGES', True).split()
+    pkgdest = d.getVar('PKGDEST', True)

     for pkg in packages:
-        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1))
+        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
         if not os.path.exists(icon_dir):
             continue

         bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
-        rdepends = d.getVar('RDEPENDS_%s' % pkg, 1)
+        rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
         rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
         d.setVar('RDEPENDS_%s' % pkg, rdepends)

         bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
+        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_icon_cache_postinst', 1)
+        postinst += d.getVar('gtk_icon_cache_postinst', True)
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1)
+        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_icon_cache_postrm', 1)
+        postrm += d.getVar('gtk_icon_cache_postrm', True)
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index dfce381393..a62eb2cd57 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -98,15 +98,15 @@ python () {
 # is searched for in the BBPATH (same as the old version.)
 #
 def get_devtable_list(d):
-    devtable = d.getVar('IMAGE_DEVICE_TABLE', 1)
+    devtable = d.getVar('IMAGE_DEVICE_TABLE', True)
     if devtable != None:
         return devtable
     str = ""
-    devtables = d.getVar('IMAGE_DEVICE_TABLES', 1)
+    devtables = d.getVar('IMAGE_DEVICE_TABLES', True)
     if devtables == None:
         devtables = 'files/device_table-minimal.txt'
     for devtable in devtables.split():
-        str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable)
+        str += " %s" % bb.which(d.getVar('BBPATH', True), devtable)
     return str

 IMAGE_CLASSES ?= "image_types"
@@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= ""
 # some default locales
 IMAGE_LINGUAS ?= "de-de fr-fr en-gb"

-LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}"
+LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"

 PSEUDO_PASSWD = "${IMAGE_ROOTFS}"

diff --git a/meta/classes/imagetest-qemu.bbclass b/meta/classes/imagetest-qemu.bbclass
index d01d1f4979..d56b44b5c4 100644
--- a/meta/classes/imagetest-qemu.bbclass
+++ b/meta/classes/imagetest-qemu.bbclass
@@ -35,12 +35,12 @@ def qemuimagetest_main(d):

     casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)')
     resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)')
-    machine = d.getVar('MACHINE', 1)
-    pname = d.getVar('PN', 1)
+    machine = d.getVar('MACHINE', True)
+    pname = d.getVar('PN', True)

     """function to save test cases running status"""
     def teststatus(test, status, index, length):
-        test_status = d.getVar('TEST_STATUS', 1)
+        test_status = d.getVar('TEST_STATUS', True)
         if not os.path.exists(test_status):
             raise bb.build.FuncFailed("No test status file existing under TEST_TMP")

@@ -51,13 +51,13 @@ def qemuimagetest_main(d):

     """funtion to run each case under scenario"""
     def runtest(scen, case, fulltestpath):
-        resultpath = d.getVar('TEST_RESULT', 1)
-        tmppath = d.getVar('TEST_TMP', 1)
+        resultpath = d.getVar('TEST_RESULT', True)
+        tmppath = d.getVar('TEST_TMP', True)

         """initialize log file for testcase"""
-        logpath = d.getVar('TEST_LOG', 1)
+        logpath = d.getVar('TEST_LOG', True)
         bb.utils.mkdirhier("%s/%s" % (logpath, scen))
-        caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1)))
+        caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', True)))
         os.system("touch %s" % caselog)

         """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH"""
@@ -141,7 +141,7 @@ def qemuimagetest_main(d):

     """Clean tmp folder for testing"""
     def clean_tmp():
-        tmppath = d.getVar('TEST_TMP', 1)
+        tmppath = d.getVar('TEST_TMP', True)

         if os.path.isdir(tmppath):
             for f in os.listdir(tmppath):
@@ -155,28 +155,28 @@ def qemuimagetest_main(d):
     clean_tmp()

     """check testcase folder and create test log folder"""
-    testpath = d.getVar('TEST_DIR', 1)
+    testpath = d.getVar('TEST_DIR', True)
     bb.utils.mkdirhier(testpath)

-    logpath = d.getVar('TEST_LOG', 1)
+    logpath = d.getVar('TEST_LOG', True)
     bb.utils.mkdirhier(logpath)

-    tmppath = d.getVar('TEST_TMP', 1)
+    tmppath = d.getVar('TEST_TMP', True)
     bb.utils.mkdirhier(tmppath)

     """initialize test status file"""
-    test_status = d.getVar('TEST_STATUS', 1)
+    test_status = d.getVar('TEST_STATUS', True)
     if os.path.exists(test_status):
         os.remove(test_status)
     os.system("touch %s" % test_status)

     """initialize result file"""
-    resultpath = d.getVar('TEST_RESULT', 1)
+    resultpath = d.getVar('TEST_RESULT', True)
     bb.utils.mkdirhier(resultpath)
-    resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1))
+    resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', True))
     sresultfile = os.path.join(resultpath, "testresult.log")

-    machine = d.getVar('MACHINE', 1)
+    machine = d.getVar('MACHINE', True)

     if os.path.exists(sresultfile):
         os.remove(sresultfile)
@@ -188,7 +188,7 @@ def qemuimagetest_main(d):
     f.close()

     """generate pre-defined testcase list"""
-    testlist = d.getVar('TEST_SCEN', 1)
+    testlist = d.getVar('TEST_SCEN', True)
     fulllist = generate_list(testlist)

     """Begin testing"""
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 8693395111..d37c1fb2ed 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \
 def map_kernel_arch(a, d):
     import re

-    valid_archs = d.getVar('valid_archs', 1).split()
+    valid_archs = d.getVar('valid_archs', True).split()

     if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
     elif re.match('arm26$', a): return 'arm26'
@@ -32,7 +32,7 @@ def map_kernel_arch(a, d):
     else:
         bb.error("cannot map '%s' to a linux kernel architecture" % a)

-export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}"
+export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"

 def map_uboot_arch(a, d):
     import re
@@ -41,5 +41,5 @@ def map_uboot_arch(a, d):
     elif re.match('i.86$', a): return 'x86'
     return a

-export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}"
+export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"

diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index ec5d65e186..8fbec90ef1 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -11,9 +11,9 @@ INITRAMFS_IMAGE ?= ""
 INITRAMFS_TASK ?= ""

 python __anonymous () {
-    kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or ''
+    kerneltype = d.getVar('KERNEL_IMAGETYPE', True) or ''
     if kerneltype == 'uImage':
-        depends = d.getVar("DEPENDS", 1)
+        depends = d.getVar("DEPENDS", True)
         depends = "%s u-boot-mkimage-native" % depends
         d.setVar("DEPENDS", depends)

@@ -75,7 +75,7 @@ EXTRA_OEMAKE = ""

 KERNEL_ALT_IMAGETYPE ??= ""

-KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}"
+KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', True))}"

 kernel_do_compile() {
     unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
@@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
 python populate_packages_prepend () {
     def extract_modinfo(file):
         import tempfile, re
-        tempfile.tempdir = d.getVar("WORKDIR", 1)
+        tempfile.tempdir = d.getVar("WORKDIR", True)
         tf = tempfile.mkstemp()
         tmpfile = tf[1]
-        cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile)
+        cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
         os.system(cmd)
         f = open(tmpfile)
         l = f.read().split("\000")
@@ -328,18 +328,18 @@ python populate_packages_prepend () {
     def parse_depmod():
         import re

-        dvar = d.getVar('PKGD', 1)
+        dvar = d.getVar('PKGD', True)
         if not dvar:
             bb.error("PKGD not defined")
             return

-        kernelver = d.getVar('KERNEL_VERSION', 1)
+        kernelver = d.getVar('KERNEL_VERSION', True)
         kernelver_stripped = kernelver
         m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
         if m:
             kernelver_stripped = m.group(1)
-        path = d.getVar("PATH", 1)
-        host_prefix = d.getVar("HOST_PREFIX", 1) or ""
+        path = d.getVar("PATH", True)
+        host_prefix = d.getVar("HOST_PREFIX", True) or ""

         cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped)
         f = os.popen(cmd, 'r')
@@ -377,9 +377,9 @@ python populate_packages_prepend () {

     def get_dependencies(file, pattern, format):
         # file no longer includes PKGD
-        file = file.replace(d.getVar('PKGD', 1) or '', '', 1)
+        file = file.replace(d.getVar('PKGD', True) or '', '', 1)
         # instead is prefixed with /lib/modules/${KERNEL_VERSION}
-        file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1)
+        file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)

         if module_deps.has_key(file):
             import re
@@ -398,40 +398,40 @@ python populate_packages_prepend () {
         import re
         vals = extract_modinfo(file)

-        dvar = d.getVar('PKGD', 1)
+        dvar = d.getVar('PKGD', True)

         # If autoloading is requested, output /etc/modutils/<name> and append
         # appropriate modprobe commands to the postinst
-        autoload = d.getVar('module_autoload_%s' % basename, 1)
+        autoload = d.getVar('module_autoload_%s' % basename, True)
         if autoload:
             name = '%s/etc/modutils/%s' % (dvar, basename)
             f = open(name, 'w')
             for m in autoload.split():
                 f.write('%s\n' % m)
             f.close()
-            postinst = d.getVar('pkg_postinst_%s' % pkg, 1)
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
             if not postinst:
                 bb.fatal("pkg_postinst_%s not defined" % pkg)
-            postinst += d.getVar('autoload_postinst_fragment', 1) % autoload
+            postinst += d.getVar('autoload_postinst_fragment', True) % autoload
             d.setVar('pkg_postinst_%s' % pkg, postinst)

         # Write out any modconf fragment
-        modconf = d.getVar('module_conf_%s' % basename, 1)
+        modconf = d.getVar('module_conf_%s' % basename, True)
         if modconf:
             name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
             f = open(name, 'w')
             f.write("%s\n" % modconf)
             f.close()

-        files = d.getVar('FILES_%s' % pkg, 1)
+        files = d.getVar('FILES_%s' % pkg, True)
         files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename)
         d.setVar('FILES_%s' % pkg, files)

         if vals.has_key("description"):
-            old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or ""
+            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
             d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])

-        rdepends_str = d.getVar('RDEPENDS_' + pkg, 1)
+        rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
         if rdepends_str:
             rdepends = rdepends_str.split()
         else:
@@ -443,12 +443,12 @@ python populate_packages_prepend () {
     module_regex = '^(.*)\.k?o$'
     module_pattern = 'kernel-module-%s'

-    postinst = d.getVar('pkg_postinst_modules', 1)
-    postrm = d.getVar('pkg_postrm_modules', 1)
+    postinst = d.getVar('pkg_postinst_modules', True)
+    postrm = d.getVar('pkg_postrm_modules', True)
     do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
     do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
     do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
-    do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1))
+    do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))

     import re
     metapkg = "kernel-modules"
@@ -460,7 +460,7 @@ python populate_packages_prepend () {
         pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
         blacklist.append(pkg)
     metapkg_rdepends = []
-    packages = d.getVar('PACKAGES', 1).split()
+    packages = d.getVar('PACKAGES', True).split()
     for pkg in packages[1:]:
         if not pkg in blacklist and not pkg in metapkg_rdepends:
             metapkg_rdepends.append(pkg)
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass
index ec33762a20..962f205f81 100644
--- a/meta/classes/libc-common.bbclass
+++ b/meta/classes/libc-common.bbclass
@@ -18,13 +18,13 @@ do_install() {
 }

 def get_libc_fpu_setting(bb, d):
-    if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
+    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
         return "--without-fp"
     return ""

 python populate_packages_prepend () {
-    if d.getVar('DEBIAN_NAMES', 1):
-        bpn = d.getVar('BPN', 1)
+    if d.getVar('DEBIAN_NAMES', True):
+        bpn = d.getVar('BPN', True)
         d.setVar('PKG_'+bpn, 'libc6')
         d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
 }
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index cfc9eafb93..3de704f3f9 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -261,7 +261,7 @@ def incompatible_license(d,dont_want_license):
     from fnmatch import fnmatchcase as fnmatch

     dont_want_licenses = []
-    dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', 1))
+    dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', True))
     if d.getVarFlag('SPDXLICENSEMAP', dont_want_license):
         dont_want_licenses.append(d.getVarFlag('SPDXLICENSEMAP', dont_want_license))

diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index 57609ef8cd..62650be675 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -27,7 +27,7 @@ def base_detect_branch(d):
     return "<unknown>"

 def base_get_scmbasepath(d):
-    return d.getVar( 'COREBASE', 1 )
+    return d.getVar( 'COREBASE', True)

 def base_get_metadata_monotone_branch(path, d):
     monotone_branch = "<unknown>"
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 4ed6972a7c..5c42619f3f 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -1067,7 +1067,7 @@ python emit_pkgdata() {
         return size

     packages = d.getVar('PACKAGES', True)
-    pkgdest = d.getVar('PKGDEST', 1)
+    pkgdest = d.getVar('PKGDEST', True)
     pkgdatadir = d.getVar('PKGDESTWORK', True)

     # Take shared lock since we're only reading, not writing
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index e9d1ddcdbc..ff8b5b488a 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -15,12 +15,12 @@ python package_ipk_fn () {
 }

 python package_ipk_install () {
-    pkg = d.getVar('PKG', 1)
-    pkgfn = d.getVar('PKGFN', 1)
-    rootfs = d.getVar('IMAGE_ROOTFS', 1)
-    ipkdir = d.getVar('DEPLOY_DIR_IPK', 1)
-    stagingdir = d.getVar('STAGING_DIR', 1)
-    tmpdir = d.getVar('TMPDIR', 1)
+    pkg = d.getVar('PKG', True)
+    pkgfn = d.getVar('PKGFN', True)
+    rootfs = d.getVar('IMAGE_ROOTFS', True)
+    ipkdir = d.getVar('DEPLOY_DIR_IPK', True)
+    stagingdir = d.getVar('STAGING_DIR', True)
+    tmpdir = d.getVar('TMPDIR', True)

     if None in (pkg,pkgfn,rootfs):
         raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@@ -289,7 +289,7 @@ python do_package_ipk () {

         localdata.setVar('ROOT', '')
         localdata.setVar('ROOT_%s' % pkg, root)
-        pkgname = localdata.getVar('PKG_%s' % pkg, 1)
+        pkgname = localdata.getVar('PKG_%s' % pkg, True)
         if not pkgname:
             pkgname = pkg
         localdata.setVar('PKG', pkgname)
@@ -298,7 +298,7 @@ python do_package_ipk () {

         bb.data.update_data(localdata)
         basedir = os.path.join(os.path.dirname(root))
-        arch = localdata.getVar('PACKAGE_ARCH', 1)
+        arch = localdata.getVar('PACKAGE_ARCH', True)
         pkgoutdir = "%s/%s" % (outdir, arch)
         bb.mkdirhier(pkgoutdir)
         os.chdir(root)
@@ -310,7 +310,7 @@ python do_package_ipk () {
         except ValueError:
             pass
         if not g and localdata.getVar('ALLOW_EMPTY') != "1":
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
             bb.utils.unlockfile(lf)
             continue

@@ -323,7 +323,7 @@ python do_package_ipk () {
             raise bb.build.FuncFailed("unable to open control file for writing.")

         fields = []
-        pe = d.getVar('PKGE', 1)
+        pe = d.getVar('PKGE', True)
         if pe and int(pe) > 0:
             fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
         else:
@@ -340,7 +340,7 @@ python do_package_ipk () {
         def pullData(l, d):
             l2 = []
             for i in l:
-                l2.append(d.getVar(i, 1))
+                l2.append(d.getVar(i, True))
             return l2

         ctrlfile.write("Package: %s\n" % pkgname)
@@ -369,12 +369,12 @@ python do_package_ipk () {

         bb.build.exec_func("mapping_rename_hook", localdata)

-        rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "")
-        rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "")
-        rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "")
-        rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "")
-        rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "")
-        rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "")
+        rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "")
+        rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "")
+        rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "")
+        rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "")
+        rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "")
+        rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "")

         if rdepends:
             ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@@ -388,14 +388,14 @@ python do_package_ipk () {
             ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
         if rconflicts:
             ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
-        src_uri = localdata.getVar("SRC_URI", 1)
+        src_uri = localdata.getVar("SRC_URI", True)
         if src_uri:
             src_uri = re.sub("\s+", " ", src_uri)
             ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
         ctrlfile.close()

         for script in ["preinst", "postinst", "prerm", "postrm"]:
-            scriptvar = localdata.getVar('pkg_%s' % script, 1)
+            scriptvar = localdata.getVar('pkg_%s' % script, True)
             if not scriptvar:
                 continue
             try:
@@ -407,7 +407,7 @@ python do_package_ipk () {
             scriptfile.close()
             os.chmod(os.path.join(controldir, script), 0755)

-        conffiles_str = localdata.getVar("CONFFILES", 1)
+        conffiles_str = localdata.getVar("CONFFILES", True)
         if conffiles_str:
             try:
                 conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@@ -419,7 +419,7 @@ python do_package_ipk () {
             conffiles.close()

         os.chdir(basedir)
-        ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1),
+        ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True),
                                                   d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir))
         if ret != 0:
             bb.utils.unlockfile(lf)
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index a264712f9e..af8c63ed6f 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -619,7 +619,7 @@ python write_specfile () {

         localdata.setVar('ROOT', '')
         localdata.setVar('ROOT_%s' % pkg, root)
-        pkgname = localdata.getVar('PKG_%s' % pkg, 1)
+        pkgname = localdata.getVar('PKG_%s' % pkg, True)
         if not pkgname:
             pkgname = pkg
         localdata.setVar('PKG', pkgname)
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index 201bd91657..7590177e4b 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -9,9 +9,9 @@ python package_tar_fn () {
 }

 python package_tar_install () {
-    pkg = d.getVar('PKG', 1)
-    pkgfn = d.getVar('PKGFN', 1)
-    rootfs = d.getVar('IMAGE_ROOTFS', 1)
+    pkg = d.getVar('PKG', True)
+    pkgfn = d.getVar('PKGFN', True)
+    rootfs = d.getVar('IMAGE_ROOTFS', True)

     if None in (pkg,pkgfn,rootfs):
         bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@@ -35,24 +35,24 @@ python package_tar_install () {
 }

 python do_package_tar () {
-    workdir = d.getVar('WORKDIR', 1)
+    workdir = d.getVar('WORKDIR', True)
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return

-    outdir = d.getVar('DEPLOY_DIR_TAR', 1)
+    outdir = d.getVar('DEPLOY_DIR_TAR', True)
     if not outdir:
         bb.error("DEPLOY_DIR_TAR not defined, unable to package")
         return
     bb.mkdirhier(outdir)

-    dvar = d.getVar('D', 1)
+    dvar = d.getVar('D', True)
     if not dvar:
         bb.error("D not defined, unable to package")
         return
     bb.mkdirhier(dvar)

-    packages = d.getVar('PACKAGES', 1)
+    packages = d.getVar('PACKAGES', True)
     if not packages:
         bb.debug(1, "PACKAGES not defined, nothing to package")
         return
@@ -79,11 +79,11 @@ python do_package_tar () {
         pkgoutdir = outdir
         bb.mkdirhier(pkgoutdir)
         bb.build.exec_func('package_tar_fn', localdata)
-        tarfn = localdata.getVar('PKGFN', 1)
+        tarfn = localdata.getVar('PKGFN', True)
         os.chdir(root)
         from glob import glob
         if not glob('*'):
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
             continue
         ret = os.system("tar -czf %s %s" % (tarfn, '.'))
         if ret != 0:
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index 9c7aede3bb..60f1aded0d 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -1,12 +1,12 @@
 python read_subpackage_metadata () {
     import oe.packagedata

-    data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d)
+    data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)

     for key in data.keys():
         d.setVar(key, data[key])

-    for pkg in d.getVar('PACKAGES', 1).split():
+    for pkg in d.getVar('PACKAGES', True).split():
         sdata = oe.packagedata.read_subpkgdata(pkg, d)
         for key in sdata.keys():
             d.setVar(key, sdata[key])
diff --git a/meta/classes/pkg_distribute.bbclass b/meta/classes/pkg_distribute.bbclass
index 52643a2f90..9f249a0dfe 100644
--- a/meta/classes/pkg_distribute.bbclass
+++ b/meta/classes/pkg_distribute.bbclass
@@ -1,6 +1,6 @@
 PKG_DISTRIBUTECOMMAND[func] = "1"
 python do_distribute_packages () {
-    cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1)
+    cmd = d.getVar('PKG_DISTRIBUTECOMMAND', True)
     if not cmd:
         raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
     bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass
index 1714a535c2..4b182690f2 100644
--- a/meta/classes/pkg_metainfo.bbclass
+++ b/meta/classes/pkg_metainfo.bbclass
@@ -1,5 +1,5 @@
 python do_pkg_write_metainfo () {
-    deploydir = d.getVar('DEPLOY_DIR', 1)
+    deploydir = d.getVar('DEPLOY_DIR', True)
     if not deploydir:
         bb.error("DEPLOY_DIR not defined, unable to write package info")
         return
@@ -9,11 +9,11 @@ python do_pkg_write_metainfo () {
9 except OSError: 9 except OSError:
10 raise bb.build.FuncFailed("unable to open package-info file for writing.") 10 raise bb.build.FuncFailed("unable to open package-info file for writing.")
11 11
12 name = d.getVar('PN', 1) 12 name = d.getVar('PN', True)
13 version = d.getVar('PV', 1) 13 version = d.getVar('PV', True)
14 desc = d.getVar('DESCRIPTION', 1) 14 desc = d.getVar('DESCRIPTION', True)
15 page = d.getVar('HOMEPAGE', 1) 15 page = d.getVar('HOMEPAGE', True)
16 lic = d.getVar('LICENSE', 1) 16 lic = d.getVar('LICENSE', True)
17 17
18 infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) 18 infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
19 infofile.close() 19 infofile.close()
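do_pkg_write_metainfo above emits one wiki-style table row per recipe by chained string concatenation. An illustrative, stand-alone rewrite of the same row with a format string (field values invented, not part of the patch):

    # Same "|| ... ||" row as above, built with a format string.
    fields = {
        "name": "example",               # PN
        "version": "1.0",                # PV
        "desc": "An example recipe",     # DESCRIPTION
        "page": "https://example.com",   # HOMEPAGE
        "lic": "MIT",                    # LICENSE
    }
    row = "|| {name} || {version} || {desc} || {page} || {lic} ||\n".format(**fields)
    print(row, end="")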
diff --git a/meta/classes/populate_sdk_deb.bbclass b/meta/classes/populate_sdk_deb.bbclass
index fe3d849162..920c89a0f3 100644
--- a/meta/classes/populate_sdk_deb.bbclass
+++ b/meta/classes/populate_sdk_deb.bbclass
@@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul
2do_populate_sdk[recrdeptask] += "do_package_write_deb" 2do_populate_sdk[recrdeptask] += "do_package_write_deb"
3 3
4 4
5DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\ 5DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', True), "i386"]\
6 [d.getVar('SDK_ARCH', 1) in \ 6 [d.getVar('SDK_ARCH', True) in \
7 ["x86", "i486", "i586", "i686", "pentium"]]}" 7 ["x86", "i486", "i586", "i686", "pentium"]]}"
8 8
9populate_sdk_post_deb () { 9populate_sdk_post_deb () {
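The DEB_SDK_ARCH assignment above uses the index-a-list-with-a-boolean idiom: False selects element 0 (SDK_ARCH unchanged) and True selects element 1 ("i386"). A small sketch of the same selection written as a plain function:

    # Indexing a two-element list with a boolean: False -> element 0, True -> element 1.
    def deb_sdk_arch(sdk_arch):
        return [sdk_arch, "i386"][sdk_arch in ["x86", "i486", "i586", "i686", "pentium"]]

    assert deb_sdk_arch("i686") == "i386"   # x86 variants collapse to i386
    assert deb_sdk_arch("arm") == "arm"     # everything else passes through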
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass
index 1bdd209afe..22ebcfde40 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes/qemu.bbclass
@@ -6,7 +6,7 @@
6def qemu_target_binary(data): 6def qemu_target_binary(data):
7 import bb 7 import bb
8 8
9 target_arch = data.getVar("TARGET_ARCH", 1) 9 target_arch = data.getVar("TARGET_ARCH", True)
10 if target_arch in ("i486", "i586", "i686"): 10 if target_arch in ("i486", "i586", "i686"):
11 target_arch = "i386" 11 target_arch = "i386"
12 elif target_arch == "powerpc": 12 elif target_arch == "powerpc":
diff --git a/meta/classes/qt4e.bbclass b/meta/classes/qt4e.bbclass
index f3d5caa455..05c24efaa1 100644
--- a/meta/classes/qt4e.bbclass
+++ b/meta/classes/qt4e.bbclass
@@ -1,4 +1,4 @@
1DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}" 1DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', True)[:12] == 'qt4-embedded')]}"
2 2
3inherit qmake2 3inherit qmake2
4 4
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index bb60ffa00e..b3246599b9 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -1,5 +1,5 @@
1def __note(msg, d): 1def __note(msg, d):
2 bb.note("%s: recipe_sanity: %s" % (d.getVar("P", 1), msg)) 2 bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg))
3 3
4__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" 4__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
5def bad_runtime_vars(cfgdata, d): 5def bad_runtime_vars(cfgdata, d):
@@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d):
7 bb.data.inherits_class("cross", d): 7 bb.data.inherits_class("cross", d):
8 return 8 return
9 9
10 for var in d.getVar("__recipe_sanity_badruntimevars", 1).split(): 10 for var in d.getVar("__recipe_sanity_badruntimevars", True).split():
11 val = d.getVar(var, 0) 11 val = d.getVar(var, 0)
12 if val and val != cfgdata.get(var): 12 if val and val != cfgdata.get(var):
13 __note("%s should be %s_${PN}" % (var, var), d) 13 __note("%s should be %s_${PN}" % (var, var), d)
@@ -15,17 +15,17 @@ def bad_runtime_vars(cfgdata, d):
15__recipe_sanity_reqvars = "DESCRIPTION" 15__recipe_sanity_reqvars = "DESCRIPTION"
16__recipe_sanity_reqdiffvars = "LICENSE" 16__recipe_sanity_reqdiffvars = "LICENSE"
17def req_vars(cfgdata, d): 17def req_vars(cfgdata, d):
18 for var in d.getVar("__recipe_sanity_reqvars", 1).split(): 18 for var in d.getVar("__recipe_sanity_reqvars", True).split():
19 if not d.getVar(var, 0): 19 if not d.getVar(var, 0):
20 __note("%s should be set" % var, d) 20 __note("%s should be set" % var, d)
21 21
22 for var in d.getVar("__recipe_sanity_reqdiffvars", 1).split(): 22 for var in d.getVar("__recipe_sanity_reqdiffvars", True).split():
23 val = d.getVar(var, 0) 23 val = d.getVar(var, 0)
24 cfgval = cfgdata.get(var) 24 cfgval = cfgdata.get(var)
25 25
26 # Hardcoding is bad, but I'm lazy. We don't care about license being 26 # Hardcoding is bad, but I'm lazy. We don't care about license being
27 # unset if the recipe has no sources! 27 # unset if the recipe has no sources!
28 if var == "LICENSE" and d.getVar("SRC_URI", 1) == cfgdata.get("SRC_URI"): 28 if var == "LICENSE" and d.getVar("SRC_URI", True) == cfgdata.get("SRC_URI"):
29 continue 29 continue
30 30
31 if not val: 31 if not val:
@@ -43,11 +43,11 @@ def var_renames_overwrite(cfgdata, d):
43def incorrect_nonempty_PACKAGES(cfgdata, d): 43def incorrect_nonempty_PACKAGES(cfgdata, d):
44 if bb.data.inherits_class("native", d) or \ 44 if bb.data.inherits_class("native", d) or \
45 bb.data.inherits_class("cross", d): 45 bb.data.inherits_class("cross", d):
46 if d.getVar("PACKAGES", 1): 46 if d.getVar("PACKAGES", True):
47 return True 47 return True
48 48
49def can_use_autotools_base(cfgdata, d): 49def can_use_autotools_base(cfgdata, d):
50 cfg = d.getVar("do_configure", 1) 50 cfg = d.getVar("do_configure", True)
51 if not bb.data.inherits_class("autotools", d): 51 if not bb.data.inherits_class("autotools", d):
52 return False 52 return False
53 53
@@ -65,10 +65,10 @@ def can_use_autotools_base(cfgdata, d):
65 65
66def can_remove_FILESPATH(cfgdata, d): 66def can_remove_FILESPATH(cfgdata, d):
67 expected = cfgdata.get("FILESPATH") 67 expected = cfgdata.get("FILESPATH")
68 #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', 1).split(':') for p in d.getVar('FILESPATHPKG', 1).split(':') for o in (d.getVar('OVERRIDES', 1) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}" 68 #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', True).split(':') for p in d.getVar('FILESPATHPKG', True).split(':') for o in (d.getVar('OVERRIDES', True) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}"
69 expectedpaths = bb.data.expand(expected, d) 69 expectedpaths = bb.data.expand(expected, d)
70 unexpanded = d.getVar("FILESPATH", 0) 70 unexpanded = d.getVar("FILESPATH", 0)
71 filespath = d.getVar("FILESPATH", 1).split(":") 71 filespath = d.getVar("FILESPATH", True).split(":")
72 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] 72 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
73 for fp in filespath: 73 for fp in filespath:
74 if not fp in expectedpaths: 74 if not fp in expectedpaths:
@@ -79,13 +79,13 @@ def can_remove_FILESPATH(cfgdata, d):
79 79
80def can_remove_FILESDIR(cfgdata, d): 80def can_remove_FILESDIR(cfgdata, d):
81 expected = cfgdata.get("FILESDIR") 81 expected = cfgdata.get("FILESDIR")
82 #expected = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" 82 #expected = "${@bb.which(d.getVar('FILESPATH', True), '.')}"
83 unexpanded = d.getVar("FILESDIR", 0) 83 unexpanded = d.getVar("FILESDIR", 0)
84 if unexpanded is None: 84 if unexpanded is None:
85 return False 85 return False
86 86
87 expanded = os.path.normpath(d.getVar("FILESDIR", 1)) 87 expanded = os.path.normpath(d.getVar("FILESDIR", True))
88 filespath = d.getVar("FILESPATH", 1).split(":") 88 filespath = d.getVar("FILESPATH", True).split(":")
89 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] 89 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
90 90
91 return unexpanded != expected and \ 91 return unexpanded != expected and \
@@ -103,7 +103,7 @@ def can_remove_others(p, cfgdata, d):
103 continue 103 continue
104 104
105 try: 105 try:
106 expanded = d.getVar(k, 1) 106 expanded = d.getVar(k, True)
107 cfgexpanded = bb.data.expand(cfgunexpanded, d) 107 cfgexpanded = bb.data.expand(cfgunexpanded, d)
108 except bb.fetch.ParameterError: 108 except bb.fetch.ParameterError:
109 continue 109 continue
@@ -115,8 +115,8 @@ def can_remove_others(p, cfgdata, d):
115 (p, cfgunexpanded, unexpanded, expanded)) 115 (p, cfgunexpanded, unexpanded, expanded))
116 116
117python do_recipe_sanity () { 117python do_recipe_sanity () {
118 p = d.getVar("P", 1) 118 p = d.getVar("P", True)
119 p = "%s %s %s" % (d.getVar("PN", 1), d.getVar("PV", 1), d.getVar("PR", 1)) 119 p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True))
120 120
121 sanitychecks = [ 121 sanitychecks = [
122 (can_remove_FILESDIR, "candidate for removal of FILESDIR"), 122 (can_remove_FILESDIR, "candidate for removal of FILESDIR"),
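The checks above repeatedly contrast getVar(var, 0) with getVar(var, True): the former returns the raw assignment, the latter the expanded value. A toy illustration of that distinction (not BitBake code, invented variable values):

    # expand=0 returns the raw value, expand=True substitutes ${...} references.
    values = {"PN": "example", "FILESDIR": "${PN}/files"}

    def get_var(name, expand):
        value = values[name]
        if not expand:
            return value
        for key, val in values.items():
            value = value.replace("${%s}" % key, val)
        return value

    assert get_var("FILESDIR", 0) == "${PN}/files"       # unexpanded
    assert get_var("FILESDIR", True) == "example/files"  # expanded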
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 880dcad1f3..ccbe5b99c9 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
2 shared_state = sstate_state_fromvars(d) 2 shared_state = sstate_state_fromvars(d)
3 if shared_state['name'] != 'populate-sysroot': 3 if shared_state['name'] != 'populate-sysroot':
4 return 4 return
5 if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')): 5 if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')):
6 bb.debug(1, "No site_config directory, skipping do_siteconfig") 6 bb.debug(1, "No site_config directory, skipping do_siteconfig")
7 return 7 return
8 bb.build.exec_func('do_siteconfig_gencache', d) 8 bb.build.exec_func('do_siteconfig_gencache', d)
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index bf6af2b838..8c256ceff3 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False):
130 if no_cache: return sitefiles 130 if no_cache: return sitefiles
131 131
132 # Now check for siteconfig cache files 132 # Now check for siteconfig cache files
133 path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1) 133 path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', True)
134 if os.path.isdir(path_siteconfig): 134 if os.path.isdir(path_siteconfig):
135 for i in os.listdir(path_siteconfig): 135 for i in os.listdir(path_siteconfig):
136 filename = os.path.join(path_siteconfig, i) 136 filename = os.path.join(path_siteconfig, i)
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass
index 38edfe4e2e..2a78a90452 100644
--- a/meta/classes/sourcepkg.bbclass
+++ b/meta/classes/sourcepkg.bbclass
@@ -6,7 +6,7 @@ DISTRO ?= "openembedded"
6 6
7def get_src_tree(d): 7def get_src_tree(d):
8 8
9 workdir = d.getVar('WORKDIR', 1) 9 workdir = d.getVar('WORKDIR', True)
10 if not workdir: 10 if not workdir:
11 bb.error("WORKDIR not defined, unable to find source tree.") 11 bb.error("WORKDIR not defined, unable to find source tree.")
12 return 12 return
@@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() {
55 55
56python sourcepkg_do_dumpdata() { 56python sourcepkg_do_dumpdata() {
57 57
58 workdir = d.getVar('WORKDIR', 1) 58 workdir = d.getVar('WORKDIR', True)
59 distro = d.getVar('DISTRO', 1) 59 distro = d.getVar('DISTRO', True)
60 s_tree = get_src_tree(d) 60 s_tree = get_src_tree(d)
61 openembeddeddir = os.path.join(workdir, s_tree, distro) 61 openembeddeddir = os.path.join(workdir, s_tree, distro)
62 dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) 62 dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
@@ -74,7 +74,7 @@ python sourcepkg_do_dumpdata() {
74 # emit the metadata which isnt valid shell 74 # emit the metadata which isnt valid shell
75 for e in d.keys(): 75 for e in d.keys():
76 if d.getVarFlag(e, 'python'): 76 if d.getVarFlag(e, 'python'):
77 f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1))) 77 f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, True)))
78 f.close() 78 f.close()
79} 79}
80 80
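The dump loop above separates python functions from plain shell-safe metadata via the 'python' variable flag. A plain-dict sketch of that test (names and flag values invented):

    # Plain dict standing in for per-variable flags.
    flags = {
        "sourcepkg_do_dumpdata": {"python": "1"},
        "S": {},
    }

    def get_var_flag(name, flag):
        return flags.get(name, {}).get(flag)

    python_funcs = [name for name in flags if get_var_flag(name, "python")]
    print(python_funcs)   # -> ['sourcepkg_do_dumpdata']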
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass
index 651e492598..efa2720e23 100644
--- a/meta/classes/src_distribute.bbclass
+++ b/meta/classes/src_distribute.bbclass
@@ -3,12 +3,12 @@ python do_distribute_sources () {
3 l = bb.data.createCopy(d) 3 l = bb.data.createCopy(d)
4 bb.data.update_data(l) 4 bb.data.update_data(l)
5 5
6 sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1) 6 sources_dir = d.getVar('SRC_DISTRIBUTEDIR', True)
7 src_uri = d.getVar('SRC_URI', 1).split() 7 src_uri = d.getVar('SRC_URI', True).split()
8 fetcher = bb.fetch2.Fetch(src_uri, d) 8 fetcher = bb.fetch2.Fetch(src_uri, d)
9 ud = fetcher.ud 9 ud = fetcher.ud
10 10
11 licenses = d.getVar('LICENSE', 1).replace('&', '|') 11 licenses = d.getVar('LICENSE', True).replace('&', '|')
12 licenses = licenses.replace('(', '').replace(')', '') 12 licenses = licenses.replace('(', '').replace(')', '')
13 clean_licenses = "" 13 clean_licenses = ""
14 for x in licenses.split(): 14 for x in licenses.split():
@@ -20,7 +20,7 @@ python do_distribute_sources () {
20 20
21 for license in clean_licenses.split('|'): 21 for license in clean_licenses.split('|'):
22 for url in ud.values(): 22 for url in ud.values():
23 cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1) 23 cmd = d.getVar('SRC_DISTRIBUTECOMMAND', True)
24 if not cmd: 24 if not cmd:
25 raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") 25 raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
26 url.setup_localpath(d) 26 url.setup_localpath(d)
@@ -29,9 +29,9 @@ python do_distribute_sources () {
29 if url.basename == '*': 29 if url.basename == '*':
30 import os.path 30 import os.path
31 dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) 31 dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
32 d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir)) 32 d.setVar('DEST', "%s_%s/" % (d.getVar('PF', True), dest_dir))
33 else: 33 else:
34 d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename)) 34 d.setVar('DEST', "%s_%s" % (d.getVar('PF', True), url.basename))
35 else: 35 else:
36 d.setVar('DEST', '') 36 d.setVar('DEST', '')
37 37
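The LICENSE handling above flattens the licence expression before splitting it. A worked example on an invented licence string; the loop that follows in the class (partly elided in this hunk) builds clean_licenses from these tokens:

    # Step-by-step effect of the two replace() calls shown above.
    license_field = "GPLv2 & (MIT | BSD)"
    licenses = license_field.replace('&', '|')
    licenses = licenses.replace('(', '').replace(')', '')
    print(licenses.split())   # -> ['GPLv2', '|', 'MIT', '|', 'BSD']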
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass
index ec93201581..b194fa69a3 100644
--- a/meta/classes/syslinux.bbclass
+++ b/meta/classes/syslinux.bbclass
@@ -57,12 +57,12 @@ python build_syslinux_menu () {
57 import copy 57 import copy
58 import sys 58 import sys
59 59
60 workdir = d.getVar('WORKDIR', 1) 60 workdir = d.getVar('WORKDIR', True)
61 if not workdir: 61 if not workdir:
62 bb.error("WORKDIR is not defined") 62 bb.error("WORKDIR is not defined")
63 return 63 return
64 64
65 labels = d.getVar('LABELS', 1) 65 labels = d.getVar('LABELS', True)
66 if not labels: 66 if not labels:
67 bb.debug(1, "LABELS not defined, nothing to do") 67 bb.debug(1, "LABELS not defined, nothing to do")
68 return 68 return
@@ -71,7 +71,7 @@ python build_syslinux_menu () {
71 bb.debug(1, "No labels, nothing to do") 71 bb.debug(1, "No labels, nothing to do")
72 return 72 return
73 73
74 cfile = d.getVar('SYSLINUXMENU', 1) 74 cfile = d.getVar('SYSLINUXMENU', True)
75 if not cfile: 75 if not cfile:
76 raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') 76 raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
77 77
@@ -100,7 +100,7 @@ python build_syslinux_menu () {
100 localdata.setVar('OVERRIDES', label + ':' + overrides) 100 localdata.setVar('OVERRIDES', label + ':' + overrides)
101 bb.data.update_data(localdata) 101 bb.data.update_data(localdata)
102 102
103 usage = localdata.getVar('USAGE', 1) 103 usage = localdata.getVar('USAGE', True)
104 cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) 104 cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
105 cfgfile.write('%s\n' % (usage)) 105 cfgfile.write('%s\n' % (usage))
106 106
@@ -114,12 +114,12 @@ python build_syslinux_cfg () {
114 import copy 114 import copy
115 import sys 115 import sys
116 116
117 workdir = d.getVar('WORKDIR', 1) 117 workdir = d.getVar('WORKDIR', True)
118 if not workdir: 118 if not workdir:
119 bb.error("WORKDIR not defined, unable to package") 119 bb.error("WORKDIR not defined, unable to package")
120 return 120 return
121 121
122 labels = d.getVar('LABELS', 1) 122 labels = d.getVar('LABELS', True)
123 if not labels: 123 if not labels:
124 bb.debug(1, "LABELS not defined, nothing to do") 124 bb.debug(1, "LABELS not defined, nothing to do")
125 return 125 return
@@ -128,7 +128,7 @@ python build_syslinux_cfg () {
128 bb.debug(1, "No labels, nothing to do") 128 bb.debug(1, "No labels, nothing to do")
129 return 129 return
130 130
131 cfile = d.getVar('SYSLINUXCFG', 1) 131 cfile = d.getVar('SYSLINUXCFG', True)
132 if not cfile: 132 if not cfile:
133 raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') 133 raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
134 134
@@ -139,7 +139,7 @@ python build_syslinux_cfg () {
139 139
140 cfgfile.write('# Automatically created by OE\n') 140 cfgfile.write('# Automatically created by OE\n')
141 141
142 opts = d.getVar('SYSLINUX_OPTS', 1) 142 opts = d.getVar('SYSLINUX_OPTS', True)
143 143
144 if opts: 144 if opts:
145 for opt in opts.split(';'): 145 for opt in opts.split(';'):
@@ -148,26 +148,26 @@ python build_syslinux_cfg () {
148 cfgfile.write('ALLOWOPTIONS 1\n'); 148 cfgfile.write('ALLOWOPTIONS 1\n');
149 cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) 149 cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
150 150
151 timeout = d.getVar('SYSLINUX_TIMEOUT', 1) 151 timeout = d.getVar('SYSLINUX_TIMEOUT', True)
152 152
153 if timeout: 153 if timeout:
154 cfgfile.write('TIMEOUT %s\n' % timeout) 154 cfgfile.write('TIMEOUT %s\n' % timeout)
155 else: 155 else:
156 cfgfile.write('TIMEOUT 50\n') 156 cfgfile.write('TIMEOUT 50\n')
157 157
158 prompt = d.getVar('SYSLINUX_PROMPT', 1) 158 prompt = d.getVar('SYSLINUX_PROMPT', True)
159 if prompt: 159 if prompt:
160 cfgfile.write('PROMPT %s\n' % prompt) 160 cfgfile.write('PROMPT %s\n' % prompt)
161 else: 161 else:
162 cfgfile.write('PROMPT 1\n') 162 cfgfile.write('PROMPT 1\n')
163 163
164 menu = d.getVar('AUTO_SYSLINUXMENU', 1) 164 menu = d.getVar('AUTO_SYSLINUXMENU', True)
165 165
166 # This is ugly. My bad. 166 # This is ugly. My bad.
167 167
168 if menu: 168 if menu:
169 bb.build.exec_func('build_syslinux_menu', d) 169 bb.build.exec_func('build_syslinux_menu', d)
170 mfile = d.getVar('SYSLINUXMENU', 1) 170 mfile = d.getVar('SYSLINUXMENU', True)
171 cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) 171 cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
172 172
173 for label in labels.split(): 173 for label in labels.split():
@@ -182,8 +182,8 @@ python build_syslinux_cfg () {
182 182
183 cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label)) 183 cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
184 184
185 append = localdata.getVar('APPEND', 1) 185 append = localdata.getVar('APPEND', True)
186 initrd = localdata.getVar('INITRD', 1) 186 initrd = localdata.getVar('INITRD', True)
187 187
188 if append: 188 if append:
189 cfgfile.write('APPEND ') 189 cfgfile.write('APPEND ')
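Taken together, the writes in build_syslinux_cfg above produce a small syslinux configuration. A sketch (not the class code) of the default output shape when SYSLINUX_OPTS, SYSLINUX_TIMEOUT, SYSLINUX_PROMPT and AUTO_SYSLINUXMENU are all unset; "boot" is an invented label and the APPEND/INITRD handling is omitted:

    # Reproduces only the default writes shown in the hunk above.
    def default_syslinux_cfg(labels):
        lines = ["# Automatically created by OE",
                 "ALLOWOPTIONS 1",
                 "DEFAULT %s" % labels.split()[0],
                 "TIMEOUT 50",
                 "PROMPT 1"]
        for label in labels.split():
            lines.append("LABEL %s\nKERNEL /vmlinuz" % label)
        return "\n".join(lines) + "\n"

    print(default_syslinux_cfg("boot"))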
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass
index 7891207a64..22c2fd3744 100644
--- a/meta/classes/task.bbclass
+++ b/meta/classes/task.bbclass
@@ -17,7 +17,7 @@ PACKAGE_ARCH = "all"
17# to the list. Their dependencies (RRECOMMENDS) are handled as usual 17# to the list. Their dependencies (RRECOMMENDS) are handled as usual
18# by package_depchains in a following step. 18# by package_depchains in a following step.
19python () { 19python () {
20 packages = d.getVar('PACKAGES', 1).split() 20 packages = d.getVar('PACKAGES', True).split()
21 genpackages = [] 21 genpackages = []
22 for pkg in packages: 22 for pkg in packages:
23 for postfix in ['-dbg', '-dev']: 23 for postfix in ['-dbg', '-dev']:
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index 0e8e58bd03..ae58344d3d 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -94,22 +94,22 @@ python __anonymous() {
94} 94}
95 95
96python populate_packages_prepend () { 96python populate_packages_prepend () {
97 pkg = d.getVar('PN', 1) 97 pkg = d.getVar('PN', True)
98 bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) 98 bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
99 postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) 99 postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
100 if not postinst: 100 if not postinst:
101 postinst = '#!/bin/sh\n' 101 postinst = '#!/bin/sh\n'
102 if d.getVar('ALTERNATIVE_LINKS') != None: 102 if d.getVar('ALTERNATIVE_LINKS') != None:
103 postinst += d.getVar('update_alternatives_batch_postinst', 1) 103 postinst += d.getVar('update_alternatives_batch_postinst', True)
104 else: 104 else:
105 postinst += d.getVar('update_alternatives_postinst', 1) 105 postinst += d.getVar('update_alternatives_postinst', True)
106 d.setVar('pkg_postinst_%s' % pkg, postinst) 106 d.setVar('pkg_postinst_%s' % pkg, postinst)
107 postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) 107 postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
108 if not postrm: 108 if not postrm:
109 postrm = '#!/bin/sh\n' 109 postrm = '#!/bin/sh\n'
110 if d.getVar('ALTERNATIVE_LINKS') != None: 110 if d.getVar('ALTERNATIVE_LINKS') != None:
111 postrm += d.getVar('update_alternatives_batch_postrm', 1) 111 postrm += d.getVar('update_alternatives_batch_postrm', True)
112 else: 112 else:
113 postrm += d.getVar('update_alternatives_postrm', 1) 113 postrm += d.getVar('update_alternatives_postrm', True)
114 d.setVar('pkg_postrm_%s' % pkg, postrm) 114 d.setVar('pkg_postrm_%s' % pkg, postrm)
115} 115}
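The postinst/postrm handling above follows one pattern: prefer the per-package script, fall back to the generic one, start a fresh "#!/bin/sh" script if neither exists, then append the class snippet. A plain-dict sketch (package name and snippet body invented):

    # Script-accumulation pattern with a dict standing in for the datastore.
    scripts = {"pkg_postinst": None, "pkg_postinst_example": None}
    snippet = "update-alternatives --install ...\n"   # placeholder body

    postinst = scripts["pkg_postinst_example"] or scripts["pkg_postinst"]
    if not postinst:
        postinst = "#!/bin/sh\n"
    postinst += snippet
    scripts["pkg_postinst_example"] = postinst
    print(scripts["pkg_postinst_example"])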
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index db88a8e764..bddead4a25 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -47,7 +47,7 @@ python populate_packages_prepend () {
47 def update_rcd_package(pkg): 47 def update_rcd_package(pkg):
48 bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) 48 bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
49 localdata = bb.data.createCopy(d) 49 localdata = bb.data.createCopy(d)
50 overrides = localdata.getVar("OVERRIDES", 1) 50 overrides = localdata.getVar("OVERRIDES", True)
51 localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) 51 localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
52 bb.data.update_data(localdata) 52 bb.data.update_data(localdata)
53 53
@@ -56,28 +56,28 @@ python populate_packages_prepend () {
56 execute on the target. Not doing so may cause update_rc.d postinst invoked 56 execute on the target. Not doing so may cause update_rc.d postinst invoked
57 twice to cause unwanted warnings. 57 twice to cause unwanted warnings.
58 """ 58 """
59 postinst = localdata.getVar('pkg_postinst', 1) 59 postinst = localdata.getVar('pkg_postinst', True)
60 if not postinst: 60 if not postinst:
61 postinst = '#!/bin/sh\n' 61 postinst = '#!/bin/sh\n'
62 postinst += localdata.getVar('updatercd_postinst', 1) 62 postinst += localdata.getVar('updatercd_postinst', True)
63 d.setVar('pkg_postinst_%s' % pkg, postinst) 63 d.setVar('pkg_postinst_%s' % pkg, postinst)
64 64
65 prerm = localdata.getVar('pkg_prerm', 1) 65 prerm = localdata.getVar('pkg_prerm', True)
66 if not prerm: 66 if not prerm:
67 prerm = '#!/bin/sh\n' 67 prerm = '#!/bin/sh\n'
68 prerm += localdata.getVar('updatercd_prerm', 1) 68 prerm += localdata.getVar('updatercd_prerm', True)
69 d.setVar('pkg_prerm_%s' % pkg, prerm) 69 d.setVar('pkg_prerm_%s' % pkg, prerm)
70 70
71 postrm = localdata.getVar('pkg_postrm', 1) 71 postrm = localdata.getVar('pkg_postrm', True)
72 if not postrm: 72 if not postrm:
73 postrm = '#!/bin/sh\n' 73 postrm = '#!/bin/sh\n'
74 postrm += localdata.getVar('updatercd_postrm', 1) 74 postrm += localdata.getVar('updatercd_postrm', True)
75 d.setVar('pkg_postrm_%s' % pkg, postrm) 75 d.setVar('pkg_postrm_%s' % pkg, postrm)
76 76
77 pkgs = d.getVar('INITSCRIPT_PACKAGES', 1) 77 pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
78 if pkgs == None: 78 if pkgs == None:
79 pkgs = d.getVar('UPDATERCPN', 1) 79 pkgs = d.getVar('UPDATERCPN', True)
80 packages = (d.getVar('PACKAGES', 1) or "").split() 80 packages = (d.getVar('PACKAGES', True) or "").split()
81 if not pkgs in packages and packages != []: 81 if not pkgs in packages and packages != []:
82 pkgs = packages[0] 82 pkgs = packages[0]
83 for pkg in pkgs.split(): 83 for pkg in pkgs.split():
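The final lines above pick which packages get init-script handling: INITSCRIPT_PACKAGES if set, otherwise UPDATERCPN, otherwise the first PACKAGES entry. A rough, stand-alone sketch of that fallback (dict values invented):

    # Fallback chain for selecting the packages to process.
    conf = {"INITSCRIPT_PACKAGES": None, "UPDATERCPN": None,
            "PACKAGES": "example example-doc"}

    pkgs = conf["INITSCRIPT_PACKAGES"]
    if pkgs is None:
        pkgs = conf["UPDATERCPN"]
        packages = (conf["PACKAGES"] or "").split()
        if pkgs not in packages and packages != []:
            pkgs = packages[0]
    print(pkgs)   # -> 'example'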
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 009ef1fd04..bbdf6e159b 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -24,7 +24,7 @@ python do_clean() {
24 bb.note("Removing " + dir) 24 bb.note("Removing " + dir)
25 oe.path.remove(dir) 25 oe.path.remove(dir)
26 26
27 for f in (d.getVar('CLEANFUNCS', 1) or '').split(): 27 for f in (d.getVar('CLEANFUNCS', True) or '').split():
28 bb.build.exec_func(f, d) 28 bb.build.exec_func(f, d)
29} 29}
30 30
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index 103fa9a546..3b5946308c 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -336,7 +336,7 @@ def base_set_filespath(path, d):
336 if extrapaths != "": 336 if extrapaths != "":
337 path = extrapaths.split(":") + path 337 path = extrapaths.split(":") + path
338 # The ":" ensures we have an 'empty' override 338 # The ":" ensures we have an 'empty' override
339 overrides = (d.getVar("OVERRIDES", 1) or "") + ":" 339 overrides = (d.getVar("OVERRIDES", True) or "") + ":"
340 for p in path: 340 for p in path:
341 if p != "": 341 if p != "":
342 for o in overrides.split(":"): 342 for o in overrides.split(":"):
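The comment above explains the trailing ":" appended to OVERRIDES; concretely, splitting it back out leaves an empty string at the end of the list, which stands for the plain, non-overridden path. A one-line illustration with invented override values:

    overrides_var = "arm:armv5te:linux"        # stand-in for OVERRIDES
    overrides = (overrides_var or "") + ":"
    print(overrides.split(":"))                # -> ['arm', 'armv5te', 'linux', '']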