author    | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-03-03 10:59:25 +0000
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-03-05 10:22:56 -0800
commit    | 06f2f8ce0a3093973ca54b48f542f8485b666079 (patch)
tree      | dbcfa5c491eb2e5d237aa539cb7c6e77dc07dd6f /meta
parent    | d01dadfb87cfd2284b3e849d35a35fe5df0239c4 (diff)
download  | poky-06f2f8ce0a3093973ca54b48f542f8485b666079.tar.gz
meta: Convert getVar/getVarFlag(xxx, 1) -> (xxx, True)
Using "1" with getVar is bad coding style and "True" is preferred.
This patch is a sed over the meta directory of the form:
sed \
-e 's:\(\.getVar([^,()]*, \)1 *):\1True):g' \
-e 's:\(\.getVarFlag([^,()]*, [^,()]*, \)1 *):\1True):g' \
-i `grep -ril getVar *`
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
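Note: a minimal sketch of why the second argument matters, assuming a BitBake datastore object "d" (illustrative only, not part of the patch). The argument is the expand flag, so passing True rather than the magic number 1 states the intent explicitly even though both values are truthy:

    d.setVar('PN', 'example')
    d.setVar('WORKDIR', '/tmp/work/${PN}')
    print(d.getVar('WORKDIR', False))   # unexpanded: /tmp/work/${PN}
    print(d.getVar('WORKDIR', True))    # expanded:   /tmp/work/example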
Diffstat (limited to 'meta')
83 files changed, 290 insertions, 290 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass index a8d2b5f32e..66eba9fad0 100644 --- a/meta/classes/autotools.bbclass +++ b/meta/classes/autotools.bbclass | |||
@@ -1,8 +1,8 @@ | |||
1 | def autotools_dep_prepend(d): | 1 | def autotools_dep_prepend(d): |
2 | if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1): | 2 | if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True): |
3 | return '' | 3 | return '' |
4 | 4 | ||
5 | pn = d.getVar('PN', 1) | 5 | pn = d.getVar('PN', True) |
6 | deps = '' | 6 | deps = '' |
7 | 7 | ||
8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: | 8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: |
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d): | |||
13 | deps += 'libtool-native ' | 13 | deps += 'libtool-native ' |
14 | if not bb.data.inherits_class('native', d) \ | 14 | if not bb.data.inherits_class('native', d) \ |
15 | and not bb.data.inherits_class('cross', d) \ | 15 | and not bb.data.inherits_class('cross', d) \ |
16 | and not d.getVar('INHIBIT_DEFAULT_DEPS', 1): | 16 | and not d.getVar('INHIBIT_DEFAULT_DEPS', True): |
17 | deps += 'libtool-cross ' | 17 | deps += 'libtool-cross ' |
18 | 18 | ||
19 | return deps + 'gnu-config-native ' | 19 | return deps + 'gnu-config-native ' |
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index cab56deb39..48e4a28d83 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -60,8 +60,8 @@ def base_dep_prepend(d): | |||
60 | # we need that built is the responsibility of the patch function / class, not | 60 | # we need that built is the responsibility of the patch function / class, not |
61 | # the application. | 61 | # the application. |
62 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): | 62 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): |
63 | if (d.getVar('HOST_SYS', 1) != | 63 | if (d.getVar('HOST_SYS', True) != |
64 | d.getVar('BUILD_SYS', 1)): | 64 | d.getVar('BUILD_SYS', True)): |
65 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " | 65 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " |
66 | return deps | 66 | return deps |
67 | 67 | ||
@@ -203,7 +203,7 @@ def preferred_ml_updates(d): | |||
203 | 203 | ||
204 | 204 | ||
205 | def get_layers_branch_rev(d): | 205 | def get_layers_branch_rev(d): |
206 | layers = (d.getVar("BBLAYERS", 1) or "").split() | 206 | layers = (d.getVar("BBLAYERS", True) or "").split() |
207 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ | 207 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ |
208 | base_get_metadata_git_branch(i, None).strip(), \ | 208 | base_get_metadata_git_branch(i, None).strip(), \ |
209 | base_get_metadata_git_revision(i, None)) \ | 209 | base_get_metadata_git_revision(i, None)) \ |
@@ -233,7 +233,7 @@ python base_eventhandler() { | |||
233 | if name.startswith("BuildStarted"): | 233 | if name.startswith("BuildStarted"): |
234 | e.data.setVar( 'BB_VERSION', bb.__version__) | 234 | e.data.setVar( 'BB_VERSION', bb.__version__) |
235 | statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU'] | 235 | statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU'] |
236 | statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars] | 236 | statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, True) or '') for i in statusvars] |
237 | 237 | ||
238 | statuslines += get_layers_branch_rev(e.data) | 238 | statuslines += get_layers_branch_rev(e.data) |
239 | statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines) | 239 | statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines) |
@@ -242,7 +242,7 @@ python base_eventhandler() { | |||
242 | needed_vars = [ "TARGET_ARCH", "TARGET_OS" ] | 242 | needed_vars = [ "TARGET_ARCH", "TARGET_OS" ] |
243 | pesteruser = [] | 243 | pesteruser = [] |
244 | for v in needed_vars: | 244 | for v in needed_vars: |
245 | val = e.data.getVar(v, 1) | 245 | val = e.data.getVar(v, True) |
246 | if not val or val == 'INVALID': | 246 | if not val or val == 'INVALID': |
247 | pesteruser.append(v) | 247 | pesteruser.append(v) |
248 | if pesteruser: | 248 | if pesteruser: |
@@ -344,7 +344,7 @@ python () { | |||
344 | pr = pr_prefix.group(0) + str(nval) + pr[prval.end():] | 344 | pr = pr_prefix.group(0) + str(nval) + pr[prval.end():] |
345 | d.setVar('PR', pr) | 345 | d.setVar('PR', pr) |
346 | 346 | ||
347 | pn = d.getVar('PN', 1) | 347 | pn = d.getVar('PN', True) |
348 | license = d.getVar('LICENSE', True) | 348 | license = d.getVar('LICENSE', True) |
349 | if license == "INVALID": | 349 | if license == "INVALID": |
350 | bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) | 350 | bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) |
@@ -370,36 +370,36 @@ python () { | |||
370 | d.setVarFlag('do_package_setscene', 'fakeroot', 1) | 370 | d.setVarFlag('do_package_setscene', 'fakeroot', 1) |
371 | source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0) | 371 | source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0) |
372 | if not source_mirror_fetch: | 372 | if not source_mirror_fetch: |
373 | need_host = d.getVar('COMPATIBLE_HOST', 1) | 373 | need_host = d.getVar('COMPATIBLE_HOST', True) |
374 | if need_host: | 374 | if need_host: |
375 | import re | 375 | import re |
376 | this_host = d.getVar('HOST_SYS', 1) | 376 | this_host = d.getVar('HOST_SYS', True) |
377 | if not re.match(need_host, this_host): | 377 | if not re.match(need_host, this_host): |
378 | raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) | 378 | raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) |
379 | 379 | ||
380 | need_machine = d.getVar('COMPATIBLE_MACHINE', 1) | 380 | need_machine = d.getVar('COMPATIBLE_MACHINE', True) |
381 | if need_machine: | 381 | if need_machine: |
382 | import re | 382 | import re |
383 | this_machine = d.getVar('MACHINE', 1) | 383 | this_machine = d.getVar('MACHINE', True) |
384 | if this_machine and not re.match(need_machine, this_machine): | 384 | if this_machine and not re.match(need_machine, this_machine): |
385 | this_soc_family = d.getVar('SOC_FAMILY', 1) | 385 | this_soc_family = d.getVar('SOC_FAMILY', True) |
386 | if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family: | 386 | if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family: |
387 | raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % this_machine) | 387 | raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % this_machine) |
388 | 388 | ||
389 | 389 | ||
390 | dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1) | 390 | dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', True) |
391 | if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"): | 391 | if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"): |
392 | hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split() | 392 | hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, True) or "").split() |
393 | lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split() | 393 | lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, True) or "").split() |
394 | dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split() | 394 | dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, True) or "").split() |
395 | if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist: | 395 | if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist: |
396 | 396 | ||
397 | this_license = d.getVar('LICENSE', 1) | 397 | this_license = d.getVar('LICENSE', True) |
398 | if incompatible_license(d,dont_want_license): | 398 | if incompatible_license(d,dont_want_license): |
399 | bb.note("SKIPPING %s because it's %s" % (pn, this_license)) | 399 | bb.note("SKIPPING %s because it's %s" % (pn, this_license)) |
400 | raise bb.parse.SkipPackage("incompatible with license %s" % this_license) | 400 | raise bb.parse.SkipPackage("incompatible with license %s" % this_license) |
401 | 401 | ||
402 | srcuri = d.getVar('SRC_URI', 1) | 402 | srcuri = d.getVar('SRC_URI', True) |
403 | # Svn packages should DEPEND on subversion-native | 403 | # Svn packages should DEPEND on subversion-native |
404 | if "svn://" in srcuri: | 404 | if "svn://" in srcuri: |
405 | d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot') | 405 | d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot') |
@@ -426,8 +426,8 @@ python () { | |||
426 | d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot') | 426 | d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot') |
427 | 427 | ||
428 | # 'multimachine' handling | 428 | # 'multimachine' handling |
429 | mach_arch = d.getVar('MACHINE_ARCH', 1) | 429 | mach_arch = d.getVar('MACHINE_ARCH', True) |
430 | pkg_arch = d.getVar('PACKAGE_ARCH', 1) | 430 | pkg_arch = d.getVar('PACKAGE_ARCH', True) |
431 | 431 | ||
432 | if (pkg_arch == mach_arch): | 432 | if (pkg_arch == mach_arch): |
433 | # Already machine specific - nothing further to do | 433 | # Already machine specific - nothing further to do |
@@ -458,9 +458,9 @@ python () { | |||
458 | d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") | 458 | d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") |
459 | return | 459 | return |
460 | 460 | ||
461 | packages = d.getVar('PACKAGES', 1).split() | 461 | packages = d.getVar('PACKAGES', True).split() |
462 | for pkg in packages: | 462 | for pkg in packages: |
463 | pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1) | 463 | pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True) |
464 | 464 | ||
465 | # We could look for != PACKAGE_ARCH here but how to choose | 465 | # We could look for != PACKAGE_ARCH here but how to choose |
466 | # if multiple differences are present? | 466 | # if multiple differences are present? |
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass index 2eb9dedd24..4082e7e15d 100644 --- a/meta/classes/copyleft_compliance.bbclass +++ b/meta/classes/copyleft_compliance.bbclass | |||
@@ -69,8 +69,8 @@ python do_prepare_copyleft_sources () { | |||
69 | else: | 69 | else: |
70 | bb.debug(1, 'copyleft: %s is included' % p) | 70 | bb.debug(1, 'copyleft: %s is included' % p) |
71 | 71 | ||
72 | sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', 1) | 72 | sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True) |
73 | src_uri = d.getVar('SRC_URI', 1).split() | 73 | src_uri = d.getVar('SRC_URI', True).split() |
74 | fetch = bb.fetch2.Fetch(src_uri, d) | 74 | fetch = bb.fetch2.Fetch(src_uri, d) |
75 | ud = fetch.ud | 75 | ud = fetch.ud |
76 | 76 | ||
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass index 79582ca76c..6cb1fefc29 100644 --- a/meta/classes/cpan-base.bbclass +++ b/meta/classes/cpan-base.bbclass | |||
@@ -28,7 +28,7 @@ def get_perl_version(d): | |||
28 | 28 | ||
29 | # Determine where the library directories are | 29 | # Determine where the library directories are |
30 | def perl_get_libdirs(d): | 30 | def perl_get_libdirs(d): |
31 | libdir = d.getVar('libdir', 1) | 31 | libdir = d.getVar('libdir', True) |
32 | if is_target(d) == "no": | 32 | if is_target(d) == "no": |
33 | libdir += '/perl-native' | 33 | libdir += '/perl-native' |
34 | libdir += '/perl' | 34 | libdir += '/perl' |
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass index 981332c4fa..36ffc56b85 100644 --- a/meta/classes/cpan_build.bbclass +++ b/meta/classes/cpan_build.bbclass | |||
@@ -10,9 +10,9 @@ inherit cpan-base | |||
10 | # libmodule-build-perl) | 10 | # libmodule-build-perl) |
11 | # | 11 | # |
12 | def cpan_build_dep_prepend(d): | 12 | def cpan_build_dep_prepend(d): |
13 | if d.getVar('CPAN_BUILD_DEPS', 1): | 13 | if d.getVar('CPAN_BUILD_DEPS', True): |
14 | return '' | 14 | return '' |
15 | pn = d.getVar('PN', 1) | 15 | pn = d.getVar('PN', True) |
16 | if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']: | 16 | if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']: |
17 | return '' | 17 | return '' |
18 | return 'libmodule-build-perl-native ' | 18 | return 'libmodule-build-perl-native ' |
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass index 025abcfad0..3637e2ebe7 100644 --- a/meta/classes/debian.bbclass +++ b/meta/classes/debian.bbclass | |||
@@ -22,8 +22,8 @@ python () { | |||
22 | python debian_package_name_hook () { | 22 | python debian_package_name_hook () { |
23 | import glob, copy, stat, errno, re | 23 | import glob, copy, stat, errno, re |
24 | 24 | ||
25 | pkgdest = d.getVar('PKGDEST', 1) | 25 | pkgdest = d.getVar('PKGDEST', True) |
26 | packages = d.getVar('PACKAGES', 1) | 26 | packages = d.getVar('PACKAGES', True) |
27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") | 27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") |
28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") | 28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") |
29 | so_re = re.compile("lib.*\.so") | 29 | so_re = re.compile("lib.*\.so") |
@@ -60,7 +60,7 @@ python debian_package_name_hook () { | |||
60 | for f in files: | 60 | for f in files: |
61 | if so_re.match(f): | 61 | if so_re.match(f): |
62 | fp = os.path.join(root, f) | 62 | fp = os.path.join(root, f) |
63 | cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null" | 63 | cmd = (d.getVar('BUILD_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null" |
64 | fd = os.popen(cmd) | 64 | fd = os.popen(cmd) |
65 | lines = fd.readlines() | 65 | lines = fd.readlines() |
66 | fd.close() | 66 | fd.close() |
@@ -74,7 +74,7 @@ python debian_package_name_hook () { | |||
74 | if len(sonames) == 1: | 74 | if len(sonames) == 1: |
75 | soname = sonames[0] | 75 | soname = sonames[0] |
76 | elif len(sonames) > 1: | 76 | elif len(sonames) > 1: |
77 | lead = d.getVar('LEAD_SONAME', 1) | 77 | lead = d.getVar('LEAD_SONAME', True) |
78 | if lead: | 78 | if lead: |
79 | r = re.compile(lead) | 79 | r = re.compile(lead) |
80 | filtered = [] | 80 | filtered = [] |
@@ -117,7 +117,7 @@ python debian_package_name_hook () { | |||
117 | # and later | 117 | # and later |
118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw | 118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw |
119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 | 119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 |
120 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True): | 120 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): |
121 | auto_libname(packages, pkg) | 121 | auto_libname(packages, pkg) |
122 | } | 122 | } |
123 | 123 | ||
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass index ff5b836871..aba4bd7fa6 100644 --- a/meta/classes/distrodata.bbclass +++ b/meta/classes/distrodata.bbclass | |||
@@ -372,7 +372,7 @@ python do_checkpkg() { | |||
372 | 372 | ||
373 | f.close() | 373 | f.close() |
374 | if status != "ErrHostNoDir" and re.match("Err", status): | 374 | if status != "ErrHostNoDir" and re.match("Err", status): |
375 | logpath = d.getVar('LOG_DIR', 1) | 375 | logpath = d.getVar('LOG_DIR', True) |
376 | os.system("cp %s %s/" % (f.name, logpath)) | 376 | os.system("cp %s %s/" % (f.name, logpath)) |
377 | os.unlink(f.name) | 377 | os.unlink(f.name) |
378 | return status | 378 | return status |
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass index e7d0bb8071..6d18e08f14 100644 --- a/meta/classes/distutils-base.bbclass +++ b/meta/classes/distutils-base.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}" | 1 | DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', True) == '')]}" |
2 | RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}" | 2 | RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}" |
3 | 3 | ||
4 | inherit distutils-common-base | 4 | inherit distutils-common-base |
diff --git a/meta/classes/distutils-native-base.bbclass b/meta/classes/distutils-native-base.bbclass index 47367d796b..ceda512e39 100644 --- a/meta/classes/distutils-native-base.bbclass +++ b/meta/classes/distutils-native-base.bbclass | |||
@@ -1,3 +1,3 @@ | |||
1 | DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}" | 1 | DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', True) == '')]}" |
2 | 2 | ||
3 | inherit distutils-common-base | 3 | inherit distutils-common-base |
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass index 7bfa871bd2..095d04b1b8 100644 --- a/meta/classes/gconf.bbclass +++ b/meta/classes/gconf.bbclass | |||
@@ -32,8 +32,8 @@ done | |||
32 | 32 | ||
33 | python populate_packages_append () { | 33 | python populate_packages_append () { |
34 | import re | 34 | import re |
35 | packages = d.getVar('PACKAGES', 1).split() | 35 | packages = d.getVar('PACKAGES', True).split() |
36 | pkgdest = d.getVar('PKGDEST', 1) | 36 | pkgdest = d.getVar('PKGDEST', True) |
37 | 37 | ||
38 | for pkg in packages: | 38 | for pkg in packages: |
39 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) | 39 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) |
@@ -46,15 +46,15 @@ python populate_packages_append () { | |||
46 | if schemas != []: | 46 | if schemas != []: |
47 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 47 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
48 | d.setVar('SCHEMA_FILES', " ".join(schemas)) | 48 | d.setVar('SCHEMA_FILES', " ".join(schemas)) |
49 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) | 49 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
50 | if not postinst: | 50 | if not postinst: |
51 | postinst = '#!/bin/sh\n' | 51 | postinst = '#!/bin/sh\n' |
52 | postinst += d.getVar('gconf_postinst', 1) | 52 | postinst += d.getVar('gconf_postinst', True) |
53 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 53 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
54 | prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1) | 54 | prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True) |
55 | if not prerm: | 55 | if not prerm: |
56 | prerm = '#!/bin/sh\n' | 56 | prerm = '#!/bin/sh\n' |
57 | prerm += d.getVar('gconf_prerm', 1) | 57 | prerm += d.getVar('gconf_prerm', True) |
58 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 58 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
59 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" | 59 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" |
60 | rdepends += " gconf" | 60 | rdepends += " gconf" |
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass index 0204fd3fec..60e3401f4b 100644 --- a/meta/classes/gtk-icon-cache.bbclass +++ b/meta/classes/gtk-icon-cache.bbclass | |||
@@ -28,31 +28,31 @@ done | |||
28 | } | 28 | } |
29 | 29 | ||
30 | python populate_packages_append () { | 30 | python populate_packages_append () { |
31 | packages = d.getVar('PACKAGES', 1).split() | 31 | packages = d.getVar('PACKAGES', True).split() |
32 | pkgdest = d.getVar('PKGDEST', 1) | 32 | pkgdest = d.getVar('PKGDEST', True) |
33 | 33 | ||
34 | for pkg in packages: | 34 | for pkg in packages: |
35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1)) | 35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True)) |
36 | if not os.path.exists(icon_dir): | 36 | if not os.path.exists(icon_dir): |
37 | continue | 37 | continue |
38 | 38 | ||
39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) | 39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) |
40 | rdepends = d.getVar('RDEPENDS_%s' % pkg, 1) | 40 | rdepends = d.getVar('RDEPENDS_%s' % pkg, True) |
41 | rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" | 41 | rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" |
42 | d.setVar('RDEPENDS_%s' % pkg, rdepends) | 42 | d.setVar('RDEPENDS_%s' % pkg, rdepends) |
43 | 43 | ||
44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) | 44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) |
45 | 45 | ||
46 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) | 46 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
47 | if not postinst: | 47 | if not postinst: |
48 | postinst = '#!/bin/sh\n' | 48 | postinst = '#!/bin/sh\n' |
49 | postinst += d.getVar('gtk_icon_cache_postinst', 1) | 49 | postinst += d.getVar('gtk_icon_cache_postinst', True) |
50 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 50 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
51 | 51 | ||
52 | postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) | 52 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) |
53 | if not postrm: | 53 | if not postrm: |
54 | postrm = '#!/bin/sh\n' | 54 | postrm = '#!/bin/sh\n' |
55 | postrm += d.getVar('gtk_icon_cache_postrm', 1) | 55 | postrm += d.getVar('gtk_icon_cache_postrm', True) |
56 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 56 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
57 | } | 57 | } |
58 | 58 | ||
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass index dfce381393..a62eb2cd57 100644 --- a/meta/classes/image.bbclass +++ b/meta/classes/image.bbclass | |||
@@ -98,15 +98,15 @@ python () { | |||
98 | # is searched for in the BBPATH (same as the old version.) | 98 | # is searched for in the BBPATH (same as the old version.) |
99 | # | 99 | # |
100 | def get_devtable_list(d): | 100 | def get_devtable_list(d): |
101 | devtable = d.getVar('IMAGE_DEVICE_TABLE', 1) | 101 | devtable = d.getVar('IMAGE_DEVICE_TABLE', True) |
102 | if devtable != None: | 102 | if devtable != None: |
103 | return devtable | 103 | return devtable |
104 | str = "" | 104 | str = "" |
105 | devtables = d.getVar('IMAGE_DEVICE_TABLES', 1) | 105 | devtables = d.getVar('IMAGE_DEVICE_TABLES', True) |
106 | if devtables == None: | 106 | if devtables == None: |
107 | devtables = 'files/device_table-minimal.txt' | 107 | devtables = 'files/device_table-minimal.txt' |
108 | for devtable in devtables.split(): | 108 | for devtable in devtables.split(): |
109 | str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable) | 109 | str += " %s" % bb.which(d.getVar('BBPATH', True), devtable) |
110 | return str | 110 | return str |
111 | 111 | ||
112 | IMAGE_CLASSES ?= "image_types" | 112 | IMAGE_CLASSES ?= "image_types" |
@@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= "" | |||
119 | # some default locales | 119 | # some default locales |
120 | IMAGE_LINGUAS ?= "de-de fr-fr en-gb" | 120 | IMAGE_LINGUAS ?= "de-de fr-fr en-gb" |
121 | 121 | ||
122 | LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}" | 122 | LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}" |
123 | 123 | ||
124 | PSEUDO_PASSWD = "${IMAGE_ROOTFS}" | 124 | PSEUDO_PASSWD = "${IMAGE_ROOTFS}" |
125 | 125 | ||
diff --git a/meta/classes/imagetest-qemu.bbclass b/meta/classes/imagetest-qemu.bbclass index d01d1f4979..d56b44b5c4 100644 --- a/meta/classes/imagetest-qemu.bbclass +++ b/meta/classes/imagetest-qemu.bbclass | |||
@@ -35,12 +35,12 @@ def qemuimagetest_main(d): | |||
35 | 35 | ||
36 | casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') | 36 | casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') |
37 | resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') | 37 | resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') |
38 | machine = d.getVar('MACHINE', 1) | 38 | machine = d.getVar('MACHINE', True) |
39 | pname = d.getVar('PN', 1) | 39 | pname = d.getVar('PN', True) |
40 | 40 | ||
41 | """function to save test cases running status""" | 41 | """function to save test cases running status""" |
42 | def teststatus(test, status, index, length): | 42 | def teststatus(test, status, index, length): |
43 | test_status = d.getVar('TEST_STATUS', 1) | 43 | test_status = d.getVar('TEST_STATUS', True) |
44 | if not os.path.exists(test_status): | 44 | if not os.path.exists(test_status): |
45 | raise bb.build.FuncFailed("No test status file existing under TEST_TMP") | 45 | raise bb.build.FuncFailed("No test status file existing under TEST_TMP") |
46 | 46 | ||
@@ -51,13 +51,13 @@ def qemuimagetest_main(d): | |||
51 | 51 | ||
52 | """funtion to run each case under scenario""" | 52 | """funtion to run each case under scenario""" |
53 | def runtest(scen, case, fulltestpath): | 53 | def runtest(scen, case, fulltestpath): |
54 | resultpath = d.getVar('TEST_RESULT', 1) | 54 | resultpath = d.getVar('TEST_RESULT', True) |
55 | tmppath = d.getVar('TEST_TMP', 1) | 55 | tmppath = d.getVar('TEST_TMP', True) |
56 | 56 | ||
57 | """initialize log file for testcase""" | 57 | """initialize log file for testcase""" |
58 | logpath = d.getVar('TEST_LOG', 1) | 58 | logpath = d.getVar('TEST_LOG', True) |
59 | bb.utils.mkdirhier("%s/%s" % (logpath, scen)) | 59 | bb.utils.mkdirhier("%s/%s" % (logpath, scen)) |
60 | caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1))) | 60 | caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', True))) |
61 | os.system("touch %s" % caselog) | 61 | os.system("touch %s" % caselog) |
62 | 62 | ||
63 | """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" | 63 | """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" |
@@ -141,7 +141,7 @@ def qemuimagetest_main(d): | |||
141 | 141 | ||
142 | """Clean tmp folder for testing""" | 142 | """Clean tmp folder for testing""" |
143 | def clean_tmp(): | 143 | def clean_tmp(): |
144 | tmppath = d.getVar('TEST_TMP', 1) | 144 | tmppath = d.getVar('TEST_TMP', True) |
145 | 145 | ||
146 | if os.path.isdir(tmppath): | 146 | if os.path.isdir(tmppath): |
147 | for f in os.listdir(tmppath): | 147 | for f in os.listdir(tmppath): |
@@ -155,28 +155,28 @@ def qemuimagetest_main(d): | |||
155 | clean_tmp() | 155 | clean_tmp() |
156 | 156 | ||
157 | """check testcase folder and create test log folder""" | 157 | """check testcase folder and create test log folder""" |
158 | testpath = d.getVar('TEST_DIR', 1) | 158 | testpath = d.getVar('TEST_DIR', True) |
159 | bb.utils.mkdirhier(testpath) | 159 | bb.utils.mkdirhier(testpath) |
160 | 160 | ||
161 | logpath = d.getVar('TEST_LOG', 1) | 161 | logpath = d.getVar('TEST_LOG', True) |
162 | bb.utils.mkdirhier(logpath) | 162 | bb.utils.mkdirhier(logpath) |
163 | 163 | ||
164 | tmppath = d.getVar('TEST_TMP', 1) | 164 | tmppath = d.getVar('TEST_TMP', True) |
165 | bb.utils.mkdirhier(tmppath) | 165 | bb.utils.mkdirhier(tmppath) |
166 | 166 | ||
167 | """initialize test status file""" | 167 | """initialize test status file""" |
168 | test_status = d.getVar('TEST_STATUS', 1) | 168 | test_status = d.getVar('TEST_STATUS', True) |
169 | if os.path.exists(test_status): | 169 | if os.path.exists(test_status): |
170 | os.remove(test_status) | 170 | os.remove(test_status) |
171 | os.system("touch %s" % test_status) | 171 | os.system("touch %s" % test_status) |
172 | 172 | ||
173 | """initialize result file""" | 173 | """initialize result file""" |
174 | resultpath = d.getVar('TEST_RESULT', 1) | 174 | resultpath = d.getVar('TEST_RESULT', True) |
175 | bb.utils.mkdirhier(resultpath) | 175 | bb.utils.mkdirhier(resultpath) |
176 | resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1)) | 176 | resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', True)) |
177 | sresultfile = os.path.join(resultpath, "testresult.log") | 177 | sresultfile = os.path.join(resultpath, "testresult.log") |
178 | 178 | ||
179 | machine = d.getVar('MACHINE', 1) | 179 | machine = d.getVar('MACHINE', True) |
180 | 180 | ||
181 | if os.path.exists(sresultfile): | 181 | if os.path.exists(sresultfile): |
182 | os.remove(sresultfile) | 182 | os.remove(sresultfile) |
@@ -188,7 +188,7 @@ def qemuimagetest_main(d): | |||
188 | f.close() | 188 | f.close() |
189 | 189 | ||
190 | """generate pre-defined testcase list""" | 190 | """generate pre-defined testcase list""" |
191 | testlist = d.getVar('TEST_SCEN', 1) | 191 | testlist = d.getVar('TEST_SCEN', True) |
192 | fulllist = generate_list(testlist) | 192 | fulllist = generate_list(testlist) |
193 | 193 | ||
194 | """Begin testing""" | 194 | """Begin testing""" |
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass index 8693395111..d37c1fb2ed 100644 --- a/meta/classes/kernel-arch.bbclass +++ b/meta/classes/kernel-arch.bbclass | |||
@@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \ | |||
18 | def map_kernel_arch(a, d): | 18 | def map_kernel_arch(a, d): |
19 | import re | 19 | import re |
20 | 20 | ||
21 | valid_archs = d.getVar('valid_archs', 1).split() | 21 | valid_archs = d.getVar('valid_archs', True).split() |
22 | 22 | ||
23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' | 23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' |
24 | elif re.match('arm26$', a): return 'arm26' | 24 | elif re.match('arm26$', a): return 'arm26' |
@@ -32,7 +32,7 @@ def map_kernel_arch(a, d): | |||
32 | else: | 32 | else: |
33 | bb.error("cannot map '%s' to a linux kernel architecture" % a) | 33 | bb.error("cannot map '%s' to a linux kernel architecture" % a) |
34 | 34 | ||
35 | export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}" | 35 | export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}" |
36 | 36 | ||
37 | def map_uboot_arch(a, d): | 37 | def map_uboot_arch(a, d): |
38 | import re | 38 | import re |
@@ -41,5 +41,5 @@ def map_uboot_arch(a, d): | |||
41 | elif re.match('i.86$', a): return 'x86' | 41 | elif re.match('i.86$', a): return 'x86' |
42 | return a | 42 | return a |
43 | 43 | ||
44 | export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}" | 44 | export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}" |
45 | 45 | ||
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass index ec5d65e186..8fbec90ef1 100644 --- a/meta/classes/kernel.bbclass +++ b/meta/classes/kernel.bbclass | |||
@@ -11,9 +11,9 @@ INITRAMFS_IMAGE ?= "" | |||
11 | INITRAMFS_TASK ?= "" | 11 | INITRAMFS_TASK ?= "" |
12 | 12 | ||
13 | python __anonymous () { | 13 | python __anonymous () { |
14 | kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or '' | 14 | kerneltype = d.getVar('KERNEL_IMAGETYPE', True) or '' |
15 | if kerneltype == 'uImage': | 15 | if kerneltype == 'uImage': |
16 | depends = d.getVar("DEPENDS", 1) | 16 | depends = d.getVar("DEPENDS", True) |
17 | depends = "%s u-boot-mkimage-native" % depends | 17 | depends = "%s u-boot-mkimage-native" % depends |
18 | d.setVar("DEPENDS", depends) | 18 | d.setVar("DEPENDS", depends) |
19 | 19 | ||
@@ -75,7 +75,7 @@ EXTRA_OEMAKE = "" | |||
75 | 75 | ||
76 | KERNEL_ALT_IMAGETYPE ??= "" | 76 | KERNEL_ALT_IMAGETYPE ??= "" |
77 | 77 | ||
78 | KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}" | 78 | KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', True))}" |
79 | 79 | ||
80 | kernel_do_compile() { | 80 | kernel_do_compile() { |
81 | unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE | 81 | unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE |
@@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm" | |||
306 | python populate_packages_prepend () { | 306 | python populate_packages_prepend () { |
307 | def extract_modinfo(file): | 307 | def extract_modinfo(file): |
308 | import tempfile, re | 308 | import tempfile, re |
309 | tempfile.tempdir = d.getVar("WORKDIR", 1) | 309 | tempfile.tempdir = d.getVar("WORKDIR", True) |
310 | tf = tempfile.mkstemp() | 310 | tf = tempfile.mkstemp() |
311 | tmpfile = tf[1] | 311 | tmpfile = tf[1] |
312 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile) | 312 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile) |
313 | os.system(cmd) | 313 | os.system(cmd) |
314 | f = open(tmpfile) | 314 | f = open(tmpfile) |
315 | l = f.read().split("\000") | 315 | l = f.read().split("\000") |
@@ -328,18 +328,18 @@ python populate_packages_prepend () { | |||
328 | def parse_depmod(): | 328 | def parse_depmod(): |
329 | import re | 329 | import re |
330 | 330 | ||
331 | dvar = d.getVar('PKGD', 1) | 331 | dvar = d.getVar('PKGD', True) |
332 | if not dvar: | 332 | if not dvar: |
333 | bb.error("PKGD not defined") | 333 | bb.error("PKGD not defined") |
334 | return | 334 | return |
335 | 335 | ||
336 | kernelver = d.getVar('KERNEL_VERSION', 1) | 336 | kernelver = d.getVar('KERNEL_VERSION', True) |
337 | kernelver_stripped = kernelver | 337 | kernelver_stripped = kernelver |
338 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) | 338 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) |
339 | if m: | 339 | if m: |
340 | kernelver_stripped = m.group(1) | 340 | kernelver_stripped = m.group(1) |
341 | path = d.getVar("PATH", 1) | 341 | path = d.getVar("PATH", True) |
342 | host_prefix = d.getVar("HOST_PREFIX", 1) or "" | 342 | host_prefix = d.getVar("HOST_PREFIX", True) or "" |
343 | 343 | ||
344 | cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) | 344 | cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) |
345 | f = os.popen(cmd, 'r') | 345 | f = os.popen(cmd, 'r') |
@@ -377,9 +377,9 @@ python populate_packages_prepend () { | |||
377 | 377 | ||
378 | def get_dependencies(file, pattern, format): | 378 | def get_dependencies(file, pattern, format): |
379 | # file no longer includes PKGD | 379 | # file no longer includes PKGD |
380 | file = file.replace(d.getVar('PKGD', 1) or '', '', 1) | 380 | file = file.replace(d.getVar('PKGD', True) or '', '', 1) |
381 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} | 381 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} |
382 | file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1) | 382 | file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1) |
383 | 383 | ||
384 | if module_deps.has_key(file): | 384 | if module_deps.has_key(file): |
385 | import re | 385 | import re |
@@ -398,40 +398,40 @@ python populate_packages_prepend () { | |||
398 | import re | 398 | import re |
399 | vals = extract_modinfo(file) | 399 | vals = extract_modinfo(file) |
400 | 400 | ||
401 | dvar = d.getVar('PKGD', 1) | 401 | dvar = d.getVar('PKGD', True) |
402 | 402 | ||
403 | # If autoloading is requested, output /etc/modutils/<name> and append | 403 | # If autoloading is requested, output /etc/modutils/<name> and append |
404 | # appropriate modprobe commands to the postinst | 404 | # appropriate modprobe commands to the postinst |
405 | autoload = d.getVar('module_autoload_%s' % basename, 1) | 405 | autoload = d.getVar('module_autoload_%s' % basename, True) |
406 | if autoload: | 406 | if autoload: |
407 | name = '%s/etc/modutils/%s' % (dvar, basename) | 407 | name = '%s/etc/modutils/%s' % (dvar, basename) |
408 | f = open(name, 'w') | 408 | f = open(name, 'w') |
409 | for m in autoload.split(): | 409 | for m in autoload.split(): |
410 | f.write('%s\n' % m) | 410 | f.write('%s\n' % m) |
411 | f.close() | 411 | f.close() |
412 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) | 412 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) |
413 | if not postinst: | 413 | if not postinst: |
414 | bb.fatal("pkg_postinst_%s not defined" % pkg) | 414 | bb.fatal("pkg_postinst_%s not defined" % pkg) |
415 | postinst += d.getVar('autoload_postinst_fragment', 1) % autoload | 415 | postinst += d.getVar('autoload_postinst_fragment', True) % autoload |
416 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 416 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
417 | 417 | ||
418 | # Write out any modconf fragment | 418 | # Write out any modconf fragment |
419 | modconf = d.getVar('module_conf_%s' % basename, 1) | 419 | modconf = d.getVar('module_conf_%s' % basename, True) |
420 | if modconf: | 420 | if modconf: |
421 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) | 421 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) |
422 | f = open(name, 'w') | 422 | f = open(name, 'w') |
423 | f.write("%s\n" % modconf) | 423 | f.write("%s\n" % modconf) |
424 | f.close() | 424 | f.close() |
425 | 425 | ||
426 | files = d.getVar('FILES_%s' % pkg, 1) | 426 | files = d.getVar('FILES_%s' % pkg, True) |
427 | files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) | 427 | files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) |
428 | d.setVar('FILES_%s' % pkg, files) | 428 | d.setVar('FILES_%s' % pkg, files) |
429 | 429 | ||
430 | if vals.has_key("description"): | 430 | if vals.has_key("description"): |
431 | old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or "" | 431 | old_desc = d.getVar('DESCRIPTION_' + pkg, True) or "" |
432 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) | 432 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) |
433 | 433 | ||
434 | rdepends_str = d.getVar('RDEPENDS_' + pkg, 1) | 434 | rdepends_str = d.getVar('RDEPENDS_' + pkg, True) |
435 | if rdepends_str: | 435 | if rdepends_str: |
436 | rdepends = rdepends_str.split() | 436 | rdepends = rdepends_str.split() |
437 | else: | 437 | else: |
@@ -443,12 +443,12 @@ python populate_packages_prepend () { | |||
443 | module_regex = '^(.*)\.k?o$' | 443 | module_regex = '^(.*)\.k?o$' |
444 | module_pattern = 'kernel-module-%s' | 444 | module_pattern = 'kernel-module-%s' |
445 | 445 | ||
446 | postinst = d.getVar('pkg_postinst_modules', 1) | 446 | postinst = d.getVar('pkg_postinst_modules', True) |
447 | postrm = d.getVar('pkg_postrm_modules', 1) | 447 | postrm = d.getVar('pkg_postrm_modules', True) |
448 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 448 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
449 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 449 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
450 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 450 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
451 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1)) | 451 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True)) |
452 | 452 | ||
453 | import re | 453 | import re |
454 | metapkg = "kernel-modules" | 454 | metapkg = "kernel-modules" |
@@ -460,7 +460,7 @@ python populate_packages_prepend () { | |||
460 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) | 460 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) |
461 | blacklist.append(pkg) | 461 | blacklist.append(pkg) |
462 | metapkg_rdepends = [] | 462 | metapkg_rdepends = [] |
463 | packages = d.getVar('PACKAGES', 1).split() | 463 | packages = d.getVar('PACKAGES', True).split() |
464 | for pkg in packages[1:]: | 464 | for pkg in packages[1:]: |
465 | if not pkg in blacklist and not pkg in metapkg_rdepends: | 465 | if not pkg in blacklist and not pkg in metapkg_rdepends: |
466 | metapkg_rdepends.append(pkg) | 466 | metapkg_rdepends.append(pkg) |
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass index ec33762a20..962f205f81 100644 --- a/meta/classes/libc-common.bbclass +++ b/meta/classes/libc-common.bbclass | |||
@@ -18,13 +18,13 @@ do_install() { | |||
18 | } | 18 | } |
19 | 19 | ||
20 | def get_libc_fpu_setting(bb, d): | 20 | def get_libc_fpu_setting(bb, d): |
21 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: | 21 | if d.getVar('TARGET_FPU', True) in [ 'soft' ]: |
22 | return "--without-fp" | 22 | return "--without-fp" |
23 | return "" | 23 | return "" |
24 | 24 | ||
25 | python populate_packages_prepend () { | 25 | python populate_packages_prepend () { |
26 | if d.getVar('DEBIAN_NAMES', 1): | 26 | if d.getVar('DEBIAN_NAMES', True): |
27 | bpn = d.getVar('BPN', 1) | 27 | bpn = d.getVar('BPN', True) |
28 | d.setVar('PKG_'+bpn, 'libc6') | 28 | d.setVar('PKG_'+bpn, 'libc6') |
29 | d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') | 29 | d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') |
30 | } | 30 | } |
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index cfc9eafb93..3de704f3f9 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
@@ -261,7 +261,7 @@ def incompatible_license(d,dont_want_license): | |||
261 | from fnmatch import fnmatchcase as fnmatch | 261 | from fnmatch import fnmatchcase as fnmatch |
262 | 262 | ||
263 | dont_want_licenses = [] | 263 | dont_want_licenses = [] |
264 | dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', 1)) | 264 | dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', True)) |
265 | if d.getVarFlag('SPDXLICENSEMAP', dont_want_license): | 265 | if d.getVarFlag('SPDXLICENSEMAP', dont_want_license): |
266 | dont_want_licenses.append(d.getVarFlag('SPDXLICENSEMAP', dont_want_license)) | 266 | dont_want_licenses.append(d.getVarFlag('SPDXLICENSEMAP', dont_want_license)) |
267 | 267 | ||
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass index 57609ef8cd..62650be675 100644 --- a/meta/classes/metadata_scm.bbclass +++ b/meta/classes/metadata_scm.bbclass | |||
@@ -27,7 +27,7 @@ def base_detect_branch(d): | |||
27 | return "<unknown>" | 27 | return "<unknown>" |
28 | 28 | ||
29 | def base_get_scmbasepath(d): | 29 | def base_get_scmbasepath(d): |
30 | return d.getVar( 'COREBASE', 1 ) | 30 | return d.getVar( 'COREBASE', True) |
31 | 31 | ||
32 | def base_get_metadata_monotone_branch(path, d): | 32 | def base_get_metadata_monotone_branch(path, d): |
33 | monotone_branch = "<unknown>" | 33 | monotone_branch = "<unknown>" |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index 4ed6972a7c..5c42619f3f 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -1067,7 +1067,7 @@ python emit_pkgdata() { | |||
1067 | return size | 1067 | return size |
1068 | 1068 | ||
1069 | packages = d.getVar('PACKAGES', True) | 1069 | packages = d.getVar('PACKAGES', True) |
1070 | pkgdest = d.getVar('PKGDEST', 1) | 1070 | pkgdest = d.getVar('PKGDEST', True) |
1071 | pkgdatadir = d.getVar('PKGDESTWORK', True) | 1071 | pkgdatadir = d.getVar('PKGDESTWORK', True) |
1072 | 1072 | ||
1073 | # Take shared lock since we're only reading, not writing | 1073 | # Take shared lock since we're only reading, not writing |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index e9d1ddcdbc..ff8b5b488a 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
@@ -15,12 +15,12 @@ python package_ipk_fn () { | |||
15 | } | 15 | } |
16 | 16 | ||
17 | python package_ipk_install () { | 17 | python package_ipk_install () { |
18 | pkg = d.getVar('PKG', 1) | 18 | pkg = d.getVar('PKG', True) |
19 | pkgfn = d.getVar('PKGFN', 1) | 19 | pkgfn = d.getVar('PKGFN', True) |
20 | rootfs = d.getVar('IMAGE_ROOTFS', 1) | 20 | rootfs = d.getVar('IMAGE_ROOTFS', True) |
21 | ipkdir = d.getVar('DEPLOY_DIR_IPK', 1) | 21 | ipkdir = d.getVar('DEPLOY_DIR_IPK', True) |
22 | stagingdir = d.getVar('STAGING_DIR', 1) | 22 | stagingdir = d.getVar('STAGING_DIR', True) |
23 | tmpdir = d.getVar('TMPDIR', 1) | 23 | tmpdir = d.getVar('TMPDIR', True) |
24 | 24 | ||
25 | if None in (pkg,pkgfn,rootfs): | 25 | if None in (pkg,pkgfn,rootfs): |
26 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") | 26 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") |
@@ -289,7 +289,7 @@ python do_package_ipk () { | |||
289 | 289 | ||
290 | localdata.setVar('ROOT', '') | 290 | localdata.setVar('ROOT', '') |
291 | localdata.setVar('ROOT_%s' % pkg, root) | 291 | localdata.setVar('ROOT_%s' % pkg, root) |
292 | pkgname = localdata.getVar('PKG_%s' % pkg, 1) | 292 | pkgname = localdata.getVar('PKG_%s' % pkg, True) |
293 | if not pkgname: | 293 | if not pkgname: |
294 | pkgname = pkg | 294 | pkgname = pkg |
295 | localdata.setVar('PKG', pkgname) | 295 | localdata.setVar('PKG', pkgname) |
@@ -298,7 +298,7 @@ python do_package_ipk () { | |||
298 | 298 | ||
299 | bb.data.update_data(localdata) | 299 | bb.data.update_data(localdata) |
300 | basedir = os.path.join(os.path.dirname(root)) | 300 | basedir = os.path.join(os.path.dirname(root)) |
301 | arch = localdata.getVar('PACKAGE_ARCH', 1) | 301 | arch = localdata.getVar('PACKAGE_ARCH', True) |
302 | pkgoutdir = "%s/%s" % (outdir, arch) | 302 | pkgoutdir = "%s/%s" % (outdir, arch) |
303 | bb.mkdirhier(pkgoutdir) | 303 | bb.mkdirhier(pkgoutdir) |
304 | os.chdir(root) | 304 | os.chdir(root) |
@@ -310,7 +310,7 @@ python do_package_ipk () { | |||
310 | except ValueError: | 310 | except ValueError: |
311 | pass | 311 | pass |
312 | if not g and localdata.getVar('ALLOW_EMPTY') != "1": | 312 | if not g and localdata.getVar('ALLOW_EMPTY') != "1": |
313 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) | 313 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) |
314 | bb.utils.unlockfile(lf) | 314 | bb.utils.unlockfile(lf) |
315 | continue | 315 | continue |
316 | 316 | ||
@@ -323,7 +323,7 @@ python do_package_ipk () { | |||
323 | raise bb.build.FuncFailed("unable to open control file for writing.") | 323 | raise bb.build.FuncFailed("unable to open control file for writing.") |
324 | 324 | ||
325 | fields = [] | 325 | fields = [] |
326 | pe = d.getVar('PKGE', 1) | 326 | pe = d.getVar('PKGE', True) |
327 | if pe and int(pe) > 0: | 327 | if pe and int(pe) > 0: |
328 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) | 328 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) |
329 | else: | 329 | else: |
@@ -340,7 +340,7 @@ python do_package_ipk () { | |||
340 | def pullData(l, d): | 340 | def pullData(l, d): |
341 | l2 = [] | 341 | l2 = [] |
342 | for i in l: | 342 | for i in l: |
343 | l2.append(d.getVar(i, 1)) | 343 | l2.append(d.getVar(i, True)) |
344 | return l2 | 344 | return l2 |
345 | 345 | ||
346 | ctrlfile.write("Package: %s\n" % pkgname) | 346 | ctrlfile.write("Package: %s\n" % pkgname) |
@@ -369,12 +369,12 @@ python do_package_ipk () { | |||
369 | 369 | ||
370 | bb.build.exec_func("mapping_rename_hook", localdata) | 370 | bb.build.exec_func("mapping_rename_hook", localdata) |
371 | 371 | ||
372 | rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "") | 372 | rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "") |
373 | rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "") | 373 | rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "") |
374 | rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "") | 374 | rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "") |
375 | rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "") | 375 | rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "") |
376 | rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "") | 376 | rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "") |
377 | rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "") | 377 | rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "") |
378 | 378 | ||
379 | if rdepends: | 379 | if rdepends: |
380 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) | 380 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) |
@@ -388,14 +388,14 @@ python do_package_ipk () { | |||
388 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) | 388 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) |
389 | if rconflicts: | 389 | if rconflicts: |
390 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) | 390 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) |
391 | src_uri = localdata.getVar("SRC_URI", 1) | 391 | src_uri = localdata.getVar("SRC_URI", True) |
392 | if src_uri: | 392 | if src_uri: |
393 | src_uri = re.sub("\s+", " ", src_uri) | 393 | src_uri = re.sub("\s+", " ", src_uri) |
394 | ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) | 394 | ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) |
395 | ctrlfile.close() | 395 | ctrlfile.close() |
396 | 396 | ||
397 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 397 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
398 | scriptvar = localdata.getVar('pkg_%s' % script, 1) | 398 | scriptvar = localdata.getVar('pkg_%s' % script, True) |
399 | if not scriptvar: | 399 | if not scriptvar: |
400 | continue | 400 | continue |
401 | try: | 401 | try: |
@@ -407,7 +407,7 @@ python do_package_ipk () { | |||
407 | scriptfile.close() | 407 | scriptfile.close() |
408 | os.chmod(os.path.join(controldir, script), 0755) | 408 | os.chmod(os.path.join(controldir, script), 0755) |
409 | 409 | ||
410 | conffiles_str = localdata.getVar("CONFFILES", 1) | 410 | conffiles_str = localdata.getVar("CONFFILES", True) |
411 | if conffiles_str: | 411 | if conffiles_str: |
412 | try: | 412 | try: |
413 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') | 413 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') |
@@ -419,7 +419,7 @@ python do_package_ipk () { | |||
419 | conffiles.close() | 419 | conffiles.close() |
420 | 420 | ||
421 | os.chdir(basedir) | 421 | os.chdir(basedir) |
422 | ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1), | 422 | ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True), |
423 | d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir)) | 423 | d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir)) |
424 | if ret != 0: | 424 | if ret != 0: |
425 | bb.utils.unlockfile(lf) | 425 | bb.utils.unlockfile(lf) |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index a264712f9e..af8c63ed6f 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
@@ -619,7 +619,7 @@ python write_specfile () { | |||
619 | 619 | ||
620 | localdata.setVar('ROOT', '') | 620 | localdata.setVar('ROOT', '') |
621 | localdata.setVar('ROOT_%s' % pkg, root) | 621 | localdata.setVar('ROOT_%s' % pkg, root) |
622 | pkgname = localdata.getVar('PKG_%s' % pkg, 1) | 622 | pkgname = localdata.getVar('PKG_%s' % pkg, True) |
623 | if not pkgname: | 623 | if not pkgname: |
624 | pkgname = pkg | 624 | pkgname = pkg |
625 | localdata.setVar('PKG', pkgname) | 625 | localdata.setVar('PKG', pkgname) |
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass index 201bd91657..7590177e4b 100644 --- a/meta/classes/package_tar.bbclass +++ b/meta/classes/package_tar.bbclass | |||
@@ -9,9 +9,9 @@ python package_tar_fn () { | |||
9 | } | 9 | } |
10 | 10 | ||
11 | python package_tar_install () { | 11 | python package_tar_install () { |
12 | pkg = d.getVar('PKG', 1) | 12 | pkg = d.getVar('PKG', True) |
13 | pkgfn = d.getVar('PKGFN', 1) | 13 | pkgfn = d.getVar('PKGFN', True) |
14 | rootfs = d.getVar('IMAGE_ROOTFS', 1) | 14 | rootfs = d.getVar('IMAGE_ROOTFS', True) |
15 | 15 | ||
16 | if None in (pkg,pkgfn,rootfs): | 16 | if None in (pkg,pkgfn,rootfs): |
17 | bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") | 17 | bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") |
@@ -35,24 +35,24 @@ python package_tar_install () { | |||
35 | } | 35 | } |
36 | 36 | ||
37 | python do_package_tar () { | 37 | python do_package_tar () { |
38 | workdir = d.getVar('WORKDIR', 1) | 38 | workdir = d.getVar('WORKDIR', True) |
39 | if not workdir: | 39 | if not workdir: |
40 | bb.error("WORKDIR not defined, unable to package") | 40 | bb.error("WORKDIR not defined, unable to package") |
41 | return | 41 | return |
42 | 42 | ||
43 | outdir = d.getVar('DEPLOY_DIR_TAR', 1) | 43 | outdir = d.getVar('DEPLOY_DIR_TAR', True) |
44 | if not outdir: | 44 | if not outdir: |
45 | bb.error("DEPLOY_DIR_TAR not defined, unable to package") | 45 | bb.error("DEPLOY_DIR_TAR not defined, unable to package") |
46 | return | 46 | return |
47 | bb.mkdirhier(outdir) | 47 | bb.mkdirhier(outdir) |
48 | 48 | ||
49 | dvar = d.getVar('D', 1) | 49 | dvar = d.getVar('D', True) |
50 | if not dvar: | 50 | if not dvar: |
51 | bb.error("D not defined, unable to package") | 51 | bb.error("D not defined, unable to package") |
52 | return | 52 | return |
53 | bb.mkdirhier(dvar) | 53 | bb.mkdirhier(dvar) |
54 | 54 | ||
55 | packages = d.getVar('PACKAGES', 1) | 55 | packages = d.getVar('PACKAGES', True) |
56 | if not packages: | 56 | if not packages: |
57 | bb.debug(1, "PACKAGES not defined, nothing to package") | 57 | bb.debug(1, "PACKAGES not defined, nothing to package") |
58 | return | 58 | return |
@@ -79,11 +79,11 @@ python do_package_tar () { | |||
79 | pkgoutdir = outdir | 79 | pkgoutdir = outdir |
80 | bb.mkdirhier(pkgoutdir) | 80 | bb.mkdirhier(pkgoutdir) |
81 | bb.build.exec_func('package_tar_fn', localdata) | 81 | bb.build.exec_func('package_tar_fn', localdata) |
82 | tarfn = localdata.getVar('PKGFN', 1) | 82 | tarfn = localdata.getVar('PKGFN', True) |
83 | os.chdir(root) | 83 | os.chdir(root) |
84 | from glob import glob | 84 | from glob import glob |
85 | if not glob('*'): | 85 | if not glob('*'): |
86 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) | 86 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) |
87 | continue | 87 | continue |
88 | ret = os.system("tar -czf %s %s" % (tarfn, '.')) | 88 | ret = os.system("tar -czf %s %s" % (tarfn, '.')) |
89 | if ret != 0: | 89 | if ret != 0: |
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass index 9c7aede3bb..60f1aded0d 100644 --- a/meta/classes/packagedata.bbclass +++ b/meta/classes/packagedata.bbclass | |||
@@ -1,12 +1,12 @@ | |||
1 | python read_subpackage_metadata () { | 1 | python read_subpackage_metadata () { |
2 | import oe.packagedata | 2 | import oe.packagedata |
3 | 3 | ||
4 | data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d) | 4 | data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d) |
5 | 5 | ||
6 | for key in data.keys(): | 6 | for key in data.keys(): |
7 | d.setVar(key, data[key]) | 7 | d.setVar(key, data[key]) |
8 | 8 | ||
9 | for pkg in d.getVar('PACKAGES', 1).split(): | 9 | for pkg in d.getVar('PACKAGES', True).split(): |
10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) | 10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) |
11 | for key in sdata.keys(): | 11 | for key in sdata.keys(): |
12 | d.setVar(key, sdata[key]) | 12 | d.setVar(key, sdata[key]) |
diff --git a/meta/classes/pkg_distribute.bbclass b/meta/classes/pkg_distribute.bbclass index 52643a2f90..9f249a0dfe 100644 --- a/meta/classes/pkg_distribute.bbclass +++ b/meta/classes/pkg_distribute.bbclass | |||
@@ -1,6 +1,6 @@ | |||
1 | PKG_DISTRIBUTECOMMAND[func] = "1" | 1 | PKG_DISTRIBUTECOMMAND[func] = "1" |
2 | python do_distribute_packages () { | 2 | python do_distribute_packages () { |
3 | cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1) | 3 | cmd = d.getVar('PKG_DISTRIBUTECOMMAND', True) |
4 | if not cmd: | 4 | if not cmd: |
5 | raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") | 5 | raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") |
6 | bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) | 6 | bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) |
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass index 1714a535c2..4b182690f2 100644 --- a/meta/classes/pkg_metainfo.bbclass +++ b/meta/classes/pkg_metainfo.bbclass | |||
@@ -1,5 +1,5 @@ | |||
1 | python do_pkg_write_metainfo () { | 1 | python do_pkg_write_metainfo () { |
2 | deploydir = d.getVar('DEPLOY_DIR', 1) | 2 | deploydir = d.getVar('DEPLOY_DIR', True) |
3 | if not deploydir: | 3 | if not deploydir: |
4 | bb.error("DEPLOY_DIR not defined, unable to write package info") | 4 | bb.error("DEPLOY_DIR not defined, unable to write package info") |
5 | return | 5 | return |
@@ -9,11 +9,11 @@ python do_pkg_write_metainfo () { | |||
9 | except OSError: | 9 | except OSError: |
10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") | 10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") |
11 | 11 | ||
12 | name = d.getVar('PN', 1) | 12 | name = d.getVar('PN', True) |
13 | version = d.getVar('PV', 1) | 13 | version = d.getVar('PV', True) |
14 | desc = d.getVar('DESCRIPTION', 1) | 14 | desc = d.getVar('DESCRIPTION', True) |
15 | page = d.getVar('HOMEPAGE', 1) | 15 | page = d.getVar('HOMEPAGE', True) |
16 | lic = d.getVar('LICENSE', 1) | 16 | lic = d.getVar('LICENSE', True) |
17 | 17 | ||
18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) | 18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) |
19 | infofile.close() | 19 | infofile.close() |
diff --git a/meta/classes/populate_sdk_deb.bbclass b/meta/classes/populate_sdk_deb.bbclass index fe3d849162..920c89a0f3 100644 --- a/meta/classes/populate_sdk_deb.bbclass +++ b/meta/classes/populate_sdk_deb.bbclass | |||
@@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul | |||
2 | do_populate_sdk[recrdeptask] += "do_package_write_deb" | 2 | do_populate_sdk[recrdeptask] += "do_package_write_deb" |
3 | 3 | ||
4 | 4 | ||
5 | DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\ | 5 | DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', True), "i386"]\ |
6 | [d.getVar('SDK_ARCH', 1) in \ | 6 | [d.getVar('SDK_ARCH', True) in \ |
7 | ["x86", "i486", "i586", "i686", "pentium"]]}" | 7 | ["x86", "i486", "i586", "i686", "pentium"]]}" |
8 | 8 | ||
9 | populate_sdk_post_deb () { | 9 | populate_sdk_post_deb () { |
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass index 1bdd209afe..22ebcfde40 100644 --- a/meta/classes/qemu.bbclass +++ b/meta/classes/qemu.bbclass | |||
@@ -6,7 +6,7 @@ | |||
6 | def qemu_target_binary(data): | 6 | def qemu_target_binary(data): |
7 | import bb | 7 | import bb |
8 | 8 | ||
9 | target_arch = data.getVar("TARGET_ARCH", 1) | 9 | target_arch = data.getVar("TARGET_ARCH", True) |
10 | if target_arch in ("i486", "i586", "i686"): | 10 | if target_arch in ("i486", "i586", "i686"): |
11 | target_arch = "i386" | 11 | target_arch = "i386" |
12 | elif target_arch == "powerpc": | 12 | elif target_arch == "powerpc": |
diff --git a/meta/classes/qt4e.bbclass b/meta/classes/qt4e.bbclass index f3d5caa455..05c24efaa1 100644 --- a/meta/classes/qt4e.bbclass +++ b/meta/classes/qt4e.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}" | 1 | DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', True)[:12] == 'qt4-embedded')]}" |
2 | 2 | ||
3 | inherit qmake2 | 3 | inherit qmake2 |
4 | 4 | ||
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass index bb60ffa00e..b3246599b9 100644 --- a/meta/classes/recipe_sanity.bbclass +++ b/meta/classes/recipe_sanity.bbclass | |||
@@ -1,5 +1,5 @@ | |||
1 | def __note(msg, d): | 1 | def __note(msg, d): |
2 | bb.note("%s: recipe_sanity: %s" % (d.getVar("P", 1), msg)) | 2 | bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg)) |
3 | 3 | ||
4 | __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" | 4 | __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" |
5 | def bad_runtime_vars(cfgdata, d): | 5 | def bad_runtime_vars(cfgdata, d): |
@@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d): | |||
7 | bb.data.inherits_class("cross", d): | 7 | bb.data.inherits_class("cross", d): |
8 | return | 8 | return |
9 | 9 | ||
10 | for var in d.getVar("__recipe_sanity_badruntimevars", 1).split(): | 10 | for var in d.getVar("__recipe_sanity_badruntimevars", True).split(): |
11 | val = d.getVar(var, 0) | 11 | val = d.getVar(var, 0) |
12 | if val and val != cfgdata.get(var): | 12 | if val and val != cfgdata.get(var): |
13 | __note("%s should be %s_${PN}" % (var, var), d) | 13 | __note("%s should be %s_${PN}" % (var, var), d) |
@@ -15,17 +15,17 @@ def bad_runtime_vars(cfgdata, d): | |||
15 | __recipe_sanity_reqvars = "DESCRIPTION" | 15 | __recipe_sanity_reqvars = "DESCRIPTION" |
16 | __recipe_sanity_reqdiffvars = "LICENSE" | 16 | __recipe_sanity_reqdiffvars = "LICENSE" |
17 | def req_vars(cfgdata, d): | 17 | def req_vars(cfgdata, d): |
18 | for var in d.getVar("__recipe_sanity_reqvars", 1).split(): | 18 | for var in d.getVar("__recipe_sanity_reqvars", True).split(): |
19 | if not d.getVar(var, 0): | 19 | if not d.getVar(var, 0): |
20 | __note("%s should be set" % var, d) | 20 | __note("%s should be set" % var, d) |
21 | 21 | ||
22 | for var in d.getVar("__recipe_sanity_reqdiffvars", 1).split(): | 22 | for var in d.getVar("__recipe_sanity_reqdiffvars", True).split(): |
23 | val = d.getVar(var, 0) | 23 | val = d.getVar(var, 0) |
24 | cfgval = cfgdata.get(var) | 24 | cfgval = cfgdata.get(var) |
25 | 25 | ||
26 | # Hardcoding is bad, but I'm lazy. We don't care about license being | 26 | # Hardcoding is bad, but I'm lazy. We don't care about license being |
27 | # unset if the recipe has no sources! | 27 | # unset if the recipe has no sources! |
28 | if var == "LICENSE" and d.getVar("SRC_URI", 1) == cfgdata.get("SRC_URI"): | 28 | if var == "LICENSE" and d.getVar("SRC_URI", True) == cfgdata.get("SRC_URI"): |
29 | continue | 29 | continue |
30 | 30 | ||
31 | if not val: | 31 | if not val: |
@@ -43,11 +43,11 @@ def var_renames_overwrite(cfgdata, d): | |||
43 | def incorrect_nonempty_PACKAGES(cfgdata, d): | 43 | def incorrect_nonempty_PACKAGES(cfgdata, d): |
44 | if bb.data.inherits_class("native", d) or \ | 44 | if bb.data.inherits_class("native", d) or \ |
45 | bb.data.inherits_class("cross", d): | 45 | bb.data.inherits_class("cross", d): |
46 | if d.getVar("PACKAGES", 1): | 46 | if d.getVar("PACKAGES", True): |
47 | return True | 47 | return True |
48 | 48 | ||
49 | def can_use_autotools_base(cfgdata, d): | 49 | def can_use_autotools_base(cfgdata, d): |
50 | cfg = d.getVar("do_configure", 1) | 50 | cfg = d.getVar("do_configure", True) |
51 | if not bb.data.inherits_class("autotools", d): | 51 | if not bb.data.inherits_class("autotools", d): |
52 | return False | 52 | return False |
53 | 53 | ||
@@ -65,10 +65,10 @@ def can_use_autotools_base(cfgdata, d): | |||
65 | 65 | ||
66 | def can_remove_FILESPATH(cfgdata, d): | 66 | def can_remove_FILESPATH(cfgdata, d): |
67 | expected = cfgdata.get("FILESPATH") | 67 | expected = cfgdata.get("FILESPATH") |
68 | #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', 1).split(':') for p in d.getVar('FILESPATHPKG', 1).split(':') for o in (d.getVar('OVERRIDES', 1) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}" | 68 | #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', True).split(':') for p in d.getVar('FILESPATHPKG', True).split(':') for o in (d.getVar('OVERRIDES', True) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}" |
69 | expectedpaths = bb.data.expand(expected, d) | 69 | expectedpaths = bb.data.expand(expected, d) |
70 | unexpanded = d.getVar("FILESPATH", 0) | 70 | unexpanded = d.getVar("FILESPATH", 0) |
71 | filespath = d.getVar("FILESPATH", 1).split(":") | 71 | filespath = d.getVar("FILESPATH", True).split(":") |
72 | filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] | 72 | filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] |
73 | for fp in filespath: | 73 | for fp in filespath: |
74 | if not fp in expectedpaths: | 74 | if not fp in expectedpaths: |
@@ -79,13 +79,13 @@ def can_remove_FILESPATH(cfgdata, d): | |||
79 | 79 | ||
80 | def can_remove_FILESDIR(cfgdata, d): | 80 | def can_remove_FILESDIR(cfgdata, d): |
81 | expected = cfgdata.get("FILESDIR") | 81 | expected = cfgdata.get("FILESDIR") |
82 | #expected = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" | 82 | #expected = "${@bb.which(d.getVar('FILESPATH', True), '.')}" |
83 | unexpanded = d.getVar("FILESDIR", 0) | 83 | unexpanded = d.getVar("FILESDIR", 0) |
84 | if unexpanded is None: | 84 | if unexpanded is None: |
85 | return False | 85 | return False |
86 | 86 | ||
87 | expanded = os.path.normpath(d.getVar("FILESDIR", 1)) | 87 | expanded = os.path.normpath(d.getVar("FILESDIR", True)) |
88 | filespath = d.getVar("FILESPATH", 1).split(":") | 88 | filespath = d.getVar("FILESPATH", True).split(":") |
89 | filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] | 89 | filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] |
90 | 90 | ||
91 | return unexpanded != expected and \ | 91 | return unexpanded != expected and \ |
@@ -103,7 +103,7 @@ def can_remove_others(p, cfgdata, d): | |||
103 | continue | 103 | continue |
104 | 104 | ||
105 | try: | 105 | try: |
106 | expanded = d.getVar(k, 1) | 106 | expanded = d.getVar(k, True) |
107 | cfgexpanded = bb.data.expand(cfgunexpanded, d) | 107 | cfgexpanded = bb.data.expand(cfgunexpanded, d) |
108 | except bb.fetch.ParameterError: | 108 | except bb.fetch.ParameterError: |
109 | continue | 109 | continue |
@@ -115,8 +115,8 @@ def can_remove_others(p, cfgdata, d): | |||
115 | (p, cfgunexpanded, unexpanded, expanded)) | 115 | (p, cfgunexpanded, unexpanded, expanded)) |
116 | 116 | ||
117 | python do_recipe_sanity () { | 117 | python do_recipe_sanity () { |
118 | p = d.getVar("P", 1) | 118 | p = d.getVar("P", True) |
119 | p = "%s %s %s" % (d.getVar("PN", 1), d.getVar("PV", 1), d.getVar("PR", 1)) | 119 | p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True)) |
120 | 120 | ||
121 | sanitychecks = [ | 121 | sanitychecks = [ |
122 | (can_remove_FILESDIR, "candidate for removal of FILESDIR"), | 122 | (can_remove_FILESDIR, "candidate for removal of FILESDIR"), |
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass index 880dcad1f3..ccbe5b99c9 100644 --- a/meta/classes/siteconfig.bbclass +++ b/meta/classes/siteconfig.bbclass | |||
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () { | |||
2 | shared_state = sstate_state_fromvars(d) | 2 | shared_state = sstate_state_fromvars(d) |
3 | if shared_state['name'] != 'populate-sysroot': | 3 | if shared_state['name'] != 'populate-sysroot': |
4 | return | 4 | return |
5 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')): | 5 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')): |
6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") | 6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") |
7 | return | 7 | return |
8 | bb.build.exec_func('do_siteconfig_gencache', d) | 8 | bb.build.exec_func('do_siteconfig_gencache', d) |
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass index bf6af2b838..8c256ceff3 100644 --- a/meta/classes/siteinfo.bbclass +++ b/meta/classes/siteinfo.bbclass | |||
@@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False): | |||
130 | if no_cache: return sitefiles | 130 | if no_cache: return sitefiles |
131 | 131 | ||
132 | # Now check for siteconfig cache files | 132 | # Now check for siteconfig cache files |
133 | path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1) | 133 | path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', True) |
134 | if os.path.isdir(path_siteconfig): | 134 | if os.path.isdir(path_siteconfig): |
135 | for i in os.listdir(path_siteconfig): | 135 | for i in os.listdir(path_siteconfig): |
136 | filename = os.path.join(path_siteconfig, i) | 136 | filename = os.path.join(path_siteconfig, i) |
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass index 38edfe4e2e..2a78a90452 100644 --- a/meta/classes/sourcepkg.bbclass +++ b/meta/classes/sourcepkg.bbclass | |||
@@ -6,7 +6,7 @@ DISTRO ?= "openembedded" | |||
6 | 6 | ||
7 | def get_src_tree(d): | 7 | def get_src_tree(d): |
8 | 8 | ||
9 | workdir = d.getVar('WORKDIR', 1) | 9 | workdir = d.getVar('WORKDIR', True) |
10 | if not workdir: | 10 | if not workdir: |
11 | bb.error("WORKDIR not defined, unable to find source tree.") | 11 | bb.error("WORKDIR not defined, unable to find source tree.") |
12 | return | 12 | return |
@@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() { | |||
55 | 55 | ||
56 | python sourcepkg_do_dumpdata() { | 56 | python sourcepkg_do_dumpdata() { |
57 | 57 | ||
58 | workdir = d.getVar('WORKDIR', 1) | 58 | workdir = d.getVar('WORKDIR', True) |
59 | distro = d.getVar('DISTRO', 1) | 59 | distro = d.getVar('DISTRO', True) |
60 | s_tree = get_src_tree(d) | 60 | s_tree = get_src_tree(d) |
61 | openembeddeddir = os.path.join(workdir, s_tree, distro) | 61 | openembeddeddir = os.path.join(workdir, s_tree, distro) |
62 | dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) | 62 | dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) |
@@ -74,7 +74,7 @@ python sourcepkg_do_dumpdata() { | |||
74 | # emit the metadata which isnt valid shell | 74 | # emit the metadata which isnt valid shell |
75 | for e in d.keys(): | 75 | for e in d.keys(): |
76 | if d.getVarFlag(e, 'python'): | 76 | if d.getVarFlag(e, 'python'): |
77 | f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1))) | 77 | f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, True))) |
78 | f.close() | 78 | f.close() |
79 | } | 79 | } |
80 | 80 | ||
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass index 651e492598..efa2720e23 100644 --- a/meta/classes/src_distribute.bbclass +++ b/meta/classes/src_distribute.bbclass | |||
@@ -3,12 +3,12 @@ python do_distribute_sources () { | |||
3 | l = bb.data.createCopy(d) | 3 | l = bb.data.createCopy(d) |
4 | bb.data.update_data(l) | 4 | bb.data.update_data(l) |
5 | 5 | ||
6 | sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1) | 6 | sources_dir = d.getVar('SRC_DISTRIBUTEDIR', True) |
7 | src_uri = d.getVar('SRC_URI', 1).split() | 7 | src_uri = d.getVar('SRC_URI', True).split() |
8 | fetcher = bb.fetch2.Fetch(src_uri, d) | 8 | fetcher = bb.fetch2.Fetch(src_uri, d) |
9 | ud = fetcher.ud | 9 | ud = fetcher.ud |
10 | 10 | ||
11 | licenses = d.getVar('LICENSE', 1).replace('&', '|') | 11 | licenses = d.getVar('LICENSE', True).replace('&', '|') |
12 | licenses = licenses.replace('(', '').replace(')', '') | 12 | licenses = licenses.replace('(', '').replace(')', '') |
13 | clean_licenses = "" | 13 | clean_licenses = "" |
14 | for x in licenses.split(): | 14 | for x in licenses.split(): |
@@ -20,7 +20,7 @@ python do_distribute_sources () { | |||
20 | 20 | ||
21 | for license in clean_licenses.split('|'): | 21 | for license in clean_licenses.split('|'): |
22 | for url in ud.values(): | 22 | for url in ud.values(): |
23 | cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1) | 23 | cmd = d.getVar('SRC_DISTRIBUTECOMMAND', True) |
24 | if not cmd: | 24 | if not cmd: |
25 | raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") | 25 | raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") |
26 | url.setup_localpath(d) | 26 | url.setup_localpath(d) |
@@ -29,9 +29,9 @@ python do_distribute_sources () { | |||
29 | if url.basename == '*': | 29 | if url.basename == '*': |
30 | import os.path | 30 | import os.path |
31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) | 31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) |
32 | d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir)) | 32 | d.setVar('DEST', "%s_%s/" % (d.getVar('PF', True), dest_dir)) |
33 | else: | 33 | else: |
34 | d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename)) | 34 | d.setVar('DEST', "%s_%s" % (d.getVar('PF', True), url.basename)) |
35 | else: | 35 | else: |
36 | d.setVar('DEST', '') | 36 | d.setVar('DEST', '') |
37 | 37 | ||
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass index ec93201581..b194fa69a3 100644 --- a/meta/classes/syslinux.bbclass +++ b/meta/classes/syslinux.bbclass | |||
@@ -57,12 +57,12 @@ python build_syslinux_menu () { | |||
57 | import copy | 57 | import copy |
58 | import sys | 58 | import sys |
59 | 59 | ||
60 | workdir = d.getVar('WORKDIR', 1) | 60 | workdir = d.getVar('WORKDIR', True) |
61 | if not workdir: | 61 | if not workdir: |
62 | bb.error("WORKDIR is not defined") | 62 | bb.error("WORKDIR is not defined") |
63 | return | 63 | return |
64 | 64 | ||
65 | labels = d.getVar('LABELS', 1) | 65 | labels = d.getVar('LABELS', True) |
66 | if not labels: | 66 | if not labels: |
67 | bb.debug(1, "LABELS not defined, nothing to do") | 67 | bb.debug(1, "LABELS not defined, nothing to do") |
68 | return | 68 | return |
@@ -71,7 +71,7 @@ python build_syslinux_menu () { | |||
71 | bb.debug(1, "No labels, nothing to do") | 71 | bb.debug(1, "No labels, nothing to do") |
72 | return | 72 | return |
73 | 73 | ||
74 | cfile = d.getVar('SYSLINUXMENU', 1) | 74 | cfile = d.getVar('SYSLINUXMENU', True) |
75 | if not cfile: | 75 | if not cfile: |
76 | raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') | 76 | raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') |
77 | 77 | ||
@@ -100,7 +100,7 @@ python build_syslinux_menu () { | |||
100 | localdata.setVar('OVERRIDES', label + ':' + overrides) | 100 | localdata.setVar('OVERRIDES', label + ':' + overrides) |
101 | bb.data.update_data(localdata) | 101 | bb.data.update_data(localdata) |
102 | 102 | ||
103 | usage = localdata.getVar('USAGE', 1) | 103 | usage = localdata.getVar('USAGE', True) |
104 | cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) | 104 | cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) |
105 | cfgfile.write('%s\n' % (usage)) | 105 | cfgfile.write('%s\n' % (usage)) |
106 | 106 | ||
@@ -114,12 +114,12 @@ python build_syslinux_cfg () { | |||
114 | import copy | 114 | import copy |
115 | import sys | 115 | import sys |
116 | 116 | ||
117 | workdir = d.getVar('WORKDIR', 1) | 117 | workdir = d.getVar('WORKDIR', True) |
118 | if not workdir: | 118 | if not workdir: |
119 | bb.error("WORKDIR not defined, unable to package") | 119 | bb.error("WORKDIR not defined, unable to package") |
120 | return | 120 | return |
121 | 121 | ||
122 | labels = d.getVar('LABELS', 1) | 122 | labels = d.getVar('LABELS', True) |
123 | if not labels: | 123 | if not labels: |
124 | bb.debug(1, "LABELS not defined, nothing to do") | 124 | bb.debug(1, "LABELS not defined, nothing to do") |
125 | return | 125 | return |
@@ -128,7 +128,7 @@ python build_syslinux_cfg () { | |||
128 | bb.debug(1, "No labels, nothing to do") | 128 | bb.debug(1, "No labels, nothing to do") |
129 | return | 129 | return |
130 | 130 | ||
131 | cfile = d.getVar('SYSLINUXCFG', 1) | 131 | cfile = d.getVar('SYSLINUXCFG', True) |
132 | if not cfile: | 132 | if not cfile: |
133 | raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') | 133 | raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') |
134 | 134 | ||
@@ -139,7 +139,7 @@ python build_syslinux_cfg () { | |||
139 | 139 | ||
140 | cfgfile.write('# Automatically created by OE\n') | 140 | cfgfile.write('# Automatically created by OE\n') |
141 | 141 | ||
142 | opts = d.getVar('SYSLINUX_OPTS', 1) | 142 | opts = d.getVar('SYSLINUX_OPTS', True) |
143 | 143 | ||
144 | if opts: | 144 | if opts: |
145 | for opt in opts.split(';'): | 145 | for opt in opts.split(';'): |
@@ -148,26 +148,26 @@ python build_syslinux_cfg () { | |||
148 | cfgfile.write('ALLOWOPTIONS 1\n'); | 148 | cfgfile.write('ALLOWOPTIONS 1\n'); |
149 | cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) | 149 | cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) |
150 | 150 | ||
151 | timeout = d.getVar('SYSLINUX_TIMEOUT', 1) | 151 | timeout = d.getVar('SYSLINUX_TIMEOUT', True) |
152 | 152 | ||
153 | if timeout: | 153 | if timeout: |
154 | cfgfile.write('TIMEOUT %s\n' % timeout) | 154 | cfgfile.write('TIMEOUT %s\n' % timeout) |
155 | else: | 155 | else: |
156 | cfgfile.write('TIMEOUT 50\n') | 156 | cfgfile.write('TIMEOUT 50\n') |
157 | 157 | ||
158 | prompt = d.getVar('SYSLINUX_PROMPT', 1) | 158 | prompt = d.getVar('SYSLINUX_PROMPT', True) |
159 | if prompt: | 159 | if prompt: |
160 | cfgfile.write('PROMPT %s\n' % prompt) | 160 | cfgfile.write('PROMPT %s\n' % prompt) |
161 | else: | 161 | else: |
162 | cfgfile.write('PROMPT 1\n') | 162 | cfgfile.write('PROMPT 1\n') |
163 | 163 | ||
164 | menu = d.getVar('AUTO_SYSLINUXMENU', 1) | 164 | menu = d.getVar('AUTO_SYSLINUXMENU', True) |
165 | 165 | ||
166 | # This is ugly. My bad. | 166 | # This is ugly. My bad. |
167 | 167 | ||
168 | if menu: | 168 | if menu: |
169 | bb.build.exec_func('build_syslinux_menu', d) | 169 | bb.build.exec_func('build_syslinux_menu', d) |
170 | mfile = d.getVar('SYSLINUXMENU', 1) | 170 | mfile = d.getVar('SYSLINUXMENU', True) |
171 | cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) | 171 | cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) |
172 | 172 | ||
173 | for label in labels.split(): | 173 | for label in labels.split(): |
@@ -182,8 +182,8 @@ python build_syslinux_cfg () { | |||
182 | 182 | ||
183 | cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label)) | 183 | cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label)) |
184 | 184 | ||
185 | append = localdata.getVar('APPEND', 1) | 185 | append = localdata.getVar('APPEND', True) |
186 | initrd = localdata.getVar('INITRD', 1) | 186 | initrd = localdata.getVar('INITRD', True) |
187 | 187 | ||
188 | if append: | 188 | if append: |
189 | cfgfile.write('APPEND ') | 189 | cfgfile.write('APPEND ') |
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass index 7891207a64..22c2fd3744 100644 --- a/meta/classes/task.bbclass +++ b/meta/classes/task.bbclass | |||
@@ -17,7 +17,7 @@ PACKAGE_ARCH = "all" | |||
17 | # to the list. Their dependencies (RRECOMMENDS) are handled as usual | 17 | # to the list. Their dependencies (RRECOMMENDS) are handled as usual |
18 | # by package_depchains in a following step. | 18 | # by package_depchains in a following step. |
19 | python () { | 19 | python () { |
20 | packages = d.getVar('PACKAGES', 1).split() | 20 | packages = d.getVar('PACKAGES', True).split() |
21 | genpackages = [] | 21 | genpackages = [] |
22 | for pkg in packages: | 22 | for pkg in packages: |
23 | for postfix in ['-dbg', '-dev']: | 23 | for postfix in ['-dbg', '-dev']: |
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass index 0e8e58bd03..ae58344d3d 100644 --- a/meta/classes/update-alternatives.bbclass +++ b/meta/classes/update-alternatives.bbclass | |||
@@ -94,22 +94,22 @@ python __anonymous() { | |||
94 | } | 94 | } |
95 | 95 | ||
96 | python populate_packages_prepend () { | 96 | python populate_packages_prepend () { |
97 | pkg = d.getVar('PN', 1) | 97 | pkg = d.getVar('PN', True) |
98 | bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) | 98 | bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) |
99 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) | 99 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
100 | if not postinst: | 100 | if not postinst: |
101 | postinst = '#!/bin/sh\n' | 101 | postinst = '#!/bin/sh\n' |
102 | if d.getVar('ALTERNATIVE_LINKS') != None: | 102 | if d.getVar('ALTERNATIVE_LINKS') != None: |
103 | postinst += d.getVar('update_alternatives_batch_postinst', 1) | 103 | postinst += d.getVar('update_alternatives_batch_postinst', True) |
104 | else: | 104 | else: |
105 | postinst += d.getVar('update_alternatives_postinst', 1) | 105 | postinst += d.getVar('update_alternatives_postinst', True) |
106 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 106 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
107 | postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) | 107 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) |
108 | if not postrm: | 108 | if not postrm: |
109 | postrm = '#!/bin/sh\n' | 109 | postrm = '#!/bin/sh\n' |
110 | if d.getVar('ALTERNATIVE_LINKS') != None: | 110 | if d.getVar('ALTERNATIVE_LINKS') != None: |
111 | postrm += d.getVar('update_alternatives_batch_postrm', 1) | 111 | postrm += d.getVar('update_alternatives_batch_postrm', True) |
112 | else: | 112 | else: |
113 | postrm += d.getVar('update_alternatives_postrm', 1) | 113 | postrm += d.getVar('update_alternatives_postrm', True) |
114 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 114 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
115 | } | 115 | } |
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass index db88a8e764..bddead4a25 100644 --- a/meta/classes/update-rc.d.bbclass +++ b/meta/classes/update-rc.d.bbclass | |||
@@ -47,7 +47,7 @@ python populate_packages_prepend () { | |||
47 | def update_rcd_package(pkg): | 47 | def update_rcd_package(pkg): |
48 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) | 48 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) |
49 | localdata = bb.data.createCopy(d) | 49 | localdata = bb.data.createCopy(d) |
50 | overrides = localdata.getVar("OVERRIDES", 1) | 50 | overrides = localdata.getVar("OVERRIDES", True) |
51 | localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) | 51 | localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) |
52 | bb.data.update_data(localdata) | 52 | bb.data.update_data(localdata) |
53 | 53 | ||
@@ -56,28 +56,28 @@ python populate_packages_prepend () { | |||
56 | execute on the target. Not doing so may cause update_rc.d postinst invoked | 56 | execute on the target. Not doing so may cause update_rc.d postinst invoked |
57 | twice to cause unwanted warnings. | 57 | twice to cause unwanted warnings. |
58 | """ | 58 | """ |
59 | postinst = localdata.getVar('pkg_postinst', 1) | 59 | postinst = localdata.getVar('pkg_postinst', True) |
60 | if not postinst: | 60 | if not postinst: |
61 | postinst = '#!/bin/sh\n' | 61 | postinst = '#!/bin/sh\n' |
62 | postinst += localdata.getVar('updatercd_postinst', 1) | 62 | postinst += localdata.getVar('updatercd_postinst', True) |
63 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 63 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
64 | 64 | ||
65 | prerm = localdata.getVar('pkg_prerm', 1) | 65 | prerm = localdata.getVar('pkg_prerm', True) |
66 | if not prerm: | 66 | if not prerm: |
67 | prerm = '#!/bin/sh\n' | 67 | prerm = '#!/bin/sh\n' |
68 | prerm += localdata.getVar('updatercd_prerm', 1) | 68 | prerm += localdata.getVar('updatercd_prerm', True) |
69 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 69 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
70 | 70 | ||
71 | postrm = localdata.getVar('pkg_postrm', 1) | 71 | postrm = localdata.getVar('pkg_postrm', True) |
72 | if not postrm: | 72 | if not postrm: |
73 | postrm = '#!/bin/sh\n' | 73 | postrm = '#!/bin/sh\n' |
74 | postrm += localdata.getVar('updatercd_postrm', 1) | 74 | postrm += localdata.getVar('updatercd_postrm', True) |
75 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 75 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
76 | 76 | ||
77 | pkgs = d.getVar('INITSCRIPT_PACKAGES', 1) | 77 | pkgs = d.getVar('INITSCRIPT_PACKAGES', True) |
78 | if pkgs == None: | 78 | if pkgs == None: |
79 | pkgs = d.getVar('UPDATERCPN', 1) | 79 | pkgs = d.getVar('UPDATERCPN', True) |
80 | packages = (d.getVar('PACKAGES', 1) or "").split() | 80 | packages = (d.getVar('PACKAGES', True) or "").split() |
81 | if not pkgs in packages and packages != []: | 81 | if not pkgs in packages and packages != []: |
82 | pkgs = packages[0] | 82 | pkgs = packages[0] |
83 | for pkg in pkgs.split(): | 83 | for pkg in pkgs.split(): |
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index 009ef1fd04..bbdf6e159b 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
@@ -24,7 +24,7 @@ python do_clean() { | |||
24 | bb.note("Removing " + dir) | 24 | bb.note("Removing " + dir) |
25 | oe.path.remove(dir) | 25 | oe.path.remove(dir) |
26 | 26 | ||
27 | for f in (d.getVar('CLEANFUNCS', 1) or '').split(): | 27 | for f in (d.getVar('CLEANFUNCS', True) or '').split(): |
28 | bb.build.exec_func(f, d) | 28 | bb.build.exec_func(f, d) |
29 | } | 29 | } |
30 | 30 | ||
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass index 103fa9a546..3b5946308c 100644 --- a/meta/classes/utils.bbclass +++ b/meta/classes/utils.bbclass | |||
@@ -336,7 +336,7 @@ def base_set_filespath(path, d): | |||
336 | if extrapaths != "": | 336 | if extrapaths != "": |
337 | path = extrapaths.split(":") + path | 337 | path = extrapaths.split(":") + path |
338 | # The ":" ensures we have an 'empty' override | 338 | # The ":" ensures we have an 'empty' override |
339 | overrides = (d.getVar("OVERRIDES", 1) or "") + ":" | 339 | overrides = (d.getVar("OVERRIDES", True) or "") + ":" |
340 | for p in path: | 340 | for p in path: |
341 | if p != "": | 341 | if p != "": |
342 | for o in overrides.split(":"): | 342 | for o in overrides.split(":"): |
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index 18879c82fe..c8d9a31074 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf | |||
@@ -102,7 +102,7 @@ ABIEXTENSION ??= "" | |||
102 | TARGET_ARCH = "${TUNE_ARCH}" | 102 | TARGET_ARCH = "${TUNE_ARCH}" |
103 | TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" | 103 | TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" |
104 | TARGET_VENDOR = "-oe" | 104 | TARGET_VENDOR = "-oe" |
105 | TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', 1), ''][d.getVar('TARGET_OS', 1) == ('' or 'custom')]}" | 105 | TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', True), ''][d.getVar('TARGET_OS', True) == ('' or 'custom')]}" |
106 | TARGET_PREFIX = "${TARGET_SYS}-" | 106 | TARGET_PREFIX = "${TARGET_SYS}-" |
107 | TARGET_CC_ARCH = "${TUNE_CCARGS}" | 107 | TARGET_CC_ARCH = "${TUNE_CCARGS}" |
108 | TARGET_LD_ARCH = "${TUNE_LDARGS}" | 108 | TARGET_LD_ARCH = "${TUNE_LDARGS}" |
@@ -111,7 +111,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}" | |||
111 | SDK_ARCH = "${BUILD_ARCH}" | 111 | SDK_ARCH = "${BUILD_ARCH}" |
112 | SDK_OS = "${BUILD_OS}" | 112 | SDK_OS = "${BUILD_OS}" |
113 | SDK_VENDOR = "-oesdk" | 113 | SDK_VENDOR = "-oesdk" |
114 | SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', 1), ''][d.getVar('SDK_OS', 1) == ('' or 'custom')]}" | 114 | SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', True), ''][d.getVar('SDK_OS', True) == ('' or 'custom')]}" |
115 | SDK_PREFIX = "${SDK_SYS}-" | 115 | SDK_PREFIX = "${SDK_SYS}-" |
116 | SDK_CC_ARCH = "${BUILD_CC_ARCH}" | 116 | SDK_CC_ARCH = "${BUILD_CC_ARCH}" |
117 | SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" | 117 | SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" |
@@ -119,7 +119,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}" | |||
119 | SDK_AS_ARCH = "${BUILD_AS_ARCH}" | 119 | SDK_AS_ARCH = "${BUILD_AS_ARCH}" |
120 | 120 | ||
121 | PACKAGE_ARCH = "${TUNE_PKGARCH}" | 121 | PACKAGE_ARCH = "${TUNE_PKGARCH}" |
122 | MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', 1), d.getVar('MACHINE', 1)][bool(d.getVar('MACHINE', 1))].replace('-', '_')}" | 122 | MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', True), d.getVar('MACHINE', True)][bool(d.getVar('MACHINE', True))].replace('-', '_')}" |
123 | PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" | 123 | PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" |
124 | PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" | 124 | PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" |
125 | # MACHINE_ARCH shouldn't be included here as a variable dependency | 125 | # MACHINE_ARCH shouldn't be included here as a variable dependency |
@@ -300,14 +300,14 @@ FILES_${PN}-locale = "${datadir}/locale" | |||
300 | FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}" | 300 | FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}" |
301 | # FILESPATH is set in base.bbclass | 301 | # FILESPATH is set in base.bbclass |
302 | #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" | 302 | #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" |
303 | FILESDIR = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" | 303 | FILESDIR = "${@bb.which(d.getVar('FILESPATH', True), '.')}" |
304 | 304 | ||
305 | ################################################################## | 305 | ################################################################## |
306 | # General work and output directories for the build system. | 306 | # General work and output directories for the build system. |
307 | ################################################################## | 307 | ################################################################## |
308 | 308 | ||
309 | TMPDIR ?= "${TOPDIR}/tmp" | 309 | TMPDIR ?= "${TOPDIR}/tmp" |
310 | CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" | 310 | CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}" |
311 | # The persistent cache should be shared by all builds | 311 | # The persistent cache should be shared by all builds |
312 | PERSISTENT_DIR = "${TMPDIR}/cache" | 312 | PERSISTENT_DIR = "${TMPDIR}/cache" |
313 | LOG_DIR = "${TMPDIR}/log" | 313 | LOG_DIR = "${TMPDIR}/log" |
@@ -408,7 +408,7 @@ export PATH | |||
408 | # Build utility info. | 408 | # Build utility info. |
409 | ################################################################## | 409 | ################################################################## |
410 | 410 | ||
411 | CCACHE = "${@bb.which(d.getVar('PATH', 1), 'ccache') and 'ccache '}" | 411 | CCACHE = "${@bb.which(d.getVar('PATH', True), 'ccache') and 'ccache '}" |
412 | TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" | 412 | TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" |
413 | 413 | ||
414 | export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" | 414 | export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" |
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf index 064c1e0537..be28510959 100644 --- a/meta/conf/distro/defaultsetup.conf +++ b/meta/conf/distro/defaultsetup.conf | |||
@@ -13,7 +13,7 @@ require conf/distro/include/tclibc-${TCLIBC}.inc | |||
13 | TCLIBCAPPEND ?= "-${TCLIBC}" | 13 | TCLIBCAPPEND ?= "-${TCLIBC}" |
14 | TMPDIR .= "${TCLIBCAPPEND}" | 14 | TMPDIR .= "${TCLIBCAPPEND}" |
15 | 15 | ||
16 | CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" | 16 | CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}" |
17 | 17 | ||
18 | USER_CLASSES ?= "" | 18 | USER_CLASSES ?= "" |
19 | PACKAGE_CLASSES ?= "package_ipk" | 19 | PACKAGE_CLASSES ?= "package_ipk" |
diff --git a/meta/conf/machine/include/arm/feature-arm-thumb.inc b/meta/conf/machine/include/arm/feature-arm-thumb.inc index b7d6061a9b..d094529d2d 100644 --- a/meta/conf/machine/include/arm/feature-arm-thumb.inc +++ b/meta/conf/machine/include/arm/feature-arm-thumb.inc | |||
@@ -5,7 +5,7 @@ | |||
5 | # but requires more instructions (140% for 70% smaller code) so may be | 5 | # but requires more instructions (140% for 70% smaller code) so may be |
6 | # slower. | 6 | # slower. |
7 | TUNEVALID[thumb] = "Use thumb instructions instead of ARM" | 7 | TUNEVALID[thumb] = "Use thumb instructions instead of ARM" |
8 | ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" | 8 | ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}" |
9 | TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" | 9 | TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" |
10 | OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" | 10 | OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" |
11 | 11 | ||
diff --git a/meta/conf/machine/include/tune-thumb.inc b/meta/conf/machine/include/tune-thumb.inc index 214e3b6ff2..a2392c2d59 100644 --- a/meta/conf/machine/include/tune-thumb.inc +++ b/meta/conf/machine/include/tune-thumb.inc | |||
@@ -16,15 +16,15 @@ THUMB_INTERWORK ?= "yes" | |||
16 | # arm system and vice versa. It is strongly recommended that DISTROs not | 16 | # arm system and vice versa. It is strongly recommended that DISTROs not |
17 | # turn this off - the actual cost is very small. | 17 | # turn this off - the actual cost is very small. |
18 | 18 | ||
19 | OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" | 19 | OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}" |
20 | OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" | 20 | OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', True) == 'yes']}" |
21 | OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" | 21 | OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" |
22 | 22 | ||
23 | # Compiler and linker options for application code and kernel code. These | 23 | # Compiler and linker options for application code and kernel code. These |
24 | # options ensure that the compiler has the correct settings for the selected | 24 | # options ensure that the compiler has the correct settings for the selected |
25 | # instruction set and interworking. | 25 | # instruction set and interworking. |
26 | ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" | 26 | ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', True) == 'yes']}" |
27 | ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" | 27 | ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}" |
28 | 28 | ||
29 | # | 29 | # |
30 | TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" | 30 | TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" |
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index a5b31b8f8d..17b7e1f27d 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -220,7 +220,7 @@ class GitApplyTree(PatchTree): | |||
220 | 220 | ||
221 | class QuiltTree(PatchSet): | 221 | class QuiltTree(PatchSet): |
222 | def _runcmd(self, args, run = True): | 222 | def _runcmd(self, args, run = True): |
223 | quiltrc = self.d.getVar('QUILTRCFILE', 1) | 223 | quiltrc = self.d.getVar('QUILTRCFILE', True) |
224 | if not run: | 224 | if not run: |
225 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args | 225 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args |
226 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) | 226 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) |
@@ -398,7 +398,7 @@ class UserResolver(Resolver): | |||
398 | # Patch application failed | 398 | # Patch application failed |
399 | patchcmd = self.patchset.Push(True, False, False) | 399 | patchcmd = self.patchset.Push(True, False, False) |
400 | 400 | ||
401 | t = self.patchset.d.getVar('T', 1) | 401 | t = self.patchset.d.getVar('T', True) |
402 | if not t: | 402 | if not t: |
403 | bb.msg.fatal("Build", "T not set") | 403 | bb.msg.fatal("Build", "T not set") |
404 | bb.utils.mkdirhier(t) | 404 | bb.utils.mkdirhier(t) |
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index 8eaa3c5da4..683b09701c 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py | |||
@@ -42,7 +42,7 @@ def relative(src, dest): | |||
42 | 42 | ||
43 | def format_display(path, metadata): | 43 | def format_display(path, metadata): |
44 | """ Prepare a path for display to the user. """ | 44 | """ Prepare a path for display to the user. """ |
45 | rel = relative(metadata.getVar("TOPDIR", 1), path) | 45 | rel = relative(metadata.getVar("TOPDIR", True), path) |
46 | if len(rel) > len(path): | 46 | if len(rel) > len(path): |
47 | return path | 47 | return path |
48 | else: | 48 | else: |
diff --git a/meta/recipes-bsp/grub/grub_0.97.bb b/meta/recipes-bsp/grub/grub_0.97.bb index 6ec66e3b0f..af489fecea 100644 --- a/meta/recipes-bsp/grub/grub_0.97.bb +++ b/meta/recipes-bsp/grub/grub_0.97.bb | |||
@@ -23,7 +23,7 @@ inherit autotools | |||
23 | 23 | ||
24 | python __anonymous () { | 24 | python __anonymous () { |
25 | import re | 25 | import re |
26 | host = d.getVar('HOST_SYS', 1) | 26 | host = d.getVar('HOST_SYS', True) |
27 | if not re.match('i.86.*-linux', host): | 27 | if not re.match('i.86.*-linux', host): |
28 | raise bb.parse.SkipPackage("incompatible with host %s" % host) | 28 | raise bb.parse.SkipPackage("incompatible with host %s" % host) |
29 | } | 29 | } |
diff --git a/meta/recipes-core/eglibc/eglibc-package.inc b/meta/recipes-core/eglibc/eglibc-package.inc index d89871718e..9e45fc1fd5 100644 --- a/meta/recipes-core/eglibc/eglibc-package.inc +++ b/meta/recipes-core/eglibc/eglibc-package.inc | |||
@@ -8,10 +8,10 @@ | |||
8 | 8 | ||
9 | python __anonymous () { | 9 | python __anonymous () { |
10 | import bb, re | 10 | import bb, re |
11 | uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', 1)) != None) | 11 | uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', True)) != None) |
12 | if uc_os: | 12 | if uc_os: |
13 | raise bb.parse.SkipPackage("incompatible with target %s" % | 13 | raise bb.parse.SkipPackage("incompatible with target %s" % |
14 | d.getVar('TARGET_OS', 1)) | 14 | d.getVar('TARGET_OS', True)) |
15 | } | 15 | } |
16 | 16 | ||
17 | # Set this to zero if you don't want ldconfig in the output package | 17 | # Set this to zero if you don't want ldconfig in the output package |
diff --git a/meta/recipes-core/eglibc/eglibc_2.13.bb b/meta/recipes-core/eglibc/eglibc_2.13.bb index 8555985335..e718a1f93e 100644 --- a/meta/recipes-core/eglibc/eglibc_2.13.bb +++ b/meta/recipes-core/eglibc/eglibc_2.13.bb | |||
@@ -52,10 +52,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR | |||
52 | 52 | ||
53 | python __anonymous () { | 53 | python __anonymous () { |
54 | import bb, re | 54 | import bb, re |
55 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) | 55 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', True)) != None) |
56 | if uc_os: | 56 | if uc_os: |
57 | raise bb.parse.SkipPackage("incompatible with target %s" % | 57 | raise bb.parse.SkipPackage("incompatible with target %s" % |
58 | d.getVar('TARGET_OS', 1)) | 58 | d.getVar('TARGET_OS', True)) |
59 | } | 59 | } |
60 | 60 | ||
61 | export libc_cv_slibdir = "${base_libdir}" | 61 | export libc_cv_slibdir = "${base_libdir}" |
diff --git a/meta/recipes-core/eglibc/eglibc_2.15.bb b/meta/recipes-core/eglibc/eglibc_2.15.bb index 6d66342d59..b4c1ea5a99 100644 --- a/meta/recipes-core/eglibc/eglibc_2.15.bb +++ b/meta/recipes-core/eglibc/eglibc_2.15.bb | |||
@@ -55,10 +55,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR | |||
55 | 55 | ||
56 | python __anonymous () { | 56 | python __anonymous () { |
57 | import bb, re | 57 | import bb, re |
58 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) | 58 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', True)) != None) |
59 | if uc_os: | 59 | if uc_os: |
60 | raise bb.parse.SkipPackage("incompatible with target %s" % | 60 | raise bb.parse.SkipPackage("incompatible with target %s" % |
61 | d.getVar('TARGET_OS', 1)) | 61 | d.getVar('TARGET_OS', True)) |
62 | } | 62 | } |
63 | 63 | ||
64 | export libc_cv_slibdir = "${base_libdir}" | 64 | export libc_cv_slibdir = "${base_libdir}" |
diff --git a/meta/recipes-core/libxml/libxml2.inc b/meta/recipes-core/libxml/libxml2.inc index 0979acd05d..2eecbd30e7 100644 --- a/meta/recipes-core/libxml/libxml2.inc +++ b/meta/recipes-core/libxml/libxml2.inc | |||
@@ -33,7 +33,7 @@ export LDFLAGS += "-ldl" | |||
33 | 33 | ||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | # autonamer would call this libxml2-2, but we don't want that | 35 | # autonamer would call this libxml2-2, but we don't want that |
36 | if d.getVar('DEBIAN_NAMES', 1): | 36 | if d.getVar('DEBIAN_NAMES', True): |
37 | d.setVar('PKG_libxml2', '${MLPREFIX}libxml2') | 37 | d.setVar('PKG_libxml2', '${MLPREFIX}libxml2') |
38 | } | 38 | } |
39 | 39 | ||
diff --git a/meta/recipes-core/tasks/task-base.bb b/meta/recipes-core/tasks/task-base.bb index 2032432844..ddae7aebee 100644 --- a/meta/recipes-core/tasks/task-base.bb +++ b/meta/recipes-core/tasks/task-base.bb | |||
@@ -126,8 +126,8 @@ python __anonymous () { | |||
126 | 126 | ||
127 | import bb | 127 | import bb |
128 | 128 | ||
129 | distro_features = set(d.getVar("DISTRO_FEATURES", 1).split()) | 129 | distro_features = set(d.getVar("DISTRO_FEATURES", True).split()) |
130 | machine_features= set(d.getVar("MACHINE_FEATURES", 1).split()) | 130 | machine_features= set(d.getVar("MACHINE_FEATURES", True).split()) |
131 | 131 | ||
132 | if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): | 132 | if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): |
133 | d.setVar("ADD_BT", "task-base-bluetooth") | 133 | d.setVar("ADD_BT", "task-base-bluetooth") |
diff --git a/meta/recipes-core/tasks/task-core-sdk.bb b/meta/recipes-core/tasks/task-core-sdk.bb index d940e39318..ec6cdccdb5 100644 --- a/meta/recipes-core/tasks/task-core-sdk.bb +++ b/meta/recipes-core/tasks/task-core-sdk.bb | |||
@@ -50,7 +50,7 @@ RDEPENDS_task-core-sdk = "\ | |||
50 | 50 | ||
51 | #python generate_sdk_pkgs () { | 51 | #python generate_sdk_pkgs () { |
52 | # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] | 52 | # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] |
53 | # pkgs = d.getVar('PACKAGES', 1).split() | 53 | # pkgs = d.getVar('PACKAGES', True).split() |
54 | # for pkg in poky_pkgs.split(): | 54 | # for pkg in poky_pkgs.split(): |
55 | # newpkg = pkg.replace('task-core', 'task-core-sdk') | 55 | # newpkg = pkg.replace('task-core', 'task-core-sdk') |
56 | # | 56 | # |
diff --git a/meta/recipes-core/uclibc/uclibc-config.inc b/meta/recipes-core/uclibc/uclibc-config.inc index 8bb094800d..0e0c1885ac 100644 --- a/meta/recipes-core/uclibc/uclibc-config.inc +++ b/meta/recipes-core/uclibc/uclibc-config.inc | |||
@@ -35,7 +35,7 @@ def map_uclibc_arch(a, d): | |||
35 | """Return the uClibc architecture for the given TARGET_ARCH.""" | 35 | """Return the uClibc architecture for the given TARGET_ARCH.""" |
36 | import re | 36 | import re |
37 | 37 | ||
38 | valid_archs = d.getVar('valid_archs', 1).split() | 38 | valid_archs = d.getVar('valid_archs', True).split() |
39 | 39 | ||
40 | if re.match('^(arm|sa110).*', a): return 'arm' | 40 | if re.match('^(arm|sa110).*', a): return 'arm' |
41 | elif re.match('^(i.86|athlon)$', a): return 'i386' | 41 | elif re.match('^(i.86|athlon)$', a): return 'i386' |
@@ -50,14 +50,14 @@ def map_uclibc_arch(a, d): | |||
50 | else: | 50 | else: |
51 | bb.error("cannot map '%s' to a uClibc architecture" % a) | 51 | bb.error("cannot map '%s' to a uClibc architecture" % a) |
52 | 52 | ||
53 | export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d)}" | 53 | export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', True), d)}" |
54 | 54 | ||
55 | def map_uclibc_abi(o, d): | 55 | def map_uclibc_abi(o, d): |
56 | """Return the uClibc ABI for the given TARGET_OS.""" | 56 | """Return the uClibc ABI for the given TARGET_OS.""" |
57 | import re | 57 | import re |
58 | 58 | ||
59 | arch = d.getVar('TARGET_ARCH', 1) | 59 | arch = d.getVar('TARGET_ARCH', True) |
60 | if map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d) == "arm": | 60 | if map_uclibc_arch(d.getVar('TARGET_ARCH', True), d) == "arm": |
61 | if re.match('.*eabi$', o): return 'ARM_EABI' | 61 | if re.match('.*eabi$', o): return 'ARM_EABI' |
62 | else: return 'ARM_OABI' | 62 | else: return 'ARM_OABI' |
63 | # FIXME: This is inaccurate! Handle o32, n32, n64 | 63 | # FIXME: This is inaccurate! Handle o32, n32, n64 |
@@ -65,7 +65,7 @@ def map_uclibc_abi(o, d): | |||
65 | elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' | 65 | elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' |
66 | return "" | 66 | return "" |
67 | 67 | ||
68 | export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', 1), d)}" | 68 | export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', True), d)}" |
69 | 69 | ||
70 | def map_uclibc_endian(a, d): | 70 | def map_uclibc_endian(a, d): |
71 | """Return the uClibc endianess for the given TARGET_ARCH.""" | 71 | """Return the uClibc endianess for the given TARGET_ARCH.""" |
@@ -79,7 +79,7 @@ def map_uclibc_endian(a, d): | |||
79 | return 'BIG' | 79 | return 'BIG' |
80 | return 'LITTLE' | 80 | return 'LITTLE' |
81 | 81 | ||
82 | export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', 1), d)}" | 82 | export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', True), d)}" |
83 | 83 | ||
84 | # internal helper | 84 | # internal helper |
85 | def uclibc_cfg(feature, features, tokens, cnf, rem): | 85 | def uclibc_cfg(feature, features, tokens, cnf, rem): |
diff --git a/meta/recipes-core/uclibc/uclibc.inc b/meta/recipes-core/uclibc/uclibc.inc index 5e9e2e9f37..69d8921e3e 100644 --- a/meta/recipes-core/uclibc/uclibc.inc +++ b/meta/recipes-core/uclibc/uclibc.inc | |||
@@ -124,9 +124,9 @@ configmangle = '/^KERNEL_HEADERS/d; \ | |||
124 | /^SHARED_LIB_LOADER_PREFIX/d; \ | 124 | /^SHARED_LIB_LOADER_PREFIX/d; \ |
125 | /^UCLIBC_EXTRA_CFLAGS/d; \ | 125 | /^UCLIBC_EXTRA_CFLAGS/d; \ |
126 | s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ | 126 | s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ |
127 | ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", 1) != "arm"]} \ | 127 | ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", True) != "arm"]} \ |
128 | ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", 1) == "yes"]} \ | 128 | ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", True) == "yes"]} \ |
129 | ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ | 129 | ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", True) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ |
130 | /^CROSS/d; \ | 130 | /^CROSS/d; \ |
131 | /^TARGET_ARCH=/d; \ | 131 | /^TARGET_ARCH=/d; \ |
132 | /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ | 132 | /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ |
diff --git a/meta/recipes-devtools/apt/apt-native.inc b/meta/recipes-devtools/apt/apt-native.inc index 4ca0223988..4bf5105dd2 100644 --- a/meta/recipes-devtools/apt/apt-native.inc +++ b/meta/recipes-devtools/apt/apt-native.inc | |||
@@ -20,7 +20,7 @@ python do_install_config () { | |||
20 | 20 | ||
21 | data = bb.data.expand(data, d) | 21 | data = bb.data.expand(data, d) |
22 | 22 | ||
23 | outdir = os.path.join(d.getVar('D', 1), d.getVar('sysconfdir', 1), 'apt') | 23 | outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt') |
24 | if not os.path.exists(outdir): | 24 | if not os.path.exists(outdir): |
25 | os.makedirs(outdir) | 25 | os.makedirs(outdir) |
26 | outpath = os.path.join(outdir, 'apt.conf.sample') | 26 | outpath = os.path.join(outdir, 'apt.conf.sample') |
diff --git a/meta/recipes-devtools/apt/apt-package.inc b/meta/recipes-devtools/apt/apt-package.inc index d644b09745..736672c26c 100644 --- a/meta/recipes-devtools/apt/apt-package.inc +++ b/meta/recipes-devtools/apt/apt-package.inc | |||
@@ -59,15 +59,15 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \ | |||
59 | ${localstatedir} ${sysconfdir} \ | 59 | ${localstatedir} ${sysconfdir} \ |
60 | ${libdir}/dpkg" | 60 | ${libdir}/dpkg" |
61 | FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" | 61 | FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" |
62 | FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', 1))} \ | 62 | FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', True))} \ |
63 | ${docdir}/apt" | 63 | ${docdir}/apt" |
64 | FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}" | 64 | FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))}" |
65 | FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" | 65 | FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" |
66 | 66 | ||
67 | do_install () { | 67 | do_install () { |
68 | set -x | 68 | set -x |
69 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', 1))} | 69 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', True))} |
70 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))} | 70 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))} |
71 | install -d ${D}${bindir} | 71 | install -d ${D}${bindir} |
72 | install -m 0755 bin/apt-cdrom ${D}${bindir}/ | 72 | install -m 0755 bin/apt-cdrom ${D}${bindir}/ |
73 | install -m 0755 bin/apt-get ${D}${bindir}/ | 73 | install -m 0755 bin/apt-get ${D}${bindir}/ |
diff --git a/meta/recipes-devtools/automake/automake.inc b/meta/recipes-devtools/automake/automake.inc index c259673d2f..7eef9ec7b9 100644 --- a/meta/recipes-devtools/automake/automake.inc +++ b/meta/recipes-devtools/automake/automake.inc | |||
@@ -9,6 +9,6 @@ SRC_URI = "${GNU_MIRROR}/automake/automake-${PV}.tar.bz2 " | |||
9 | 9 | ||
10 | inherit autotools | 10 | inherit autotools |
11 | 11 | ||
12 | export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', 1))}" | 12 | export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', True))}" |
13 | 13 | ||
14 | FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" | 14 | FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" |
diff --git a/meta/recipes-devtools/cmake/cmake_2.8.5.bb b/meta/recipes-devtools/cmake/cmake_2.8.5.bb index 3e2a218385..6145345b96 100644 --- a/meta/recipes-devtools/cmake/cmake_2.8.5.bb +++ b/meta/recipes-devtools/cmake/cmake_2.8.5.bb | |||
@@ -13,8 +13,8 @@ SRC_URI[sha256sum] = "5e18bff75f01656c64f553412a8905527e1b85efaf3163c6fb81ea5aac | |||
13 | 13 | ||
14 | # Strip ${prefix} from ${docdir}, set result into docdir_stripped | 14 | # Strip ${prefix} from ${docdir}, set result into docdir_stripped |
15 | python () { | 15 | python () { |
16 | prefix=d.getVar("prefix", 1) | 16 | prefix=d.getVar("prefix", True) |
17 | docdir=d.getVar("docdir", 1) | 17 | docdir=d.getVar("docdir", True) |
18 | 18 | ||
19 | if not docdir.startswith(prefix): | 19 | if not docdir.startswith(prefix): |
20 | raise bb.build.FuncFailed('docdir must contain prefix as its prefix') | 20 | raise bb.build.FuncFailed('docdir must contain prefix as its prefix') |
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc index df6021ae61..bf6c2398e8 100644 --- a/meta/recipes-devtools/gcc/gcc-common.inc +++ b/meta/recipes-devtools/gcc/gcc-common.inc | |||
@@ -10,14 +10,14 @@ inherit autotools gettext | |||
10 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}" | 10 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}" |
11 | 11 | ||
12 | def get_gcc_fpu_setting(bb, d): | 12 | def get_gcc_fpu_setting(bb, d): |
13 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: | 13 | if d.getVar('TARGET_FPU', True) in [ 'soft' ]: |
14 | return "--with-float=soft" | 14 | return "--with-float=soft" |
15 | if d.getVar('TARGET_FPU', 1) in [ 'ppc-efd' ]: | 15 | if d.getVar('TARGET_FPU', True) in [ 'ppc-efd' ]: |
16 | return "--enable-e500_double" | 16 | return "--enable-e500_double" |
17 | return "" | 17 | return "" |
18 | 18 | ||
19 | def get_gcc_mips_plt_setting(bb, d): | 19 | def get_gcc_mips_plt_setting(bb, d): |
20 | if d.getVar('TARGET_ARCH', 1) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() : | 20 | if d.getVar('TARGET_ARCH', True) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() : |
21 | return "--with-mips-plt" | 21 | return "--with-mips-plt" |
22 | return "" | 22 | return "" |
23 | 23 | ||
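(Aside, not part of the patch: a minimal sketch of the API these hunks touch, assuming only a stock BitBake install where the bb modules are importable. The second argument to d.getVar() is the expand flag, so True states the boolean intent directly, whereas 1 only works because it happens to be truthy. Variable names below are illustrative only.)

    # Minimal sketch -- not part of this patch; assumes bitbake's 'bb' modules are on sys.path.
    import bb.data

    d = bb.data.init()            # fresh datastore, like the 'd' object seen in recipes
    d.setVar('BPN', 'gcc')
    d.setVar('PN', '${BPN}')

    print(d.getVar('PN', False))  # '${BPN}' -- raw, unexpanded value
    print(d.getVar('PN', True))   # 'gcc'    -- references expanded
    print(d.getVar('PN', 1))      # 'gcc'    -- same result, but hides that this is a boolean flag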
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc index 8ab799f7f1..7a96e914b0 100644 --- a/meta/recipes-devtools/gcc/gcc-configure-common.inc +++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc | |||
@@ -27,7 +27,7 @@ EXTRA_OECONF_INTERMEDIATE ?= "" | |||
27 | 27 | ||
28 | GCCMULTILIB = "--disable-multilib" | 28 | GCCMULTILIB = "--disable-multilib" |
29 | 29 | ||
30 | EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', 1) != 'no']} \ | 30 | EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', True) != 'no']} \ |
31 | --with-gnu-ld \ | 31 | --with-gnu-ld \ |
32 | --enable-shared \ | 32 | --enable-shared \ |
33 | --enable-languages=${LANGUAGES} \ | 33 | --enable-languages=${LANGUAGES} \ |
diff --git a/meta/recipes-devtools/guile/guile_2.0.3.bb b/meta/recipes-devtools/guile/guile_2.0.3.bb index 538ff46c99..c27a8c00be 100644 --- a/meta/recipes-devtools/guile/guile_2.0.3.bb +++ b/meta/recipes-devtools/guile/guile_2.0.3.bb | |||
@@ -31,7 +31,7 @@ BBCLASSEXTEND = "native" | |||
31 | 31 | ||
32 | DEPENDS = "libunistring bdwgc gmp libtool libffi" | 32 | DEPENDS = "libunistring bdwgc gmp libtool libffi" |
33 | # add guile-native only to the target recipe's DEPENDS | 33 | # add guile-native only to the target recipe's DEPENDS |
34 | DEPENDS += "${@['guile-native', ''][d.getVar('PN', 1) != 'guile']}" | 34 | DEPENDS += "${@['guile-native', ''][d.getVar('PN', True) != 'guile']}" |
35 | 35 | ||
36 | EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix', ''][bb.data.inherits_class('native',d)]}" | 36 | EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix', ''][bb.data.inherits_class('native',d)]}" |
37 | 37 | ||
diff --git a/meta/recipes-devtools/perl/perl_5.14.2.bb b/meta/recipes-devtools/perl/perl_5.14.2.bb index 6703b5cb3b..f9a6cc9d5d 100644 --- a/meta/recipes-devtools/perl/perl_5.14.2.bb +++ b/meta/recipes-devtools/perl/perl_5.14.2.bb | |||
@@ -287,7 +287,7 @@ FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore" | |||
287 | # packages (actually the non modules packages and not created too) | 287 | # packages (actually the non modules packages and not created too) |
288 | ALLOW_EMPTY_perl-modules = "1" | 288 | ALLOW_EMPTY_perl-modules = "1" |
289 | PACKAGES_append = " perl-modules " | 289 | PACKAGES_append = " perl-modules " |
290 | RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" | 290 | RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" |
291 | 291 | ||
292 | python populate_packages_prepend () { | 292 | python populate_packages_prepend () { |
293 | libdir = bb.data.expand('${libdir}/perl/${PV}', d) | 293 | libdir = bb.data.expand('${libdir}/perl/${PV}', d) |
diff --git a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb index eb80b54fc4..4e619c5f69 100644 --- a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb +++ b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb | |||
@@ -70,7 +70,7 @@ do_configure_prepend () { | |||
70 | python __anonymous () { | 70 | python __anonymous () { |
71 | import re | 71 | import re |
72 | 72 | ||
73 | pn = d.getVar("PN", 1) | 73 | pn = d.getVar("PN", True) |
74 | if not pn.endswith('-native') and not pn.endswith('-nativesdk'): | 74 | if not pn.endswith('-native') and not pn.endswith('-nativesdk'): |
75 | raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only") | 75 | raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only") |
76 | } | 76 | } |
diff --git a/meta/recipes-extended/cups/cups14.inc b/meta/recipes-extended/cups/cups14.inc index 8c01caf55e..2bf3ecfd6f 100644 --- a/meta/recipes-extended/cups/cups14.inc +++ b/meta/recipes-extended/cups/cups14.inc | |||
@@ -58,7 +58,7 @@ fakeroot do_install () { | |||
58 | 58 | ||
59 | python do_package_append() { | 59 | python do_package_append() { |
60 | # Change permissions back the way they were, they probably had a reason... | 60 | # Change permissions back the way they were, they probably had a reason... |
61 | workdir = d.getVar('WORKDIR', 1) | 61 | workdir = d.getVar('WORKDIR', True) |
62 | os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir) | 62 | os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir) |
63 | } | 63 | } |
64 | 64 | ||
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb index 121babd28d..603b38feef 100644 --- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb +++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb | |||
@@ -58,7 +58,7 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*" | |||
58 | PACKAGES_DYNAMIC_virtclass-native = "" | 58 | PACKAGES_DYNAMIC_virtclass-native = "" |
59 | 59 | ||
60 | python populate_packages_prepend () { | 60 | python populate_packages_prepend () { |
61 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) | 61 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", True) |
62 | 62 | ||
63 | loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d) | 63 | loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d) |
64 | 64 | ||
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb index c6077ecb3b..e45768d9da 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb | |||
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*" | |||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | import os.path | 35 | import os.path |
36 | 36 | ||
37 | prologue = d.getVar("postinst_prologue", 1) | 37 | prologue = d.getVar("postinst_prologue", True) |
38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) | 38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", True) |
39 | 39 | ||
40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
41 | loaders_root = os.path.join(gtk_libdir, 'loaders') | 41 | loaders_root = os.path.join(gtk_libdir, 'loaders') |
@@ -46,6 +46,6 @@ python populate_packages_prepend () { | |||
46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
48 | 48 | ||
49 | if (d.getVar('DEBIAN_NAMES', 1)): | 49 | if (d.getVar('DEBIAN_NAMES', True)): |
50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') | 50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
51 | } | 51 | } |
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb index 5fcb576bbe..1e7a87f5d2 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb | |||
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*" | |||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | import os.path | 35 | import os.path |
36 | 36 | ||
37 | prologue = d.getVar("postinst_prologue", 1) | 37 | prologue = d.getVar("postinst_prologue", True) |
38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) | 38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", True) |
39 | 39 | ||
40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
41 | loaders_root = os.path.join(gtk_libdir, 'loaders') | 41 | loaders_root = os.path.join(gtk_libdir, 'loaders') |
@@ -46,6 +46,6 @@ python populate_packages_prepend () { | |||
46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
48 | 48 | ||
49 | if (d.getVar('DEBIAN_NAMES', 1)): | 49 | if (d.getVar('DEBIAN_NAMES', True)): |
50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') | 50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
51 | } | 51 | } |
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.24.8.bb b/meta/recipes-gnome/gtk+/gtk+_2.24.8.bb index dab66c7f59..c27826cedb 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.24.8.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.24.8.bb | |||
@@ -41,7 +41,7 @@ PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*" | |||
41 | python populate_packages_prepend () { | 41 | python populate_packages_prepend () { |
42 | import os.path | 42 | import os.path |
43 | 43 | ||
44 | prologue = d.getVar("postinst_prologue", 1) | 44 | prologue = d.getVar("postinst_prologue", True) |
45 | 45 | ||
46 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 46 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
47 | immodules_root = os.path.join(gtk_libdir, 'immodules') | 47 | immodules_root = os.path.join(gtk_libdir, 'immodules') |
@@ -50,6 +50,6 @@ python populate_packages_prepend () { | |||
50 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 50 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
51 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 51 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
52 | 52 | ||
53 | if (d.getVar('DEBIAN_NAMES', 1)): | 53 | if (d.getVar('DEBIAN_NAMES', True)): |
54 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') | 54 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
55 | } | 55 | } |
diff --git a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb index 233a6ee773..3ca6e81fae 100644 --- a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb +++ b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb | |||
@@ -31,8 +31,8 @@ inherit gnome | |||
31 | python populate_packages_prepend() { | 31 | python populate_packages_prepend() { |
32 | import os.path | 32 | import os.path |
33 | 33 | ||
34 | engines_root = os.path.join(d.getVar('libdir', 1), "gtk-2.0/2.10.0/engines") | 34 | engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines") |
35 | themes_root = os.path.join(d.getVar('datadir', 1), "themes") | 35 | themes_root = os.path.join(d.getVar('datadir', True), "themes") |
36 | 36 | ||
37 | do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='') | 37 | do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='') |
38 | do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='') | 38 | do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='') |
diff --git a/meta/recipes-graphics/cairo/cairo-fpu.inc b/meta/recipes-graphics/cairo/cairo-fpu.inc index 8c0ecfde94..bf5a1b01e1 100644 --- a/meta/recipes-graphics/cairo/cairo-fpu.inc +++ b/meta/recipes-graphics/cairo/cairo-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_cairo_fpu_setting(bb, d): | 2 | def get_cairo_fpu_setting(bb, d): |
3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', True) in [ 'soft' ]: |
4 | return "--disable-some-floating-point" | 4 | return "--disable-some-floating-point" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-graphics/clutter/clutter-fpu.inc b/meta/recipes-graphics/clutter/clutter-fpu.inc index dfa933de5c..7b5dc68e3c 100644 --- a/meta/recipes-graphics/clutter/clutter-fpu.inc +++ b/meta/recipes-graphics/clutter/clutter-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_clutter_fpu_setting(bb, d): | 2 | def get_clutter_fpu_setting(bb, d): |
3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', True) in [ 'soft' ]: |
4 | return "--without-fpu" | 4 | return "--without-fpu" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb index 0e213909e3..a2fd409c42 100644 --- a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb +++ b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb | |||
@@ -3,7 +3,7 @@ inherit native | |||
3 | DEPENDS = "freetype-native expat-native zlib-native" | 3 | DEPENDS = "freetype-native expat-native zlib-native" |
4 | 4 | ||
5 | EXTRA_OEMAKE = "" | 5 | EXTRA_OEMAKE = "" |
6 | EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', 1)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', 1))]}" | 6 | EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', True)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', True))]}" |
7 | 7 | ||
8 | do_install_append () { | 8 | do_install_append () { |
9 | install -d ${D}${bindir}/ | 9 | install -d ${D}${bindir}/ |
diff --git a/meta/recipes-graphics/mesa/mesa-dri.inc b/meta/recipes-graphics/mesa/mesa-dri.inc index 3687648999..480672fd08 100644 --- a/meta/recipes-graphics/mesa/mesa-dri.inc +++ b/meta/recipes-graphics/mesa/mesa-dri.inc | |||
@@ -13,7 +13,7 @@ EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gal | |||
13 | python populate_packages_prepend() { | 13 | python populate_packages_prepend() { |
14 | import os.path | 14 | import os.path |
15 | 15 | ||
16 | dri_drivers_root = os.path.join(d.getVar('libdir', 1), "dri") | 16 | dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri") |
17 | 17 | ||
18 | do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='') | 18 | do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='') |
19 | } | 19 | } |
diff --git a/meta/recipes-graphics/pango/pango.inc b/meta/recipes-graphics/pango/pango.inc index 516153c3df..8b36d04c03 100644 --- a/meta/recipes-graphics/pango/pango.inc +++ b/meta/recipes-graphics/pango/pango.inc | |||
@@ -50,7 +50,7 @@ fi | |||
50 | } | 50 | } |
51 | 51 | ||
52 | python populate_packages_prepend () { | 52 | python populate_packages_prepend () { |
53 | prologue = d.getVar("postinst_prologue", 1) | 53 | prologue = d.getVar("postinst_prologue", True) |
54 | 54 | ||
55 | modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d) | 55 | modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d) |
56 | 56 | ||
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb index 498700a204..1166e57ff3 100644 --- a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb +++ b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb | |||
@@ -26,7 +26,7 @@ XORG_PN = "libXft" | |||
26 | BBCLASSEXTEND = "native" | 26 | BBCLASSEXTEND = "native" |
27 | 27 | ||
28 | python () { | 28 | python () { |
29 | if d.getVar('DEBIAN_NAMES', 1): | 29 | if d.getVar('DEBIAN_NAMES', True): |
30 | d.setVar('PKG_${PN}', '${MLPREFIX}libxft2') | 30 | d.setVar('PKG_${PN}', '${MLPREFIX}libxft2') |
31 | } | 31 | } |
32 | 32 | ||
diff --git a/meta/recipes-kernel/linux/linux-dtb.inc b/meta/recipes-kernel/linux/linux-dtb.inc index 7ec75848dd..2f0c9188eb 100644 --- a/meta/recipes-kernel/linux/linux-dtb.inc +++ b/meta/recipes-kernel/linux/linux-dtb.inc | |||
@@ -5,11 +5,11 @@ KERNEL_DEVICETREE_FLAGS = "-R 8 -p 0x3000" | |||
5 | python __anonymous () { | 5 | python __anonymous () { |
6 | import bb | 6 | import bb |
7 | 7 | ||
8 | devicetree = d.getVar("KERNEL_DEVICETREE", 1) or '' | 8 | devicetree = d.getVar("KERNEL_DEVICETREE", True) or '' |
9 | if devicetree: | 9 | if devicetree: |
10 | depends = d.getVar("DEPENDS", 1) | 10 | depends = d.getVar("DEPENDS", True) |
11 | d.setVar("DEPENDS", "%s dtc-native" % depends) | 11 | d.setVar("DEPENDS", "%s dtc-native" % depends) |
12 | packages = d.getVar("PACKAGES", 1) | 12 | packages = d.getVar("PACKAGES", True) |
13 | d.setVar("PACKAGES", "%s kernel-devicetree" % packages) | 13 | d.setVar("PACKAGES", "%s kernel-devicetree" % packages) |
14 | } | 14 | } |
15 | 15 | ||
diff --git a/meta/recipes-multimedia/alsa/alsa-fpu.inc b/meta/recipes-multimedia/alsa/alsa-fpu.inc index 2a0c6b0194..50402307c5 100644 --- a/meta/recipes-multimedia/alsa/alsa-fpu.inc +++ b/meta/recipes-multimedia/alsa/alsa-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_alsa_fpu_setting(bb, d): | 2 | def get_alsa_fpu_setting(bb, d): |
3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', True) in [ 'soft' ]: |
4 | return "--with-softfloat" | 4 | return "--with-softfloat" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc index a71d8962af..ccabe3c032 100644 --- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc +++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc | |||
@@ -10,13 +10,13 @@ python populate_packages_prepend () { | |||
10 | do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d)) | 10 | do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d)) |
11 | do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', bb.data.expand('${PN}-%s-staticdev', d), 'GStreamer plugin for %s (static development files)', extra_depends=bb.data.expand('${PN}-staticdev',d)) | 11 | do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', bb.data.expand('${PN}-%s-staticdev', d), 'GStreamer plugin for %s (static development files)', extra_depends=bb.data.expand('${PN}-staticdev',d)) |
12 | 12 | ||
13 | pn = d.getVar('PN', 1) | 13 | pn = d.getVar('PN', True) |
14 | metapkg = pn + '-meta' | 14 | metapkg = pn + '-meta' |
15 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") | 15 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") |
16 | d.setVar('FILES_' + metapkg, "") | 16 | d.setVar('FILES_' + metapkg, "") |
17 | blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ] | 17 | blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ] |
18 | metapkg_rdepends = [] | 18 | metapkg_rdepends = [] |
19 | packages = d.getVar('PACKAGES', 1).split() | 19 | packages = d.getVar('PACKAGES', True).split() |
20 | for pkg in packages[1:]: | 20 | for pkg in packages[1:]: |
21 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'): | 21 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'): |
22 | metapkg_rdepends.append(pkg) | 22 | metapkg_rdepends.append(pkg) |
diff --git a/meta/recipes-qt/qt4-graphics-system/qt4-graphics-system_1.0.bb b/meta/recipes-qt/qt4-graphics-system/qt4-graphics-system_1.0.bb index b3aa1e559c..bae0340faf 100644 --- a/meta/recipes-qt/qt4-graphics-system/qt4-graphics-system_1.0.bb +++ b/meta/recipes-qt/qt4-graphics-system/qt4-graphics-system_1.0.bb | |||
@@ -9,7 +9,7 @@ QT_GRAPHICS_SYSTEM ?= "raster" | |||
9 | VIRTUAL-RUNTIME_xserver_common ?= "x11-common" | 9 | VIRTUAL-RUNTIME_xserver_common ?= "x11-common" |
10 | 10 | ||
11 | def _get_extra_rdepends(d): | 11 | def _get_extra_rdepends(d): |
12 | gs = d.getVar('QT_GRAPHICS_SYSTEM', 1) | 12 | gs = d.getVar('QT_GRAPHICS_SYSTEM', True) |
13 | if gs == "opengl": | 13 | if gs == "opengl": |
14 | return "qt4-plugin-graphicssystems-glgraphicssystem" | 14 | return "qt4-plugin-graphicssystems-glgraphicssystem" |
15 | 15 | ||
diff --git a/meta/recipes-qt/qt4/qt4.inc b/meta/recipes-qt/qt4/qt4.inc index 2956aed604..4d719138ca 100644 --- a/meta/recipes-qt/qt4/qt4.inc +++ b/meta/recipes-qt/qt4/qt4.inc | |||
@@ -45,7 +45,7 @@ python __anonymous () { | |||
45 | lib_packages = [] | 45 | lib_packages = [] |
46 | dev_packages = [] | 46 | dev_packages = [] |
47 | dbg_packages = [] | 47 | dbg_packages = [] |
48 | for name in d.getVar("QT_LIB_NAMES", 1).split(): | 48 | for name in d.getVar("QT_LIB_NAMES", True).split(): |
49 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 49 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
50 | # NOTE: the headers for QtAssistantClient are different | 50 | # NOTE: the headers for QtAssistantClient are different |
51 | incname = name.replace("QtAssistantClient", "QtAssistant") | 51 | incname = name.replace("QtAssistantClient", "QtAssistant") |
@@ -61,10 +61,10 @@ python __anonymous () { | |||
61 | lib_packages.append(pkg) | 61 | lib_packages.append(pkg) |
62 | dev_packages.append("%s-dev" % pkg) | 62 | dev_packages.append("%s-dev" % pkg) |
63 | dbg_packages.append("%s-dbg" % pkg) | 63 | dbg_packages.append("%s-dbg" % pkg) |
64 | for name in d.getVar("OTHER_PACKAGES", 1).split(): | 64 | for name in d.getVar("OTHER_PACKAGES", True).split(): |
65 | dbg_packages.append("%s-dbg" % name) | 65 | dbg_packages.append("%s-dbg" % name) |
66 | 66 | ||
67 | for name in d.getVar("QT_EXTRA_LIBS", 1).split(): | 67 | for name in d.getVar("QT_EXTRA_LIBS", True).split(): |
68 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 68 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
69 | d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals()) | 69 | d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals()) |
70 | d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl | 70 | d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl |
diff --git a/meta/recipes-qt/qt4/qt4_arch.inc b/meta/recipes-qt/qt4/qt4_arch.inc index 46d65a20d7..bde68dc2e0 100644 --- a/meta/recipes-qt/qt4/qt4_arch.inc +++ b/meta/recipes-qt/qt4/qt4_arch.inc | |||
@@ -4,7 +4,7 @@ ARM_INSTRUCTION_SET = "arm" | |||
4 | 4 | ||
5 | def qt_arch(d): | 5 | def qt_arch(d): |
6 | import bb, re | 6 | import bb, re |
7 | arch = d.getVar('TARGET_ARCH', 1) | 7 | arch = d.getVar('TARGET_ARCH', True) |
8 | if re.match("^i.86$", arch): | 8 | if re.match("^i.86$", arch): |
9 | arch = "i386" | 9 | arch = "i386" |
10 | elif re.match("^arm.*", arch): | 10 | elif re.match("^arm.*", arch): |
diff --git a/meta/recipes-sato/puzzles/oh-puzzles_git.bb b/meta/recipes-sato/puzzles/oh-puzzles_git.bb index a23c4ac089..c084c40a43 100644 --- a/meta/recipes-sato/puzzles/oh-puzzles_git.bb +++ b/meta/recipes-sato/puzzles/oh-puzzles_git.bb | |||
@@ -61,7 +61,7 @@ FILES_${PN}-extra = "/usr/games/ /usr/share/applications /etc/gconf/schemas" | |||
61 | python __anonymous () { | 61 | python __anonymous () { |
62 | import bb | 62 | import bb |
63 | var = bb.data.expand("FILES_${PN}", d, 1) | 63 | var = bb.data.expand("FILES_${PN}", d, 1) |
64 | data = d.getVar(var, 1) | 64 | data = d.getVar(var, True) |
65 | for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"): | 65 | for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"): |
66 | data = data + " /usr/games/%s" % name | 66 | data = data + " /usr/games/%s" % name |
67 | data = data + " /usr/share/applications/%s.desktop" % name | 67 | data = data + " /usr/share/applications/%s.desktop" % name |
diff --git a/meta/recipes-support/boost/boost.inc b/meta/recipes-support/boost/boost.inc index 09f61c002e..c15a04cb95 100644 --- a/meta/recipes-support/boost/boost.inc +++ b/meta/recipes-support/boost/boost.inc | |||
@@ -51,11 +51,11 @@ python __anonymous () { | |||
51 | 51 | ||
52 | packages = [] | 52 | packages = [] |
53 | extras = [] | 53 | extras = [] |
54 | for lib in d.getVar('BOOST_LIBS', 1).split( ): | 54 | for lib in d.getVar('BOOST_LIBS', True).split( ): |
55 | pkg = "boost-%s" % lib.replace("_", "-") | 55 | pkg = "boost-%s" % lib.replace("_", "-") |
56 | extras.append("--with-%s" % lib) | 56 | extras.append("--with-%s" % lib) |
57 | packages.append(pkg) | 57 | packages.append(pkg) |
58 | if not d.getVar("FILES_%s" % pkg, 1): | 58 | if not d.getVar("FILES_%s" % pkg, True): |
59 | d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) | 59 | d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) |
60 | d.setVar("BOOST_PACKAGES", " ".join(packages)) | 60 | d.setVar("BOOST_PACKAGES", " ".join(packages)) |
61 | d.setVar("BJAM_EXTRA", " ".join(extras)) | 61 | d.setVar("BJAM_EXTRA", " ".join(extras)) |