author     Richard Purdie <richard.purdie@linuxfoundation.org>   2011-11-25 14:25:16 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2011-11-27 10:25:34 +0000
commit     0a434ac10158e2011d41a1189e65e9474b1672be (patch)
tree       6171516b25cb337343a6373e34aec0d061e4f5e8
parent     71fded5145454b144413057e11f78f718d947093 (diff)
download   poky-0a434ac10158e2011d41a1189e65e9474b1672be.tar.gz
getVar/setVar cleanups
Complete the bb.data.getVar/setVar replacements by accessing the data store object directly.
(From OE-Core rev: 2864ff6a4b3c3f9b3bbb6d2597243cc5d3715939)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
27 files changed, 152 insertions, 159 deletions
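The change applied across all 27 files below follows one mechanical pattern: module-level bb.data.setVar/getVar/setVarFlag calls that passed the datastore as a trailing argument become method calls on the datastore object itself, and the numeric expand flag 1 becomes True. A minimal sketch of the before/after shapes, adapted from the base.bbclass hunk below; the wrapper function and its name are illustrative only, not part of the commit:

```python
def convert_example(d):
    # "d" is the BitBake datastore handed to python () blocks in a class/recipe.
    deps = (d.getVarFlag('do_install', 'depends') or "").split()
    deps.append('virtual/fakeroot-native:do_populate_sysroot')

    # Old style, removed by this commit: helpers in bb.data with the datastore
    # passed as the final argument, and 1 as the "expand" flag.
    #   bb.data.setVarFlag('do_install', 'depends', " ".join(deps), d)
    #   pn = bb.data.getVar('PN', d, 1)

    # New style: the same operations called directly on the datastore,
    # with True as the "expand" flag.
    d.setVarFlag('do_install', 'depends', " ".join(deps))
    pn = d.getVar('PN', True)
    return pn
```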
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 72196d60a7..a95dfd9a08 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -360,12 +360,12 @@ python () {
360 | d.setVarFlag('do_compile', 'umask', 022) | 360 | d.setVarFlag('do_compile', 'umask', 022) |
361 | deps = (d.getVarFlag('do_install', 'depends') or "").split() | 361 | deps = (d.getVarFlag('do_install', 'depends') or "").split() |
362 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 362 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
363 | bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d) | 363 | d.setVarFlag('do_install', 'depends', " ".join(deps)) |
364 | d.setVarFlag('do_install', 'fakeroot', 1) | 364 | d.setVarFlag('do_install', 'fakeroot', 1) |
365 | d.setVarFlag('do_install', 'umask', 022) | 365 | d.setVarFlag('do_install', 'umask', 022) |
366 | deps = (d.getVarFlag('do_package', 'depends') or "").split() | 366 | deps = (d.getVarFlag('do_package', 'depends') or "").split() |
367 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 367 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
368 | bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d) | 368 | d.setVarFlag('do_package', 'depends', " ".join(deps)) |
369 | d.setVarFlag('do_package', 'fakeroot', 1) | 369 | d.setVarFlag('do_package', 'fakeroot', 1) |
370 | d.setVarFlag('do_package', 'umask', 022) | 370 | d.setVarFlag('do_package', 'umask', 022) |
371 | d.setVarFlag('do_package_setscene', 'fakeroot', 1) | 371 | d.setVarFlag('do_package_setscene', 'fakeroot', 1) |
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 6f5bcd0ad4..499a1fb171 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -20,7 +20,7 @@ python () {
20 | sdkarchs = [] | 20 | sdkarchs = [] |
21 | for arch in archs: | 21 | for arch in archs: |
22 | sdkarchs.append(arch + '-nativesdk') | 22 | sdkarchs.append(arch + '-nativesdk') |
23 | bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d) | 23 | d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs)) |
24 | } | 24 | } |
25 | MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}" | 25 | MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}" |
26 | 26 | ||
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 687247a649..0c0b549fef 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -31,31 +31,31 @@ python do_distrodata_np() {
31 | if pn.find("-native") != -1: | 31 | if pn.find("-native") != -1: |
32 | pnstripped = pn.split("-native") | 32 | pnstripped = pn.split("-native") |
33 | bb.note("Native Split: %s" % pnstripped) | 33 | bb.note("Native Split: %s" % pnstripped) |
34 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 34 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
35 | bb.data.update_data(localdata) | 35 | bb.data.update_data(localdata) |
36 | 36 | ||
37 | if pn.find("-nativesdk") != -1: | 37 | if pn.find("-nativesdk") != -1: |
38 | pnstripped = pn.split("-nativesdk") | 38 | pnstripped = pn.split("-nativesdk") |
39 | bb.note("Native Split: %s" % pnstripped) | 39 | bb.note("Native Split: %s" % pnstripped) |
40 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 40 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
41 | bb.data.update_data(localdata) | 41 | bb.data.update_data(localdata) |
42 | 42 | ||
43 | if pn.find("-cross") != -1: | 43 | if pn.find("-cross") != -1: |
44 | pnstripped = pn.split("-cross") | 44 | pnstripped = pn.split("-cross") |
45 | bb.note("cross Split: %s" % pnstripped) | 45 | bb.note("cross Split: %s" % pnstripped) |
46 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 46 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
47 | bb.data.update_data(localdata) | 47 | bb.data.update_data(localdata) |
48 | 48 | ||
49 | if pn.find("-crosssdk") != -1: | 49 | if pn.find("-crosssdk") != -1: |
50 | pnstripped = pn.split("-crosssdk") | 50 | pnstripped = pn.split("-crosssdk") |
51 | bb.note("cross Split: %s" % pnstripped) | 51 | bb.note("cross Split: %s" % pnstripped) |
52 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 52 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
53 | bb.data.update_data(localdata) | 53 | bb.data.update_data(localdata) |
54 | 54 | ||
55 | if pn.find("-initial") != -1: | 55 | if pn.find("-initial") != -1: |
56 | pnstripped = pn.split("-initial") | 56 | pnstripped = pn.split("-initial") |
57 | bb.note("initial Split: %s" % pnstripped) | 57 | bb.note("initial Split: %s" % pnstripped) |
58 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 58 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
59 | bb.data.update_data(localdata) | 59 | bb.data.update_data(localdata) |
60 | 60 | ||
61 | """generate package information from .bb file""" | 61 | """generate package information from .bb file""" |
@@ -130,19 +130,19 @@ python do_distrodata() {
130 | if pn.find("-native") != -1: | 130 | if pn.find("-native") != -1: |
131 | pnstripped = pn.split("-native") | 131 | pnstripped = pn.split("-native") |
132 | bb.note("Native Split: %s" % pnstripped) | 132 | bb.note("Native Split: %s" % pnstripped) |
133 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 133 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
134 | bb.data.update_data(localdata) | 134 | bb.data.update_data(localdata) |
135 | 135 | ||
136 | if pn.find("-cross") != -1: | 136 | if pn.find("-cross") != -1: |
137 | pnstripped = pn.split("-cross") | 137 | pnstripped = pn.split("-cross") |
138 | bb.note("cross Split: %s" % pnstripped) | 138 | bb.note("cross Split: %s" % pnstripped) |
139 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 139 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
140 | bb.data.update_data(localdata) | 140 | bb.data.update_data(localdata) |
141 | 141 | ||
142 | if pn.find("-initial") != -1: | 142 | if pn.find("-initial") != -1: |
143 | pnstripped = pn.split("-initial") | 143 | pnstripped = pn.split("-initial") |
144 | bb.note("initial Split: %s" % pnstripped) | 144 | bb.note("initial Split: %s" % pnstripped) |
145 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 145 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
146 | bb.data.update_data(localdata) | 146 | bb.data.update_data(localdata) |
147 | 147 | ||
148 | """generate package information from .bb file""" | 148 | """generate package information from .bb file""" |
@@ -308,8 +308,8 @@ python do_checkpkg() {
308 | which is designed for check purpose but we override check command for our own purpose | 308 | which is designed for check purpose but we override check command for our own purpose |
309 | """ | 309 | """ |
310 | ld = bb.data.createCopy(d) | 310 | ld = bb.data.createCopy(d) |
311 | bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \ | 311 | d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \ |
312 | % tmpf.name, d) | 312 | % tmpf.name) |
313 | bb.data.update_data(ld) | 313 | bb.data.update_data(ld) |
314 | 314 | ||
315 | try: | 315 | try: |
@@ -452,19 +452,19 @@ python do_checkpkg() {
452 | if pname.find("-native") != -1: | 452 | if pname.find("-native") != -1: |
453 | pnstripped = pname.split("-native") | 453 | pnstripped = pname.split("-native") |
454 | bb.note("Native Split: %s" % pnstripped) | 454 | bb.note("Native Split: %s" % pnstripped) |
455 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 455 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
456 | bb.data.update_data(localdata) | 456 | bb.data.update_data(localdata) |
457 | 457 | ||
458 | if pname.find("-cross") != -1: | 458 | if pname.find("-cross") != -1: |
459 | pnstripped = pname.split("-cross") | 459 | pnstripped = pname.split("-cross") |
460 | bb.note("cross Split: %s" % pnstripped) | 460 | bb.note("cross Split: %s" % pnstripped) |
461 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 461 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
462 | bb.data.update_data(localdata) | 462 | bb.data.update_data(localdata) |
463 | 463 | ||
464 | if pname.find("-initial") != -1: | 464 | if pname.find("-initial") != -1: |
465 | pnstripped = pname.split("-initial") | 465 | pnstripped = pname.split("-initial") |
466 | bb.note("initial Split: %s" % pnstripped) | 466 | bb.note("initial Split: %s" % pnstripped) |
467 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 467 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
468 | bb.data.update_data(localdata) | 468 | bb.data.update_data(localdata) |
469 | 469 | ||
470 | pdesc = localdata.getVar('DESCRIPTION', True) | 470 | pdesc = localdata.getVar('DESCRIPTION', True) |
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index f164547782..c1dbbe30dc 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -45,7 +45,7 @@ python populate_packages_append () {
45 | schemas.append(f) | 45 | schemas.append(f) |
46 | if schemas != []: | 46 | if schemas != []: |
47 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 47 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
48 | bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d) | 48 | d.setVar('SCHEMA_FILES', " ".join(schemas)) |
49 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) | 49 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) |
50 | if not postinst: | 50 | if not postinst: |
51 | postinst = '#!/bin/sh\n' | 51 | postinst = '#!/bin/sh\n' |
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass
index 23183b3ec3..5aace0f5c8 100644
--- a/meta/classes/image-swab.bbclass
+++ b/meta/classes/image-swab.bbclass
@@ -53,7 +53,7 @@ python() {
53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): | 53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): |
54 | deps = (d.getVarFlag('do_setscene', 'depends') or "").split() | 54 | deps = (d.getVarFlag('do_setscene', 'depends') or "").split() |
55 | deps.append('strace-native:do_populate_sysroot') | 55 | deps.append('strace-native:do_populate_sysroot') |
56 | bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d) | 56 | d.setVarFlag('do_setscene', 'depends', " ".join(deps)) |
57 | logdir = bb.data.expand("${TRACE_LOGDIR}", d) | 57 | logdir = bb.data.expand("${TRACE_LOGDIR}", d) |
58 | bb.utils.mkdirhier(logdir) | 58 | bb.utils.mkdirhier(logdir) |
59 | else: | 59 | else: |
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index 12e0b83b94..d0cc279a66 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -437,7 +437,7 @@ python populate_packages_prepend () {
437 | else: | 437 | else: |
438 | rdepends = [] | 438 | rdepends = [] |
439 | rdepends.extend(get_dependencies(file, pattern, format)) | 439 | rdepends.extend(get_dependencies(file, pattern, format)) |
440 | bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d) | 440 | d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends)) |
441 | 441 | ||
442 | module_deps = parse_depmod() | 442 | module_deps = parse_depmod() |
443 | module_regex = '^(.*)\.k?o$' | 443 | module_regex = '^(.*)\.k?o$' |
@@ -464,10 +464,10 @@ python populate_packages_prepend () {
464 | for pkg in packages[1:]: | 464 | for pkg in packages[1:]: |
465 | if not pkg in blacklist and not pkg in metapkg_rdepends: | 465 | if not pkg in blacklist and not pkg in metapkg_rdepends: |
466 | metapkg_rdepends.append(pkg) | 466 | metapkg_rdepends.append(pkg) |
467 | bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) | 467 | d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends)) |
468 | d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') | 468 | d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') |
469 | packages.append(metapkg) | 469 | packages.append(metapkg) |
470 | bb.data.setVar('PACKAGES', ' '.join(packages), d) | 470 | d.setVar('PACKAGES', ' '.join(packages)) |
471 | } | 471 | } |
472 | 472 | ||
473 | # Support checking the kernel size since some kernels need to reside in partitions | 473 | # Support checking the kernel size since some kernels need to reside in partitions |
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index fc1a5794d7..bbe06fd54b 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -10,7 +10,7 @@
10 | GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" | 10 | GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" |
11 | 11 | ||
12 | python __anonymous () { | 12 | python __anonymous () { |
13 | enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1) | 13 | enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True) |
14 | 14 | ||
15 | pn = d.getVar("PN", True) | 15 | pn = d.getVar("PN", True) |
16 | if pn.endswith("-initial"): | 16 | if pn.endswith("-initial"): |
@@ -19,15 +19,15 @@ python __anonymous () {
19 | if enabled and int(enabled): | 19 | if enabled and int(enabled): |
20 | import re | 20 | import re |
21 | 21 | ||
22 | target_arch = d.getVar("TARGET_ARCH", 1) | 22 | target_arch = d.getVar("TARGET_ARCH", True) |
23 | binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or "" | 23 | binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or "" |
24 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "" | 24 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "" |
25 | 25 | ||
26 | for regexp in binary_arches.split(" "): | 26 | for regexp in binary_arches.split(" "): |
27 | r = re.compile(regexp) | 27 | r = re.compile(regexp) |
28 | 28 | ||
29 | if r.match(target_arch): | 29 | if r.match(target_arch): |
30 | depends = d.getVar("DEPENDS", 1) | 30 | depends = d.getVar("DEPENDS", True) |
31 | if use_cross_localedef == "1" : | 31 | if use_cross_localedef == "1" : |
32 | depends = "%s cross-localedef-native" % depends | 32 | depends = "%s cross-localedef-native" % depends |
33 | else: | 33 | else: |
@@ -109,19 +109,19 @@ inherit qemu
109 | 109 | ||
110 | python package_do_split_gconvs () { | 110 | python package_do_split_gconvs () { |
111 | import os, re | 111 | import os, re |
112 | if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'): | 112 | if (d.getVar('PACKAGE_NO_GCONV', True) == '1'): |
113 | bb.note("package requested not splitting gconvs") | 113 | bb.note("package requested not splitting gconvs") |
114 | return | 114 | return |
115 | 115 | ||
116 | if not d.getVar('PACKAGES', 1): | 116 | if not d.getVar('PACKAGES', True): |
117 | return | 117 | return |
118 | 118 | ||
119 | bpn = d.getVar('BPN', 1) | 119 | bpn = d.getVar('BPN', True) |
120 | libdir = d.getVar('libdir', 1) | 120 | libdir = d.getVar('libdir', True) |
121 | if not libdir: | 121 | if not libdir: |
122 | bb.error("libdir not defined") | 122 | bb.error("libdir not defined") |
123 | return | 123 | return |
124 | datadir = d.getVar('datadir', 1) | 124 | datadir = d.getVar('datadir', True) |
125 | if not datadir: | 125 | if not datadir: |
126 | bb.error("datadir not defined") | 126 | bb.error("datadir not defined") |
127 | return | 127 | return |
@@ -144,9 +144,9 @@ python package_do_split_gconvs () {
144 | deps.append(dp) | 144 | deps.append(dp) |
145 | f.close() | 145 | f.close() |
146 | if deps != []: | 146 | if deps != []: |
147 | bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) | 147 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
148 | if bpn != 'glibc': | 148 | if bpn != 'glibc': |
149 | bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) | 149 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
150 | 150 | ||
151 | do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ | 151 | do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ |
152 | description='gconv module for character set %s', hook=calc_gconv_deps, \ | 152 | description='gconv module for character set %s', hook=calc_gconv_deps, \ |
@@ -165,9 +165,9 @@ python package_do_split_gconvs () {
165 | deps.append(dp) | 165 | deps.append(dp) |
166 | f.close() | 166 | f.close() |
167 | if deps != []: | 167 | if deps != []: |
168 | bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) | 168 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
169 | if bpn != 'glibc': | 169 | if bpn != 'glibc': |
170 | bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) | 170 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
171 | 171 | ||
172 | do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ | 172 | do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ |
173 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') | 173 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') |
@@ -185,23 +185,23 @@ python package_do_split_gconvs () {
185 | deps.append(dp) | 185 | deps.append(dp) |
186 | f.close() | 186 | f.close() |
187 | if deps != []: | 187 | if deps != []: |
188 | bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) | 188 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
189 | if bpn != 'glibc': | 189 | if bpn != 'glibc': |
190 | bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) | 190 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
191 | 191 | ||
192 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ | 192 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ |
193 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') | 193 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') |
194 | bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d) | 194 | d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv') |
195 | 195 | ||
196 | use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1) | 196 | use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True) |
197 | 197 | ||
198 | dot_re = re.compile("(.*)\.(.*)") | 198 | dot_re = re.compile("(.*)\.(.*)") |
199 | 199 | ||
200 | #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales | 200 | #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales |
201 | if use_bin != "precompiled": | 201 | if use_bin != "precompiled": |
202 | supported = d.getVar('GLIBC_GENERATE_LOCALES', 1) | 202 | supported = d.getVar('GLIBC_GENERATE_LOCALES', True) |
203 | if not supported or supported == "all": | 203 | if not supported or supported == "all": |
204 | f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r") | 204 | f = open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED"), "r") |
205 | supported = f.readlines() | 205 | supported = f.readlines() |
206 | f.close() | 206 | f.close() |
207 | else: | 207 | else: |
@@ -218,7 +218,7 @@ python package_do_split_gconvs () {
218 | supported.append(dbase[0] + d2) | 218 | supported.append(dbase[0] + d2) |
219 | 219 | ||
220 | # Collate the locales by base and encoding | 220 | # Collate the locales by base and encoding |
221 | utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0) | 221 | utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) |
222 | encodings = {} | 222 | encodings = {} |
223 | for l in supported: | 223 | for l in supported: |
224 | l = l[:-1] | 224 | l = l[:-1] |
@@ -233,12 +233,12 @@ python package_do_split_gconvs () {
233 | encodings[locale].append(charset) | 233 | encodings[locale].append(charset) |
234 | 234 | ||
235 | def output_locale_source(name, pkgname, locale, encoding): | 235 | def output_locale_source(name, pkgname, locale, encoding): |
236 | bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ | 236 | setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ |
237 | (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d) | 237 | (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding))) |
238 | bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \ | 238 | d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \ |
239 | % (locale, encoding, locale), d) | 239 | % (locale, encoding, locale)) |
240 | bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \ | 240 | d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \ |
241 | (locale, encoding, locale), d) | 241 | (locale, encoding, locale)) |
242 | 242 | ||
243 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): | 243 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): |
244 | m = re.match("(.*)\.(.*)", name) | 244 | m = re.match("(.*)\.(.*)", name) |
@@ -246,23 +246,23 @@ python package_do_split_gconvs () {
246 | libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-","")) | 246 | libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-","")) |
247 | else: | 247 | else: |
248 | libc_name = name | 248 | libc_name = name |
249 | bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ | 249 | d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ |
250 | % (bpn, libc_name)), d) | 250 | % (bpn, libc_name))) |
251 | rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split() | 251 | rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split() |
252 | rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) | 252 | rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) |
253 | bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d) | 253 | d.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides)) |
254 | 254 | ||
255 | commands = {} | 255 | commands = {} |
256 | 256 | ||
257 | def output_locale_binary(name, pkgname, locale, encoding): | 257 | def output_locale_binary(name, pkgname, locale, encoding): |
258 | treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree") | 258 | treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree") |
259 | ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1)) | 259 | ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True)) |
260 | path = d.getVar("PATH", 1) | 260 | path = d.getVar("PATH", True) |
261 | i18npath = base_path_join(treedir, datadir, "i18n") | 261 | i18npath = base_path_join(treedir, datadir, "i18n") |
262 | gconvpath = base_path_join(treedir, "iconvdata") | 262 | gconvpath = base_path_join(treedir, "iconvdata") |
263 | outputpath = base_path_join(treedir, libdir, "locale") | 263 | outputpath = base_path_join(treedir, libdir, "locale") |
264 | 264 | ||
265 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0" | 265 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0" |
266 | if use_cross_localedef == "1": | 266 | if use_cross_localedef == "1": |
267 | target_arch = d.getVar('TARGET_ARCH', True) | 267 | target_arch = d.getVar('TARGET_ARCH', True) |
268 | locale_arch_options = { \ | 268 | locale_arch_options = { \ |
@@ -292,9 +292,9 @@ python package_do_split_gconvs () {
292 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ | 292 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ |
293 | % (treedir, datadir, locale, encoding, name) | 293 | % (treedir, datadir, locale, encoding, name) |
294 | 294 | ||
295 | qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1) | 295 | qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) |
296 | if not qemu_options: | 296 | if not qemu_options: |
297 | qemu_options = d.getVar('QEMU_OPTIONS', 1) | 297 | qemu_options = d.getVar('QEMU_OPTIONS', True) |
298 | 298 | ||
299 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ | 299 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ |
300 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ | 300 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ |
@@ -307,7 +307,7 @@ python package_do_split_gconvs () {
307 | def output_locale(name, locale, encoding): | 307 | def output_locale(name, locale, encoding): |
308 | pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) | 308 | pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) |
309 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') | 309 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') |
310 | bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d) | 310 | d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True))) |
311 | rprovides = ' virtual-locale-%s' % legitimize_package_name(name) | 311 | rprovides = ' virtual-locale-%s' % legitimize_package_name(name) |
312 | m = re.match("(.*)_(.*)", name) | 312 | m = re.match("(.*)_(.*)", name) |
313 | if m: | 313 | if m: |
@@ -347,7 +347,7 @@ python package_do_split_gconvs () {
347 | bb.note(" " + " ".join(non_utf8)) | 347 | bb.note(" " + " ".join(non_utf8)) |
348 | 348 | ||
349 | if use_bin == "compile": | 349 | if use_bin == "compile": |
350 | makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile") | 350 | makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") |
351 | m = open(makefile, "w") | 351 | m = open(makefile, "w") |
352 | m.write("all: %s\n\n" % " ".join(commands.keys())) | 352 | m.write("all: %s\n\n" % " ".join(commands.keys())) |
353 | for cmd in commands: | 353 | for cmd in commands: |
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 5f25bdd2c2..8f7cc1f2d3 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -121,7 +121,7 @@ python native_virtclass_handler () {
121 | newdeps.append(dep + "-native") | 121 | newdeps.append(dep + "-native") |
122 | else: | 122 | else: |
123 | newdeps.append(dep) | 123 | newdeps.append(dep) |
124 | bb.data.setVar(varname, " ".join(newdeps), d) | 124 | d.setVar(varname, " ".join(newdeps)) |
125 | 125 | ||
126 | map_dependencies("DEPENDS", e.data) | 126 | map_dependencies("DEPENDS", e.data) |
127 | for pkg in (e.data.getVar("PACKAGES", True).split() + [""]): | 127 | for pkg in (e.data.getVar("PACKAGES", True).split() + [""]): |
@@ -139,7 +139,7 @@ python native_virtclass_handler () {
139 | provides = provides.replace(prov, prov + "-native") | 139 | provides = provides.replace(prov, prov + "-native") |
140 | e.data.setVar("PROVIDES", provides) | 140 | e.data.setVar("PROVIDES", provides) |
141 | 141 | ||
142 | bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data) | 142 | e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native") |
143 | } | 143 | } |
144 | 144 | ||
145 | addhandler native_virtclass_handler | 145 | addhandler native_virtclass_handler |
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index ca24efaa7c..e6204c02ba 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -15,7 +15,7 @@ python () {
15 | sdkarchs = [] | 15 | sdkarchs = [] |
16 | for arch in archs: | 16 | for arch in archs: |
17 | sdkarchs.append(arch + '-nativesdk') | 17 | sdkarchs.append(arch + '-nativesdk') |
18 | bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d) | 18 | d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs)) |
19 | } | 19 | } |
20 | 20 | ||
21 | STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}" | 21 | STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}" |
@@ -66,7 +66,7 @@ python nativesdk_virtclass_handler () {
66 | if not pn.endswith("-nativesdk"): | 66 | if not pn.endswith("-nativesdk"): |
67 | return | 67 | return |
68 | 68 | ||
69 | bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data) | 69 | e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk") |
70 | } | 70 | } |
71 | 71 | ||
72 | python () { | 72 | python () { |
@@ -91,7 +91,7 @@ python () {
91 | newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk") | 91 | newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk") |
92 | else: | 92 | else: |
93 | newdeps.append(dep) | 93 | newdeps.append(dep) |
94 | bb.data.setVar(varname, " ".join(newdeps), d) | 94 | d.setVar(varname, " ".join(newdeps)) |
95 | 95 | ||
96 | map_dependencies("DEPENDS", d) | 96 | map_dependencies("DEPENDS", d) |
97 | #for pkg in (d.getVar("PACKAGES", True).split() + [""]): | 97 | #for pkg in (d.getVar("PACKAGES", True).split() + [""]): |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 2a78a8f54b..256cdc15de 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -151,7 +151,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
151 | the_files.append(fp % m.group(1)) | 151 | the_files.append(fp % m.group(1)) |
152 | else: | 152 | else: |
153 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | 153 | the_files.append(aux_files_pattern_verbatim % m.group(1)) |
154 | bb.data.setVar('FILES_' + pkg, " ".join(the_files), d) | 154 | d.setVar('FILES_' + pkg, " ".join(the_files)) |
155 | if extra_depends != '': | 155 | if extra_depends != '': |
156 | the_depends = d.getVar('RDEPENDS_' + pkg, True) | 156 | the_depends = d.getVar('RDEPENDS_' + pkg, True) |
157 | if the_depends: | 157 | if the_depends: |
@@ -165,11 +165,11 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
165 | if postrm: | 165 | if postrm: |
166 | d.setVar('pkg_postrm_' + pkg, postrm) | 166 | d.setVar('pkg_postrm_' + pkg, postrm) |
167 | else: | 167 | else: |
168 | bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d) | 168 | d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o)) |
169 | if callable(hook): | 169 | if callable(hook): |
170 | hook(f, pkg, file_regex, output_pattern, m.group(1)) | 170 | hook(f, pkg, file_regex, output_pattern, m.group(1)) |
171 | 171 | ||
172 | bb.data.setVar('PACKAGES', ' '.join(packages), d) | 172 | d.setVar('PACKAGES', ' '.join(packages)) |
173 | 173 | ||
174 | PACKAGE_DEPENDS += "file-native" | 174 | PACKAGE_DEPENDS += "file-native" |
175 | 175 | ||
@@ -183,7 +183,7 @@ python () {
183 | deps = (d.getVarFlag('do_package', 'deptask') or "").split() | 183 | deps = (d.getVarFlag('do_package', 'deptask') or "").split() |
184 | # shlibs requires any DEPENDS to have already packaged for the *.list files | 184 | # shlibs requires any DEPENDS to have already packaged for the *.list files |
185 | deps.append("do_package") | 185 | deps.append("do_package") |
186 | bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d) | 186 | d.setVarFlag('do_package', 'deptask', " ".join(deps)) |
187 | elif not bb.data.inherits_class('image', d): | 187 | elif not bb.data.inherits_class('image', d): |
188 | d.setVar("PACKAGERDEPTASK", "") | 188 | d.setVar("PACKAGERDEPTASK", "") |
189 | } | 189 | } |
@@ -202,7 +202,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
202 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) | 202 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
203 | objcopy = d.getVar("OBJCOPY", True) | 203 | objcopy = d.getVar("OBJCOPY", True) |
204 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) | 204 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) |
205 | workdir = bb.data.expand("${WORKDIR}", d) | 205 | workdir = d.getVar("WORKDIR", True) |
206 | workparentdir = os.path.dirname(workdir) | 206 | workparentdir = os.path.dirname(workdir) |
207 | sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) | 207 | sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) |
208 | 208 | ||
@@ -245,7 +245,7 @@ def splitfile2(debugsrcdir, d):
245 | strip = d.getVar("STRIP", True) | 245 | strip = d.getVar("STRIP", True) |
246 | objcopy = d.getVar("OBJCOPY", True) | 246 | objcopy = d.getVar("OBJCOPY", True) |
247 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) | 247 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) |
248 | workdir = bb.data.expand("${WORKDIR}", d) | 248 | workdir = d.getVar("WORKDIR", True) |
249 | workparentdir = os.path.dirname(workdir) | 249 | workparentdir = os.path.dirname(workdir) |
250 | workbasedir = os.path.basename(workdir) | 250 | workbasedir = os.path.basename(workdir) |
251 | sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) | 251 | sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) |
@@ -341,7 +341,7 @@ def runtime_mapping_rename (varname, d):
341 | else: | 341 | else: |
342 | new_depends.append(new_depend) | 342 | new_depends.append(new_depend) |
343 | 343 | ||
344 | bb.data.setVar(varname, " ".join(new_depends) or None, d) | 344 | d.setVar(varname, " ".join(new_depends) or None) |
345 | 345 | ||
346 | #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) | 346 | #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) |
347 | 347 | ||
@@ -399,15 +399,15 @@ python package_do_split_locales() {
399 | ln = legitimize_package_name(l) | 399 | ln = legitimize_package_name(l) |
400 | pkg = pn + '-locale-' + ln | 400 | pkg = pn + '-locale-' + ln |
401 | packages.append(pkg) | 401 | packages.append(pkg) |
402 | bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d) | 402 | d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l)) |
403 | bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d) | 403 | d.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln)) |
404 | bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d) | 404 | d.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln)) |
405 | bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d) | 405 | d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l)) |
406 | bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d) | 406 | d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) |
407 | if locale_section: | 407 | if locale_section: |
408 | d.setVar('SECTION_' + pkg, locale_section) | 408 | d.setVar('SECTION_' + pkg, locale_section) |
409 | 409 | ||
410 | bb.data.setVar('PACKAGES', ' '.join(packages), d) | 410 | d.setVar('PACKAGES', ' '.join(packages)) |
411 | 411 | ||
412 | # Disabled by RP 18/06/07 | 412 | # Disabled by RP 18/06/07 |
413 | # Wildcards aren't supported in debian | 413 | # Wildcards aren't supported in debian |
@@ -417,7 +417,7 @@ python package_do_split_locales() {
417 | # Probably breaks since virtual-locale- isn't provided anywhere | 417 | # Probably breaks since virtual-locale- isn't provided anywhere |
418 | #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split() | 418 | #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split() |
419 | #rdep.append('%s-locale*' % pn) | 419 | #rdep.append('%s-locale*' % pn) |
420 | #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) | 420 | #d.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep)) |
421 | } | 421 | } |
422 | 422 | ||
423 | python perform_packagecopy () { | 423 | python perform_packagecopy () { |
@@ -1018,7 +1018,7 @@ python populate_packages () {
1018 | break | 1018 | break |
1019 | if found == False: | 1019 | if found == False: |
1020 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | 1020 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) |
1021 | bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d) | 1021 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
1022 | } | 1022 | } |
1023 | populate_packages[dirs] = "${D}" | 1023 | populate_packages[dirs] = "${D}" |
1024 | 1024 | ||
@@ -1033,11 +1033,11 @@ python emit_pkgdata() {
1033 | c = codecs.getencoder("string_escape") | 1033 | c = codecs.getencoder("string_escape") |
1034 | return c(str)[0] | 1034 | return c(str)[0] |
1035 | 1035 | ||
1036 | val = bb.data.getVar('%s_%s' % (var, pkg), d, True) | 1036 | val = d.getVar('%s_%s' % (var, pkg), True) |
1037 | if val: | 1037 | if val: |
1038 | f.write('%s_%s: %s\n' % (var, pkg, encode(val))) | 1038 | f.write('%s_%s: %s\n' % (var, pkg, encode(val))) |
1039 | return | 1039 | return |
1040 | val = bb.data.getVar('%s' % (var), d, True) | 1040 | val = d.getVar('%s' % (var), True) |
1041 | if val: | 1041 | if val: |
1042 | f.write('%s: %s\n' % (var, encode(val))) | 1042 | f.write('%s: %s\n' % (var, encode(val))) |
1043 | return | 1043 | return |
@@ -1159,12 +1159,12 @@ python package_do_filedeps() {
1159 | if len(provides) > 0: | 1159 | if len(provides) > 0: |
1160 | provides_files.append(file) | 1160 | provides_files.append(file) |
1161 | key = "FILERPROVIDES_" + file + "_" + pkg | 1161 | key = "FILERPROVIDES_" + file + "_" + pkg |
1162 | bb.data.setVar(key, " ".join(provides), d) | 1162 | d.setVar(key, " ".join(provides)) |
1163 | 1163 | ||
1164 | if len(requires) > 0: | 1164 | if len(requires) > 0: |
1165 | requires_files.append(file) | 1165 | requires_files.append(file) |
1166 | key = "FILERDEPENDS_" + file + "_" + pkg | 1166 | key = "FILERDEPENDS_" + file + "_" + pkg |
1167 | bb.data.setVar(key, " ".join(requires), d) | 1167 | d.setVar(key, " ".join(requires)) |
1168 | 1168 | ||
1169 | # Determine dependencies | 1169 | # Determine dependencies |
1170 | for pkg in packages.split(): | 1170 | for pkg in packages.split(): |
@@ -1181,8 +1181,8 @@ python package_do_filedeps() {
1181 | 1181 | ||
1182 | process_deps(dep_pipe, pkg, f, provides_files, requires_files) | 1182 | process_deps(dep_pipe, pkg, f, provides_files, requires_files) |
1183 | 1183 | ||
1184 | bb.data.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files), d) | 1184 | d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files)) |
1185 | bb.data.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files), d) | 1185 | d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files)) |
1186 | } | 1186 | } |
1187 | 1187 | ||
1188 | SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs" | 1188 | SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs" |
@@ -1461,7 +1461,7 @@ python package_do_pkgconfig () {
1461 | if m: | 1461 | if m: |
1462 | name = m.group(1) | 1462 | name = m.group(1) |
1463 | val = m.group(2) | 1463 | val = m.group(2) |
1464 | bb.data.setVar(name, bb.data.expand(val, pd), pd) | 1464 | pd.setVar(name, bb.data.expand(val, pd)) |
1465 | continue | 1465 | continue |
1466 | m = field_re.match(l) | 1466 | m = field_re.match(l) |
1467 | if m: | 1467 | if m: |
@@ -1519,7 +1519,7 @@ python package_do_pkgconfig () {
1519 | python read_shlibdeps () { | 1519 | python read_shlibdeps () { |
1520 | packages = d.getVar('PACKAGES', True).split() | 1520 | packages = d.getVar('PACKAGES', True).split() |
1521 | for pkg in packages: | 1521 | for pkg in packages: |
1522 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "") | 1522 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "") |
1523 | 1523 | ||
1524 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | 1524 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": |
1525 | depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) | 1525 | depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) |
@@ -1529,7 +1529,7 @@ python read_shlibdeps () {
1529 | fd.close() | 1529 | fd.close() |
1530 | for l in lines: | 1530 | for l in lines: |
1531 | rdepends[l.rstrip()] = "" | 1531 | rdepends[l.rstrip()] = "" |
1532 | bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d) | 1532 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
1533 | } | 1533 | } |
1534 | 1534 | ||
1535 | python package_depchains() { | 1535 | python package_depchains() { |
@@ -1569,7 +1569,7 @@ python package_depchains() {
1569 | rreclist[pkgname] = "" | 1569 | rreclist[pkgname] = "" |
1570 | 1570 | ||
1571 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) | 1571 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) |
1572 | bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d) | 1572 | d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) |
1573 | 1573 | ||
1574 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | 1574 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): |
1575 | 1575 | ||
@@ -1590,7 +1590,7 @@ python package_depchains() {
1590 | rreclist[pkgname] = "" | 1590 | rreclist[pkgname] = "" |
1591 | 1591 | ||
1592 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) | 1592 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) |
1593 | bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d) | 1593 | d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) |
1594 | 1594 | ||
1595 | def add_dep(list, dep): | 1595 | def add_dep(list, dep): |
1596 | dep = dep.split(' (')[0].strip() | 1596 | dep = dep.split(' (')[0].strip() |
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 71e46a8c8e..fc28ee1e2d 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -11,7 +11,7 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
11 | PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" | 11 | PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" |
12 | 12 | ||
13 | python package_deb_fn () { | 13 | python package_deb_fn () { |
14 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) | 14 | d.setVar('PKGFN', d.getVar('PKG')) |
15 | } | 15 | } |
16 | 16 | ||
17 | addtask package_deb_install | 17 | addtask package_deb_install |
@@ -409,7 +409,7 @@ python () {
409 | deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split() | 409 | deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split() |
410 | deps.append('dpkg-native:do_populate_sysroot') | 410 | deps.append('dpkg-native:do_populate_sysroot') |
411 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 411 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
412 | bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d) | 412 | d.setVarFlag('do_package_write_deb', 'depends', " ".join(deps)) |
413 | d.setVarFlag('do_package_write_deb', 'fakeroot', "1") | 413 | d.setVarFlag('do_package_write_deb', 'fakeroot', "1") |
414 | d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1") | 414 | d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1") |
415 | 415 | ||
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index df608fc0e3..1633affb08 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -11,7 +11,7 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
11 | OPKGBUILDCMD ??= "opkg-build" | 11 | OPKGBUILDCMD ??= "opkg-build" |
12 | 12 | ||
13 | python package_ipk_fn () { | 13 | python package_ipk_fn () { |
14 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) | 14 | d.setVar('PKGFN', d.getVar('PKG')) |
15 | } | 15 | } |
16 | 16 | ||
17 | python package_ipk_install () { | 17 | python package_ipk_install () { |
@@ -441,7 +441,7 @@ python () {
441 | deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split() | 441 | deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split() |
442 | deps.append('opkg-utils-native:do_populate_sysroot') | 442 | deps.append('opkg-utils-native:do_populate_sysroot') |
443 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 443 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
444 | bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d) | 444 | d.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps)) |
445 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") | 445 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") |
446 | d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1") | 446 | d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1") |
447 | } | 447 | } |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index 75e4f2d196..93a4c3123c 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
8 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" | 8 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" |
9 | 9 | ||
10 | python package_rpm_fn () { | 10 | python package_rpm_fn () { |
11 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) | 11 | d.setVar('PKGFN', d.getVar('PKG')) |
12 | } | 12 | } |
13 | 13 | ||
14 | python package_rpm_install () { | 14 | python package_rpm_install () { |
@@ -467,7 +467,7 @@ python write_specfile () {
467 | ver = ver.replace(pv, reppv) | 467 | ver = ver.replace(pv, reppv) |
468 | newdeps_dict[dep] = ver | 468 | newdeps_dict[dep] = ver |
469 | depends = bb.utils.join_deps(newdeps_dict) | 469 | depends = bb.utils.join_deps(newdeps_dict) |
470 | bb.data.setVar(varname, depends.strip(), d) | 470 | d.setVar(varname, depends.strip()) |
471 | 471 | ||
472 | # We need to change the style the dependency from BB to RPM | 472 | # We need to change the style the dependency from BB to RPM |
473 | # This needs to happen AFTER the mapping_rename_hook | 473 | # This needs to happen AFTER the mapping_rename_hook |
@@ -969,7 +969,7 @@ python () {
969 | deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split() | 969 | deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split() |
970 | deps.append('rpm-native:do_populate_sysroot') | 970 | deps.append('rpm-native:do_populate_sysroot') |
971 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 971 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
972 | bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) | 972 | d.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps)) |
973 | d.setVarFlag('do_package_write_rpm', 'fakeroot', 1) | 973 | d.setVarFlag('do_package_write_rpm', 'fakeroot', 1) |
974 | d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1) | 974 | d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1) |
975 | } | 975 | } |
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index f26a2c0008..201bd91657 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -69,7 +69,7 @@ python do_package_tar () {
69 | if not overrides: | 69 | if not overrides: |
70 | raise bb.build.FuncFailed('OVERRIDES not defined') | 70 | raise bb.build.FuncFailed('OVERRIDES not defined') |
71 | overrides = bb.data.expand(overrides, localdata) | 71 | overrides = bb.data.expand(overrides, localdata) |
72 | bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata) | 72 | localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg)) |
73 | 73 | ||
74 | bb.data.update_data(localdata) | 74 | bb.data.update_data(localdata) |
75 | 75 | ||
@@ -95,7 +95,7 @@ python () {
95 | deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split() | 95 | deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split() |
96 | deps.append('tar-native:do_populate_sysroot') | 96 | deps.append('tar-native:do_populate_sysroot') |
97 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 97 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
98 | bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) | 98 | d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps)) |
99 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") | 99 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") |
100 | } | 100 | } |
101 | 101 | ||
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass
index 2069d652a3..651e492598 100644
--- a/meta/classes/src_distribute.bbclass
+++ b/meta/classes/src_distribute.bbclass
@@ -29,13 +29,13 @@ python do_distribute_sources () {
29 | if url.basename == '*': | 29 | if url.basename == '*': |
30 | import os.path | 30 | import os.path |
31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) | 31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) |
32 | bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d) | 32 | d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir)) |
33 | else: | 33 | else: |
34 | bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d) | 34 | d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename)) |
35 | else: | 35 | else: |
36 | d.setVar('DEST', '') | 36 | d.setVar('DEST', '') |
37 | 37 | ||
38 | bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d) | 38 | d.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license)) |
39 | bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) | 39 | bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) |
40 | } | 40 | } |
41 | 41 | ||
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 951caa360f..504b09975d 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -20,18 +20,18 @@ SSTATEPOSTINSTFUNCS ?= ""
20 | 20 | ||
21 | python () { | 21 | python () { |
22 | if bb.data.inherits_class('native', d): | 22 | if bb.data.inherits_class('native', d): |
23 | bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d) | 23 | d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH')) |
24 | elif bb.data.inherits_class('cross', d): | 24 | elif bb.data.inherits_class('cross', d): |
25 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d) | 25 | d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d)) |
26 | bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d) | 26 | d.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d)) |
27 | elif bb.data.inherits_class('crosssdk', d): | 27 | elif bb.data.inherits_class('crosssdk', d): |
28 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d), d) | 28 | d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d)) |
29 | elif bb.data.inherits_class('nativesdk', d): | 29 | elif bb.data.inherits_class('nativesdk', d): |
30 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d), d) | 30 | d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d)) |
31 | elif bb.data.inherits_class('cross-canadian', d): | 31 | elif bb.data.inherits_class('cross-canadian', d): |
32 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d), d) | 32 | d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d)) |
33 | else: | 33 | else: |
34 | bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d), d) | 34 | d.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d)) |
35 | 35 | ||
36 | # These classes encode staging paths into their scripts data so can only be | 36 | # These classes encode staging paths into their scripts data so can only be |
37 | # reused if we manipulate the paths | 37 | # reused if we manipulate the paths |
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass
index 516d1a1c20..7891207a64 100644
--- a/meta/classes/task.bbclass
+++ b/meta/classes/task.bbclass
@@ -22,6 +22,6 @@ python () {
22 | for pkg in packages: | 22 | for pkg in packages: |
23 | for postfix in ['-dbg', '-dev']: | 23 | for postfix in ['-dbg', '-dev']: |
24 | genpackages.append(pkg+postfix) | 24 | genpackages.append(pkg+postfix) |
25 | bb.data.setVar('PACKAGES', ' '.join(packages+genpackages), d) | 25 | d.setVar('PACKAGES', ' '.join(packages+genpackages)) |
26 | } | 26 | } |
27 | 27 | ||
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 492c5fba2d..cba44d688f 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -45,7 +45,7 @@ python populate_packages_prepend () {
45 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) | 45 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) |
46 | localdata = bb.data.createCopy(d) | 46 | localdata = bb.data.createCopy(d) |
47 | overrides = localdata.getVar("OVERRIDES", 1) | 47 | overrides = localdata.getVar("OVERRIDES", 1) |
48 | bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata) | 48 | localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) |
49 | bb.data.update_data(localdata) | 49 | bb.data.update_data(localdata) |
50 | 50 | ||
51 | """ | 51 | """ |
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 60f3200e89..acba388226 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -285,7 +285,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir
285 | 285 | ||
286 | DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" | 286 | DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" |
287 | 287 | ||
288 | FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], d, 1)}" | 288 | FILES_${PN}-dbg = "${@d.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory'], True)}" |
289 | 289 | ||
290 | SECTION_${PN}-dbg = "devel" | 290 | SECTION_${PN}-dbg = "devel" |
291 | ALLOW_EMPTY_${PN}-dbg = "1" | 291 | ALLOW_EMPTY_${PN}-dbg = "1" |
@@ -502,7 +502,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types"
502 | # Disabled until the option works properly -feliminate-dwarf2-dups | 502 | # Disabled until the option works properly -feliminate-dwarf2-dups |
503 | FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" | 503 | FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" |
504 | DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" | 504 | DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" |
505 | SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], d, 1)}" | 505 | SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', True) == '1'], True)}" |
506 | SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION" | 506 | SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION" |
507 | BUILD_OPTIMIZATION = "-O2 -pipe" | 507 | BUILD_OPTIMIZATION = "-O2 -pipe" |
508 | 508 | ||
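The two bitbake.conf changes above also switch the inline ${@...} expressions to the datastore methods. The selection trick they rely on is plain Python: a boolean comparison is used as a list index, so False picks element 0 and True picks element 1. A standalone sketch of the SELECTED_OPTIMIZATION case (the DEBUG_BUILD value here is illustrative, standing in for d.getVar('DEBUG_BUILD', True)):

    # Boolean-as-index selection, as used by SELECTED_OPTIMIZATION above
    names = ['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION']
    debug_build = '1'                     # stand-in for d.getVar('DEBUG_BUILD', True)
    print(names[debug_build == '1'])      # True indexes element 1 -> DEBUG_OPTIMIZATION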
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py index ad391e3b01..630e88d2f2 100644 --- a/meta/lib/oe/distro_check.py +++ b/meta/lib/oe/distro_check.py | |||
@@ -284,19 +284,19 @@ def compare_in_distro_packages_list(distro_check_dir, d): | |||
284 | 284 | ||
285 | if pn.find("-native") != -1: | 285 | if pn.find("-native") != -1: |
286 | pnstripped = pn.split("-native") | 286 | pnstripped = pn.split("-native") |
287 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 287 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
288 | bb.data.update_data(localdata) | 288 | bb.data.update_data(localdata) |
289 | recipe_name = pnstripped[0] | 289 | recipe_name = pnstripped[0] |
290 | 290 | ||
291 | if pn.find("-cross") != -1: | 291 | if pn.find("-cross") != -1: |
292 | pnstripped = pn.split("-cross") | 292 | pnstripped = pn.split("-cross") |
293 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 293 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
294 | bb.data.update_data(localdata) | 294 | bb.data.update_data(localdata) |
295 | recipe_name = pnstripped[0] | 295 | recipe_name = pnstripped[0] |
296 | 296 | ||
297 | if pn.find("-initial") != -1: | 297 | if pn.find("-initial") != -1: |
298 | pnstripped = pn.split("-initial") | 298 | pnstripped = pn.split("-initial") |
299 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) | 299 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) |
300 | bb.data.update_data(localdata) | 300 | bb.data.update_data(localdata) |
301 | recipe_name = pnstripped[0] | 301 | recipe_name = pnstripped[0] |
302 | 302 | ||
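The three distro_check.py hunks above repeat one idiom, now written with the datastore methods: copy the datastore, prepend a pn-&lt;recipe&gt; entry to OVERRIDES on the copy, then re-apply the overrides. Written out once as a sketch (pnstripped[0] as in the hunks; localdata is created earlier in the same function, as in the update-rc.d hunk above):

    localdata = bb.data.createCopy(d)     # private copy of the datastore
    localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
    bb.data.update_data(localdata)        # re-evaluate overrides on the copy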
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc index 0b74ea6780..b948e2ce6b 100644 --- a/meta/recipes-core/busybox/busybox.inc +++ b/meta/recipes-core/busybox/busybox.inc | |||
@@ -81,17 +81,15 @@ python () { | |||
81 | if "${OE_DEL}": | 81 | if "${OE_DEL}": |
82 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") | 82 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") |
83 | if "${OE_FEATURES}": | 83 | if "${OE_FEATURES}": |
84 | bb.data.setVar('configmangle_append', | 84 | d.setVar('configmangle_append', |
85 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % | 85 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % |
86 | ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))), | 86 | ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n"))))) |
87 | d) | 87 | d.setVar('configmangle_append', |
88 | bb.data.setVar('configmangle_append', | ||
89 | "/^### CROSS$/a\\\n%s\n" % | 88 | "/^### CROSS$/a\\\n%s\n" % |
90 | ("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", | 89 | ("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", |
91 | "CONFIG_EXTRA_CFLAGS=\"${CFLAGS}\"" | 90 | "CONFIG_EXTRA_CFLAGS=\"${CFLAGS}\"" |
92 | ]) | 91 | ]) |
93 | ), | 92 | )) |
94 | d) | ||
95 | } | 93 | } |
96 | 94 | ||
97 | do_prepare_config () { | 95 | do_prepare_config () { |
diff --git a/meta/recipes-core/tasks/task-core-sdk.bb b/meta/recipes-core/tasks/task-core-sdk.bb index a74de01b07..d940e39318 100644 --- a/meta/recipes-core/tasks/task-core-sdk.bb +++ b/meta/recipes-core/tasks/task-core-sdk.bb | |||
@@ -80,11 +80,11 @@ RDEPENDS_task-core-sdk = "\ | |||
80 | # rreclist.append('%s-dev' % name) | 80 | # rreclist.append('%s-dev' % name) |
81 | # | 81 | # |
82 | # oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or '' | 82 | # oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or '' |
83 | # bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d) | 83 | # d.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist)) |
84 | # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg))) | 84 | # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg))) |
85 | # | 85 | # |
86 | # # bb.note('pkgs is %s' % pkgs) | 86 | # # bb.note('pkgs is %s' % pkgs) |
87 | # bb.data.setVar('PACKAGES', ' '.join(pkgs), d) | 87 | # d.setVar('PACKAGES', ' '.join(pkgs)) |
88 | #} | 88 | #} |
89 | # | 89 | # |
90 | #PACKAGES_DYNAMIC = "task-core-sdk-*" | 90 | #PACKAGES_DYNAMIC = "task-core-sdk-*" |
diff --git a/meta/recipes-core/uclibc/uclibc.inc b/meta/recipes-core/uclibc/uclibc.inc index 8438f25450..92157bdb2a 100644 --- a/meta/recipes-core/uclibc/uclibc.inc +++ b/meta/recipes-core/uclibc/uclibc.inc | |||
@@ -141,11 +141,10 @@ python () { | |||
141 | if "${OE_DEL}": | 141 | if "${OE_DEL}": |
142 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") | 142 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") |
143 | if "${OE_FEATURES}": | 143 | if "${OE_FEATURES}": |
144 | bb.data.setVar('configmangle_append', | 144 | d.setVar('configmangle_append', |
145 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % | 145 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % |
146 | ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))), | 146 | ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n"))))) |
147 | d) | 147 | d.setVar('configmangle_append', |
148 | bb.data.setVar('configmangle_append', | ||
149 | "/^### CROSS$/a\\\n%s\n" % | 148 | "/^### CROSS$/a\\\n%s\n" % |
150 | ("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", | 149 | ("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", |
151 | "UCLIBC_EXTRA_CFLAGS=\"${UCLIBC_EXTRA_CFLAGS}\"", | 150 | "UCLIBC_EXTRA_CFLAGS=\"${UCLIBC_EXTRA_CFLAGS}\"", |
@@ -154,22 +153,18 @@ python () { | |||
154 | "DEVEL_PREFIX=\"/${prefix}\"", | 153 | "DEVEL_PREFIX=\"/${prefix}\"", |
155 | "SHARED_LIB_LOADER_PREFIX=\"/lib\"", | 154 | "SHARED_LIB_LOADER_PREFIX=\"/lib\"", |
156 | ]) | 155 | ]) |
157 | ), | 156 | )) |
158 | d) | 157 | d.setVar('configmangle_append', |
159 | bb.data.setVar('configmangle_append', | ||
160 | "/^### TGT$/a\\\nTARGET_ARCH=\"%s\"\\nTARGET_%s=y\n" % | 158 | "/^### TGT$/a\\\nTARGET_ARCH=\"%s\"\\nTARGET_%s=y\n" % |
161 | ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"), | 159 | ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}")) |
162 | d) | 160 | d.setVar('configmangle_append', |
163 | bb.data.setVar('configmangle_append', | 161 | "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', True) in [ 'soft' ]])) |
164 | "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d) | ||
165 | if "${UCLIBC_ENDIAN}": | 162 | if "${UCLIBC_ENDIAN}": |
166 | bb.data.setVar('configmangle_append', | 163 | d.setVar('configmangle_append', |
167 | "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"), | 164 | "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}")) |
168 | d) | ||
169 | if "${UCLIBC_ABI}": | 165 | if "${UCLIBC_ABI}": |
170 | bb.data.setVar('configmangle_append', | 166 | d.setVar('configmangle_append', |
171 | "/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}"), | 167 | "/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}")) |
172 | d) | ||
173 | } | 168 | } |
174 | 169 | ||
175 | do_patch_append() { | 170 | do_patch_append() { |
diff --git a/meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb b/meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb index 1762829755..6106986bb0 100644 --- a/meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb +++ b/meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb | |||
@@ -21,7 +21,7 @@ SRC_URI[sha256sum] = "7fe62180f08ef5f0a0062fb444591e349cae2ab5af6ad834599f5c654e | |||
21 | DEPENDS += "bigreqsproto xproto xextproto xtrans libxau xcmiscproto \ | 21 | DEPENDS += "bigreqsproto xproto xextproto xtrans libxau xcmiscproto \ |
22 | libxdmcp xf86bigfontproto kbproto inputproto xproto-native" | 22 | libxdmcp xf86bigfontproto kbproto inputproto xproto-native" |
23 | 23 | ||
24 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/libx11" | 24 | FILESDIR = "${@os.path.dirname(d.getVar('FILE', True))}/libx11" |
25 | 25 | ||
26 | EXTRA_OECONF += "--without-xcb --disable-udc --disable-xcms --disable-xlocale --with-keysymdefdir=${STAGING_INCDIR}/X11" | 26 | EXTRA_OECONF += "--without-xcb --disable-udc --disable-xcms --disable-xlocale --with-keysymdefdir=${STAGING_INCDIR}/X11" |
27 | CFLAGS += "-D_GNU_SOURCE" | 27 | CFLAGS += "-D_GNU_SOURCE" |
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc index 7949058b13..8ff9f82fb4 100644 --- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc +++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc | |||
@@ -2,8 +2,8 @@ LIBV = "0.10" | |||
2 | 2 | ||
3 | python populate_packages_prepend () { | 3 | python populate_packages_prepend () { |
4 | gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d) | 4 | gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d) |
5 | postinst = d.getVar('plugin_postinst', 1) | 5 | postinst = d.getVar('plugin_postinst', True) |
6 | glibdir = bb.data.expand('${libdir}', d) | 6 | glibdir = d.getVar('libdir', True) |
7 | 7 | ||
8 | do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True) | 8 | do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True) |
9 | do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d)) | 9 | do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d)) |
@@ -19,7 +19,7 @@ python populate_packages_prepend () { | |||
19 | for pkg in packages[1:]: | 19 | for pkg in packages[1:]: |
20 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'): | 20 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'): |
21 | metapkg_rdepends.append(pkg) | 21 | metapkg_rdepends.append(pkg) |
22 | bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) | 22 | d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends)) |
23 | d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package') | 23 | d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package') |
24 | } | 24 | } |
25 | 25 | ||
diff --git a/meta/recipes-qt/qt4/qt4.inc b/meta/recipes-qt/qt4/qt4.inc index 82ba6377a0..4dc9a75e57 100644 --- a/meta/recipes-qt/qt4/qt4.inc +++ b/meta/recipes-qt/qt4/qt4.inc | |||
@@ -43,14 +43,14 @@ python __anonymous () { | |||
43 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 43 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
44 | # NOTE: the headers for QtAssistantClient are different | 44 | # NOTE: the headers for QtAssistantClient are different |
45 | incname = name.replace("QtAssistantClient", "QtAssistant") | 45 | incname = name.replace("QtAssistantClient", "QtAssistant") |
46 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d) | 46 | d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals()) |
47 | bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl | 47 | d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl |
48 | ${libdir}/lib%(name)s${QT_LIBINFIX}.a | 48 | ${libdir}/lib%(name)s${QT_LIBINFIX}.a |
49 | ${libdir}/lib%(name)s${QT_LIBINFIX}.la | 49 | ${libdir}/lib%(name)s${QT_LIBINFIX}.la |
50 | ${libdir}/lib%(name)s${QT_LIBINFIX}.so | 50 | ${libdir}/lib%(name)s${QT_LIBINFIX}.so |
51 | ${includedir}/${QT_DIR_NAME}/%(incname)s | 51 | ${includedir}/${QT_DIR_NAME}/%(incname)s |
52 | ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d) | 52 | ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals()) |
53 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d) | 53 | d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals()) |
54 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") | 54 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") |
55 | lib_packages.append(pkg) | 55 | lib_packages.append(pkg) |
56 | dev_packages.append("%s-dev" % pkg) | 56 | dev_packages.append("%s-dev" % pkg) |
@@ -60,22 +60,22 @@ python __anonymous () { | |||
60 | 60 | ||
61 | for name in d.getVar("QT_EXTRA_LIBS", 1).split(): | 61 | for name in d.getVar("QT_EXTRA_LIBS", 1).split(): |
62 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 62 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
63 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d) | 63 | d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals()) |
64 | bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl | 64 | d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl |
65 | ${libdir}/lib%(name)s.a | 65 | ${libdir}/lib%(name)s.a |
66 | ${libdir}/lib%(name)s.la | 66 | ${libdir}/lib%(name)s.la |
67 | ${libdir}/lib%(name)s.so | 67 | ${libdir}/lib%(name)s.so |
68 | ${includedir}/${QT_DIR_NAME}/%(incname)s | 68 | ${includedir}/${QT_DIR_NAME}/%(incname)s |
69 | ${libdir}/pkgconfig/%(name)s.pc""" % locals(), d) | 69 | ${libdir}/pkgconfig/%(name)s.pc""" % locals()) |
70 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d) | 70 | d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals()) |
71 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") | 71 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") |
72 | lib_packages.append(pkg) | 72 | lib_packages.append(pkg) |
73 | dev_packages.append("%s-dev" % pkg) | 73 | dev_packages.append("%s-dev" % pkg) |
74 | dbg_packages.append("%s-dbg" % pkg) | 74 | dbg_packages.append("%s-dbg" % pkg) |
75 | 75 | ||
76 | bb.data.setVar("LIB_PACKAGES", " ".join(lib_packages), d) | 76 | d.setVar("LIB_PACKAGES", " ".join(lib_packages)) |
77 | bb.data.setVar("DEV_PACKAGES", " ".join(dev_packages), d) | 77 | d.setVar("DEV_PACKAGES", " ".join(dev_packages)) |
78 | bb.data.setVar("DBG_PACKAGES", " ".join(dbg_packages), d) | 78 | d.setVar("DBG_PACKAGES", " ".join(dbg_packages)) |
79 | } | 79 | } |
80 | 80 | ||
81 | OTHER_PACKAGES = "\ | 81 | OTHER_PACKAGES = "\ |
@@ -261,7 +261,7 @@ python populate_packages_prepend() { | |||
261 | packages = "%s %s-dbg" % (packages, package) | 261 | packages = "%s %s-dbg" % (packages, package) |
262 | file_name = os.path.join(plugin_dir_dbg, os.path.basename(file)) | 262 | file_name = os.path.join(plugin_dir_dbg, os.path.basename(file)) |
263 | d.setVar("FILES_%s-dbg" % package, file_name) | 263 | d.setVar("FILES_%s-dbg" % package, file_name) |
264 | bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d) | 264 | d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package)) |
265 | 265 | ||
266 | d.setVar('PACKAGES', packages) | 266 | d.setVar('PACKAGES', packages) |
267 | 267 | ||
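In the qt4.inc hunks above, the replacement d.setVar() calls keep the existing "%(name)s" % locals() formatting, which fills the placeholders from the enclosing loop's local variables before the datastore ever sees the string. A small sketch with hypothetical values (name, incname and pkg are really derived from the Qt library list and QT_BASE_LIB in the loop above):

    name = "QtCore"        # hypothetical example value from the library loop
    incname = name
    pkg = "libqtcore4"     # hypothetical; really QT_BASE_LIB + transformed name + "4"
    d.setVar("FILES_%s-dbg" % pkg,
             "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals())
    # -> FILES_libqtcore4-dbg = "${libdir}/.debug/libQtCore${QT_LIBINFIX}.so*"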
diff --git a/meta/recipes-support/boost/boost.inc b/meta/recipes-support/boost/boost.inc index ddb65b754d..fecdb97602 100644 --- a/meta/recipes-support/boost/boost.inc +++ b/meta/recipes-support/boost/boost.inc | |||
@@ -57,8 +57,8 @@ python __anonymous () { | |||
57 | packages.append(pkg) | 57 | packages.append(pkg) |
58 | if not d.getVar("FILES_%s" % pkg, 1): | 58 | if not d.getVar("FILES_%s" % pkg, 1): |
59 | d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) | 59 | d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) |
60 | bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d) | 60 | d.setVar("BOOST_PACKAGES", " ".join(packages)) |
61 | bb.data.setVar("BJAM_EXTRA", " ".join(extras), d) | 61 | d.setVar("BJAM_EXTRA", " ".join(extras)) |
62 | } | 62 | } |
63 | 63 | ||
64 | # Override the contents of specific packages | 64 | # Override the contents of specific packages |