diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-09 15:00:01 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-10 11:51:19 +0000 |
commit | c8dee9b92dfd545852ecac8dc2adfc95ac02e957 (patch) | |
tree | 5f1b86954646a0f3bb914407994388a6a4346769 | |
parent | 5d3860f4a8abb8e95442b04f8b84a333af362fcd (diff) | |
download | poky-c8dee9b92dfd545852ecac8dc2adfc95ac02e957.tar.gz |
Convert to use direct access to the data store (instead of bb.data.*Var*())
This is the result of running the following over the metadata:
sed \
-e 's:bb.data.\(setVar([^,()]*,[^,()]*\), *\([^ )]*\) *):\2.\1):g' \
-e 's:bb.data.\(setVarFlag([^,()]*,[^,()]*,[^,()]*\), *\([^) ]*\) *):\2.\1):g' \
-e 's:bb.data.\(getVar([^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
-e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
-e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^() ]*\) *):\2.\1):g' \
-e 's:bb.data.\(getVar([^,()]*\), *\([^) ]*\) *):\2.\1):g' \
-i `grep -ril bb.data *`
(From OE-Core rev: b22831fd63164c4db9c0b72934d7d734a6585251)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
130 files changed, 1058 insertions, 1058 deletions
diff --git a/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc b/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc index ce537df7ff..511ae97bef 100644 --- a/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc +++ b/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc | |||
@@ -9,7 +9,7 @@ RDEPENDS_${PN} = "glibc-gconv-ibm850 glibc-gconv-cp1252 \ | |||
9 | SRC_URI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz" | 9 | SRC_URI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz" |
10 | 10 | ||
11 | #want 2.x from 2.x.y for the installation directory | 11 | #want 2.x from 2.x.y for the installation directory |
12 | SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" | 12 | SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}" |
13 | 13 | ||
14 | FILES_${PN} += " \ | 14 | FILES_${PN} += " \ |
15 | ${datadir}/icons/* \ | 15 | ${datadir}/icons/* \ |
diff --git a/meta-demoapps/recipes-gnome/abiword/abiword.inc b/meta-demoapps/recipes-gnome/abiword/abiword.inc index 4ec0ba976a..036f98f348 100644 --- a/meta-demoapps/recipes-gnome/abiword/abiword.inc +++ b/meta-demoapps/recipes-gnome/abiword/abiword.inc | |||
@@ -17,7 +17,7 @@ SVNURI = "svn://svn.abisource.com/abiword/trunk;module=abiword;proto=http" | |||
17 | SVNSRC = "${WORKDIR}/abi" | 17 | SVNSRC = "${WORKDIR}/abi" |
18 | 18 | ||
19 | #want 2.x from 2.x.y for the installation directory | 19 | #want 2.x from 2.x.y for the installation directory |
20 | SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" | 20 | SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}" |
21 | 21 | ||
22 | FILES_${PN} += " \ | 22 | FILES_${PN} += " \ |
23 | ${datadir}/icons/* \ | 23 | ${datadir}/icons/* \ |
diff --git a/meta-demoapps/recipes-support/poppler/poppler-fpu.inc b/meta-demoapps/recipes-support/poppler/poppler-fpu.inc index a26273020a..2fbee13b51 100644 --- a/meta-demoapps/recipes-support/poppler/poppler-fpu.inc +++ b/meta-demoapps/recipes-support/poppler/poppler-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_poppler_fpu_setting(bb, d): | 2 | def get_poppler_fpu_setting(bb, d): |
3 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
4 | return "--enable-fixedpoint" | 4 | return "--enable-fixedpoint" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass index 451c7fcc1e..37e7d4b482 100644 --- a/meta/classes/autotools.bbclass +++ b/meta/classes/autotools.bbclass | |||
@@ -1,8 +1,8 @@ | |||
1 | def autotools_dep_prepend(d): | 1 | def autotools_dep_prepend(d): |
2 | if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1): | 2 | if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1): |
3 | return '' | 3 | return '' |
4 | 4 | ||
5 | pn = bb.data.getVar('PN', d, 1) | 5 | pn = d.getVar('PN', 1) |
6 | deps = '' | 6 | deps = '' |
7 | 7 | ||
8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: | 8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: |
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d): | |||
13 | deps += 'libtool-native ' | 13 | deps += 'libtool-native ' |
14 | if not bb.data.inherits_class('native', d) \ | 14 | if not bb.data.inherits_class('native', d) \ |
15 | and not bb.data.inherits_class('cross', d) \ | 15 | and not bb.data.inherits_class('cross', d) \ |
16 | and not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1): | 16 | and not d.getVar('INHIBIT_DEFAULT_DEPS', 1): |
17 | deps += 'libtool-cross ' | 17 | deps += 'libtool-cross ' |
18 | 18 | ||
19 | return deps + 'gnu-config-native ' | 19 | return deps + 'gnu-config-native ' |
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index f5397446dd..f3f798f9bf 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -60,9 +60,9 @@ def base_dep_prepend(d): | |||
60 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not | 60 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not |
61 | # we need that built is the responsibility of the patch function / class, not | 61 | # we need that built is the responsibility of the patch function / class, not |
62 | # the application. | 62 | # the application. |
63 | if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d): | 63 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): |
64 | if (bb.data.getVar('HOST_SYS', d, 1) != | 64 | if (d.getVar('HOST_SYS', 1) != |
65 | bb.data.getVar('BUILD_SYS', d, 1)): | 65 | d.getVar('BUILD_SYS', 1)): |
66 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " | 66 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " |
67 | return deps | 67 | return deps |
68 | 68 | ||
@@ -73,13 +73,13 @@ DEPENDS_prepend="${BASEDEPENDS} " | |||
73 | FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}" | 73 | FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}" |
74 | # THISDIR only works properly with imediate expansion as it has to run | 74 | # THISDIR only works properly with imediate expansion as it has to run |
75 | # in the context of the location its used (:=) | 75 | # in the context of the location its used (:=) |
76 | THISDIR = "${@os.path.dirname(bb.data.getVar('FILE', d, True))}" | 76 | THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}" |
77 | 77 | ||
78 | addtask fetch | 78 | addtask fetch |
79 | do_fetch[dirs] = "${DL_DIR}" | 79 | do_fetch[dirs] = "${DL_DIR}" |
80 | python base_do_fetch() { | 80 | python base_do_fetch() { |
81 | 81 | ||
82 | src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split() | 82 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
83 | if len(src_uri) == 0: | 83 | if len(src_uri) == 0: |
84 | return | 84 | return |
85 | 85 | ||
@@ -96,14 +96,14 @@ python base_do_fetch() { | |||
96 | addtask unpack after do_fetch | 96 | addtask unpack after do_fetch |
97 | do_unpack[dirs] = "${WORKDIR}" | 97 | do_unpack[dirs] = "${WORKDIR}" |
98 | python base_do_unpack() { | 98 | python base_do_unpack() { |
99 | src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split() | 99 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
100 | if len(src_uri) == 0: | 100 | if len(src_uri) == 0: |
101 | return | 101 | return |
102 | 102 | ||
103 | localdata = bb.data.createCopy(d) | 103 | localdata = bb.data.createCopy(d) |
104 | bb.data.update_data(localdata) | 104 | bb.data.update_data(localdata) |
105 | 105 | ||
106 | rootdir = bb.data.getVar('WORKDIR', localdata, True) | 106 | rootdir = localdata.getVar('WORKDIR', True) |
107 | 107 | ||
108 | try: | 108 | try: |
109 | fetcher = bb.fetch2.Fetch(src_uri, localdata) | 109 | fetcher = bb.fetch2.Fetch(src_uri, localdata) |
@@ -118,7 +118,7 @@ def generate_git_config(e): | |||
118 | from bb import data | 118 | from bb import data |
119 | 119 | ||
120 | if data.getVar('GIT_CORE_CONFIG', e.data, True): | 120 | if data.getVar('GIT_CORE_CONFIG', e.data, True): |
121 | gitconfig_path = bb.data.getVar('GIT_CONFIG', e.data, True) | 121 | gitconfig_path = e.data.getVar('GIT_CONFIG', True) |
122 | proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True) | 122 | proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True) |
123 | 123 | ||
124 | bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data)) | 124 | bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data)) |
@@ -207,11 +207,11 @@ python base_eventhandler() { | |||
207 | name = getName(e) | 207 | name = getName(e) |
208 | 208 | ||
209 | if name.startswith("BuildStarted"): | 209 | if name.startswith("BuildStarted"): |
210 | bb.data.setVar( 'BB_VERSION', bb.__version__, e.data ) | 210 | e.data.setVar( 'BB_VERSION', bb.__version__) |
211 | statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU'] | 211 | statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU'] |
212 | statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars] | 212 | statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars] |
213 | 213 | ||
214 | layers = (bb.data.getVar("BBLAYERS", e.data, 1) or "").split() | 214 | layers = (e.data.getVar("BBLAYERS", 1) or "").split() |
215 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ | 215 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ |
216 | base_get_metadata_git_branch(i, None).strip(), \ | 216 | base_get_metadata_git_branch(i, None).strip(), \ |
217 | base_get_metadata_git_revision(i, None)) \ | 217 | base_get_metadata_git_revision(i, None)) \ |
@@ -237,7 +237,7 @@ python base_eventhandler() { | |||
237 | needed_vars = [ "TARGET_ARCH", "TARGET_OS" ] | 237 | needed_vars = [ "TARGET_ARCH", "TARGET_OS" ] |
238 | pesteruser = [] | 238 | pesteruser = [] |
239 | for v in needed_vars: | 239 | for v in needed_vars: |
240 | val = bb.data.getVar(v, e.data, 1) | 240 | val = e.data.getVar(v, 1) |
241 | if not val or val == 'INVALID': | 241 | if not val or val == 'INVALID': |
242 | pesteruser.append(v) | 242 | pesteruser.append(v) |
243 | if pesteruser: | 243 | if pesteruser: |
@@ -330,23 +330,23 @@ python () { | |||
330 | appendVar('EXTRA_OECONF', extraconf) | 330 | appendVar('EXTRA_OECONF', extraconf) |
331 | 331 | ||
332 | # If PRINC is set, try and increase the PR value by the amount specified | 332 | # If PRINC is set, try and increase the PR value by the amount specified |
333 | princ = bb.data.getVar('PRINC', d, True) | 333 | princ = d.getVar('PRINC', True) |
334 | if princ: | 334 | if princ: |
335 | pr = bb.data.getVar('PR', d, True) | 335 | pr = d.getVar('PR', True) |
336 | pr_prefix = re.search("\D+",pr) | 336 | pr_prefix = re.search("\D+",pr) |
337 | prval = re.search("\d+",pr) | 337 | prval = re.search("\d+",pr) |
338 | if pr_prefix is None or prval is None: | 338 | if pr_prefix is None or prval is None: |
339 | bb.error("Unable to analyse format of PR variable: %s" % pr) | 339 | bb.error("Unable to analyse format of PR variable: %s" % pr) |
340 | nval = int(prval.group(0)) + int(princ) | 340 | nval = int(prval.group(0)) + int(princ) |
341 | pr = pr_prefix.group(0) + str(nval) + pr[prval.end():] | 341 | pr = pr_prefix.group(0) + str(nval) + pr[prval.end():] |
342 | bb.data.setVar('PR', pr, d) | 342 | d.setVar('PR', pr) |
343 | 343 | ||
344 | pn = bb.data.getVar('PN', d, 1) | 344 | pn = d.getVar('PN', 1) |
345 | license = bb.data.getVar('LICENSE', d, True) | 345 | license = d.getVar('LICENSE', True) |
346 | if license == "INVALID": | 346 | if license == "INVALID": |
347 | bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) | 347 | bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) |
348 | 348 | ||
349 | commercial_license = " %s " % bb.data.getVar('COMMERCIAL_LICENSE', d, 1) | 349 | commercial_license = " %s " % d.getVar('COMMERCIAL_LICENSE', 1) |
350 | import re | 350 | import re |
351 | pnr = "[ \t]%s[ \t]" % pn.replace('+', "\+") | 351 | pnr = "[ \t]%s[ \t]" % pn.replace('+', "\+") |
352 | if commercial_license and re.search(pnr, commercial_license): | 352 | if commercial_license and re.search(pnr, commercial_license): |
@@ -356,86 +356,86 @@ python () { | |||
356 | # If we're building a target package we need to use fakeroot (pseudo) | 356 | # If we're building a target package we need to use fakeroot (pseudo) |
357 | # in order to capture permissions, owners, groups and special files | 357 | # in order to capture permissions, owners, groups and special files |
358 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d): | 358 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d): |
359 | bb.data.setVarFlag('do_configure', 'umask', 022, d) | 359 | d.setVarFlag('do_configure', 'umask', 022) |
360 | bb.data.setVarFlag('do_compile', 'umask', 022, d) | 360 | d.setVarFlag('do_compile', 'umask', 022) |
361 | deps = (bb.data.getVarFlag('do_install', 'depends', d) or "").split() | 361 | deps = (d.getVarFlag('do_install', 'depends') or "").split() |
362 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 362 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
363 | bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d) | 363 | bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d) |
364 | bb.data.setVarFlag('do_install', 'fakeroot', 1, d) | 364 | d.setVarFlag('do_install', 'fakeroot', 1) |
365 | bb.data.setVarFlag('do_install', 'umask', 022, d) | 365 | d.setVarFlag('do_install', 'umask', 022) |
366 | deps = (bb.data.getVarFlag('do_package', 'depends', d) or "").split() | 366 | deps = (d.getVarFlag('do_package', 'depends') or "").split() |
367 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 367 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
368 | bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d) | 368 | bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d) |
369 | bb.data.setVarFlag('do_package', 'fakeroot', 1, d) | 369 | d.setVarFlag('do_package', 'fakeroot', 1) |
370 | bb.data.setVarFlag('do_package', 'umask', 022, d) | 370 | d.setVarFlag('do_package', 'umask', 022) |
371 | bb.data.setVarFlag('do_package_setscene', 'fakeroot', 1, d) | 371 | d.setVarFlag('do_package_setscene', 'fakeroot', 1) |
372 | source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0) | 372 | source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0) |
373 | if not source_mirror_fetch: | 373 | if not source_mirror_fetch: |
374 | need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1) | 374 | need_host = d.getVar('COMPATIBLE_HOST', 1) |
375 | if need_host: | 375 | if need_host: |
376 | import re | 376 | import re |
377 | this_host = bb.data.getVar('HOST_SYS', d, 1) | 377 | this_host = d.getVar('HOST_SYS', 1) |
378 | if not re.match(need_host, this_host): | 378 | if not re.match(need_host, this_host): |
379 | raise bb.parse.SkipPackage("incompatible with host %s" % this_host) | 379 | raise bb.parse.SkipPackage("incompatible with host %s" % this_host) |
380 | 380 | ||
381 | need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1) | 381 | need_machine = d.getVar('COMPATIBLE_MACHINE', 1) |
382 | if need_machine: | 382 | if need_machine: |
383 | import re | 383 | import re |
384 | this_machine = bb.data.getVar('MACHINE', d, 1) | 384 | this_machine = d.getVar('MACHINE', 1) |
385 | if this_machine and not re.match(need_machine, this_machine): | 385 | if this_machine and not re.match(need_machine, this_machine): |
386 | this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1) | 386 | this_soc_family = d.getVar('SOC_FAMILY', 1) |
387 | if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family: | 387 | if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family: |
388 | raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine) | 388 | raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine) |
389 | 389 | ||
390 | 390 | ||
391 | dont_want_license = bb.data.getVar('INCOMPATIBLE_LICENSE', d, 1) | 391 | dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1) |
392 | if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"): | 392 | if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"): |
393 | hosttools_whitelist = (bb.data.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, d, 1) or "").split() | 393 | hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split() |
394 | lgplv2_whitelist = (bb.data.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, d, 1) or "").split() | 394 | lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split() |
395 | dont_want_whitelist = (bb.data.getVar('WHITELIST_%s' % dont_want_license, d, 1) or "").split() | 395 | dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split() |
396 | if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist: | 396 | if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist: |
397 | 397 | ||
398 | import re | 398 | import re |
399 | this_license = bb.data.getVar('LICENSE', d, 1) | 399 | this_license = d.getVar('LICENSE', 1) |
400 | if this_license and re.search(dont_want_license, this_license): | 400 | if this_license and re.search(dont_want_license, this_license): |
401 | bb.note("SKIPPING %s because it's %s" % (pn, this_license)) | 401 | bb.note("SKIPPING %s because it's %s" % (pn, this_license)) |
402 | raise bb.parse.SkipPackage("incompatible with license %s" % this_license) | 402 | raise bb.parse.SkipPackage("incompatible with license %s" % this_license) |
403 | 403 | ||
404 | # Git packages should DEPEND on git-native | 404 | # Git packages should DEPEND on git-native |
405 | srcuri = bb.data.getVar('SRC_URI', d, 1) | 405 | srcuri = d.getVar('SRC_URI', 1) |
406 | if "git://" in srcuri: | 406 | if "git://" in srcuri: |
407 | depends = bb.data.getVarFlag('do_fetch', 'depends', d) or "" | 407 | depends = d.getVarFlag('do_fetch', 'depends') or "" |
408 | depends = depends + " git-native:do_populate_sysroot" | 408 | depends = depends + " git-native:do_populate_sysroot" |
409 | bb.data.setVarFlag('do_fetch', 'depends', depends, d) | 409 | d.setVarFlag('do_fetch', 'depends', depends) |
410 | 410 | ||
411 | # Mercurial packages should DEPEND on mercurial-native | 411 | # Mercurial packages should DEPEND on mercurial-native |
412 | elif "hg://" in srcuri: | 412 | elif "hg://" in srcuri: |
413 | depends = bb.data.getVarFlag('do_fetch', 'depends', d) or "" | 413 | depends = d.getVarFlag('do_fetch', 'depends') or "" |
414 | depends = depends + " mercurial-native:do_populate_sysroot" | 414 | depends = depends + " mercurial-native:do_populate_sysroot" |
415 | bb.data.setVarFlag('do_fetch', 'depends', depends, d) | 415 | d.setVarFlag('do_fetch', 'depends', depends) |
416 | 416 | ||
417 | # OSC packages should DEPEND on osc-native | 417 | # OSC packages should DEPEND on osc-native |
418 | elif "osc://" in srcuri: | 418 | elif "osc://" in srcuri: |
419 | depends = bb.data.getVarFlag('do_fetch', 'depends', d) or "" | 419 | depends = d.getVarFlag('do_fetch', 'depends') or "" |
420 | depends = depends + " osc-native:do_populate_sysroot" | 420 | depends = depends + " osc-native:do_populate_sysroot" |
421 | bb.data.setVarFlag('do_fetch', 'depends', depends, d) | 421 | d.setVarFlag('do_fetch', 'depends', depends) |
422 | 422 | ||
423 | # *.xz should depends on xz-native for unpacking | 423 | # *.xz should depends on xz-native for unpacking |
424 | # Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future | 424 | # Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future |
425 | if '.xz' in srcuri: | 425 | if '.xz' in srcuri: |
426 | depends = bb.data.getVarFlag('do_unpack', 'depends', d) or "" | 426 | depends = d.getVarFlag('do_unpack', 'depends') or "" |
427 | depends = depends + " xz-native:do_populate_sysroot" | 427 | depends = depends + " xz-native:do_populate_sysroot" |
428 | bb.data.setVarFlag('do_unpack', 'depends', depends, d) | 428 | d.setVarFlag('do_unpack', 'depends', depends) |
429 | 429 | ||
430 | # unzip-native should already be staged before unpacking ZIP recipes | 430 | # unzip-native should already be staged before unpacking ZIP recipes |
431 | if ".zip" in srcuri: | 431 | if ".zip" in srcuri: |
432 | depends = bb.data.getVarFlag('do_unpack', 'depends', d) or "" | 432 | depends = d.getVarFlag('do_unpack', 'depends') or "" |
433 | depends = depends + " unzip-native:do_populate_sysroot" | 433 | depends = depends + " unzip-native:do_populate_sysroot" |
434 | bb.data.setVarFlag('do_unpack', 'depends', depends, d) | 434 | d.setVarFlag('do_unpack', 'depends', depends) |
435 | 435 | ||
436 | # 'multimachine' handling | 436 | # 'multimachine' handling |
437 | mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1) | 437 | mach_arch = d.getVar('MACHINE_ARCH', 1) |
438 | pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1) | 438 | pkg_arch = d.getVar('PACKAGE_ARCH', 1) |
439 | 439 | ||
440 | if (pkg_arch == mach_arch): | 440 | if (pkg_arch == mach_arch): |
441 | # Already machine specific - nothing further to do | 441 | # Already machine specific - nothing further to do |
@@ -445,7 +445,7 @@ python () { | |||
445 | # We always try to scan SRC_URI for urls with machine overrides | 445 | # We always try to scan SRC_URI for urls with machine overrides |
446 | # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 | 446 | # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 |
447 | # | 447 | # |
448 | override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) | 448 | override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', 1) |
449 | if override != '0': | 449 | if override != '0': |
450 | paths = [] | 450 | paths = [] |
451 | for p in [ "${PF}", "${P}", "${PN}", "files", "" ]: | 451 | for p in [ "${PF}", "${P}", "${PN}", "files", "" ]: |
@@ -461,18 +461,18 @@ python () { | |||
461 | for mp in paths: | 461 | for mp in paths: |
462 | if local.startswith(mp): | 462 | if local.startswith(mp): |
463 | #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch)) | 463 | #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch)) |
464 | bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d) | 464 | d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") |
465 | return | 465 | return |
466 | 466 | ||
467 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 467 | packages = d.getVar('PACKAGES', 1).split() |
468 | for pkg in packages: | 468 | for pkg in packages: |
469 | pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1) | 469 | pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1) |
470 | 470 | ||
471 | # We could look for != PACKAGE_ARCH here but how to choose | 471 | # We could look for != PACKAGE_ARCH here but how to choose |
472 | # if multiple differences are present? | 472 | # if multiple differences are present? |
473 | # Look through PACKAGE_ARCHS for the priority order? | 473 | # Look through PACKAGE_ARCHS for the priority order? |
474 | if pkgarch and pkgarch == mach_arch: | 474 | if pkgarch and pkgarch == mach_arch: |
475 | bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d) | 475 | d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") |
476 | bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True)) | 476 | bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True)) |
477 | } | 477 | } |
478 | 478 | ||
@@ -483,7 +483,7 @@ python do_cleansstate() { | |||
483 | 483 | ||
484 | addtask cleanall after do_cleansstate | 484 | addtask cleanall after do_cleansstate |
485 | python do_cleanall() { | 485 | python do_cleanall() { |
486 | src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split() | 486 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
487 | if len(src_uri) == 0: | 487 | if len(src_uri) == 0: |
488 | return | 488 | return |
489 | 489 | ||
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass index 801bd66d2f..4028d261c8 100644 --- a/meta/classes/bugzilla.bbclass +++ b/meta/classes/bugzilla.bbclass | |||
@@ -109,14 +109,14 @@ python bugzilla_eventhandler() { | |||
109 | return | 109 | return |
110 | 110 | ||
111 | if name == "TaskFailed": | 111 | if name == "TaskFailed": |
112 | xmlrpc = bb.data.getVar("BUGZILLA_XMLRPC", data, True) | 112 | xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) |
113 | user = bb.data.getVar("BUGZILLA_USER", data, True) | 113 | user = data.getVar("BUGZILLA_USER", True) |
114 | passw = bb.data.getVar("BUGZILLA_PASS", data, True) | 114 | passw = data.getVar("BUGZILLA_PASS", True) |
115 | product = bb.data.getVar("BUGZILLA_PRODUCT", data, True) | 115 | product = data.getVar("BUGZILLA_PRODUCT", True) |
116 | compon = bb.data.getVar("BUGZILLA_COMPONENT", data, True) | 116 | compon = data.getVar("BUGZILLA_COMPONENT", True) |
117 | version = bb.data.getVar("BUGZILLA_VERSION", data, True) | 117 | version = data.getVar("BUGZILLA_VERSION", True) |
118 | 118 | ||
119 | proxy = bb.data.getVar('http_proxy', data, True ) | 119 | proxy = data.getVar('http_proxy', True ) |
120 | if (proxy): | 120 | if (proxy): |
121 | import urllib2 | 121 | import urllib2 |
122 | s, u, p, hostport = urllib2._parse_proxy(proxy) | 122 | s, u, p, hostport = urllib2._parse_proxy(proxy) |
@@ -132,14 +132,14 @@ python bugzilla_eventhandler() { | |||
132 | 'component': compon} | 132 | 'component': compon} |
133 | 133 | ||
134 | # evil hack to figure out what is going on | 134 | # evil hack to figure out what is going on |
135 | debug_file = open(os.path.join(bb.data.getVar("TMPDIR", data, True),"..","bugzilla-log"),"a") | 135 | debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a") |
136 | 136 | ||
137 | file = None | 137 | file = None |
138 | bugname = "%(package)s-%(pv)s-autobuild" % { "package" : bb.data.getVar("PN", data, True), | 138 | bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True), |
139 | "pv" : bb.data.getVar("PV", data, True), | 139 | "pv" : data.getVar("PV", True), |
140 | } | 140 | } |
141 | log_file = glob.glob("%s/log.%s.*" % (bb.data.getVar('T', event.data, True), event.task)) | 141 | log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task)) |
142 | text = "The %s step in %s failed at %s for machine %s" % (e.task, bb.data.getVar("PN", data, True), bb.data.getVar('DATETIME', data, True), bb.data.getVar( 'MACHINE', data, True ) ) | 142 | text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) ) |
143 | if len(log_file) != 0: | 143 | if len(log_file) != 0: |
144 | print >> debug_file, "Adding log file %s" % log_file[0] | 144 | print >> debug_file, "Adding log file %s" % log_file[0] |
145 | file = open(log_file[0], 'r') | 145 | file = open(log_file[0], 'r') |
@@ -167,7 +167,7 @@ python bugzilla_eventhandler() { | |||
167 | 167 | ||
168 | if bug_number and log: | 168 | if bug_number and log: |
169 | print >> debug_file, "The bug is known as '%s'" % bug_number | 169 | print >> debug_file, "The bug is known as '%s'" % bug_number |
170 | desc = "Build log for machine %s" % (bb.data.getVar('MACHINE', data, True)) | 170 | desc = "Build log for machine %s" % (data.getVar('MACHINE', True)) |
171 | if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc): | 171 | if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc): |
172 | print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number | 172 | print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number |
173 | else: | 173 | else: |
@@ -181,6 +181,6 @@ python bugzilla_eventhandler() { | |||
181 | 181 | ||
182 | # store bug number for oestats-client | 182 | # store bug number for oestats-client |
183 | if bug_number: | 183 | if bug_number: |
184 | bb.data.setVar('OESTATS_BUG_NUMBER', bug_number, data) | 184 | data.setVar('OESTATS_BUG_NUMBER', bug_number) |
185 | } | 185 | } |
186 | 186 | ||
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass index 96c98d409f..4cd8fe6936 100644 --- a/meta/classes/buildstats.bbclass +++ b/meta/classes/buildstats.bbclass | |||
@@ -21,25 +21,25 @@ def get_cputime(): | |||
21 | return sum(int(field) for field in fields) | 21 | return sum(int(field) for field in fields) |
22 | 22 | ||
23 | def set_bn(e): | 23 | def set_bn(e): |
24 | bn = e.getPkgs()[0] + "-" + bb.data.getVar('MACHINE',e.data, True) | 24 | bn = e.getPkgs()[0] + "-" + e.data.getVar('MACHINE', True) |
25 | try: | 25 | try: |
26 | os.remove(bb.data.getVar('BNFILE', e.data, True)) | 26 | os.remove(e.data.getVar('BNFILE', True)) |
27 | except: | 27 | except: |
28 | pass | 28 | pass |
29 | file = open(bb.data.getVar('BNFILE', e.data, True), "w") | 29 | file = open(e.data.getVar('BNFILE', True), "w") |
30 | file.write(os.path.join(bn, bb.data.getVar('BUILDNAME', e.data, True))) | 30 | file.write(os.path.join(bn, e.data.getVar('BUILDNAME', True))) |
31 | file.close() | 31 | file.close() |
32 | 32 | ||
33 | def get_bn(e): | 33 | def get_bn(e): |
34 | file = open(bb.data.getVar('BNFILE', e.data, True)) | 34 | file = open(e.data.getVar('BNFILE', True)) |
35 | bn = file.readline() | 35 | bn = file.readline() |
36 | file.close() | 36 | file.close() |
37 | return bn | 37 | return bn |
38 | 38 | ||
39 | def set_device(e): | 39 | def set_device(e): |
40 | tmpdir = bb.data.getVar('TMPDIR', e.data, True) | 40 | tmpdir = e.data.getVar('TMPDIR', True) |
41 | try: | 41 | try: |
42 | os.remove(bb.data.getVar('DEVFILE', e.data, True)) | 42 | os.remove(e.data.getVar('DEVFILE', True)) |
43 | except: | 43 | except: |
44 | pass | 44 | pass |
45 | ############################################################################ | 45 | ############################################################################ |
@@ -66,12 +66,12 @@ def set_device(e): | |||
66 | rdev=line.split()[2] | 66 | rdev=line.split()[2] |
67 | else: | 67 | else: |
68 | rdev="NoLogicalDevice" | 68 | rdev="NoLogicalDevice" |
69 | file = open(bb.data.getVar('DEVFILE', e.data, True), "w") | 69 | file = open(e.data.getVar('DEVFILE', True), "w") |
70 | file.write(rdev) | 70 | file.write(rdev) |
71 | file.close() | 71 | file.close() |
72 | 72 | ||
73 | def get_device(e): | 73 | def get_device(e): |
74 | file = open(bb.data.getVar('DEVFILE', e.data, True)) | 74 | file = open(e.data.getVar('DEVFILE', True)) |
75 | device = file.readline() | 75 | device = file.readline() |
76 | file.close() | 76 | file.close() |
77 | return device | 77 | return device |
@@ -126,7 +126,7 @@ def get_timedata(var, data): | |||
126 | 126 | ||
127 | def write_task_data(status, logfile, dev, e): | 127 | def write_task_data(status, logfile, dev, e): |
128 | bn = get_bn(e) | 128 | bn = get_bn(e) |
129 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 129 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
130 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) | 130 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) |
131 | file = open(os.path.join(logfile), "a") | 131 | file = open(os.path.join(logfile), "a") |
132 | timedata = get_timedata("__timedata_task", e.data) | 132 | timedata = get_timedata("__timedata_task", e.data) |
@@ -168,7 +168,7 @@ python run_buildstats () { | |||
168 | # set the buildname | 168 | # set the buildname |
169 | ######################################################################## | 169 | ######################################################################## |
170 | try: | 170 | try: |
171 | bb.mkdirhier(bb.data.getVar('BUILDSTATS_BASE', e.data, True)) | 171 | bb.mkdirhier(e.data.getVar('BUILDSTATS_BASE', True)) |
172 | except: | 172 | except: |
173 | pass | 173 | pass |
174 | set_bn(e) | 174 | set_bn(e) |
@@ -176,7 +176,7 @@ python run_buildstats () { | |||
176 | set_device(e) | 176 | set_device(e) |
177 | device = get_device(e) | 177 | device = get_device(e) |
178 | 178 | ||
179 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 179 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
180 | try: | 180 | try: |
181 | bb.mkdirhier(bsdir) | 181 | bb.mkdirhier(bsdir) |
182 | except: | 182 | except: |
@@ -199,7 +199,7 @@ python run_buildstats () { | |||
199 | elif isinstance(e, bb.event.BuildCompleted): | 199 | elif isinstance(e, bb.event.BuildCompleted): |
200 | bn = get_bn(e) | 200 | bn = get_bn(e) |
201 | device = get_device(e) | 201 | device = get_device(e) |
202 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 202 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
203 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) | 203 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) |
204 | build_time = os.path.join(bsdir, "build_stats") | 204 | build_time = os.path.join(bsdir, "build_stats") |
205 | file = open(build_time, "a") | 205 | file = open(build_time, "a") |
@@ -224,7 +224,7 @@ python run_buildstats () { | |||
224 | if isinstance(e, bb.build.TaskStarted): | 224 | if isinstance(e, bb.build.TaskStarted): |
225 | bn = get_bn(e) | 225 | bn = get_bn(e) |
226 | device = get_device(e) | 226 | device = get_device(e) |
227 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 227 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
228 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) | 228 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) |
229 | if device != "NoLogicalDevice": | 229 | if device != "NoLogicalDevice": |
230 | set_diskdata("__diskdata_task", device, e.data) | 230 | set_diskdata("__diskdata_task", device, e.data) |
@@ -242,14 +242,14 @@ python run_buildstats () { | |||
242 | elif isinstance(e, bb.build.TaskSucceeded): | 242 | elif isinstance(e, bb.build.TaskSucceeded): |
243 | bn = get_bn(e) | 243 | bn = get_bn(e) |
244 | device = get_device(e) | 244 | device = get_device(e) |
245 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 245 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
246 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) | 246 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) |
247 | write_task_data("passed", os.path.join(taskdir, e.task), device, e) | 247 | write_task_data("passed", os.path.join(taskdir, e.task), device, e) |
248 | if e.task == "do_rootfs": | 248 | if e.task == "do_rootfs": |
249 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 249 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
250 | bs=os.path.join(bsdir, "build_stats") | 250 | bs=os.path.join(bsdir, "build_stats") |
251 | file = open(bs,"a") | 251 | file = open(bs,"a") |
252 | rootfs = bb.data.getVar('IMAGE_ROOTFS', e.data, True) | 252 | rootfs = e.data.getVar('IMAGE_ROOTFS', True) |
253 | rootfs_size = subprocess.Popen(["du", "-sh", rootfs], stdout=subprocess.PIPE).stdout.read() | 253 | rootfs_size = subprocess.Popen(["du", "-sh", rootfs], stdout=subprocess.PIPE).stdout.read() |
254 | file.write("Uncompressed Rootfs size: %s" % rootfs_size) | 254 | file.write("Uncompressed Rootfs size: %s" % rootfs_size) |
255 | file.close() | 255 | file.close() |
@@ -257,7 +257,7 @@ python run_buildstats () { | |||
257 | elif isinstance(e, bb.build.TaskFailed): | 257 | elif isinstance(e, bb.build.TaskFailed): |
258 | bn = get_bn(e) | 258 | bn = get_bn(e) |
259 | device = get_device(e) | 259 | device = get_device(e) |
260 | bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn) | 260 | bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn) |
261 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) | 261 | taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data)) |
262 | write_task_data("failed", os.path.join(taskdir, e.task), device, e) | 262 | write_task_data("failed", os.path.join(taskdir, e.task), device, e) |
263 | ######################################################################## | 263 | ######################################################################## |
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass index b5dbdaea81..79582ca76c 100644 --- a/meta/classes/cpan-base.bbclass +++ b/meta/classes/cpan-base.bbclass | |||
@@ -28,7 +28,7 @@ def get_perl_version(d): | |||
28 | 28 | ||
29 | # Determine where the library directories are | 29 | # Determine where the library directories are |
30 | def perl_get_libdirs(d): | 30 | def perl_get_libdirs(d): |
31 | libdir = bb.data.getVar('libdir', d, 1) | 31 | libdir = d.getVar('libdir', 1) |
32 | if is_target(d) == "no": | 32 | if is_target(d) == "no": |
33 | libdir += '/perl-native' | 33 | libdir += '/perl-native' |
34 | libdir += '/perl' | 34 | libdir += '/perl' |
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass index cc503a424e..981332c4fa 100644 --- a/meta/classes/cpan_build.bbclass +++ b/meta/classes/cpan_build.bbclass | |||
@@ -10,9 +10,9 @@ inherit cpan-base | |||
10 | # libmodule-build-perl) | 10 | # libmodule-build-perl) |
11 | # | 11 | # |
12 | def cpan_build_dep_prepend(d): | 12 | def cpan_build_dep_prepend(d): |
13 | if bb.data.getVar('CPAN_BUILD_DEPS', d, 1): | 13 | if d.getVar('CPAN_BUILD_DEPS', 1): |
14 | return '' | 14 | return '' |
15 | pn = bb.data.getVar('PN', d, 1) | 15 | pn = d.getVar('PN', 1) |
16 | if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']: | 16 | if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']: |
17 | return '' | 17 | return '' |
18 | return 'libmodule-build-perl-native ' | 18 | return 'libmodule-build-perl-native ' |
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass index 601175db45..6f5bcd0ad4 100644 --- a/meta/classes/cross-canadian.bbclass +++ b/meta/classes/cross-canadian.bbclass | |||
@@ -16,7 +16,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S | |||
16 | # | 16 | # |
17 | PACKAGE_ARCH = "${SDK_ARCH}-nativesdk" | 17 | PACKAGE_ARCH = "${SDK_ARCH}-nativesdk" |
18 | python () { | 18 | python () { |
19 | archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split() | 19 | archs = d.getVar('PACKAGE_ARCHS', True).split() |
20 | sdkarchs = [] | 20 | sdkarchs = [] |
21 | for arch in archs: | 21 | for arch in archs: |
22 | sdkarchs.append(arch + '-nativesdk') | 22 | sdkarchs.append(arch + '-nativesdk') |
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass index 554525dc84..025abcfad0 100644 --- a/meta/classes/debian.bbclass +++ b/meta/classes/debian.bbclass | |||
@@ -22,8 +22,8 @@ python () { | |||
22 | python debian_package_name_hook () { | 22 | python debian_package_name_hook () { |
23 | import glob, copy, stat, errno, re | 23 | import glob, copy, stat, errno, re |
24 | 24 | ||
25 | pkgdest = bb.data.getVar('PKGDEST', d, 1) | 25 | pkgdest = d.getVar('PKGDEST', 1) |
26 | packages = bb.data.getVar('PACKAGES', d, 1) | 26 | packages = d.getVar('PACKAGES', 1) |
27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") | 27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") |
28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") | 28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") |
29 | so_re = re.compile("lib.*\.so") | 29 | so_re = re.compile("lib.*\.so") |
@@ -60,7 +60,7 @@ python debian_package_name_hook () { | |||
60 | for f in files: | 60 | for f in files: |
61 | if so_re.match(f): | 61 | if so_re.match(f): |
62 | fp = os.path.join(root, f) | 62 | fp = os.path.join(root, f) |
63 | cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null" | 63 | cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null" |
64 | fd = os.popen(cmd) | 64 | fd = os.popen(cmd) |
65 | lines = fd.readlines() | 65 | lines = fd.readlines() |
66 | fd.close() | 66 | fd.close() |
@@ -74,7 +74,7 @@ python debian_package_name_hook () { | |||
74 | if len(sonames) == 1: | 74 | if len(sonames) == 1: |
75 | soname = sonames[0] | 75 | soname = sonames[0] |
76 | elif len(sonames) > 1: | 76 | elif len(sonames) > 1: |
77 | lead = bb.data.getVar('LEAD_SONAME', d, 1) | 77 | lead = d.getVar('LEAD_SONAME', 1) |
78 | if lead: | 78 | if lead: |
79 | r = re.compile(lead) | 79 | r = re.compile(lead) |
80 | filtered = [] | 80 | filtered = [] |
@@ -95,21 +95,21 @@ python debian_package_name_hook () { | |||
95 | if soname_result: | 95 | if soname_result: |
96 | (pkgname, devname) = soname_result | 96 | (pkgname, devname) = soname_result |
97 | for pkg in packages.split(): | 97 | for pkg in packages.split(): |
98 | if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)): | 98 | if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)): |
99 | continue | 99 | continue |
100 | debian_pn = bb.data.getVar('DEBIANNAME_' + pkg, d) | 100 | debian_pn = d.getVar('DEBIANNAME_' + pkg) |
101 | if debian_pn: | 101 | if debian_pn: |
102 | newpkg = debian_pn | 102 | newpkg = debian_pn |
103 | elif pkg == orig_pkg: | 103 | elif pkg == orig_pkg: |
104 | newpkg = pkgname | 104 | newpkg = pkgname |
105 | else: | 105 | else: |
106 | newpkg = pkg.replace(orig_pkg, devname, 1) | 106 | newpkg = pkg.replace(orig_pkg, devname, 1) |
107 | mlpre=bb.data.getVar('MLPREFIX', d, True) | 107 | mlpre=d.getVar('MLPREFIX', True) |
108 | if mlpre: | 108 | if mlpre: |
109 | if not newpkg.find(mlpre) == 0: | 109 | if not newpkg.find(mlpre) == 0: |
110 | newpkg = mlpre + newpkg | 110 | newpkg = mlpre + newpkg |
111 | if newpkg != pkg: | 111 | if newpkg != pkg: |
112 | bb.data.setVar('PKG_' + pkg, newpkg, d) | 112 | d.setVar('PKG_' + pkg, newpkg) |
113 | 113 | ||
114 | # reversed sort is needed when some package is substring of another | 114 | # reversed sort is needed when some package is substring of another |
115 | # ie in ncurses we get without reverse sort: | 115 | # ie in ncurses we get without reverse sort: |
@@ -117,7 +117,7 @@ python debian_package_name_hook () { | |||
117 | # and later | 117 | # and later |
118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw | 118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw |
119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 | 119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 |
120 | for pkg in sorted((bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split(), reverse=True): | 120 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True): |
121 | auto_libname(packages, pkg) | 121 | auto_libname(packages, pkg) |
122 | } | 122 | } |
123 | 123 | ||
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass index ce7b931b13..687247a649 100644 --- a/meta/classes/distrodata.bbclass +++ b/meta/classes/distrodata.bbclass | |||
@@ -19,87 +19,87 @@ addtask distrodata_np | |||
19 | do_distrodata_np[nostamp] = "1" | 19 | do_distrodata_np[nostamp] = "1" |
20 | python do_distrodata_np() { | 20 | python do_distrodata_np() { |
21 | localdata = bb.data.createCopy(d) | 21 | localdata = bb.data.createCopy(d) |
22 | pn = bb.data.getVar("PN", d, True) | 22 | pn = d.getVar("PN", True) |
23 | bb.note("Package Name: %s" % pn) | 23 | bb.note("Package Name: %s" % pn) |
24 | 24 | ||
25 | import oe.distro_check as dist_check | 25 | import oe.distro_check as dist_check |
26 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 26 | tmpdir = d.getVar('TMPDIR', True) |
27 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 27 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
28 | datetime = bb.data.getVar('DATETIME', localdata, True) | 28 | datetime = localdata.getVar('DATETIME', True) |
29 | dist_check.update_distro_data(distro_check_dir, datetime) | 29 | dist_check.update_distro_data(distro_check_dir, datetime) |
30 | 30 | ||
31 | if pn.find("-native") != -1: | 31 | if pn.find("-native") != -1: |
32 | pnstripped = pn.split("-native") | 32 | pnstripped = pn.split("-native") |
33 | bb.note("Native Split: %s" % pnstripped) | 33 | bb.note("Native Split: %s" % pnstripped) |
34 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 34 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
35 | bb.data.update_data(localdata) | 35 | bb.data.update_data(localdata) |
36 | 36 | ||
37 | if pn.find("-nativesdk") != -1: | 37 | if pn.find("-nativesdk") != -1: |
38 | pnstripped = pn.split("-nativesdk") | 38 | pnstripped = pn.split("-nativesdk") |
39 | bb.note("Native Split: %s" % pnstripped) | 39 | bb.note("Native Split: %s" % pnstripped) |
40 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 40 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
41 | bb.data.update_data(localdata) | 41 | bb.data.update_data(localdata) |
42 | 42 | ||
43 | if pn.find("-cross") != -1: | 43 | if pn.find("-cross") != -1: |
44 | pnstripped = pn.split("-cross") | 44 | pnstripped = pn.split("-cross") |
45 | bb.note("cross Split: %s" % pnstripped) | 45 | bb.note("cross Split: %s" % pnstripped) |
46 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 46 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
47 | bb.data.update_data(localdata) | 47 | bb.data.update_data(localdata) |
48 | 48 | ||
49 | if pn.find("-crosssdk") != -1: | 49 | if pn.find("-crosssdk") != -1: |
50 | pnstripped = pn.split("-crosssdk") | 50 | pnstripped = pn.split("-crosssdk") |
51 | bb.note("cross Split: %s" % pnstripped) | 51 | bb.note("cross Split: %s" % pnstripped) |
52 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 52 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
53 | bb.data.update_data(localdata) | 53 | bb.data.update_data(localdata) |
54 | 54 | ||
55 | if pn.find("-initial") != -1: | 55 | if pn.find("-initial") != -1: |
56 | pnstripped = pn.split("-initial") | 56 | pnstripped = pn.split("-initial") |
57 | bb.note("initial Split: %s" % pnstripped) | 57 | bb.note("initial Split: %s" % pnstripped) |
58 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 58 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
59 | bb.data.update_data(localdata) | 59 | bb.data.update_data(localdata) |
60 | 60 | ||
61 | """generate package information from .bb file""" | 61 | """generate package information from .bb file""" |
62 | pname = bb.data.getVar('PN', localdata, True) | 62 | pname = localdata.getVar('PN', True) |
63 | pcurver = bb.data.getVar('PV', localdata, True) | 63 | pcurver = localdata.getVar('PV', True) |
64 | pdesc = bb.data.getVar('DESCRIPTION', localdata, True) | 64 | pdesc = localdata.getVar('DESCRIPTION', True) |
65 | if pdesc is not None: | 65 | if pdesc is not None: |
66 | pdesc = pdesc.replace(',','') | 66 | pdesc = pdesc.replace(',','') |
67 | pdesc = pdesc.replace('\n','') | 67 | pdesc = pdesc.replace('\n','') |
68 | 68 | ||
69 | pgrp = bb.data.getVar('SECTION', localdata, True) | 69 | pgrp = localdata.getVar('SECTION', True) |
70 | plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_') | 70 | plicense = localdata.getVar('LICENSE', True).replace(',','_') |
71 | if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True): | 71 | if localdata.getVar('LIC_FILES_CHKSUM', True): |
72 | pchksum="1" | 72 | pchksum="1" |
73 | else: | 73 | else: |
74 | pchksum="0" | 74 | pchksum="0" |
75 | 75 | ||
76 | if bb.data.getVar('RECIPE_STATUS', localdata, True): | 76 | if localdata.getVar('RECIPE_STATUS', True): |
77 | hasrstatus="1" | 77 | hasrstatus="1" |
78 | else: | 78 | else: |
79 | hasrstatus="0" | 79 | hasrstatus="0" |
80 | 80 | ||
81 | rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True) | 81 | rstatus = localdata.getVar('RECIPE_STATUS', True) |
82 | if rstatus is not None: | 82 | if rstatus is not None: |
83 | rstatus = rstatus.replace(',','') | 83 | rstatus = rstatus.replace(',','') |
84 | 84 | ||
85 | pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True) | 85 | pupver = localdata.getVar('RECIPE_LATEST_VERSION', True) |
86 | if pcurver == pupver: | 86 | if pcurver == pupver: |
87 | vermatch="1" | 87 | vermatch="1" |
88 | else: | 88 | else: |
89 | vermatch="0" | 89 | vermatch="0" |
90 | noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True) | 90 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) |
91 | if noupdate_reason is None: | 91 | if noupdate_reason is None: |
92 | noupdate="0" | 92 | noupdate="0" |
93 | else: | 93 | else: |
94 | noupdate="1" | 94 | noupdate="1" |
95 | noupdate_reason = noupdate_reason.replace(',','') | 95 | noupdate_reason = noupdate_reason.replace(',','') |
96 | 96 | ||
97 | ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True) | 97 | ris = localdata.getVar('RECIPE_INTEL_SECTION', True) |
98 | maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True) | 98 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) |
99 | rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True) | 99 | rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True) |
100 | rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True) | 100 | rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True) |
101 | dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True) | 101 | dc = localdata.getVar('DEPENDENCY_CHECK', True) |
102 | rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True) | 102 | rc = localdata.getVar('RECIPE_COMMENTS', True) |
103 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) | 103 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) |
104 | 104 | ||
105 | bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s\n" % \ | 105 | bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s\n" % \ |
@@ -113,81 +113,81 @@ python do_distrodata_np() { | |||
113 | addtask distrodata | 113 | addtask distrodata |
114 | do_distrodata[nostamp] = "1" | 114 | do_distrodata[nostamp] = "1" |
115 | python do_distrodata() { | 115 | python do_distrodata() { |
116 | logpath = bb.data.getVar('LOG_DIR', d, True) | 116 | logpath = d.getVar('LOG_DIR', True) |
117 | bb.utils.mkdirhier(logpath) | 117 | bb.utils.mkdirhier(logpath) |
118 | logfile = os.path.join(logpath, "distrodata.csv") | 118 | logfile = os.path.join(logpath, "distrodata.csv") |
119 | 119 | ||
120 | import oe.distro_check as dist_check | 120 | import oe.distro_check as dist_check |
121 | localdata = bb.data.createCopy(d) | 121 | localdata = bb.data.createCopy(d) |
122 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 122 | tmpdir = d.getVar('TMPDIR', True) |
123 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 123 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
124 | datetime = bb.data.getVar('DATETIME', localdata, True) | 124 | datetime = localdata.getVar('DATETIME', True) |
125 | dist_check.update_distro_data(distro_check_dir, datetime) | 125 | dist_check.update_distro_data(distro_check_dir, datetime) |
126 | 126 | ||
127 | pn = bb.data.getVar("PN", d, True) | 127 | pn = d.getVar("PN", True) |
128 | bb.note("Package Name: %s" % pn) | 128 | bb.note("Package Name: %s" % pn) |
129 | 129 | ||
130 | if pn.find("-native") != -1: | 130 | if pn.find("-native") != -1: |
131 | pnstripped = pn.split("-native") | 131 | pnstripped = pn.split("-native") |
132 | bb.note("Native Split: %s" % pnstripped) | 132 | bb.note("Native Split: %s" % pnstripped) |
133 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 133 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
134 | bb.data.update_data(localdata) | 134 | bb.data.update_data(localdata) |
135 | 135 | ||
136 | if pn.find("-cross") != -1: | 136 | if pn.find("-cross") != -1: |
137 | pnstripped = pn.split("-cross") | 137 | pnstripped = pn.split("-cross") |
138 | bb.note("cross Split: %s" % pnstripped) | 138 | bb.note("cross Split: %s" % pnstripped) |
139 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 139 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
140 | bb.data.update_data(localdata) | 140 | bb.data.update_data(localdata) |
141 | 141 | ||
142 | if pn.find("-initial") != -1: | 142 | if pn.find("-initial") != -1: |
143 | pnstripped = pn.split("-initial") | 143 | pnstripped = pn.split("-initial") |
144 | bb.note("initial Split: %s" % pnstripped) | 144 | bb.note("initial Split: %s" % pnstripped) |
145 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 145 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
146 | bb.data.update_data(localdata) | 146 | bb.data.update_data(localdata) |
147 | 147 | ||
148 | """generate package information from .bb file""" | 148 | """generate package information from .bb file""" |
149 | pname = bb.data.getVar('PN', localdata, True) | 149 | pname = localdata.getVar('PN', True) |
150 | pcurver = bb.data.getVar('PV', localdata, True) | 150 | pcurver = localdata.getVar('PV', True) |
151 | pdesc = bb.data.getVar('DESCRIPTION', localdata, True) | 151 | pdesc = localdata.getVar('DESCRIPTION', True) |
152 | if pdesc is not None: | 152 | if pdesc is not None: |
153 | pdesc = pdesc.replace(',','') | 153 | pdesc = pdesc.replace(',','') |
154 | pdesc = pdesc.replace('\n','') | 154 | pdesc = pdesc.replace('\n','') |
155 | 155 | ||
156 | pgrp = bb.data.getVar('SECTION', localdata, True) | 156 | pgrp = localdata.getVar('SECTION', True) |
157 | plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_') | 157 | plicense = localdata.getVar('LICENSE', True).replace(',','_') |
158 | if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True): | 158 | if localdata.getVar('LIC_FILES_CHKSUM', True): |
159 | pchksum="1" | 159 | pchksum="1" |
160 | else: | 160 | else: |
161 | pchksum="0" | 161 | pchksum="0" |
162 | 162 | ||
163 | if bb.data.getVar('RECIPE_STATUS', localdata, True): | 163 | if localdata.getVar('RECIPE_STATUS', True): |
164 | hasrstatus="1" | 164 | hasrstatus="1" |
165 | else: | 165 | else: |
166 | hasrstatus="0" | 166 | hasrstatus="0" |
167 | 167 | ||
168 | rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True) | 168 | rstatus = localdata.getVar('RECIPE_STATUS', True) |
169 | if rstatus is not None: | 169 | if rstatus is not None: |
170 | rstatus = rstatus.replace(',','') | 170 | rstatus = rstatus.replace(',','') |
171 | 171 | ||
172 | pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True) | 172 | pupver = localdata.getVar('RECIPE_LATEST_VERSION', True) |
173 | if pcurver == pupver: | 173 | if pcurver == pupver: |
174 | vermatch="1" | 174 | vermatch="1" |
175 | else: | 175 | else: |
176 | vermatch="0" | 176 | vermatch="0" |
177 | 177 | ||
178 | noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True) | 178 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) |
179 | if noupdate_reason is None: | 179 | if noupdate_reason is None: |
180 | noupdate="0" | 180 | noupdate="0" |
181 | else: | 181 | else: |
182 | noupdate="1" | 182 | noupdate="1" |
183 | noupdate_reason = noupdate_reason.replace(',','') | 183 | noupdate_reason = noupdate_reason.replace(',','') |
184 | 184 | ||
185 | ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True) | 185 | ris = localdata.getVar('RECIPE_INTEL_SECTION', True) |
186 | maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True) | 186 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) |
187 | rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True) | 187 | rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True) |
188 | rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True) | 188 | rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True) |
189 | dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True) | 189 | dc = localdata.getVar('DEPENDENCY_CHECK', True) |
190 | rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True) | 190 | rc = localdata.getVar('RECIPE_COMMENTS', True) |
191 | # do the comparison | 191 | # do the comparison |
192 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) | 192 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) |
193 | 193 | ||
@@ -298,7 +298,7 @@ python do_checkpkg() { | |||
298 | Clear internal url cache as it's a temporary check. Not doing so will have | 298 | Clear internal url cache as it's a temporary check. Not doing so will have |
299 | bitbake check url multiple times when looping through a single url | 299 | bitbake check url multiple times when looping through a single url |
300 | """ | 300 | """ |
301 | fn = bb.data.getVar('FILE', d, True) | 301 | fn = d.getVar('FILE', True) |
302 | bb.fetch2.urldata_cache[fn] = {} | 302 | bb.fetch2.urldata_cache[fn] = {} |
303 | 303 | ||
304 | """ | 304 | """ |
@@ -329,7 +329,7 @@ python do_checkpkg() { | |||
329 | Return new version if success, or else error in "Errxxxx" style | 329 | Return new version if success, or else error in "Errxxxx" style |
330 | """ | 330 | """ |
331 | def check_new_dir(url, curver, d): | 331 | def check_new_dir(url, curver, d): |
332 | pn = bb.data.getVar('PN', d, True) | 332 | pn = d.getVar('PN', True) |
333 | f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn) | 333 | f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn) |
334 | status = internal_fetch_wget(url, d, f) | 334 | status = internal_fetch_wget(url, d, f) |
335 | fhtml = f.read() | 335 | fhtml = f.read() |
@@ -372,7 +372,7 @@ python do_checkpkg() { | |||
372 | 372 | ||
373 | f.close() | 373 | f.close() |
374 | if status != "ErrHostNoDir" and re.match("Err", status): | 374 | if status != "ErrHostNoDir" and re.match("Err", status): |
375 | logpath = bb.data.getVar('LOG_DIR', d, 1) | 375 | logpath = d.getVar('LOG_DIR', 1) |
376 | os.system("cp %s %s/" % (f.name, logpath)) | 376 | os.system("cp %s %s/" % (f.name, logpath)) |
377 | os.unlink(f.name) | 377 | os.unlink(f.name) |
378 | return status | 378 | return status |
@@ -388,7 +388,7 @@ python do_checkpkg() { | |||
388 | """possible to have no version in pkg name, such as spectrum-fw""" | 388 | """possible to have no version in pkg name, such as spectrum-fw""" |
389 | if not re.search("\d+", curname): | 389 | if not re.search("\d+", curname): |
390 | return pcurver | 390 | return pcurver |
391 | pn = bb.data.getVar('PN', d, True) | 391 | pn = d.getVar('PN', True) |
392 | f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn) | 392 | f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn) |
393 | status = internal_fetch_wget(url, d, f) | 393 | status = internal_fetch_wget(url, d, f) |
394 | fhtml = f.read() | 394 | fhtml = f.read() |
@@ -431,55 +431,55 @@ python do_checkpkg() { | |||
431 | f.close() | 431 | f.close() |
432 | """if host hasn't directory information, no need to save tmp file""" | 432 | """if host hasn't directory information, no need to save tmp file""" |
433 | if status != "ErrHostNoDir" and re.match("Err", status): | 433 | if status != "ErrHostNoDir" and re.match("Err", status): |
434 | logpath = bb.data.getVar('LOG_DIR', d, True) | 434 | logpath = d.getVar('LOG_DIR', True) |
435 | os.system("cp %s %s/" % (f.name, logpath)) | 435 | os.system("cp %s %s/" % (f.name, logpath)) |
436 | os.unlink(f.name) | 436 | os.unlink(f.name) |
437 | return status | 437 | return status |
438 | 438 | ||
439 | """first check whether a uri is provided""" | 439 | """first check whether a uri is provided""" |
440 | src_uri = bb.data.getVar('SRC_URI', d, True) | 440 | src_uri = d.getVar('SRC_URI', True) |
441 | if not src_uri: | 441 | if not src_uri: |
442 | return | 442 | return |
443 | 443 | ||
444 | """initialize log files.""" | 444 | """initialize log files.""" |
445 | logpath = bb.data.getVar('LOG_DIR', d, True) | 445 | logpath = d.getVar('LOG_DIR', True) |
446 | bb.utils.mkdirhier(logpath) | 446 | bb.utils.mkdirhier(logpath) |
447 | logfile = os.path.join(logpath, "checkpkg.csv") | 447 | logfile = os.path.join(logpath, "checkpkg.csv") |
448 | 448 | ||
449 | """generate package information from .bb file""" | 449 | """generate package information from .bb file""" |
450 | pname = bb.data.getVar('PN', d, True) | 450 | pname = d.getVar('PN', True) |
451 | 451 | ||
452 | if pname.find("-native") != -1: | 452 | if pname.find("-native") != -1: |
453 | pnstripped = pname.split("-native") | 453 | pnstripped = pname.split("-native") |
454 | bb.note("Native Split: %s" % pnstripped) | 454 | bb.note("Native Split: %s" % pnstripped) |
455 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 455 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
456 | bb.data.update_data(localdata) | 456 | bb.data.update_data(localdata) |
457 | 457 | ||
458 | if pname.find("-cross") != -1: | 458 | if pname.find("-cross") != -1: |
459 | pnstripped = pname.split("-cross") | 459 | pnstripped = pname.split("-cross") |
460 | bb.note("cross Split: %s" % pnstripped) | 460 | bb.note("cross Split: %s" % pnstripped) |
461 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 461 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
462 | bb.data.update_data(localdata) | 462 | bb.data.update_data(localdata) |
463 | 463 | ||
464 | if pname.find("-initial") != -1: | 464 | if pname.find("-initial") != -1: |
465 | pnstripped = pname.split("-initial") | 465 | pnstripped = pname.split("-initial") |
466 | bb.note("initial Split: %s" % pnstripped) | 466 | bb.note("initial Split: %s" % pnstripped) |
467 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 467 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
468 | bb.data.update_data(localdata) | 468 | bb.data.update_data(localdata) |
469 | 469 | ||
470 | pdesc = bb.data.getVar('DESCRIPTION', localdata, True) | 470 | pdesc = localdata.getVar('DESCRIPTION', True) |
471 | pgrp = bb.data.getVar('SECTION', localdata, True) | 471 | pgrp = localdata.getVar('SECTION', True) |
472 | pversion = bb.data.getVar('PV', localdata, True) | 472 | pversion = localdata.getVar('PV', True) |
473 | plicense = bb.data.getVar('LICENSE', localdata, True) | 473 | plicense = localdata.getVar('LICENSE', True) |
474 | psection = bb.data.getVar('SECTION', localdata, True) | 474 | psection = localdata.getVar('SECTION', True) |
475 | phome = bb.data.getVar('HOMEPAGE', localdata, True) | 475 | phome = localdata.getVar('HOMEPAGE', True) |
476 | prelease = bb.data.getVar('PR', localdata, True) | 476 | prelease = localdata.getVar('PR', True) |
477 | ppriority = bb.data.getVar('PRIORITY', localdata, True) | 477 | ppriority = localdata.getVar('PRIORITY', True) |
478 | pdepends = bb.data.getVar('DEPENDS', localdata, True) | 478 | pdepends = localdata.getVar('DEPENDS', True) |
479 | pbugtracker = bb.data.getVar('BUGTRACKER', localdata, True) | 479 | pbugtracker = localdata.getVar('BUGTRACKER', True) |
480 | ppe = bb.data.getVar('PE', localdata, True) | 480 | ppe = localdata.getVar('PE', True) |
481 | psrcuri = bb.data.getVar('SRC_URI', localdata, True) | 481 | psrcuri = localdata.getVar('SRC_URI', True) |
482 | maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True) | 482 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) |
483 | 483 | ||
484 | found = 0 | 484 | found = 0 |
485 | for uri in src_uri.split(): | 485 | for uri in src_uri.split(): |
@@ -497,9 +497,9 @@ python do_checkpkg() { | |||
497 | 497 | ||
498 | (type, host, path, user, pswd, parm) = bb.decodeurl(uri) | 498 | (type, host, path, user, pswd, parm) = bb.decodeurl(uri) |
499 | if type in ['http', 'https', 'ftp']: | 499 | if type in ['http', 'https', 'ftp']: |
500 | pcurver = bb.data.getVar('PV', d, True) | 500 | pcurver = d.getVar('PV', True) |
501 | else: | 501 | else: |
502 | pcurver = bb.data.getVar("SRCREV", d, True) | 502 | pcurver = d.getVar("SRCREV", True) |
503 | 503 | ||
504 | if type in ['http', 'https', 'ftp']: | 504 | if type in ['http', 'https', 'ftp']: |
505 | newver = pcurver | 505 | newver = pcurver |
@@ -639,7 +639,7 @@ python do_checkpkg() { | |||
639 | pstatus += ":%s%s" % (host, path) | 639 | pstatus += ":%s%s" % (host, path) |
640 | 640 | ||
641 | """Read from manual distro tracking fields as alternative""" | 641 | """Read from manual distro tracking fields as alternative""" |
642 | pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True) | 642 | pmver = d.getVar("RECIPE_LATEST_VERSION", True) |
643 | if not pmver: | 643 | if not pmver: |
644 | pmver = "N/A" | 644 | pmver = "N/A" |
645 | pmstatus = "ErrNoRecipeData" | 645 | pmstatus = "ErrNoRecipeData" |
@@ -688,12 +688,12 @@ python do_distro_check() { | |||
688 | 688 | ||
689 | localdata = bb.data.createCopy(d) | 689 | localdata = bb.data.createCopy(d) |
690 | bb.data.update_data(localdata) | 690 | bb.data.update_data(localdata) |
691 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 691 | tmpdir = d.getVar('TMPDIR', True) |
692 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 692 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
693 | logpath = bb.data.getVar('LOG_DIR', d, True) | 693 | logpath = d.getVar('LOG_DIR', True) |
694 | bb.utils.mkdirhier(logpath) | 694 | bb.utils.mkdirhier(logpath) |
695 | result_file = os.path.join(logpath, "distrocheck.csv") | 695 | result_file = os.path.join(logpath, "distrocheck.csv") |
696 | datetime = bb.data.getVar('DATETIME', localdata, True) | 696 | datetime = localdata.getVar('DATETIME', True) |
697 | dc.update_distro_data(distro_check_dir, datetime) | 697 | dc.update_distro_data(distro_check_dir, datetime) |
698 | 698 | ||
699 | # do the comparison | 699 | # do the comparison |
@@ -734,12 +734,12 @@ python do_checklicense() { | |||
734 | import os | 734 | import os |
735 | import bb | 735 | import bb |
736 | import shutil | 736 | import shutil |
737 | logpath = bb.data.getVar('LOG_DIR', d, True) | 737 | logpath = d.getVar('LOG_DIR', True) |
738 | bb.utils.mkdirhier(logpath) | 738 | bb.utils.mkdirhier(logpath) |
739 | pn = bb.data.getVar('PN', d, True) | 739 | pn = d.getVar('PN', True) |
740 | logfile = os.path.join(logpath, "missinglicense.csv") | 740 | logfile = os.path.join(logpath, "missinglicense.csv") |
741 | generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True) | 741 | generic_directory = d.getVar('COMMON_LICENSE_DIR', True) |
742 | license_types = bb.data.getVar('LICENSE', d, True) | 742 | license_types = d.getVar('LICENSE', True) |
743 | for license_type in ((license_types.replace('+', '').replace('|', '&') | 743 | for license_type in ((license_types.replace('+', '').replace('|', '&') |
744 | .replace('(', '').replace(')', '').replace(';', '') | 744 | .replace('(', '').replace(')', '').replace(';', '') |
745 | .replace(',', '').replace(" ", "").split("&"))): | 745 | .replace(',', '').replace(" ", "").split("&"))): |
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass index e84b0fcc5a..e7d0bb8071 100644 --- a/meta/classes/distutils-base.bbclass +++ b/meta/classes/distutils-base.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}" | 1 | DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}" |
2 | RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}" | 2 | RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}" |
3 | 3 | ||
4 | inherit distutils-common-base | 4 | inherit distutils-common-base |
diff --git a/meta/classes/distutils-native-base.bbclass b/meta/classes/distutils-native-base.bbclass index 2703fe0740..47367d796b 100644 --- a/meta/classes/distutils-native-base.bbclass +++ b/meta/classes/distutils-native-base.bbclass | |||
@@ -1,3 +1,3 @@ | |||
1 | DEPENDS += "${@["python-native", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}" | 1 | DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}" |
2 | 2 | ||
3 | inherit distutils-common-base | 3 | inherit distutils-common-base |
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass index 67986787d7..bffc92ea7a 100644 --- a/meta/classes/gconf.bbclass +++ b/meta/classes/gconf.bbclass | |||
@@ -27,8 +27,8 @@ done | |||
27 | 27 | ||
28 | python populate_packages_append () { | 28 | python populate_packages_append () { |
29 | import re | 29 | import re |
30 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 30 | packages = d.getVar('PACKAGES', 1).split() |
31 | pkgdest = bb.data.getVar('PKGDEST', d, 1) | 31 | pkgdest = d.getVar('PKGDEST', 1) |
32 | 32 | ||
33 | for pkg in packages: | 33 | for pkg in packages: |
34 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) | 34 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) |
@@ -41,15 +41,15 @@ python populate_packages_append () { | |||
41 | if schemas != []: | 41 | if schemas != []: |
42 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 42 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
43 | bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d) | 43 | bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d) |
44 | postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1) | 44 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) |
45 | if not postinst: | 45 | if not postinst: |
46 | postinst = '#!/bin/sh\n' | 46 | postinst = '#!/bin/sh\n' |
47 | postinst += bb.data.getVar('gconf_postinst', d, 1) | 47 | postinst += d.getVar('gconf_postinst', 1) |
48 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 48 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
49 | prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1) | 49 | prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1) |
50 | if not prerm: | 50 | if not prerm: |
51 | prerm = '#!/bin/sh\n' | 51 | prerm = '#!/bin/sh\n' |
52 | prerm += bb.data.getVar('gconf_prerm', d, 1) | 52 | prerm += d.getVar('gconf_prerm', 1) |
53 | bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d) | 53 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
54 | 54 | ||
55 | } | 55 | } |
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass index d0840d59b6..eac3061b0a 100644 --- a/meta/classes/gtk-icon-cache.bbclass +++ b/meta/classes/gtk-icon-cache.bbclass | |||
@@ -28,31 +28,31 @@ done | |||
28 | } | 28 | } |
29 | 29 | ||
30 | python populate_packages_append () { | 30 | python populate_packages_append () { |
31 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 31 | packages = d.getVar('PACKAGES', 1).split() |
32 | pkgdest = bb.data.getVar('PKGDEST', d, 1) | 32 | pkgdest = d.getVar('PKGDEST', 1) |
33 | 33 | ||
34 | for pkg in packages: | 34 | for pkg in packages: |
35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, bb.data.getVar('datadir', d, 1)) | 35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1)) |
36 | if not os.path.exists(icon_dir): | 36 | if not os.path.exists(icon_dir): |
37 | continue | 37 | continue |
38 | 38 | ||
39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) | 39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) |
40 | rdepends = bb.data.getVar('RDEPENDS_%s' % pkg, d, 1) | 40 | rdepends = d.getVar('RDEPENDS_%s' % pkg, 1) |
41 | rdepends = rdepends + ' ' + bb.data.getVar('MLPREFIX', d) + "hicolor-icon-theme" | 41 | rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" |
42 | bb.data.setVar('RDEPENDS_%s' % pkg, rdepends, d) | 42 | d.setVar('RDEPENDS_%s' % pkg, rdepends) |
43 | 43 | ||
44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) | 44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) |
45 | 45 | ||
46 | postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1) | 46 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) |
47 | if not postinst: | 47 | if not postinst: |
48 | postinst = '#!/bin/sh\n' | 48 | postinst = '#!/bin/sh\n' |
49 | postinst += bb.data.getVar('gtk_icon_cache_postinst', d, 1) | 49 | postinst += d.getVar('gtk_icon_cache_postinst', 1) |
50 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 50 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
51 | 51 | ||
52 | postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1) | 52 | postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) |
53 | if not postrm: | 53 | if not postrm: |
54 | postrm = '#!/bin/sh\n' | 54 | postrm = '#!/bin/sh\n' |
55 | postrm += bb.data.getVar('gtk_icon_cache_postrm', d, 1) | 55 | postrm += d.getVar('gtk_icon_cache_postrm', 1) |
56 | bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) | 56 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
57 | } | 57 | } |
58 | 58 | ||
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass index f8e9d8859b..7e3676af35 100644 --- a/meta/classes/icecc.bbclass +++ b/meta/classes/icecc.bbclass | |||
@@ -32,7 +32,7 @@ def icecc_dep_prepend(d): | |||
32 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not | 32 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not |
33 | # we need that built is the responsibility of the patch function / class, not | 33 | # we need that built is the responsibility of the patch function / class, not |
34 | # the application. | 34 | # the application. |
35 | if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d): | 35 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): |
36 | return "icecc-create-env-native" | 36 | return "icecc-create-env-native" |
37 | return "" | 37 | return "" |
38 | 38 | ||
@@ -54,7 +54,7 @@ def create_path(compilers, bb, d): | |||
54 | staging += "-kernel" | 54 | staging += "-kernel" |
55 | 55 | ||
56 | #check if the icecc path is set by the user | 56 | #check if the icecc path is set by the user |
57 | icecc = bb.data.getVar('ICECC_PATH', d) or os.popen("which icecc").read()[:-1] | 57 | icecc = d.getVar('ICECC_PATH') or os.popen("which icecc").read()[:-1] |
58 | 58 | ||
59 | # Create the dir if necessary | 59 | # Create the dir if necessary |
60 | try: | 60 | try: |
@@ -81,7 +81,7 @@ def use_icc(bb,d): | |||
81 | package_tmp = bb.data.expand('${PN}', d) | 81 | package_tmp = bb.data.expand('${PN}', d) |
82 | 82 | ||
83 | system_class_blacklist = [ "none" ] | 83 | system_class_blacklist = [ "none" ] |
84 | user_class_blacklist = (bb.data.getVar('ICECC_USER_CLASS_BL', d) or "none").split() | 84 | user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL') or "none").split() |
85 | package_class_blacklist = system_class_blacklist + user_class_blacklist | 85 | package_class_blacklist = system_class_blacklist + user_class_blacklist |
86 | 86 | ||
87 | for black in package_class_blacklist: | 87 | for black in package_class_blacklist: |
@@ -92,7 +92,7 @@ def use_icc(bb,d): | |||
92 | #"system" package blacklist contains a list of packages that can not distribute compile tasks | 92 | #"system" package blacklist contains a list of packages that can not distribute compile tasks |
93 | #for one reason or the other | 93 | #for one reason or the other |
94 | system_package_blacklist = [ "uclibc", "glibc", "gcc", "bind", "u-boot", "dhcp-forwarder", "enchant", "connman", "orbit2" ] | 94 | system_package_blacklist = [ "uclibc", "glibc", "gcc", "bind", "u-boot", "dhcp-forwarder", "enchant", "connman", "orbit2" ] |
95 | user_package_blacklist = (bb.data.getVar('ICECC_USER_PACKAGE_BL', d) or "").split() | 95 | user_package_blacklist = (d.getVar('ICECC_USER_PACKAGE_BL') or "").split() |
96 | package_blacklist = system_package_blacklist + user_package_blacklist | 96 | package_blacklist = system_package_blacklist + user_package_blacklist |
97 | 97 | ||
98 | for black in package_blacklist: | 98 | for black in package_blacklist: |
@@ -100,7 +100,7 @@ def use_icc(bb,d): | |||
100 | #bb.note(package_tmp, ' found in blacklist, disable icecc') | 100 | #bb.note(package_tmp, ' found in blacklist, disable icecc') |
101 | return "no" | 101 | return "no" |
102 | 102 | ||
103 | if bb.data.getVar('PARALLEL_MAKE', d) == "": | 103 | if d.getVar('PARALLEL_MAKE') == "": |
104 | bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc") | 104 | bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc") |
105 | return "no" | 105 | return "no" |
106 | 106 | ||
@@ -119,8 +119,8 @@ def icc_version(bb, d): | |||
119 | if use_icc(bb, d) == "no": | 119 | if use_icc(bb, d) == "no": |
120 | return "" | 120 | return "" |
121 | 121 | ||
122 | parallel = bb.data.getVar('ICECC_PARALLEL_MAKE', d) or "" | 122 | parallel = d.getVar('ICECC_PARALLEL_MAKE') or "" |
123 | bb.data.setVar("PARALLEL_MAKE", parallel, d) | 123 | d.setVar("PARALLEL_MAKE", parallel) |
124 | 124 | ||
125 | if icc_is_native(bb, d): | 125 | if icc_is_native(bb, d): |
126 | archive_name = "local-host-env" | 126 | archive_name = "local-host-env" |
@@ -130,7 +130,7 @@ def icc_version(bb, d): | |||
130 | prefix = bb.data.expand('${HOST_PREFIX}' , d) | 130 | prefix = bb.data.expand('${HOST_PREFIX}' , d) |
131 | distro = bb.data.expand('${DISTRO}', d) | 131 | distro = bb.data.expand('${DISTRO}', d) |
132 | target_sys = bb.data.expand('${TARGET_SYS}', d) | 132 | target_sys = bb.data.expand('${TARGET_SYS}', d) |
133 | float = bb.data.getVar('TARGET_FPU', d) or "hard" | 133 | float = d.getVar('TARGET_FPU') or "hard" |
134 | archive_name = prefix + distro + "-" + target_sys + "-" + float | 134 | archive_name = prefix + distro + "-" + target_sys + "-" + float |
135 | if icc_is_kernel(bb, d): | 135 | if icc_is_kernel(bb, d): |
136 | archive_name += "-kernel" | 136 | archive_name += "-kernel" |
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass index b939ec4b45..23183b3ec3 100644 --- a/meta/classes/image-swab.bbclass +++ b/meta/classes/image-swab.bbclass | |||
@@ -51,13 +51,13 @@ python() { | |||
51 | # and cross packages which aren't swabber-native or one of its dependencies | 51 | # and cross packages which aren't swabber-native or one of its dependencies |
52 | # I have ignored them for now... | 52 | # I have ignored them for now... |
53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): | 53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): |
54 | deps = (bb.data.getVarFlag('do_setscene', 'depends', d) or "").split() | 54 | deps = (d.getVarFlag('do_setscene', 'depends') or "").split() |
55 | deps.append('strace-native:do_populate_sysroot') | 55 | deps.append('strace-native:do_populate_sysroot') |
56 | bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d) | 56 | bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d) |
57 | logdir = bb.data.expand("${TRACE_LOGDIR}", d) | 57 | logdir = bb.data.expand("${TRACE_LOGDIR}", d) |
58 | bb.utils.mkdirhier(logdir) | 58 | bb.utils.mkdirhier(logdir) |
59 | else: | 59 | else: |
60 | bb.data.setVar('STRACEFUNC', '', d) | 60 | d.setVar('STRACEFUNC', '') |
61 | } | 61 | } |
62 | 62 | ||
63 | STRACEPID = "${@os.getpid()}" | 63 | STRACEPID = "${@os.getpid()}" |
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass index 14726d2537..4642fa63e2 100644 --- a/meta/classes/image.bbclass +++ b/meta/classes/image.bbclass | |||
@@ -74,17 +74,17 @@ IMAGE_TYPE = ${@base_contains("IMAGE_FSTYPES", "live", "live", "empty", d)} | |||
74 | inherit image-${IMAGE_TYPE} | 74 | inherit image-${IMAGE_TYPE} |
75 | 75 | ||
76 | python () { | 76 | python () { |
77 | deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or "" | 77 | deps = d.getVarFlag('do_rootfs', 'depends') or "" |
78 | for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split(): | 78 | for type in (d.getVar('IMAGE_FSTYPES', True) or "").split(): |
79 | for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []): | 79 | for dep in ((d.getVar('IMAGE_DEPENDS_%s' % type) or "").split() or []): |
80 | deps += " %s:do_populate_sysroot" % dep | 80 | deps += " %s:do_populate_sysroot" % dep |
81 | for dep in (bb.data.getVar('EXTRA_IMAGEDEPENDS', d, True) or "").split(): | 81 | for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split(): |
82 | deps += " %s:do_populate_sysroot" % dep | 82 | deps += " %s:do_populate_sysroot" % dep |
83 | bb.data.setVarFlag('do_rootfs', 'depends', deps, d) | 83 | d.setVarFlag('do_rootfs', 'depends', deps) |
84 | 84 | ||
85 | # If we don't do this we try and run the mapping hooks while parsing which is slow | 85 | # If we don't do this we try and run the mapping hooks while parsing which is slow |
86 | # bitbake should really provide something to let us know this... | 86 | # bitbake should really provide something to let us know this... |
87 | if bb.data.getVar('BB_WORKERCONTEXT', d, True) is not None: | 87 | if d.getVar('BB_WORKERCONTEXT', True) is not None: |
88 | runtime_mapping_rename("PACKAGE_INSTALL", d) | 88 | runtime_mapping_rename("PACKAGE_INSTALL", d) |
89 | runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", d) | 89 | runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", d) |
90 | } | 90 | } |
@@ -98,15 +98,15 @@ python () { | |||
98 | # is searched for in the BBPATH (same as the old version.) | 98 | # is searched for in the BBPATH (same as the old version.) |
99 | # | 99 | # |
100 | def get_devtable_list(d): | 100 | def get_devtable_list(d): |
101 | devtable = bb.data.getVar('IMAGE_DEVICE_TABLE', d, 1) | 101 | devtable = d.getVar('IMAGE_DEVICE_TABLE', 1) |
102 | if devtable != None: | 102 | if devtable != None: |
103 | return devtable | 103 | return devtable |
104 | str = "" | 104 | str = "" |
105 | devtables = bb.data.getVar('IMAGE_DEVICE_TABLES', d, 1) | 105 | devtables = d.getVar('IMAGE_DEVICE_TABLES', 1) |
106 | if devtables == None: | 106 | if devtables == None: |
107 | devtables = 'files/device_table-minimal.txt' | 107 | devtables = 'files/device_table-minimal.txt' |
108 | for devtable in devtables.split(): | 108 | for devtable in devtables.split(): |
109 | str += " %s" % bb.which(bb.data.getVar('BBPATH', d, 1), devtable) | 109 | str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable) |
110 | return str | 110 | return str |
111 | 111 | ||
112 | IMAGE_CLASSES ?= "image_types" | 112 | IMAGE_CLASSES ?= "image_types" |
@@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= "" | |||
119 | # some default locales | 119 | # some default locales |
120 | IMAGE_LINGUAS ?= "de-de fr-fr en-gb" | 120 | IMAGE_LINGUAS ?= "de-de fr-fr en-gb" |
121 | 121 | ||
122 | LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}" | 122 | LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}" |
123 | 123 | ||
124 | do_rootfs[nostamp] = "1" | 124 | do_rootfs[nostamp] = "1" |
125 | do_rootfs[dirs] = "${TOPDIR}" | 125 | do_rootfs[dirs] = "${TOPDIR}" |
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass index 9549a9e3e0..ea0d9a56d7 100644 --- a/meta/classes/image_types.bbclass +++ b/meta/classes/image_types.bbclass | |||
@@ -1,8 +1,8 @@ | |||
1 | def get_imagecmds(d): | 1 | def get_imagecmds(d): |
2 | cmds = "\n" | 2 | cmds = "\n" |
3 | old_overrides = bb.data.getVar('OVERRIDES', d, 0) | 3 | old_overrides = d.getVar('OVERRIDES', 0) |
4 | 4 | ||
5 | types = bb.data.getVar('IMAGE_FSTYPES', d, True).split() | 5 | types = d.getVar('IMAGE_FSTYPES', True).split() |
6 | # Live images will be processed via inheriting bbclass and | 6 | # Live images will be processed via inheriting bbclass and |
7 | # does not get processed here. | 7 | # does not get processed here. |
8 | # live images also depend on ext3 so ensure its present | 8 | # live images also depend on ext3 so ensure its present |
diff --git a/meta/classes/imagetest-qemu.bbclass b/meta/classes/imagetest-qemu.bbclass index 4ea86c04fe..de142bc0fb 100644 --- a/meta/classes/imagetest-qemu.bbclass +++ b/meta/classes/imagetest-qemu.bbclass | |||
@@ -35,12 +35,12 @@ def qemuimagetest_main(d): | |||
35 | 35 | ||
36 | casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') | 36 | casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') |
37 | resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') | 37 | resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') |
38 | machine = bb.data.getVar('MACHINE', d, 1) | 38 | machine = d.getVar('MACHINE', 1) |
39 | pname = bb.data.getVar('PN', d, 1) | 39 | pname = d.getVar('PN', 1) |
40 | 40 | ||
41 | """function to save test cases running status""" | 41 | """function to save test cases running status""" |
42 | def teststatus(test, status, index, length): | 42 | def teststatus(test, status, index, length): |
43 | test_status = bb.data.getVar('TEST_STATUS', d, 1) | 43 | test_status = d.getVar('TEST_STATUS', 1) |
44 | if not os.path.exists(test_status): | 44 | if not os.path.exists(test_status): |
45 | raise bb.build.FuncFailed("No test status file existing under TEST_TMP") | 45 | raise bb.build.FuncFailed("No test status file existing under TEST_TMP") |
46 | 46 | ||
@@ -51,30 +51,30 @@ def qemuimagetest_main(d): | |||
51 | 51 | ||
52 | """funtion to run each case under scenario""" | 52 | """funtion to run each case under scenario""" |
53 | def runtest(scen, case, fulltestpath): | 53 | def runtest(scen, case, fulltestpath): |
54 | resultpath = bb.data.getVar('TEST_RESULT', d, 1) | 54 | resultpath = d.getVar('TEST_RESULT', 1) |
55 | tmppath = bb.data.getVar('TEST_TMP', d, 1) | 55 | tmppath = d.getVar('TEST_TMP', 1) |
56 | 56 | ||
57 | """initialize log file for testcase""" | 57 | """initialize log file for testcase""" |
58 | logpath = bb.data.getVar('TEST_LOG', d, 1) | 58 | logpath = d.getVar('TEST_LOG', 1) |
59 | bb.utils.mkdirhier("%s/%s" % (logpath, scen)) | 59 | bb.utils.mkdirhier("%s/%s" % (logpath, scen)) |
60 | caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, bb.data.getVar('DATETIME', d, 1))) | 60 | caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1))) |
61 | os.system("touch %s" % caselog) | 61 | os.system("touch %s" % caselog) |
62 | 62 | ||
63 | """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" | 63 | """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" |
64 | os.environ["PATH"] = bb.data.getVar("PATH", d, True) | 64 | os.environ["PATH"] = d.getVar("PATH", True) |
65 | os.environ["TEST_TMP"] = tmppath | 65 | os.environ["TEST_TMP"] = tmppath |
66 | os.environ["TEST_RESULT"] = resultpath | 66 | os.environ["TEST_RESULT"] = resultpath |
67 | os.environ["DEPLOY_DIR"] = bb.data.getVar("DEPLOY_DIR", d, True) | 67 | os.environ["DEPLOY_DIR"] = d.getVar("DEPLOY_DIR", True) |
68 | os.environ["QEMUARCH"] = machine | 68 | os.environ["QEMUARCH"] = machine |
69 | os.environ["QEMUTARGET"] = pname | 69 | os.environ["QEMUTARGET"] = pname |
70 | os.environ["DISPLAY"] = bb.data.getVar("DISPLAY", d, True) | 70 | os.environ["DISPLAY"] = d.getVar("DISPLAY", True) |
71 | os.environ["COREBASE"] = bb.data.getVar("COREBASE", d, True) | 71 | os.environ["COREBASE"] = d.getVar("COREBASE", True) |
72 | os.environ["TOPDIR"] = bb.data.getVar("TOPDIR", d, True) | 72 | os.environ["TOPDIR"] = d.getVar("TOPDIR", True) |
73 | os.environ["OE_TMPDIR"] = bb.data.getVar("TMPDIR", d, True) | 73 | os.environ["OE_TMPDIR"] = d.getVar("TMPDIR", True) |
74 | os.environ["TEST_STATUS"] = bb.data.getVar("TEST_STATUS", d, True) | 74 | os.environ["TEST_STATUS"] = d.getVar("TEST_STATUS", True) |
75 | os.environ["TARGET_IPSAVE"] = bb.data.getVar("TARGET_IPSAVE", d, True) | 75 | os.environ["TARGET_IPSAVE"] = d.getVar("TARGET_IPSAVE", True) |
76 | os.environ["TEST_SERIALIZE"] = bb.data.getVar("TEST_SERIALIZE", d, True) | 76 | os.environ["TEST_SERIALIZE"] = d.getVar("TEST_SERIALIZE", True) |
77 | os.environ["SDK_NAME"] = bb.data.getVar("SDK_NAME", d, True) | 77 | os.environ["SDK_NAME"] = d.getVar("SDK_NAME", True) |
78 | 78 | ||
79 | """run Test Case""" | 79 | """run Test Case""" |
80 | bb.note("Run %s test in scenario %s" % (case, scen)) | 80 | bb.note("Run %s test in scenario %s" % (case, scen)) |
@@ -92,13 +92,13 @@ def qemuimagetest_main(d): | |||
92 | if n: | 92 | if n: |
93 | item = n.group('scen') | 93 | item = n.group('scen') |
94 | casefile = n.group('case') | 94 | casefile = n.group('case') |
95 | for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split(): | 95 | for dir in d.getVar("QEMUIMAGETESTS", True).split(): |
96 | fulltestcase = os.path.join(dir, item, casefile) | 96 | fulltestcase = os.path.join(dir, item, casefile) |
97 | if not os.path.isfile(fulltestcase): | 97 | if not os.path.isfile(fulltestcase): |
98 | raise bb.build.FuncFailed("Testcase %s not found" % fulltestcase) | 98 | raise bb.build.FuncFailed("Testcase %s not found" % fulltestcase) |
99 | list.append((item, casefile, fulltestcase)) | 99 | list.append((item, casefile, fulltestcase)) |
100 | else: | 100 | else: |
101 | for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split(): | 101 | for dir in d.getVar("QEMUIMAGETESTS", True).split(): |
102 | scenlist = os.path.join(dir, "scenario", machine, pname) | 102 | scenlist = os.path.join(dir, "scenario", machine, pname) |
103 | if not os.path.isfile(scenlist): | 103 | if not os.path.isfile(scenlist): |
104 | raise bb.build.FuncFailed("No scenario list file named %s found" % scenlist) | 104 | raise bb.build.FuncFailed("No scenario list file named %s found" % scenlist) |
@@ -118,7 +118,7 @@ def qemuimagetest_main(d): | |||
118 | 118 | ||
119 | """Clean tmp folder for testing""" | 119 | """Clean tmp folder for testing""" |
120 | def clean_tmp(): | 120 | def clean_tmp(): |
121 | tmppath = bb.data.getVar('TEST_TMP', d, 1) | 121 | tmppath = d.getVar('TEST_TMP', 1) |
122 | 122 | ||
123 | if os.path.isdir(tmppath): | 123 | if os.path.isdir(tmppath): |
124 | for f in os.listdir(tmppath): | 124 | for f in os.listdir(tmppath): |
@@ -132,28 +132,28 @@ def qemuimagetest_main(d): | |||
132 | clean_tmp() | 132 | clean_tmp() |
133 | 133 | ||
134 | """check testcase folder and create test log folder""" | 134 | """check testcase folder and create test log folder""" |
135 | testpath = bb.data.getVar('TEST_DIR', d, 1) | 135 | testpath = d.getVar('TEST_DIR', 1) |
136 | bb.utils.mkdirhier(testpath) | 136 | bb.utils.mkdirhier(testpath) |
137 | 137 | ||
138 | logpath = bb.data.getVar('TEST_LOG', d, 1) | 138 | logpath = d.getVar('TEST_LOG', 1) |
139 | bb.utils.mkdirhier(logpath) | 139 | bb.utils.mkdirhier(logpath) |
140 | 140 | ||
141 | tmppath = bb.data.getVar('TEST_TMP', d, 1) | 141 | tmppath = d.getVar('TEST_TMP', 1) |
142 | bb.utils.mkdirhier(tmppath) | 142 | bb.utils.mkdirhier(tmppath) |
143 | 143 | ||
144 | """initialize test status file""" | 144 | """initialize test status file""" |
145 | test_status = bb.data.getVar('TEST_STATUS', d, 1) | 145 | test_status = d.getVar('TEST_STATUS', 1) |
146 | if os.path.exists(test_status): | 146 | if os.path.exists(test_status): |
147 | os.remove(test_status) | 147 | os.remove(test_status) |
148 | os.system("touch %s" % test_status) | 148 | os.system("touch %s" % test_status) |
149 | 149 | ||
150 | """initialize result file""" | 150 | """initialize result file""" |
151 | resultpath = bb.data.getVar('TEST_RESULT', d, 1) | 151 | resultpath = d.getVar('TEST_RESULT', 1) |
152 | bb.utils.mkdirhier(resultpath) | 152 | bb.utils.mkdirhier(resultpath) |
153 | resultfile = os.path.join(resultpath, "testresult.%s" % bb.data.getVar('DATETIME', d, 1)) | 153 | resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1)) |
154 | sresultfile = os.path.join(resultpath, "testresult.log") | 154 | sresultfile = os.path.join(resultpath, "testresult.log") |
155 | 155 | ||
156 | machine = bb.data.getVar('MACHINE', d, 1) | 156 | machine = d.getVar('MACHINE', 1) |
157 | 157 | ||
158 | if os.path.exists(sresultfile): | 158 | if os.path.exists(sresultfile): |
159 | os.remove(sresultfile) | 159 | os.remove(sresultfile) |
@@ -165,7 +165,7 @@ def qemuimagetest_main(d): | |||
165 | f.close() | 165 | f.close() |
166 | 166 | ||
167 | """generate pre-defined testcase list""" | 167 | """generate pre-defined testcase list""" |
168 | testlist = bb.data.getVar('TEST_SCEN', d, 1) | 168 | testlist = d.getVar('TEST_SCEN', 1) |
169 | fulllist = generate_list(testlist) | 169 | fulllist = generate_list(testlist) |
170 | 170 | ||
171 | """Begin testing""" | 171 | """Begin testing""" |
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 017f7bedc6..a65f3ee865 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
@@ -105,7 +105,7 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch la2 pkgconfig la perms" | |||
105 | 105 | ||
106 | def package_qa_clean_path(path,d): | 106 | def package_qa_clean_path(path,d): |
107 | """ Remove the common prefix from the path. In this case it is the TMPDIR""" | 107 | """ Remove the common prefix from the path. In this case it is the TMPDIR""" |
108 | return path.replace(bb.data.getVar('TMPDIR',d,True),"") | 108 | return path.replace(d.getVar('TMPDIR',True),"") |
109 | 109 | ||
110 | def package_qa_write_error(error, d): | 110 | def package_qa_write_error(error, d): |
111 | logfile = d.getVar('QA_LOGFILE', True) | 111 | logfile = d.getVar('QA_LOGFILE', True) |
@@ -132,13 +132,13 @@ def package_qa_check_rpath(file,name, d, elf, messages): | |||
132 | if not elf: | 132 | if not elf: |
133 | return | 133 | return |
134 | 134 | ||
135 | scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf') | 135 | scanelf = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'scanelf') |
136 | bad_dirs = [bb.data.getVar('TMPDIR', d, True) + "/work", bb.data.getVar('STAGING_DIR_TARGET', d, True)] | 136 | bad_dirs = [d.getVar('TMPDIR', True) + "/work", d.getVar('STAGING_DIR_TARGET', True)] |
137 | bad_dir_test = bb.data.getVar('TMPDIR', d, True) | 137 | bad_dir_test = d.getVar('TMPDIR', True) |
138 | if not os.path.exists(scanelf): | 138 | if not os.path.exists(scanelf): |
139 | bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found") | 139 | bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found") |
140 | 140 | ||
141 | if not bad_dirs[0] in bb.data.getVar('WORKDIR', d, True): | 141 | if not bad_dirs[0] in d.getVar('WORKDIR', True): |
142 | bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check") | 142 | bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check") |
143 | 143 | ||
144 | output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file)) | 144 | output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file)) |
@@ -156,11 +156,11 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages): | |||
156 | if not elf: | 156 | if not elf: |
157 | return | 157 | return |
158 | 158 | ||
159 | objdump = bb.data.getVar('OBJDUMP', d, True) | 159 | objdump = d.getVar('OBJDUMP', True) |
160 | env_path = bb.data.getVar('PATH', d, True) | 160 | env_path = d.getVar('PATH', True) |
161 | 161 | ||
162 | libdir = bb.data.getVar("libdir", d, True) | 162 | libdir = d.getVar("libdir", True) |
163 | base_libdir = bb.data.getVar("base_libdir", d, True) | 163 | base_libdir = d.getVar("base_libdir", True) |
164 | 164 | ||
165 | import re | 165 | import re |
166 | rpath_re = re.compile("\s+RPATH\s+(.*)") | 166 | rpath_re = re.compile("\s+RPATH\s+(.*)") |
@@ -209,8 +209,8 @@ def package_qa_check_arch(path,name,d, elf, messages): | |||
209 | if not elf: | 209 | if not elf: |
210 | return | 210 | return |
211 | 211 | ||
212 | target_os = bb.data.getVar('TARGET_OS', d, True) | 212 | target_os = d.getVar('TARGET_OS', True) |
213 | target_arch = bb.data.getVar('TARGET_ARCH', d, True) | 213 | target_arch = d.getVar('TARGET_ARCH', True) |
214 | 214 | ||
215 | # FIXME: Cross package confuse this check, so just skip them | 215 | # FIXME: Cross package confuse this check, so just skip them |
216 | for s in ['cross', 'nativesdk', 'cross-canadian']: | 216 | for s in ['cross', 'nativesdk', 'cross-canadian']: |
@@ -243,7 +243,7 @@ def package_qa_check_desktop(path, name, d, elf, messages): | |||
243 | Run all desktop files through desktop-file-validate. | 243 | Run all desktop files through desktop-file-validate. |
244 | """ | 244 | """ |
245 | if path.endswith(".desktop"): | 245 | if path.endswith(".desktop"): |
246 | desktop_file_validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'desktop-file-validate') | 246 | desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate') |
247 | output = os.popen("%s %s" % (desktop_file_validate, path)) | 247 | output = os.popen("%s %s" % (desktop_file_validate, path)) |
248 | # This only produces output on errors | 248 | # This only produces output on errors |
249 | for l in output: | 249 | for l in output: |
@@ -261,14 +261,14 @@ def package_qa_hash_style(path, name, d, elf, messages): | |||
261 | if os.path.islink(path): | 261 | if os.path.islink(path): |
262 | return | 262 | return |
263 | 263 | ||
264 | gnu_hash = "--hash-style=gnu" in bb.data.getVar('LDFLAGS', d, True) | 264 | gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True) |
265 | if not gnu_hash: | 265 | if not gnu_hash: |
266 | gnu_hash = "--hash-style=both" in bb.data.getVar('LDFLAGS', d, True) | 266 | gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True) |
267 | if not gnu_hash: | 267 | if not gnu_hash: |
268 | return | 268 | return |
269 | 269 | ||
270 | objdump = bb.data.getVar('OBJDUMP', d, True) | 270 | objdump = d.getVar('OBJDUMP', True) |
271 | env_path = bb.data.getVar('PATH', d, True) | 271 | env_path = d.getVar('PATH', True) |
272 | 272 | ||
273 | sane = False | 273 | sane = False |
274 | has_syms = False | 274 | has_syms = False |
@@ -299,7 +299,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages): | |||
299 | if os.path.islink(path): | 299 | if os.path.islink(path): |
300 | return | 300 | return |
301 | 301 | ||
302 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 302 | tmpdir = d.getVar('TMPDIR', True) |
303 | file_content = open(path).read() | 303 | file_content = open(path).read() |
304 | if tmpdir in file_content: | 304 | if tmpdir in file_content: |
305 | messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d)) | 305 | messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d)) |
@@ -311,9 +311,9 @@ def package_qa_check_license(workdir, d): | |||
311 | import tempfile | 311 | import tempfile |
312 | sane = True | 312 | sane = True |
313 | 313 | ||
314 | lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True) | 314 | lic_files = d.getVar('LIC_FILES_CHKSUM', True) |
315 | lic = bb.data.getVar('LICENSE', d, True) | 315 | lic = d.getVar('LICENSE', True) |
316 | pn = bb.data.getVar('PN', d, True) | 316 | pn = d.getVar('PN', True) |
317 | 317 | ||
318 | if lic == "CLOSED": | 318 | if lic == "CLOSED": |
319 | return True | 319 | return True |
@@ -324,7 +324,7 @@ def package_qa_check_license(workdir, d): | |||
324 | bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)") | 324 | bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)") |
325 | return False | 325 | return False |
326 | 326 | ||
327 | srcdir = bb.data.getVar('S', d, True) | 327 | srcdir = d.getVar('S', True) |
328 | 328 | ||
329 | for url in lic_files.split(): | 329 | for url in lic_files.split(): |
330 | (type, host, path, user, pswd, parm) = bb.decodeurl(url) | 330 | (type, host, path, user, pswd, parm) = bb.decodeurl(url) |
@@ -384,7 +384,7 @@ def package_qa_check_staged(path,d): | |||
384 | """ | 384 | """ |
385 | 385 | ||
386 | sane = True | 386 | sane = True |
387 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 387 | tmpdir = d.getVar('TMPDIR', True) |
388 | workdir = os.path.join(tmpdir, "work") | 388 | workdir = os.path.join(tmpdir, "work") |
389 | 389 | ||
390 | installed = "installed=yes" | 390 | installed = "installed=yes" |
@@ -417,8 +417,8 @@ def package_qa_walk(path, warnfuncs, errorfuncs, skip, package, d): | |||
417 | import oe.qa | 417 | import oe.qa |
418 | 418 | ||
419 | #if this will throw an exception, then fix the dict above | 419 | #if this will throw an exception, then fix the dict above |
420 | target_os = bb.data.getVar('TARGET_OS', d, True) | 420 | target_os = d.getVar('TARGET_OS', True) |
421 | target_arch = bb.data.getVar('TARGET_ARCH', d, True) | 421 | target_arch = d.getVar('TARGET_ARCH', True) |
422 | 422 | ||
423 | warnings = [] | 423 | warnings = [] |
424 | errors = [] | 424 | errors = [] |
@@ -457,19 +457,19 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d): | |||
457 | localdata = bb.data.createCopy(d) | 457 | localdata = bb.data.createCopy(d) |
458 | root = "%s/%s" % (pkgdest, pkg) | 458 | root = "%s/%s" % (pkgdest, pkg) |
459 | 459 | ||
460 | bb.data.setVar('ROOT', '', localdata) | 460 | localdata.setVar('ROOT', '') |
461 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | 461 | localdata.setVar('ROOT_%s' % pkg, root) |
462 | pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True) | 462 | pkgname = localdata.getVar('PKG_%s' % pkg, True) |
463 | if not pkgname: | 463 | if not pkgname: |
464 | pkgname = pkg | 464 | pkgname = pkg |
465 | bb.data.setVar('PKG', pkgname, localdata) | 465 | localdata.setVar('PKG', pkgname) |
466 | 466 | ||
467 | bb.data.setVar('OVERRIDES', pkg, localdata) | 467 | localdata.setVar('OVERRIDES', pkg) |
468 | 468 | ||
469 | bb.data.update_data(localdata) | 469 | bb.data.update_data(localdata) |
470 | 470 | ||
471 | # Now check the RDEPENDS | 471 | # Now check the RDEPENDS |
472 | rdepends = bb.utils.explode_deps(bb.data.getVar('RDEPENDS', localdata, True) or "") | 472 | rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "") |
473 | 473 | ||
474 | 474 | ||
475 | # Now do the sanity check!!! | 475 | # Now do the sanity check!!! |
@@ -487,8 +487,8 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d): | |||
487 | python do_package_qa () { | 487 | python do_package_qa () { |
488 | bb.note("DO PACKAGE QA") | 488 | bb.note("DO PACKAGE QA") |
489 | 489 | ||
490 | logdir = bb.data.getVar('T', d, True) | 490 | logdir = d.getVar('T', True) |
491 | pkg = bb.data.getVar('PN', d, True) | 491 | pkg = d.getVar('PN', True) |
492 | 492 | ||
493 | # Check the compile log for host contamination | 493 | # Check the compile log for host contamination |
494 | compilelog = os.path.join(logdir,"log.do_compile") | 494 | compilelog = os.path.join(logdir,"log.do_compile") |
@@ -508,8 +508,8 @@ python do_package_qa () { | |||
508 | (pkg, installlog)) | 508 | (pkg, installlog)) |
509 | 509 | ||
510 | # Scan the packages... | 510 | # Scan the packages... |
511 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 511 | pkgdest = d.getVar('PKGDEST', True) |
512 | packages = bb.data.getVar('PACKAGES',d, True) | 512 | packages = d.getVar('PACKAGES', True) |
513 | 513 | ||
514 | # no packages should be scanned | 514 | # no packages should be scanned |
515 | if not packages: | 515 | if not packages: |
@@ -521,7 +521,7 @@ python do_package_qa () { | |||
521 | walk_sane = True | 521 | walk_sane = True |
522 | rdepends_sane = True | 522 | rdepends_sane = True |
523 | for package in packages.split(): | 523 | for package in packages.split(): |
524 | skip = (bb.data.getVar('INSANE_SKIP_' + package, d, True) or "").split() | 524 | skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split() |
525 | if skip: | 525 | if skip: |
526 | bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) | 526 | bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) |
527 | warnchecks = [] | 527 | warnchecks = [] |
@@ -560,7 +560,7 @@ python do_qa_staging() { | |||
560 | 560 | ||
561 | python do_qa_configure() { | 561 | python do_qa_configure() { |
562 | configs = [] | 562 | configs = [] |
563 | workdir = bb.data.getVar('WORKDIR', d, True) | 563 | workdir = d.getVar('WORKDIR', True) |
564 | bb.note("Checking autotools environment for common misconfiguration") | 564 | bb.note("Checking autotools environment for common misconfiguration") |
565 | for root, dirs, files in os.walk(workdir): | 565 | for root, dirs, files in os.walk(workdir): |
566 | statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \ | 566 | statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \ |
@@ -575,8 +575,8 @@ Rerun configure task after fixing this. The path was '%s'""" % root) | |||
575 | if "configure.in" in files: | 575 | if "configure.in" in files: |
576 | configs.append(os.path.join(root, "configure.in")) | 576 | configs.append(os.path.join(root, "configure.in")) |
577 | 577 | ||
578 | cnf = bb.data.getVar('EXTRA_OECONF', d, True) or "" | 578 | cnf = d.getVar('EXTRA_OECONF', True) or "" |
579 | if "gettext" not in bb.data.getVar('P', d, True) and "gcc-runtime" not in bb.data.getVar('P', d, True) and "--disable-nls" not in cnf: | 579 | if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf: |
580 | ml = d.getVar("MLPREFIX", True) or "" | 580 | ml = d.getVar("MLPREFIX", True) or "" |
581 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): | 581 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): |
582 | gt = "gettext-native" | 582 | gt = "gettext-native" |
@@ -584,7 +584,7 @@ Rerun configure task after fixing this. The path was '%s'""" % root) | |||
584 | gt = "gettext-nativesdk" | 584 | gt = "gettext-nativesdk" |
585 | else: | 585 | else: |
586 | gt = "virtual/" + ml + "gettext" | 586 | gt = "virtual/" + ml + "gettext" |
587 | deps = bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or "") | 587 | deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "") |
588 | if gt not in deps: | 588 | if gt not in deps: |
589 | for config in configs: | 589 | for config in configs: |
590 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config | 590 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config |
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass index 225d5994fa..8693395111 100644 --- a/meta/classes/kernel-arch.bbclass +++ b/meta/classes/kernel-arch.bbclass | |||
@@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \ | |||
18 | def map_kernel_arch(a, d): | 18 | def map_kernel_arch(a, d): |
19 | import re | 19 | import re |
20 | 20 | ||
21 | valid_archs = bb.data.getVar('valid_archs', d, 1).split() | 21 | valid_archs = d.getVar('valid_archs', 1).split() |
22 | 22 | ||
23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' | 23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' |
24 | elif re.match('arm26$', a): return 'arm26' | 24 | elif re.match('arm26$', a): return 'arm26' |
@@ -32,7 +32,7 @@ def map_kernel_arch(a, d): | |||
32 | else: | 32 | else: |
33 | bb.error("cannot map '%s' to a linux kernel architecture" % a) | 33 | bb.error("cannot map '%s' to a linux kernel architecture" % a) |
34 | 34 | ||
35 | export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}" | 35 | export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}" |
36 | 36 | ||
37 | def map_uboot_arch(a, d): | 37 | def map_uboot_arch(a, d): |
38 | import re | 38 | import re |
@@ -41,5 +41,5 @@ def map_uboot_arch(a, d): | |||
41 | elif re.match('i.86$', a): return 'x86' | 41 | elif re.match('i.86$', a): return 'x86' |
42 | return a | 42 | return a |
43 | 43 | ||
44 | export UBOOT_ARCH = "${@map_uboot_arch(bb.data.getVar('ARCH', d, 1), d)}" | 44 | export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}" |
45 | 45 | ||
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass index 2205686ddd..d555fc04b9 100644 --- a/meta/classes/kernel-yocto.bbclass +++ b/meta/classes/kernel-yocto.bbclass | |||
@@ -104,7 +104,7 @@ python do_kernel_configcheck() { | |||
104 | 104 | ||
105 | bb.plain("NOTE: validating kernel configuration") | 105 | bb.plain("NOTE: validating kernel configuration") |
106 | 106 | ||
107 | pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) | 107 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
108 | cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d ) | 108 | cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d ) |
109 | ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd)) | 109 | ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd)) |
110 | 110 | ||
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass index dc711f287a..12e0b83b94 100644 --- a/meta/classes/kernel.bbclass +++ b/meta/classes/kernel.bbclass | |||
@@ -11,15 +11,15 @@ INITRAMFS_IMAGE ?= "" | |||
11 | INITRAMFS_TASK ?= "" | 11 | INITRAMFS_TASK ?= "" |
12 | 12 | ||
13 | python __anonymous () { | 13 | python __anonymous () { |
14 | kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or '' | 14 | kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or '' |
15 | if kerneltype == 'uImage': | 15 | if kerneltype == 'uImage': |
16 | depends = bb.data.getVar("DEPENDS", d, 1) | 16 | depends = d.getVar("DEPENDS", 1) |
17 | depends = "%s u-boot-mkimage-native" % depends | 17 | depends = "%s u-boot-mkimage-native" % depends |
18 | bb.data.setVar("DEPENDS", depends, d) | 18 | d.setVar("DEPENDS", depends) |
19 | 19 | ||
20 | image = bb.data.getVar('INITRAMFS_IMAGE', d, True) | 20 | image = d.getVar('INITRAMFS_IMAGE', True) |
21 | if image: | 21 | if image: |
22 | bb.data.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs', d) | 22 | d.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs') |
23 | } | 23 | } |
24 | 24 | ||
25 | inherit kernel-arch deploy | 25 | inherit kernel-arch deploy |
@@ -31,7 +31,7 @@ PACKAGES_DYNAMIC += "kernel-firmware-*" | |||
31 | export OS = "${TARGET_OS}" | 31 | export OS = "${TARGET_OS}" |
32 | export CROSS_COMPILE = "${TARGET_PREFIX}" | 32 | export CROSS_COMPILE = "${TARGET_PREFIX}" |
33 | 33 | ||
34 | KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}" | 34 | KERNEL_PRIORITY = "${@d.getVar('PV',1).split('-')[0].split('.')[-1]}" |
35 | 35 | ||
36 | KERNEL_RELEASE ?= "${KERNEL_VERSION}" | 36 | KERNEL_RELEASE ?= "${KERNEL_VERSION}" |
37 | 37 | ||
@@ -56,7 +56,7 @@ KERNEL_IMAGEDEST = "boot" | |||
56 | # | 56 | # |
57 | # configuration | 57 | # configuration |
58 | # | 58 | # |
59 | export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}" | 59 | export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE",1) or "ttyS0"}" |
60 | 60 | ||
61 | KERNEL_VERSION = "${@get_kernelversion('${B}')}" | 61 | KERNEL_VERSION = "${@get_kernelversion('${B}')}" |
62 | 62 | ||
@@ -75,7 +75,7 @@ EXTRA_OEMAKE = "" | |||
75 | 75 | ||
76 | KERNEL_ALT_IMAGETYPE ??= "" | 76 | KERNEL_ALT_IMAGETYPE ??= "" |
77 | 77 | ||
78 | KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(bb.data.getVar('KERNEL_IMAGETYPE', d, 1))}" | 78 | KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}" |
79 | 79 | ||
80 | kernel_do_compile() { | 80 | kernel_do_compile() { |
81 | unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE | 81 | unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE |
@@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm" | |||
306 | python populate_packages_prepend () { | 306 | python populate_packages_prepend () { |
307 | def extract_modinfo(file): | 307 | def extract_modinfo(file): |
308 | import tempfile, re | 308 | import tempfile, re |
309 | tempfile.tempdir = bb.data.getVar("WORKDIR", d, 1) | 309 | tempfile.tempdir = d.getVar("WORKDIR", 1) |
310 | tf = tempfile.mkstemp() | 310 | tf = tempfile.mkstemp() |
311 | tmpfile = tf[1] | 311 | tmpfile = tf[1] |
312 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile) | 312 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile) |
313 | os.system(cmd) | 313 | os.system(cmd) |
314 | f = open(tmpfile) | 314 | f = open(tmpfile) |
315 | l = f.read().split("\000") | 315 | l = f.read().split("\000") |
@@ -328,18 +328,18 @@ python populate_packages_prepend () { | |||
328 | def parse_depmod(): | 328 | def parse_depmod(): |
329 | import re | 329 | import re |
330 | 330 | ||
331 | dvar = bb.data.getVar('PKGD', d, 1) | 331 | dvar = d.getVar('PKGD', 1) |
332 | if not dvar: | 332 | if not dvar: |
333 | bb.error("PKGD not defined") | 333 | bb.error("PKGD not defined") |
334 | return | 334 | return |
335 | 335 | ||
336 | kernelver = bb.data.getVar('KERNEL_VERSION', d, 1) | 336 | kernelver = d.getVar('KERNEL_VERSION', 1) |
337 | kernelver_stripped = kernelver | 337 | kernelver_stripped = kernelver |
338 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) | 338 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) |
339 | if m: | 339 | if m: |
340 | kernelver_stripped = m.group(1) | 340 | kernelver_stripped = m.group(1) |
341 | path = bb.data.getVar("PATH", d, 1) | 341 | path = d.getVar("PATH", 1) |
342 | host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or "" | 342 | host_prefix = d.getVar("HOST_PREFIX", 1) or "" |
343 | 343 | ||
344 | cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) | 344 | cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) |
345 | f = os.popen(cmd, 'r') | 345 | f = os.popen(cmd, 'r') |
@@ -377,9 +377,9 @@ python populate_packages_prepend () { | |||
377 | 377 | ||
378 | def get_dependencies(file, pattern, format): | 378 | def get_dependencies(file, pattern, format): |
379 | # file no longer includes PKGD | 379 | # file no longer includes PKGD |
380 | file = file.replace(bb.data.getVar('PKGD', d, 1) or '', '', 1) | 380 | file = file.replace(d.getVar('PKGD', 1) or '', '', 1) |
381 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} | 381 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} |
382 | file = file.replace("/lib/modules/%s/" % bb.data.getVar('KERNEL_VERSION', d, 1) or '', '', 1) | 382 | file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1) |
383 | 383 | ||
384 | if module_deps.has_key(file): | 384 | if module_deps.has_key(file): |
385 | import re | 385 | import re |
@@ -398,40 +398,40 @@ python populate_packages_prepend () { | |||
398 | import re | 398 | import re |
399 | vals = extract_modinfo(file) | 399 | vals = extract_modinfo(file) |
400 | 400 | ||
401 | dvar = bb.data.getVar('PKGD', d, 1) | 401 | dvar = d.getVar('PKGD', 1) |
402 | 402 | ||
403 | # If autoloading is requested, output /etc/modutils/<name> and append | 403 | # If autoloading is requested, output /etc/modutils/<name> and append |
404 | # appropriate modprobe commands to the postinst | 404 | # appropriate modprobe commands to the postinst |
405 | autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1) | 405 | autoload = d.getVar('module_autoload_%s' % basename, 1) |
406 | if autoload: | 406 | if autoload: |
407 | name = '%s/etc/modutils/%s' % (dvar, basename) | 407 | name = '%s/etc/modutils/%s' % (dvar, basename) |
408 | f = open(name, 'w') | 408 | f = open(name, 'w') |
409 | for m in autoload.split(): | 409 | for m in autoload.split(): |
410 | f.write('%s\n' % m) | 410 | f.write('%s\n' % m) |
411 | f.close() | 411 | f.close() |
412 | postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) | 412 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) |
413 | if not postinst: | 413 | if not postinst: |
414 | bb.fatal("pkg_postinst_%s not defined" % pkg) | 414 | bb.fatal("pkg_postinst_%s not defined" % pkg) |
415 | postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload | 415 | postinst += d.getVar('autoload_postinst_fragment', 1) % autoload |
416 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 416 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
417 | 417 | ||
418 | # Write out any modconf fragment | 418 | # Write out any modconf fragment |
419 | modconf = bb.data.getVar('module_conf_%s' % basename, d, 1) | 419 | modconf = d.getVar('module_conf_%s' % basename, 1) |
420 | if modconf: | 420 | if modconf: |
421 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) | 421 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) |
422 | f = open(name, 'w') | 422 | f = open(name, 'w') |
423 | f.write("%s\n" % modconf) | 423 | f.write("%s\n" % modconf) |
424 | f.close() | 424 | f.close() |
425 | 425 | ||
426 | files = bb.data.getVar('FILES_%s' % pkg, d, 1) | 426 | files = d.getVar('FILES_%s' % pkg, 1) |
427 | files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) | 427 | files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) |
428 | bb.data.setVar('FILES_%s' % pkg, files, d) | 428 | d.setVar('FILES_%s' % pkg, files) |
429 | 429 | ||
430 | if vals.has_key("description"): | 430 | if vals.has_key("description"): |
431 | old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or "" | 431 | old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or "" |
432 | bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d) | 432 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) |
433 | 433 | ||
434 | rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1) | 434 | rdepends_str = d.getVar('RDEPENDS_' + pkg, 1) |
435 | if rdepends_str: | 435 | if rdepends_str: |
436 | rdepends = rdepends_str.split() | 436 | rdepends = rdepends_str.split() |
437 | else: | 437 | else: |
@@ -443,29 +443,29 @@ python populate_packages_prepend () { | |||
443 | module_regex = '^(.*)\.k?o$' | 443 | module_regex = '^(.*)\.k?o$' |
444 | module_pattern = 'kernel-module-%s' | 444 | module_pattern = 'kernel-module-%s' |
445 | 445 | ||
446 | postinst = bb.data.getVar('pkg_postinst_modules', d, 1) | 446 | postinst = d.getVar('pkg_postinst_modules', 1) |
447 | postrm = bb.data.getVar('pkg_postrm_modules', d, 1) | 447 | postrm = d.getVar('pkg_postrm_modules', 1) |
448 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 448 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
449 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 449 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
450 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 450 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
451 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % bb.data.getVar("KERNEL_VERSION", d, 1)) | 451 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1)) |
452 | 452 | ||
453 | import re | 453 | import re |
454 | metapkg = "kernel-modules" | 454 | metapkg = "kernel-modules" |
455 | bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d) | 455 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") |
456 | bb.data.setVar('FILES_' + metapkg, "", d) | 456 | d.setVar('FILES_' + metapkg, "") |
457 | blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux', 'perf', 'perf-dbg', 'kernel-misc' ] | 457 | blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux', 'perf', 'perf-dbg', 'kernel-misc' ] |
458 | for l in module_deps.values(): | 458 | for l in module_deps.values(): |
459 | for i in l: | 459 | for i in l: |
460 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) | 460 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) |
461 | blacklist.append(pkg) | 461 | blacklist.append(pkg) |
462 | metapkg_rdepends = [] | 462 | metapkg_rdepends = [] |
463 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 463 | packages = d.getVar('PACKAGES', 1).split() |
464 | for pkg in packages[1:]: | 464 | for pkg in packages[1:]: |
465 | if not pkg in blacklist and not pkg in metapkg_rdepends: | 465 | if not pkg in blacklist and not pkg in metapkg_rdepends: |
466 | metapkg_rdepends.append(pkg) | 466 | metapkg_rdepends.append(pkg) |
467 | bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) | 467 | d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends)) |
468 | bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d) | 468 | d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') |
469 | packages.append(metapkg) | 469 | packages.append(metapkg) |
470 | bb.data.setVar('PACKAGES', ' '.join(packages), d) | 470 | d.setVar('PACKAGES', ' '.join(packages)) |
471 | } | 471 | } |
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass index 0ee9a55b09..ec33762a20 100644 --- a/meta/classes/libc-common.bbclass +++ b/meta/classes/libc-common.bbclass | |||
@@ -18,13 +18,13 @@ do_install() { | |||
18 | } | 18 | } |
19 | 19 | ||
20 | def get_libc_fpu_setting(bb, d): | 20 | def get_libc_fpu_setting(bb, d): |
21 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 21 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
22 | return "--without-fp" | 22 | return "--without-fp" |
23 | return "" | 23 | return "" |
24 | 24 | ||
25 | python populate_packages_prepend () { | 25 | python populate_packages_prepend () { |
26 | if bb.data.getVar('DEBIAN_NAMES', d, 1): | 26 | if d.getVar('DEBIAN_NAMES', 1): |
27 | bpn = bb.data.getVar('BPN', d, 1) | 27 | bpn = d.getVar('BPN', 1) |
28 | bb.data.setVar('PKG_'+bpn, 'libc6', d) | 28 | d.setVar('PKG_'+bpn, 'libc6') |
29 | bb.data.setVar('PKG_'+bpn+'-dev', 'libc6-dev', d) | 29 | d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') |
30 | } | 30 | } |
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass index 6ef2f97254..fc1a5794d7 100644 --- a/meta/classes/libc-package.bbclass +++ b/meta/classes/libc-package.bbclass | |||
@@ -10,7 +10,7 @@ | |||
10 | GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" | 10 | GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" |
11 | 11 | ||
12 | python __anonymous () { | 12 | python __anonymous () { |
13 | enabled = bb.data.getVar("ENABLE_BINARY_LOCALE_GENERATION", d, 1) | 13 | enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1) |
14 | 14 | ||
15 | pn = d.getVar("PN", True) | 15 | pn = d.getVar("PN", True) |
16 | if pn.endswith("-initial"): | 16 | if pn.endswith("-initial"): |
@@ -19,21 +19,21 @@ python __anonymous () { | |||
19 | if enabled and int(enabled): | 19 | if enabled and int(enabled): |
20 | import re | 20 | import re |
21 | 21 | ||
22 | target_arch = bb.data.getVar("TARGET_ARCH", d, 1) | 22 | target_arch = d.getVar("TARGET_ARCH", 1) |
23 | binary_arches = bb.data.getVar("BINARY_LOCALE_ARCHES", d, 1) or "" | 23 | binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or "" |
24 | use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "" | 24 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "" |
25 | 25 | ||
26 | for regexp in binary_arches.split(" "): | 26 | for regexp in binary_arches.split(" "): |
27 | r = re.compile(regexp) | 27 | r = re.compile(regexp) |
28 | 28 | ||
29 | if r.match(target_arch): | 29 | if r.match(target_arch): |
30 | depends = bb.data.getVar("DEPENDS", d, 1) | 30 | depends = d.getVar("DEPENDS", 1) |
31 | if use_cross_localedef == "1" : | 31 | if use_cross_localedef == "1" : |
32 | depends = "%s cross-localedef-native" % depends | 32 | depends = "%s cross-localedef-native" % depends |
33 | else: | 33 | else: |
34 | depends = "%s qemu-native" % depends | 34 | depends = "%s qemu-native" % depends |
35 | bb.data.setVar("DEPENDS", depends, d) | 35 | d.setVar("DEPENDS", depends) |
36 | bb.data.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile", d) | 36 | d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile") |
37 | break | 37 | break |
38 | } | 38 | } |
39 | 39 | ||
@@ -109,19 +109,19 @@ inherit qemu | |||
109 | 109 | ||
110 | python package_do_split_gconvs () { | 110 | python package_do_split_gconvs () { |
111 | import os, re | 111 | import os, re |
112 | if (bb.data.getVar('PACKAGE_NO_GCONV', d, 1) == '1'): | 112 | if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'): |
113 | bb.note("package requested not splitting gconvs") | 113 | bb.note("package requested not splitting gconvs") |
114 | return | 114 | return |
115 | 115 | ||
116 | if not bb.data.getVar('PACKAGES', d, 1): | 116 | if not d.getVar('PACKAGES', 1): |
117 | return | 117 | return |
118 | 118 | ||
119 | bpn = bb.data.getVar('BPN', d, 1) | 119 | bpn = d.getVar('BPN', 1) |
120 | libdir = bb.data.getVar('libdir', d, 1) | 120 | libdir = d.getVar('libdir', 1) |
121 | if not libdir: | 121 | if not libdir: |
122 | bb.error("libdir not defined") | 122 | bb.error("libdir not defined") |
123 | return | 123 | return |
124 | datadir = bb.data.getVar('datadir', d, 1) | 124 | datadir = d.getVar('datadir', 1) |
125 | if not datadir: | 125 | if not datadir: |
126 | bb.error("datadir not defined") | 126 | bb.error("datadir not defined") |
127 | return | 127 | return |
@@ -191,17 +191,17 @@ python package_do_split_gconvs () { | |||
191 | 191 | ||
192 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ | 192 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ |
193 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') | 193 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') |
194 | bb.data.setVar('PACKAGES', bb.data.getVar('PACKAGES', d) + ' ' + bb.data.getVar('MLPREFIX', d) + bpn + '-gconv', d) | 194 | bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d) |
195 | 195 | ||
196 | use_bin = bb.data.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", d, 1) | 196 | use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1) |
197 | 197 | ||
198 | dot_re = re.compile("(.*)\.(.*)") | 198 | dot_re = re.compile("(.*)\.(.*)") |
199 | 199 | ||
200 | #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales | 200 | #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales |
201 | if use_bin != "precompiled": | 201 | if use_bin != "precompiled": |
202 | supported = bb.data.getVar('GLIBC_GENERATE_LOCALES', d, 1) | 202 | supported = d.getVar('GLIBC_GENERATE_LOCALES', 1) |
203 | if not supported or supported == "all": | 203 | if not supported or supported == "all": |
204 | f = open(base_path_join(bb.data.getVar('WORKDIR', d, 1), "SUPPORTED"), "r") | 204 | f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r") |
205 | supported = f.readlines() | 205 | supported = f.readlines() |
206 | f.close() | 206 | f.close() |
207 | else: | 207 | else: |
@@ -209,7 +209,7 @@ python package_do_split_gconvs () { | |||
209 | supported = map(lambda s:s.replace(".", " ") + "\n", supported) | 209 | supported = map(lambda s:s.replace(".", " ") + "\n", supported) |
210 | else: | 210 | else: |
211 | supported = [] | 211 | supported = [] |
212 | full_bin_path = bb.data.getVar('PKGD', d, True) + binary_locales_dir | 212 | full_bin_path = d.getVar('PKGD', True) + binary_locales_dir |
213 | for dir in os.listdir(full_bin_path): | 213 | for dir in os.listdir(full_bin_path): |
214 | dbase = dir.split(".") | 214 | dbase = dir.split(".") |
215 | d2 = " " | 215 | d2 = " " |
@@ -218,7 +218,7 @@ python package_do_split_gconvs () { | |||
218 | supported.append(dbase[0] + d2) | 218 | supported.append(dbase[0] + d2) |
219 | 219 | ||
220 | # Collate the locales by base and encoding | 220 | # Collate the locales by base and encoding |
221 | utf8_only = int(bb.data.getVar('LOCALE_UTF8_ONLY', d, 1) or 0) | 221 | utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0) |
222 | encodings = {} | 222 | encodings = {} |
223 | for l in supported: | 223 | for l in supported: |
224 | l = l[:-1] | 224 | l = l[:-1] |
@@ -235,9 +235,9 @@ python package_do_split_gconvs () { | |||
235 | def output_locale_source(name, pkgname, locale, encoding): | 235 | def output_locale_source(name, pkgname, locale, encoding): |
236 | bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ | 236 | bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ |
237 | (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d) | 237 | (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d) |
238 | bb.data.setVar('pkg_postinst_%s' % pkgname, bb.data.getVar('locale_base_postinst', d, 1) \ | 238 | bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \ |
239 | % (locale, encoding, locale), d) | 239 | % (locale, encoding, locale), d) |
240 | bb.data.setVar('pkg_postrm_%s' % pkgname, bb.data.getVar('locale_base_postrm', d, 1) % \ | 240 | bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \ |
241 | (locale, encoding, locale), d) | 241 | (locale, encoding, locale), d) |
242 | 242 | ||
243 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): | 243 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): |
@@ -248,23 +248,23 @@ python package_do_split_gconvs () { | |||
248 | libc_name = name | 248 | libc_name = name |
249 | bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ | 249 | bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ |
250 | % (bpn, libc_name)), d) | 250 | % (bpn, libc_name)), d) |
251 | rprovides = (bb.data.getVar('RPROVIDES_%s' % pkgname, d, True) or "").split() | 251 | rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split() |
252 | rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) | 252 | rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) |
253 | bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d) | 253 | bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d) |
254 | 254 | ||
255 | commands = {} | 255 | commands = {} |
256 | 256 | ||
257 | def output_locale_binary(name, pkgname, locale, encoding): | 257 | def output_locale_binary(name, pkgname, locale, encoding): |
258 | treedir = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree") | 258 | treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree") |
259 | ldlibdir = base_path_join(treedir, bb.data.getVar("base_libdir", d, 1)) | 259 | ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1)) |
260 | path = bb.data.getVar("PATH", d, 1) | 260 | path = d.getVar("PATH", 1) |
261 | i18npath = base_path_join(treedir, datadir, "i18n") | 261 | i18npath = base_path_join(treedir, datadir, "i18n") |
262 | gconvpath = base_path_join(treedir, "iconvdata") | 262 | gconvpath = base_path_join(treedir, "iconvdata") |
263 | outputpath = base_path_join(treedir, libdir, "locale") | 263 | outputpath = base_path_join(treedir, libdir, "locale") |
264 | 264 | ||
265 | use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "0" | 265 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0" |
266 | if use_cross_localedef == "1": | 266 | if use_cross_localedef == "1": |
267 | target_arch = bb.data.getVar('TARGET_ARCH', d, True) | 267 | target_arch = d.getVar('TARGET_ARCH', True) |
268 | locale_arch_options = { \ | 268 | locale_arch_options = { \ |
269 | "arm": " --uint32-align=4 --little-endian ", \ | 269 | "arm": " --uint32-align=4 --little-endian ", \ |
270 | "powerpc": " --uint32-align=4 --big-endian ", \ | 270 | "powerpc": " --uint32-align=4 --big-endian ", \ |
@@ -292,9 +292,9 @@ python package_do_split_gconvs () { | |||
292 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ | 292 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ |
293 | % (treedir, datadir, locale, encoding, name) | 293 | % (treedir, datadir, locale, encoding, name) |
294 | 294 | ||
295 | qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % bb.data.getVar('PACKAGE_ARCH', d, 1), d, 1) | 295 | qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1) |
296 | if not qemu_options: | 296 | if not qemu_options: |
297 | qemu_options = bb.data.getVar('QEMU_OPTIONS', d, 1) | 297 | qemu_options = d.getVar('QEMU_OPTIONS', 1) |
298 | 298 | ||
299 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ | 299 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ |
300 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ | 300 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ |
@@ -305,14 +305,14 @@ python package_do_split_gconvs () { | |||
305 | bb.note("generating locale %s (%s)" % (locale, encoding)) | 305 | bb.note("generating locale %s (%s)" % (locale, encoding)) |
306 | 306 | ||
307 | def output_locale(name, locale, encoding): | 307 | def output_locale(name, locale, encoding): |
308 | pkgname = bb.data.getVar('MLPREFIX', d) + 'locale-base-' + legitimize_package_name(name) | 308 | pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) |
309 | bb.data.setVar('ALLOW_EMPTY_%s' % pkgname, '1', d) | 309 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') |
310 | bb.data.setVar('PACKAGES', '%s %s' % (pkgname, bb.data.getVar('PACKAGES', d, 1)), d) | 310 | bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d) |
311 | rprovides = ' virtual-locale-%s' % legitimize_package_name(name) | 311 | rprovides = ' virtual-locale-%s' % legitimize_package_name(name) |
312 | m = re.match("(.*)_(.*)", name) | 312 | m = re.match("(.*)_(.*)", name) |
313 | if m: | 313 | if m: |
314 | rprovides += ' virtual-locale-%s' % m.group(1) | 314 | rprovides += ' virtual-locale-%s' % m.group(1) |
315 | bb.data.setVar('RPROVIDES_%s' % pkgname, rprovides, d) | 315 | d.setVar('RPROVIDES_%s' % pkgname, rprovides) |
316 | 316 | ||
317 | if use_bin == "compile": | 317 | if use_bin == "compile": |
318 | output_locale_binary_rdepends(name, pkgname, locale, encoding) | 318 | output_locale_binary_rdepends(name, pkgname, locale, encoding) |
@@ -347,7 +347,7 @@ python package_do_split_gconvs () { | |||
347 | bb.note(" " + " ".join(non_utf8)) | 347 | bb.note(" " + " ".join(non_utf8)) |
348 | 348 | ||
349 | if use_bin == "compile": | 349 | if use_bin == "compile": |
350 | makefile = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree", "Makefile") | 350 | makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile") |
351 | m = open(makefile, "w") | 351 | m = open(makefile, "w") |
352 | m.write("all: %s\n\n" % " ".join(commands.keys())) | 352 | m.write("all: %s\n\n" % " ".join(commands.keys())) |
353 | for cmd in commands: | 353 | for cmd in commands: |
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index baf35f00cc..4d036b171e 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
@@ -104,10 +104,10 @@ python do_populate_lic() { | |||
104 | 104 | ||
105 | # If the generic does not exist we need to check to see if there is an SPDX mapping to it | 105 | # If the generic does not exist we need to check to see if there is an SPDX mapping to it |
106 | if not os.path.isfile(os.path.join(generic_directory, license_type)): | 106 | if not os.path.isfile(os.path.join(generic_directory, license_type)): |
107 | if bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d) != None: | 107 | if d.getVarFlag('SPDXLICENSEMAP', license_type) != None: |
108 | # Great, there is an SPDXLICENSEMAP. We can copy! | 108 | # Great, there is an SPDXLICENSEMAP. We can copy! |
109 | bb.note("We need to use a SPDXLICENSEMAP for %s" % (license_type)) | 109 | bb.note("We need to use a SPDXLICENSEMAP for %s" % (license_type)) |
110 | spdx_generic = bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d) | 110 | spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type) |
111 | copy_license(generic_directory, gen_lic_dest, spdx_generic) | 111 | copy_license(generic_directory, gen_lic_dest, spdx_generic) |
112 | link_license(gen_lic_dest, destdir, spdx_generic) | 112 | link_license(gen_lic_dest, destdir, spdx_generic) |
113 | else: | 113 | else: |
@@ -120,16 +120,16 @@ python do_populate_lic() { | |||
120 | link_license(gen_lic_dest, destdir, license_type) | 120 | link_license(gen_lic_dest, destdir, license_type) |
121 | 121 | ||
122 | # All the license types for the package | 122 | # All the license types for the package |
123 | license_types = bb.data.getVar('LICENSE', d, True) | 123 | license_types = d.getVar('LICENSE', True) |
124 | # All the license files for the package | 124 | # All the license files for the package |
125 | lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True) | 125 | lic_files = d.getVar('LIC_FILES_CHKSUM', True) |
126 | pn = bb.data.getVar('PN', d, True) | 126 | pn = d.getVar('PN', True) |
127 | # The base directory we wrangle licenses to | 127 | # The base directory we wrangle licenses to |
128 | destdir = os.path.join(bb.data.getVar('LICSSTATEDIR', d, True), pn) | 128 | destdir = os.path.join(d.getVar('LICSSTATEDIR', True), pn) |
129 | # The license files are located in S/LIC_FILE_CHECKSUM. | 129 | # The license files are located in S/LIC_FILE_CHECKSUM. |
130 | srcdir = bb.data.getVar('S', d, True) | 130 | srcdir = d.getVar('S', True) |
131 | # Directory we store the generic licenses as set in the distro configuration | 131 | # Directory we store the generic licenses as set in the distro configuration |
132 | generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True) | 132 | generic_directory = d.getVar('COMMON_LICENSE_DIR', True) |
133 | 133 | ||
134 | try: | 134 | try: |
135 | bb.mkdirhier(destdir) | 135 | bb.mkdirhier(destdir) |
@@ -154,7 +154,7 @@ python do_populate_lic() { | |||
154 | if ret is False or ret == 0: | 154 | if ret is False or ret == 0: |
155 | bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % srclicfile) | 155 | bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % srclicfile) |
156 | 156 | ||
157 | gen_lic_dest = os.path.join(bb.data.getVar('LICENSE_DIRECTORY', d, True), "common-licenses") | 157 | gen_lic_dest = os.path.join(d.getVar('LICENSE_DIRECTORY', True), "common-licenses") |
158 | 158 | ||
159 | clean_licenses = "" | 159 | clean_licenses = "" |
160 | 160 | ||
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass index 99ac05c18d..57609ef8cd 100644 --- a/meta/classes/metadata_scm.bbclass +++ b/meta/classes/metadata_scm.bbclass | |||
@@ -27,7 +27,7 @@ def base_detect_branch(d): | |||
27 | return "<unknown>" | 27 | return "<unknown>" |
28 | 28 | ||
29 | def base_get_scmbasepath(d): | 29 | def base_get_scmbasepath(d): |
30 | return bb.data.getVar( 'COREBASE', d, 1 ) | 30 | return d.getVar( 'COREBASE', 1 ) |
31 | 31 | ||
32 | def base_get_metadata_monotone_branch(path, d): | 32 | def base_get_metadata_monotone_branch(path, d): |
33 | monotone_branch = "<unknown>" | 33 | monotone_branch = "<unknown>" |
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass index ba8b0bf25e..5f25bdd2c2 100644 --- a/meta/classes/native.bbclass +++ b/meta/classes/native.bbclass | |||
@@ -98,18 +98,18 @@ python native_virtclass_handler () { | |||
98 | if not isinstance(e, bb.event.RecipePreFinalise): | 98 | if not isinstance(e, bb.event.RecipePreFinalise): |
99 | return | 99 | return |
100 | 100 | ||
101 | classextend = bb.data.getVar('BBCLASSEXTEND', e.data, True) or "" | 101 | classextend = e.data.getVar('BBCLASSEXTEND', True) or "" |
102 | if "native" not in classextend: | 102 | if "native" not in classextend: |
103 | return | 103 | return |
104 | 104 | ||
105 | pn = bb.data.getVar("PN", e.data, True) | 105 | pn = e.data.getVar("PN", True) |
106 | if not pn.endswith("-native"): | 106 | if not pn.endswith("-native"): |
107 | return | 107 | return |
108 | 108 | ||
109 | def map_dependencies(varname, d, suffix = ""): | 109 | def map_dependencies(varname, d, suffix = ""): |
110 | if suffix: | 110 | if suffix: |
111 | varname = varname + "_" + suffix | 111 | varname = varname + "_" + suffix |
112 | deps = bb.data.getVar(varname, d, True) | 112 | deps = d.getVar(varname, True) |
113 | if not deps: | 113 | if not deps: |
114 | return | 114 | return |
115 | deps = bb.utils.explode_deps(deps) | 115 | deps = bb.utils.explode_deps(deps) |
@@ -131,15 +131,15 @@ python native_virtclass_handler () { | |||
131 | map_dependencies("RPROVIDES", e.data, pkg) | 131 | map_dependencies("RPROVIDES", e.data, pkg) |
132 | map_dependencies("RREPLACES", e.data, pkg) | 132 | map_dependencies("RREPLACES", e.data, pkg) |
133 | 133 | ||
134 | provides = bb.data.getVar("PROVIDES", e.data, True) | 134 | provides = e.data.getVar("PROVIDES", True) |
135 | for prov in provides.split(): | 135 | for prov in provides.split(): |
136 | if prov.find(pn) != -1: | 136 | if prov.find(pn) != -1: |
137 | continue | 137 | continue |
138 | if not prov.endswith("-native"): | 138 | if not prov.endswith("-native"): |
139 | provides = provides.replace(prov, prov + "-native") | 139 | provides = provides.replace(prov, prov + "-native") |
140 | bb.data.setVar("PROVIDES", provides, e.data) | 140 | e.data.setVar("PROVIDES", provides) |
141 | 141 | ||
142 | bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-native", e.data) | 142 | bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data) |
143 | } | 143 | } |
144 | 144 | ||
145 | addhandler native_virtclass_handler | 145 | addhandler native_virtclass_handler |
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass index bb59ac57a6..ca24efaa7c 100644 --- a/meta/classes/nativesdk.bbclass +++ b/meta/classes/nativesdk.bbclass | |||
@@ -11,7 +11,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S | |||
11 | # | 11 | # |
12 | PACKAGE_ARCH = "${SDK_ARCH}-nativesdk" | 12 | PACKAGE_ARCH = "${SDK_ARCH}-nativesdk" |
13 | python () { | 13 | python () { |
14 | archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split() | 14 | archs = d.getVar('PACKAGE_ARCHS', True).split() |
15 | sdkarchs = [] | 15 | sdkarchs = [] |
16 | for arch in archs: | 16 | for arch in archs: |
17 | sdkarchs.append(arch + '-nativesdk') | 17 | sdkarchs.append(arch + '-nativesdk') |
@@ -62,22 +62,22 @@ python nativesdk_virtclass_handler () { | |||
62 | if not isinstance(e, bb.event.RecipePreFinalise): | 62 | if not isinstance(e, bb.event.RecipePreFinalise): |
63 | return | 63 | return |
64 | 64 | ||
65 | pn = bb.data.getVar("PN", e.data, True) | 65 | pn = e.data.getVar("PN", True) |
66 | if not pn.endswith("-nativesdk"): | 66 | if not pn.endswith("-nativesdk"): |
67 | return | 67 | return |
68 | 68 | ||
69 | bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-nativesdk", e.data) | 69 | bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data) |
70 | } | 70 | } |
71 | 71 | ||
72 | python () { | 72 | python () { |
73 | pn = bb.data.getVar("PN", d, True) | 73 | pn = d.getVar("PN", True) |
74 | if not pn.endswith("-nativesdk"): | 74 | if not pn.endswith("-nativesdk"): |
75 | return | 75 | return |
76 | 76 | ||
77 | def map_dependencies(varname, d, suffix = ""): | 77 | def map_dependencies(varname, d, suffix = ""): |
78 | if suffix: | 78 | if suffix: |
79 | varname = varname + "_" + suffix | 79 | varname = varname + "_" + suffix |
80 | deps = bb.data.getVar(varname, d, True) | 80 | deps = d.getVar(varname, True) |
81 | if not deps: | 81 | if not deps: |
82 | return | 82 | return |
83 | deps = bb.utils.explode_deps(deps) | 83 | deps = bb.utils.explode_deps(deps) |
@@ -101,13 +101,13 @@ python () { | |||
101 | # map_dependencies("RPROVIDES", d, pkg) | 101 | # map_dependencies("RPROVIDES", d, pkg) |
102 | # map_dependencies("RREPLACES", d, pkg) | 102 | # map_dependencies("RREPLACES", d, pkg) |
103 | 103 | ||
104 | provides = bb.data.getVar("PROVIDES", d, True) | 104 | provides = d.getVar("PROVIDES", True) |
105 | for prov in provides.split(): | 105 | for prov in provides.split(): |
106 | if prov.find(pn) != -1: | 106 | if prov.find(pn) != -1: |
107 | continue | 107 | continue |
108 | if not prov.endswith("-nativesdk"): | 108 | if not prov.endswith("-nativesdk"): |
109 | provides = provides.replace(prov, prov + "-nativesdk") | 109 | provides = provides.replace(prov, prov + "-nativesdk") |
110 | bb.data.setVar("PROVIDES", provides, d) | 110 | d.setVar("PROVIDES", provides) |
111 | } | 111 | } |
112 | 112 | ||
113 | addhandler nativesdk_virtclass_handler | 113 | addhandler nativesdk_virtclass_handler |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index 0e1d8dbfc4..d01275f33a 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -88,9 +88,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
88 | if newdeps: | 88 | if newdeps: |
89 | extra_depends = " ".join(newdeps) | 89 | extra_depends = " ".join(newdeps) |
90 | 90 | ||
91 | dvar = bb.data.getVar('PKGD', d, True) | 91 | dvar = d.getVar('PKGD', True) |
92 | 92 | ||
93 | packages = bb.data.getVar('PACKAGES', d, True).split() | 93 | packages = d.getVar('PACKAGES', True).split() |
94 | 94 | ||
95 | if postinst: | 95 | if postinst: |
96 | postinst = '#!/bin/sh\n' + postinst + '\n' | 96 | postinst = '#!/bin/sh\n' + postinst + '\n' |
@@ -136,7 +136,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
136 | packages = [pkg] + packages | 136 | packages = [pkg] + packages |
137 | else: | 137 | else: |
138 | packages.append(pkg) | 138 | packages.append(pkg) |
139 | oldfiles = bb.data.getVar('FILES_' + pkg, d, True) | 139 | oldfiles = d.getVar('FILES_' + pkg, True) |
140 | if not oldfiles: | 140 | if not oldfiles: |
141 | the_files = [os.path.join(root, o)] | 141 | the_files = [os.path.join(root, o)] |
142 | if aux_files_pattern: | 142 | if aux_files_pattern: |
@@ -153,17 +153,17 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
153 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | 153 | the_files.append(aux_files_pattern_verbatim % m.group(1)) |
154 | bb.data.setVar('FILES_' + pkg, " ".join(the_files), d) | 154 | bb.data.setVar('FILES_' + pkg, " ".join(the_files), d) |
155 | if extra_depends != '': | 155 | if extra_depends != '': |
156 | the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, True) | 156 | the_depends = d.getVar('RDEPENDS_' + pkg, True) |
157 | if the_depends: | 157 | if the_depends: |
158 | the_depends = '%s %s' % (the_depends, extra_depends) | 158 | the_depends = '%s %s' % (the_depends, extra_depends) |
159 | else: | 159 | else: |
160 | the_depends = extra_depends | 160 | the_depends = extra_depends |
161 | bb.data.setVar('RDEPENDS_' + pkg, the_depends, d) | 161 | d.setVar('RDEPENDS_' + pkg, the_depends) |
162 | bb.data.setVar('DESCRIPTION_' + pkg, description % on, d) | 162 | d.setVar('DESCRIPTION_' + pkg, description % on) |
163 | if postinst: | 163 | if postinst: |
164 | bb.data.setVar('pkg_postinst_' + pkg, postinst, d) | 164 | d.setVar('pkg_postinst_' + pkg, postinst) |
165 | if postrm: | 165 | if postrm: |
166 | bb.data.setVar('pkg_postrm_' + pkg, postrm, d) | 166 | d.setVar('pkg_postrm_' + pkg, postrm) |
167 | else: | 167 | else: |
168 | bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d) | 168 | bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d) |
169 | if callable(hook): | 169 | if callable(hook): |
@@ -174,13 +174,13 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
174 | PACKAGE_DEPENDS += "file-native" | 174 | PACKAGE_DEPENDS += "file-native" |
175 | 175 | ||
176 | python () { | 176 | python () { |
177 | if bb.data.getVar('PACKAGES', d, True) != '': | 177 | if d.getVar('PACKAGES', True) != '': |
178 | deps = bb.data.getVarFlag('do_package', 'depends', d) or "" | 178 | deps = d.getVarFlag('do_package', 'depends') or "" |
179 | for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split(): | 179 | for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split(): |
180 | deps += " %s:do_populate_sysroot" % dep | 180 | deps += " %s:do_populate_sysroot" % dep |
181 | bb.data.setVarFlag('do_package', 'depends', deps, d) | 181 | d.setVarFlag('do_package', 'depends', deps) |
182 | 182 | ||
183 | deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split() | 183 | deps = (d.getVarFlag('do_package', 'deptask') or "").split() |
184 | # shlibs requires any DEPENDS to have already packaged for the *.list files | 184 | # shlibs requires any DEPENDS to have already packaged for the *.list files |
185 | deps.append("do_package") | 185 | deps.append("do_package") |
186 | bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d) | 186 | bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d) |
@@ -198,9 +198,9 @@ def splitfile(file, debugfile, debugsrcdir, d): | |||
198 | 198 | ||
199 | import commands, stat | 199 | import commands, stat |
200 | 200 | ||
201 | dvar = bb.data.getVar('PKGD', d, True) | 201 | dvar = d.getVar('PKGD', True) |
202 | pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) | 202 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
203 | objcopy = bb.data.getVar("OBJCOPY", d, True) | 203 | objcopy = d.getVar("OBJCOPY", True) |
204 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) | 204 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) |
205 | workdir = bb.data.expand("${WORKDIR}", d) | 205 | workdir = bb.data.expand("${WORKDIR}", d) |
206 | workparentdir = os.path.dirname(workdir) | 206 | workparentdir = os.path.dirname(workdir) |
@@ -240,10 +240,10 @@ def splitfile2(debugsrcdir, d): | |||
240 | 240 | ||
241 | import commands, stat | 241 | import commands, stat |
242 | 242 | ||
243 | dvar = bb.data.getVar('PKGD', d, True) | 243 | dvar = d.getVar('PKGD', True) |
244 | pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) | 244 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
245 | strip = bb.data.getVar("STRIP", d, True) | 245 | strip = d.getVar("STRIP", True) |
246 | objcopy = bb.data.getVar("OBJCOPY", d, True) | 246 | objcopy = d.getVar("OBJCOPY", True) |
247 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) | 247 | debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) |
248 | workdir = bb.data.expand("${WORKDIR}", d) | 248 | workdir = bb.data.expand("${WORKDIR}", d) |
249 | workparentdir = os.path.dirname(workdir) | 249 | workparentdir = os.path.dirname(workdir) |
@@ -279,8 +279,8 @@ def runstrip(file, elftype, d): | |||
279 | 279 | ||
280 | import commands, stat | 280 | import commands, stat |
281 | 281 | ||
282 | pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) | 282 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
283 | strip = bb.data.getVar("STRIP", d, True) | 283 | strip = d.getVar("STRIP", True) |
284 | 284 | ||
285 | # Handle kernel modules specifically - .debug directories here are pointless | 285 | # Handle kernel modules specifically - .debug directories here are pointless |
286 | if file.find("/lib/modules/") != -1 and file.endswith(".ko"): | 286 | if file.find("/lib/modules/") != -1 and file.endswith(".ko"): |
@@ -329,10 +329,10 @@ def get_package_mapping (pkg, d): | |||
329 | return pkg | 329 | return pkg |
330 | 330 | ||
331 | def runtime_mapping_rename (varname, d): | 331 | def runtime_mapping_rename (varname, d): |
332 | #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True))) | 332 | #bb.note("%s before: %s" % (varname, d.getVar(varname, True))) |
333 | 333 | ||
334 | new_depends = [] | 334 | new_depends = [] |
335 | deps = bb.utils.explode_dep_versions(bb.data.getVar(varname, d, True) or "") | 335 | deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "") |
336 | for depend in deps: | 336 | for depend in deps: |
337 | # Have to be careful with any version component of the depend | 337 | # Have to be careful with any version component of the depend |
338 | new_depend = get_package_mapping(depend, d) | 338 | new_depend = get_package_mapping(depend, d) |
@@ -343,7 +343,7 @@ def runtime_mapping_rename (varname, d): | |||
343 | 343 | ||
344 | bb.data.setVar(varname, " ".join(new_depends) or None, d) | 344 | bb.data.setVar(varname, " ".join(new_depends) or None, d) |
345 | 345 | ||
346 | #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, True))) | 346 | #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) |
347 | 347 | ||
348 | # | 348 | # |
349 | # Package functions suitable for inclusion in PACKAGEFUNCS | 349 | # Package functions suitable for inclusion in PACKAGEFUNCS |
@@ -359,19 +359,19 @@ python package_get_auto_pr() { | |||
359 | } | 359 | } |
360 | 360 | ||
361 | python package_do_split_locales() { | 361 | python package_do_split_locales() { |
362 | if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'): | 362 | if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'): |
363 | bb.debug(1, "package requested not splitting locales") | 363 | bb.debug(1, "package requested not splitting locales") |
364 | return | 364 | return |
365 | 365 | ||
366 | packages = (bb.data.getVar('PACKAGES', d, True) or "").split() | 366 | packages = (d.getVar('PACKAGES', True) or "").split() |
367 | 367 | ||
368 | datadir = bb.data.getVar('datadir', d, True) | 368 | datadir = d.getVar('datadir', True) |
369 | if not datadir: | 369 | if not datadir: |
370 | bb.note("datadir not defined") | 370 | bb.note("datadir not defined") |
371 | return | 371 | return |
372 | 372 | ||
373 | dvar = bb.data.getVar('PKGD', d, True) | 373 | dvar = d.getVar('PKGD', True) |
374 | pn = bb.data.getVar('PN', d, True) | 374 | pn = d.getVar('PN', True) |
375 | 375 | ||
376 | if pn + '-locale' in packages: | 376 | if pn + '-locale' in packages: |
377 | packages.remove(pn + '-locale') | 377 | packages.remove(pn + '-locale') |
@@ -392,9 +392,9 @@ python package_do_split_locales() { | |||
392 | if mainpkg.find('-dev'): | 392 | if mainpkg.find('-dev'): |
393 | mainpkg = mainpkg.replace('-dev', '') | 393 | mainpkg = mainpkg.replace('-dev', '') |
394 | 394 | ||
395 | summary = bb.data.getVar('SUMMARY', d, True) or pn | 395 | summary = d.getVar('SUMMARY', True) or pn |
396 | description = bb.data.getVar('DESCRIPTION', d, True) or "" | 396 | description = d.getVar('DESCRIPTION', True) or "" |
397 | locale_section = bb.data.getVar('LOCALE_SECTION', d, True) | 397 | locale_section = d.getVar('LOCALE_SECTION', True) |
398 | for l in locales: | 398 | for l in locales: |
399 | ln = legitimize_package_name(l) | 399 | ln = legitimize_package_name(l) |
400 | pkg = pn + '-locale-' + ln | 400 | pkg = pn + '-locale-' + ln |
@@ -405,7 +405,7 @@ python package_do_split_locales() { | |||
405 | bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d) | 405 | bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d) |
406 | bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d) | 406 | bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d) |
407 | if locale_section: | 407 | if locale_section: |
408 | bb.data.setVar('SECTION_' + pkg, locale_section, d) | 408 | d.setVar('SECTION_' + pkg, locale_section) |
409 | 409 | ||
410 | bb.data.setVar('PACKAGES', ' '.join(packages), d) | 410 | bb.data.setVar('PACKAGES', ' '.join(packages), d) |
411 | 411 | ||
@@ -415,14 +415,14 @@ python package_do_split_locales() { | |||
415 | # glibc-localedata-translit* won't install as a dependency | 415 | # glibc-localedata-translit* won't install as a dependency |
416 | # for some other package which breaks meta-toolchain | 416 | # for some other package which breaks meta-toolchain |
417 | # Probably breaks since virtual-locale- isn't provided anywhere | 417 | # Probably breaks since virtual-locale- isn't provided anywhere |
418 | #rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "").split() | 418 | #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split() |
419 | #rdep.append('%s-locale*' % pn) | 419 | #rdep.append('%s-locale*' % pn) |
420 | #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) | 420 | #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) |
421 | } | 421 | } |
422 | 422 | ||
423 | python perform_packagecopy () { | 423 | python perform_packagecopy () { |
424 | dest = bb.data.getVar('D', d, True) | 424 | dest = d.getVar('D', True) |
425 | dvar = bb.data.getVar('PKGD', d, True) | 425 | dvar = d.getVar('PKGD', True) |
426 | 426 | ||
427 | bb.mkdirhier(dvar) | 427 | bb.mkdirhier(dvar) |
428 | 428 | ||
@@ -549,16 +549,16 @@ python fixup_perms () { | |||
549 | # paths are resolved via BBPATH | 549 | # paths are resolved via BBPATH |
550 | def get_fs_perms_list(d): | 550 | def get_fs_perms_list(d): |
551 | str = "" | 551 | str = "" |
552 | fs_perms_tables = bb.data.getVar('FILESYSTEM_PERMS_TABLES', d, True) | 552 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True) |
553 | if not fs_perms_tables: | 553 | if not fs_perms_tables: |
554 | fs_perms_tables = 'files/fs-perms.txt' | 554 | fs_perms_tables = 'files/fs-perms.txt' |
555 | for conf_file in fs_perms_tables.split(): | 555 | for conf_file in fs_perms_tables.split(): |
556 | str += " %s" % bb.which(bb.data.getVar('BBPATH', d, True), conf_file) | 556 | str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file) |
557 | return str | 557 | return str |
558 | 558 | ||
559 | 559 | ||
560 | 560 | ||
561 | dvar = bb.data.getVar('PKGD', d, True) | 561 | dvar = d.getVar('PKGD', True) |
562 | 562 | ||
563 | fs_perms_table = {} | 563 | fs_perms_table = {} |
564 | 564 | ||
@@ -586,7 +586,7 @@ python fixup_perms () { | |||
586 | 'oldincludedir' ] | 586 | 'oldincludedir' ] |
587 | 587 | ||
588 | for path in target_path_vars: | 588 | for path in target_path_vars: |
589 | dir = bb.data.getVar(path, d, True) or "" | 589 | dir = d.getVar(path, True) or "" |
590 | if dir == "": | 590 | if dir == "": |
591 | continue | 591 | continue |
592 | fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) | 592 | fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) |
@@ -664,11 +664,11 @@ python fixup_perms () { | |||
664 | python split_and_strip_files () { | 664 | python split_and_strip_files () { |
665 | import commands, stat, errno | 665 | import commands, stat, errno |
666 | 666 | ||
667 | dvar = bb.data.getVar('PKGD', d, True) | 667 | dvar = d.getVar('PKGD', True) |
668 | pn = bb.data.getVar('PN', d, True) | 668 | pn = d.getVar('PN', True) |
669 | 669 | ||
670 | # We default to '.debug' style | 670 | # We default to '.debug' style |
671 | if bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, True) == 'debug-file-directory': | 671 | if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory': |
672 | # Single debug-file-directory style debug info | 672 | # Single debug-file-directory style debug info |
673 | debugappend = ".debug" | 673 | debugappend = ".debug" |
674 | debugdir = "" | 674 | debugdir = "" |
@@ -691,7 +691,7 @@ python split_and_strip_files () { | |||
691 | # 8 - shared library | 691 | # 8 - shared library |
692 | def isELF(path): | 692 | def isELF(path): |
693 | type = 0 | 693 | type = 0 |
694 | pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) | 694 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
695 | ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path)) | 695 | ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path)) |
696 | 696 | ||
697 | if ret: | 697 | if ret: |
@@ -715,8 +715,8 @@ python split_and_strip_files () { | |||
715 | # | 715 | # |
716 | file_list = {} | 716 | file_list = {} |
717 | file_links = {} | 717 | file_links = {} |
718 | if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1') and \ | 718 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \ |
719 | (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'): | 719 | (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): |
720 | for root, dirs, files in os.walk(dvar): | 720 | for root, dirs, files in os.walk(dvar): |
721 | for f in files: | 721 | for f in files: |
722 | file = os.path.join(root, f) | 722 | file = os.path.join(root, f) |
@@ -764,7 +764,7 @@ python split_and_strip_files () { | |||
764 | # | 764 | # |
765 | # First lets process debug splitting | 765 | # First lets process debug splitting |
766 | # | 766 | # |
767 | if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1'): | 767 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'): |
768 | for file in file_list: | 768 | for file in file_list: |
769 | src = file[len(dvar):] | 769 | src = file[len(dvar):] |
770 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend | 770 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend |
@@ -842,7 +842,7 @@ python split_and_strip_files () { | |||
842 | # | 842 | # |
843 | # Now lets go back over things and strip them | 843 | # Now lets go back over things and strip them |
844 | # | 844 | # |
845 | if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'): | 845 | if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): |
846 | for file in file_list: | 846 | for file in file_list: |
847 | if file_list[file].startswith("ELF: "): | 847 | if file_list[file].startswith("ELF: "): |
848 | elf_file = int(file_list[file][5:]) | 848 | elf_file = int(file_list[file][5:]) |
@@ -856,11 +856,11 @@ python split_and_strip_files () { | |||
856 | python populate_packages () { | 856 | python populate_packages () { |
857 | import glob, stat, errno, re | 857 | import glob, stat, errno, re |
858 | 858 | ||
859 | workdir = bb.data.getVar('WORKDIR', d, True) | 859 | workdir = d.getVar('WORKDIR', True) |
860 | outdir = bb.data.getVar('DEPLOY_DIR', d, True) | 860 | outdir = d.getVar('DEPLOY_DIR', True) |
861 | dvar = bb.data.getVar('PKGD', d, True) | 861 | dvar = d.getVar('PKGD', True) |
862 | packages = bb.data.getVar('PACKAGES', d, True) | 862 | packages = d.getVar('PACKAGES', True) |
863 | pn = bb.data.getVar('PN', d, True) | 863 | pn = d.getVar('PN', True) |
864 | 864 | ||
865 | bb.mkdirhier(outdir) | 865 | bb.mkdirhier(outdir) |
866 | os.chdir(dvar) | 866 | os.chdir(dvar) |
@@ -877,7 +877,7 @@ python populate_packages () { | |||
877 | else: | 877 | else: |
878 | package_list.append(pkg) | 878 | package_list.append(pkg) |
879 | 879 | ||
880 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 880 | pkgdest = d.getVar('PKGDEST', True) |
881 | os.system('rm -rf %s' % pkgdest) | 881 | os.system('rm -rf %s' % pkgdest) |
882 | 882 | ||
883 | seen = [] | 883 | seen = [] |
@@ -887,14 +887,14 @@ python populate_packages () { | |||
887 | root = os.path.join(pkgdest, pkg) | 887 | root = os.path.join(pkgdest, pkg) |
888 | bb.mkdirhier(root) | 888 | bb.mkdirhier(root) |
889 | 889 | ||
890 | bb.data.setVar('PKG', pkg, localdata) | 890 | localdata.setVar('PKG', pkg) |
891 | overrides = bb.data.getVar('OVERRIDES', localdata, True) | 891 | overrides = localdata.getVar('OVERRIDES', True) |
892 | if not overrides: | 892 | if not overrides: |
893 | raise bb.build.FuncFailed('OVERRIDES not defined') | 893 | raise bb.build.FuncFailed('OVERRIDES not defined') |
894 | bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata) | 894 | localdata.setVar('OVERRIDES', overrides + ':' + pkg) |
895 | bb.data.update_data(localdata) | 895 | bb.data.update_data(localdata) |
896 | 896 | ||
897 | filesvar = bb.data.getVar('FILES', localdata, True) or "" | 897 | filesvar = localdata.getVar('FILES', True) or "" |
898 | files = filesvar.split() | 898 | files = filesvar.split() |
899 | file_links = {} | 899 | file_links = {} |
900 | for file in files: | 900 | for file in files: |
@@ -973,9 +973,9 @@ python populate_packages () { | |||
973 | bb.build.exec_func("package_name_hook", d) | 973 | bb.build.exec_func("package_name_hook", d) |
974 | 974 | ||
975 | for pkg in package_list: | 975 | for pkg in package_list: |
976 | pkgname = bb.data.getVar('PKG_%s' % pkg, d, True) | 976 | pkgname = d.getVar('PKG_%s' % pkg, True) |
977 | if pkgname is None: | 977 | if pkgname is None: |
978 | bb.data.setVar('PKG_%s' % pkg, pkg, d) | 978 | d.setVar('PKG_%s' % pkg, pkg) |
979 | 979 | ||
980 | dangling_links = {} | 980 | dangling_links = {} |
981 | pkg_files = {} | 981 | pkg_files = {} |
@@ -999,7 +999,7 @@ python populate_packages () { | |||
999 | dangling_links[pkg].append(os.path.normpath(target)) | 999 | dangling_links[pkg].append(os.path.normpath(target)) |
1000 | 1000 | ||
1001 | for pkg in package_list: | 1001 | for pkg in package_list: |
1002 | rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "") | 1002 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "") |
1003 | 1003 | ||
1004 | for l in dangling_links[pkg]: | 1004 | for l in dangling_links[pkg]: |
1005 | found = False | 1005 | found = False |
@@ -1040,9 +1040,9 @@ python emit_pkgdata() { | |||
1040 | f.write('%s: %s\n' % (var, encode(val))) | 1040 | f.write('%s: %s\n' % (var, encode(val))) |
1041 | return | 1041 | return |
1042 | 1042 | ||
1043 | packages = bb.data.getVar('PACKAGES', d, True) | 1043 | packages = d.getVar('PACKAGES', True) |
1044 | pkgdest = bb.data.getVar('PKGDEST', d, 1) | 1044 | pkgdest = d.getVar('PKGDEST', 1) |
1045 | pkgdatadir = bb.data.getVar('PKGDESTWORK', d, True) | 1045 | pkgdatadir = d.getVar('PKGDESTWORK', True) |
1046 | 1046 | ||
1047 | # Take shared lock since we're only reading, not writing | 1047 | # Take shared lock since we're only reading, not writing |
1048 | lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d), True) | 1048 | lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d), True) |
@@ -1052,7 +1052,7 @@ python emit_pkgdata() { | |||
1052 | f.write("PACKAGES: %s\n" % packages) | 1052 | f.write("PACKAGES: %s\n" % packages) |
1053 | f.close() | 1053 | f.close() |
1054 | 1054 | ||
1055 | workdir = bb.data.getVar('WORKDIR', d, True) | 1055 | workdir = d.getVar('WORKDIR', True) |
1056 | 1056 | ||
1057 | for pkg in packages.split(): | 1057 | for pkg in packages.split(): |
1058 | subdata_file = pkgdatadir + "/runtime/%s" % pkg | 1058 | subdata_file = pkgdatadir + "/runtime/%s" % pkg |
@@ -1080,19 +1080,19 @@ python emit_pkgdata() { | |||
1080 | write_if_exists(sf, pkg, 'pkg_preinst') | 1080 | write_if_exists(sf, pkg, 'pkg_preinst') |
1081 | write_if_exists(sf, pkg, 'pkg_prerm') | 1081 | write_if_exists(sf, pkg, 'pkg_prerm') |
1082 | write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') | 1082 | write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') |
1083 | for dfile in (bb.data.getVar('FILERPROVIDESFLIST_' + pkg, d, True) or "").split(): | 1083 | for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split(): |
1084 | write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile) | 1084 | write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile) |
1085 | 1085 | ||
1086 | write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') | 1086 | write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') |
1087 | for dfile in (bb.data.getVar('FILERDEPENDSFLIST_' + pkg, d, True) or "").split(): | 1087 | for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split(): |
1088 | write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile) | 1088 | write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile) |
1089 | 1089 | ||
1090 | sf.close() | 1090 | sf.close() |
1091 | 1091 | ||
1092 | 1092 | ||
1093 | allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, True) | 1093 | allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True) |
1094 | if not allow_empty: | 1094 | if not allow_empty: |
1095 | allow_empty = bb.data.getVar('ALLOW_EMPTY', d, True) | 1095 | allow_empty = d.getVar('ALLOW_EMPTY', True) |
1096 | root = "%s/%s" % (pkgdest, pkg) | 1096 | root = "%s/%s" % (pkgdest, pkg) |
1097 | os.chdir(root) | 1097 | os.chdir(root) |
1098 | g = glob('*') | 1098 | g = glob('*') |
@@ -1123,8 +1123,8 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps" | |||
1123 | python package_do_filedeps() { | 1123 | python package_do_filedeps() { |
1124 | import os, re | 1124 | import os, re |
1125 | 1125 | ||
1126 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 1126 | pkgdest = d.getVar('PKGDEST', True) |
1127 | packages = bb.data.getVar('PACKAGES', d, True) | 1127 | packages = d.getVar('PACKAGES', True) |
1128 | 1128 | ||
1129 | rpmdeps = bb.data.expand("${RPMDEPS}", d) | 1129 | rpmdeps = bb.data.expand("${RPMDEPS}", d) |
1130 | r = re.compile(r'[<>=]+ +[^ ]*') | 1130 | r = re.compile(r'[<>=]+ +[^ ]*') |
@@ -1189,7 +1189,7 @@ SHLIBSWORKDIR = "${WORKDIR}/shlibs" | |||
1189 | python package_do_shlibs() { | 1189 | python package_do_shlibs() { |
1190 | import re | 1190 | import re |
1191 | 1191 | ||
1192 | exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0) | 1192 | exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0) |
1193 | if exclude_shlibs: | 1193 | if exclude_shlibs: |
1194 | bb.note("not generating shlibs") | 1194 | bb.note("not generating shlibs") |
1195 | return | 1195 | return |
@@ -1197,27 +1197,27 @@ python package_do_shlibs() { | |||
1197 | lib_re = re.compile("^.*\.so") | 1197 | lib_re = re.compile("^.*\.so") |
1198 | libdir_re = re.compile(".*/lib$") | 1198 | libdir_re = re.compile(".*/lib$") |
1199 | 1199 | ||
1200 | packages = bb.data.getVar('PACKAGES', d, True) | 1200 | packages = d.getVar('PACKAGES', True) |
1201 | targetos = bb.data.getVar('TARGET_OS', d, True) | 1201 | targetos = d.getVar('TARGET_OS', True) |
1202 | 1202 | ||
1203 | workdir = bb.data.getVar('WORKDIR', d, True) | 1203 | workdir = d.getVar('WORKDIR', True) |
1204 | 1204 | ||
1205 | ver = bb.data.getVar('PKGV', d, True) | 1205 | ver = d.getVar('PKGV', True) |
1206 | if not ver: | 1206 | if not ver: |
1207 | bb.error("PKGV not defined") | 1207 | bb.error("PKGV not defined") |
1208 | return | 1208 | return |
1209 | 1209 | ||
1210 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 1210 | pkgdest = d.getVar('PKGDEST', True) |
1211 | 1211 | ||
1212 | shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True) | 1212 | shlibs_dir = d.getVar('SHLIBSDIR', True) |
1213 | shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True) | 1213 | shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) |
1214 | 1214 | ||
1215 | # Take shared lock since we're only reading, not writing | 1215 | # Take shared lock since we're only reading, not writing |
1216 | lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d)) | 1216 | lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d)) |
1217 | 1217 | ||
1218 | def linux_so(root, path, file): | 1218 | def linux_so(root, path, file): |
1219 | cmd = bb.data.getVar('OBJDUMP', d, True) + " -p " + os.path.join(root, file) + " 2>/dev/null" | 1219 | cmd = d.getVar('OBJDUMP', True) + " -p " + os.path.join(root, file) + " 2>/dev/null" |
1220 | cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', d, True), cmd) | 1220 | cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd) |
1221 | fd = os.popen(cmd) | 1221 | fd = os.popen(cmd) |
1222 | lines = fd.readlines() | 1222 | lines = fd.readlines() |
1223 | fd.close() | 1223 | fd.close() |
@@ -1264,7 +1264,7 @@ python package_do_shlibs() { | |||
1264 | if not combo in sonames: | 1264 | if not combo in sonames: |
1265 | sonames.append(combo) | 1265 | sonames.append(combo) |
1266 | if file.endswith('.dylib') or file.endswith('.so'): | 1266 | if file.endswith('.dylib') or file.endswith('.so'): |
1267 | lafile = fullpath.replace(os.path.join(pkgdest, pkg), bb.data.getVar('PKGD', d, True)) | 1267 | lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True)) |
1268 | # Drop suffix | 1268 | # Drop suffix |
1269 | lafile = lafile.rsplit(".",1)[0] | 1269 | lafile = lafile.rsplit(".",1)[0] |
1270 | lapath = os.path.dirname(lafile) | 1270 | lapath = os.path.dirname(lafile) |
@@ -1299,26 +1299,26 @@ python package_do_shlibs() { | |||
1299 | needed[pkg].append(name) | 1299 | needed[pkg].append(name) |
1300 | #bb.note("Adding %s for %s" % (name, pkg)) | 1300 | #bb.note("Adding %s for %s" % (name, pkg)) |
1301 | 1301 | ||
1302 | if bb.data.getVar('PACKAGE_SNAP_LIB_SYMLINKS', d, True) == "1": | 1302 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1": |
1303 | snap_symlinks = True | 1303 | snap_symlinks = True |
1304 | else: | 1304 | else: |
1305 | snap_symlinks = False | 1305 | snap_symlinks = False |
1306 | 1306 | ||
1307 | if (bb.data.getVar('USE_LDCONFIG', d, True) or "1") == "1": | 1307 | if (d.getVar('USE_LDCONFIG', True) or "1") == "1": |
1308 | use_ldconfig = True | 1308 | use_ldconfig = True |
1309 | else: | 1309 | else: |
1310 | use_ldconfig = False | 1310 | use_ldconfig = False |
1311 | 1311 | ||
1312 | needed = {} | 1312 | needed = {} |
1313 | shlib_provider = {} | 1313 | shlib_provider = {} |
1314 | private_libs = bb.data.getVar('PRIVATE_LIBS', d, True) | 1314 | private_libs = d.getVar('PRIVATE_LIBS', True) |
1315 | for pkg in packages.split(): | 1315 | for pkg in packages.split(): |
1316 | needs_ldconfig = False | 1316 | needs_ldconfig = False |
1317 | bb.debug(2, "calculating shlib provides for %s" % pkg) | 1317 | bb.debug(2, "calculating shlib provides for %s" % pkg) |
1318 | 1318 | ||
1319 | pkgver = bb.data.getVar('PKGV_' + pkg, d, True) | 1319 | pkgver = d.getVar('PKGV_' + pkg, True) |
1320 | if not pkgver: | 1320 | if not pkgver: |
1321 | pkgver = bb.data.getVar('PV_' + pkg, d, True) | 1321 | pkgver = d.getVar('PV_' + pkg, True) |
1322 | if not pkgver: | 1322 | if not pkgver: |
1323 | pkgver = ver | 1323 | pkgver = ver |
1324 | 1324 | ||
@@ -1352,11 +1352,11 @@ python package_do_shlibs() { | |||
1352 | fd.close() | 1352 | fd.close() |
1353 | if needs_ldconfig and use_ldconfig: | 1353 | if needs_ldconfig and use_ldconfig: |
1354 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | 1354 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) |
1355 | postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, True) or bb.data.getVar('pkg_postinst', d, True) | 1355 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
1356 | if not postinst: | 1356 | if not postinst: |
1357 | postinst = '#!/bin/sh\n' | 1357 | postinst = '#!/bin/sh\n' |
1358 | postinst += bb.data.getVar('ldconfig_postinst_fragment', d, True) | 1358 | postinst += d.getVar('ldconfig_postinst_fragment', True) |
1359 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 1359 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
1360 | 1360 | ||
1361 | list_re = re.compile('^(.*)\.list$') | 1361 | list_re = re.compile('^(.*)\.list$') |
1362 | for dir in [shlibs_dir]: | 1362 | for dir in [shlibs_dir]: |
@@ -1380,7 +1380,7 @@ python package_do_shlibs() { | |||
1380 | 1380 | ||
1381 | bb.utils.unlockfile(lf) | 1381 | bb.utils.unlockfile(lf) |
1382 | 1382 | ||
1383 | assumed_libs = bb.data.getVar('ASSUME_SHLIBS', d, True) | 1383 | assumed_libs = d.getVar('ASSUME_SHLIBS', True) |
1384 | if assumed_libs: | 1384 | if assumed_libs: |
1385 | for e in assumed_libs.split(): | 1385 | for e in assumed_libs.split(): |
1386 | l, dep_pkg = e.split(":") | 1386 | l, dep_pkg = e.split(":") |
@@ -1424,12 +1424,12 @@ python package_do_shlibs() { | |||
1424 | python package_do_pkgconfig () { | 1424 | python package_do_pkgconfig () { |
1425 | import re | 1425 | import re |
1426 | 1426 | ||
1427 | packages = bb.data.getVar('PACKAGES', d, True) | 1427 | packages = d.getVar('PACKAGES', True) |
1428 | workdir = bb.data.getVar('WORKDIR', d, True) | 1428 | workdir = d.getVar('WORKDIR', True) |
1429 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 1429 | pkgdest = d.getVar('PKGDEST', True) |
1430 | 1430 | ||
1431 | shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True) | 1431 | shlibs_dir = d.getVar('SHLIBSDIR', True) |
1432 | shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True) | 1432 | shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) |
1433 | 1433 | ||
1434 | pc_re = re.compile('(.*)\.pc$') | 1434 | pc_re = re.compile('(.*)\.pc$') |
1435 | var_re = re.compile('(.*)=(.*)') | 1435 | var_re = re.compile('(.*)=(.*)') |
@@ -1515,9 +1515,9 @@ python package_do_pkgconfig () { | |||
1515 | } | 1515 | } |
1516 | 1516 | ||
1517 | python read_shlibdeps () { | 1517 | python read_shlibdeps () { |
1518 | packages = bb.data.getVar('PACKAGES', d, True).split() | 1518 | packages = d.getVar('PACKAGES', True).split() |
1519 | for pkg in packages: | 1519 | for pkg in packages: |
1520 | rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "") | 1520 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "") |
1521 | 1521 | ||
1522 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | 1522 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": |
1523 | depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) | 1523 | depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) |
@@ -1544,14 +1544,14 @@ python package_depchains() { | |||
1544 | package. | 1544 | package. |
1545 | """ | 1545 | """ |
1546 | 1546 | ||
1547 | packages = bb.data.getVar('PACKAGES', d, True) | 1547 | packages = d.getVar('PACKAGES', True) |
1548 | postfixes = (bb.data.getVar('DEPCHAIN_POST', d, True) or '').split() | 1548 | postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split() |
1549 | prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, True) or '').split() | 1549 | prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split() |
1550 | 1550 | ||
1551 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | 1551 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): |
1552 | 1552 | ||
1553 | #bb.note('depends for %s is %s' % (base, depends)) | 1553 | #bb.note('depends for %s is %s' % (base, depends)) |
1554 | rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "") | 1554 | rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "") |
1555 | 1555 | ||
1556 | for depend in depends: | 1556 | for depend in depends: |
1557 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | 1557 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): |
@@ -1572,7 +1572,7 @@ python package_depchains() { | |||
1572 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | 1572 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): |
1573 | 1573 | ||
1574 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | 1574 | #bb.note('rdepends for %s is %s' % (base, rdepends)) |
1575 | rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "") | 1575 | rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "") |
1576 | 1576 | ||
1577 | for depend in rdepends: | 1577 | for depend in rdepends: |
1578 | if depend.find('virtual-locale-') != -1: | 1578 | if depend.find('virtual-locale-') != -1: |
@@ -1596,15 +1596,15 @@ python package_depchains() { | |||
1596 | list.append(dep) | 1596 | list.append(dep) |
1597 | 1597 | ||
1598 | depends = [] | 1598 | depends = [] |
1599 | for dep in bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or ""): | 1599 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""): |
1600 | add_dep(depends, dep) | 1600 | add_dep(depends, dep) |
1601 | 1601 | ||
1602 | rdepends = [] | 1602 | rdepends = [] |
1603 | for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS', d, True) or ""): | 1603 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""): |
1604 | add_dep(rdepends, dep) | 1604 | add_dep(rdepends, dep) |
1605 | 1605 | ||
1606 | for pkg in packages.split(): | 1606 | for pkg in packages.split(): |
1607 | for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, True) or ""): | 1607 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""): |
1608 | add_dep(rdepends, dep) | 1608 | add_dep(rdepends, dep) |
1609 | 1609 | ||
1610 | #bb.note('rdepends is %s' % rdepends) | 1610 | #bb.note('rdepends is %s' % rdepends) |
@@ -1630,7 +1630,7 @@ python package_depchains() { | |||
1630 | 1630 | ||
1631 | for suffix in pkgs: | 1631 | for suffix in pkgs: |
1632 | for pkg in pkgs[suffix]: | 1632 | for pkg in pkgs[suffix]: |
1633 | if bb.data.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', d): | 1633 | if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'): |
1634 | continue | 1634 | continue |
1635 | (base, func) = pkgs[suffix][pkg] | 1635 | (base, func) = pkgs[suffix][pkg] |
1636 | if suffix == "-dev": | 1636 | if suffix == "-dev": |
@@ -1639,7 +1639,7 @@ python package_depchains() { | |||
1639 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | 1639 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) |
1640 | else: | 1640 | else: |
1641 | rdeps = [] | 1641 | rdeps = [] |
1642 | for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + base, d, True) or bb.data.getVar('RDEPENDS', d, True) or ""): | 1642 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""): |
1643 | add_dep(rdeps, dep) | 1643 | add_dep(rdeps, dep) |
1644 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | 1644 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) |
1645 | } | 1645 | } |
@@ -1679,22 +1679,22 @@ python do_package () { | |||
1679 | # as any change to rpmdeps requires this to be rerun. | 1679 | # as any change to rpmdeps requires this to be rerun. |
1680 | # PACKAGE_BBCLASS_VERSION = "1" | 1680 | # PACKAGE_BBCLASS_VERSION = "1" |
1681 | 1681 | ||
1682 | packages = (bb.data.getVar('PACKAGES', d, True) or "").split() | 1682 | packages = (d.getVar('PACKAGES', True) or "").split() |
1683 | if len(packages) < 1: | 1683 | if len(packages) < 1: |
1684 | bb.debug(1, "No packages to build, skipping do_package") | 1684 | bb.debug(1, "No packages to build, skipping do_package") |
1685 | return | 1685 | return |
1686 | 1686 | ||
1687 | workdir = bb.data.getVar('WORKDIR', d, True) | 1687 | workdir = d.getVar('WORKDIR', True) |
1688 | outdir = bb.data.getVar('DEPLOY_DIR', d, True) | 1688 | outdir = d.getVar('DEPLOY_DIR', True) |
1689 | dest = bb.data.getVar('D', d, True) | 1689 | dest = d.getVar('D', True) |
1690 | dvar = bb.data.getVar('PKGD', d, True) | 1690 | dvar = d.getVar('PKGD', True) |
1691 | pn = bb.data.getVar('PN', d, True) | 1691 | pn = d.getVar('PN', True) |
1692 | 1692 | ||
1693 | if not workdir or not outdir or not dest or not dvar or not pn or not packages: | 1693 | if not workdir or not outdir or not dest or not dvar or not pn or not packages: |
1694 | bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package") | 1694 | bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package") |
1695 | return | 1695 | return |
1696 | 1696 | ||
1697 | for f in (bb.data.getVar('PACKAGEFUNCS', d, True) or '').split(): | 1697 | for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): |
1698 | bb.build.exec_func(f, d) | 1698 | bb.build.exec_func(f, d) |
1699 | } | 1699 | } |
1700 | 1700 | ||
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass index 6733e64534..71e46a8c8e 100644 --- a/meta/classes/package_deb.bbclass +++ b/meta/classes/package_deb.bbclass | |||
@@ -11,18 +11,18 @@ DPKG_ARCH ?= "${TARGET_ARCH}" | |||
11 | PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" | 11 | PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" |
12 | 12 | ||
13 | python package_deb_fn () { | 13 | python package_deb_fn () { |
14 | bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d) | 14 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) |
15 | } | 15 | } |
16 | 16 | ||
17 | addtask package_deb_install | 17 | addtask package_deb_install |
18 | python do_package_deb_install () { | 18 | python do_package_deb_install () { |
19 | pkg = bb.data.getVar('PKG', d, True) | 19 | pkg = d.getVar('PKG', True) |
20 | pkgfn = bb.data.getVar('PKGFN', d, True) | 20 | pkgfn = d.getVar('PKGFN', True) |
21 | rootfs = bb.data.getVar('IMAGE_ROOTFS', d, True) | 21 | rootfs = d.getVar('IMAGE_ROOTFS', True) |
22 | debdir = bb.data.getVar('DEPLOY_DIR_DEB', d, True) | 22 | debdir = d.getVar('DEPLOY_DIR_DEB', True) |
23 | apt_config = bb.data.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf', d) | 23 | apt_config = bb.data.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf', d) |
24 | stagingbindir = bb.data.getVar('STAGING_BINDIR_NATIVE', d, True) | 24 | stagingbindir = d.getVar('STAGING_BINDIR_NATIVE', True) |
25 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 25 | tmpdir = d.getVar('TMPDIR', True) |
26 | 26 | ||
27 | if None in (pkg,pkgfn,rootfs): | 27 | if None in (pkg,pkgfn,rootfs): |
28 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGE_ROOTFS)") | 28 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGE_ROOTFS)") |
@@ -206,22 +206,22 @@ python do_package_deb () { | |||
206 | import re, copy | 206 | import re, copy |
207 | import textwrap | 207 | import textwrap |
208 | 208 | ||
209 | workdir = bb.data.getVar('WORKDIR', d, True) | 209 | workdir = d.getVar('WORKDIR', True) |
210 | if not workdir: | 210 | if not workdir: |
211 | bb.error("WORKDIR not defined, unable to package") | 211 | bb.error("WORKDIR not defined, unable to package") |
212 | return | 212 | return |
213 | 213 | ||
214 | outdir = bb.data.getVar('PKGWRITEDIRDEB', d, True) | 214 | outdir = d.getVar('PKGWRITEDIRDEB', True) |
215 | if not outdir: | 215 | if not outdir: |
216 | bb.error("PKGWRITEDIRDEB not defined, unable to package") | 216 | bb.error("PKGWRITEDIRDEB not defined, unable to package") |
217 | return | 217 | return |
218 | 218 | ||
219 | packages = bb.data.getVar('PACKAGES', d, True) | 219 | packages = d.getVar('PACKAGES', True) |
220 | if not packages: | 220 | if not packages: |
221 | bb.debug(1, "PACKAGES not defined, nothing to package") | 221 | bb.debug(1, "PACKAGES not defined, nothing to package") |
222 | return | 222 | return |
223 | 223 | ||
224 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 224 | tmpdir = d.getVar('TMPDIR', True) |
225 | 225 | ||
226 | if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): | 226 | if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): |
227 | os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) | 227 | os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) |
@@ -230,7 +230,7 @@ python do_package_deb () { | |||
230 | bb.debug(1, "No packages; nothing to do") | 230 | bb.debug(1, "No packages; nothing to do") |
231 | return | 231 | return |
232 | 232 | ||
233 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 233 | pkgdest = d.getVar('PKGDEST', True) |
234 | 234 | ||
235 | for pkg in packages.split(): | 235 | for pkg in packages.split(): |
236 | localdata = bb.data.createCopy(d) | 236 | localdata = bb.data.createCopy(d) |
@@ -238,19 +238,19 @@ python do_package_deb () { | |||
238 | 238 | ||
239 | lf = bb.utils.lockfile(root + ".lock") | 239 | lf = bb.utils.lockfile(root + ".lock") |
240 | 240 | ||
241 | bb.data.setVar('ROOT', '', localdata) | 241 | localdata.setVar('ROOT', '') |
242 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | 242 | localdata.setVar('ROOT_%s' % pkg, root) |
243 | pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True) | 243 | pkgname = localdata.getVar('PKG_%s' % pkg, True) |
244 | if not pkgname: | 244 | if not pkgname: |
245 | pkgname = pkg | 245 | pkgname = pkg |
246 | bb.data.setVar('PKG', pkgname, localdata) | 246 | localdata.setVar('PKG', pkgname) |
247 | 247 | ||
248 | bb.data.setVar('OVERRIDES', pkg, localdata) | 248 | localdata.setVar('OVERRIDES', pkg) |
249 | 249 | ||
250 | bb.data.update_data(localdata) | 250 | bb.data.update_data(localdata) |
251 | basedir = os.path.join(os.path.dirname(root)) | 251 | basedir = os.path.join(os.path.dirname(root)) |
252 | 252 | ||
253 | pkgoutdir = os.path.join(outdir, bb.data.getVar('PACKAGE_ARCH', localdata, True)) | 253 | pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True)) |
254 | bb.mkdirhier(pkgoutdir) | 254 | bb.mkdirhier(pkgoutdir) |
255 | 255 | ||
256 | os.chdir(root) | 256 | os.chdir(root) |
@@ -261,8 +261,8 @@ python do_package_deb () { | |||
261 | del g[g.index('./DEBIAN')] | 261 | del g[g.index('./DEBIAN')] |
262 | except ValueError: | 262 | except ValueError: |
263 | pass | 263 | pass |
264 | if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": | 264 | if not g and localdata.getVar('ALLOW_EMPTY') != "1": |
265 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, True), bb.data.getVar('PKGR', localdata, True))) | 265 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) |
266 | bb.utils.unlockfile(lf) | 266 | bb.utils.unlockfile(lf) |
267 | continue | 267 | continue |
268 | 268 | ||
@@ -278,7 +278,7 @@ python do_package_deb () { | |||
278 | raise bb.build.FuncFailed("unable to open control file for writing.") | 278 | raise bb.build.FuncFailed("unable to open control file for writing.") |
279 | 279 | ||
280 | fields = [] | 280 | fields = [] |
281 | pe = bb.data.getVar('PKGE', d, True) | 281 | pe = d.getVar('PKGE', True) |
282 | if pe and int(pe) > 0: | 282 | if pe and int(pe) > 0: |
283 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) | 283 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) |
284 | else: | 284 | else: |
@@ -298,10 +298,10 @@ python do_package_deb () { | |||
298 | def pullData(l, d): | 298 | def pullData(l, d): |
299 | l2 = [] | 299 | l2 = [] |
300 | for i in l: | 300 | for i in l: |
301 | data = bb.data.getVar(i, d, True) | 301 | data = d.getVar(i, True) |
302 | if data is None: | 302 | if data is None: |
303 | raise KeyError(f) | 303 | raise KeyError(f) |
304 | if i == 'DPKG_ARCH' and bb.data.getVar('PACKAGE_ARCH', d, True) == 'all': | 304 | if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all': |
305 | data = 'all' | 305 | data = 'all' |
306 | l2.append(data) | 306 | l2.append(data) |
307 | return l2 | 307 | return l2 |
@@ -311,12 +311,12 @@ python do_package_deb () { | |||
311 | try: | 311 | try: |
312 | for (c, fs) in fields: | 312 | for (c, fs) in fields: |
313 | for f in fs: | 313 | for f in fs: |
314 | if bb.data.getVar(f, localdata) is None: | 314 | if localdata.getVar(f) is None: |
315 | raise KeyError(f) | 315 | raise KeyError(f) |
316 | # Special behavior for description... | 316 | # Special behavior for description... |
317 | if 'DESCRIPTION' in fs: | 317 | if 'DESCRIPTION' in fs: |
318 | summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "." | 318 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." |
319 | description = bb.data.getVar('DESCRIPTION', localdata, True) or "." | 319 | description = localdata.getVar('DESCRIPTION', True) or "." |
320 | description = textwrap.dedent(description).strip() | 320 | description = textwrap.dedent(description).strip() |
321 | ctrlfile.write('Description: %s\n' % unicode(summary)) | 321 | ctrlfile.write('Description: %s\n' % unicode(summary)) |
322 | ctrlfile.write('%s\n' % unicode(textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' '))) | 322 | ctrlfile.write('%s\n' % unicode(textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' '))) |
@@ -332,18 +332,18 @@ python do_package_deb () { | |||
332 | 332 | ||
333 | bb.build.exec_func("mapping_rename_hook", localdata) | 333 | bb.build.exec_func("mapping_rename_hook", localdata) |
334 | 334 | ||
335 | rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, True) or "") | 335 | rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "") |
336 | for dep in rdepends: | 336 | for dep in rdepends: |
337 | if '*' in dep: | 337 | if '*' in dep: |
338 | del rdepends[dep] | 338 | del rdepends[dep] |
339 | rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, True) or "") | 339 | rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "") |
340 | for dep in rrecommends: | 340 | for dep in rrecommends: |
341 | if '*' in dep: | 341 | if '*' in dep: |
342 | del rrecommends[dep] | 342 | del rrecommends[dep] |
343 | rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, True) or "") | 343 | rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "") |
344 | rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, True) or "") | 344 | rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "") |
345 | rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, True) or "") | 345 | rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "") |
346 | rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, True) or "") | 346 | rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "") |
347 | if rdepends: | 347 | if rdepends: |
348 | ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends))) | 348 | ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends))) |
349 | if rsuggests: | 349 | if rsuggests: |
@@ -359,7 +359,7 @@ python do_package_deb () { | |||
359 | ctrlfile.close() | 359 | ctrlfile.close() |
360 | 360 | ||
361 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 361 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
362 | scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True) | 362 | scriptvar = localdata.getVar('pkg_%s' % script, True) |
363 | if not scriptvar: | 363 | if not scriptvar: |
364 | continue | 364 | continue |
365 | try: | 365 | try: |
@@ -372,7 +372,7 @@ python do_package_deb () { | |||
372 | scriptfile.close() | 372 | scriptfile.close() |
373 | os.chmod(os.path.join(controldir, script), 0755) | 373 | os.chmod(os.path.join(controldir, script), 0755) |
374 | 374 | ||
375 | conffiles_str = bb.data.getVar("CONFFILES", localdata, True) | 375 | conffiles_str = localdata.getVar("CONFFILES", True) |
376 | if conffiles_str: | 376 | if conffiles_str: |
377 | try: | 377 | try: |
378 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') | 378 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') |
@@ -384,7 +384,7 @@ python do_package_deb () { | |||
384 | conffiles.close() | 384 | conffiles.close() |
385 | 385 | ||
386 | os.chdir(basedir) | 386 | os.chdir(basedir) |
387 | ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (bb.data.getVar("PATH", localdata, True), root, pkgoutdir)) | 387 | ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir)) |
388 | if ret != 0: | 388 | if ret != 0: |
389 | bb.utils.prunedir(controldir) | 389 | bb.utils.prunedir(controldir) |
390 | bb.utils.unlockfile(lf) | 390 | bb.utils.unlockfile(lf) |
@@ -405,17 +405,17 @@ python do_package_write_deb_setscene () { | |||
405 | addtask do_package_write_deb_setscene | 405 | addtask do_package_write_deb_setscene |
406 | 406 | ||
407 | python () { | 407 | python () { |
408 | if bb.data.getVar('PACKAGES', d, True) != '': | 408 | if d.getVar('PACKAGES', True) != '': |
409 | deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split() | 409 | deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split() |
410 | deps.append('dpkg-native:do_populate_sysroot') | 410 | deps.append('dpkg-native:do_populate_sysroot') |
411 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 411 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
412 | bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d) | 412 | bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d) |
413 | bb.data.setVarFlag('do_package_write_deb', 'fakeroot', "1", d) | 413 | d.setVarFlag('do_package_write_deb', 'fakeroot', "1") |
414 | bb.data.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1", d) | 414 | d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1") |
415 | 415 | ||
416 | # Map TARGET_ARCH to Debian's ideas about architectures | 416 | # Map TARGET_ARCH to Debian's ideas about architectures |
417 | if bb.data.getVar('DPKG_ARCH', d, True) in ["x86", "i486", "i586", "i686", "pentium"]: | 417 | if d.getVar('DPKG_ARCH', True) in ["x86", "i486", "i586", "i686", "pentium"]: |
418 | bb.data.setVar('DPKG_ARCH', 'i386', d) | 418 | d.setVar('DPKG_ARCH', 'i386') |
419 | } | 419 | } |
420 | 420 | ||
421 | python do_package_write_deb () { | 421 | python do_package_write_deb () { |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index d41b40d2c5..df608fc0e3 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
@@ -11,16 +11,16 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks" | |||
11 | OPKGBUILDCMD ??= "opkg-build" | 11 | OPKGBUILDCMD ??= "opkg-build" |
12 | 12 | ||
13 | python package_ipk_fn () { | 13 | python package_ipk_fn () { |
14 | bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d) | 14 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) |
15 | } | 15 | } |
16 | 16 | ||
17 | python package_ipk_install () { | 17 | python package_ipk_install () { |
18 | pkg = bb.data.getVar('PKG', d, 1) | 18 | pkg = d.getVar('PKG', 1) |
19 | pkgfn = bb.data.getVar('PKGFN', d, 1) | 19 | pkgfn = d.getVar('PKGFN', 1) |
20 | rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1) | 20 | rootfs = d.getVar('IMAGE_ROOTFS', 1) |
21 | ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1) | 21 | ipkdir = d.getVar('DEPLOY_DIR_IPK', 1) |
22 | stagingdir = bb.data.getVar('STAGING_DIR', d, 1) | 22 | stagingdir = d.getVar('STAGING_DIR', 1) |
23 | tmpdir = bb.data.getVar('TMPDIR', d, 1) | 23 | tmpdir = d.getVar('TMPDIR', 1) |
24 | 24 | ||
25 | if None in (pkg,pkgfn,rootfs): | 25 | if None in (pkg,pkgfn,rootfs): |
26 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") | 26 | raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") |
@@ -36,7 +36,7 @@ python package_ipk_install () { | |||
36 | # Generate ipk.conf if it or the stamp doesnt exist | 36 | # Generate ipk.conf if it or the stamp doesnt exist |
37 | conffile = os.path.join(stagingdir,"ipkg.conf") | 37 | conffile = os.path.join(stagingdir,"ipkg.conf") |
38 | if not os.access(conffile, os.R_OK): | 38 | if not os.access(conffile, os.R_OK): |
39 | ipkg_archs = bb.data.getVar('PACKAGE_ARCHS',d) | 39 | ipkg_archs = d.getVar('PACKAGE_ARCHS') |
40 | if ipkg_archs is None: | 40 | if ipkg_archs is None: |
41 | bb.error("PACKAGE_ARCHS missing") | 41 | bb.error("PACKAGE_ARCHS missing") |
42 | raise FuncFailed | 42 | raise FuncFailed |
@@ -259,15 +259,15 @@ python do_package_ipk () { | |||
259 | import re, copy | 259 | import re, copy |
260 | import textwrap | 260 | import textwrap |
261 | 261 | ||
262 | workdir = bb.data.getVar('WORKDIR', d, True) | 262 | workdir = d.getVar('WORKDIR', True) |
263 | outdir = bb.data.getVar('PKGWRITEDIRIPK', d, True) | 263 | outdir = d.getVar('PKGWRITEDIRIPK', True) |
264 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 264 | tmpdir = d.getVar('TMPDIR', True) |
265 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 265 | pkgdest = d.getVar('PKGDEST', True) |
266 | if not workdir or not outdir or not tmpdir: | 266 | if not workdir or not outdir or not tmpdir: |
267 | bb.error("Variables incorrectly set, unable to package") | 267 | bb.error("Variables incorrectly set, unable to package") |
268 | return | 268 | return |
269 | 269 | ||
270 | packages = bb.data.getVar('PACKAGES', d, True) | 270 | packages = d.getVar('PACKAGES', True) |
271 | if not packages or packages == '': | 271 | if not packages or packages == '': |
272 | bb.debug(1, "No packages; nothing to do") | 272 | bb.debug(1, "No packages; nothing to do") |
273 | return | 273 | return |
@@ -283,18 +283,18 @@ python do_package_ipk () { | |||
283 | 283 | ||
284 | lf = bb.utils.lockfile(root + ".lock") | 284 | lf = bb.utils.lockfile(root + ".lock") |
285 | 285 | ||
286 | bb.data.setVar('ROOT', '', localdata) | 286 | localdata.setVar('ROOT', '') |
287 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | 287 | localdata.setVar('ROOT_%s' % pkg, root) |
288 | pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) | 288 | pkgname = localdata.getVar('PKG_%s' % pkg, 1) |
289 | if not pkgname: | 289 | if not pkgname: |
290 | pkgname = pkg | 290 | pkgname = pkg |
291 | bb.data.setVar('PKG', pkgname, localdata) | 291 | localdata.setVar('PKG', pkgname) |
292 | 292 | ||
293 | bb.data.setVar('OVERRIDES', pkg, localdata) | 293 | localdata.setVar('OVERRIDES', pkg) |
294 | 294 | ||
295 | bb.data.update_data(localdata) | 295 | bb.data.update_data(localdata) |
296 | basedir = os.path.join(os.path.dirname(root)) | 296 | basedir = os.path.join(os.path.dirname(root)) |
297 | arch = bb.data.getVar('PACKAGE_ARCH', localdata, 1) | 297 | arch = localdata.getVar('PACKAGE_ARCH', 1) |
298 | pkgoutdir = "%s/%s" % (outdir, arch) | 298 | pkgoutdir = "%s/%s" % (outdir, arch) |
299 | bb.mkdirhier(pkgoutdir) | 299 | bb.mkdirhier(pkgoutdir) |
300 | os.chdir(root) | 300 | os.chdir(root) |
@@ -305,8 +305,8 @@ python do_package_ipk () { | |||
305 | del g[g.index('./CONTROL')] | 305 | del g[g.index('./CONTROL')] |
306 | except ValueError: | 306 | except ValueError: |
307 | pass | 307 | pass |
308 | if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": | 308 | if not g and localdata.getVar('ALLOW_EMPTY') != "1": |
309 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1))) | 309 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) |
310 | bb.utils.unlockfile(lf) | 310 | bb.utils.unlockfile(lf) |
311 | continue | 311 | continue |
312 | 312 | ||
@@ -319,7 +319,7 @@ python do_package_ipk () { | |||
319 | raise bb.build.FuncFailed("unable to open control file for writing.") | 319 | raise bb.build.FuncFailed("unable to open control file for writing.") |
320 | 320 | ||
321 | fields = [] | 321 | fields = [] |
322 | pe = bb.data.getVar('PKGE', d, 1) | 322 | pe = d.getVar('PKGE', 1) |
323 | if pe and int(pe) > 0: | 323 | if pe and int(pe) > 0: |
324 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) | 324 | fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) |
325 | else: | 325 | else: |
@@ -336,7 +336,7 @@ python do_package_ipk () { | |||
336 | def pullData(l, d): | 336 | def pullData(l, d): |
337 | l2 = [] | 337 | l2 = [] |
338 | for i in l: | 338 | for i in l: |
339 | l2.append(bb.data.getVar(i, d, 1)) | 339 | l2.append(d.getVar(i, 1)) |
340 | return l2 | 340 | return l2 |
341 | 341 | ||
342 | ctrlfile.write("Package: %s\n" % pkgname) | 342 | ctrlfile.write("Package: %s\n" % pkgname) |
@@ -344,12 +344,12 @@ python do_package_ipk () { | |||
344 | try: | 344 | try: |
345 | for (c, fs) in fields: | 345 | for (c, fs) in fields: |
346 | for f in fs: | 346 | for f in fs: |
347 | if bb.data.getVar(f, localdata) is None: | 347 | if localdata.getVar(f) is None: |
348 | raise KeyError(f) | 348 | raise KeyError(f) |
349 | # Special behavior for description... | 349 | # Special behavior for description... |
350 | if 'DESCRIPTION' in fs: | 350 | if 'DESCRIPTION' in fs: |
351 | summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "." | 351 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." |
352 | description = bb.data.getVar('DESCRIPTION', localdata, True) or "." | 352 | description = localdata.getVar('DESCRIPTION', True) or "." |
353 | description = textwrap.dedent(description).strip() | 353 | description = textwrap.dedent(description).strip() |
354 | ctrlfile.write('Description: %s\n' % summary) | 354 | ctrlfile.write('Description: %s\n' % summary) |
355 | ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')) | 355 | ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')) |
@@ -365,12 +365,12 @@ python do_package_ipk () { | |||
365 | 365 | ||
366 | bb.build.exec_func("mapping_rename_hook", localdata) | 366 | bb.build.exec_func("mapping_rename_hook", localdata) |
367 | 367 | ||
368 | rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, 1) or "") | 368 | rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "") |
369 | rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, 1) or "") | 369 | rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "") |
370 | rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, 1) or "") | 370 | rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "") |
371 | rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, 1) or "") | 371 | rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "") |
372 | rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, 1) or "") | 372 | rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "") |
373 | rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, 1) or "") | 373 | rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "") |
374 | 374 | ||
375 | if rdepends: | 375 | if rdepends: |
376 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) | 376 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) |
@@ -384,14 +384,14 @@ python do_package_ipk () { | |||
384 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) | 384 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) |
385 | if rconflicts: | 385 | if rconflicts: |
386 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) | 386 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) |
387 | src_uri = bb.data.getVar("SRC_URI", localdata, 1) | 387 | src_uri = localdata.getVar("SRC_URI", 1) |
388 | if src_uri: | 388 | if src_uri: |
389 | src_uri = re.sub("\s+", " ", src_uri) | 389 | src_uri = re.sub("\s+", " ", src_uri) |
390 | ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) | 390 | ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) |
391 | ctrlfile.close() | 391 | ctrlfile.close() |
392 | 392 | ||
393 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 393 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
394 | scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1) | 394 | scriptvar = localdata.getVar('pkg_%s' % script, 1) |
395 | if not scriptvar: | 395 | if not scriptvar: |
396 | continue | 396 | continue |
397 | try: | 397 | try: |
@@ -403,7 +403,7 @@ python do_package_ipk () { | |||
403 | scriptfile.close() | 403 | scriptfile.close() |
404 | os.chmod(os.path.join(controldir, script), 0755) | 404 | os.chmod(os.path.join(controldir, script), 0755) |
405 | 405 | ||
406 | conffiles_str = bb.data.getVar("CONFFILES", localdata, 1) | 406 | conffiles_str = localdata.getVar("CONFFILES", 1) |
407 | if conffiles_str: | 407 | if conffiles_str: |
408 | try: | 408 | try: |
409 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') | 409 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') |
@@ -415,8 +415,8 @@ python do_package_ipk () { | |||
415 | conffiles.close() | 415 | conffiles.close() |
416 | 416 | ||
417 | os.chdir(basedir) | 417 | os.chdir(basedir) |
418 | ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1), | 418 | ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1), |
419 | bb.data.getVar("OPKGBUILDCMD",d,1), pkg, pkgoutdir)) | 419 | d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir)) |
420 | if ret != 0: | 420 | if ret != 0: |
421 | bb.utils.unlockfile(lf) | 421 | bb.utils.unlockfile(lf) |
422 | raise bb.build.FuncFailed("opkg-build execution failed") | 422 | raise bb.build.FuncFailed("opkg-build execution failed") |
@@ -437,13 +437,13 @@ python do_package_write_ipk_setscene () { | |||
437 | addtask do_package_write_ipk_setscene | 437 | addtask do_package_write_ipk_setscene |
438 | 438 | ||
439 | python () { | 439 | python () { |
440 | if bb.data.getVar('PACKAGES', d, True) != '': | 440 | if d.getVar('PACKAGES', True) != '': |
441 | deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split() | 441 | deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split() |
442 | deps.append('opkg-utils-native:do_populate_sysroot') | 442 | deps.append('opkg-utils-native:do_populate_sysroot') |
443 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 443 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
444 | bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d) | 444 | bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d) |
445 | bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d) | 445 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") |
446 | bb.data.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1", d) | 446 | d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1") |
447 | } | 447 | } |
448 | 448 | ||
449 | python do_package_write_ipk () { | 449 | python do_package_write_ipk () { |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index f804a0fc17..2c5545c11d 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
@@ -8,7 +8,7 @@ RPMBUILD="rpmbuild" | |||
8 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" | 8 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" |
9 | 9 | ||
10 | python package_rpm_fn () { | 10 | python package_rpm_fn () { |
11 | bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d) | 11 | bb.data.setVar('PKGFN', d.getVar('PKG'), d) |
12 | } | 12 | } |
13 | 13 | ||
14 | python package_rpm_install () { | 14 | python package_rpm_install () { |
@@ -406,7 +406,7 @@ python write_specfile () { | |||
406 | name = "".join(name.split(eext[1] + '-')) | 406 | name = "".join(name.split(eext[1] + '-')) |
407 | return name | 407 | return name |
408 | 408 | ||
409 | # ml = bb.data.getVar("MLPREFIX", d, True) | 409 | # ml = d.getVar("MLPREFIX", True) |
410 | # if ml and name and len(ml) != 0 and name.find(ml) == 0: | 410 | # if ml and name and len(ml) != 0 and name.find(ml) == 0: |
411 | # return ml.join(name.split(ml, 1)[1:]) | 411 | # return ml.join(name.split(ml, 1)[1:]) |
412 | # return name | 412 | # return name |
@@ -426,7 +426,7 @@ python write_specfile () { | |||
426 | # after renaming we cannot look up the dependencies in the packagedata | 426 | # after renaming we cannot look up the dependencies in the packagedata |
427 | # store. | 427 | # store. |
428 | def translate_vers(varname, d): | 428 | def translate_vers(varname, d): |
429 | depends = bb.data.getVar(varname, d, True) | 429 | depends = d.getVar(varname, True) |
430 | if depends: | 430 | if depends: |
431 | depends_dict = bb.utils.explode_dep_versions(depends) | 431 | depends_dict = bb.utils.explode_dep_versions(depends) |
432 | newdeps_dict = {} | 432 | newdeps_dict = {} |
@@ -481,34 +481,34 @@ python write_specfile () { | |||
481 | scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi' | 481 | scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi' |
482 | return scr | 482 | return scr |
483 | 483 | ||
484 | packages = bb.data.getVar('PACKAGES', d, True) | 484 | packages = d.getVar('PACKAGES', True) |
485 | if not packages or packages == '': | 485 | if not packages or packages == '': |
486 | bb.debug(1, "No packages; nothing to do") | 486 | bb.debug(1, "No packages; nothing to do") |
487 | return | 487 | return |
488 | 488 | ||
489 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 489 | pkgdest = d.getVar('PKGDEST', True) |
490 | if not pkgdest: | 490 | if not pkgdest: |
491 | bb.fatal("No PKGDEST") | 491 | bb.fatal("No PKGDEST") |
492 | return | 492 | return |
493 | 493 | ||
494 | outspecfile = bb.data.getVar('OUTSPECFILE', d, True) | 494 | outspecfile = d.getVar('OUTSPECFILE', True) |
495 | if not outspecfile: | 495 | if not outspecfile: |
496 | bb.fatal("No OUTSPECFILE") | 496 | bb.fatal("No OUTSPECFILE") |
497 | return | 497 | return |
498 | 498 | ||
499 | # Construct the SPEC file... | 499 | # Construct the SPEC file... |
500 | srcname = strip_multilib(bb.data.getVar('PN', d, True), d) | 500 | srcname = strip_multilib(d.getVar('PN', True), d) |
501 | srcsummary = (bb.data.getVar('SUMMARY', d, True) or bb.data.getVar('DESCRIPTION', d, True) or ".") | 501 | srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") |
502 | srcversion = bb.data.getVar('PKGV', d, True).replace('-', '+') | 502 | srcversion = d.getVar('PKGV', True).replace('-', '+') |
503 | srcrelease = bb.data.getVar('PKGR', d, True) | 503 | srcrelease = d.getVar('PKGR', True) |
504 | srcepoch = (bb.data.getVar('PKGE', d, True) or "") | 504 | srcepoch = (d.getVar('PKGE', True) or "") |
505 | srclicense = bb.data.getVar('LICENSE', d, True) | 505 | srclicense = d.getVar('LICENSE', True) |
506 | srcsection = bb.data.getVar('SECTION', d, True) | 506 | srcsection = d.getVar('SECTION', True) |
507 | srcmaintainer = bb.data.getVar('MAINTAINER', d, True) | 507 | srcmaintainer = d.getVar('MAINTAINER', True) |
508 | srchomepage = bb.data.getVar('HOMEPAGE', d, True) | 508 | srchomepage = d.getVar('HOMEPAGE', True) |
509 | srcdescription = bb.data.getVar('DESCRIPTION', d, True) or "." | 509 | srcdescription = d.getVar('DESCRIPTION', True) or "." |
510 | 510 | ||
511 | srcdepends = strip_multilib(bb.data.getVar('DEPENDS', d, True), d) | 511 | srcdepends = strip_multilib(d.getVar('DEPENDS', True), d) |
512 | srcrdepends = [] | 512 | srcrdepends = [] |
513 | srcrrecommends = [] | 513 | srcrrecommends = [] |
514 | srcrsuggests = [] | 514 | srcrsuggests = [] |
@@ -538,28 +538,28 @@ python write_specfile () { | |||
538 | 538 | ||
539 | lf = bb.utils.lockfile(root + ".lock") | 539 | lf = bb.utils.lockfile(root + ".lock") |
540 | 540 | ||
541 | bb.data.setVar('ROOT', '', localdata) | 541 | localdata.setVar('ROOT', '') |
542 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | 542 | localdata.setVar('ROOT_%s' % pkg, root) |
543 | pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) | 543 | pkgname = localdata.getVar('PKG_%s' % pkg, 1) |
544 | if not pkgname: | 544 | if not pkgname: |
545 | pkgname = pkg | 545 | pkgname = pkg |
546 | bb.data.setVar('PKG', pkgname, localdata) | 546 | localdata.setVar('PKG', pkgname) |
547 | 547 | ||
548 | bb.data.setVar('OVERRIDES', pkg, localdata) | 548 | localdata.setVar('OVERRIDES', pkg) |
549 | 549 | ||
550 | bb.data.update_data(localdata) | 550 | bb.data.update_data(localdata) |
551 | 551 | ||
552 | conffiles = (bb.data.getVar('CONFFILES', localdata, True) or "").split() | 552 | conffiles = (localdata.getVar('CONFFILES', True) or "").split() |
553 | 553 | ||
554 | splitname = strip_multilib(pkgname, d) | 554 | splitname = strip_multilib(pkgname, d) |
555 | 555 | ||
556 | splitsummary = (bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or ".") | 556 | splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") |
557 | splitversion = (bb.data.getVar('PKGV', localdata, True) or "").replace('-', '+') | 557 | splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') |
558 | splitrelease = (bb.data.getVar('PKGR', localdata, True) or "") | 558 | splitrelease = (localdata.getVar('PKGR', True) or "") |
559 | splitepoch = (bb.data.getVar('PKGE', localdata, True) or "") | 559 | splitepoch = (localdata.getVar('PKGE', True) or "") |
560 | splitlicense = (bb.data.getVar('LICENSE', localdata, True) or "") | 560 | splitlicense = (localdata.getVar('LICENSE', True) or "") |
561 | splitsection = (bb.data.getVar('SECTION', localdata, True) or "") | 561 | splitsection = (localdata.getVar('SECTION', True) or "") |
562 | splitdescription = (bb.data.getVar('DESCRIPTION', localdata, True) or ".") | 562 | splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") |
563 | 563 | ||
564 | translate_vers('RDEPENDS', localdata) | 564 | translate_vers('RDEPENDS', localdata) |
565 | translate_vers('RRECOMMENDS', localdata) | 565 | translate_vers('RRECOMMENDS', localdata) |
@@ -571,12 +571,12 @@ python write_specfile () { | |||
571 | # Map the dependencies into their final form | 571 | # Map the dependencies into their final form |
572 | bb.build.exec_func("mapping_rename_hook", localdata) | 572 | bb.build.exec_func("mapping_rename_hook", localdata) |
573 | 573 | ||
574 | splitrdepends = strip_multilib(bb.data.getVar('RDEPENDS', localdata, True), d) or "" | 574 | splitrdepends = strip_multilib(localdata.getVar('RDEPENDS', True), d) or "" |
575 | splitrrecommends = strip_multilib(bb.data.getVar('RRECOMMENDS', localdata, True), d) or "" | 575 | splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or "" |
576 | splitrsuggests = strip_multilib(bb.data.getVar('RSUGGESTS', localdata, True), d) or "" | 576 | splitrsuggests = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or "" |
577 | splitrprovides = strip_multilib(bb.data.getVar('RPROVIDES', localdata, True), d) or "" | 577 | splitrprovides = strip_multilib(localdata.getVar('RPROVIDES', True), d) or "" |
578 | splitrreplaces = strip_multilib(bb.data.getVar('RREPLACES', localdata, True), d) or "" | 578 | splitrreplaces = strip_multilib(localdata.getVar('RREPLACES', True), d) or "" |
579 | splitrconflicts = strip_multilib(bb.data.getVar('RCONFLICTS', localdata, True), d) or "" | 579 | splitrconflicts = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or "" |
580 | splitrobsoletes = [] | 580 | splitrobsoletes = [] |
581 | 581 | ||
582 | # For now we need to manually supplement RPROVIDES with any update-alternatives links | 582 | # For now we need to manually supplement RPROVIDES with any update-alternatives links |
@@ -592,14 +592,14 @@ python write_specfile () { | |||
592 | srcrreplaces = splitrreplaces | 592 | srcrreplaces = splitrreplaces |
593 | srcrconflicts = splitrconflicts | 593 | srcrconflicts = splitrconflicts |
594 | 594 | ||
595 | srcpreinst = bb.data.getVar('pkg_preinst', localdata, True) | 595 | srcpreinst = localdata.getVar('pkg_preinst', True) |
596 | srcpostinst = bb.data.getVar('pkg_postinst', localdata, True) | 596 | srcpostinst = localdata.getVar('pkg_postinst', True) |
597 | srcprerm = bb.data.getVar('pkg_prerm', localdata, True) | 597 | srcprerm = localdata.getVar('pkg_prerm', True) |
598 | srcpostrm = bb.data.getVar('pkg_postrm', localdata, True) | 598 | srcpostrm = localdata.getVar('pkg_postrm', True) |
599 | 599 | ||
600 | file_list = [] | 600 | file_list = [] |
601 | walk_files(root, file_list, conffiles) | 601 | walk_files(root, file_list, conffiles) |
602 | if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": | 602 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": |
603 | bb.note("Not creating empty RPM package for %s" % splitname) | 603 | bb.note("Not creating empty RPM package for %s" % splitname) |
604 | else: | 604 | else: |
605 | bb.note("Creating RPM package for %s" % splitname) | 605 | bb.note("Creating RPM package for %s" % splitname) |
@@ -672,7 +672,7 @@ python write_specfile () { | |||
672 | 672 | ||
673 | # Now process scriptlets | 673 | # Now process scriptlets |
674 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 674 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
675 | scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True) | 675 | scriptvar = localdata.getVar('pkg_%s' % script, True) |
676 | if not scriptvar: | 676 | if not scriptvar: |
677 | continue | 677 | continue |
678 | if script == 'preinst': | 678 | if script == 'preinst': |
@@ -691,7 +691,7 @@ python write_specfile () { | |||
691 | # Now process files | 691 | # Now process files |
692 | file_list = [] | 692 | file_list = [] |
693 | walk_files(root, file_list, conffiles) | 693 | walk_files(root, file_list, conffiles) |
694 | if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": | 694 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": |
695 | bb.note("Not creating empty RPM package for %s" % splitname) | 695 | bb.note("Not creating empty RPM package for %s" % splitname) |
696 | else: | 696 | else: |
697 | spec_files_bottom.append('%%files -n %s' % splitname) | 697 | spec_files_bottom.append('%%files -n %s' % splitname) |
@@ -813,29 +813,29 @@ python do_package_rpm () { | |||
813 | # We need a simple way to remove the MLPREFIX from the package name, | 813 | # We need a simple way to remove the MLPREFIX from the package name, |
814 | # and dependency information... | 814 | # and dependency information... |
815 | def strip_multilib(name, d): | 815 | def strip_multilib(name, d): |
816 | ml = bb.data.getVar("MLPREFIX", d, True) | 816 | ml = d.getVar("MLPREFIX", True) |
817 | if ml and name and len(ml) != 0 and name.find(ml) >= 0: | 817 | if ml and name and len(ml) != 0 and name.find(ml) >= 0: |
818 | return "".join(name.split(ml)) | 818 | return "".join(name.split(ml)) |
819 | return name | 819 | return name |
820 | 820 | ||
821 | workdir = bb.data.getVar('WORKDIR', d, True) | 821 | workdir = d.getVar('WORKDIR', True) |
822 | outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, True) | 822 | outdir = d.getVar('DEPLOY_DIR_IPK', True) |
823 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 823 | tmpdir = d.getVar('TMPDIR', True) |
824 | pkgd = bb.data.getVar('PKGD', d, True) | 824 | pkgd = d.getVar('PKGD', True) |
825 | pkgdest = bb.data.getVar('PKGDEST', d, True) | 825 | pkgdest = d.getVar('PKGDEST', True) |
826 | if not workdir or not outdir or not pkgd or not tmpdir: | 826 | if not workdir or not outdir or not pkgd or not tmpdir: |
827 | bb.error("Variables incorrectly set, unable to package") | 827 | bb.error("Variables incorrectly set, unable to package") |
828 | return | 828 | return |
829 | 829 | ||
830 | packages = bb.data.getVar('PACKAGES', d, True) | 830 | packages = d.getVar('PACKAGES', True) |
831 | if not packages or packages == '': | 831 | if not packages or packages == '': |
832 | bb.debug(1, "No packages; nothing to do") | 832 | bb.debug(1, "No packages; nothing to do") |
833 | return | 833 | return |
834 | 834 | ||
835 | # Construct the spec file... | 835 | # Construct the spec file... |
836 | srcname = strip_multilib(bb.data.getVar('PN', d, True), d) | 836 | srcname = strip_multilib(d.getVar('PN', True), d) |
837 | outspecfile = workdir + "/" + srcname + ".spec" | 837 | outspecfile = workdir + "/" + srcname + ".spec" |
838 | bb.data.setVar('OUTSPECFILE', outspecfile, d) | 838 | d.setVar('OUTSPECFILE', outspecfile) |
839 | bb.build.exec_func('write_specfile', d) | 839 | bb.build.exec_func('write_specfile', d) |
840 | 840 | ||
841 | # Construct per file dependencies file | 841 | # Construct per file dependencies file |
@@ -844,10 +844,10 @@ python do_package_rpm () { | |||
844 | outfile.write("\n# Dependency table\n") | 844 | outfile.write("\n# Dependency table\n") |
845 | for pkg in packages.split(): | 845 | for pkg in packages.split(): |
846 | dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg | 846 | dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg |
847 | dependsflist = (bb.data.getVar(dependsflist_key, d, True) or "") | 847 | dependsflist = (d.getVar(dependsflist_key, True) or "") |
848 | for dfile in dependsflist.split(): | 848 | for dfile in dependsflist.split(): |
849 | key = "FILE" + varname + "_" + dfile + "_" + pkg | 849 | key = "FILE" + varname + "_" + dfile + "_" + pkg |
850 | depends_dict = bb.utils.explode_dep_versions(bb.data.getVar(key, d, True) or "") | 850 | depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") |
851 | file = dfile.replace("@underscore@", "_") | 851 | file = dfile.replace("@underscore@", "_") |
852 | file = file.replace("@closebrace@", "]") | 852 | file = file.replace("@closebrace@", "]") |
853 | file = file.replace("@openbrace@", "[") | 853 | file = file.replace("@openbrace@", "[") |
@@ -899,15 +899,15 @@ python do_package_rpm () { | |||
899 | os.chmod(outprovides, 0755) | 899 | os.chmod(outprovides, 0755) |
900 | 900 | ||
901 | # Setup the rpmbuild arguments... | 901 | # Setup the rpmbuild arguments... |
902 | rpmbuild = bb.data.getVar('RPMBUILD', d, True) | 902 | rpmbuild = d.getVar('RPMBUILD', True) |
903 | targetsys = bb.data.getVar('TARGET_SYS', d, True) | 903 | targetsys = d.getVar('TARGET_SYS', True) |
904 | targetvendor = bb.data.getVar('TARGET_VENDOR', d, True) | 904 | targetvendor = d.getVar('TARGET_VENDOR', True) |
905 | package_arch = bb.data.getVar('PACKAGE_ARCH', d, True) or "" | 905 | package_arch = d.getVar('PACKAGE_ARCH', True) or "" |
906 | if package_arch not in "all any noarch".split(): | 906 | if package_arch not in "all any noarch".split(): |
907 | ml_prefix = (bb.data.getVar('MLPREFIX', d, True) or "").replace("-", "_") | 907 | ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") |
908 | bb.data.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch, d) | 908 | d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) |
909 | else: | 909 | else: |
910 | bb.data.setVar('PACKAGE_ARCH_EXTEND', package_arch, d) | 910 | d.setVar('PACKAGE_ARCH_EXTEND', package_arch) |
911 | pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d) | 911 | pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d) |
912 | pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d) | 912 | pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d) |
913 | magicfile = bb.data.expand('${STAGING_DIR_NATIVE}/usr/share/misc/magic.mgc', d) | 913 | magicfile = bb.data.expand('${STAGING_DIR_NATIVE}/usr/share/misc/magic.mgc', d) |
@@ -927,19 +927,19 @@ python do_package_rpm () { | |||
927 | cmd = cmd + " -bb " + outspecfile | 927 | cmd = cmd + " -bb " + outspecfile |
928 | 928 | ||
929 | # Build the rpm package! | 929 | # Build the rpm package! |
930 | bb.data.setVar('BUILDSPEC', cmd + "\n", d) | 930 | d.setVar('BUILDSPEC', cmd + "\n") |
931 | bb.data.setVarFlag('BUILDSPEC', 'func', '1', d) | 931 | d.setVarFlag('BUILDSPEC', 'func', '1') |
932 | bb.build.exec_func('BUILDSPEC', d) | 932 | bb.build.exec_func('BUILDSPEC', d) |
933 | } | 933 | } |
934 | 934 | ||
935 | python () { | 935 | python () { |
936 | if bb.data.getVar('PACKAGES', d, True) != '': | 936 | if d.getVar('PACKAGES', True) != '': |
937 | deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split() | 937 | deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split() |
938 | deps.append('rpm-native:do_populate_sysroot') | 938 | deps.append('rpm-native:do_populate_sysroot') |
939 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 939 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
940 | bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) | 940 | bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) |
941 | bb.data.setVarFlag('do_package_write_rpm', 'fakeroot', 1, d) | 941 | d.setVarFlag('do_package_write_rpm', 'fakeroot', 1) |
942 | bb.data.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1, d) | 942 | d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1) |
943 | } | 943 | } |
944 | 944 | ||
945 | SSTATETASKS += "do_package_write_rpm" | 945 | SSTATETASKS += "do_package_write_rpm" |
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass index a806e4514b..f26a2c0008 100644 --- a/meta/classes/package_tar.bbclass +++ b/meta/classes/package_tar.bbclass | |||
@@ -3,15 +3,15 @@ inherit package | |||
3 | IMAGE_PKGTYPE ?= "tar" | 3 | IMAGE_PKGTYPE ?= "tar" |
4 | 4 | ||
5 | python package_tar_fn () { | 5 | python package_tar_fn () { |
6 | fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PKGV', d), bb.data.getVar('PKGR', d))) | 6 | fn = os.path.join(d.getVar('DEPLOY_DIR_TAR'), "%s-%s-%s.tar.gz" % (d.getVar('PKG'), d.getVar('PKGV'), d.getVar('PKGR'))) |
7 | fn = bb.data.expand(fn, d) | 7 | fn = bb.data.expand(fn, d) |
8 | bb.data.setVar('PKGFN', fn, d) | 8 | d.setVar('PKGFN', fn) |
9 | } | 9 | } |
10 | 10 | ||
11 | python package_tar_install () { | 11 | python package_tar_install () { |
12 | pkg = bb.data.getVar('PKG', d, 1) | 12 | pkg = d.getVar('PKG', 1) |
13 | pkgfn = bb.data.getVar('PKGFN', d, 1) | 13 | pkgfn = d.getVar('PKGFN', 1) |
14 | rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1) | 14 | rootfs = d.getVar('IMAGE_ROOTFS', 1) |
15 | 15 | ||
16 | if None in (pkg,pkgfn,rootfs): | 16 | if None in (pkg,pkgfn,rootfs): |
17 | bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") | 17 | bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") |
@@ -35,24 +35,24 @@ python package_tar_install () { | |||
35 | } | 35 | } |
36 | 36 | ||
37 | python do_package_tar () { | 37 | python do_package_tar () { |
38 | workdir = bb.data.getVar('WORKDIR', d, 1) | 38 | workdir = d.getVar('WORKDIR', 1) |
39 | if not workdir: | 39 | if not workdir: |
40 | bb.error("WORKDIR not defined, unable to package") | 40 | bb.error("WORKDIR not defined, unable to package") |
41 | return | 41 | return |
42 | 42 | ||
43 | outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1) | 43 | outdir = d.getVar('DEPLOY_DIR_TAR', 1) |
44 | if not outdir: | 44 | if not outdir: |
45 | bb.error("DEPLOY_DIR_TAR not defined, unable to package") | 45 | bb.error("DEPLOY_DIR_TAR not defined, unable to package") |
46 | return | 46 | return |
47 | bb.mkdirhier(outdir) | 47 | bb.mkdirhier(outdir) |
48 | 48 | ||
49 | dvar = bb.data.getVar('D', d, 1) | 49 | dvar = d.getVar('D', 1) |
50 | if not dvar: | 50 | if not dvar: |
51 | bb.error("D not defined, unable to package") | 51 | bb.error("D not defined, unable to package") |
52 | return | 52 | return |
53 | bb.mkdirhier(dvar) | 53 | bb.mkdirhier(dvar) |
54 | 54 | ||
55 | packages = bb.data.getVar('PACKAGES', d, 1) | 55 | packages = d.getVar('PACKAGES', 1) |
56 | if not packages: | 56 | if not packages: |
57 | bb.debug(1, "PACKAGES not defined, nothing to package") | 57 | bb.debug(1, "PACKAGES not defined, nothing to package") |
58 | return | 58 | return |
@@ -61,11 +61,11 @@ python do_package_tar () { | |||
61 | localdata = bb.data.createCopy(d) | 61 | localdata = bb.data.createCopy(d) |
62 | root = "%s/install/%s" % (workdir, pkg) | 62 | root = "%s/install/%s" % (workdir, pkg) |
63 | 63 | ||
64 | bb.data.setVar('ROOT', '', localdata) | 64 | localdata.setVar('ROOT', '') |
65 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | 65 | localdata.setVar('ROOT_%s' % pkg, root) |
66 | bb.data.setVar('PKG', pkg, localdata) | 66 | localdata.setVar('PKG', pkg) |
67 | 67 | ||
68 | overrides = bb.data.getVar('OVERRIDES', localdata) | 68 | overrides = localdata.getVar('OVERRIDES') |
69 | if not overrides: | 69 | if not overrides: |
70 | raise bb.build.FuncFailed('OVERRIDES not defined') | 70 | raise bb.build.FuncFailed('OVERRIDES not defined') |
71 | overrides = bb.data.expand(overrides, localdata) | 71 | overrides = bb.data.expand(overrides, localdata) |
@@ -73,17 +73,17 @@ python do_package_tar () { | |||
73 | 73 | ||
74 | bb.data.update_data(localdata) | 74 | bb.data.update_data(localdata) |
75 | 75 | ||
76 | root = bb.data.getVar('ROOT', localdata) | 76 | root = localdata.getVar('ROOT') |
77 | bb.mkdirhier(root) | 77 | bb.mkdirhier(root) |
78 | basedir = os.path.dirname(root) | 78 | basedir = os.path.dirname(root) |
79 | pkgoutdir = outdir | 79 | pkgoutdir = outdir |
80 | bb.mkdirhier(pkgoutdir) | 80 | bb.mkdirhier(pkgoutdir) |
81 | bb.build.exec_func('package_tar_fn', localdata) | 81 | bb.build.exec_func('package_tar_fn', localdata) |
82 | tarfn = bb.data.getVar('PKGFN', localdata, 1) | 82 | tarfn = localdata.getVar('PKGFN', 1) |
83 | os.chdir(root) | 83 | os.chdir(root) |
84 | from glob import glob | 84 | from glob import glob |
85 | if not glob('*'): | 85 | if not glob('*'): |
86 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1))) | 86 | bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) |
87 | continue | 87 | continue |
88 | ret = os.system("tar -czf %s %s" % (tarfn, '.')) | 88 | ret = os.system("tar -czf %s %s" % (tarfn, '.')) |
89 | if ret != 0: | 89 | if ret != 0: |
@@ -91,12 +91,12 @@ python do_package_tar () { | |||
91 | } | 91 | } |
92 | 92 | ||
93 | python () { | 93 | python () { |
94 | if bb.data.getVar('PACKAGES', d, True) != '': | 94 | if d.getVar('PACKAGES', True) != '': |
95 | deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split() | 95 | deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split() |
96 | deps.append('tar-native:do_populate_sysroot') | 96 | deps.append('tar-native:do_populate_sysroot') |
97 | deps.append('virtual/fakeroot-native:do_populate_sysroot') | 97 | deps.append('virtual/fakeroot-native:do_populate_sysroot') |
98 | bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) | 98 | bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) |
99 | bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d) | 99 | d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") |
100 | } | 100 | } |
101 | 101 | ||
102 | 102 | ||
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass index bf051feea8..9c7aede3bb 100644 --- a/meta/classes/packagedata.bbclass +++ b/meta/classes/packagedata.bbclass | |||
@@ -1,13 +1,13 @@ | |||
1 | python read_subpackage_metadata () { | 1 | python read_subpackage_metadata () { |
2 | import oe.packagedata | 2 | import oe.packagedata |
3 | 3 | ||
4 | data = oe.packagedata.read_pkgdata(bb.data.getVar('PN', d, 1), d) | 4 | data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d) |
5 | 5 | ||
6 | for key in data.keys(): | 6 | for key in data.keys(): |
7 | bb.data.setVar(key, data[key], d) | 7 | d.setVar(key, data[key]) |
8 | 8 | ||
9 | for pkg in bb.data.getVar('PACKAGES', d, 1).split(): | 9 | for pkg in d.getVar('PACKAGES', 1).split(): |
10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) | 10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) |
11 | for key in sdata.keys(): | 11 | for key in sdata.keys(): |
12 | bb.data.setVar(key, sdata[key], d) | 12 | d.setVar(key, sdata[key]) |
13 | } | 13 | } |
diff --git a/meta/classes/packagehistory.bbclass b/meta/classes/packagehistory.bbclass index 492bbac218..2cdf9d8a7c 100644 --- a/meta/classes/packagehistory.bbclass +++ b/meta/classes/packagehistory.bbclass | |||
@@ -10,8 +10,8 @@ PKGHIST_DIR = "${TMPDIR}/pkghistory/${BASEPKG_TARGET_SYS}/" | |||
10 | # for comparision when writing future packages | 10 | # for comparision when writing future packages |
11 | # | 11 | # |
12 | python emit_pkghistory() { | 12 | python emit_pkghistory() { |
13 | packages = bb.data.getVar('PACKAGES', d, True) | 13 | packages = d.getVar('PACKAGES', True) |
14 | pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) | 14 | pkghistdir = d.getVar('PKGHIST_DIR', True) |
15 | 15 | ||
16 | 16 | ||
17 | # Should check PACKAGES here to see if anything removed | 17 | # Should check PACKAGES here to see if anything removed |
@@ -72,14 +72,14 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion): | |||
72 | def write_pkghistory(pkg, pe, pv, pr, d): | 72 | def write_pkghistory(pkg, pe, pv, pr, d): |
73 | bb.debug(2, "Writing package history") | 73 | bb.debug(2, "Writing package history") |
74 | 74 | ||
75 | pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) | 75 | pkghistdir = d.getVar('PKGHIST_DIR', True) |
76 | 76 | ||
77 | verpath = os.path.join(pkghistdir, pkg, pe, pv, pr) | 77 | verpath = os.path.join(pkghistdir, pkg, pe, pv, pr) |
78 | if not os.path.exists(verpath): | 78 | if not os.path.exists(verpath): |
79 | os.makedirs(verpath) | 79 | os.makedirs(verpath) |
80 | 80 | ||
81 | def write_latestlink(pkg, pe, pv, pr, d): | 81 | def write_latestlink(pkg, pe, pv, pr, d): |
82 | pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) | 82 | pkghistdir = d.getVar('PKGHIST_DIR', True) |
83 | 83 | ||
84 | def rm_link(path): | 84 | def rm_link(path): |
85 | try: | 85 | try: |
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass index 86046e1ff8..b2b6d976c5 100644 --- a/meta/classes/patch.bbclass +++ b/meta/classes/patch.bbclass | |||
@@ -10,7 +10,7 @@ inherit terminal | |||
10 | python patch_do_patch() { | 10 | python patch_do_patch() { |
11 | import oe.patch | 11 | import oe.patch |
12 | 12 | ||
13 | src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split() | 13 | src_uri = (d.getVar('SRC_URI', 1) or '').split() |
14 | if not src_uri: | 14 | if not src_uri: |
15 | return | 15 | return |
16 | 16 | ||
@@ -20,23 +20,23 @@ python patch_do_patch() { | |||
20 | "git": oe.patch.GitApplyTree, | 20 | "git": oe.patch.GitApplyTree, |
21 | } | 21 | } |
22 | 22 | ||
23 | cls = patchsetmap[bb.data.getVar('PATCHTOOL', d, 1) or 'quilt'] | 23 | cls = patchsetmap[d.getVar('PATCHTOOL', 1) or 'quilt'] |
24 | 24 | ||
25 | resolvermap = { | 25 | resolvermap = { |
26 | "noop": oe.patch.NOOPResolver, | 26 | "noop": oe.patch.NOOPResolver, |
27 | "user": oe.patch.UserResolver, | 27 | "user": oe.patch.UserResolver, |
28 | } | 28 | } |
29 | 29 | ||
30 | rcls = resolvermap[bb.data.getVar('PATCHRESOLVE', d, 1) or 'user'] | 30 | rcls = resolvermap[d.getVar('PATCHRESOLVE', 1) or 'user'] |
31 | 31 | ||
32 | s = bb.data.getVar('S', d, 1) | 32 | s = d.getVar('S', 1) |
33 | 33 | ||
34 | path = os.getenv('PATH') | 34 | path = os.getenv('PATH') |
35 | os.putenv('PATH', bb.data.getVar('PATH', d, 1)) | 35 | os.putenv('PATH', d.getVar('PATH', 1)) |
36 | 36 | ||
37 | classes = {} | 37 | classes = {} |
38 | 38 | ||
39 | workdir = bb.data.getVar('WORKDIR', d, 1) | 39 | workdir = d.getVar('WORKDIR', 1) |
40 | for url in src_uri: | 40 | for url in src_uri: |
41 | (type, host, path, user, pswd, parm) = bb.decodeurl(url) | 41 | (type, host, path, user, pswd, parm) = bb.decodeurl(url) |
42 | 42 | ||
@@ -76,13 +76,13 @@ python patch_do_patch() { | |||
76 | pname = os.path.basename(local) | 76 | pname = os.path.basename(local) |
77 | 77 | ||
78 | if "mindate" in parm or "maxdate" in parm: | 78 | if "mindate" in parm or "maxdate" in parm: |
79 | pn = bb.data.getVar('PN', d, 1) | 79 | pn = d.getVar('PN', 1) |
80 | srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1) | 80 | srcdate = d.getVar('SRCDATE_%s' % pn, 1) |
81 | if not srcdate: | 81 | if not srcdate: |
82 | srcdate = bb.data.getVar('SRCDATE', d, 1) | 82 | srcdate = d.getVar('SRCDATE', 1) |
83 | 83 | ||
84 | if srcdate == "now": | 84 | if srcdate == "now": |
85 | srcdate = bb.data.getVar('DATE', d, 1) | 85 | srcdate = d.getVar('DATE', 1) |
86 | 86 | ||
87 | if "maxdate" in parm and parm["maxdate"] < srcdate: | 87 | if "maxdate" in parm and parm["maxdate"] < srcdate: |
88 | bb.note("Patch '%s' is outdated" % pname) | 88 | bb.note("Patch '%s' is outdated" % pname) |
@@ -94,25 +94,25 @@ python patch_do_patch() { | |||
94 | 94 | ||
95 | 95 | ||
96 | if "minrev" in parm: | 96 | if "minrev" in parm: |
97 | srcrev = bb.data.getVar('SRCREV', d, 1) | 97 | srcrev = d.getVar('SRCREV', 1) |
98 | if srcrev and srcrev < parm["minrev"]: | 98 | if srcrev and srcrev < parm["minrev"]: |
99 | bb.note("Patch '%s' applies to later revisions" % pname) | 99 | bb.note("Patch '%s' applies to later revisions" % pname) |
100 | continue | 100 | continue |
101 | 101 | ||
102 | if "maxrev" in parm: | 102 | if "maxrev" in parm: |
103 | srcrev = bb.data.getVar('SRCREV', d, 1) | 103 | srcrev = d.getVar('SRCREV', 1) |
104 | if srcrev and srcrev > parm["maxrev"]: | 104 | if srcrev and srcrev > parm["maxrev"]: |
105 | bb.note("Patch '%s' applies to earlier revisions" % pname) | 105 | bb.note("Patch '%s' applies to earlier revisions" % pname) |
106 | continue | 106 | continue |
107 | 107 | ||
108 | if "rev" in parm: | 108 | if "rev" in parm: |
109 | srcrev = bb.data.getVar('SRCREV', d, 1) | 109 | srcrev = d.getVar('SRCREV', 1) |
110 | if srcrev and parm["rev"] not in srcrev: | 110 | if srcrev and parm["rev"] not in srcrev: |
111 | bb.note("Patch '%s' doesn't apply to revision" % pname) | 111 | bb.note("Patch '%s' doesn't apply to revision" % pname) |
112 | continue | 112 | continue |
113 | 113 | ||
114 | if "notrev" in parm: | 114 | if "notrev" in parm: |
115 | srcrev = bb.data.getVar('SRCREV', d, 1) | 115 | srcrev = d.getVar('SRCREV', 1) |
116 | if srcrev and parm["notrev"] in srcrev: | 116 | if srcrev and parm["notrev"] in srcrev: |
117 | bb.note("Patch '%s' doesn't apply to revision" % pname) | 117 | bb.note("Patch '%s' doesn't apply to revision" % pname) |
118 | continue | 118 | continue |
diff --git a/meta/classes/pkg_distribute.bbclass b/meta/classes/pkg_distribute.bbclass index 81978e3e3b..52643a2f90 100644 --- a/meta/classes/pkg_distribute.bbclass +++ b/meta/classes/pkg_distribute.bbclass | |||
@@ -1,6 +1,6 @@ | |||
1 | PKG_DISTRIBUTECOMMAND[func] = "1" | 1 | PKG_DISTRIBUTECOMMAND[func] = "1" |
2 | python do_distribute_packages () { | 2 | python do_distribute_packages () { |
3 | cmd = bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1) | 3 | cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1) |
4 | if not cmd: | 4 | if not cmd: |
5 | raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") | 5 | raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") |
6 | bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) | 6 | bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) |
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass index ac4f73c77b..1714a535c2 100644 --- a/meta/classes/pkg_metainfo.bbclass +++ b/meta/classes/pkg_metainfo.bbclass | |||
@@ -1,5 +1,5 @@ | |||
1 | python do_pkg_write_metainfo () { | 1 | python do_pkg_write_metainfo () { |
2 | deploydir = bb.data.getVar('DEPLOY_DIR', d, 1) | 2 | deploydir = d.getVar('DEPLOY_DIR', 1) |
3 | if not deploydir: | 3 | if not deploydir: |
4 | bb.error("DEPLOY_DIR not defined, unable to write package info") | 4 | bb.error("DEPLOY_DIR not defined, unable to write package info") |
5 | return | 5 | return |
@@ -9,11 +9,11 @@ python do_pkg_write_metainfo () { | |||
9 | except OSError: | 9 | except OSError: |
10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") | 10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") |
11 | 11 | ||
12 | name = bb.data.getVar('PN', d, 1) | 12 | name = d.getVar('PN', 1) |
13 | version = bb.data.getVar('PV', d, 1) | 13 | version = d.getVar('PV', 1) |
14 | desc = bb.data.getVar('DESCRIPTION', d, 1) | 14 | desc = d.getVar('DESCRIPTION', 1) |
15 | page = bb.data.getVar('HOMEPAGE', d, 1) | 15 | page = d.getVar('HOMEPAGE', 1) |
16 | lic = bb.data.getVar('LICENSE', d, 1) | 16 | lic = d.getVar('LICENSE', 1) |
17 | 17 | ||
18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) | 18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) |
19 | infofile.close() | 19 | infofile.close() |
diff --git a/meta/classes/populate_sdk_deb.bbclass b/meta/classes/populate_sdk_deb.bbclass index be7b5520c4..eedc80b26b 100644 --- a/meta/classes/populate_sdk_deb.bbclass +++ b/meta/classes/populate_sdk_deb.bbclass | |||
@@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul | |||
2 | do_populate_sdk[recrdeptask] += "do_package_write_deb" | 2 | do_populate_sdk[recrdeptask] += "do_package_write_deb" |
3 | 3 | ||
4 | 4 | ||
5 | DEB_SDK_ARCH = "${@[bb.data.getVar('SDK_ARCH', d, 1), "i386"]\ | 5 | DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\ |
6 | [bb.data.getVar('SDK_ARCH', d, 1) in \ | 6 | [d.getVar('SDK_ARCH', 1) in \ |
7 | ["x86", "i486", "i586", "i686", "pentium"]]}" | 7 | ["x86", "i486", "i586", "i686", "pentium"]]}" |
8 | 8 | ||
9 | populate_sdk_post_deb () { | 9 | populate_sdk_post_deb () { |
diff --git a/meta/classes/populate_sdk_rpm.bbclass b/meta/classes/populate_sdk_rpm.bbclass index 9989d0abfd..829d83a8c5 100644 --- a/meta/classes/populate_sdk_rpm.bbclass +++ b/meta/classes/populate_sdk_rpm.bbclass | |||
@@ -127,6 +127,6 @@ python () { | |||
127 | localdata.setVar("DEFAULTTUNE", localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + eext[1], False) or "") | 127 | localdata.setVar("DEFAULTTUNE", localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + eext[1], False) or "") |
128 | ml_package_archs += localdata.getVar("PACKAGE_ARCHS", True) or "" | 128 | ml_package_archs += localdata.getVar("PACKAGE_ARCHS", True) or "" |
129 | #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) | 129 | #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) |
130 | bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d) | 130 | d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs) |
131 | } | 131 | } |
132 | 132 | ||
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass index 66dfb2b0d2..1bdd209afe 100644 --- a/meta/classes/qemu.bbclass +++ b/meta/classes/qemu.bbclass | |||
@@ -6,7 +6,7 @@ | |||
6 | def qemu_target_binary(data): | 6 | def qemu_target_binary(data): |
7 | import bb | 7 | import bb |
8 | 8 | ||
9 | target_arch = bb.data.getVar("TARGET_ARCH", data, 1) | 9 | target_arch = data.getVar("TARGET_ARCH", 1) |
10 | if target_arch in ("i486", "i586", "i686"): | 10 | if target_arch in ("i486", "i586", "i686"): |
11 | target_arch = "i386" | 11 | target_arch = "i386" |
12 | elif target_arch == "powerpc": | 12 | elif target_arch == "powerpc": |
diff --git a/meta/classes/qt4e.bbclass b/meta/classes/qt4e.bbclass index 670605ba4b..d955aca5d8 100644 --- a/meta/classes/qt4e.bbclass +++ b/meta/classes/qt4e.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS_prepend = "${@["qt4-embedded ", ""][(bb.data.getVar('PN', d, 1)[:12] == 'qt4-embedded')]}" | 1 | DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}" |
2 | 2 | ||
3 | inherit qmake2 | 3 | inherit qmake2 |
4 | 4 | ||
diff --git a/meta/classes/qt4x11.bbclass b/meta/classes/qt4x11.bbclass index ee2cdca3ad..3f955d48df 100644 --- a/meta/classes/qt4x11.bbclass +++ b/meta/classes/qt4x11.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS_prepend = "${@["qt4-x11-free ", ""][(bb.data.getVar('BPN', d, True)[:12] == 'qt4-x11-free')]}" | 1 | DEPENDS_prepend = "${@["qt4-x11-free ", ""][(d.getVar('BPN', True)[:12] == 'qt4-x11-free')]}" |
2 | 2 | ||
3 | inherit qmake2 | 3 | inherit qmake2 |
4 | 4 | ||
diff --git a/meta/classes/relocatable.bbclass b/meta/classes/relocatable.bbclass index e665e317c0..54227a91ca 100644 --- a/meta/classes/relocatable.bbclass +++ b/meta/classes/relocatable.bbclass | |||
@@ -8,7 +8,7 @@ def process_dir (directory, d): | |||
8 | import stat | 8 | import stat |
9 | 9 | ||
10 | cmd = bb.data.expand('${CHRPATH_BIN}', d) | 10 | cmd = bb.data.expand('${CHRPATH_BIN}', d) |
11 | tmpdir = bb.data.getVar('TMPDIR', d) | 11 | tmpdir = d.getVar('TMPDIR') |
12 | basedir = bb.data.expand('${base_prefix}', d) | 12 | basedir = bb.data.expand('${base_prefix}', d) |
13 | 13 | ||
14 | #bb.debug("Checking %s for binaries to process" % directory) | 14 | #bb.debug("Checking %s for binaries to process" % directory) |
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass index ffc4a72c06..3b4c392cec 100644 --- a/meta/classes/rootfs_ipk.bbclass +++ b/meta/classes/rootfs_ipk.bbclass | |||
@@ -185,13 +185,13 @@ ipk_insert_feed_uris () { | |||
185 | 185 | ||
186 | python () { | 186 | python () { |
187 | 187 | ||
188 | if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True): | 188 | if d.getVar('BUILD_IMAGES_FROM_FEEDS', True): |
189 | flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d) | 189 | flags = d.getVarFlag('do_rootfs', 'recrdeptask') |
190 | flags = flags.replace("do_package_write_ipk", "") | 190 | flags = flags.replace("do_package_write_ipk", "") |
191 | flags = flags.replace("do_deploy", "") | 191 | flags = flags.replace("do_deploy", "") |
192 | flags = flags.replace("do_populate_sysroot", "") | 192 | flags = flags.replace("do_populate_sysroot", "") |
193 | bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d) | 193 | d.setVarFlag('do_rootfs', 'recrdeptask', flags) |
194 | bb.data.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris", d) | 194 | d.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris") |
195 | bb.data.setVar('OPKG_POSTPROCESS_COMMANDS', '', d) | 195 | d.setVar('OPKG_POSTPROCESS_COMMANDS', '') |
196 | } | 196 | } |
197 | 197 | ||
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass index 9f50764886..95e9455e5c 100644 --- a/meta/classes/rootfs_rpm.bbclass +++ b/meta/classes/rootfs_rpm.bbclass | |||
@@ -200,14 +200,14 @@ install_all_locales() { | |||
200 | } | 200 | } |
201 | 201 | ||
202 | python () { | 202 | python () { |
203 | if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True): | 203 | if d.getVar('BUILD_IMAGES_FROM_FEEDS', True): |
204 | flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d) | 204 | flags = d.getVarFlag('do_rootfs', 'recrdeptask') |
205 | flags = flags.replace("do_package_write_rpm", "") | 205 | flags = flags.replace("do_package_write_rpm", "") |
206 | flags = flags.replace("do_deploy", "") | 206 | flags = flags.replace("do_deploy", "") |
207 | flags = flags.replace("do_populate_sysroot", "") | 207 | flags = flags.replace("do_populate_sysroot", "") |
208 | bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d) | 208 | d.setVarFlag('do_rootfs', 'recrdeptask', flags) |
209 | bb.data.setVar('RPM_PREPROCESS_COMMANDS', '', d) | 209 | d.setVar('RPM_PREPROCESS_COMMANDS', '') |
210 | bb.data.setVar('RPM_POSTPROCESS_COMMANDS', '', d) | 210 | d.setVar('RPM_POSTPROCESS_COMMANDS', '') |
211 | 211 | ||
212 | ml_package_archs = "" | 212 | ml_package_archs = "" |
213 | ml_prefix_list = "" | 213 | ml_prefix_list = "" |
@@ -224,6 +224,6 @@ python () { | |||
224 | ml_package_archs += " " + package_archs | 224 | ml_package_archs += " " + package_archs |
225 | ml_prefix_list += " " + eext[1] | 225 | ml_prefix_list += " " + eext[1] |
226 | #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) | 226 | #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) |
227 | bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d) | 227 | d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs) |
228 | bb.data.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list, d) | 228 | d.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list) |
229 | } | 229 | } |
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass index 838448f33c..53b82d73e3 100644 --- a/meta/classes/sanity.bbclass +++ b/meta/classes/sanity.bbclass | |||
@@ -14,7 +14,7 @@ def raise_sanity_error(msg): | |||
14 | def check_conf_exists(fn, data): | 14 | def check_conf_exists(fn, data): |
15 | bbpath = [] | 15 | bbpath = [] |
16 | fn = bb.data.expand(fn, data) | 16 | fn = bb.data.expand(fn, data) |
17 | vbbpath = bb.data.getVar("BBPATH", data) | 17 | vbbpath = data.getVar("BBPATH") |
18 | if vbbpath: | 18 | if vbbpath: |
19 | bbpath += vbbpath.split(":") | 19 | bbpath += vbbpath.split(":") |
20 | for p in bbpath: | 20 | for p in bbpath: |
@@ -87,12 +87,12 @@ def check_connectivity(d): | |||
87 | # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable | 87 | # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable |
88 | # using the same syntax as for SRC_URI. If the variable is not set | 88 | # using the same syntax as for SRC_URI. If the variable is not set |
89 | # the check is skipped | 89 | # the check is skipped |
90 | test_uris = (bb.data.getVar('CONNECTIVITY_CHECK_URIS', d, True) or "").split() | 90 | test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split() |
91 | retval = "" | 91 | retval = "" |
92 | 92 | ||
93 | # Only check connectivity if network enabled and the | 93 | # Only check connectivity if network enabled and the |
94 | # CONNECTIVITY_CHECK_URIS are set | 94 | # CONNECTIVITY_CHECK_URIS are set |
95 | network_enabled = not bb.data.getVar('BB_NO_NETWORK', d, True) | 95 | network_enabled = not d.getVar('BB_NO_NETWORK', True) |
96 | check_enabled = len(test_uris) | 96 | check_enabled = len(test_uris) |
97 | # Take a copy of the data store and unset MIRRORS and PREMIRROS | 97 | # Take a copy of the data store and unset MIRRORS and PREMIRROS |
98 | data = bb.data.createCopy(d) | 98 | data = bb.data.createCopy(d) |
@@ -105,7 +105,7 @@ def check_connectivity(d): | |||
105 | except Exception: | 105 | except Exception: |
106 | # Allow the message to be configured so that users can be | 106 | # Allow the message to be configured so that users can be |
107 | # pointed to a support mechanism. | 107 | # pointed to a support mechanism. |
108 | msg = bb.data.getVar('CONNECTIVITY_CHECK_MSG', data, True) or "" | 108 | msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or "" |
109 | if len(msg) == 0: | 109 | if len(msg) == 0: |
110 | msg = "Failed to fetch test data from the network. Please ensure your network is configured correctly.\n" | 110 | msg = "Failed to fetch test data from the network. Please ensure your network is configured correctly.\n" |
111 | retval = msg | 111 | retval = msg |
@@ -450,7 +450,7 @@ def check_sanity(e): | |||
450 | 450 | ||
451 | addhandler check_sanity_eventhandler | 451 | addhandler check_sanity_eventhandler |
452 | python check_sanity_eventhandler() { | 452 | python check_sanity_eventhandler() { |
453 | if bb.event.getName(e) == "ConfigParsed" and bb.data.getVar("BB_WORKERCONTEXT", e.data, True) != "1": | 453 | if bb.event.getName(e) == "ConfigParsed" and e.data.getVar("BB_WORKERCONTEXT", True) != "1": |
454 | check_sanity(e) | 454 | check_sanity(e) |
455 | 455 | ||
456 | return | 456 | return |
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass index 0813c2543e..880dcad1f3 100644 --- a/meta/classes/siteconfig.bbclass +++ b/meta/classes/siteconfig.bbclass | |||
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () { | |||
2 | shared_state = sstate_state_fromvars(d) | 2 | shared_state = sstate_state_fromvars(d) |
3 | if shared_state['name'] != 'populate-sysroot': | 3 | if shared_state['name'] != 'populate-sysroot': |
4 | return | 4 | return |
5 | if not os.path.isdir(os.path.join(bb.data.getVar('FILE_DIRNAME', d, 1), 'site_config')): | 5 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')): |
6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") | 6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") |
7 | return | 7 | return |
8 | bb.build.exec_func('do_siteconfig_gencache', d) | 8 | bb.build.exec_func('do_siteconfig_gencache', d) |
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass index 02294c4d2e..604c6ba6b0 100644 --- a/meta/classes/siteinfo.bbclass +++ b/meta/classes/siteinfo.bbclass | |||
@@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False): | |||
130 | if no_cache: return sitefiles | 130 | if no_cache: return sitefiles |
131 | 131 | ||
132 | # Now check for siteconfig cache files | 132 | # Now check for siteconfig cache files |
133 | path_siteconfig = bb.data.getVar('SITECONFIG_SYSROOTCACHE', d, 1) | 133 | path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1) |
134 | if os.path.isdir(path_siteconfig): | 134 | if os.path.isdir(path_siteconfig): |
135 | for i in os.listdir(path_siteconfig): | 135 | for i in os.listdir(path_siteconfig): |
136 | filename = os.path.join(path_siteconfig, i) | 136 | filename = os.path.join(path_siteconfig, i) |
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass index 64e6d7c860..38edfe4e2e 100644 --- a/meta/classes/sourcepkg.bbclass +++ b/meta/classes/sourcepkg.bbclass | |||
@@ -6,12 +6,12 @@ DISTRO ?= "openembedded" | |||
6 | 6 | ||
7 | def get_src_tree(d): | 7 | def get_src_tree(d): |
8 | 8 | ||
9 | workdir = bb.data.getVar('WORKDIR', d, 1) | 9 | workdir = d.getVar('WORKDIR', 1) |
10 | if not workdir: | 10 | if not workdir: |
11 | bb.error("WORKDIR not defined, unable to find source tree.") | 11 | bb.error("WORKDIR not defined, unable to find source tree.") |
12 | return | 12 | return |
13 | 13 | ||
14 | s = bb.data.getVar('S', d, 0) | 14 | s = d.getVar('S', 0) |
15 | if not s: | 15 | if not s: |
16 | bb.error("S not defined, unable to find source tree.") | 16 | bb.error("S not defined, unable to find source tree.") |
17 | return | 17 | return |
@@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() { | |||
55 | 55 | ||
56 | python sourcepkg_do_dumpdata() { | 56 | python sourcepkg_do_dumpdata() { |
57 | 57 | ||
58 | workdir = bb.data.getVar('WORKDIR', d, 1) | 58 | workdir = d.getVar('WORKDIR', 1) |
59 | distro = bb.data.getVar('DISTRO', d, 1) | 59 | distro = d.getVar('DISTRO', 1) |
60 | s_tree = get_src_tree(d) | 60 | s_tree = get_src_tree(d) |
61 | openembeddeddir = os.path.join(workdir, s_tree, distro) | 61 | openembeddeddir = os.path.join(workdir, s_tree, distro) |
62 | dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) | 62 | dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) |
@@ -73,8 +73,8 @@ python sourcepkg_do_dumpdata() { | |||
73 | bb.data.emit_env(f, d, True) | 73 | bb.data.emit_env(f, d, True) |
74 | # emit the metadata which isnt valid shell | 74 | # emit the metadata which isnt valid shell |
75 | for e in d.keys(): | 75 | for e in d.keys(): |
76 | if bb.data.getVarFlag(e, 'python', d): | 76 | if d.getVarFlag(e, 'python'): |
77 | f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1))) | 77 | f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1))) |
78 | f.close() | 78 | f.close() |
79 | } | 79 | } |
80 | 80 | ||
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass index fbfbdf0094..2069d652a3 100644 --- a/meta/classes/src_distribute.bbclass +++ b/meta/classes/src_distribute.bbclass | |||
@@ -3,12 +3,12 @@ python do_distribute_sources () { | |||
3 | l = bb.data.createCopy(d) | 3 | l = bb.data.createCopy(d) |
4 | bb.data.update_data(l) | 4 | bb.data.update_data(l) |
5 | 5 | ||
6 | sources_dir = bb.data.getVar('SRC_DISTRIBUTEDIR', d, 1) | 6 | sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1) |
7 | src_uri = bb.data.getVar('SRC_URI', d, 1).split() | 7 | src_uri = d.getVar('SRC_URI', 1).split() |
8 | fetcher = bb.fetch2.Fetch(src_uri, d) | 8 | fetcher = bb.fetch2.Fetch(src_uri, d) |
9 | ud = fetcher.ud | 9 | ud = fetcher.ud |
10 | 10 | ||
11 | licenses = bb.data.getVar('LICENSE', d, 1).replace('&', '|') | 11 | licenses = d.getVar('LICENSE', 1).replace('&', '|') |
12 | licenses = licenses.replace('(', '').replace(')', '') | 12 | licenses = licenses.replace('(', '').replace(')', '') |
13 | clean_licenses = "" | 13 | clean_licenses = "" |
14 | for x in licenses.split(): | 14 | for x in licenses.split(): |
@@ -20,20 +20,20 @@ python do_distribute_sources () { | |||
20 | 20 | ||
21 | for license in clean_licenses.split('|'): | 21 | for license in clean_licenses.split('|'): |
22 | for url in ud.values(): | 22 | for url in ud.values(): |
23 | cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1) | 23 | cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1) |
24 | if not cmd: | 24 | if not cmd: |
25 | raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") | 25 | raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") |
26 | url.setup_localpath(d) | 26 | url.setup_localpath(d) |
27 | bb.data.setVar('SRC', url.localpath, d) | 27 | d.setVar('SRC', url.localpath) |
28 | if url.type == 'file': | 28 | if url.type == 'file': |
29 | if url.basename == '*': | 29 | if url.basename == '*': |
30 | import os.path | 30 | import os.path |
31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) | 31 | dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) |
32 | bb.data.setVar('DEST', "%s_%s/" % (bb.data.getVar('PF', d, 1), dest_dir), d) | 32 | bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d) |
33 | else: | 33 | else: |
34 | bb.data.setVar('DEST', "%s_%s" % (bb.data.getVar('PF', d, 1), url.basename), d) | 34 | bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d) |
35 | else: | 35 | else: |
36 | bb.data.setVar('DEST', '', d) | 36 | d.setVar('DEST', '') |
37 | 37 | ||
38 | bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d) | 38 | bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d) |
39 | bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) | 39 | bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) |
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass index 6abf55bb14..a777c79bb3 100644 --- a/meta/classes/sstate.bbclass +++ b/meta/classes/sstate.bbclass | |||
@@ -20,7 +20,7 @@ SSTATEPOSTINSTFUNCS ?= "" | |||
20 | 20 | ||
21 | python () { | 21 | python () { |
22 | if bb.data.inherits_class('native', d): | 22 | if bb.data.inherits_class('native', d): |
23 | bb.data.setVar('SSTATE_PKGARCH', bb.data.getVar('BUILD_ARCH', d), d) | 23 | bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d) |
24 | elif bb.data.inherits_class('cross', d): | 24 | elif bb.data.inherits_class('cross', d): |
25 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d) | 25 | bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d) |
26 | bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d) | 26 | bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d) |
@@ -37,19 +37,19 @@ python () { | |||
37 | # reused if we manipulate the paths | 37 | # reused if we manipulate the paths |
38 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): | 38 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): |
39 | scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" | 39 | scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" |
40 | bb.data.setVar('SSTATE_SCAN_CMD', scan_cmd, d) | 40 | d.setVar('SSTATE_SCAN_CMD', scan_cmd) |
41 | 41 | ||
42 | unique_tasks = set((bb.data.getVar('SSTATETASKS', d, True) or "").split()) | 42 | unique_tasks = set((d.getVar('SSTATETASKS', True) or "").split()) |
43 | d.setVar('SSTATETASKS', " ".join(unique_tasks)) | 43 | d.setVar('SSTATETASKS', " ".join(unique_tasks)) |
44 | namemap = [] | 44 | namemap = [] |
45 | for task in unique_tasks: | 45 | for task in unique_tasks: |
46 | namemap.append(bb.data.getVarFlag(task, 'sstate-name', d)) | 46 | namemap.append(d.getVarFlag(task, 'sstate-name')) |
47 | funcs = bb.data.getVarFlag(task, 'prefuncs', d) or "" | 47 | funcs = d.getVarFlag(task, 'prefuncs') or "" |
48 | funcs = "sstate_task_prefunc " + funcs | 48 | funcs = "sstate_task_prefunc " + funcs |
49 | bb.data.setVarFlag(task, 'prefuncs', funcs, d) | 49 | d.setVarFlag(task, 'prefuncs', funcs) |
50 | funcs = bb.data.getVarFlag(task, 'postfuncs', d) or "" | 50 | funcs = d.getVarFlag(task, 'postfuncs') or "" |
51 | funcs = funcs + " sstate_task_postfunc" | 51 | funcs = funcs + " sstate_task_postfunc" |
52 | bb.data.setVarFlag(task, 'postfuncs', funcs, d) | 52 | d.setVarFlag(task, 'postfuncs', funcs) |
53 | d.setVar('SSTATETASKNAMES', " ".join(namemap)) | 53 | d.setVar('SSTATETASKNAMES', " ".join(namemap)) |
54 | } | 54 | } |
55 | 55 | ||
@@ -65,18 +65,18 @@ def sstate_init(name, task, d): | |||
65 | 65 | ||
66 | def sstate_state_fromvars(d, task = None): | 66 | def sstate_state_fromvars(d, task = None): |
67 | if task is None: | 67 | if task is None: |
68 | task = bb.data.getVar('BB_CURRENTTASK', d, True) | 68 | task = d.getVar('BB_CURRENTTASK', True) |
69 | if not task: | 69 | if not task: |
70 | bb.fatal("sstate code running without task context?!") | 70 | bb.fatal("sstate code running without task context?!") |
71 | task = task.replace("_setscene", "") | 71 | task = task.replace("_setscene", "") |
72 | 72 | ||
73 | name = bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-name', d), d) | 73 | name = bb.data.expand(d.getVarFlag("do_" + task, 'sstate-name'), d) |
74 | inputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-inputdirs', d) or "", d)).split() | 74 | inputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-inputdirs') or "", d)).split() |
75 | outputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-outputdirs', d) or "", d)).split() | 75 | outputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-outputdirs') or "", d)).split() |
76 | plaindirs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-plaindirs', d) or "", d)).split() | 76 | plaindirs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-plaindirs') or "", d)).split() |
77 | lockfiles = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile', d) or "", d)).split() | 77 | lockfiles = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile') or "", d)).split() |
78 | lockfilesshared = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile-shared', d) or "", d)).split() | 78 | lockfilesshared = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "", d)).split() |
79 | interceptfuncs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-interceptfuncs', d) or "", d)).split() | 79 | interceptfuncs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "", d)).split() |
80 | if not name or len(inputs) != len(outputs): | 80 | if not name or len(inputs) != len(outputs): |
81 | bb.fatal("sstate variables not setup correctly?!") | 81 | bb.fatal("sstate variables not setup correctly?!") |
82 | 82 | ||
@@ -139,7 +139,7 @@ def sstate_install(ss, d): | |||
139 | f.write(di + "\n") | 139 | f.write(di + "\n") |
140 | f.close() | 140 | f.close() |
141 | 141 | ||
142 | for postinst in (bb.data.getVar('SSTATEPOSTINSTFUNCS', d, True) or '').split(): | 142 | for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split(): |
143 | bb.build.exec_func(postinst, d) | 143 | bb.build.exec_func(postinst, d) |
144 | 144 | ||
145 | for lock in locks: | 145 | for lock in locks: |
@@ -156,7 +156,7 @@ def sstate_installpkg(ss, d): | |||
156 | oe.path.remove(dir) | 156 | oe.path.remove(dir) |
157 | 157 | ||
158 | sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d) | 158 | sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d) |
159 | sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_' + ss['name'] + ".tgz" | 159 | sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz" |
160 | 160 | ||
161 | if not os.path.exists(sstatepkg): | 161 | if not os.path.exists(sstatepkg): |
162 | pstaging_fetch(sstatepkg, d) | 162 | pstaging_fetch(sstatepkg, d) |
@@ -167,16 +167,16 @@ def sstate_installpkg(ss, d): | |||
167 | 167 | ||
168 | sstate_clean(ss, d) | 168 | sstate_clean(ss, d) |
169 | 169 | ||
170 | bb.data.setVar('SSTATE_INSTDIR', sstateinst, d) | 170 | d.setVar('SSTATE_INSTDIR', sstateinst) |
171 | bb.data.setVar('SSTATE_PKG', sstatepkg, d) | 171 | d.setVar('SSTATE_PKG', sstatepkg) |
172 | bb.build.exec_func('sstate_unpack_package', d) | 172 | bb.build.exec_func('sstate_unpack_package', d) |
173 | 173 | ||
174 | # Fixup hardcoded paths | 174 | # Fixup hardcoded paths |
175 | fixmefn = sstateinst + "fixmepath" | 175 | fixmefn = sstateinst + "fixmepath" |
176 | if os.path.isfile(fixmefn): | 176 | if os.path.isfile(fixmefn): |
177 | staging = bb.data.getVar('STAGING_DIR', d, True) | 177 | staging = d.getVar('STAGING_DIR', True) |
178 | staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True) | 178 | staging_target = d.getVar('STAGING_DIR_TARGET', True) |
179 | staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True) | 179 | staging_host = d.getVar('STAGING_DIR_HOST', True) |
180 | fixmefd = open(fixmefn, "r") | 180 | fixmefd = open(fixmefn, "r") |
181 | fixmefiles = fixmefd.readlines() | 181 | fixmefiles = fixmefd.readlines() |
182 | fixmefd.close() | 182 | fixmefd.close() |
@@ -206,13 +206,13 @@ def sstate_installpkg(ss, d): | |||
206 | def sstate_clean_cachefile(ss, d): | 206 | def sstate_clean_cachefile(ss, d): |
207 | import oe.path | 207 | import oe.path |
208 | 208 | ||
209 | sstatepkgdir = bb.data.getVar('SSTATE_DIR', d, True) | 209 | sstatepkgdir = d.getVar('SSTATE_DIR', True) |
210 | sstatepkgfile = sstatepkgdir + '/' + bb.data.getVar('SSTATE_PKGSPEC', d, True) + "*_" + ss['name'] + ".tgz*" | 210 | sstatepkgfile = sstatepkgdir + '/' + d.getVar('SSTATE_PKGSPEC', True) + "*_" + ss['name'] + ".tgz*" |
211 | bb.note("Removing %s" % sstatepkgfile) | 211 | bb.note("Removing %s" % sstatepkgfile) |
212 | oe.path.remove(sstatepkgfile) | 212 | oe.path.remove(sstatepkgfile) |
213 | 213 | ||
214 | def sstate_clean_cachefiles(d): | 214 | def sstate_clean_cachefiles(d): |
215 | for task in (bb.data.getVar('SSTATETASKS', d, True) or "").split(): | 215 | for task in (d.getVar('SSTATETASKS', True) or "").split(): |
216 | ss = sstate_state_fromvars(d, task[3:]) | 216 | ss = sstate_state_fromvars(d, task[3:]) |
217 | sstate_clean_cachefile(ss, d) | 217 | sstate_clean_cachefile(ss, d) |
218 | 218 | ||
@@ -274,10 +274,10 @@ CLEANFUNCS += "sstate_cleanall" | |||
274 | python sstate_cleanall() { | 274 | python sstate_cleanall() { |
275 | import fnmatch | 275 | import fnmatch |
276 | 276 | ||
277 | bb.note("Removing shared state for package %s" % bb.data.getVar('PN', d, True)) | 277 | bb.note("Removing shared state for package %s" % d.getVar('PN', True)) |
278 | 278 | ||
279 | manifest_dir = bb.data.getVar('SSTATE_MANIFESTS', d, True) | 279 | manifest_dir = d.getVar('SSTATE_MANIFESTS', True) |
280 | manifest_prefix = bb.data.getVar("SSTATE_MANFILEPREFIX", d, True) | 280 | manifest_prefix = d.getVar("SSTATE_MANFILEPREFIX", True) |
281 | manifest_pattern = os.path.basename(manifest_prefix) + ".*" | 281 | manifest_pattern = os.path.basename(manifest_prefix) + ".*" |
282 | 282 | ||
283 | if not os.path.exists(manifest_dir): | 283 | if not os.path.exists(manifest_dir): |
@@ -298,7 +298,7 @@ python sstate_cleanall() { | |||
298 | def sstate_hardcode_path(d): | 298 | def sstate_hardcode_path(d): |
299 | # Need to remove hardcoded paths and fix these when we install the | 299 | # Need to remove hardcoded paths and fix these when we install the |
300 | # staging packages. | 300 | # staging packages. |
301 | sstate_scan_cmd = bb.data.getVar('SSTATE_SCAN_CMD', d, True) | 301 | sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) |
302 | p = os.popen("%s" % sstate_scan_cmd) | 302 | p = os.popen("%s" % sstate_scan_cmd) |
303 | file_list = p.read() | 303 | file_list = p.read() |
304 | 304 | ||
@@ -306,10 +306,10 @@ def sstate_hardcode_path(d): | |||
306 | p.close() | 306 | p.close() |
307 | return | 307 | return |
308 | 308 | ||
309 | staging = bb.data.getVar('STAGING_DIR', d, True) | 309 | staging = d.getVar('STAGING_DIR', True) |
310 | staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True) | 310 | staging_target = d.getVar('STAGING_DIR_TARGET', True) |
311 | staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True) | 311 | staging_host = d.getVar('STAGING_DIR_HOST', True) |
312 | sstate_builddir = bb.data.getVar('SSTATE_BUILDDIR', d, True) | 312 | sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) |
313 | 313 | ||
314 | for i in file_list.split('\n'): | 314 | for i in file_list.split('\n'): |
315 | if not i: | 315 | if not i: |
@@ -349,10 +349,10 @@ def sstate_package(ss, d): | |||
349 | os.remove(path) | 349 | os.remove(path) |
350 | os.symlink(base, path) | 350 | os.symlink(base, path) |
351 | 351 | ||
352 | tmpdir = bb.data.getVar('TMPDIR', d, True) | 352 | tmpdir = d.getVar('TMPDIR', True) |
353 | 353 | ||
354 | sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d) | 354 | sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d) |
355 | sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_'+ ss['name'] + ".tgz" | 355 | sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['name'] + ".tgz" |
356 | bb.mkdirhier(sstatebuild) | 356 | bb.mkdirhier(sstatebuild) |
357 | bb.mkdirhier(os.path.dirname(sstatepkg)) | 357 | bb.mkdirhier(os.path.dirname(sstatepkg)) |
358 | for state in ss['dirs']: | 358 | for state in ss['dirs']: |
@@ -369,15 +369,15 @@ def sstate_package(ss, d): | |||
369 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) | 369 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) |
370 | oe.path.copytree(state[1], sstatebuild + state[0]) | 370 | oe.path.copytree(state[1], sstatebuild + state[0]) |
371 | 371 | ||
372 | workdir = bb.data.getVar('WORKDIR', d, True) | 372 | workdir = d.getVar('WORKDIR', True) |
373 | for plain in ss['plaindirs']: | 373 | for plain in ss['plaindirs']: |
374 | pdir = plain.replace(workdir, sstatebuild) | 374 | pdir = plain.replace(workdir, sstatebuild) |
375 | bb.mkdirhier(plain) | 375 | bb.mkdirhier(plain) |
376 | bb.mkdirhier(pdir) | 376 | bb.mkdirhier(pdir) |
377 | oe.path.copytree(plain, pdir) | 377 | oe.path.copytree(plain, pdir) |
378 | 378 | ||
379 | bb.data.setVar('SSTATE_BUILDDIR', sstatebuild, d) | 379 | d.setVar('SSTATE_BUILDDIR', sstatebuild) |
380 | bb.data.setVar('SSTATE_PKG', sstatepkg, d) | 380 | d.setVar('SSTATE_PKG', sstatepkg) |
381 | sstate_hardcode_path(d) | 381 | sstate_hardcode_path(d) |
382 | bb.build.exec_func('sstate_create_package', d) | 382 | bb.build.exec_func('sstate_create_package', d) |
383 | 383 | ||
@@ -389,7 +389,7 @@ def pstaging_fetch(sstatepkg, d): | |||
389 | import bb.fetch2 | 389 | import bb.fetch2 |
390 | 390 | ||
391 | # Only try and fetch if the user has configured a mirror | 391 | # Only try and fetch if the user has configured a mirror |
392 | mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True) | 392 | mirrors = d.getVar('SSTATE_MIRRORS', True) |
393 | if not mirrors: | 393 | if not mirrors: |
394 | return | 394 | return |
395 | 395 | ||
@@ -402,9 +402,9 @@ def pstaging_fetch(sstatepkg, d): | |||
402 | 402 | ||
403 | bb.mkdirhier(dldir) | 403 | bb.mkdirhier(dldir) |
404 | 404 | ||
405 | bb.data.setVar('DL_DIR', dldir, localdata) | 405 | localdata.setVar('DL_DIR', dldir) |
406 | bb.data.setVar('PREMIRRORS', mirrors, localdata) | 406 | localdata.setVar('PREMIRRORS', mirrors) |
407 | bb.data.setVar('SRC_URI', srcuri, localdata) | 407 | localdata.setVar('SRC_URI', srcuri) |
408 | 408 | ||
409 | # Try a fetch from the sstate mirror, if it fails just return and | 409 | # Try a fetch from the sstate mirror, if it fails just return and |
410 | # we will build the package | 410 | # we will build the package |
@@ -493,15 +493,15 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d): | |||
493 | else: | 493 | else: |
494 | bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) | 494 | bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) |
495 | 495 | ||
496 | mirrors = bb.data.getVar("SSTATE_MIRRORS", d, True) | 496 | mirrors = d.getVar("SSTATE_MIRRORS", True) |
497 | if mirrors: | 497 | if mirrors: |
498 | # Copy the data object and override DL_DIR and SRC_URI | 498 | # Copy the data object and override DL_DIR and SRC_URI |
499 | localdata = bb.data.createCopy(d) | 499 | localdata = bb.data.createCopy(d) |
500 | bb.data.update_data(localdata) | 500 | bb.data.update_data(localdata) |
501 | 501 | ||
502 | dldir = bb.data.expand("${SSTATE_DIR}", localdata) | 502 | dldir = bb.data.expand("${SSTATE_DIR}", localdata) |
503 | bb.data.setVar('DL_DIR', dldir, localdata) | 503 | localdata.setVar('DL_DIR', dldir) |
504 | bb.data.setVar('PREMIRRORS', mirrors, localdata) | 504 | localdata.setVar('PREMIRRORS', mirrors) |
505 | 505 | ||
506 | bb.debug(2, "SState using premirror of: %s" % mirrors) | 506 | bb.debug(2, "SState using premirror of: %s" % mirrors) |
507 | 507 | ||
@@ -513,7 +513,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d): | |||
513 | sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task]) | 513 | sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task]) |
514 | 514 | ||
515 | srcuri = "file://" + os.path.basename(sstatefile) | 515 | srcuri = "file://" + os.path.basename(sstatefile) |
516 | bb.data.setVar('SRC_URI', srcuri, localdata) | 516 | localdata.setVar('SRC_URI', srcuri) |
517 | bb.debug(2, "SState: Attempting to fetch %s" % srcuri) | 517 | bb.debug(2, "SState: Attempting to fetch %s" % srcuri) |
518 | 518 | ||
519 | try: | 519 | try: |
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index 04d51ede42..831840456b 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
@@ -84,7 +84,7 @@ python do_populate_sysroot () { | |||
84 | # | 84 | # |
85 | 85 | ||
86 | bb.build.exec_func("sysroot_stage_all", d) | 86 | bb.build.exec_func("sysroot_stage_all", d) |
87 | for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split(): | 87 | for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split(): |
88 | bb.build.exec_func(f, d) | 88 | bb.build.exec_func(f, d) |
89 | } | 89 | } |
90 | 90 | ||
@@ -100,8 +100,8 @@ python do_populate_sysroot_setscene () { | |||
100 | addtask do_populate_sysroot_setscene | 100 | addtask do_populate_sysroot_setscene |
101 | 101 | ||
102 | python () { | 102 | python () { |
103 | if bb.data.getVar('do_stage', d, True) is not None: | 103 | if d.getVar('do_stage', True) is not None: |
104 | bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % bb.data.getVar("FILE", d, True)) | 104 | bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % d.getVar("FILE", True)) |
105 | } | 105 | } |
106 | 106 | ||
107 | 107 | ||
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass index fb7597470b..0cc6b851bc 100644 --- a/meta/classes/syslinux.bbclass +++ b/meta/classes/syslinux.bbclass | |||
@@ -8,12 +8,12 @@ python build_syslinux_menu () { | |||
8 | import copy | 8 | import copy |
9 | import sys | 9 | import sys |
10 | 10 | ||
11 | workdir = bb.data.getVar('WORKDIR', d, 1) | 11 | workdir = d.getVar('WORKDIR', 1) |
12 | if not workdir: | 12 | if not workdir: |
13 | bb.error("WORKDIR is not defined") | 13 | bb.error("WORKDIR is not defined") |
14 | return | 14 | return |
15 | 15 | ||
16 | labels = bb.data.getVar('LABELS', d, 1) | 16 | labels = d.getVar('LABELS', 1) |
17 | if not labels: | 17 | if not labels: |
18 | bb.debug(1, "LABELS not defined, nothing to do") | 18 | bb.debug(1, "LABELS not defined, nothing to do") |
19 | return | 19 | return |
@@ -22,7 +22,7 @@ python build_syslinux_menu () { | |||
22 | bb.debug(1, "No labels, nothing to do") | 22 | bb.debug(1, "No labels, nothing to do") |
23 | return | 23 | return |
24 | 24 | ||
25 | cfile = bb.data.getVar('SYSLINUXMENU', d, 1) | 25 | cfile = d.getVar('SYSLINUXMENU', 1) |
26 | if not cfile: | 26 | if not cfile: |
27 | raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') | 27 | raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') |
28 | 28 | ||
@@ -45,15 +45,15 @@ python build_syslinux_menu () { | |||
45 | from copy import deepcopy | 45 | from copy import deepcopy |
46 | localdata = deepcopy(d) | 46 | localdata = deepcopy(d) |
47 | 47 | ||
48 | overrides = bb.data.getVar('OVERRIDES', localdata) | 48 | overrides = localdata.getVar('OVERRIDES') |
49 | if not overrides: | 49 | if not overrides: |
50 | raise bb.build.FuncFailed('OVERRIDES not defined') | 50 | raise bb.build.FuncFailed('OVERRIDES not defined') |
51 | overrides = bb.data.expand(overrides, localdata) | 51 | overrides = bb.data.expand(overrides, localdata) |
52 | 52 | ||
53 | bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata) | 53 | localdata.setVar('OVERRIDES', label + ':' + overrides) |
54 | bb.data.update_data(localdata) | 54 | bb.data.update_data(localdata) |
55 | 55 | ||
56 | usage = bb.data.getVar('USAGE', localdata, 1) | 56 | usage = localdata.getVar('USAGE', 1) |
57 | cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) | 57 | cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) |
58 | cfgfile.write('%s\n' % (usage)) | 58 | cfgfile.write('%s\n' % (usage)) |
59 | 59 | ||
@@ -67,12 +67,12 @@ python build_syslinux_cfg () { | |||
67 | import copy | 67 | import copy |
68 | import sys | 68 | import sys |
69 | 69 | ||
70 | workdir = bb.data.getVar('WORKDIR', d, 1) | 70 | workdir = d.getVar('WORKDIR', 1) |
71 | if not workdir: | 71 | if not workdir: |
72 | bb.error("WORKDIR not defined, unable to package") | 72 | bb.error("WORKDIR not defined, unable to package") |
73 | return | 73 | return |
74 | 74 | ||
75 | labels = bb.data.getVar('LABELS', d, 1) | 75 | labels = d.getVar('LABELS', 1) |
76 | if not labels: | 76 | if not labels: |
77 | bb.debug(1, "LABELS not defined, nothing to do") | 77 | bb.debug(1, "LABELS not defined, nothing to do") |
78 | return | 78 | return |
@@ -81,7 +81,7 @@ python build_syslinux_cfg () { | |||
81 | bb.debug(1, "No labels, nothing to do") | 81 | bb.debug(1, "No labels, nothing to do") |
82 | return | 82 | return |
83 | 83 | ||
84 | cfile = bb.data.getVar('SYSLINUXCFG', d, 1) | 84 | cfile = d.getVar('SYSLINUXCFG', 1) |
85 | if not cfile: | 85 | if not cfile: |
86 | raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') | 86 | raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') |
87 | 87 | ||
@@ -98,7 +98,7 @@ python build_syslinux_cfg () { | |||
98 | 98 | ||
99 | cfgfile.write('# Automatically created by OE\n') | 99 | cfgfile.write('# Automatically created by OE\n') |
100 | 100 | ||
101 | opts = bb.data.getVar('SYSLINUX_OPTS', d, 1) | 101 | opts = d.getVar('SYSLINUX_OPTS', 1) |
102 | 102 | ||
103 | if opts: | 103 | if opts: |
104 | for opt in opts.split(';'): | 104 | for opt in opts.split(';'): |
@@ -107,7 +107,7 @@ python build_syslinux_cfg () { | |||
107 | cfgfile.write('ALLOWOPTIONS 1\n'); | 107 | cfgfile.write('ALLOWOPTIONS 1\n'); |
108 | cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) | 108 | cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) |
109 | 109 | ||
110 | timeout = bb.data.getVar('SYSLINUX_TIMEOUT', d, 1) | 110 | timeout = d.getVar('SYSLINUX_TIMEOUT', 1) |
111 | 111 | ||
112 | if timeout: | 112 | if timeout: |
113 | cfgfile.write('TIMEOUT %s\n' % timeout) | 113 | cfgfile.write('TIMEOUT %s\n' % timeout) |
@@ -116,29 +116,29 @@ python build_syslinux_cfg () { | |||
116 | 116 | ||
117 | cfgfile.write('PROMPT 1\n') | 117 | cfgfile.write('PROMPT 1\n') |
118 | 118 | ||
119 | menu = bb.data.getVar('AUTO_SYSLINUXMENU', d, 1) | 119 | menu = d.getVar('AUTO_SYSLINUXMENU', 1) |
120 | 120 | ||
121 | # This is ugly. My bad. | 121 | # This is ugly. My bad. |
122 | 122 | ||
123 | if menu: | 123 | if menu: |
124 | bb.build.exec_func('build_syslinux_menu', d) | 124 | bb.build.exec_func('build_syslinux_menu', d) |
125 | mfile = bb.data.getVar('SYSLINUXMENU', d, 1) | 125 | mfile = d.getVar('SYSLINUXMENU', 1) |
126 | cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) | 126 | cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) |
127 | 127 | ||
128 | for label in labels.split(): | 128 | for label in labels.split(): |
129 | localdata = bb.data.createCopy(d) | 129 | localdata = bb.data.createCopy(d) |
130 | 130 | ||
131 | overrides = bb.data.getVar('OVERRIDES', localdata, True) | 131 | overrides = localdata.getVar('OVERRIDES', True) |
132 | if not overrides: | 132 | if not overrides: |
133 | raise bb.build.FuncFailed('OVERRIDES not defined') | 133 | raise bb.build.FuncFailed('OVERRIDES not defined') |
134 | 134 | ||
135 | bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata) | 135 | localdata.setVar('OVERRIDES', label + ':' + overrides) |
136 | bb.data.update_data(localdata) | 136 | bb.data.update_data(localdata) |
137 | 137 | ||
138 | cfgfile.write('LABEL %s\nKERNEL vmlinuz\n' % (label)) | 138 | cfgfile.write('LABEL %s\nKERNEL vmlinuz\n' % (label)) |
139 | 139 | ||
140 | append = bb.data.getVar('APPEND', localdata, 1) | 140 | append = localdata.getVar('APPEND', 1) |
141 | initrd = bb.data.getVar('INITRD', localdata, 1) | 141 | initrd = localdata.getVar('INITRD', 1) |
142 | 142 | ||
143 | if append: | 143 | if append: |
144 | cfgfile.write('APPEND ') | 144 | cfgfile.write('APPEND ') |
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass index 4edd704829..516d1a1c20 100644 --- a/meta/classes/task.bbclass +++ b/meta/classes/task.bbclass | |||
@@ -17,7 +17,7 @@ PACKAGE_ARCH = "all" | |||
17 | # to the list. Their dependencies (RRECOMMENDS) are handled as usual | 17 | # to the list. Their dependencies (RRECOMMENDS) are handled as usual |
18 | # by package_depchains in a following step. | 18 | # by package_depchains in a following step. |
19 | python () { | 19 | python () { |
20 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 20 | packages = d.getVar('PACKAGES', 1).split() |
21 | genpackages = [] | 21 | genpackages = [] |
22 | for pkg in packages: | 22 | for pkg in packages: |
23 | for postfix in ['-dbg', '-dev']: | 23 | for postfix in ['-dbg', '-dev']: |
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass index 875efed020..c936a27a49 100644 --- a/meta/classes/toolchain-scripts.bbclass +++ b/meta/classes/toolchain-scripts.bbclass | |||
@@ -137,8 +137,8 @@ toolchain_create_sdk_version () { | |||
137 | } | 137 | } |
138 | 138 | ||
139 | python __anonymous () { | 139 | python __anonymous () { |
140 | deps = bb.data.getVarFlag('do_configure', 'depends', d) or "" | 140 | deps = d.getVarFlag('do_configure', 'depends') or "" |
141 | for dep in (bb.data.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', d, True) or "").split(): | 141 | for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split(): |
142 | deps += " %s:do_populate_sysroot" % dep | 142 | deps += " %s:do_populate_sysroot" % dep |
143 | bb.data.setVarFlag('do_configure', 'depends', deps, d) | 143 | d.setVarFlag('do_configure', 'depends', deps) |
144 | } | 144 | } |
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass index ba812210a7..e5ba6550d7 100644 --- a/meta/classes/update-alternatives.bbclass +++ b/meta/classes/update-alternatives.bbclass | |||
@@ -78,38 +78,38 @@ fi | |||
78 | } | 78 | } |
79 | 79 | ||
80 | def update_alternatives_after_parse(d): | 80 | def update_alternatives_after_parse(d): |
81 | if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: | 81 | if d.getVar('ALTERNATIVE_LINKS') != None: |
82 | doinstall = bb.data.getVar('do_install', d, 0) | 82 | doinstall = d.getVar('do_install', 0) |
83 | doinstall += bb.data.getVar('update_alternatives_batch_doinstall', d, 0) | 83 | doinstall += d.getVar('update_alternatives_batch_doinstall', 0) |
84 | bb.data.setVar('do_install', doinstall, d) | 84 | d.setVar('do_install', doinstall) |
85 | return | 85 | return |
86 | 86 | ||
87 | if bb.data.getVar('ALTERNATIVE_NAME', d) == None: | 87 | if d.getVar('ALTERNATIVE_NAME') == None: |
88 | raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d) | 88 | raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE') |
89 | if bb.data.getVar('ALTERNATIVE_PATH', d) == None: | 89 | if d.getVar('ALTERNATIVE_PATH') == None: |
90 | raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d) | 90 | raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE') |
91 | 91 | ||
92 | python __anonymous() { | 92 | python __anonymous() { |
93 | update_alternatives_after_parse(d) | 93 | update_alternatives_after_parse(d) |
94 | } | 94 | } |
95 | 95 | ||
96 | python populate_packages_prepend () { | 96 | python populate_packages_prepend () { |
97 | pkg = bb.data.getVar('PN', d, 1) | 97 | pkg = d.getVar('PN', 1) |
98 | bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) | 98 | bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) |
99 | postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1) | 99 | postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) |
100 | if not postinst: | 100 | if not postinst: |
101 | postinst = '#!/bin/sh\n' | 101 | postinst = '#!/bin/sh\n' |
102 | if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: | 102 | if d.getVar('ALTERNATIVE_LINKS') != None: |
103 | postinst += bb.data.getVar('update_alternatives_batch_postinst', d, 1) | 103 | postinst += d.getVar('update_alternatives_batch_postinst', 1) |
104 | else: | 104 | else: |
105 | postinst += bb.data.getVar('update_alternatives_postinst', d, 1) | 105 | postinst += d.getVar('update_alternatives_postinst', 1) |
106 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 106 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
107 | postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1) | 107 | postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) |
108 | if not postrm: | 108 | if not postrm: |
109 | postrm = '#!/bin/sh\n' | 109 | postrm = '#!/bin/sh\n' |
110 | if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: | 110 | if d.getVar('ALTERNATIVE_LINKS') != None: |
111 | postrm += bb.data.getVar('update_alternatives_batch_postrm', d, 1) | 111 | postrm += d.getVar('update_alternatives_batch_postrm', 1) |
112 | else: | 112 | else: |
113 | postrm += bb.data.getVar('update_alternatives_postrm', d, 1) | 113 | postrm += d.getVar('update_alternatives_postrm', 1) |
114 | bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) | 114 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
115 | } | 115 | } |
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass index 7e4dda7cbb..492c5fba2d 100644 --- a/meta/classes/update-rc.d.bbclass +++ b/meta/classes/update-rc.d.bbclass | |||
@@ -30,11 +30,11 @@ update-rc.d $D ${INITSCRIPT_NAME} remove | |||
30 | 30 | ||
31 | 31 | ||
32 | def update_rc_after_parse(d): | 32 | def update_rc_after_parse(d): |
33 | if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None: | 33 | if d.getVar('INITSCRIPT_PACKAGES') == None: |
34 | if bb.data.getVar('INITSCRIPT_NAME', d) == None: | 34 | if d.getVar('INITSCRIPT_NAME') == None: |
35 | raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d) | 35 | raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % d.getVar('FILE') |
36 | if bb.data.getVar('INITSCRIPT_PARAMS', d) == None: | 36 | if d.getVar('INITSCRIPT_PARAMS') == None: |
37 | raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d) | 37 | raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % d.getVar('FILE') |
38 | 38 | ||
39 | python __anonymous() { | 39 | python __anonymous() { |
40 | update_rc_after_parse(d) | 40 | update_rc_after_parse(d) |
@@ -44,7 +44,7 @@ python populate_packages_prepend () { | |||
44 | def update_rcd_package(pkg): | 44 | def update_rcd_package(pkg): |
45 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) | 45 | bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) |
46 | localdata = bb.data.createCopy(d) | 46 | localdata = bb.data.createCopy(d) |
47 | overrides = bb.data.getVar("OVERRIDES", localdata, 1) | 47 | overrides = localdata.getVar("OVERRIDES", 1) |
48 | bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata) | 48 | bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata) |
49 | bb.data.update_data(localdata) | 49 | bb.data.update_data(localdata) |
50 | 50 | ||
@@ -53,28 +53,28 @@ python populate_packages_prepend () { | |||
53 | execute on the target. Not doing so may cause update_rc.d postinst invoked | 53 | execute on the target. Not doing so may cause update_rc.d postinst invoked |
54 | twice to cause unwanted warnings. | 54 | twice to cause unwanted warnings. |
55 | """ | 55 | """ |
56 | postinst = bb.data.getVar('pkg_postinst', localdata, 1) | 56 | postinst = localdata.getVar('pkg_postinst', 1) |
57 | if not postinst: | 57 | if not postinst: |
58 | postinst = '#!/bin/sh\n' | 58 | postinst = '#!/bin/sh\n' |
59 | postinst += bb.data.getVar('updatercd_postinst', localdata, 1) | 59 | postinst += localdata.getVar('updatercd_postinst', 1) |
60 | bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) | 60 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
61 | 61 | ||
62 | prerm = bb.data.getVar('pkg_prerm', localdata, 1) | 62 | prerm = localdata.getVar('pkg_prerm', 1) |
63 | if not prerm: | 63 | if not prerm: |
64 | prerm = '#!/bin/sh\n' | 64 | prerm = '#!/bin/sh\n' |
65 | prerm += bb.data.getVar('updatercd_prerm', localdata, 1) | 65 | prerm += localdata.getVar('updatercd_prerm', 1) |
66 | bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d) | 66 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
67 | 67 | ||
68 | postrm = bb.data.getVar('pkg_postrm', localdata, 1) | 68 | postrm = localdata.getVar('pkg_postrm', 1) |
69 | if not postrm: | 69 | if not postrm: |
70 | postrm = '#!/bin/sh\n' | 70 | postrm = '#!/bin/sh\n' |
71 | postrm += bb.data.getVar('updatercd_postrm', localdata, 1) | 71 | postrm += localdata.getVar('updatercd_postrm', 1) |
72 | bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) | 72 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
73 | 73 | ||
74 | pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1) | 74 | pkgs = d.getVar('INITSCRIPT_PACKAGES', 1) |
75 | if pkgs == None: | 75 | if pkgs == None: |
76 | pkgs = bb.data.getVar('UPDATERCPN', d, 1) | 76 | pkgs = d.getVar('UPDATERCPN', 1) |
77 | packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() | 77 | packages = (d.getVar('PACKAGES', 1) or "").split() |
78 | if not pkgs in packages and packages != []: | 78 | if not pkgs in packages and packages != []: |
79 | pkgs = packages[0] | 79 | pkgs = packages[0] |
80 | for pkg in pkgs.split(): | 80 | for pkg in pkgs.split(): |
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass index f3dd8236ee..928f619b7c 100644 --- a/meta/classes/useradd.bbclass +++ b/meta/classes/useradd.bbclass | |||
@@ -107,11 +107,11 @@ def update_useradd_after_parse(d): | |||
107 | useradd_packages = d.getVar('USERADD_PACKAGES', True) | 107 | useradd_packages = d.getVar('USERADD_PACKAGES', True) |
108 | 108 | ||
109 | if not useradd_packages: | 109 | if not useradd_packages: |
110 | raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % bb.data.getVar('FILE', d) | 110 | raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE') |
111 | 111 | ||
112 | for pkg in useradd_packages.split(): | 112 | for pkg in useradd_packages.split(): |
113 | if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True): | 113 | if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True): |
114 | raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (bb.data.getVar('FILE', d), pkg) | 114 | raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg) |
115 | 115 | ||
116 | python __anonymous() { | 116 | python __anonymous() { |
117 | update_useradd_after_parse(d) | 117 | update_useradd_after_parse(d) |
@@ -147,12 +147,12 @@ fakeroot python populate_packages_prepend () { | |||
147 | if not preinst: | 147 | if not preinst: |
148 | preinst = '#!/bin/sh\n' | 148 | preinst = '#!/bin/sh\n' |
149 | preinst += d.getVar('useradd_preinst', True) | 149 | preinst += d.getVar('useradd_preinst', True) |
150 | bb.data.setVar('pkg_preinst_%s' % pkg, preinst, d) | 150 | d.setVar('pkg_preinst_%s' % pkg, preinst) |
151 | 151 | ||
152 | # RDEPENDS setup | 152 | # RDEPENDS setup |
153 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" | 153 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" |
154 | rdepends += " base-passwd shadow" | 154 | rdepends += " base-passwd shadow" |
155 | bb.data.setVar("RDEPENDS_%s" % pkg, rdepends, d) | 155 | d.setVar("RDEPENDS_%s" % pkg, rdepends) |
156 | 156 | ||
157 | # Add the user/group preinstall scripts and RDEPENDS requirements | 157 | # Add the user/group preinstall scripts and RDEPENDS requirements |
158 | # to packages specified by USERADD_PACKAGES | 158 | # to packages specified by USERADD_PACKAGES |
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index 64bd84a5d3..009ef1fd04 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
@@ -6,7 +6,7 @@ python do_listtasks() { | |||
6 | #bb.data.emit_env(sys.__stdout__, d) | 6 | #bb.data.emit_env(sys.__stdout__, d) |
7 | # emit the metadata which isnt valid shell | 7 | # emit the metadata which isnt valid shell |
8 | for e in d.keys(): | 8 | for e in d.keys(): |
9 | if bb.data.getVarFlag(e, 'task', d): | 9 | if d.getVarFlag(e, 'task'): |
10 | bb.plain("%s" % e) | 10 | bb.plain("%s" % e) |
11 | } | 11 | } |
12 | 12 | ||
@@ -20,18 +20,18 @@ python do_clean() { | |||
20 | bb.note("Removing " + dir) | 20 | bb.note("Removing " + dir) |
21 | oe.path.remove(dir) | 21 | oe.path.remove(dir) |
22 | 22 | ||
23 | dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d) | 23 | dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d) |
24 | bb.note("Removing " + dir) | 24 | bb.note("Removing " + dir) |
25 | oe.path.remove(dir) | 25 | oe.path.remove(dir) |
26 | 26 | ||
27 | for f in (bb.data.getVar('CLEANFUNCS', d, 1) or '').split(): | 27 | for f in (d.getVar('CLEANFUNCS', 1) or '').split(): |
28 | bb.build.exec_func(f, d) | 28 | bb.build.exec_func(f, d) |
29 | } | 29 | } |
30 | 30 | ||
31 | addtask checkuri | 31 | addtask checkuri |
32 | do_checkuri[nostamp] = "1" | 32 | do_checkuri[nostamp] = "1" |
33 | python do_checkuri() { | 33 | python do_checkuri() { |
34 | src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split() | 34 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
35 | if len(src_uri) == 0: | 35 | if len(src_uri) == 0: |
36 | return | 36 | return |
37 | 37 | ||
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass index 3c2e342f91..103fa9a546 100644 --- a/meta/classes/utils.bbclass +++ b/meta/classes/utils.bbclass | |||
@@ -331,12 +331,12 @@ def explode_deps(s): | |||
331 | 331 | ||
332 | def base_set_filespath(path, d): | 332 | def base_set_filespath(path, d): |
333 | filespath = [] | 333 | filespath = [] |
334 | extrapaths = (bb.data.getVar("FILESEXTRAPATHS", d, True) or "") | 334 | extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "") |
335 | # Don't prepend empty strings to the path list | 335 | # Don't prepend empty strings to the path list |
336 | if extrapaths != "": | 336 | if extrapaths != "": |
337 | path = extrapaths.split(":") + path | 337 | path = extrapaths.split(":") + path |
338 | # The ":" ensures we have an 'empty' override | 338 | # The ":" ensures we have an 'empty' override |
339 | overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":" | 339 | overrides = (d.getVar("OVERRIDES", 1) or "") + ":" |
340 | for p in path: | 340 | for p in path: |
341 | if p != "": | 341 | if p != "": |
342 | for o in overrides.split(":"): | 342 | for o in overrides.split(":"): |
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index d405b6a7ff..7e75be2d78 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf | |||
@@ -99,7 +99,7 @@ ABIEXTENSION ??= "" | |||
99 | TARGET_ARCH = "${TUNE_ARCH}" | 99 | TARGET_ARCH = "${TUNE_ARCH}" |
100 | TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" | 100 | TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" |
101 | TARGET_VENDOR = "-oe" | 101 | TARGET_VENDOR = "-oe" |
102 | TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + bb.data.getVar('TARGET_OS', d, 1), ''][bb.data.getVar('TARGET_OS', d, 1) == ('' or 'custom')]}" | 102 | TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', 1), ''][d.getVar('TARGET_OS', 1) == ('' or 'custom')]}" |
103 | TARGET_PREFIX = "${TARGET_SYS}-" | 103 | TARGET_PREFIX = "${TARGET_SYS}-" |
104 | TARGET_CC_ARCH = "${TUNE_CCARGS}" | 104 | TARGET_CC_ARCH = "${TUNE_CCARGS}" |
105 | TARGET_LD_ARCH = "${TUNE_LDARGS}" | 105 | TARGET_LD_ARCH = "${TUNE_LDARGS}" |
@@ -108,7 +108,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}" | |||
108 | SDK_ARCH = "${BUILD_ARCH}" | 108 | SDK_ARCH = "${BUILD_ARCH}" |
109 | SDK_OS = "${BUILD_OS}" | 109 | SDK_OS = "${BUILD_OS}" |
110 | SDK_VENDOR = "-oesdk" | 110 | SDK_VENDOR = "-oesdk" |
111 | SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + bb.data.getVar('SDK_OS', d, 1), ''][bb.data.getVar('SDK_OS', d, 1) == ('' or 'custom')]}" | 111 | SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', 1), ''][d.getVar('SDK_OS', 1) == ('' or 'custom')]}" |
112 | SDK_PREFIX = "${SDK_SYS}-" | 112 | SDK_PREFIX = "${SDK_SYS}-" |
113 | SDK_CC_ARCH = "${BUILD_CC_ARCH}" | 113 | SDK_CC_ARCH = "${BUILD_CC_ARCH}" |
114 | SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" | 114 | SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" |
@@ -116,7 +116,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}" | |||
116 | SDK_AS_ARCH = "${BUILD_AS_ARCH}" | 116 | SDK_AS_ARCH = "${BUILD_AS_ARCH}" |
117 | 117 | ||
118 | PACKAGE_ARCH = "${TUNE_PKGARCH}" | 118 | PACKAGE_ARCH = "${TUNE_PKGARCH}" |
119 | MACHINE_ARCH = "${@[bb.data.getVar('TUNE_PKGARCH', d, 1), bb.data.getVar('MACHINE', d, 1)][bool(bb.data.getVar('MACHINE', d, 1))].replace('-', '_')}" | 119 | MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', 1), d.getVar('MACHINE', 1)][bool(d.getVar('MACHINE', 1))].replace('-', '_')}" |
120 | PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" | 120 | PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" |
121 | PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" | 121 | PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" |
122 | # MACHINE_ARCH shouldn't be included here as a variable dependency | 122 | # MACHINE_ARCH shouldn't be included here as a variable dependency |
@@ -167,33 +167,33 @@ ASSUME_PROVIDED = "\ | |||
167 | # Package default variables. | 167 | # Package default variables. |
168 | ################################################################## | 168 | ################################################################## |
169 | 169 | ||
170 | PN = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[0] or 'defaultpkgname'}" | 170 | PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}" |
171 | PV = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[1] or '1.0'}" | 171 | PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}" |
172 | PR = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[2] or 'r0'}" | 172 | PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[2] or 'r0'}" |
173 | PF = "${PN}-${EXTENDPE}${PV}-${PR}" | 173 | PF = "${PN}-${EXTENDPE}${PV}-${PR}" |
174 | EXTENDPE = "${@['','${PE\x7d_'][bb.data.getVar('PE',d,1) > 0]}" | 174 | EXTENDPE = "${@['','${PE\x7d_'][d.getVar('PE',1) > 0]}" |
175 | P = "${PN}-${PV}" | 175 | P = "${PN}-${PV}" |
176 | 176 | ||
177 | EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][bb.data.getVar('PRAUTO',d,1) is None]}" | 177 | EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][d.getVar('PRAUTO',1) is None]}" |
178 | PRAUTOINX = "${PF}" | 178 | PRAUTOINX = "${PF}" |
179 | 179 | ||
180 | PKGV ?= "${PV}" | 180 | PKGV ?= "${PV}" |
181 | PKGR ?= "${PR}${EXTENDPRAUTO}" | 181 | PKGR ?= "${PR}${EXTENDPRAUTO}" |
182 | PKGE ?= "${@['','${PE\x7d'][bb.data.getVar('PE',d,1) > 0]}" | 182 | PKGE ?= "${@['','${PE\x7d'][d.getVar('PE',1) > 0]}" |
183 | EXTENDPKGEVER = "${@['','${PKGE\x7d:'][bb.data.getVar('PKGE',d,1).strip() != '']}" | 183 | EXTENDPKGEVER = "${@['','${PKGE\x7d:'][d.getVar('PKGE',1).strip() != '']}" |
184 | EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}" | 184 | EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}" |
185 | 185 | ||
186 | # Base package name | 186 | # Base package name |
187 | # Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial" | 187 | # Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial" |
188 | # otherwise it is the same as PN and P | 188 | # otherwise it is the same as PN and P |
189 | SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -nativesdk -crosssdk -cross-canadian" | 189 | SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -nativesdk -crosssdk -cross-canadian" |
190 | BPN = "${@base_prune_suffix(bb.data.getVar('PN', d, True), bb.data.getVar('SPECIAL_PKGSUFFIX', d, True).split(), d)}" | 190 | BPN = "${@base_prune_suffix(d.getVar('PN', True), d.getVar('SPECIAL_PKGSUFFIX', True).split(), d)}" |
191 | BP = "${BPN}-${PV}" | 191 | BP = "${BPN}-${PV}" |
192 | 192 | ||
193 | # | 193 | # |
194 | # network based PR service | 194 | # network based PR service |
195 | # | 195 | # |
196 | USE_PR_SERV = "${@[1,0][(bb.data.getVar('PRSERV_HOST',d,1) is None) or (bb.data.getVar('PRSERV_PORT',d,1) is None)]}" | 196 | USE_PR_SERV = "${@[1,0][(d.getVar('PRSERV_HOST',1) is None) or (d.getVar('PRSERV_PORT',1) is None)]}" |
197 | 197 | ||
198 | # Package info. | 198 | # Package info. |
199 | 199 | ||
@@ -288,7 +288,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir | |||
288 | 288 | ||
289 | DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" | 289 | DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" |
290 | 290 | ||
291 | FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, 1) == 'debug-file-directory'], d, 1)}" | 291 | FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], d, 1)}" |
292 | 292 | ||
293 | SECTION_${PN}-dbg = "devel" | 293 | SECTION_${PN}-dbg = "devel" |
294 | ALLOW_EMPTY_${PN}-dbg = "1" | 294 | ALLOW_EMPTY_${PN}-dbg = "1" |
@@ -298,17 +298,17 @@ FILES_${PN}-locale = "${datadir}/locale" | |||
298 | 298 | ||
299 | # File manifest | 299 | # File manifest |
300 | 300 | ||
301 | FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}" | 301 | FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}" |
302 | # FILESPATH is set in base.bbclass | 302 | # FILESPATH is set in base.bbclass |
303 | #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" | 303 | #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" |
304 | FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}" | 304 | FILESDIR = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" |
305 | 305 | ||
306 | ################################################################## | 306 | ################################################################## |
307 | # General work and output directories for the build system. | 307 | # General work and output directories for the build system. |
308 | ################################################################## | 308 | ################################################################## |
309 | 309 | ||
310 | TMPDIR ?= "${TOPDIR}/tmp" | 310 | TMPDIR ?= "${TOPDIR}/tmp" |
311 | CACHE = "${TMPDIR}/cache${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}" | 311 | CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" |
312 | # The persistent cache should be shared by all builds | 312 | # The persistent cache should be shared by all builds |
313 | PERSISTENT_DIR = "${TMPDIR}/cache" | 313 | PERSISTENT_DIR = "${TMPDIR}/cache" |
314 | LOG_DIR = "${TMPDIR}/log" | 314 | LOG_DIR = "${TMPDIR}/log" |
@@ -403,7 +403,7 @@ export PATH | |||
403 | # Build utility info. | 403 | # Build utility info. |
404 | ################################################################## | 404 | ################################################################## |
405 | 405 | ||
406 | CCACHE = "${@bb.which(bb.data.getVar('PATH', d, 1), 'ccache') and 'ccache '}" | 406 | CCACHE = "${@bb.which(d.getVar('PATH', 1), 'ccache') and 'ccache '}" |
407 | TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" | 407 | TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" |
408 | 408 | ||
409 | export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" | 409 | export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" |
@@ -505,7 +505,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types" | |||
505 | # Disabled until the option works properly -feliminate-dwarf2-dups | 505 | # Disabled until the option works properly -feliminate-dwarf2-dups |
506 | FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" | 506 | FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" |
507 | DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" | 507 | DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" |
508 | SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][bb.data.getVar('DEBUG_BUILD', d, 1) == '1'], d, 1)}" | 508 | SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], d, 1)}" |
509 | BUILD_OPTIMIZATION = "-O2 -pipe" | 509 | BUILD_OPTIMIZATION = "-O2 -pipe" |
510 | 510 | ||
511 | ################################################################## | 511 | ################################################################## |
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf index 1a297785fd..064c1e0537 100644 --- a/meta/conf/distro/defaultsetup.conf +++ b/meta/conf/distro/defaultsetup.conf | |||
@@ -13,7 +13,7 @@ require conf/distro/include/tclibc-${TCLIBC}.inc | |||
13 | TCLIBCAPPEND ?= "-${TCLIBC}" | 13 | TCLIBCAPPEND ?= "-${TCLIBC}" |
14 | TMPDIR .= "${TCLIBCAPPEND}" | 14 | TMPDIR .= "${TCLIBCAPPEND}" |
15 | 15 | ||
16 | CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}" | 16 | CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" |
17 | 17 | ||
18 | USER_CLASSES ?= "" | 18 | USER_CLASSES ?= "" |
19 | PACKAGE_CLASSES ?= "package_ipk" | 19 | PACKAGE_CLASSES ?= "package_ipk" |
diff --git a/meta/conf/machine/include/arm/feature-arm-thumb.inc b/meta/conf/machine/include/arm/feature-arm-thumb.inc index d606a35ca9..cd34199b04 100644 --- a/meta/conf/machine/include/arm/feature-arm-thumb.inc +++ b/meta/conf/machine/include/arm/feature-arm-thumb.inc | |||
@@ -5,7 +5,7 @@ | |||
5 | # but requires more instructions (140% for 70% smaller code) so may be | 5 | # but requires more instructions (140% for 70% smaller code) so may be |
6 | # slower. | 6 | # slower. |
7 | TUNEVALID[thumb] = "Use thumb instructions instead of ARM" | 7 | TUNEVALID[thumb] = "Use thumb instructions instead of ARM" |
8 | ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" | 8 | ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" |
9 | TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" | 9 | TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" |
10 | OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" | 10 | OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" |
11 | 11 | ||
diff --git a/meta/conf/machine/include/tune-thumb.inc b/meta/conf/machine/include/tune-thumb.inc index 9f6ce95a47..85473ce741 100644 --- a/meta/conf/machine/include/tune-thumb.inc +++ b/meta/conf/machine/include/tune-thumb.inc | |||
@@ -16,15 +16,15 @@ THUMB_INTERWORK ?= "yes" | |||
16 | # arm system and vice versa. It is strongly recommended that DISTROs not | 16 | # arm system and vice versa. It is strongly recommended that DISTROs not |
17 | # turn this off - the actual cost is very small. | 17 | # turn this off - the actual cost is very small. |
18 | 18 | ||
19 | OVERRIDE_THUMB = "${@['', ':thumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" | 19 | OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" |
20 | OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}" | 20 | OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" |
21 | OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" | 21 | OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" |
22 | 22 | ||
23 | # Compiler and linker options for application code and kernel code. These | 23 | # Compiler and linker options for application code and kernel code. These |
24 | # options ensure that the compiler has the correct settings for the selected | 24 | # options ensure that the compiler has the correct settings for the selected |
25 | # instruction set and interworking. | 25 | # instruction set and interworking. |
26 | ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}" | 26 | ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" |
27 | ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" | 27 | ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" |
28 | 28 | ||
29 | # | 29 | # |
30 | TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" | 30 | TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" |
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py index 4721355bd8..ad391e3b01 100644 --- a/meta/lib/oe/distro_check.py +++ b/meta/lib/oe/distro_check.py | |||
@@ -276,32 +276,32 @@ def compare_in_distro_packages_list(distro_check_dir, d): | |||
276 | localdata = bb.data.createCopy(d) | 276 | localdata = bb.data.createCopy(d) |
277 | pkglst_dir = os.path.join(distro_check_dir, "package_lists") | 277 | pkglst_dir = os.path.join(distro_check_dir, "package_lists") |
278 | matching_distros = [] | 278 | matching_distros = [] |
279 | pn = bb.data.getVar('PN', d, True) | 279 | pn = d.getVar('PN', True) |
280 | recipe_name = bb.data.getVar('PN', d, True) | 280 | recipe_name = d.getVar('PN', True) |
281 | bb.note("Checking: %s" % pn) | 281 | bb.note("Checking: %s" % pn) |
282 | 282 | ||
283 | trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"}) | 283 | trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"}) |
284 | 284 | ||
285 | if pn.find("-native") != -1: | 285 | if pn.find("-native") != -1: |
286 | pnstripped = pn.split("-native") | 286 | pnstripped = pn.split("-native") |
287 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 287 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
288 | bb.data.update_data(localdata) | 288 | bb.data.update_data(localdata) |
289 | recipe_name = pnstripped[0] | 289 | recipe_name = pnstripped[0] |
290 | 290 | ||
291 | if pn.find("-cross") != -1: | 291 | if pn.find("-cross") != -1: |
292 | pnstripped = pn.split("-cross") | 292 | pnstripped = pn.split("-cross") |
293 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 293 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
294 | bb.data.update_data(localdata) | 294 | bb.data.update_data(localdata) |
295 | recipe_name = pnstripped[0] | 295 | recipe_name = pnstripped[0] |
296 | 296 | ||
297 | if pn.find("-initial") != -1: | 297 | if pn.find("-initial") != -1: |
298 | pnstripped = pn.split("-initial") | 298 | pnstripped = pn.split("-initial") |
299 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) | 299 | bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) |
300 | bb.data.update_data(localdata) | 300 | bb.data.update_data(localdata) |
301 | recipe_name = pnstripped[0] | 301 | recipe_name = pnstripped[0] |
302 | 302 | ||
303 | bb.note("Recipe: %s" % recipe_name) | 303 | bb.note("Recipe: %s" % recipe_name) |
304 | tmp = bb.data.getVar('DISTRO_PN_ALIAS', localdata, True) | 304 | tmp = localdata.getVar('DISTRO_PN_ALIAS', True) |
305 | 305 | ||
306 | distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) | 306 | distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) |
307 | 307 | ||
@@ -343,23 +343,23 @@ def compare_in_distro_packages_list(distro_check_dir, d): | |||
343 | return matching_distros | 343 | return matching_distros |
344 | 344 | ||
345 | def create_log_file(d, logname): | 345 | def create_log_file(d, logname): |
346 | logpath = bb.data.getVar('LOG_DIR', d, True) | 346 | logpath = d.getVar('LOG_DIR', True) |
347 | bb.utils.mkdirhier(logpath) | 347 | bb.utils.mkdirhier(logpath) |
348 | logfn, logsuffix = os.path.splitext(logname) | 348 | logfn, logsuffix = os.path.splitext(logname) |
349 | logfile = os.path.join(logpath, "%s.%s%s" % (logfn, bb.data.getVar('DATETIME', d, True), logsuffix)) | 349 | logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix)) |
350 | if not os.path.exists(logfile): | 350 | if not os.path.exists(logfile): |
351 | slogfile = os.path.join(logpath, logname) | 351 | slogfile = os.path.join(logpath, logname) |
352 | if os.path.exists(slogfile): | 352 | if os.path.exists(slogfile): |
353 | os.remove(slogfile) | 353 | os.remove(slogfile) |
354 | os.system("touch %s" % logfile) | 354 | os.system("touch %s" % logfile) |
355 | os.symlink(logfile, slogfile) | 355 | os.symlink(logfile, slogfile) |
356 | bb.data.setVar('LOG_FILE', logfile, d) | 356 | d.setVar('LOG_FILE', logfile) |
357 | return logfile | 357 | return logfile |
358 | 358 | ||
359 | 359 | ||
360 | def save_distro_check_result(result, datetime, result_file, d): | 360 | def save_distro_check_result(result, datetime, result_file, d): |
361 | pn = bb.data.getVar('PN', d, True) | 361 | pn = d.getVar('PN', True) |
362 | logdir = bb.data.getVar('LOG_DIR', d, True) | 362 | logdir = d.getVar('LOG_DIR', True) |
363 | if not logdir: | 363 | if not logdir: |
364 | bb.error("LOG_DIR variable is not defined, can't write the distro_check results") | 364 | bb.error("LOG_DIR variable is not defined, can't write the distro_check results") |
365 | return | 365 | return |
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 75fb91e0fb..f4ccb3e183 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -179,7 +179,7 @@ class GitApplyTree(PatchTree): | |||
179 | 179 | ||
180 | class QuiltTree(PatchSet): | 180 | class QuiltTree(PatchSet): |
181 | def _runcmd(self, args, run = True): | 181 | def _runcmd(self, args, run = True): |
182 | quiltrc = bb.data.getVar('QUILTRCFILE', self.d, 1) | 182 | quiltrc = self.d.getVar('QUILTRCFILE', 1) |
183 | if not run: | 183 | if not run: |
184 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args | 184 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args |
185 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) | 185 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) |
@@ -357,7 +357,7 @@ class UserResolver(Resolver): | |||
357 | # Patch application failed | 357 | # Patch application failed |
358 | patchcmd = self.patchset.Push(True, False, False) | 358 | patchcmd = self.patchset.Push(True, False, False) |
359 | 359 | ||
360 | t = bb.data.getVar('T', self.patchset.d, 1) | 360 | t = self.patchset.d.getVar('T', 1) |
361 | if not t: | 361 | if not t: |
362 | bb.msg.fatal("Build", "T not set") | 362 | bb.msg.fatal("Build", "T not set") |
363 | bb.utils.mkdirhier(t) | 363 | bb.utils.mkdirhier(t) |
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index b3473d3476..95daace6c6 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -16,19 +16,19 @@ def ifelse(condition, iftrue = True, iffalse = False): | |||
16 | return iffalse | 16 | return iffalse |
17 | 17 | ||
18 | def conditional(variable, checkvalue, truevalue, falsevalue, d): | 18 | def conditional(variable, checkvalue, truevalue, falsevalue, d): |
19 | if bb.data.getVar(variable,d,1) == checkvalue: | 19 | if d.getVar(variable,1) == checkvalue: |
20 | return truevalue | 20 | return truevalue |
21 | else: | 21 | else: |
22 | return falsevalue | 22 | return falsevalue |
23 | 23 | ||
24 | def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | 24 | def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): |
25 | if float(bb.data.getVar(variable,d,1)) <= float(checkvalue): | 25 | if float(d.getVar(variable,1)) <= float(checkvalue): |
26 | return truevalue | 26 | return truevalue |
27 | else: | 27 | else: |
28 | return falsevalue | 28 | return falsevalue |
29 | 29 | ||
30 | def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | 30 | def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): |
31 | result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue) | 31 | result = bb.vercmp(d.getVar(variable,True), checkvalue) |
32 | if result <= 0: | 32 | if result <= 0: |
33 | return truevalue | 33 | return truevalue |
34 | else: | 34 | else: |
@@ -48,7 +48,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
48 | return falsevalue | 48 | return falsevalue |
49 | 49 | ||
50 | def both_contain(variable1, variable2, checkvalue, d): | 50 | def both_contain(variable1, variable2, checkvalue, d): |
51 | if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1: | 51 | if d.getVar(variable1,1).find(checkvalue) != -1 and d.getVar(variable2,1).find(checkvalue) != -1: |
52 | return checkvalue | 52 | return checkvalue |
53 | else: | 53 | else: |
54 | return "" | 54 | return "" |
diff --git a/meta/recipes-bsp/grub/grub_0.97.bb b/meta/recipes-bsp/grub/grub_0.97.bb index 0f297ff3ff..6ec66e3b0f 100644 --- a/meta/recipes-bsp/grub/grub_0.97.bb +++ b/meta/recipes-bsp/grub/grub_0.97.bb | |||
@@ -23,7 +23,7 @@ inherit autotools | |||
23 | 23 | ||
24 | python __anonymous () { | 24 | python __anonymous () { |
25 | import re | 25 | import re |
26 | host = bb.data.getVar('HOST_SYS', d, 1) | 26 | host = d.getVar('HOST_SYS', 1) |
27 | if not re.match('i.86.*-linux', host): | 27 | if not re.match('i.86.*-linux', host): |
28 | raise bb.parse.SkipPackage("incompatible with host %s" % host) | 28 | raise bb.parse.SkipPackage("incompatible with host %s" % host) |
29 | } | 29 | } |
diff --git a/meta/recipes-bsp/grub/grub_1.99.bb b/meta/recipes-bsp/grub/grub_1.99.bb index afd10f473f..b6aa827a5b 100644 --- a/meta/recipes-bsp/grub/grub_1.99.bb +++ b/meta/recipes-bsp/grub/grub_1.99.bb | |||
@@ -33,7 +33,7 @@ do_configure() { | |||
33 | 33 | ||
34 | python __anonymous () { | 34 | python __anonymous () { |
35 | import re | 35 | import re |
36 | host = bb.data.getVar('HOST_SYS', d, 1) | 36 | host = d.getVar('HOST_SYS', 1) |
37 | if not re.match('x86.64.*-linux', host) and not re.match('i.86.*-linux', host): | 37 | if not re.match('x86.64.*-linux', host) and not re.match('i.86.*-linux', host): |
38 | raise bb.parse.SkipPackage("incompatible with host %s" % host) | 38 | raise bb.parse.SkipPackage("incompatible with host %s" % host) |
39 | } | 39 | } |
diff --git a/meta/recipes-bsp/uboot/u-boot_2011.03.bb b/meta/recipes-bsp/uboot/u-boot_2011.03.bb index 34655ead59..1ebdbeabca 100644 --- a/meta/recipes-bsp/uboot/u-boot_2011.03.bb +++ b/meta/recipes-bsp/uboot/u-boot_2011.03.bb | |||
@@ -10,7 +10,7 @@ LICENSE = "GPLv2+" | |||
10 | LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ | 10 | LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ |
11 | file://README;beginline=1;endline=22;md5=3a00ef51d3fc96e9d6c1bc4708ccd3b5" | 11 | file://README;beginline=1;endline=22;md5=3a00ef51d3fc96e9d6c1bc4708ccd3b5" |
12 | 12 | ||
13 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}" | 13 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}" |
14 | 14 | ||
15 | # This revision corresponds to the tag "v2011.03" | 15 | # This revision corresponds to the tag "v2011.03" |
16 | # We use the revision in order to avoid having to fetch it from the repo during parse | 16 | # We use the revision in order to avoid having to fetch it from the repo during parse |
diff --git a/meta/recipes-bsp/uboot/u-boot_2011.06.bb b/meta/recipes-bsp/uboot/u-boot_2011.06.bb index acd0944a8a..8ebdbff5ea 100644 --- a/meta/recipes-bsp/uboot/u-boot_2011.06.bb +++ b/meta/recipes-bsp/uboot/u-boot_2011.06.bb | |||
@@ -10,7 +10,7 @@ LICENSE = "GPLv2+" | |||
10 | LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ | 10 | LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ |
11 | file://README;beginline=1;endline=22;md5=5ba4218ac89af7846802d0348df3fb90" | 11 | file://README;beginline=1;endline=22;md5=5ba4218ac89af7846802d0348df3fb90" |
12 | 12 | ||
13 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}" | 13 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}" |
14 | 14 | ||
15 | # This revision corresponds to the tag "v2011.06" | 15 | # This revision corresponds to the tag "v2011.06" |
16 | # We use the revision in order to avoid having to fetch it from the repo during parse | 16 | # We use the revision in order to avoid having to fetch it from the repo during parse |
diff --git a/meta/recipes-bsp/x-load/x-load_git.bb b/meta/recipes-bsp/x-load/x-load_git.bb index 8114fd2389..87fc16f0a5 100644 --- a/meta/recipes-bsp/x-load/x-load_git.bb +++ b/meta/recipes-bsp/x-load/x-load_git.bb | |||
@@ -1,6 +1,6 @@ | |||
1 | require x-load.inc | 1 | require x-load.inc |
2 | 2 | ||
3 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/x-load-git/${MACHINE}" | 3 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/x-load-git/${MACHINE}" |
4 | 4 | ||
5 | LICENSE = "GPLv2+" | 5 | LICENSE = "GPLv2+" |
6 | LIC_FILES_CHKSUM = "file://README;beginline=1;endline=25;md5=ef08d08cb99057bbb5b9d6d0c5a4396f" | 6 | LIC_FILES_CHKSUM = "file://README;beginline=1;endline=25;md5=ef08d08cb99057bbb5b9d6d0c5a4396f" |
diff --git a/meta/recipes-connectivity/connman/connman.inc b/meta/recipes-connectivity/connman/connman.inc index e86bd9fa38..7ee460ec8b 100644 --- a/meta/recipes-connectivity/connman/connman.inc +++ b/meta/recipes-connectivity/connman/connman.inc | |||
@@ -64,5 +64,5 @@ python populate_packages_prepend() { | |||
64 | plugintype = package.split( '-' )[-1] | 64 | plugintype = package.split( '-' )[-1] |
65 | if plugintype in depmap: | 65 | if plugintype in depmap: |
66 | bb.note( "Adding rdependency on %s to package %s" % ( depmap[plugintype], package ) ) | 66 | bb.note( "Adding rdependency on %s to package %s" % ( depmap[plugintype], package ) ) |
67 | bb.data.setVar("RDEPENDS_%s" % package, depmap[plugintype], d) | 67 | d.setVar("RDEPENDS_%s" % package, depmap[plugintype]) |
68 | } | 68 | } |
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc index f8fee51725..0b74ea6780 100644 --- a/meta/recipes-core/busybox/busybox.inc +++ b/meta/recipes-core/busybox/busybox.inc | |||
@@ -49,8 +49,8 @@ def busybox_cfg(feature, features, tokens, cnf, rem): | |||
49 | # Map distro and machine features to config settings | 49 | # Map distro and machine features to config settings |
50 | def features_to_busybox_settings(d): | 50 | def features_to_busybox_settings(d): |
51 | cnf, rem = ([], []) | 51 | cnf, rem = ([], []) |
52 | distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split() | 52 | distro_features = d.getVar('DISTRO_FEATURES', True).split() |
53 | machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split() | 53 | machine_features = d.getVar('MACHINE_FEATURES', True).split() |
54 | busybox_cfg('ipv6', distro_features, 'CONFIG_FEATURE_IPV6', cnf, rem) | 54 | busybox_cfg('ipv6', distro_features, 'CONFIG_FEATURE_IPV6', cnf, rem) |
55 | busybox_cfg('largefile', distro_features, 'CONFIG_LFS', cnf, rem) | 55 | busybox_cfg('largefile', distro_features, 'CONFIG_LFS', cnf, rem) |
56 | busybox_cfg('largefile', distro_features, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem) | 56 | busybox_cfg('largefile', distro_features, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem) |
@@ -79,7 +79,7 @@ DO_IPv6 := ${@base_contains('DISTRO_FEATURES', 'ipv6', 1, 0, d)} | |||
79 | 79 | ||
80 | python () { | 80 | python () { |
81 | if "${OE_DEL}": | 81 | if "${OE_DEL}": |
82 | bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d) | 82 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") |
83 | if "${OE_FEATURES}": | 83 | if "${OE_FEATURES}": |
84 | bb.data.setVar('configmangle_append', | 84 | bb.data.setVar('configmangle_append', |
85 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % | 85 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % |
diff --git a/meta/recipes-core/eglibc/eglibc-options.inc b/meta/recipes-core/eglibc/eglibc-options.inc index 112029dab8..baf4f4b06c 100644 --- a/meta/recipes-core/eglibc/eglibc-options.inc +++ b/meta/recipes-core/eglibc/eglibc-options.inc | |||
@@ -83,7 +83,7 @@ def distro_features_check_deps(distro_features): | |||
83 | # Map distro features to eglibc options settings | 83 | # Map distro features to eglibc options settings |
84 | def features_to_eglibc_settings(d): | 84 | def features_to_eglibc_settings(d): |
85 | cnf = ([]) | 85 | cnf = ([]) |
86 | distro_features = (bb.data.getVar('DISTRO_FEATURES', d, True) or '').split() | 86 | distro_features = (d.getVar('DISTRO_FEATURES', True) or '').split() |
87 | 87 | ||
88 | distro_features_check_deps(distro_features) | 88 | distro_features_check_deps(distro_features) |
89 | 89 | ||
@@ -128,8 +128,8 @@ def features_to_eglibc_settings(d): | |||
128 | 128 | ||
129 | # try to fix disable charsets/locales/locale-code compile fail | 129 | # try to fix disable charsets/locales/locale-code compile fail |
130 | if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features: | 130 | if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features: |
131 | bb.data.setVar('PACKAGE_NO_GCONV', '0', d) | 131 | d.setVar('PACKAGE_NO_GCONV', '0') |
132 | else: | 132 | else: |
133 | bb.data.setVar('PACKAGE_NO_GCONV', '1', d) | 133 | d.setVar('PACKAGE_NO_GCONV', '1') |
134 | 134 | ||
135 | return "\n".join(cnf) | 135 | return "\n".join(cnf) |
diff --git a/meta/recipes-core/eglibc/eglibc-package.inc b/meta/recipes-core/eglibc/eglibc-package.inc index 519a49c75f..020f55876b 100644 --- a/meta/recipes-core/eglibc/eglibc-package.inc +++ b/meta/recipes-core/eglibc/eglibc-package.inc | |||
@@ -8,10 +8,10 @@ | |||
8 | 8 | ||
9 | python __anonymous () { | 9 | python __anonymous () { |
10 | import bb, re | 10 | import bb, re |
11 | uc_os = (re.match('.*uclibc*', bb.data.getVar('TARGET_OS', d, 1)) != None) | 11 | uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', 1)) != None) |
12 | if uc_os: | 12 | if uc_os: |
13 | raise bb.parse.SkipPackage("incompatible with target %s" % | 13 | raise bb.parse.SkipPackage("incompatible with target %s" % |
14 | bb.data.getVar('TARGET_OS', d, 1)) | 14 | d.getVar('TARGET_OS', 1)) |
15 | } | 15 | } |
16 | 16 | ||
17 | # Set this to zero if you don't want ldconfig in the output package | 17 | # Set this to zero if you don't want ldconfig in the output package |
diff --git a/meta/recipes-core/eglibc/eglibc_2.13.bb b/meta/recipes-core/eglibc/eglibc_2.13.bb index fc8ac64a1c..f076ae7c7f 100644 --- a/meta/recipes-core/eglibc/eglibc_2.13.bb +++ b/meta/recipes-core/eglibc/eglibc_2.13.bb | |||
@@ -53,10 +53,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR | |||
53 | 53 | ||
54 | python __anonymous () { | 54 | python __anonymous () { |
55 | import bb, re | 55 | import bb, re |
56 | uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None) | 56 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) |
57 | if uc_os: | 57 | if uc_os: |
58 | raise bb.parse.SkipPackage("incompatible with target %s" % | 58 | raise bb.parse.SkipPackage("incompatible with target %s" % |
59 | bb.data.getVar('TARGET_OS', d, 1)) | 59 | d.getVar('TARGET_OS', 1)) |
60 | } | 60 | } |
61 | 61 | ||
62 | export libc_cv_slibdir = "${base_libdir}" | 62 | export libc_cv_slibdir = "${base_libdir}" |
diff --git a/meta/recipes-core/eglibc/eglibc_2.14.bb b/meta/recipes-core/eglibc/eglibc_2.14.bb index 571d39d26e..501987525e 100644 --- a/meta/recipes-core/eglibc/eglibc_2.14.bb +++ b/meta/recipes-core/eglibc/eglibc_2.14.bb | |||
@@ -54,10 +54,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR | |||
54 | 54 | ||
55 | python __anonymous () { | 55 | python __anonymous () { |
56 | import bb, re | 56 | import bb, re |
57 | uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None) | 57 | uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) |
58 | if uc_os: | 58 | if uc_os: |
59 | raise bb.parse.SkipPackage("incompatible with target %s" % | 59 | raise bb.parse.SkipPackage("incompatible with target %s" % |
60 | bb.data.getVar('TARGET_OS', d, 1)) | 60 | d.getVar('TARGET_OS', 1)) |
61 | } | 61 | } |
62 | 62 | ||
63 | export libc_cv_slibdir = "${base_libdir}" | 63 | export libc_cv_slibdir = "${base_libdir}" |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb index 0efce406e0..634a4e4f4a 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb +++ b/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb | |||
@@ -7,7 +7,7 @@ DEPENDS += "libffi python-argparse-native" | |||
7 | DEPENDS_virtclass-native += "libffi-native python-argparse-native" | 7 | DEPENDS_virtclass-native += "libffi-native python-argparse-native" |
8 | DEPENDS_virtclass-nativesdk += "libffi-nativesdk python-argparse-native zlib-nativesdk" | 8 | DEPENDS_virtclass-nativesdk += "libffi-nativesdk python-argparse-native zlib-nativesdk" |
9 | 9 | ||
10 | SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" | 10 | SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}" |
11 | 11 | ||
12 | QSORT_PATCH = "file://remove.test.for.qsort_r.patch" | 12 | QSORT_PATCH = "file://remove.test.for.qsort_r.patch" |
13 | QSORT_PATCH_virtclass-native = "" | 13 | QSORT_PATCH_virtclass-native = "" |
diff --git a/meta/recipes-core/libxml/libxml2.inc b/meta/recipes-core/libxml/libxml2.inc index 6f79333b91..1f7a4e6ec8 100644 --- a/meta/recipes-core/libxml/libxml2.inc +++ b/meta/recipes-core/libxml/libxml2.inc | |||
@@ -33,8 +33,8 @@ export LDFLAGS += "-ldl" | |||
33 | 33 | ||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | # autonamer would call this libxml2-2, but we don't want that | 35 | # autonamer would call this libxml2-2, but we don't want that |
36 | if bb.data.getVar('DEBIAN_NAMES', d, 1): | 36 | if d.getVar('DEBIAN_NAMES', 1): |
37 | bb.data.setVar('PKG_libxml2', '${MLPREFIX}libxml2', d) | 37 | d.setVar('PKG_libxml2', '${MLPREFIX}libxml2') |
38 | } | 38 | } |
39 | 39 | ||
40 | PACKAGES += "${PN}-utils" | 40 | PACKAGES += "${PN}-utils" |
diff --git a/meta/recipes-core/tasks/task-base.bb b/meta/recipes-core/tasks/task-base.bb index 99b7e17a69..e0960b5e0e 100644 --- a/meta/recipes-core/tasks/task-base.bb +++ b/meta/recipes-core/tasks/task-base.bb | |||
@@ -126,17 +126,17 @@ python __anonymous () { | |||
126 | 126 | ||
127 | import bb | 127 | import bb |
128 | 128 | ||
129 | distro_features = set(bb.data.getVar("DISTRO_FEATURES", d, 1).split()) | 129 | distro_features = set(d.getVar("DISTRO_FEATURES", 1).split()) |
130 | machine_features= set(bb.data.getVar("MACHINE_FEATURES", d, 1).split()) | 130 | machine_features= set(d.getVar("MACHINE_FEATURES", 1).split()) |
131 | 131 | ||
132 | if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): | 132 | if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): |
133 | bb.data.setVar("ADD_BT", "task-base-bluetooth", d) | 133 | d.setVar("ADD_BT", "task-base-bluetooth") |
134 | 134 | ||
135 | if "wifi" in distro_features and not "wifi" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): | 135 | if "wifi" in distro_features and not "wifi" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): |
136 | bb.data.setVar("ADD_WIFI", "task-base-wifi", d) | 136 | d.setVar("ADD_WIFI", "task-base-wifi") |
137 | 137 | ||
138 | if "3g" in distro_features and not "3g" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): | 138 | if "3g" in distro_features and not "3g" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): |
139 | bb.data.setVar("ADD_3G", "task-base-3g", d) | 139 | d.setVar("ADD_3G", "task-base-3g") |
140 | } | 140 | } |
141 | 141 | ||
142 | # | 142 | # |
diff --git a/meta/recipes-core/tasks/task-core-sdk.bb b/meta/recipes-core/tasks/task-core-sdk.bb index 5743631787..a74de01b07 100644 --- a/meta/recipes-core/tasks/task-core-sdk.bb +++ b/meta/recipes-core/tasks/task-core-sdk.bb | |||
@@ -50,7 +50,7 @@ RDEPENDS_task-core-sdk = "\ | |||
50 | 50 | ||
51 | #python generate_sdk_pkgs () { | 51 | #python generate_sdk_pkgs () { |
52 | # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] | 52 | # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] |
53 | # pkgs = bb.data.getVar('PACKAGES', d, 1).split() | 53 | # pkgs = d.getVar('PACKAGES', 1).split() |
54 | # for pkg in poky_pkgs.split(): | 54 | # for pkg in poky_pkgs.split(): |
55 | # newpkg = pkg.replace('task-core', 'task-core-sdk') | 55 | # newpkg = pkg.replace('task-core', 'task-core-sdk') |
56 | # | 56 | # |
@@ -79,9 +79,9 @@ RDEPENDS_task-core-sdk = "\ | |||
79 | # if packaged('%s-dev' % name, d): | 79 | # if packaged('%s-dev' % name, d): |
80 | # rreclist.append('%s-dev' % name) | 80 | # rreclist.append('%s-dev' % name) |
81 | # | 81 | # |
82 | # oldrrec = bb.data.getVar('RRECOMMENDS_%s' % newpkg, d) or '' | 82 | # oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or '' |
83 | # bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d) | 83 | # bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d) |
84 | # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, bb.data.getVar('RRECOMMENDS_%s' % newpkg, d))) | 84 | # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg))) |
85 | # | 85 | # |
86 | # # bb.note('pkgs is %s' % pkgs) | 86 | # # bb.note('pkgs is %s' % pkgs) |
87 | # bb.data.setVar('PACKAGES', ' '.join(pkgs), d) | 87 | # bb.data.setVar('PACKAGES', ' '.join(pkgs), d) |
diff --git a/meta/recipes-core/uclibc/uclibc-config.inc b/meta/recipes-core/uclibc/uclibc-config.inc index 697164c008..a30188d209 100644 --- a/meta/recipes-core/uclibc/uclibc-config.inc +++ b/meta/recipes-core/uclibc/uclibc-config.inc | |||
@@ -35,7 +35,7 @@ def map_uclibc_arch(a, d): | |||
35 | """Return the uClibc architecture for the given TARGET_ARCH.""" | 35 | """Return the uClibc architecture for the given TARGET_ARCH.""" |
36 | import re | 36 | import re |
37 | 37 | ||
38 | valid_archs = bb.data.getVar('valid_archs', d, 1).split() | 38 | valid_archs = d.getVar('valid_archs', 1).split() |
39 | 39 | ||
40 | if re.match('^(arm|sa110).*', a): return 'arm' | 40 | if re.match('^(arm|sa110).*', a): return 'arm' |
41 | elif re.match('^(i.86|athlon)$', a): return 'i386' | 41 | elif re.match('^(i.86|athlon)$', a): return 'i386' |
@@ -50,14 +50,14 @@ def map_uclibc_arch(a, d): | |||
50 | else: | 50 | else: |
51 | bb.error("cannot map '%s' to a uClibc architecture" % a) | 51 | bb.error("cannot map '%s' to a uClibc architecture" % a) |
52 | 52 | ||
53 | export UCLIBC_ARCH = "${@map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}" | 53 | export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d)}" |
54 | 54 | ||
55 | def map_uclibc_abi(o, d): | 55 | def map_uclibc_abi(o, d): |
56 | """Return the uClibc ABI for the given TARGET_OS.""" | 56 | """Return the uClibc ABI for the given TARGET_OS.""" |
57 | import re | 57 | import re |
58 | 58 | ||
59 | arch = bb.data.getVar('TARGET_ARCH', d, 1) | 59 | arch = d.getVar('TARGET_ARCH', 1) |
60 | if map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d) == "arm": | 60 | if map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d) == "arm": |
61 | if re.match('.*eabi$', o): return 'ARM_EABI' | 61 | if re.match('.*eabi$', o): return 'ARM_EABI' |
62 | else: return 'ARM_OABI' | 62 | else: return 'ARM_OABI' |
63 | # FIXME: This is inaccurate! Handle o32, n32, n64 | 63 | # FIXME: This is inaccurate! Handle o32, n32, n64 |
@@ -65,7 +65,7 @@ def map_uclibc_abi(o, d): | |||
65 | elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' | 65 | elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' |
66 | return "" | 66 | return "" |
67 | 67 | ||
68 | export UCLIBC_ABI = "${@map_uclibc_abi(bb.data.getVar('TARGET_OS', d, 1), d)}" | 68 | export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', 1), d)}" |
69 | 69 | ||
70 | def map_uclibc_endian(a, d): | 70 | def map_uclibc_endian(a, d): |
71 | """Return the uClibc endianess for the given TARGET_ARCH.""" | 71 | """Return the uClibc endianess for the given TARGET_ARCH.""" |
@@ -79,7 +79,7 @@ def map_uclibc_endian(a, d): | |||
79 | return 'BIG' | 79 | return 'BIG' |
80 | return 'LITTLE' | 80 | return 'LITTLE' |
81 | 81 | ||
82 | export UCLIBC_ENDIAN = "${@map_uclibc_endian(bb.data.getVar('TARGET_ARCH', d, 1), d)}" | 82 | export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', 1), d)}" |
83 | 83 | ||
84 | # internal helper | 84 | # internal helper |
85 | def uclibc_cfg(feature, features, tokens, cnf, rem): | 85 | def uclibc_cfg(feature, features, tokens, cnf, rem): |
@@ -94,8 +94,8 @@ def uclibc_cfg(feature, features, tokens, cnf, rem): | |||
94 | # Map distro and machine features to config settings | 94 | # Map distro and machine features to config settings |
95 | def features_to_uclibc_settings(d): | 95 | def features_to_uclibc_settings(d): |
96 | cnf, rem = ([], []) | 96 | cnf, rem = ([], []) |
97 | distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split() | 97 | distro_features = d.getVar('DISTRO_FEATURES', True).split() |
98 | machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split() | 98 | machine_features = d.getVar('MACHINE_FEATURES', True).split() |
99 | uclibc_cfg('ipv4', distro_features, 'UCLIBC_HAS_IPV4', cnf, rem) | 99 | uclibc_cfg('ipv4', distro_features, 'UCLIBC_HAS_IPV4', cnf, rem) |
100 | uclibc_cfg('ipv6', distro_features, 'UCLIBC_HAS_IPV6', cnf, rem) | 100 | uclibc_cfg('ipv6', distro_features, 'UCLIBC_HAS_IPV6', cnf, rem) |
101 | uclibc_cfg('largefile', distro_features, 'UCLIBC_HAS_LFS', cnf, rem) | 101 | uclibc_cfg('largefile', distro_features, 'UCLIBC_HAS_LFS', cnf, rem) |
diff --git a/meta/recipes-core/uclibc/uclibc.inc b/meta/recipes-core/uclibc/uclibc.inc index 222c34f383..8438f25450 100644 --- a/meta/recipes-core/uclibc/uclibc.inc +++ b/meta/recipes-core/uclibc/uclibc.inc | |||
@@ -125,9 +125,9 @@ configmangle = '/^KERNEL_HEADERS/d; \ | |||
125 | /^SHARED_LIB_LOADER_PREFIX/d; \ | 125 | /^SHARED_LIB_LOADER_PREFIX/d; \ |
126 | /^UCLIBC_EXTRA_CFLAGS/d; \ | 126 | /^UCLIBC_EXTRA_CFLAGS/d; \ |
127 | s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ | 127 | s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ |
128 | ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][bb.data.getVar("ARM_INSTRUCTION_SET", d, 1) != "arm"]} \ | 128 | ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", 1) != "arm"]} \ |
129 | ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][bb.data.getVar("USE_NLS", d, 1) == "yes"]} \ | 129 | ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", 1) == "yes"]} \ |
130 | ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][bb.data.getVar("TARGET_ARCH", d, 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ | 130 | ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ |
131 | /^CROSS/d; \ | 131 | /^CROSS/d; \ |
132 | /^TARGET_ARCH=/d; \ | 132 | /^TARGET_ARCH=/d; \ |
133 | /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ | 133 | /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ |
@@ -139,7 +139,7 @@ OE_FEATURES := "${@features_to_uclibc_conf(d)}" | |||
139 | OE_DEL := "${@features_to_uclibc_del(d)}" | 139 | OE_DEL := "${@features_to_uclibc_del(d)}" |
140 | python () { | 140 | python () { |
141 | if "${OE_DEL}": | 141 | if "${OE_DEL}": |
142 | bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d) | 142 | d.setVar('configmangle_append', "${OE_DEL}" + "\n") |
143 | if "${OE_FEATURES}": | 143 | if "${OE_FEATURES}": |
144 | bb.data.setVar('configmangle_append', | 144 | bb.data.setVar('configmangle_append', |
145 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % | 145 | "/^### DISTRO FEATURES$/a\\\n%s\n\n" % |
@@ -161,7 +161,7 @@ python () { | |||
161 | ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"), | 161 | ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"), |
162 | d) | 162 | d) |
163 | bb.data.setVar('configmangle_append', | 163 | bb.data.setVar('configmangle_append', |
164 | "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]]), d) | 164 | "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d) |
165 | if "${UCLIBC_ENDIAN}": | 165 | if "${UCLIBC_ENDIAN}": |
166 | bb.data.setVar('configmangle_append', | 166 | bb.data.setVar('configmangle_append', |
167 | "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"), | 167 | "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"), |
diff --git a/meta/recipes-devtools/apt/apt-native.inc b/meta/recipes-devtools/apt/apt-native.inc index b16f99e93c..d4b207b986 100644 --- a/meta/recipes-devtools/apt/apt-native.inc +++ b/meta/recipes-devtools/apt/apt-native.inc | |||
@@ -13,14 +13,14 @@ python do_install () { | |||
13 | } | 13 | } |
14 | 14 | ||
15 | python do_install_config () { | 15 | python do_install_config () { |
16 | indir = os.path.dirname(bb.data.getVar('FILE',d,1)) | 16 | indir = os.path.dirname(d.getVar('FILE',1)) |
17 | infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r') | 17 | infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r') |
18 | data = infile.read() | 18 | data = infile.read() |
19 | infile.close() | 19 | infile.close() |
20 | 20 | ||
21 | data = bb.data.expand(data, d) | 21 | data = bb.data.expand(data, d) |
22 | 22 | ||
23 | outdir = os.path.join(bb.data.getVar('D', d, 1), bb.data.getVar('sysconfdir', d, 1), 'apt') | 23 | outdir = os.path.join(d.getVar('D', 1), d.getVar('sysconfdir', 1), 'apt') |
24 | if not os.path.exists(outdir): | 24 | if not os.path.exists(outdir): |
25 | os.makedirs(outdir) | 25 | os.makedirs(outdir) |
26 | outpath = os.path.join(outdir, 'apt.conf.sample') | 26 | outpath = os.path.join(outdir, 'apt.conf.sample') |
diff --git a/meta/recipes-devtools/apt/apt-package.inc b/meta/recipes-devtools/apt/apt-package.inc index 2e3be3885b..dde916e3da 100644 --- a/meta/recipes-devtools/apt/apt-package.inc +++ b/meta/recipes-devtools/apt/apt-package.inc | |||
@@ -59,15 +59,15 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \ | |||
59 | ${localstatedir} ${sysconfdir} \ | 59 | ${localstatedir} ${sysconfdir} \ |
60 | ${libdir}/dpkg" | 60 | ${libdir}/dpkg" |
61 | FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" | 61 | FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" |
62 | FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))} \ | 62 | FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', 1))} \ |
63 | ${docdir}/apt" | 63 | ${docdir}/apt" |
64 | FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))}" | 64 | FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}" |
65 | FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" | 65 | FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" |
66 | 66 | ||
67 | do_install () { | 67 | do_install () { |
68 | set -x | 68 | set -x |
69 | ${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))} | 69 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', 1))} |
70 | ${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))} | 70 | ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))} |
71 | install -d ${D}${bindir} | 71 | install -d ${D}${bindir} |
72 | install -m 0755 bin/apt-cdrom ${D}${bindir}/ | 72 | install -m 0755 bin/apt-cdrom ${D}${bindir}/ |
73 | install -m 0755 bin/apt-get ${D}${bindir}/ | 73 | install -m 0755 bin/apt-get ${D}${bindir}/ |
diff --git a/meta/recipes-devtools/automake/automake.inc b/meta/recipes-devtools/automake/automake.inc index f217e1432b..370a972566 100644 --- a/meta/recipes-devtools/automake/automake.inc +++ b/meta/recipes-devtools/automake/automake.inc | |||
@@ -10,6 +10,6 @@ SRC_URI = "${GNU_MIRROR}/automake/automake-${PV}.tar.bz2 " | |||
10 | 10 | ||
11 | inherit autotools | 11 | inherit autotools |
12 | 12 | ||
13 | export AUTOMAKE = "${@bb.which('automake', bb.data.getVar('PATH', d, 1))}" | 13 | export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', 1))}" |
14 | 14 | ||
15 | FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" | 15 | FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" |
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc index ec37a101a3..8433c533b7 100644 --- a/meta/recipes-devtools/cmake/cmake.inc +++ b/meta/recipes-devtools/cmake/cmake.inc | |||
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://Copyright.txt;md5=f372516292ff7c33337bf16a74a5f9a8 \ | |||
11 | 11 | ||
12 | INC_PR = "r1" | 12 | INC_PR = "r1" |
13 | 13 | ||
14 | CMAKE_MAJOR_VERSION = "${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}" | 14 | CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV',1).split('.')[0:2])}" |
15 | 15 | ||
16 | SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ | 16 | SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ |
17 | file://support-oe-qt4-tools-names.patch" | 17 | file://support-oe-qt4-tools-names.patch" |
diff --git a/meta/recipes-devtools/cmake/cmake_2.8.5.bb b/meta/recipes-devtools/cmake/cmake_2.8.5.bb index 8e040728c9..3e2a218385 100644 --- a/meta/recipes-devtools/cmake/cmake_2.8.5.bb +++ b/meta/recipes-devtools/cmake/cmake_2.8.5.bb | |||
@@ -13,8 +13,8 @@ SRC_URI[sha256sum] = "5e18bff75f01656c64f553412a8905527e1b85efaf3163c6fb81ea5aac | |||
13 | 13 | ||
14 | # Strip ${prefix} from ${docdir}, set result into docdir_stripped | 14 | # Strip ${prefix} from ${docdir}, set result into docdir_stripped |
15 | python () { | 15 | python () { |
16 | prefix=bb.data.getVar("prefix", d, 1) | 16 | prefix=d.getVar("prefix", 1) |
17 | docdir=bb.data.getVar("docdir", d, 1) | 17 | docdir=d.getVar("docdir", 1) |
18 | 18 | ||
19 | if not docdir.startswith(prefix): | 19 | if not docdir.startswith(prefix): |
20 | raise bb.build.FuncFailed('docdir must contain prefix as its prefix') | 20 | raise bb.build.FuncFailed('docdir must contain prefix as its prefix') |
@@ -23,7 +23,7 @@ python () { | |||
23 | if len(docdir_stripped) > 0 and docdir_stripped[0] == '/': | 23 | if len(docdir_stripped) > 0 and docdir_stripped[0] == '/': |
24 | docdir_stripped = docdir_stripped[1:] | 24 | docdir_stripped = docdir_stripped[1:] |
25 | 25 | ||
26 | bb.data.setVar("docdir_stripped", docdir_stripped, d) | 26 | d.setVar("docdir_stripped", docdir_stripped) |
27 | } | 27 | } |
28 | 28 | ||
29 | EXTRA_OECMAKE=" \ | 29 | EXTRA_OECMAKE=" \ |
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc index f83f4da798..69e0213e9f 100644 --- a/meta/recipes-devtools/gcc/gcc-common.inc +++ b/meta/recipes-devtools/gcc/gcc-common.inc | |||
@@ -7,17 +7,17 @@ NATIVEDEPS = "" | |||
7 | 7 | ||
8 | inherit autotools gettext | 8 | inherit autotools gettext |
9 | 9 | ||
10 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/gcc-${PV}" | 10 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}" |
11 | 11 | ||
12 | def get_gcc_fpu_setting(bb, d): | 12 | def get_gcc_fpu_setting(bb, d): |
13 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 13 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
14 | return "--with-float=soft" | 14 | return "--with-float=soft" |
15 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'ppc-efd' ]: | 15 | if d.getVar('TARGET_FPU', 1) in [ 'ppc-efd' ]: |
16 | return "--enable-e500_double" | 16 | return "--enable-e500_double" |
17 | return "" | 17 | return "" |
18 | 18 | ||
19 | def get_gcc_mips_plt_setting(bb, d): | 19 | def get_gcc_mips_plt_setting(bb, d): |
20 | if bb.data.getVar('TARGET_ARCH', d, 1) in [ 'mips', 'mipsel' ] and 'mplt' in bb.data.getVar('DISTRO_FEATURES',d,1).split() : | 20 | if d.getVar('TARGET_ARCH', 1) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() : |
21 | return "--with-mips-plt" | 21 | return "--with-mips-plt" |
22 | return "" | 22 | return "" |
23 | 23 | ||
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc index 2ddc3d7c9f..ae23e8ee49 100644 --- a/meta/recipes-devtools/gcc/gcc-configure-common.inc +++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc | |||
@@ -27,7 +27,7 @@ EXTRA_OECONF_INTERMEDIATE ?= "" | |||
27 | 27 | ||
28 | GCCMULTILIB = "--disable-multilib" | 28 | GCCMULTILIB = "--disable-multilib" |
29 | 29 | ||
30 | EXTRA_OECONF = "${@['--enable-clocale=generic', ''][bb.data.getVar('USE_NLS', d, 1) != 'no']} \ | 30 | EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', 1) != 'no']} \ |
31 | --with-gnu-ld \ | 31 | --with-gnu-ld \ |
32 | --enable-shared \ | 32 | --enable-shared \ |
33 | --enable-languages=${LANGUAGES} \ | 33 | --enable-languages=${LANGUAGES} \ |
diff --git a/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb b/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb index de07f434b8..f93d342198 100644 --- a/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb +++ b/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb | |||
@@ -8,7 +8,7 @@ DEPENDS_virtclass-native = "perl-native-runtime" | |||
8 | 8 | ||
9 | INHIBIT_DEFAULT_DEPS = "1" | 9 | INHIBIT_DEFAULT_DEPS = "1" |
10 | 10 | ||
11 | FIXEDSRCDATE = "${@bb.data.getVar('FILE', d, 1).split('_')[-1].split('.')[0]}" | 11 | FIXEDSRCDATE = "${@d.getVar('FILE', 1).split('_')[-1].split('.')[0]}" |
12 | PV = "0.1+cvs${FIXEDSRCDATE}" | 12 | PV = "0.1+cvs${FIXEDSRCDATE}" |
13 | PR = "r4" | 13 | PR = "r4" |
14 | 14 | ||
diff --git a/meta/recipes-devtools/intltool/intltool.inc b/meta/recipes-devtools/intltool/intltool.inc index 23a1ee9774..8d983dfc6c 100644 --- a/meta/recipes-devtools/intltool/intltool.inc +++ b/meta/recipes-devtools/intltool/intltool.inc | |||
@@ -2,7 +2,7 @@ DESCRIPTION = "Utility scripts for internationalizing XML" | |||
2 | SECTION = "devel" | 2 | SECTION = "devel" |
3 | LICENSE = "GPLv2" | 3 | LICENSE = "GPLv2" |
4 | 4 | ||
5 | URLV="${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}" | 5 | URLV="${@'.'.join(d.getVar('PV',1).split('.')[0:2])}" |
6 | SRC_URI = "${GNOME_MIRROR}/intltool/${URLV}/intltool-${PV}.tar.bz2" | 6 | SRC_URI = "${GNOME_MIRROR}/intltool/${URLV}/intltool-${PV}.tar.bz2" |
7 | S = "${WORKDIR}/intltool-${PV}" | 7 | S = "${WORKDIR}/intltool-${PV}" |
8 | 8 | ||
diff --git a/meta/recipes-devtools/opkg/opkg.inc b/meta/recipes-devtools/opkg/opkg.inc index a649213916..3e350e8197 100644 --- a/meta/recipes-devtools/opkg/opkg.inc +++ b/meta/recipes-devtools/opkg/opkg.inc | |||
@@ -12,7 +12,7 @@ DEPENDS_virtclass-nativesdk = "curl-nativesdk" | |||
12 | 12 | ||
13 | PE = "1" | 13 | PE = "1" |
14 | 14 | ||
15 | FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/opkg" | 15 | FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/opkg" |
16 | 16 | ||
17 | # Werror gives all kinds bounds issuses with gcc 4.3.3 | 17 | # Werror gives all kinds bounds issuses with gcc 4.3.3 |
18 | do_configure_prepend() { | 18 | do_configure_prepend() { |
diff --git a/meta/recipes-devtools/perl/perl_5.14.2.bb b/meta/recipes-devtools/perl/perl_5.14.2.bb index 788962638d..e864f1e436 100644 --- a/meta/recipes-devtools/perl/perl_5.14.2.bb +++ b/meta/recipes-devtools/perl/perl_5.14.2.bb | |||
@@ -285,7 +285,7 @@ FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore" | |||
285 | # packages (actually the non modules packages and not created too) | 285 | # packages (actually the non modules packages and not created too) |
286 | ALLOW_EMPTY_perl-modules = "1" | 286 | ALLOW_EMPTY_perl-modules = "1" |
287 | PACKAGES_append = " perl-modules " | 287 | PACKAGES_append = " perl-modules " |
288 | RRECOMMENDS_perl-modules = "${@bb.data.getVar('PACKAGES', d, 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" | 288 | RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" |
289 | 289 | ||
290 | python populate_packages_prepend () { | 290 | python populate_packages_prepend () { |
291 | libdir = bb.data.expand('${libdir}/perl/${PV}', d) | 291 | libdir = bb.data.expand('${libdir}/perl/${PV}', d) |
diff --git a/meta/recipes-devtools/python/python-pygobject_2.27.91.bb b/meta/recipes-devtools/python/python-pygobject_2.27.91.bb index da4faec1ca..efc06b45c8 100644 --- a/meta/recipes-devtools/python/python-pygobject_2.27.91.bb +++ b/meta/recipes-devtools/python/python-pygobject_2.27.91.bb | |||
@@ -7,7 +7,7 @@ DEPENDS_virtclass-native = "glib-2.0-native" | |||
7 | RDEPENDS_virtclass-native = "" | 7 | RDEPENDS_virtclass-native = "" |
8 | PR = "r3" | 8 | PR = "r3" |
9 | 9 | ||
10 | MAJ_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" | 10 | MAJ_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}" |
11 | 11 | ||
12 | SRC_URI = "${GNOME_MIRROR}/pygobject/${MAJ_VER}/pygobject-${PV}.tar.bz2" | 12 | SRC_URI = "${GNOME_MIRROR}/pygobject/${MAJ_VER}/pygobject-${PV}.tar.bz2" |
13 | 13 | ||
diff --git a/meta/recipes-devtools/qemu/qemu-targets.inc b/meta/recipes-devtools/qemu/qemu-targets.inc index 550a7fe1b5..1970dda55b 100644 --- a/meta/recipes-devtools/qemu/qemu-targets.inc +++ b/meta/recipes-devtools/qemu/qemu-targets.inc | |||
@@ -4,7 +4,7 @@ | |||
4 | 4 | ||
5 | def get_qemu_target_list(d): | 5 | def get_qemu_target_list(d): |
6 | import bb | 6 | import bb |
7 | archs = bb.data.getVar('QEMU_TARGETS', d, True).split() | 7 | archs = d.getVar('QEMU_TARGETS', True).split() |
8 | targets = "" | 8 | targets = "" |
9 | for arch in ['mips64', 'mips64el', 'ppcemb']: | 9 | for arch in ['mips64', 'mips64el', 'ppcemb']: |
10 | if arch in archs: | 10 | if arch in archs: |
diff --git a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb index 29c7052056..eb80b54fc4 100644 --- a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb +++ b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb | |||
@@ -70,7 +70,7 @@ do_configure_prepend () { | |||
70 | python __anonymous () { | 70 | python __anonymous () { |
71 | import re | 71 | import re |
72 | 72 | ||
73 | pn = bb.data.getVar("PN", d, 1) | 73 | pn = d.getVar("PN", 1) |
74 | if not pn.endswith('-native') and not pn.endswith('-nativesdk'): | 74 | if not pn.endswith('-native') and not pn.endswith('-nativesdk'): |
75 | raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only") | 75 | raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only") |
76 | } | 76 | } |
diff --git a/meta/recipes-extended/cups/cups14.inc b/meta/recipes-extended/cups/cups14.inc index 48f493d6c7..8c01caf55e 100644 --- a/meta/recipes-extended/cups/cups14.inc +++ b/meta/recipes-extended/cups/cups14.inc | |||
@@ -58,7 +58,7 @@ fakeroot do_install () { | |||
58 | 58 | ||
59 | python do_package_append() { | 59 | python do_package_append() { |
60 | # Change permissions back the way they were, they probably had a reason... | 60 | # Change permissions back the way they were, they probably had a reason... |
61 | workdir = bb.data.getVar('WORKDIR', d, 1) | 61 | workdir = d.getVar('WORKDIR', 1) |
62 | os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir) | 62 | os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir) |
63 | } | 63 | } |
64 | 64 | ||
diff --git a/meta/recipes-extended/lsof/lsof_4.85.bb b/meta/recipes-extended/lsof/lsof_4.85.bb index 96bc0c54bc..8f25e24a50 100644 --- a/meta/recipes-extended/lsof/lsof_4.85.bb +++ b/meta/recipes-extended/lsof/lsof_4.85.bb | |||
@@ -17,10 +17,10 @@ LIC_FILES_CHKSUM = "file://${S}/00README;beginline=645;endline=679;md5=e0108f781 | |||
17 | 17 | ||
18 | python do_unpack () { | 18 | python do_unpack () { |
19 | bb.build.exec_func('base_do_unpack', d) | 19 | bb.build.exec_func('base_do_unpack', d) |
20 | src_uri = bb.data.getVar('SRC_URI', d) | 20 | src_uri = d.getVar('SRC_URI') |
21 | bb.data.setVar('SRC_URI', '${LOCALSRC}', d) | 21 | d.setVar('SRC_URI', '${LOCALSRC}') |
22 | bb.build.exec_func('base_do_unpack', d) | 22 | bb.build.exec_func('base_do_unpack', d) |
23 | bb.data.setVar('SRC_URI', src_uri, d) | 23 | d.setVar('SRC_URI', src_uri) |
24 | } | 24 | } |
25 | 25 | ||
26 | export LSOF_OS = "${TARGET_OS}" | 26 | export LSOF_OS = "${TARGET_OS}" |
diff --git a/meta/recipes-extended/pam/libpam_1.1.4.bb b/meta/recipes-extended/pam/libpam_1.1.4.bb index 868bffd960..afe17cc932 100644 --- a/meta/recipes-extended/pam/libpam_1.1.4.bb +++ b/meta/recipes-extended/pam/libpam_1.1.4.bb | |||
@@ -54,10 +54,10 @@ python populate_packages_prepend () { | |||
54 | 54 | ||
55 | def pam_plugin_append_file(pn, dir, file): | 55 | def pam_plugin_append_file(pn, dir, file): |
56 | nf = os.path.join(dir, file) | 56 | nf = os.path.join(dir, file) |
57 | of = bb.data.getVar('FILES_' + pn, d, True) | 57 | of = d.getVar('FILES_' + pn, True) |
58 | if of: | 58 | if of: |
59 | nf = of + " " + nf | 59 | nf = of + " " + nf |
60 | bb.data.setVar('FILES_' + pn, nf, d) | 60 | d.setVar('FILES_' + pn, nf) |
61 | 61 | ||
62 | dvar = bb.data.expand('${WORKDIR}/package', d, True) | 62 | dvar = bb.data.expand('${WORKDIR}/package', d, True) |
63 | pam_libdir = bb.data.expand('${base_libdir}/security', d) | 63 | pam_libdir = bb.data.expand('${base_libdir}/security', d) |
diff --git a/meta/recipes-extended/zip/zip.inc b/meta/recipes-extended/zip/zip.inc index 9550447264..8cce146b18 100644 --- a/meta/recipes-extended/zip/zip.inc +++ b/meta/recipes-extended/zip/zip.inc | |||
@@ -5,7 +5,7 @@ SECTION = "console/utils" | |||
5 | LICENSE = "Info-ZIP" | 5 | LICENSE = "Info-ZIP" |
6 | LIC_FILES_CHKSUM = "file://LICENSE;md5=04d43c5d70b496c032308106e26ae17d" | 6 | LIC_FILES_CHKSUM = "file://LICENSE;md5=04d43c5d70b496c032308106e26ae17d" |
7 | 7 | ||
8 | SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@bb.data.getVar('PV',d,1).replace('.', '')}.tgz" | 8 | SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@d.getVar('PV',1).replace('.', '')}.tgz" |
9 | 9 | ||
10 | EXTRA_OEMAKE = "'CC=${CC}' 'BIND=${CC}' 'AS=${CC} -c' 'CPP=${CPP}' \ | 10 | EXTRA_OEMAKE = "'CC=${CC}' 'BIND=${CC}' 'AS=${CC} -c' 'CPP=${CPP}' \ |
11 | 'CFLAGS=-I. -DUNIX ${CFLAGS}' 'INSTALL=install' \ | 11 | 'CFLAGS=-I. -DUNIX ${CFLAGS}' 'INSTALL=install' \ |
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb index 9fabbe2c46..b33eca9682 100644 --- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb +++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb | |||
@@ -56,7 +56,7 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*" | |||
56 | PACKAGES_DYNAMIC_virtclass-native = "" | 56 | PACKAGES_DYNAMIC_virtclass-native = "" |
57 | 57 | ||
58 | python populate_packages_prepend () { | 58 | python populate_packages_prepend () { |
59 | postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1) | 59 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) |
60 | 60 | ||
61 | loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d) | 61 | loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d) |
62 | 62 | ||
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb index 3b4acd9ce9..c6077ecb3b 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb | |||
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*" | |||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | import os.path | 35 | import os.path |
36 | 36 | ||
37 | prologue = bb.data.getVar("postinst_prologue", d, 1) | 37 | prologue = d.getVar("postinst_prologue", 1) |
38 | postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1) | 38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) |
39 | 39 | ||
40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
41 | loaders_root = os.path.join(gtk_libdir, 'loaders') | 41 | loaders_root = os.path.join(gtk_libdir, 'loaders') |
@@ -46,6 +46,6 @@ python populate_packages_prepend () { | |||
46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
48 | 48 | ||
49 | if (bb.data.getVar('DEBIAN_NAMES', d, 1)): | 49 | if (d.getVar('DEBIAN_NAMES', 1)): |
50 | bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d) | 50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
51 | } | 51 | } |
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb index 371eda3814..5fcb576bbe 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb | |||
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*" | |||
34 | python populate_packages_prepend () { | 34 | python populate_packages_prepend () { |
35 | import os.path | 35 | import os.path |
36 | 36 | ||
37 | prologue = bb.data.getVar("postinst_prologue", d, 1) | 37 | prologue = d.getVar("postinst_prologue", 1) |
38 | postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1) | 38 | postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) |
39 | 39 | ||
40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 40 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
41 | loaders_root = os.path.join(gtk_libdir, 'loaders') | 41 | loaders_root = os.path.join(gtk_libdir, 'loaders') |
@@ -46,6 +46,6 @@ python populate_packages_prepend () { | |||
46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 46 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 47 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
48 | 48 | ||
49 | if (bb.data.getVar('DEBIAN_NAMES', d, 1)): | 49 | if (d.getVar('DEBIAN_NAMES', 1)): |
50 | bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d) | 50 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
51 | } | 51 | } |
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb b/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb index 01f4a11bca..cd5c8cb116 100644 --- a/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb +++ b/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb | |||
@@ -40,7 +40,7 @@ PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*" | |||
40 | python populate_packages_prepend () { | 40 | python populate_packages_prepend () { |
41 | import os.path | 41 | import os.path |
42 | 42 | ||
43 | prologue = bb.data.getVar("postinst_prologue", d, 1) | 43 | prologue = d.getVar("postinst_prologue", 1) |
44 | 44 | ||
45 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) | 45 | gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) |
46 | immodules_root = os.path.join(gtk_libdir, 'immodules') | 46 | immodules_root = os.path.join(gtk_libdir, 'immodules') |
@@ -49,6 +49,6 @@ python populate_packages_prepend () { | |||
49 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') | 49 | do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') |
50 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') | 50 | do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') |
51 | 51 | ||
52 | if (bb.data.getVar('DEBIAN_NAMES', d, 1)): | 52 | if (d.getVar('DEBIAN_NAMES', 1)): |
53 | bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d) | 53 | d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') |
54 | } | 54 | } |
diff --git a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb index bbf52be266..233a6ee773 100644 --- a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb +++ b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb | |||
@@ -31,8 +31,8 @@ inherit gnome | |||
31 | python populate_packages_prepend() { | 31 | python populate_packages_prepend() { |
32 | import os.path | 32 | import os.path |
33 | 33 | ||
34 | engines_root = os.path.join(bb.data.getVar('libdir', d, 1), "gtk-2.0/2.10.0/engines") | 34 | engines_root = os.path.join(d.getVar('libdir', 1), "gtk-2.0/2.10.0/engines") |
35 | themes_root = os.path.join(bb.data.getVar('datadir', d, 1), "themes") | 35 | themes_root = os.path.join(d.getVar('datadir', 1), "themes") |
36 | 36 | ||
37 | do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='') | 37 | do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='') |
38 | do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='') | 38 | do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='') |
diff --git a/meta/recipes-graphics/cairo/cairo-fpu.inc b/meta/recipes-graphics/cairo/cairo-fpu.inc index bdaf789799..8c0ecfde94 100644 --- a/meta/recipes-graphics/cairo/cairo-fpu.inc +++ b/meta/recipes-graphics/cairo/cairo-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_cairo_fpu_setting(bb, d): | 2 | def get_cairo_fpu_setting(bb, d): |
3 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
4 | return "--disable-some-floating-point" | 4 | return "--disable-some-floating-point" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-graphics/clutter/clutter-fpu.inc b/meta/recipes-graphics/clutter/clutter-fpu.inc index 21a16feac2..dfa933de5c 100644 --- a/meta/recipes-graphics/clutter/clutter-fpu.inc +++ b/meta/recipes-graphics/clutter/clutter-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_clutter_fpu_setting(bb, d): | 2 | def get_clutter_fpu_setting(bb, d): |
3 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
4 | return "--without-fpu" | 4 | return "--without-fpu" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb index 03a1bc86c2..0e213909e3 100644 --- a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb +++ b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb | |||
@@ -3,7 +3,7 @@ inherit native | |||
3 | DEPENDS = "freetype-native expat-native zlib-native" | 3 | DEPENDS = "freetype-native expat-native zlib-native" |
4 | 4 | ||
5 | EXTRA_OEMAKE = "" | 5 | EXTRA_OEMAKE = "" |
6 | EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % bb.data.getVar('STAGING_BINDIR', d, 1)][os.path.isfile('%s/freetype-config' % bb.data.getVar('STAGING_BINDIR', d, 1))]}" | 6 | EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', 1)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', 1))]}" |
7 | 7 | ||
8 | do_install_append () { | 8 | do_install_append () { |
9 | install -d ${D}${bindir}/ | 9 | install -d ${D}${bindir}/ |
diff --git a/meta/recipes-graphics/mesa/mesa-dri.inc b/meta/recipes-graphics/mesa/mesa-dri.inc index fcce25996d..3687648999 100644 --- a/meta/recipes-graphics/mesa/mesa-dri.inc +++ b/meta/recipes-graphics/mesa/mesa-dri.inc | |||
@@ -13,7 +13,7 @@ EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gal | |||
13 | python populate_packages_prepend() { | 13 | python populate_packages_prepend() { |
14 | import os.path | 14 | import os.path |
15 | 15 | ||
16 | dri_drivers_root = os.path.join(bb.data.getVar('libdir', d, 1), "dri") | 16 | dri_drivers_root = os.path.join(d.getVar('libdir', 1), "dri") |
17 | 17 | ||
18 | do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='') | 18 | do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='') |
19 | } | 19 | } |
diff --git a/meta/recipes-graphics/pango/pango.inc b/meta/recipes-graphics/pango/pango.inc index a8e99e2f6d..6d94e02878 100644 --- a/meta/recipes-graphics/pango/pango.inc +++ b/meta/recipes-graphics/pango/pango.inc | |||
@@ -39,7 +39,7 @@ fi | |||
39 | } | 39 | } |
40 | 40 | ||
41 | python populate_packages_prepend () { | 41 | python populate_packages_prepend () { |
42 | prologue = bb.data.getVar("postinst_prologue", d, 1) | 42 | prologue = d.getVar("postinst_prologue", 1) |
43 | 43 | ||
44 | modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d) | 44 | modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d) |
45 | 45 | ||
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb index 0fb10e6af0..0f9a1b3c9b 100644 --- a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb +++ b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb | |||
@@ -26,8 +26,8 @@ XORG_PN = "libXft" | |||
26 | BBCLASSEXTEND = "native nativesdk" | 26 | BBCLASSEXTEND = "native nativesdk" |
27 | 27 | ||
28 | python () { | 28 | python () { |
29 | if bb.data.getVar('DEBIAN_NAMES', d, 1): | 29 | if d.getVar('DEBIAN_NAMES', 1): |
30 | bb.data.setVar('PKG_${PN}', '${MLPREFIX}libxft2', d) | 30 | d.setVar('PKG_${PN}', '${MLPREFIX}libxft2') |
31 | } | 31 | } |
32 | 32 | ||
33 | FILES_${PN} = "${libdir}/lib*${SOLIBS}" | 33 | FILES_${PN} = "${libdir}/lib*${SOLIBS}" |
diff --git a/meta/recipes-kernel/linux/linux-dtb.inc b/meta/recipes-kernel/linux/linux-dtb.inc index eb894562b3..7ec75848dd 100644 --- a/meta/recipes-kernel/linux/linux-dtb.inc +++ b/meta/recipes-kernel/linux/linux-dtb.inc | |||
@@ -5,12 +5,12 @@ KERNEL_DEVICETREE_FLAGS = "-R 8 -p 0x3000" | |||
5 | python __anonymous () { | 5 | python __anonymous () { |
6 | import bb | 6 | import bb |
7 | 7 | ||
8 | devicetree = bb.data.getVar("KERNEL_DEVICETREE", d, 1) or '' | 8 | devicetree = d.getVar("KERNEL_DEVICETREE", 1) or '' |
9 | if devicetree: | 9 | if devicetree: |
10 | depends = bb.data.getVar("DEPENDS", d, 1) | 10 | depends = d.getVar("DEPENDS", 1) |
11 | bb.data.setVar("DEPENDS", "%s dtc-native" % depends, d) | 11 | d.setVar("DEPENDS", "%s dtc-native" % depends) |
12 | packages = bb.data.getVar("PACKAGES", d, 1) | 12 | packages = d.getVar("PACKAGES", 1) |
13 | bb.data.setVar("PACKAGES", "%s kernel-devicetree" % packages, d) | 13 | d.setVar("PACKAGES", "%s kernel-devicetree" % packages) |
14 | } | 14 | } |
15 | 15 | ||
16 | do_install_append() { | 16 | do_install_append() { |
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb b/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb index d92a902a9a..26423255c7 100644 --- a/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb +++ b/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb | |||
@@ -27,7 +27,7 @@ python __anonymous () { | |||
27 | import bb, re, string | 27 | import bb, re, string |
28 | 28 | ||
29 | kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-") | 29 | kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-") |
30 | bb.data.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype, d) | 30 | d.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype) |
31 | } | 31 | } |
32 | 32 | ||
33 | SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta" | 33 | SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta" |
diff --git a/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb b/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb index 4891463c03..cbcfa56fec 100644 --- a/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb +++ b/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb | |||
@@ -33,7 +33,7 @@ python __anonymous () { | |||
33 | import bb, re, string | 33 | import bb, re, string |
34 | 34 | ||
35 | kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-") | 35 | kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-") |
36 | bb.data.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype, d) | 36 | d.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype) |
37 | } | 37 | } |
38 | 38 | ||
39 | SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta" | 39 | SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta" |
diff --git a/meta/recipes-multimedia/alsa/alsa-fpu.inc b/meta/recipes-multimedia/alsa/alsa-fpu.inc index ebd3493e6c..2a0c6b0194 100644 --- a/meta/recipes-multimedia/alsa/alsa-fpu.inc +++ b/meta/recipes-multimedia/alsa/alsa-fpu.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | def get_alsa_fpu_setting(bb, d): | 2 | def get_alsa_fpu_setting(bb, d): |
3 | if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: | 3 | if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: |
4 | return "--with-softfloat" | 4 | return "--with-softfloat" |
5 | return "" | 5 | return "" |
6 | 6 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc index 796df14a9e..7949058b13 100644 --- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc +++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc | |||
@@ -2,25 +2,25 @@ LIBV = "0.10" | |||
2 | 2 | ||
3 | python populate_packages_prepend () { | 3 | python populate_packages_prepend () { |
4 | gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d) | 4 | gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d) |
5 | postinst = bb.data.getVar('plugin_postinst', d, 1) | 5 | postinst = d.getVar('plugin_postinst', 1) |
6 | glibdir = bb.data.expand('${libdir}', d) | 6 | glibdir = bb.data.expand('${libdir}', d) |
7 | 7 | ||
8 | do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True) | 8 | do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True) |
9 | do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d)) | 9 | do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d)) |
10 | do_split_packages(d, gst_libdir, 'libgst(.*)\.l?a$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d)) | 10 | do_split_packages(d, gst_libdir, 'libgst(.*)\.l?a$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d)) |
11 | 11 | ||
12 | pn = bb.data.getVar('PN', d, 1) | 12 | pn = d.getVar('PN', 1) |
13 | metapkg = pn + '-meta' | 13 | metapkg = pn + '-meta' |
14 | bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d) | 14 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") |
15 | bb.data.setVar('FILES_' + metapkg, "", d) | 15 | d.setVar('FILES_' + metapkg, "") |
16 | blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ] | 16 | blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ] |
17 | metapkg_rdepends = [] | 17 | metapkg_rdepends = [] |
18 | packages = bb.data.getVar('PACKAGES', d, 1).split() | 18 | packages = d.getVar('PACKAGES', 1).split() |
19 | for pkg in packages[1:]: | 19 | for pkg in packages[1:]: |
20 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'): | 20 | if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'): |
21 | metapkg_rdepends.append(pkg) | 21 | metapkg_rdepends.append(pkg) |
22 | bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) | 22 | bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) |
23 | bb.data.setVar('DESCRIPTION_' + metapkg, pn + ' meta package', d) | 23 | d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package') |
24 | } | 24 | } |
25 | 25 | ||
26 | ALLOW_EMPTY = "1" | 26 | ALLOW_EMPTY = "1" |
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc index c581eae4c6..747b650e7b 100644 --- a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc +++ b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc | |||
@@ -103,7 +103,7 @@ pkg_postrm_${PN}-server() { | |||
103 | } | 103 | } |
104 | 104 | ||
105 | python populate_packages_prepend() { | 105 | python populate_packages_prepend() { |
106 | #bb.data.setVar('PKG_pulseaudio', 'pulseaudio', d) | 106 | #d.setVar('PKG_pulseaudio', 'pulseaudio') |
107 | 107 | ||
108 | plugindir = bb.data.expand('${libdir}/pulse-${PV}/modules/', d) | 108 | plugindir = bb.data.expand('${libdir}/pulse-${PV}/modules/', d) |
109 | do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' ) | 109 | do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' ) |
diff --git a/meta/recipes-qt/qt4/qt4.inc b/meta/recipes-qt/qt4/qt4.inc index 1406a874ec..82ba6377a0 100644 --- a/meta/recipes-qt/qt4/qt4.inc +++ b/meta/recipes-qt/qt4/qt4.inc | |||
@@ -39,8 +39,8 @@ python __anonymous () { | |||
39 | lib_packages = [] | 39 | lib_packages = [] |
40 | dev_packages = [] | 40 | dev_packages = [] |
41 | dbg_packages = [] | 41 | dbg_packages = [] |
42 | for name in bb.data.getVar("QT_LIB_NAMES", d, 1).split(): | 42 | for name in d.getVar("QT_LIB_NAMES", 1).split(): |
43 | pkg = bb.data.getVar("QT_BASE_LIB",d, True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 43 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
44 | # NOTE: the headers for QtAssistantClient are different | 44 | # NOTE: the headers for QtAssistantClient are different |
45 | incname = name.replace("QtAssistantClient", "QtAssistant") | 45 | incname = name.replace("QtAssistantClient", "QtAssistant") |
46 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d) | 46 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d) |
@@ -51,15 +51,15 @@ python __anonymous () { | |||
51 | ${includedir}/${QT_DIR_NAME}/%(incname)s | 51 | ${includedir}/${QT_DIR_NAME}/%(incname)s |
52 | ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d) | 52 | ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d) |
53 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d) | 53 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d) |
54 | bb.data.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg", d) | 54 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") |
55 | lib_packages.append(pkg) | 55 | lib_packages.append(pkg) |
56 | dev_packages.append("%s-dev" % pkg) | 56 | dev_packages.append("%s-dev" % pkg) |
57 | dbg_packages.append("%s-dbg" % pkg) | 57 | dbg_packages.append("%s-dbg" % pkg) |
58 | for name in bb.data.getVar("OTHER_PACKAGES", d, 1).split(): | 58 | for name in d.getVar("OTHER_PACKAGES", 1).split(): |
59 | dbg_packages.append("%s-dbg" % name) | 59 | dbg_packages.append("%s-dbg" % name) |
60 | 60 | ||
61 | for name in bb.data.getVar("QT_EXTRA_LIBS", d, 1).split(): | 61 | for name in d.getVar("QT_EXTRA_LIBS", 1).split(): |
62 | pkg = bb.data.getVar("QT_BASE_LIB",d, True) + name.lower().replace("qt", "").replace("_", "-") + "4" | 62 | pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" |
63 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d) | 63 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d) |
64 | bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl | 64 | bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl |
65 | ${libdir}/lib%(name)s.a | 65 | ${libdir}/lib%(name)s.a |
@@ -68,7 +68,7 @@ python __anonymous () { | |||
68 | ${includedir}/${QT_DIR_NAME}/%(incname)s | 68 | ${includedir}/${QT_DIR_NAME}/%(incname)s |
69 | ${libdir}/pkgconfig/%(name)s.pc""" % locals(), d) | 69 | ${libdir}/pkgconfig/%(name)s.pc""" % locals(), d) |
70 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d) | 70 | bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d) |
71 | bb.data.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg", d) | 71 | d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") |
72 | lib_packages.append(pkg) | 72 | lib_packages.append(pkg) |
73 | dev_packages.append("%s-dev" % pkg) | 73 | dev_packages.append("%s-dev" % pkg) |
74 | dbg_packages.append("%s-dbg" % pkg) | 74 | dbg_packages.append("%s-dbg" % pkg) |
@@ -256,14 +256,14 @@ python populate_packages_prepend() { | |||
256 | do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook) | 256 | do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook) |
257 | # Create a -dbg package as well | 257 | # Create a -dbg package as well |
258 | plugin_dir_dbg = bb.data.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path, d) | 258 | plugin_dir_dbg = bb.data.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path, d) |
259 | packages = bb.data.getVar('PACKAGES',d) | 259 | packages = d.getVar('PACKAGES') |
260 | for (file,package) in dev_packages: | 260 | for (file,package) in dev_packages: |
261 | packages = "%s %s-dbg" % (packages, package) | 261 | packages = "%s %s-dbg" % (packages, package) |
262 | file_name = os.path.join(plugin_dir_dbg, os.path.basename(file)) | 262 | file_name = os.path.join(plugin_dir_dbg, os.path.basename(file)) |
263 | bb.data.setVar("FILES_%s-dbg" % package, file_name, d) | 263 | d.setVar("FILES_%s-dbg" % package, file_name) |
264 | bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d) | 264 | bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d) |
265 | 265 | ||
266 | bb.data.setVar('PACKAGES', packages, d) | 266 | d.setVar('PACKAGES', packages) |
267 | 267 | ||
268 | qtopia_split('accessible', 'accessible', '^libq(.*)\.so$') | 268 | qtopia_split('accessible', 'accessible', '^libq(.*)\.so$') |
269 | qtopia_split('codecs', 'codec', '^libq(.*)\.so$') | 269 | qtopia_split('codecs', 'codec', '^libq(.*)\.so$') |
diff --git a/meta/recipes-qt/qt4/qt4_arch.inc b/meta/recipes-qt/qt4/qt4_arch.inc index e8c8fabe68..46d65a20d7 100644 --- a/meta/recipes-qt/qt4/qt4_arch.inc +++ b/meta/recipes-qt/qt4/qt4_arch.inc | |||
@@ -4,7 +4,7 @@ ARM_INSTRUCTION_SET = "arm" | |||
4 | 4 | ||
5 | def qt_arch(d): | 5 | def qt_arch(d): |
6 | import bb, re | 6 | import bb, re |
7 | arch = bb.data.getVar('TARGET_ARCH', d, 1) | 7 | arch = d.getVar('TARGET_ARCH', 1) |
8 | if re.match("^i.86$", arch): | 8 | if re.match("^i.86$", arch): |
9 | arch = "i386" | 9 | arch = "i386" |
10 | elif re.match("^arm.*", arch): | 10 | elif re.match("^arm.*", arch): |
@@ -17,9 +17,9 @@ def qt_arch(d): | |||
17 | 17 | ||
18 | def qt_endian(d): | 18 | def qt_endian(d): |
19 | import bb | 19 | import bb |
20 | if bb.data.getVar('SITEINFO_ENDIANNESS', d, True) == "le": | 20 | if d.getVar('SITEINFO_ENDIANNESS', True) == "le": |
21 | return "-little-endian" | 21 | return "-little-endian" |
22 | elif bb.data.getVar('SITEINFO_ENDIANNESS', d, True) == "be": | 22 | elif d.getVar('SITEINFO_ENDIANNESS', True) == "be": |
23 | return "-big-endian" | 23 | return "-big-endian" |
24 | else: | 24 | else: |
25 | assert False | 25 | assert False |
diff --git a/meta/recipes-sato/puzzles/oh-puzzles_git.bb b/meta/recipes-sato/puzzles/oh-puzzles_git.bb index 6d95d79e8c..c2e7622245 100644 --- a/meta/recipes-sato/puzzles/oh-puzzles_git.bb +++ b/meta/recipes-sato/puzzles/oh-puzzles_git.bb | |||
@@ -61,10 +61,10 @@ FILES_${PN}-extra = "/usr/games/ /usr/share/applications /etc/gconf/schemas" | |||
61 | python __anonymous () { | 61 | python __anonymous () { |
62 | import bb | 62 | import bb |
63 | var = bb.data.expand("FILES_${PN}", d, 1) | 63 | var = bb.data.expand("FILES_${PN}", d, 1) |
64 | data = bb.data.getVar(var, d, 1) | 64 | data = d.getVar(var, 1) |
65 | for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"): | 65 | for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"): |
66 | data = data + " /usr/games/%s" % name | 66 | data = data + " /usr/games/%s" % name |
67 | data = data + " /usr/share/applications/%s.desktop" % name | 67 | data = data + " /usr/share/applications/%s.desktop" % name |
68 | data = data + " /etc/gconf/schemas/%s.schemas" % name | 68 | data = data + " /etc/gconf/schemas/%s.schemas" % name |
69 | bb.data.setVar(var, data, d) | 69 | d.setVar(var, data) |
70 | } | 70 | } |
diff --git a/meta/recipes-sato/puzzles/puzzles_r9306.bb b/meta/recipes-sato/puzzles/puzzles_r9306.bb index ee0c02537a..c3849309ad 100644 --- a/meta/recipes-sato/puzzles/puzzles_r9306.bb +++ b/meta/recipes-sato/puzzles/puzzles_r9306.bb | |||
@@ -3,7 +3,7 @@ HOMEPAGE="http://www.chiark.greenend.org.uk/~sgtatham/puzzles/" | |||
3 | 3 | ||
4 | DEPENDS = "gtk+ libxt" | 4 | DEPENDS = "gtk+ libxt" |
5 | PR = "r0" | 5 | PR = "r0" |
6 | MOD_PV = "${@bb.data.getVar('PV',d,1)[1:]}" | 6 | MOD_PV = "${@d.getVar('PV',1)[1:]}" |
7 | 7 | ||
8 | LICENSE = "MIT" | 8 | LICENSE = "MIT" |
9 | LIC_FILES_CHKSUM = "file://LICENCE;md5=9928b60f3b78be315b7ab699c1b03ff5" | 9 | LIC_FILES_CHKSUM = "file://LICENCE;md5=9928b60f3b78be315b7ab699c1b03ff5" |
diff --git a/meta/recipes-support/attr/ea-acl.inc b/meta/recipes-support/attr/ea-acl.inc index ce98a95cdb..1fda792b77 100644 --- a/meta/recipes-support/attr/ea-acl.inc +++ b/meta/recipes-support/attr/ea-acl.inc | |||
@@ -34,8 +34,8 @@ FILES_lib${BPN}-doc = "${mandir}/man2 \ | |||
34 | 34 | ||
35 | BBCLASSEXTEND = "native" | 35 | BBCLASSEXTEND = "native" |
36 | # Only append ldflags for target recipe and if USE_NLS is enabled | 36 | # Only append ldflags for target recipe and if USE_NLS is enabled |
37 | LDFLAGS_append_libc-uclibc = "${@['', ' -lintl '][(bb.data.getVar('PN', d, True) == bb.data.getVar('BPN', d , True)) and (bb.data.getVar('USE_NLS', d, True) == 'yes')]}" | 37 | LDFLAGS_append_libc-uclibc = "${@['', ' -lintl '][(d.getVar('PN', True) == d.getVar('BPN', True)) and (d.getVar('USE_NLS', True) == 'yes')]}" |
38 | EXTRA_OECONF_append_libc-uclibc = "${@['', ' --disable-gettext '][(bb.data.getVar('PN', d, True) == bb.data.getVar('BPN', d , True)) and (bb.data.getVar('USE_NLS', d, True) == 'no')]}" | 38 | EXTRA_OECONF_append_libc-uclibc = "${@['', ' --disable-gettext '][(d.getVar('PN', True) == d.getVar('BPN', True)) and (d.getVar('USE_NLS', True) == 'no')]}" |
39 | 39 | ||
40 | fix_symlink () { | 40 | fix_symlink () { |
41 | if test "${libdir}" = "${base_libdir}" ; then | 41 | if test "${libdir}" = "${base_libdir}" ; then |
diff --git a/meta/recipes-support/boost/boost-36.inc b/meta/recipes-support/boost/boost-36.inc index bb267d791c..8b0622f6ba 100644 --- a/meta/recipes-support/boost/boost-36.inc +++ b/meta/recipes-support/boost/boost-36.inc | |||
@@ -11,8 +11,8 @@ LICENSE = "Boost" | |||
11 | PR = "r4" | 11 | PR = "r4" |
12 | 12 | ||
13 | ARM_INSTRUCTION_SET = "arm" | 13 | ARM_INSTRUCTION_SET = "arm" |
14 | BOOST_VER = "${@"_".join(bb.data.getVar("PV",d,1).split("."))}" | 14 | BOOST_VER = "${@"_".join(d.getVar("PV",1).split("."))}" |
15 | BOOST_MAJ = "${@"_".join(bb.data.getVar("PV",d,1).split(".")[0:2])}" | 15 | BOOST_MAJ = "${@"_".join(d.getVar("PV",1).split(".")[0:2])}" |
16 | BOOST_P = "boost_${BOOST_VER}" | 16 | BOOST_P = "boost_${BOOST_VER}" |
17 | 17 | ||
18 | BOOST_LIBS = "\ | 18 | BOOST_LIBS = "\ |
@@ -46,12 +46,12 @@ python __anonymous () { | |||
46 | 46 | ||
47 | packages = [] | 47 | packages = [] |
48 | extras = [] | 48 | extras = [] |
49 | for lib in bb.data.getVar('BOOST_LIBS', d, 1).split( ): | 49 | for lib in d.getVar('BOOST_LIBS', 1).split( ): |
50 | pkg = "boost-%s" % lib.replace("_", "-") | 50 | pkg = "boost-%s" % lib.replace("_", "-") |
51 | extras.append("--with-%s" % lib) | 51 | extras.append("--with-%s" % lib) |
52 | packages.append(pkg) | 52 | packages.append(pkg) |
53 | if not bb.data.getVar("FILES_%s" % pkg, d, 1): | 53 | if not d.getVar("FILES_%s" % pkg, 1): |
54 | bb.data.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib, d) | 54 | d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) |
55 | bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d) | 55 | bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d) |
56 | bb.data.setVar("BJAM_EXTRA", " ".join(extras), d) | 56 | bb.data.setVar("BJAM_EXTRA", " ".join(extras), d) |
57 | } | 57 | } |
diff --git a/scripts/jhbuild/jhbuild2oe.py b/scripts/jhbuild/jhbuild2oe.py index ef292763de..9b31cafb69 100755 --- a/scripts/jhbuild/jhbuild2oe.py +++ b/scripts/jhbuild/jhbuild2oe.py | |||
@@ -161,9 +161,9 @@ class Handlers(object): | |||
161 | # create the package | 161 | # create the package |
162 | d = bb.data.init() | 162 | d = bb.data.init() |
163 | pn = self.packagename(element.attrib.get('id')) | 163 | pn = self.packagename(element.attrib.get('id')) |
164 | bb.data.setVar('PN', pn, d) | 164 | d.setVar('PN', pn) |
165 | bb.data.setVar('DEPENDS', ' '.join(deps), d) | 165 | bb.data.setVar('DEPENDS', ' '.join(deps), d) |
166 | bb.data.setVar('_handler', 'metamodule', d) | 166 | d.setVar('_handler', 'metamodule') |
167 | self.packages.append(d) | 167 | self.packages.append(d) |
168 | 168 | ||
169 | def autotools(self, element, parent): | 169 | def autotools(self, element, parent): |
@@ -181,23 +181,23 @@ class Handlers(object): | |||
181 | if id is None: | 181 | if id is None: |
182 | raise Exception('Error: autotools element has no id attribute.') | 182 | raise Exception('Error: autotools element has no id attribute.') |
183 | pn = self.packagename(id) | 183 | pn = self.packagename(id) |
184 | bb.data.setVar('PN', pn, d) | 184 | d.setVar('PN', pn) |
185 | if deps is not None: | 185 | if deps is not None: |
186 | bb.data.setVar('DEPENDS', ' '.join(deps), d) | 186 | bb.data.setVar('DEPENDS', ' '.join(deps), d) |
187 | 187 | ||
188 | if branch is not None: | 188 | if branch is not None: |
189 | # <branch repo="git.freedesktop.org" module="xorg/xserver"/> | 189 | # <branch repo="git.freedesktop.org" module="xorg/xserver"/> |
190 | repo = os.path.join(self.repositories[branch.attrib.get('repo')], branch.attrib.get('module')) | 190 | repo = os.path.join(self.repositories[branch.attrib.get('repo')], branch.attrib.get('module')) |
191 | bb.data.setVar('SRC_URI', repo, d) | 191 | d.setVar('SRC_URI', repo) |
192 | 192 | ||
193 | checkoutdir = branch.attrib.get('checkoutdir') | 193 | checkoutdir = branch.attrib.get('checkoutdir') |
194 | if checkoutdir is not None: | 194 | if checkoutdir is not None: |
195 | bb.data.setVar('S', os.path.join('${WORKDIR}', checkoutdir), d) | 195 | bb.data.setVar('S', os.path.join('${WORKDIR}', checkoutdir), d) |
196 | 196 | ||
197 | # build class | 197 | # build class |
198 | bb.data.setVar('INHERITS', 'autotools', d) | 198 | d.setVar('INHERITS', 'autotools') |
199 | bb.data.setVarFlag('INHERITS', 'operator', '+=', d) | 199 | d.setVarFlag('INHERITS', 'operator', '+=') |
200 | bb.data.setVar('_handler', 'autotools', d) | 200 | d.setVar('_handler', 'autotools') |
201 | self.packages.append(d) | 201 | self.packages.append(d) |
202 | 202 | ||
203 | class Emitter(object): | 203 | class Emitter(object): |
@@ -209,7 +209,7 @@ class Emitter(object): | |||
209 | def __init__(self, filefunc = None, basedir = None): | 209 | def __init__(self, filefunc = None, basedir = None): |
210 | def _defaultfilefunc(package): | 210 | def _defaultfilefunc(package): |
211 | # return a relative path to the bitbake .bb which will be written | 211 | # return a relative path to the bitbake .bb which will be written |
212 | return bb.data.getVar('PN', package, 1) + '.bb' | 212 | return package.getVar('PN', 1) + '.bb' |
213 | 213 | ||
214 | self.filefunc = filefunc or _defaultfilefunc | 214 | self.filefunc = filefunc or _defaultfilefunc |
215 | self.basedir = basedir or os.path.abspath(os.curdir) | 215 | self.basedir = basedir or os.path.abspath(os.curdir) |
@@ -226,16 +226,16 @@ class Emitter(object): | |||
226 | f.close() | 226 | f.close() |
227 | 227 | ||
228 | for key in bb.data.keys(package): | 228 | for key in bb.data.keys(package): |
229 | fdata = fdata.replace('@@'+key+'@@', bb.data.getVar(key, package)) | 229 | fdata = fdata.replace('@@'+key+'@@', package.getVar(key)) |
230 | else: | 230 | else: |
231 | for key in bb.data.keys(package): | 231 | for key in bb.data.keys(package): |
232 | if key == '_handler': | 232 | if key == '_handler': |
233 | continue | 233 | continue |
234 | elif key == 'INHERITS': | 234 | elif key == 'INHERITS': |
235 | fdata += 'inherit %s\n' % bb.data.getVar('INHERITS', package) | 235 | fdata += 'inherit %s\n' % package.getVar('INHERITS') |
236 | else: | 236 | else: |
237 | oper = bb.data.getVarFlag(key, 'operator', package) or '=' | 237 | oper = package.getVarFlag(key, 'operator') or '=' |
238 | fdata += '%s %s "%s"\n' % (key, oper, bb.data.getVar(key, package)) | 238 | fdata += '%s %s "%s"\n' % (key, oper, package.getVar(key)) |
239 | 239 | ||
240 | if not os.path.exists(os.path.join(self.basedir, os.path.dirname(self.filefunc(package)))): | 240 | if not os.path.exists(os.path.join(self.basedir, os.path.dirname(self.filefunc(package)))): |
241 | os.makedirs(os.path.join(self.basedir, os.path.dirname(self.filefunc(package)))) | 241 | os.makedirs(os.path.join(self.basedir, os.path.dirname(self.filefunc(package)))) |
@@ -254,8 +254,8 @@ def _test(): | |||
254 | 254 | ||
255 | def filefunc(package): | 255 | def filefunc(package): |
256 | # return a relative path to the bitbake .bb which will be written | 256 | # return a relative path to the bitbake .bb which will be written |
257 | src_uri = bb.data.getVar('SRC_URI', package, 1) | 257 | src_uri = package.getVar('SRC_URI', 1) |
258 | filename = bb.data.getVar('PN', package, 1) + '.bb' | 258 | filename = package.getVar('PN', 1) + '.bb' |
259 | if not src_uri: | 259 | if not src_uri: |
260 | return filename | 260 | return filename |
261 | else: | 261 | else: |