Diffstat (limited to 'meta')
-rw-r--r--  meta/classes/autotools.bbclass           |   3
-rw-r--r--  meta/classes/binconfig.bbclass           |   1
-rw-r--r--  meta/classes/cpan-base.bbclass           |   4
-rw-r--r--  meta/classes/cpan_build.bbclass          |   1
-rw-r--r--  meta/classes/distutils-base.bbclass      |   1
-rw-r--r--  meta/classes/gettext.bbclass             |   1
-rw-r--r--  meta/classes/icecc.bbclass               |   9
-rw-r--r--  meta/classes/image.bbclass               |   4
-rw-r--r--  meta/classes/insane.bbclass              |  12
-rw-r--r--  meta/classes/kernel-arch.bbclass         |   2
-rw-r--r--  meta/classes/kernel.bbclass              |  11
-rw-r--r--  meta/classes/linux-kernel-base.bbclass   |   3
-rw-r--r--  meta/classes/openmoko-base.bbclass       |   1
-rw-r--r--  meta/classes/openmoko2.bbclass           |   2
-rw-r--r--  meta/classes/package.bbclass             |  18
-rw-r--r--  meta/classes/package_deb.bbclass         |  11
-rw-r--r--  meta/classes/package_ipk.bbclass         |  11
-rw-r--r--  meta/classes/package_rpm.bbclass         |   8
-rw-r--r--  meta/classes/package_tar.bbclass         |   6
-rw-r--r--  meta/classes/packaged-staging.bbclass    |  13
-rw-r--r--  meta/classes/packagedata.bbclass         |   9
-rw-r--r--  meta/classes/packagehistory.bbclass      |   5
-rw-r--r--  meta/classes/patch.bbclass               |  11
-rw-r--r--  meta/classes/rootfs_ipk.bbclass          |   2
-rw-r--r--  meta/classes/rootfs_rpm.bbclass          |   1
-rw-r--r--  meta/classes/sanity.bbclass              |  13
-rw-r--r--  meta/classes/siteinfo.bbclass            |   4
-rw-r--r--  meta/classes/sourcepkg.bbclass           |   4
-rw-r--r--  meta/classes/tinderclient.bbclass        |   8
-rw-r--r--  meta/classes/update-alternatives.bbclass |   1
-rw-r--r--  meta/classes/update-rc.d.bbclass         |   1
31 files changed, 34 insertions(+), 147 deletions(-)
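
Note: every hunk below makes the same kind of change. Module imports that are redundant inside BitBake python functions are dropped, since BitBake already exposes modules such as bb and os to python code defined in class files, and imports only needed on rare error paths (for example sys) are moved next to the code that uses them. A minimal sketch of the resulting pattern, with hypothetical function names and assuming the usual BitBake behaviour of pre-populating the function namespace:

    def example_deploy_dir(d):
        # 'bb' and 'os' are assumed to be provided by BitBake; no import needed.
        workdir = bb.data.getVar('WORKDIR', d, 1)
        return os.path.join(workdir, "deploy")

    def example_report_failure(d):
        try:
            os.makedirs(bb.data.getVar('WORKDIR', d, 1))
        except OSError:
            # 'sys' is not pre-populated, so import it only where it is used.
            import sys
            raise bb.build.FuncFailed(str(sys.exc_value))
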
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index 4946222f78..57a8b0ed18 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -2,8 +2,6 @@
 AUTOTOOLS_NATIVE_STAGE_INSTALL = "1"

 def autotools_dep_prepend(d):
-    import bb;
-
     if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
         return ''

@@ -29,7 +27,6 @@ acpaths = "default"
 EXTRA_AUTORECONF = "--exclude=autopoint"

 def autotools_set_crosscompiling(d):
-    import bb
     if not bb.data.inherits_class('native', d):
         return " cross_compiling=yes"
     return ""
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 31e5cc5172..73ca4d6219 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
@@ -1,6 +1,5 @@
 # The namespaces can clash here hence the two step replace
 def get_binconfig_mangle(d):
-    import bb.data
     s = "-e ''"
     if not bb.data.inherits_class('native', d):
         optional_quote = r"\(\"\?\)"
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index cc0d11e515..919a9b06a5 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
@@ -9,7 +9,7 @@ RDEPENDS += "${@["perl", ""][(bb.data.inherits_class('native', d))]}"

 # Determine the staged version of perl from the perl configuration file
 def get_perl_version(d):
-    import os, bb, re
+    import re
     cfg = bb.data.expand('${STAGING_DIR}/${HOST_SYS}/perl/config.sh', d)
     try:
         f = open(cfg, 'r')
@@ -33,7 +33,6 @@ def is_new_perl(d):

 # Determine where the library directories are
 def perl_get_libdirs(d):
-    import bb
     libdir = bb.data.getVar('libdir', d, 1)
     if is_new_perl(d) == "yes":
         libdirs = libdir + '/perl5'
@@ -42,7 +41,6 @@ def perl_get_libdirs(d):
     return libdirs

 def is_target(d):
-    import bb
     if not bb.data.inherits_class('native', d):
         return "yes"
     return "no"
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass
index 4aff7c8047..9cfe72bcb7 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes/cpan_build.bbclass
@@ -10,7 +10,6 @@ inherit cpan-base
 # libmodule-build-perl)
 #
 def cpan_build_dep_prepend(d):
-    import bb;
     if bb.data.getVar('CPAN_BUILD_DEPS', d, 1):
         return ''
     pn = bb.data.getVar('PN', d, 1)
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index 5150be76b9..5d6d9981e0 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -3,7 +3,6 @@ DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) =
 RDEPENDS += "python-core"

 def python_dir(d):
-    import os, bb
     staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
     if os.path.exists( "%s/python2.5" % staging_incdir ): return "python2.5"
     if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass
index a1e00e72c1..0b69fa9392 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes/gettext.bbclass
@@ -1,5 +1,4 @@
 def gettext_after_parse(d):
-    import bb
     # Remove the NLS bits if USE_NLS is no.
     if bb.data.getVar('USE_NLS', d, 1) == 'no':
         cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 56cbd6444f..724074231d 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -33,7 +33,6 @@ def icc_determine_gcc_version(gcc):

     'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
     """
-    import os
     return os.popen("%s --version" % gcc ).readline().split()[2]

 def create_cross_env(bb,d):
@@ -47,7 +46,7 @@ def create_cross_env(bb,d):
     if len(prefix) == 0:
         return ""

-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -96,7 +95,7 @@ def create_cross_env(bb,d):

 def create_native_env(bb,d):

-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -137,7 +136,7 @@ def create_native_env(bb,d):

 def create_cross_kernel_env(bb,d):

-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -204,8 +203,6 @@ def create_path(compilers, type, bb, d):
     """
     Create Symlinks for the icecc in the staging directory
     """
-    import os
-
     staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)

     #check if the icecc path is set by the user
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 6b0a14d9ac..c5a2dd2c50 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -26,8 +26,6 @@ PACKAGE_ARCH = "${MACHINE_ARCH}"
 do_rootfs[depends] += "makedevs-native:do_populate_staging fakeroot-native:do_populate_staging ldconfig-native:do_populate_staging"

 python () {
-    import bb
-
     deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or ""
     for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split():
         for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []):
@@ -49,7 +47,6 @@ python () {
 # is searched for in the BBPATH (same as the old version.)
 #
 def get_devtable_list(d):
-    import bb
     devtable = bb.data.getVar('IMAGE_DEVICE_TABLE', d, 1)
     if devtable != None:
         return devtable
@@ -62,7 +59,6 @@ def get_devtable_list(d):
     return str

 def get_imagecmds(d):
-    import bb
     cmds = "\n"
     old_overrides = bb.data.getVar('OVERRIDES', d, 0)
     for type in bb.data.getVar('IMAGE_FSTYPES', d, True).split():
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 56d6a0b887..4b993b55d4 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -169,7 +169,6 @@ def package_qa_get_elf(path, bits32):

 def package_qa_clean_path(path,d):
     """ Remove the common prefix from the path. In this case it is the TMPDIR"""
-    import bb
     return path.replace(bb.data.getVar('TMPDIR',d,True),"")

 def package_qa_make_fatal_error(error_class, name, path,d):
@@ -184,7 +183,6 @@ def package_qa_write_error(error_class, name, path, d):
     """
     Log the error
     """
-    import bb, os

     ERROR_NAMES =[
         "non dev contains .so",
@@ -214,7 +212,6 @@ def package_qa_write_error(error_class, name, path, d):
         f.close()

 def package_qa_handle_error(error_class, error_msg, name, path, d):
-    import bb
     fatal = package_qa_make_fatal_error(error_class, name, path, d)
     if fatal:
         bb.error("QA Issue: %s" % error_msg)
@@ -229,7 +226,6 @@ def package_qa_check_rpath(file,name,d):
     """
     Check for dangerous RPATHs
     """
-    import bb, os
     sane = True
     scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf')
     bad_dir = bb.data.getVar('TMPDIR', d, True) + "/work"
@@ -255,7 +251,6 @@ def package_qa_check_devdbg(path, name,d):
     non dev packages containing
     """

-    import bb, os
     sane = True

     if not "-dev" in name:
@@ -283,7 +278,6 @@ def package_qa_check_arch(path,name,d):
     """
     Check if archs are compatible
     """
-    import bb, os
     sane = True
     target_os = bb.data.getVar('TARGET_OS', d, True)
     target_arch = bb.data.getVar('TARGET_ARCH', d, True)
@@ -322,7 +316,6 @@ def package_qa_check_desktop(path, name, d):
     """
     Run all desktop files through desktop-file-validate.
     """
-    import bb, os
     sane = True
     if path.endswith(".desktop"):
         desktop_file_validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'desktop-file-validate')
@@ -337,7 +330,6 @@ def package_qa_check_buildpaths(path, name, d):
     """
     Check for build paths inside target files and error if not found in the whitelist
     """
-    import bb, os
     sane = True

     # Ignore .debug files, not interesting
@@ -364,7 +356,6 @@ def package_qa_check_staged(path,d):
     to find the one responsible for the errors easily even
     if we look at every .pc and .la file
     """
-    import os, bb

     sane = True
     tmpdir = bb.data.getVar('TMPDIR', d, True)
@@ -402,7 +393,6 @@ def package_qa_check_staged(path,d):

 # Walk over all files in a directory and call func
 def package_qa_walk(path, funcs, package,d):
-    import os
     sane = True

     for root, dirs, files in os.walk(path):
@@ -415,7 +405,6 @@ def package_qa_walk(path, funcs, package,d):
     return sane

 def package_qa_check_rdepends(pkg, workdir, d):
-    import bb
     sane = True
     if not "-dbg" in pkg and not "task-" in pkg and not "-image" in pkg:
         # Copied from package_ipk.bbclass
@@ -496,7 +485,6 @@ python do_qa_staging() {
 addtask qa_configure after do_configure before do_compile
 python do_qa_configure() {
     bb.note("Checking sanity of the config.log file")
-    import os
     for root, dirs, files in os.walk(bb.data.getVar('WORKDIR', d, True)):
         statement = "grep 'CROSS COMPILE Badness:' %s > /dev/null" % \
             os.path.join(root,"config.log")
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 2ce0f9727d..35c26b89ff 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -15,7 +15,7 @@ valid_archs = "alpha cris ia64 \
                avr32 blackfin"

 def map_kernel_arch(a, d):
-    import bb, re
+    import re

     valid_archs = bb.data.getVar('valid_archs', d, 1).split()

diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index b2266bee5d..74ec7d56b1 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -9,9 +9,6 @@ INHIBIT_DEFAULT_DEPS = "1"
 KERNEL_IMAGETYPE ?= "zImage"

 python __anonymous () {
-
-    import bb
-
     kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or ''
     if kerneltype == 'uImage':
         depends = bb.data.getVar("DEPENDS", d, 1)
@@ -271,7 +268,7 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"

 python populate_packages_prepend () {
     def extract_modinfo(file):
-        import os, re
+        import re
         tmpfile = os.tmpnam()
         cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
         os.system(cmd)
@@ -289,7 +286,7 @@ python populate_packages_prepend () {
         return vals

     def parse_depmod():
-        import os, re
+        import re

         dvar = bb.data.getVar('D', d, 1)
         if not dvar:
@@ -343,7 +340,7 @@ python populate_packages_prepend () {
         file = file.replace(bb.data.getVar('D', d, 1) or '', '', 1)

         if module_deps.has_key(file):
-            import os.path, re
+            import re
             dependencies = []
             for i in module_deps[file]:
                 m = re.match(pattern, os.path.basename(i))
@@ -411,7 +408,7 @@ python populate_packages_prepend () {
     postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
     do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))

-    import re, os
+    import re
     metapkg = "kernel-modules"
     bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
     bb.data.setVar('FILES_' + metapkg, "", d)
diff --git a/meta/classes/linux-kernel-base.bbclass b/meta/classes/linux-kernel-base.bbclass
index 4e2e2da373..b3e0fdad7a 100644
--- a/meta/classes/linux-kernel-base.bbclass
+++ b/meta/classes/linux-kernel-base.bbclass
@@ -1,6 +1,6 @@
 # parse kernel ABI version out of <linux/version.h>
 def get_kernelversion(p):
-    import re, os
+    import re

     fn = p + '/include/linux/utsrelease.h'
     if not os.path.isfile(fn):
@@ -30,7+30,6 @@ def get_kernelmajorversion(p):
     return None

 def linux_module_packages(s, d):
-    import bb, os.path
     suffix = ""
     return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))

diff --git a/meta/classes/openmoko-base.bbclass b/meta/classes/openmoko-base.bbclass
index 8643daa7a4..d7be1c2932 100644
--- a/meta/classes/openmoko-base.bbclass
+++ b/meta/classes/openmoko-base.bbclass
@@ -4,7 +4,6 @@ OPENMOKO_RELEASE ?= "OM-2007"
 OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"

 def openmoko_base_get_subdir(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     if section == 'base' or section == 'libs': return ""
     elif section in 'apps tools pim'.split(): return "applications"
diff --git a/meta/classes/openmoko2.bbclass b/meta/classes/openmoko2.bbclass
index ef734e4311..233c721ff7 100644
--- a/meta/classes/openmoko2.bbclass
+++ b/meta/classes/openmoko2.bbclass
@@ -5,12 +5,10 @@ OPENMOKO_RELEASE ?= "OM-2007.2"
 OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"

 def openmoko_two_get_license(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     return "LGPL GPL".split()[section != "libs"]

 def openmoko_two_get_subdir(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     if section == 'base': return ""
     elif section == 'libs': return "libraries"
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 41eeb8d3d1..6d384bebc1 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -29,7 +29,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
     Used in .bb files to split up dynamically generated subpackages of a
     given package, usually plugins or modules.
     """
-    import os, os.path, bb

     dvar = bb.data.getVar('PKGD', d, True)

@@ -117,7 +116,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"

 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
         for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
@@ -135,7 +133,7 @@ def runstrip(file, d):
     # A working 'file' (one which works on the target architecture)
     # is necessary for this stuff to work, hence the addition to do_package[depends]

-    import bb, os, commands, stat
+    import commands, stat

     pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)

@@ -192,8 +190,6 @@
 #

 def get_package_mapping (pkg, d):
-    import bb, os
-
     data = read_subpkgdata(pkg, d)
     key = "PKG_%s" % pkg

@@ -203,8 +199,6 @@ def get_package_mapping (pkg, d):
     return pkg

 def runtime_mapping_rename (varname, d):
-    import bb, os
-
     #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True)))

     new_depends = []
@@ -226,8 +220,6 @@ def runtime_mapping_rename (varname, d):
 #

 python package_do_split_locales() {
-    import os
-
     if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'):
         bb.debug(1, "package requested not splitting locales")
         return
@@ -284,8 +276,6 @@ python package_do_split_locales() {
 }

 python perform_packagecopy () {
-    import os
-
     dest = bb.data.getVar('D', d, True)
     dvar = bb.data.getVar('PKGD', d, True)

@@ -297,7 +287,7 @@ python perform_packagecopy () {
 }

 python populate_packages () {
-    import os, glob, stat, errno, re
+    import glob, stat, errno, re

     workdir = bb.data.getVar('WORKDIR', d, True)
     outdir = bb.data.getVar('DEPLOY_DIR', d, True)
@@ -530,7 +520,7 @@ fi
 SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"

 python package_do_shlibs() {
-    import os, re, os.path
+    import re

     exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
     if exclude_shlibs:
@@ -746,7 +736,7 @@ python package_do_shlibs() {
 }

 python package_do_pkgconfig () {
-    import re, os
+    import re

     packages = bb.data.getVar('PACKAGES', d, True)
     workdir = bb.data.getVar('WORKDIR', d, True)
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index d90939fdb6..5c002465b8 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -15,13 +15,11 @@ DPKG_ARCH_i686 ?= "i386"
 DPKG_ARCH_pentium ?= "i386"

 python package_deb_fn () {
-    from bb import data
     bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
 }

 addtask package_deb_install
 python do_package_deb_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -37,6 +35,7 @@ python do_package_deb_install () {
         os.makedirs(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         raise bb.build.FuncFailed(str(sys.exc_value))

     # update packages file
@@ -67,14 +66,13 @@ python do_package_deb_install () {
 }

 python do_package_deb () {
-    import sys, re, copy
+    import re, copy

     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return

-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_DEB', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_DEB not defined, unable to package")
@@ -135,8 +133,7 @@ python do_package_deb () {
             except ValueError:
                 pass
         if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-            from bb import note
-            note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
             bb.utils.unlockfile(lf)
             continue

@@ -186,6 +183,7 @@ python do_package_deb () {
         for (c, fs) in fields:
             ctrlfile.write(unicode(c % tuple(pullData(fs, localdata))))
     except KeyError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         bb.utils.unlockfile(lf)
         ctrlfile.close()
@@ -252,7 +250,6 @@ python do_package_deb () {
 }

 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
         deps.append('dpkg-native:do_populate_staging')
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 68a8b5c40b..2621fa8e91 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -6,12 +6,10 @@ IPKGCONF_TARGET = "${WORKDIR}/opkg.conf"
 IPKGCONF_SDK = "${WORKDIR}/opkg-sdk.conf"

 python package_ipk_fn () {
-    from bb import data
     bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
 }

 python package_ipk_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -25,6 +23,7 @@ python package_ipk_install () {
         bb.mkdirhier(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         print value
         raise bb.build.FuncFailed
@@ -126,14 +125,13 @@ package_generate_archlist () {
 }

 python do_package_ipk () {
-    import sys, re, copy
+    import re, copy

     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return

-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_IPK not defined, unable to package")
@@ -192,8 +190,7 @@ python do_package_ipk () {
         except ValueError:
             pass
         if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-            from bb import note
-            note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
             bb.utils.unlockfile(lf)
             continue

@@ -234,6 +231,7 @@ python do_package_ipk () {
                 raise KeyError(f)
             ctrlfile.write(c % tuple(pullData(fs, localdata)))
     except KeyError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         ctrlfile.close()
         bb.utils.unlockfile(lf)
@@ -302,7 +300,6 @@ python do_package_ipk () {
 }

 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
         deps.append('opkg-utils-native:do_populate_staging')
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index d291733dcf..49e84678b8 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -12,9 +12,6 @@ RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
 RPM="rpm ${RPMOPTS}"

 python write_specfile() {
-    from bb import data, build
-    import sys
-
     version = bb.data.getVar('PV', d, 1)
     version = version.replace('-', '+')
     bb.data.setVar('RPMPV', version, d)
@@ -55,8 +52,7 @@ python write_specfile() {
         pass

     if not files and bb.data.getVar('ALLOW_EMPTY', d) != "1":
-        from bb import note
-        note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
+        bb.note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
         return

     # output .spec using this metadata store
@@ -159,7 +155,6 @@ python do_package_rpm () {
         bb.error("WORKDIR not defined, unable to package")
         return

-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_RPM', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_RPM not defined, unable to package")
@@ -213,7 +208,6 @@ python do_package_rpm () {
 }

 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split()
         deps.append('rpm-native:do_populate_staging')
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index 876cec6cfe..24a77be93c 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -3,15 +3,12 @@ inherit package
 IMAGE_PKGTYPE ?= "tar"

 python package_tar_fn () {
-    import os
-    from bb import data
     fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PV', d), bb.data.getVar('PR', d)))
     fn = bb.data.expand(fn, d)
     bb.data.setVar('PKGFN', fn, d)
 }

 python package_tar_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -23,6 +20,7 @@ python package_tar_install () {
         bb.mkdirhier(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         print value
         raise bb.build.FuncFailed
@@ -42,7 +40,6 @@ python do_package_tar () {
         bb.error("WORKDIR not defined, unable to package")
         return

-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_TAR not defined, unable to package")
@@ -94,7 +91,6 @@ python do_package_tar () {
 }

 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
         deps.append('tar-native:do_populate_staging')
diff --git a/meta/classes/packaged-staging.bbclass b/meta/classes/packaged-staging.bbclass
index b9d59bbd8a..82a4450bc3 100644
--- a/meta/classes/packaged-staging.bbclass
+++ b/meta/classes/packaged-staging.bbclass
@@ -27,7 +27,6 @@ PSTAGE_NATIVEDEPENDS = "\
 BB_STAMP_WHITELIST = "${PSTAGE_NATIVEDEPENDS}"

 python () {
-    import bb
     pstage_allowed = True

     # These classes encode staging paths into the binary data so can only be
@@ -81,8 +80,6 @@ PSTAGE_LIST_CMD = "${PSTAGE_PKGMANAGER} -f ${PSTAGE_MACHCONFIG} -o ${TMP
 PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"

 def pstage_manualclean(srcname, destvarname, d):
-    import os, bb
-
     src = os.path.join(bb.data.getVar('PSTAGE_TMPDIR_STAGE', d, True), srcname)
     dest = bb.data.getVar(destvarname, d, True)

@@ -95,7 +92,6 @@ def pstage_manualclean(srcname, destvarname, d):
             os.system("rm %s 2> /dev/null" % filepath)

 def pstage_set_pkgmanager(d):
-    import bb
     path = bb.data.getVar("PATH", d, 1)
     pkgmanager = bb.which(path, 'opkg-cl')
     if pkgmanager == "":
@@ -105,8 +101,6 @@ def pstage_set_pkgmanager(d):


 def pstage_cleanpackage(pkgname, d):
-    import os, bb
-
     path = bb.data.getVar("PATH", d, 1)
     pstage_set_pkgmanager(d)
     list_cmd = bb.data.getVar("PSTAGE_LIST_CMD", d, True)
@@ -168,8 +162,6 @@ PSTAGE_TASKS_COVERED = "fetch unpack munge patch configure qa_configure rig_loca
 SCENEFUNCS += "packagestage_scenefunc"

 python packagestage_scenefunc () {
-    import os
-
     if bb.data.getVar("PSTAGING_ACTIVE", d, 1) == "0":
         return

@@ -249,10 +241,7 @@ packagestage_scenefunc[dirs] = "${STAGING_DIR}"

 addhandler packagedstage_stampfixing_eventhandler
 python packagedstage_stampfixing_eventhandler() {
-    from bb.event import getName
-    import os
-
-    if getName(e) == "StampUpdate":
+    if bb.event.getName(e) == "StampUpdate":
         taskscovered = bb.data.getVar("PSTAGE_TASKS_COVERED", e.data, 1).split()
         for (fn, task) in e.targets:
             # strip off 'do_'
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index c9d64d6da2..86f18a9e96 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -1,5 +1,4 @@
 def packaged(pkg, d):
-    import os, bb
     return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)

 def read_pkgdatafile(fn):
@@ -10,7 +9,6 @@ def read_pkgdatafile(fn):
         c = codecs.getdecoder("string_escape")
         return c(str)[0]

-    import os
     if os.access(fn, os.R_OK):
         import re
         f = file(fn, 'r')
@@ -25,7 +23,6 @@ def read_pkgdatafile(fn):
     return pkgdata

 def get_subpkgedata_fn(pkg, d):
-    import bb, os
     archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
     archs.reverse()
     pkgdata = bb.data.expand('${TMPDIR}/pkgdata/', d)
@@ -37,25 +34,20 @@ def get_subpkgedata_fn(pkg, d):
     return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)

 def has_subpkgdata(pkg, d):
-    import bb, os
     return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)

 def read_subpkgdata(pkg, d):
-    import bb
     return read_pkgdatafile(get_subpkgedata_fn(pkg, d))

 def has_pkgdata(pn, d):
-    import bb, os
     fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
     return os.access(fn, os.R_OK)

 def read_pkgdata(pn, d):
-    import bb
     fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
     return read_pkgdatafile(fn)

 python read_subpackage_metadata () {
-    import bb
     data = read_pkgdata(bb.data.getVar('PN', d, 1), d)

     for key in data.keys():
@@ -72,7 +64,6 @@ python read_subpackage_metadata () {
 # Collapse FOO_pkg variables into FOO
 #
 def read_subpkgdata_dict(pkg, d):
-    import bb
     ret = {}
     subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
     for var in subd:
diff --git a/meta/classes/packagehistory.bbclass b/meta/classes/packagehistory.bbclass
index 185ab545ff..492bbac218 100644
--- a/meta/classes/packagehistory.bbclass
+++ b/meta/classes/packagehistory.bbclass
@@ -61,8 +61,6 @@ python emit_pkghistory() {


 def check_pkghistory(pkg, pe, pv, pr, lastversion):
-    import bb
-
     (last_pe, last_pv, last_pr) = lastversion

     bb.debug(2, "Checking package history")
@@ -72,7 +70,6 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion):


 def write_pkghistory(pkg, pe, pv, pr, d):
-    import bb, os
     bb.debug(2, "Writing package history")

     pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
@@ -82,8 +79,6 @@ def write_pkghistory(pkg, pe, pv, pr, d):
         os.makedirs(verpath)

 def write_latestlink(pkg, pe, pv, pr, d):
-    import bb, os
-
     pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)

     def rm_link(path):
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index ba0f19215d..0706a02bc9 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -4,8 +4,6 @@
 QUILTRCFILE ?= "${STAGING_BINDIR_NATIVE}/quiltrc"

 def patch_init(d):
-    import os, sys
-
     class NotFoundError(Exception):
         def __init__(self, path):
             self.path = path
@@ -13,8 +11,6 @@ def patch_init(d):
             return "Error: %s not found." % self.path

     def md5sum(fname):
-        import sys
-
         # when we move to Python 2.5 as minimal supported
         # we can kill that try/except as hashlib is 2.5+
         try:
@@ -76,8 +72,6 @@ def patch_init(d):
         def __str__(self):
             return "Patch Error: %s" % self.msg

-    import bb, bb.data, bb.fetch
-
     class PatchSet(object):
         defaults = {
             "strippath": 1
@@ -251,6 +245,7 @@ def patch_init(d):
             try:
                 output = runcmd(["quilt", "applied"], self.dir)
             except CmdError:
+                import sys
                 if sys.exc_value.output.strip() == "No patches applied":
                     return
                 else:
@@ -364,6 +359,7 @@ def patch_init(d):
             try:
                 self.patchset.Push()
             except Exception:
+                import sys
                 os.chdir(olddir)
                 raise sys.exc_value

@@ -458,9 +454,6 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_staging"
 do_patch[depends] = "${PATCHDEPENDENCY}"

 python patch_do_patch() {
-    import re
-    import bb.fetch
-
     patch_init(d)

     src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass
index aa28cd63f4..065b78b814 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes/rootfs_ipk.bbclass
@@ -159,7 +159,7 @@ ipk_insert_feed_uris () {
 }

 python () {
-    import bb
+
     if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
         flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
         flags = flags.replace("do_package_write_ipk", "")
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 1e8ad6d9ef..da5243ddf2 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -234,7 +234,6 @@ install_all_locales() {
 }

 python () {
-    import bb
     if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
         flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
         flags = flags.replace("do_package_write_rpm", "")
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index e11bdd2218..39f1e22003 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -3,7 +3,6 @@
 #

 def raise_sanity_error(msg):
-    import bb
     bb.fatal(""" Poky's config sanity checker detected a potential misconfiguration.
     Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
     Following is the list of potential problems / advisories:
@@ -11,8 +10,6 @@ def raise_sanity_error(msg):
     %s""" % msg)

 def check_conf_exists(fn, data):
-    import bb, os
-
     bbpath = []
     fn = bb.data.expand(fn, data)
     vbbpath = bb.data.getVar("BBPATH", data)
@@ -26,12 +23,12 @@ def check_conf_exists(fn, data):

 def check_sanity(e):
     from bb import note, error, data, __version__
-    from bb.event import Handled, NotHandled, getName
+
     try:
         from distutils.version import LooseVersion
     except ImportError:
         def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
-    import os, commands
+    import commands

     # Check the bitbake version meets minimum requirements
     minversion = data.getVar('BB_MIN_VERSION', e.data , True)
@@ -163,10 +160,8 @@ def check_sanity(e):

 addhandler check_sanity_eventhandler
 python check_sanity_eventhandler() {
-    from bb import note, error, data, __version__
-    from bb.event import getName
-
-    if getName(e) == "ConfigParsed":
+    from bb.event import Handled, NotHandled
+    if bb.event.getName(e) == "ConfigParsed":
         check_sanity(e)

     return NotHandled
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index 431b81ce29..7b012b7ba7 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -16,8 +16,6 @@
 # If 'what' doesn't exist then an empty value is returned
 #
 def get_siteinfo_list(d):
-    import bb
-
     target = bb.data.getVar('HOST_ARCH', d, 1) + "-" + bb.data.getVar('HOST_OS', d, 1)

     targetinfo = {\
@@ -74,8 +72,6 @@ def get_siteinfo_list(d):
 # 2) ${FILE_DIRNAME}/site-${PV} - app version specific
 #
 def siteinfo_get_files(d):
-    import bb, os
-
     sitefiles = ""

     # Determine which site files to look for
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass
index 390d3684d4..f738553039 100644
--- a/meta/classes/sourcepkg.bbclass
+++ b/meta/classes/sourcepkg.bbclass
@@ -5,8 +5,6 @@ EXCLUDE_FROM ?= ".pc autom4te.cache"
 DISTRO ?= "openembedded"

 def get_src_tree(d):
-    import bb
-    import os, os.path

     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
@@ -56,8 +54,6 @@ sourcepkg_do_archive_bb() {
 }

 python sourcepkg_do_dumpdata() {
-    import os
-    import os.path

     workdir = bb.data.getVar('WORKDIR', d, 1)
     distro = bb.data.getVar('DISTRO', d, 1)
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index bc004efb26..28df0f9505 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -51,8 +51,7 @@ def tinder_format_http_post(d,status,log):
     for the tinderbox to be happy.
     """

-    from bb import data, build
-    import os,random
+    import random

     # the variables we will need to send on this form post
     variables = {
@@ -125,7 +124,6 @@ def tinder_build_start(d):
     report = report[report.find(search)+len(search):]
     report = report[0:report.find("'")]

-    import bb
     bb.note("Machine ID assigned by tinderbox: %s" % report )

     # now we will need to save the machine number
@@ -165,7 +163,6 @@ def tinder_print_info(d):
     """

     from bb import data
-    import os
     # get the local vars

     time = tinder_time_string()
@@ -216,7 +213,6 @@ def tinder_print_env():
     Print the environment variables of this build
     """
     from bb import data
-    import os

     time_start = tinder_time_string()
     time_end = tinder_time_string()
@@ -278,7 +274,7 @@ def tinder_do_tinder_report(event):
     """
     from bb.event import getName
     from bb import data, mkdirhier, build
-    import os, glob
+    import glob

     # variables
     name = getName(event)
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index c63581c5d1..ddbf4c1947 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -11,7 +11,6 @@ update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
 }

 def update_alternatives_after_parse(d):
-    import bb
     if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
         raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
     if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 74053edb89..5a8062cc7c 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -26,7 +26,6 @@ update-rc.d $D ${INITSCRIPT_NAME} remove


 def update_rc_after_parse(d):
-    import bb
     if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
         if bb.data.getVar('INITSCRIPT_NAME', d) == None:
             raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)