261 files changed, 2306 insertions(+), 2306 deletions(-)
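The change is mechanical, applied 2306 times across 261 files: the explicit True second argument is dropped from d.getVar() calls throughout the metadata, because BitBake's datastore now expands variable references by default. The following minimal sketch illustrates the semantics the cleanup relies on; DataStoreSketch is a toy stand-in written for this page, not BitBake's actual DataSmart implementation.

import re

class DataStoreSketch:
    """Toy model of the datastore behaviour this series assumes."""
    def __init__(self):
        self._vars = {}

    def setVar(self, var, value):
        self._vars[var] = value

    def getVar(self, var, expand=True):
        # expand now defaults to True, so getVar('X') == getVar('X', True)
        # and the second argument at nearly every call site is redundant.
        value = self._vars.get(var)
        if expand and isinstance(value, str):
            # Crude ${VAR} interpolation standing in for real expansion.
            value = re.sub(r"\$\{(\w+)\}",
                           lambda m: self.getVar(m.group(1)) or "", value)
        return value

d = DataStoreSketch()
d.setVar("TMPDIR", "/build/tmp")
d.setVar("WORKDIR", "${TMPDIR}/work")
assert d.getVar("WORKDIR") == d.getVar("WORKDIR", True) == "/build/tmp/work"
assert d.getVar("WORKDIR", False) == "${TMPDIR}/work"  # raw value still reachable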
diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass
index ddc2a85..9dce498 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes/allarch.bbclass
@@ -11,7 +11,7 @@ PACKAGE_ARCH = "all"
 python () {
     # Allow this class to be included but overridden - only set
     # the values if we're still "all" package arch.
-    if d.getVar("PACKAGE_ARCH", True) == "all":
+    if d.getVar("PACKAGE_ARCH") == "all":
         # No need for virtual/libc or a cross compiler
         d.setVar("INHIBIT_DEFAULT_DEPS","1")
 
@@ -47,6 +47,6 @@ python () {
         d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS")
         d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS")
     elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
-        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True))
+        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
 }
 
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 3543ca9..5295977 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}"
 
 
 python () {
-    pn = d.getVar('PN', True)
-    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
     if pn in assume_provided:
-        for p in d.getVar("PROVIDES", True).split():
+        for p in d.getVar("PROVIDES").split():
             if p != pn:
                 pn = p
                 break
@@ -68,7 +68,7 @@ python () {
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
     # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
+    if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
         return
@@ -106,7 +106,7 @@ python () {
     # Output the srpm package
     ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
     if ar_srpm == "1":
-        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+        if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
@@ -130,9 +130,9 @@ python do_ar_original() {
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     bb.note('Archiving the original source...')
-    urls = d.getVar("SRC_URI", True).split()
+    urls = d.getVar("SRC_URI").split()
     # destsuffix (git fetcher) and subdir (everything else) are allowed to be
     # absolute paths (for example, destsuffix=${S}/foobar).
     # That messes with unpacking inside our tmpdir below, because the fetchers
@@ -157,7 +157,7 @@ python do_ar_original() {
         if os.path.isfile(local):
             shutil.copy(local, ar_outdir)
         elif os.path.isdir(local):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
             fetch.unpack(tmpdir, (url,))
             # To handle recipes with more than one source, we add the "name"
             # URL parameter as suffix. We treat it as an error when
@@ -195,24 +195,24 @@ python do_ar_patched() {
         return
 
     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
     bb.note('Archiving the patched source...')
     d.setVar('WORKDIR', ar_workdir)
-    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
 }
 
 python do_ar_configured() {
     import shutil
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
         bb.note('Archiving the configured source...')
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         # "gcc-source-${PV}" recipes don't have "do_configure"
         # task, so we need to run "do_preconfigure" instead
         if pn.startswith("gcc-source-"):
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             bb.build.exec_func('do_preconfigure', d)
 
         # The libtool-native's do_configure will remove the
@@ -221,7 +221,7 @@ python do_ar_configured() {
         # instead of.
         elif pn != 'libtool-native':
             # Change the WORKDIR to make do_configure run in another dir.
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             if bb.data.inherits_class('kernel-yocto', d):
                 bb.build.exec_func('do_kernel_configme', d)
             if bb.data.inherits_class('cmake', d):
@@ -235,12 +235,12 @@ python do_ar_configured() {
             for func in (postfuncs or '').split():
                 if func != "do_qa_configure":
                     bb.build.exec_func(func, d)
-        srcdir = d.getVar('S', True)
-        builddir = d.getVar('B', True)
+        srcdir = d.getVar('S')
+        builddir = d.getVar('B')
         if srcdir != builddir:
             if os.path.exists(builddir):
                 oe.path.copytree(builddir, os.path.join(srcdir, \
-                    'build.%s.ar_configured' % d.getVar('PF', True)))
+                    'build.%s.ar_configured' % d.getVar('PF')))
         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
 
@@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     import tarfile
 
     # Make sure we are only creating a single tarball for gcc sources
-    if (d.getVar('SRC_URI', True) == ""):
+    if (d.getVar('SRC_URI') == ""):
         return
 
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
+        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF', True)
+        filename = '%s.tar.gz' % d.getVar('PF')
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
@@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     dirname = os.path.dirname(src)
     basename = os.path.basename(src)
     os.chdir(dirname)
-    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
     diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
     subprocess.call(diff_cmd, shell=True)
     bb.utils.remove(src_patched, recurse=True)
@@ -297,9 +297,9 @@ python do_unpack_and_patch() {
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
-    pn = d.getVar('PN', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
+    pn = d.getVar('PN')
 
     # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
     if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
@@ -309,18 +309,18 @@ python do_unpack_and_patch() {
         # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
         # possibly requiring of the following tasks (such as some recipes's
         # do_patch required 'B' existed).
-        bb.utils.mkdirhier(d.getVar('B', True))
+        bb.utils.mkdirhier(d.getVar('B'))
 
     bb.build.exec_func('do_unpack', d)
 
     # Save the original source for creating the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
-        src = d.getVar('S', True).rstrip('/')
+        src = d.getVar('S').rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
     # Make sure gcc and kernel sources are patched only once
-    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
+    if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
         bb.build.exec_func('do_patch', d)
 
     # Create the patches
@@ -339,14 +339,14 @@ python do_ar_recipe () {
 
     require_re = re.compile( r"require\s+(.+)" )
     include_re = re.compile( r"include\s+(.+)" )
-    bbfile = d.getVar('FILE', True)
-    outdir = os.path.join(d.getVar('WORKDIR', True), \
-            '%s-recipe' % d.getVar('PF', True))
+    bbfile = d.getVar('FILE')
+    outdir = os.path.join(d.getVar('WORKDIR'), \
+            '%s-recipe' % d.getVar('PF'))
     bb.utils.mkdirhier(outdir)
     shutil.copy(bbfile, outdir)
 
-    pn = d.getVar('PN', True)
-    bbappend_files = d.getVar('BBINCLUDED', True).split()
+    pn = d.getVar('PN')
+    bbappend_files = d.getVar('BBINCLUDED').split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
     bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
@@ -356,7 +356,7 @@ python do_ar_recipe () {
             shutil.copy(file, outdir)
 
     dirname = os.path.dirname(bbfile)
-    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
     f = open(bbfile, 'r')
     for line in f.readlines():
         incfile = None
@@ -370,7 +370,7 @@ python do_ar_recipe () {
         if incfile:
             shutil.copy(incfile, outdir)
 
-    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
     bb.utils.remove(outdir, recurse=True)
 }
 
@@ -379,8 +379,8 @@ python do_dumpdata () {
     dump environment data to ${PF}-showdata.dump
     """
 
-    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
-        '%s-showdata.dump' % d.getVar('PF', True))
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
+        '%s-showdata.dump' % d.getVar('PF'))
     bb.note('Dumping metadata into %s' % dumpfile)
     with open(dumpfile, "w") as f:
         # emit variables and shell functions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c43ea9a..c43531b 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS'):
         return ''
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deps = ''
 
     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -14,7 +14,7 @@ def autotools_dep_prepend(d):
     if not bb.data.inherits_class('native', d) \
             and not bb.data.inherits_class('nativesdk', d) \
             and not bb.data.inherits_class('cross', d) \
-            and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
+            and not d.getVar('INHIBIT_DEFAULT_DEPS'):
         deps += 'libtool-cross '
 
     return deps + 'gnu-config-native '
@@ -139,15 +139,15 @@ ACLOCALDIR = "${WORKDIR}/aclocal-copy"
 python autotools_copy_aclocals () {
     import copy
 
-    s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True)
+    s = d.getVar("AUTOTOOLS_SCRIPT_PATH")
     if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
         if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
             return
 
     taskdepdata = d.getVar("BB_TASKDEPDATA", False)
     #bb.warn(str(taskdepdata))
-    pn = d.getVar("PN", True)
-    aclocaldir = d.getVar("ACLOCALDIR", True)
+    pn = d.getVar("PN")
+    aclocaldir = d.getVar("ACLOCALDIR")
     oe.path.remove(aclocaldir)
     bb.utils.mkdirhier(aclocaldir)
     start = None
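Only expanding calls are rewritten. Calls that deliberately pass False (AUTOTOOLS_COPYACLOCAL and BB_TASKDEPDATA in the hunk above, INHIBIT_DEFAULT_DEPS and SOURCE_MIRROR_FETCH in base.bbclass below) ask for the raw, unexpanded value, and dropping their second argument would silently change behaviour now that expansion is the default. The getVarFlag(..., True) calls visible in archiver.bbclass are likewise untouched: this commit converts getVar only. In terms of the toy sketch near the top:

taskdepdata = d.getVar("BB_TASKDEPDATA", False)  # raw value wanted: the flag must stay
pn = d.getVar("PN", True)                        # expansion wanted: True is now the
pn = d.getVar("PN")                              # default, so these two are equivalent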
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 19673e6..2765ebf 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list"
 def oe_import(d):
     import sys
 
-    bbpath = d.getVar("BBPATH", True).split(":")
+    bbpath = d.getVar("BBPATH").split(":")
     sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
 
     def inject(name, value):
@@ -37,7 +37,7 @@ def oe_import(d):
 OE_IMPORTED := "${@oe_import(d)}"
 
 def lsb_distro_identifier(d):
-    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+    adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
     if adjust:
         try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
-        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
 
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
 
 def extra_path_elements(d):
     path = ""
-    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
     for e in elements:
         path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
     return path
@@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"
 
 def get_lic_checksum_file_list(d):
     filelist = []
-    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
-    tmpdir = d.getVar("TMPDIR", True)
-    s = d.getVar("S", True)
-    b = d.getVar("B", True)
-    workdir = d.getVar("WORKDIR", True)
+    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+    tmpdir = d.getVar("TMPDIR")
+    s = d.getVar("S")
+    b = d.getVar("B")
+    workdir = d.getVar("WORKDIR")
 
     urls = lic_files.split()
     for url in urls:
@@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d):
                 continue
             filelist.append(path + ":" + str(os.path.exists(path)))
         except bb.fetch.MalformedUrl:
-            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
     return " ".join(filelist)
 
 addtask fetch
@@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[vardeps] += "SRCREV"
 python base_do_fetch() {
 
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
@@ -141,31 +141,31 @@ addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 
 python () {
-    if d.getVar('S', True) != d.getVar('WORKDIR', True):
+    if d.getVar('S') != d.getVar('WORKDIR'):
         d.setVarFlag('do_unpack', 'cleandirs', '${S}')
     else:
         d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
 }
 python base_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR', True))
+        fetcher.unpack(d.getVar('WORKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal(str(e))
 }
 
 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
-    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
-        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
             d.setVar("TUNE_PKGARCH", "armv7a")
 
 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list"
 def buildcfg_vars(d):
     statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
     for var in statusvars:
-        value = d.getVar(var, True)
+        value = d.getVar(var)
         if value is not None:
             yield '%-17s = "%s"' % (var, value)
 
@@ -200,7 +200,7 @@ def buildcfg_neededvars(d):
     needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
     pesteruser = []
     for v in needed_vars:
-        val = d.getVar(v, True)
+        val = d.getVar(v)
         if not val or val == 'INVALID':
             pesteruser.append(v)
 
@@ -233,7 +233,7 @@ python base_eventhandler() {
                 if flines:
                     statuslines.extend(flines)
 
-        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        statusheader = e.data.getVar('BUILDCFG_HEADER')
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
@@ -241,7 +241,7 @@ python base_eventhandler() {
     # target ones and we'd see dulpicate key names overwriting each other
     # for various PREFERRED_PROVIDERS
     if isinstance(e, bb.event.RecipePreFinalise):
-        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -267,14 +267,14 @@ python base_eventhandler() {
     # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
     # particular.
     #
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        provs = (d.getVar("PROVIDES", True) or "").split()
-        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+        provs = (d.getVar("PROVIDES") or "").split()
+        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
         for p in provs:
             if p.startswith("virtual/") and p not in multiwhitelist:
-                profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+                profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                 if profprov and pn != profprov:
                     raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
 }
@@ -336,9 +336,9 @@ def set_packagetriplet(d):
     tos = []
     tvs = []
 
-    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
-    tos.append(d.getVar("TARGET_OS", True))
-    tvs.append(d.getVar("TARGET_VENDOR", True))
+    archs.append(d.getVar("PACKAGE_ARCHS").split())
+    tos.append(d.getVar("TARGET_OS"))
+    tvs.append(d.getVar("TARGET_VENDOR"))
 
     def settriplet(d, varname, archs, tos, tvs):
         triplets = []
@@ -350,16 +350,16 @@ def set_packagetriplet(d):
 
     settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         bb.data.update_data(localdata)
 
-        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
-        tos.append(localdata.getVar("TARGET_OS", True))
-        tvs.append(localdata.getVar("TARGET_VENDOR", True))
+        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+        tos.append(localdata.getVar("TARGET_OS"))
+        tvs.append(localdata.getVar("TARGET_VENDOR"))
 
     settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
 
@@ -374,10 +374,10 @@ python () {
     # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
     pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
     if pkgconfigflags:
-        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
-        pn = d.getVar("PN", True)
+        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+        pn = d.getVar("PN")
 
-        mlprefix = d.getVar("MLPREFIX", True)
+        mlprefix = d.getVar("MLPREFIX")
 
         def expandFilter(appends, extension, prefix):
             appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -419,7 +419,7 @@ python () {
             num = len(items)
             if num > 4:
                 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
-                    % (d.getVar('PN', True), flag))
+                    % (d.getVar('PN'), flag))
 
             if flag in pkgconfig:
                 if num >= 3 and items[2]:
@@ -434,8 +434,8 @@ python () {
         appendVar('RDEPENDS_${PN}', extrardeps)
         appendVar('PACKAGECONFIG_CONFARGS', extraconf)
 
-    pn = d.getVar('PN', True)
-    license = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    license = d.getVar('LICENSE')
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
@@ -465,26 +465,26 @@ python () {
         d.setVarFlag('do_devshell', 'fakeroot', '1')
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
-    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+    need_machine = d.getVar('COMPATIBLE_MACHINE')
     if need_machine:
         import re
-        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
             if re.match(need_machine, m):
                 break
         else:
-            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', True)
+        need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', True)
+            this_host = d.getVar('HOST_SYS')
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
 
-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
     check_license = False if pn.startswith("nativesdk-") else True
     for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -503,21 +503,21 @@ python () {
         for lic in bad_licenses:
             spdx_license = return_spdx(d, lic)
             for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
-                whitelist.extend((d.getVar(w + lic, True) or "").split())
+                whitelist.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+                    whitelist.extend((d.getVar(w + spdx_license) or "").split())
                 '''
                 We need to track what we are whitelisting and why. If pn is
                 incompatible we need to be able to note that the image that
                 is created may infact contain incompatible licenses despite
                 INCOMPATIBLE_LICENSE being set.
                 '''
-                incompatwl.extend((d.getVar(w + lic, True) or "").split())
+                incompatwl.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+                    incompatwl.extend((d.getVar(w + spdx_license) or "").split())
 
         if not pn in whitelist:
-            pkgs = d.getVar('PACKAGES', True).split()
+            pkgs = d.getVar('PACKAGES').split()
             skipped_pkgs = []
             unskipped_pkgs = []
             for pkg in pkgs:
@@ -529,7 +529,7 @@ python () {
             if unskipped_pkgs:
                 for pkg in skipped_pkgs:
                     bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
-                    mlprefix = d.getVar('MLPREFIX', True)
+                    mlprefix = d.getVar('MLPREFIX')
                     d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                 for pkg in unskipped_pkgs:
                     bb.debug(1, "INCLUDING the package " + pkg)
@@ -545,8 +545,8 @@ python () {
     # matching of license expressions - just check that all license strings
     # in LICENSE_<pkg> are found in LICENSE.
     license_set = oe.license.list_licenses(license)
-    for pkg in d.getVar('PACKAGES', True).split():
-        pkg_license = d.getVar('LICENSE_' + pkg, True)
+    for pkg in d.getVar('PACKAGES').split():
+        pkg_license = d.getVar('LICENSE_' + pkg)
         if pkg_license:
             unlisted = oe.license.list_licenses(pkg_license) - license_set
             if unlisted:
@@ -554,7 +554,7 @@ python () {
                         "listed in LICENSE" % (pkg, ' '.join(unlisted)))
 
     needsrcrev = False
-    srcuri = d.getVar('SRC_URI', True)
+    srcuri = d.getVar('SRC_URI')
     for uri in srcuri.split():
         (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
 
@@ -614,8 +614,8 @@ python () {
     set_packagetriplet(d)
 
     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', True)
-    pkg_arch = d.getVar('PACKAGE_ARCH', True)
+    mach_arch = d.getVar('MACHINE_ARCH')
+    pkg_arch = d.getVar('PACKAGE_ARCH')
 
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -625,11 +625,11 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
-    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
     if override != '0':
         paths = []
-        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
-        machine = d.getVar('MACHINE', True)
+        fpaths = (d.getVar('FILESPATH') or '').split(':')
+        machine = d.getVar('MACHINE')
         for p in fpaths:
             if os.path.basename(p) == machine and os.path.isdir(p):
                 paths.append(p)
@@ -646,16 +646,16 @@ python () {
                             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                             return
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
 
         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
-            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
 }
 
 addtask cleansstate after do_clean
@@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate
 do_cleansstate[nostamp] = "1"
 
 python do_cleanall() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 8591308..5372294 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
@@ -22,7 +22,7 @@ def get_binconfig_mangle(d):
         s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'"
         s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'"
         if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False):
-            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE", True)
+            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE")
 
     return s
 
diff --git a/meta/classes/blacklist.bbclass b/meta/classes/blacklist.bbclass
index a0141a8..c6f4223 100644
--- a/meta/classes/blacklist.bbclass
+++ b/meta/classes/blacklist.bbclass
@@ -16,7 +16,7 @@
 addhandler blacklist_multilib_eventhandler
 blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed"
 python blacklist_multilib_eventhandler() {
-    multilibs = e.data.getVar('MULTILIBS', True)
+    multilibs = e.data.getVar('MULTILIBS')
     if not multilibs:
         return
 
@@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() {
 }
 
 python () {
-    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), True)
+    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True)
 
     if blacklist:
         raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist))
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass
index 3fc8956..6d9a821 100644
--- a/meta/classes/bugzilla.bbclass
+++ b/meta/classes/bugzilla.bbclass
@@ -110,12 +110,12 @@ python bugzilla_eventhandler() {
         return
 
     if name == "TaskFailed":
-        xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)
-        user = data.getVar("BUGZILLA_USER", True)
-        passw = data.getVar("BUGZILLA_PASS", True)
-        product = data.getVar("BUGZILLA_PRODUCT", True)
-        compon = data.getVar("BUGZILLA_COMPONENT", True)
-        version = data.getVar("BUGZILLA_VERSION", True)
+        xmlrpc = data.getVar("BUGZILLA_XMLRPC")
+        user = data.getVar("BUGZILLA_USER")
+        passw = data.getVar("BUGZILLA_PASS")
+        product = data.getVar("BUGZILLA_PRODUCT")
+        compon = data.getVar("BUGZILLA_COMPONENT")
+        version = data.getVar("BUGZILLA_VERSION")
 
         proxy = data.getVar('http_proxy', True )
         if (proxy):
@@ -133,14 +133,14 @@ python bugzilla_eventhandler() {
                 'component': compon}
 
         # evil hack to figure out what is going on
-        debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
+        debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a")
 
         file = None
-        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
-                                                     "pv"      : data.getVar("PV", True),
+        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"),
+                                                     "pv"      : data.getVar("PV"),
                                                      }
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
-        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
+        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) )
         if len(log_file) != 0:
             print >> debug_file, "Adding log file %s" % log_file[0]
             file = open(log_file[0], 'r')
@@ -168,7 +168,7 @@ python bugzilla_eventhandler() {
 
         if bug_number and log:
             print >> debug_file, "The bug is known as '%s'" % bug_number
-            desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
+            desc = "Build log for machine %s" % (data.getVar('MACHINE'))
             if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
                 print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
             else:
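A sweep of this size is evidently scripted, and bugzilla.bbclass shows the limits of the script: data.getVar('http_proxy', True ) passes through as unchanged context, and data.getVar( 'MACHINE', True ) survives even on a line that was otherwise converted, because the irregular spacing defeats the match. A regex along the following lines (an assumption for illustration; the actual conversion script is not part of this diff) reproduces both the bulk rewrite and these near-misses.

import re

# Matches .getVar(<first-arg>, True) with no stray space before the ')'.
PATTERN = re.compile(r"(\.getVar\([^,()]+), True\)")

def strip_true(line):
    return PATTERN.sub(r"\1)", line)

assert strip_true("pn = d.getVar('PN', True)") == "pn = d.getVar('PN')"
assert strip_true("val = d.getVar(v, True)") == "val = d.getVar(v)"
# The stray spaces defeat the pattern, exactly as seen in bugzilla.bbclass:
assert strip_true("m = data.getVar( 'MACHINE', True )") == "m = data.getVar( 'MACHINE', True )"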
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index d82e9bb..73cd886 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -64,18 +64,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded"
 # Write out metadata about this package for comparison when writing future packages
 #
 python buildhistory_emit_pkghistory() {
-    if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']:
+    if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
         return 0
 
-    if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
+    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
         return 0
 
     import re
     import json
     import errno
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
-    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
+    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
 
     class RecipeInfo:
         def __init__(self, name):
@@ -182,12 +182,12 @@ python buildhistory_emit_pkghistory() {
         items.sort()
         return ' '.join(items)
 
-    pn = d.getVar('PN', True)
-    pe = d.getVar('PE', True) or "0"
-    pv = d.getVar('PV', True)
-    pr = d.getVar('PR', True)
+    pn = d.getVar('PN')
+    pe = d.getVar('PE') or "0"
+    pv = d.getVar('PV')
+    pr = d.getVar('PR')
 
-    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+    pkgdata_dir = d.getVar('PKGDATA_DIR')
     packages = ""
     try:
         with open(os.path.join(pkgdata_dir, pn)) as f:
@@ -203,7 +203,7 @@ python buildhistory_emit_pkghistory() {
             raise
 
     packagelist = packages.split()
-    preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split()
+    preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
     if not os.path.exists(pkghistdir):
         bb.utils.mkdirhier(pkghistdir)
     else:
@@ -223,11 +223,11 @@ python buildhistory_emit_pkghistory() {
     rcpinfo.pe = pe
     rcpinfo.pv = pv
     rcpinfo.pr = pr
-    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
+    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
     rcpinfo.packages = packages
     write_recipehistory(rcpinfo, d)
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packagelist:
         pkgdata = {}
         with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
@@ -293,7 +293,7 @@ python buildhistory_emit_pkghistory() {
 def write_recipehistory(rcpinfo, d):
     bb.debug(2, "Writing recipe history")
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
 
     infofile = os.path.join(pkghistdir, "latest")
     with open(infofile, "w") as f:
@@ -308,7 +308,7 @@ def write_recipehistory(rcpinfo, d):
 def write_pkghistory(pkginfo, d):
     bb.debug(2, "Writing package history for package %s" % pkginfo.name)
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
 
     pkgpath = os.path.join(pkghistdir, pkginfo.name)
     if not os.path.exists(pkgpath):
@@ -369,7 +369,7 @@ def buildhistory_list_installed(d, rootfs_type="image"):
         pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")
 
     for output_type, output_file in process_list:
-        output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)
+        output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)
 
         with open(output_file_full, 'w') as output:
             output.write(format_pkg_list(pkgs, output_type))
@@ -550,7 +550,7 @@ END
 python buildhistory_get_extra_sdkinfo() {
     import operator
     import math
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         tasksizes = {}
         filesizes = {}
         for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')):
@@ -591,7 +591,7 @@ SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_e
 SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
 
 def buildhistory_get_build_id(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
@@ -605,12 +605,12 @@ def buildhistory_get_build_id(d):
             if flines:
                 statuslines.extend(flines)
 
-    statusheader = d.getVar('BUILDCFG_HEADER', True)
+    statusheader = d.getVar('BUILDCFG_HEADER')
     return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
 def buildhistory_get_metadata_revs(d):
     # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -622,7 +622,7 @@ def outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if var in listvars:
             # Squash out spaces
             value = oe.utils.squashspaces(value)
@@ -630,17 +630,17 @@ def outputvars(vars, listvars, d):
     return ret.rstrip('\n')
 
 def buildhistory_get_imagevars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
     listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
     return outputvars(imagevars, listvars, d)
 
 def buildhistory_get_sdkvars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         # Extensible SDK uses some additional variables
645 | sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN" | 645 | sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN" |
646 | listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST" | 646 | listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST" |
@@ -735,16 +735,16 @@ END | |||
735 | } | 735 | } |
736 | 736 | ||
737 | python buildhistory_eventhandler() { | 737 | python buildhistory_eventhandler() { |
738 | if e.data.getVar('BUILDHISTORY_FEATURES', True).strip(): | 738 | if e.data.getVar('BUILDHISTORY_FEATURES').strip(): |
739 | reset = e.data.getVar("BUILDHISTORY_RESET", True) | 739 | reset = e.data.getVar("BUILDHISTORY_RESET") |
740 | olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True) | 740 | olddir = e.data.getVar("BUILDHISTORY_OLD_DIR") |
741 | if isinstance(e, bb.event.BuildStarted): | 741 | if isinstance(e, bb.event.BuildStarted): |
742 | if reset: | 742 | if reset: |
743 | import shutil | 743 | import shutil |
744 | # Clean up after potentially interrupted build. | 744 | # Clean up after potentially interrupted build. |
745 | if os.path.isdir(olddir): | 745 | if os.path.isdir(olddir): |
746 | shutil.rmtree(olddir) | 746 | shutil.rmtree(olddir) |
747 | rootdir = e.data.getVar("BUILDHISTORY_DIR", True) | 747 | rootdir = e.data.getVar("BUILDHISTORY_DIR") |
748 | entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ] | 748 | entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ] |
749 | bb.utils.mkdirhier(olddir) | 749 | bb.utils.mkdirhier(olddir) |
750 | for entry in entries: | 750 | for entry in entries: |
@@ -754,7 +754,7 @@ python buildhistory_eventhandler() { | |||
754 | if reset: | 754 | if reset: |
755 | import shutil | 755 | import shutil |
756 | shutil.rmtree(olddir) | 756 | shutil.rmtree(olddir) |
757 | if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1": | 757 | if e.data.getVar("BUILDHISTORY_COMMIT") == "1": |
758 | bb.note("Writing buildhistory") | 758 | bb.note("Writing buildhistory") |
759 | localdata = bb.data.createCopy(e.data) | 759 | localdata = bb.data.createCopy(e.data) |
760 | localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures)) | 760 | localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures)) |
@@ -774,7 +774,7 @@ def _get_srcrev_values(d): | |||
774 | """ | 774 | """ |
775 | 775 | ||
776 | scms = [] | 776 | scms = [] |
777 | fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d) | 777 | fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d) |
778 | urldata = fetcher.ud | 778 | urldata = fetcher.ud |
779 | for u in urldata: | 779 | for u in urldata: |
780 | if urldata[u].method.supports_srcrev(): | 780 | if urldata[u].method.supports_srcrev(): |
@@ -806,7 +806,7 @@ def _get_srcrev_values(d): | |||
806 | do_fetch[postfuncs] += "write_srcrev" | 806 | do_fetch[postfuncs] += "write_srcrev" |
807 | do_fetch[vardepsexclude] += "write_srcrev" | 807 | do_fetch[vardepsexclude] += "write_srcrev" |
808 | python write_srcrev() { | 808 | python write_srcrev() { |
809 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | 809 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') |
810 | srcrevfile = os.path.join(pkghistdir, 'latest_srcrev') | 810 | srcrevfile = os.path.join(pkghistdir, 'latest_srcrev') |
811 | 811 | ||
812 | srcrevs, tag_srcrevs = _get_srcrev_values(d) | 812 | srcrevs, tag_srcrevs = _get_srcrev_values(d) |
@@ -838,7 +838,7 @@ python write_srcrev() { | |||
838 | for name, srcrev in tag_srcrevs.items(): | 838 | for name, srcrev in tag_srcrevs.items(): |
839 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) | 839 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) |
840 | if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev: | 840 | if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev: |
841 | pkg = d.getVar('PN', True) | 841 | pkg = d.getVar('PN') |
842 | bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev)) | 842 | bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev)) |
843 | 843 | ||
844 | else: | 844 | else: |
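Every hunk in this series follows the same mechanical pattern: in current BitBake the datastore's getVar() expands by default, so the explicit True argument is redundant and can simply be dropped. A minimal sketch of the equivalence, assuming a BitBake environment where bb.data is importable (inside class code the datastore "d" is passed in):

    # Sketch of the conversion applied throughout this series.
    import bb.data

    d = bb.data.init()
    d.setVar('PN', 'example')
    d.setVar('PF', '${PN}-1.0')

    # getVar() now expands by default, so both forms return "example-1.0":
    assert d.getVar('PF', True) == d.getVar('PF')

    # Unexpanded reads are the only case that still needs an argument:
    assert d.getVar('PF', False) == '${PN}-1.0'

Only unexpanded reads keep an argument, which is why the getVar(..., False) calls elsewhere in these classes survive the conversion untouched.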
diff --git a/meta/classes/buildstats-summary.bbclass b/meta/classes/buildstats-summary.bbclass index b86abcc..f9b241b 100644 --- a/meta/classes/buildstats-summary.bbclass +++ b/meta/classes/buildstats-summary.bbclass | |||
@@ -7,7 +7,7 @@ python buildstats_summary () { | |||
7 | if not os.path.exists(bsdir): | 7 | if not os.path.exists(bsdir): |
8 | return | 8 | return |
9 | 9 | ||
10 | sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split() | 10 | sstatetasks = (e.data.getVar('SSTATETASKS') or '').split() |
11 | built = collections.defaultdict(lambda: [set(), set()]) | 11 | built = collections.defaultdict(lambda: [set(), set()]) |
12 | for pf in os.listdir(bsdir): | 12 | for pf in os.listdir(bsdir): |
13 | taskdir = os.path.join(bsdir, pf) | 13 | taskdir = os.path.join(bsdir, pf) |
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass index c6b77e6..8703cb2 100644 --- a/meta/classes/buildstats.bbclass +++ b/meta/classes/buildstats.bbclass | |||
@@ -75,8 +75,8 @@ def get_buildtimedata(var, d): | |||
75 | return timediff, cpuperc | 75 | return timediff, cpuperc |
76 | 76 | ||
77 | def write_task_data(status, logfile, e, d): | 77 | def write_task_data(status, logfile, e, d): |
78 | bn = d.getVar('BUILDNAME', True) | 78 | bn = d.getVar('BUILDNAME') |
79 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) | 79 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) |
80 | with open(os.path.join(logfile), "a") as f: | 80 | with open(os.path.join(logfile), "a") as f: |
81 | elapsedtime = get_timedata("__timedata_task", d, e.time) | 81 | elapsedtime = get_timedata("__timedata_task", d, e.time) |
82 | if elapsedtime: | 82 | if elapsedtime: |
@@ -106,9 +106,9 @@ python run_buildstats () { | |||
106 | import bb.event | 106 | import bb.event |
107 | import time, subprocess, platform | 107 | import time, subprocess, platform |
108 | 108 | ||
109 | bn = d.getVar('BUILDNAME', True) | 109 | bn = d.getVar('BUILDNAME') |
110 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) | 110 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) |
111 | taskdir = os.path.join(bsdir, d.getVar('PF', True)) | 111 | taskdir = os.path.join(bsdir, d.getVar('PF')) |
112 | 112 | ||
113 | if isinstance(e, bb.event.BuildStarted): | 113 | if isinstance(e, bb.event.BuildStarted): |
114 | ######################################################################## | 114 | ######################################################################## |
@@ -162,7 +162,7 @@ python run_buildstats () { | |||
162 | if e.task == "do_rootfs": | 162 | if e.task == "do_rootfs": |
163 | bs = os.path.join(bsdir, "build_stats") | 163 | bs = os.path.join(bsdir, "build_stats") |
164 | with open(bs, "a") as f: | 164 | with open(bs, "a") as f: |
165 | rootfs = d.getVar('IMAGE_ROOTFS', True) | 165 | rootfs = d.getVar('IMAGE_ROOTFS') |
166 | if os.path.isdir(rootfs): | 166 | if os.path.isdir(rootfs): |
167 | try: | 167 | try: |
168 | rootfs_size = subprocess.check_output(["du", "-sh", rootfs], | 168 | rootfs_size = subprocess.check_output(["du", "-sh", rootfs], |
@@ -197,7 +197,7 @@ python runqueue_stats () { | |||
197 | # are available that we need to find the output directory. | 197 | # are available that we need to find the output directory. |
198 | # The persistent SystemStats is stored in the datastore and | 198 | # The persistent SystemStats is stored in the datastore and |
199 | # closed when the build is done. | 199 | # closed when the build is done. |
200 | system_stats = d.getVar('_buildstats_system_stats', True) | 200 | system_stats = d.getVar('_buildstats_system_stats') |
201 | if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)): | 201 | if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)): |
202 | system_stats = buildstats.SystemStats(d) | 202 | system_stats = buildstats.SystemStats(d) |
203 | d.setVar('_buildstats_system_stats', system_stats) | 203 | d.setVar('_buildstats_system_stats', system_stats) |
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass index 2e9837c..93fcaca 100644 --- a/meta/classes/ccache.bbclass +++ b/meta/classes/ccache.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}" | 1 | CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}" |
2 | export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" | 2 | export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" |
3 | CCACHE_DISABLE[unexport] = "1" | 3 | CCACHE_DISABLE[unexport] = "1" |
4 | 4 | ||
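The CCACHE assignment above leans on a short-circuit idiom: bb.utils.which() returns the resolved path when ccache is on PATH and an empty string otherwise, so `and` yields either 'ccache ' or ''. A quick illustrative sketch (the path is a made-up example of a which() hit):

    # Truthy left operand: `and` evaluates to the right operand.
    print("/usr/bin/ccache" and "ccache ")   # -> "ccache "
    # Falsy left operand (which() miss): `and` short-circuits to "".
    print("" and "ccache ")                  # -> ""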
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass index f183b4a..ad3c397 100644 --- a/meta/classes/chrpath.bbclass +++ b/meta/classes/chrpath.bbclass | |||
@@ -44,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): | |||
44 | p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) | 44 | p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) |
45 | out, err = p.communicate() | 45 | out, err = p.communicate() |
46 | if p.returncode != 0: | 46 | if p.returncode != 0: |
47 | bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err)) | 47 | bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err)) |
48 | 48 | ||
49 | def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): | 49 | def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): |
50 | import subprocess as sub | 50 | import subprocess as sub |
@@ -72,7 +72,7 @@ def process_dir (rootdir, directory, d): | |||
72 | cmd = d.expand('${CHRPATH_BIN}') | 72 | cmd = d.expand('${CHRPATH_BIN}') |
73 | tmpdir = os.path.normpath(d.getVar('TMPDIR', False)) | 73 | tmpdir = os.path.normpath(d.getVar('TMPDIR', False)) |
74 | baseprefix = os.path.normpath(d.expand('${base_prefix}')) | 74 | baseprefix = os.path.normpath(d.expand('${base_prefix}')) |
75 | hostos = d.getVar("HOST_OS", True) | 75 | hostos = d.getVar("HOST_OS") |
76 | 76 | ||
77 | #bb.debug("Checking %s for binaries to process" % directory) | 77 | #bb.debug("Checking %s for binaries to process" % directory) |
78 | if not os.path.exists(directory): | 78 | if not os.path.exists(directory): |
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass index fad0baa..9e74599 100644 --- a/meta/classes/cmake.bbclass +++ b/meta/classes/cmake.bbclass | |||
@@ -46,7 +46,7 @@ cmake_do_generate_toolchain_file() { | |||
46 | # CMake system name must be something like "Linux". | 46 | # CMake system name must be something like "Linux". |
47 | # This is important for cross-compiling. | 47 | # This is important for cross-compiling. |
48 | set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` ) | 48 | set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` ) |
49 | set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH', True))} ) | 49 | set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH'))} ) |
50 | set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} ) | 50 | set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} ) |
51 | set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} ) | 51 | set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} ) |
52 | set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} ) | 52 | set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} ) |
@@ -112,15 +112,15 @@ cmake_do_configure() { | |||
112 | ${OECMAKE_SITEFILE} \ | 112 | ${OECMAKE_SITEFILE} \ |
113 | ${OECMAKE_SOURCEPATH} \ | 113 | ${OECMAKE_SOURCEPATH} \ |
114 | -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \ | 114 | -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \ |
115 | -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir', True), d.getVar('prefix', True))} \ | 115 | -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))} \ |
116 | -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir', True), d.getVar('prefix', True))} \ | 116 | -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix'))} \ |
117 | -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir', True), d.getVar('prefix', True))} \ | 117 | -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix'))} \ |
118 | -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \ | 118 | -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \ |
119 | -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir', True), d.getVar('prefix', True))} \ | 119 | -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d.getVar('prefix'))} \ |
120 | -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \ | 120 | -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \ |
121 | -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir', True), d.getVar('prefix', True))} \ | 121 | -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix'))} \ |
122 | -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir', True), d.getVar('prefix', True))} \ | 122 | -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix'))} \ |
123 | -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir', True), d.getVar('prefix', True))} \ | 123 | -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \ |
124 | -DCMAKE_INSTALL_SO_NO_EXE=0 \ | 124 | -DCMAKE_INSTALL_SO_NO_EXE=0 \ |
125 | -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \ | 125 | -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \ |
126 | -DCMAKE_VERBOSE_MAKEFILE=1 \ | 126 | -DCMAKE_VERBOSE_MAKEFILE=1 \ |
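The configure hunk converts bindir, libdir and friends to paths relative to ${prefix} because the CMAKE_INSTALL_* cache entries are interpreted relative to CMAKE_INSTALL_PREFIX. A minimal sketch of the computation, assuming a conventional /usr layout:

    import os.path

    prefix = "/usr"        # assumed value of ${prefix}
    libdir = "/usr/lib64"  # assumed value of ${libdir}

    # What the ${@...} expression passes as CMAKE_INSTALL_LIBDIR:
    print(os.path.relpath(libdir, prefix))   # -> "lib64"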
diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass index 5834806..187d407 100644 --- a/meta/classes/cml1.bbclass +++ b/meta/classes/cml1.bbclass | |||
@@ -26,7 +26,7 @@ python do_menuconfig() { | |||
26 | except OSError: | 26 | except OSError: |
27 | mtime = 0 | 27 | mtime = 0 |
28 | 28 | ||
29 | oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND', True), | 29 | oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'), |
30 | d.getVar('PN', True ) + ' Configuration', d) | 30 | d.getVar('PN') + ' Configuration', d) |
31 | 31 | ||
32 | # FIXME this check can be removed when the minimum bitbake version has been bumped | 32 | # FIXME this check can be removed when the minimum bitbake version has been bumped |
@@ -49,7 +49,7 @@ python do_diffconfig() { | |||
49 | import shutil | 49 | import shutil |
50 | import subprocess | 50 | import subprocess |
51 | 51 | ||
52 | workdir = d.getVar('WORKDIR', True) | 52 | workdir = d.getVar('WORKDIR') |
53 | fragment = workdir + '/fragment.cfg' | 53 | fragment = workdir + '/fragment.cfg' |
54 | configorig = '.config.orig' | 54 | configorig = '.config.orig' |
55 | config = '.config' | 55 | config = '.config' |
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass index 8073c17..069db19 100644 --- a/meta/classes/compress_doc.bbclass +++ b/meta/classes/compress_doc.bbclass | |||
@@ -31,25 +31,25 @@ DOC_DECOMPRESS_CMD[xz] ?= "unxz -v" | |||
31 | 31 | ||
32 | PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives" | 32 | PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives" |
33 | python package_do_compress_doc() { | 33 | python package_do_compress_doc() { |
34 | compress_mode = d.getVar('DOC_COMPRESS', True) | 34 | compress_mode = d.getVar('DOC_COMPRESS') |
35 | compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split() | 35 | compress_list = (d.getVar('DOC_COMPRESS_LIST') or '').split() |
36 | if compress_mode not in compress_list: | 36 | if compress_mode not in compress_list: |
37 | bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list)) | 37 | bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list)) |
38 | 38 | ||
39 | dvar = d.getVar('PKGD', True) | 39 | dvar = d.getVar('PKGD') |
40 | compress_cmds = {} | 40 | compress_cmds = {} |
41 | decompress_cmds = {} | 41 | decompress_cmds = {} |
42 | for mode in compress_list: | 42 | for mode in compress_list: |
43 | compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True) | 43 | compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True) |
44 | decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True) | 44 | decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True) |
45 | 45 | ||
46 | mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True)) | 46 | mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir")) |
47 | if os.path.exists(mandir): | 47 | if os.path.exists(mandir): |
48 | # Decompress doc files whose format is not compress_mode | 48 | # Decompress doc files whose format is not compress_mode |
49 | decompress_doc(mandir, compress_mode, decompress_cmds) | 49 | decompress_doc(mandir, compress_mode, decompress_cmds) |
50 | compress_doc(mandir, compress_mode, compress_cmds) | 50 | compress_doc(mandir, compress_mode, compress_cmds) |
51 | 51 | ||
52 | infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True)) | 52 | infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir")) |
53 | if os.path.exists(infodir): | 53 | if os.path.exists(infodir): |
54 | # Decompress doc files whose format is not compress_mode | 54 | # Decompress doc files whose format is not compress_mode |
55 | decompress_doc(infodir, compress_mode, decompress_cmds) | 55 | decompress_doc(infodir, compress_mode, decompress_cmds) |
@@ -218,18 +218,18 @@ python compress_doc_updatealternatives () { | |||
218 | if not bb.data.inherits_class('update-alternatives', d): | 218 | if not bb.data.inherits_class('update-alternatives', d): |
219 | return | 219 | return |
220 | 220 | ||
221 | mandir = d.getVar("mandir", True) | 221 | mandir = d.getVar("mandir") |
222 | infodir = d.getVar("infodir", True) | 222 | infodir = d.getVar("infodir") |
223 | compress_mode = d.getVar('DOC_COMPRESS', True) | 223 | compress_mode = d.getVar('DOC_COMPRESS') |
224 | for pkg in (d.getVar('PACKAGES', True) or "").split(): | 224 | for pkg in (d.getVar('PACKAGES') or "").split(): |
225 | old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split() | 225 | old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split() |
226 | new_names = [] | 226 | new_names = [] |
227 | for old_name in old_names: | 227 | for old_name in old_names: |
228 | old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True) | 228 | old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True) |
229 | old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \ | 229 | old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \ |
230 | d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \ | 230 | d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \ |
231 | d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \ | 231 | d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \ |
232 | d.getVar('ALTERNATIVE_TARGET', True) or \ | 232 | d.getVar('ALTERNATIVE_TARGET') or \ |
233 | old_link | 233 | old_link |
234 | # Sometimes old_target is specified as relative to the link name. | 234 | # Sometimes old_target is specified as relative to the link name. |
235 | old_target = os.path.join(os.path.dirname(old_link), old_target) | 235 | old_target = os.path.join(os.path.dirname(old_link), old_target) |
@@ -247,7 +247,7 @@ python compress_doc_updatealternatives () { | |||
247 | elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True): | 247 | elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True): |
248 | d.delVarFlag('ALTERNATIVE_TARGET', old_name) | 248 | d.delVarFlag('ALTERNATIVE_TARGET', old_name) |
249 | d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target) | 249 | d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target) |
250 | elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True): | 250 | elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg): |
251 | d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target) | 251 | d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target) |
252 | elif d.getVar('ALTERNATIVE_TARGET', old_name, True): | 252 | elif d.getVar('ALTERNATIVE_TARGET', old_name, True): |
253 | d.setVar('ALTERNATIVE_TARGET', new_target) | 253 | d.setVar('ALTERNATIVE_TARGET', new_target) |
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass index 907c183..eabf12c 100644 --- a/meta/classes/copyleft_compliance.bbclass +++ b/meta/classes/copyleft_compliance.bbclass | |||
@@ -13,7 +13,7 @@ python do_prepare_copyleft_sources () { | |||
13 | import os.path | 13 | import os.path |
14 | import shutil | 14 | import shutil |
15 | 15 | ||
16 | p = d.getVar('P', True) | 16 | p = d.getVar('P') |
17 | included, reason = copyleft_should_include(d) | 17 | included, reason = copyleft_should_include(d) |
18 | if not included: | 18 | if not included: |
19 | bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason)) | 19 | bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason)) |
@@ -21,13 +21,13 @@ python do_prepare_copyleft_sources () { | |||
21 | else: | 21 | else: |
22 | bb.debug(1, 'copyleft: %s is included: %s' % (p, reason)) | 22 | bb.debug(1, 'copyleft: %s is included: %s' % (p, reason)) |
23 | 23 | ||
24 | sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True) | 24 | sources_dir = d.getVar('COPYLEFT_SOURCES_DIR') |
25 | dl_dir = d.getVar('DL_DIR', True) | 25 | dl_dir = d.getVar('DL_DIR') |
26 | src_uri = d.getVar('SRC_URI', True).split() | 26 | src_uri = d.getVar('SRC_URI').split() |
27 | fetch = bb.fetch2.Fetch(src_uri, d) | 27 | fetch = bb.fetch2.Fetch(src_uri, d) |
28 | ud = fetch.ud | 28 | ud = fetch.ud |
29 | 29 | ||
30 | pf = d.getVar('PF', True) | 30 | pf = d.getVar('PF') |
31 | dest = os.path.join(sources_dir, pf) | 31 | dest = os.path.join(sources_dir, pf) |
32 | shutil.rmtree(dest, ignore_errors=True) | 32 | shutil.rmtree(dest, ignore_errors=True) |
33 | bb.utils.mkdirhier(dest) | 33 | bb.utils.mkdirhier(dest) |
diff --git a/meta/classes/copyleft_filter.bbclass b/meta/classes/copyleft_filter.bbclass index 46be7f7..426956f 100644 --- a/meta/classes/copyleft_filter.bbclass +++ b/meta/classes/copyleft_filter.bbclass | |||
@@ -49,7 +49,7 @@ def copyleft_should_include(d): | |||
49 | 49 | ||
50 | included, motive = False, 'recipe did not match anything' | 50 | included, motive = False, 'recipe did not match anything' |
51 | 51 | ||
52 | recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True) | 52 | recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE') |
53 | if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d): | 53 | if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d): |
54 | included, motive = False, 'recipe type "%s" is excluded' % recipe_type | 54 | included, motive = False, 'recipe type "%s" is excluded' % recipe_type |
55 | 55 | ||
@@ -57,9 +57,9 @@ def copyleft_should_include(d): | |||
57 | exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d) | 57 | exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d) |
58 | 58 | ||
59 | try: | 59 | try: |
60 | is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude) | 60 | is_included, reason = oe.license.is_included(d.getVar('LICENSE'), include, exclude) |
61 | except oe.license.LicenseError as exc: | 61 | except oe.license.LicenseError as exc: |
62 | bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) | 62 | bb.fatal('%s: %s' % (d.getVar('PF'), exc)) |
63 | else: | 63 | else: |
64 | if is_included: | 64 | if is_included: |
65 | if reason: | 65 | if reason: |
@@ -69,10 +69,10 @@ def copyleft_should_include(d): | |||
69 | else: | 69 | else: |
70 | included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason) | 70 | included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason) |
71 | 71 | ||
72 | if any(fnmatch(d.getVar('PN', True), name) \ | 72 | if any(fnmatch(d.getVar('PN'), name) \ |
73 | for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)): | 73 | for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)): |
74 | included, motive = True, 'recipe included by name' | 74 | included, motive = True, 'recipe included by name' |
75 | if any(fnmatch(d.getVar('PN', True), name) \ | 75 | if any(fnmatch(d.getVar('PN'), name) \ |
76 | for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)): | 76 | for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)): |
77 | included, motive = False, 'recipe excluded by name' | 77 | included, motive = False, 'recipe excluded by name' |
78 | 78 | ||
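COPYLEFT_PN_INCLUDE and COPYLEFT_PN_EXCLUDE entries are shell-style globs matched against PN, so one entry can cover a whole recipe family. A short sketch (the patterns are hypothetical):

    from fnmatch import fnmatch

    pn = "linux-yocto"
    print(fnmatch(pn, "linux-*"))   # True: matched, recipe included by name
    print(fnmatch(pn, "gcc*"))      # False: pattern does not apply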
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass index 21921b3..64db113 100644 --- a/meta/classes/cross-canadian.bbclass +++ b/meta/classes/cross-canadian.bbclass | |||
@@ -20,25 +20,25 @@ CANADIANEXTRAOS = "${BASECANADIANEXTRAOS}" | |||
20 | CANADIANEXTRAVENDOR = "" | 20 | CANADIANEXTRAVENDOR = "" |
21 | MODIFYTOS ??= "1" | 21 | MODIFYTOS ??= "1" |
22 | python () { | 22 | python () { |
23 | archs = d.getVar('PACKAGE_ARCHS', True).split() | 23 | archs = d.getVar('PACKAGE_ARCHS').split() |
24 | sdkarchs = [] | 24 | sdkarchs = [] |
25 | for arch in archs: | 25 | for arch in archs: |
26 | sdkarchs.append(arch + '-${SDKPKGSUFFIX}') | 26 | sdkarchs.append(arch + '-${SDKPKGSUFFIX}') |
27 | d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs)) | 27 | d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs)) |
28 | 28 | ||
29 | # Allow the following code segment to be disabled, e.g. meta-environment | 29 | # Allow the following code segment to be disabled, e.g. meta-environment |
30 | if d.getVar("MODIFYTOS", True) != "1": | 30 | if d.getVar("MODIFYTOS") != "1": |
31 | return | 31 | return |
32 | 32 | ||
33 | if d.getVar("TCLIBC", True) == "baremetal": | 33 | if d.getVar("TCLIBC") == "baremetal": |
34 | return | 34 | return |
35 | 35 | ||
36 | tos = d.getVar("TARGET_OS", True) | 36 | tos = d.getVar("TARGET_OS") |
37 | whitelist = [] | 37 | whitelist = [] |
38 | extralibcs = [""] | 38 | extralibcs = [""] |
39 | if "uclibc" in d.getVar("BASECANADIANEXTRAOS", True): | 39 | if "uclibc" in d.getVar("BASECANADIANEXTRAOS"): |
40 | extralibcs.append("uclibc") | 40 | extralibcs.append("uclibc") |
41 | if "musl" in d.getVar("BASECANADIANEXTRAOS", True): | 41 | if "musl" in d.getVar("BASECANADIANEXTRAOS"): |
42 | extralibcs.append("musl") | 42 | extralibcs.append("musl") |
43 | for variant in ["", "spe", "x32", "eabi", "n32"]: | 43 | for variant in ["", "spe", "x32", "eabi", "n32"]: |
44 | for libc in extralibcs: | 44 | for libc in extralibcs: |
@@ -51,33 +51,33 @@ python () { | |||
51 | entry = entry + "-" + libc | 51 | entry = entry + "-" + libc |
52 | whitelist.append(entry) | 52 | whitelist.append(entry) |
53 | if tos not in whitelist: | 53 | if tos not in whitelist: |
54 | bb.fatal("Building cross-canadian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS", True)) | 54 | bb.fatal("Building cross-canadian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS")) |
55 | 55 | ||
56 | for n in ["PROVIDES", "DEPENDS"]: | 56 | for n in ["PROVIDES", "DEPENDS"]: |
57 | d.setVar(n, d.getVar(n, True)) | 57 | d.setVar(n, d.getVar(n)) |
58 | d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN", True)) | 58 | d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN")) |
59 | for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]: | 59 | for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]: |
60 | n = prefix + "_FOR_TARGET" | 60 | n = prefix + "_FOR_TARGET" |
61 | d.setVar(n, d.getVar(n, True)) | 61 | d.setVar(n, d.getVar(n)) |
62 | # This is a bit ugly. We need to zero the LIBC/ABI extension which will change TARGET_OS | 62 | # This is a bit ugly. We need to zero the LIBC/ABI extension which will change TARGET_OS |
63 | # however we need the old value in some variables. We expand those here first. | 63 | # however we need the old value in some variables. We expand those here first. |
64 | tarch = d.getVar("TARGET_ARCH", True) | 64 | tarch = d.getVar("TARGET_ARCH") |
65 | if tarch == "x86_64": | 65 | if tarch == "x86_64": |
66 | d.setVar("LIBCEXTENSION", "") | 66 | d.setVar("LIBCEXTENSION", "") |
67 | d.setVar("ABIEXTENSION", "") | 67 | d.setVar("ABIEXTENSION", "") |
68 | d.appendVar("CANADIANEXTRAOS", " linux-gnux32") | 68 | d.appendVar("CANADIANEXTRAOS", " linux-gnux32") |
69 | for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): | 69 | for extraos in d.getVar("BASECANADIANEXTRAOS").split(): |
70 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32") | 70 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32") |
71 | elif tarch == "powerpc": | 71 | elif tarch == "powerpc": |
72 | # PowerPC can build "linux" and "linux-gnuspe" | 72 | # PowerPC can build "linux" and "linux-gnuspe" |
73 | d.setVar("LIBCEXTENSION", "") | 73 | d.setVar("LIBCEXTENSION", "") |
74 | d.setVar("ABIEXTENSION", "") | 74 | d.setVar("ABIEXTENSION", "") |
75 | d.appendVar("CANADIANEXTRAOS", " linux-gnuspe") | 75 | d.appendVar("CANADIANEXTRAOS", " linux-gnuspe") |
76 | for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): | 76 | for extraos in d.getVar("BASECANADIANEXTRAOS").split(): |
77 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe") | 77 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe") |
78 | elif tarch == "mips64": | 78 | elif tarch == "mips64": |
79 | d.appendVar("CANADIANEXTRAOS", " linux-gnun32") | 79 | d.appendVar("CANADIANEXTRAOS", " linux-gnun32") |
80 | for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): | 80 | for extraos in d.getVar("BASECANADIANEXTRAOS").split(): |
81 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32") | 81 | d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32") |
82 | if tarch == "arm" or tarch == "armeb": | 82 | if tarch == "arm" or tarch == "armeb": |
83 | d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi") | 83 | d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi") |
@@ -86,10 +86,10 @@ python () { | |||
86 | d.setVar("TARGET_OS", "linux") | 86 | d.setVar("TARGET_OS", "linux") |
87 | 87 | ||
88 | # Also need to handle multilib target vendors | 88 | # Also need to handle multilib target vendors |
89 | vendors = d.getVar("CANADIANEXTRAVENDOR", True) | 89 | vendors = d.getVar("CANADIANEXTRAVENDOR") |
90 | if not vendors: | 90 | if not vendors: |
91 | vendors = all_multilib_tune_values(d, 'TARGET_VENDOR') | 91 | vendors = all_multilib_tune_values(d, 'TARGET_VENDOR') |
92 | origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL", True) | 92 | origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL") |
93 | if origvendor: | 93 | if origvendor: |
94 | d.setVar("TARGET_VENDOR", origvendor) | 94 | d.setVar("TARGET_VENDOR", origvendor) |
95 | if origvendor not in vendors.split(): | 95 | if origvendor not in vendors.split(): |
@@ -116,7 +116,7 @@ HOST_LD_ARCH = "${SDK_LD_ARCH}" | |||
116 | HOST_AS_ARCH = "${SDK_AS_ARCH}" | 116 | HOST_AS_ARCH = "${SDK_AS_ARCH}" |
117 | 117 | ||
118 | #assign DPKG_ARCH | 118 | #assign DPKG_ARCH |
119 | DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH', True), '')}" | 119 | DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH'), '')}" |
120 | 120 | ||
121 | CPPFLAGS = "${BUILDSDK_CPPFLAGS}" | 121 | CPPFLAGS = "${BUILDSDK_CPPFLAGS}" |
122 | CFLAGS = "${BUILDSDK_CFLAGS}" | 122 | CFLAGS = "${BUILDSDK_CFLAGS}" |
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes/crosssdk.bbclass index 7315c38..eaf2beb 100644 --- a/meta/classes/crosssdk.bbclass +++ b/meta/classes/crosssdk.bbclass | |||
@@ -5,7 +5,7 @@ MACHINEOVERRIDES = "" | |||
5 | PACKAGE_ARCH = "${SDK_ARCH}" | 5 | PACKAGE_ARCH = "${SDK_ARCH}" |
6 | python () { | 6 | python () { |
7 | # set TUNE_PKGARCH to SDK_ARCH | 7 | # set TUNE_PKGARCH to SDK_ARCH |
8 | d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH', True)) | 8 | d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH')) |
9 | } | 9 | } |
10 | 10 | ||
11 | STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}" | 11 | STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}" |
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass index 75b8fa9..aad0573 100644 --- a/meta/classes/cve-check.bbclass +++ b/meta/classes/cve-check.bbclass | |||
@@ -51,7 +51,7 @@ python do_cve_check () { | |||
51 | Check recipe for patched and unpatched CVEs | 51 | Check recipe for patched and unpatched CVEs |
52 | """ | 52 | """ |
53 | 53 | ||
54 | if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)): | 54 | if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")): |
55 | patched_cves = get_patches_cves(d) | 55 | patched_cves = get_patches_cves(d) |
56 | patched, unpatched = check_cves(d, patched_cves) | 56 | patched, unpatched = check_cves(d, patched_cves) |
57 | if patched or unpatched: | 57 | if patched or unpatched: |
@@ -70,7 +70,7 @@ python cve_check_cleanup () { | |||
70 | Delete the file used to gather all the CVE information. | 70 | Delete the file used to gather all the CVE information. |
71 | """ | 71 | """ |
72 | 72 | ||
73 | bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE", True)) | 73 | bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE")) |
74 | } | 74 | } |
75 | 75 | ||
76 | addhandler cve_check_cleanup | 76 | addhandler cve_check_cleanup |
@@ -83,12 +83,12 @@ python cve_check_write_rootfs_manifest () { | |||
83 | 83 | ||
84 | import shutil | 84 | import shutil |
85 | 85 | ||
86 | if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)): | 86 | if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")): |
87 | bb.note("Writing rootfs CVE manifest") | 87 | bb.note("Writing rootfs CVE manifest") |
88 | deploy_dir = d.getVar("DEPLOY_DIR_IMAGE", True) | 88 | deploy_dir = d.getVar("DEPLOY_DIR_IMAGE") |
89 | link_name = d.getVar("IMAGE_LINK_NAME", True) | 89 | link_name = d.getVar("IMAGE_LINK_NAME") |
90 | manifest_name = d.getVar("CVE_CHECK_MANIFEST", True) | 90 | manifest_name = d.getVar("CVE_CHECK_MANIFEST") |
91 | cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE", True) | 91 | cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE") |
92 | 92 | ||
93 | shutil.copyfile(cve_tmp_file, manifest_name) | 93 | shutil.copyfile(cve_tmp_file, manifest_name) |
94 | 94 | ||
@@ -101,7 +101,7 @@ python cve_check_write_rootfs_manifest () { | |||
101 | bb.plain("Image CVE report stored in: %s" % manifest_name) | 101 | bb.plain("Image CVE report stored in: %s" % manifest_name) |
102 | } | 102 | } |
103 | 103 | ||
104 | ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST', True) == '1' else ''}" | 104 | ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
105 | 105 | ||
106 | def get_patches_cves(d): | 106 | def get_patches_cves(d): |
107 | """ | 107 | """ |
@@ -110,7 +110,7 @@ def get_patches_cves(d): | |||
110 | 110 | ||
111 | import re | 111 | import re |
112 | 112 | ||
113 | pn = d.getVar("PN", True) | 113 | pn = d.getVar("PN") |
114 | cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") | 114 | cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") |
115 | patched_cves = set() | 115 | patched_cves = set() |
116 | bb.debug(2, "Looking for patches that solve CVEs for %s" % pn) | 116 | bb.debug(2, "Looking for patches that solve CVEs for %s" % pn) |
@@ -149,15 +149,15 @@ def check_cves(d, patched_cves): | |||
149 | cves_patched = [] | 149 | cves_patched = [] |
150 | cves_unpatched = [] | 150 | cves_unpatched = [] |
151 | bpn = d.getVar("CVE_PRODUCT") | 151 | bpn = d.getVar("CVE_PRODUCT") |
152 | pv = d.getVar("PV", True).split("git+")[0] | 152 | pv = d.getVar("PV").split("git+")[0] |
153 | cves = " ".join(patched_cves) | 153 | cves = " ".join(patched_cves) |
154 | cve_db_dir = d.getVar("CVE_CHECK_DB_DIR", True) | 154 | cve_db_dir = d.getVar("CVE_CHECK_DB_DIR") |
155 | cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST", True)) | 155 | cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST")) |
156 | cve_cmd = "cve-check-tool" | 156 | cve_cmd = "cve-check-tool" |
157 | cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir] | 157 | cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir] |
158 | 158 | ||
159 | # If the recipe has been whitelisted we return empty lists | 159 | # If the recipe has been whitelisted we return empty lists |
160 | if d.getVar("PN", True) in d.getVar("CVE_CHECK_PN_WHITELIST", True).split(): | 160 | if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split(): |
161 | bb.note("Recipe has been whitelisted, skipping check") | 161 | bb.note("Recipe has been whitelisted, skipping check") |
162 | return ([], []) | 162 | return ([], []) |
163 | 163 | ||
@@ -210,7 +210,7 @@ def get_cve_info(d, cves): | |||
210 | from pysqlite2 import dbapi2 as sqlite3 | 210 | from pysqlite2 import dbapi2 as sqlite3 |
211 | 211 | ||
212 | cve_data = {} | 212 | cve_data = {} |
213 | db_file = d.getVar("CVE_CHECK_DB_FILE", True) | 213 | db_file = d.getVar("CVE_CHECK_DB_FILE") |
214 | placeholder = ",".join("?" * len(cves)) | 214 | placeholder = ",".join("?" * len(cves)) |
215 | query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder | 215 | query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder |
216 | conn = sqlite3.connect(db_file) | 216 | conn = sqlite3.connect(db_file) |
@@ -231,15 +231,15 @@ def cve_write_data(d, patched, unpatched, cve_data): | |||
231 | CVE manifest if enabled. | 231 | CVE manifest if enabled. |
232 | """ | 232 | """ |
233 | 233 | ||
234 | cve_file = d.getVar("CVE_CHECK_LOCAL_FILE", True) | 234 | cve_file = d.getVar("CVE_CHECK_LOCAL_FILE") |
235 | nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId=" | 235 | nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId=" |
236 | write_string = "" | 236 | write_string = "" |
237 | first_alert = True | 237 | first_alert = True |
238 | bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR", True)) | 238 | bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR")) |
239 | 239 | ||
240 | for cve in sorted(cve_data): | 240 | for cve in sorted(cve_data): |
241 | write_string += "PACKAGE NAME: %s\n" % d.getVar("PN", True) | 241 | write_string += "PACKAGE NAME: %s\n" % d.getVar("PN") |
242 | write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV", True) | 242 | write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV") |
243 | write_string += "CVE: %s\n" % cve | 243 | write_string += "CVE: %s\n" % cve |
244 | if cve in patched: | 244 | if cve in patched: |
245 | write_string += "CVE STATUS: Patched\n" | 245 | write_string += "CVE STATUS: Patched\n" |
@@ -257,13 +257,13 @@ def cve_write_data(d, patched, unpatched, cve_data): | |||
257 | bb.note("Writing file %s with CVE information" % cve_file) | 257 | bb.note("Writing file %s with CVE information" % cve_file) |
258 | f.write(write_string) | 258 | f.write(write_string) |
259 | 259 | ||
260 | if d.getVar("CVE_CHECK_COPY_FILES", True) == "1": | 260 | if d.getVar("CVE_CHECK_COPY_FILES") == "1": |
261 | cve_dir = d.getVar("CVE_CHECK_DIR", True) | 261 | cve_dir = d.getVar("CVE_CHECK_DIR") |
262 | bb.utils.mkdirhier(cve_dir) | 262 | bb.utils.mkdirhier(cve_dir) |
263 | deploy_file = os.path.join(cve_dir, d.getVar("PN", True)) | 263 | deploy_file = os.path.join(cve_dir, d.getVar("PN")) |
264 | with open(deploy_file, "w") as f: | 264 | with open(deploy_file, "w") as f: |
265 | f.write(write_string) | 265 | f.write(write_string) |
266 | 266 | ||
267 | if d.getVar("CVE_CHECK_CREATE_MANIFEST", True) == "1": | 267 | if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1": |
268 | with open(d.getVar("CVE_CHECK_TMP_FILE", True), "a") as f: | 268 | with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f: |
269 | f.write("%s" % write_string) | 269 | f.write("%s" % write_string) |
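get_cve_info() assembles its IN clause from one "?" placeholder per CVE and lets sqlite3 bind the values, so the IDs never need quoting inside the SQL string. A self-contained sketch of the same technique, using an in-memory database with a toy stand-in for the NVD schema:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE NVD (id TEXT, summary TEXT)")  # toy stand-in schema
    conn.execute("INSERT INTO NVD VALUES ('CVE-2016-0001', 'example entry')")

    cves = ["CVE-2016-0001", "CVE-2016-0002"]
    placeholder = ",".join("?" * len(cves))             # -> "?,?"
    query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder
    for row in conn.execute(query, cves):               # values bound by the driver
        print(row)                                      # ('CVE-2016-0001', 'example entry')
    conn.close()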
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass index be7cacc..8124558 100644 --- a/meta/classes/debian.bbclass +++ b/meta/classes/debian.bbclass | |||
@@ -20,17 +20,17 @@ do_package_write_tar[rdeptask] = "${DEBIANRDEP}" | |||
20 | do_package_write_rpm[rdeptask] = "${DEBIANRDEP}" | 20 | do_package_write_rpm[rdeptask] = "${DEBIANRDEP}" |
21 | 21 | ||
22 | python () { | 22 | python () { |
23 | if not d.getVar("PACKAGES", True): | 23 | if not d.getVar("PACKAGES"): |
24 | d.setVar("DEBIANRDEP", "") | 24 | d.setVar("DEBIANRDEP", "") |
25 | } | 25 | } |
26 | 26 | ||
27 | python debian_package_name_hook () { | 27 | python debian_package_name_hook () { |
28 | import glob, copy, stat, errno, re | 28 | import glob, copy, stat, errno, re |
29 | 29 | ||
30 | pkgdest = d.getVar('PKGDEST', True) | 30 | pkgdest = d.getVar('PKGDEST') |
31 | packages = d.getVar('PACKAGES', True) | 31 | packages = d.getVar('PACKAGES') |
32 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") | 32 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir")) + "$") |
33 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") | 33 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir")) + "$") |
34 | so_re = re.compile("lib.*\.so") | 34 | so_re = re.compile("lib.*\.so") |
35 | 35 | ||
36 | def socrunch(s): | 36 | def socrunch(s): |
@@ -53,11 +53,11 @@ python debian_package_name_hook () { | |||
53 | return (s[stat.ST_MODE] & stat.S_IEXEC) | 53 | return (s[stat.ST_MODE] & stat.S_IEXEC) |
54 | 54 | ||
55 | def add_rprovides(pkg, d): | 55 | def add_rprovides(pkg, d): |
56 | newpkg = d.getVar('PKG_' + pkg, True) | 56 | newpkg = d.getVar('PKG_' + pkg) |
57 | if newpkg and newpkg != pkg: | 57 | if newpkg and newpkg != pkg: |
58 | provs = (d.getVar('RPROVIDES_' + pkg, True) or "").split() | 58 | provs = (d.getVar('RPROVIDES_' + pkg) or "").split() |
59 | if pkg not in provs: | 59 | if pkg not in provs: |
60 | d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV", True) + ")") | 60 | d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")") |
61 | 61 | ||
62 | def auto_libname(packages, orig_pkg): | 62 | def auto_libname(packages, orig_pkg): |
63 | sonames = [] | 63 | sonames = [] |
@@ -70,7 +70,7 @@ python debian_package_name_hook () { | |||
70 | if lib_re.match(root): | 70 | if lib_re.match(root): |
71 | has_libs = 1 | 71 | has_libs = 1 |
72 | if so_re.match(os.path.basename(file)): | 72 | if so_re.match(os.path.basename(file)): |
73 | cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + file + " 2>/dev/null" | 73 | cmd = (d.getVar('TARGET_PREFIX') or "") + "objdump -p " + file + " 2>/dev/null" |
74 | fd = os.popen(cmd) | 74 | fd = os.popen(cmd) |
75 | lines = fd.readlines() | 75 | lines = fd.readlines() |
76 | fd.close() | 76 | fd.close() |
@@ -84,7 +84,7 @@ python debian_package_name_hook () { | |||
84 | if len(sonames) == 1: | 84 | if len(sonames) == 1: |
85 | soname = sonames[0] | 85 | soname = sonames[0] |
86 | elif len(sonames) > 1: | 86 | elif len(sonames) > 1: |
87 | lead = d.getVar('LEAD_SONAME', True) | 87 | lead = d.getVar('LEAD_SONAME') |
88 | if lead: | 88 | if lead: |
89 | r = re.compile(lead) | 89 | r = re.compile(lead) |
90 | filtered = [] | 90 | filtered = [] |
@@ -115,7 +115,7 @@ python debian_package_name_hook () { | |||
115 | newpkg = pkgname | 115 | newpkg = pkgname |
116 | else: | 116 | else: |
117 | newpkg = pkg.replace(orig_pkg, devname, 1) | 117 | newpkg = pkg.replace(orig_pkg, devname, 1) |
118 | mlpre=d.getVar('MLPREFIX', True) | 118 | mlpre=d.getVar('MLPREFIX') |
119 | if mlpre: | 119 | if mlpre: |
120 | if not newpkg.find(mlpre) == 0: | 120 | if not newpkg.find(mlpre) == 0: |
121 | newpkg = mlpre + newpkg | 121 | newpkg = mlpre + newpkg |
@@ -131,7 +131,7 @@ python debian_package_name_hook () { | |||
131 | # and later | 131 | # and later |
132 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw | 132 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw |
133 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 | 133 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 |
134 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): | 134 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True): |
135 | auto_libname(packages, pkg) | 135 | auto_libname(packages, pkg) |
136 | } | 136 | } |
137 | 137 | ||
diff --git a/meta/classes/devshell.bbclass b/meta/classes/devshell.bbclass index be71aff..864ace4 100644 --- a/meta/classes/devshell.bbclass +++ b/meta/classes/devshell.bbclass | |||
@@ -5,14 +5,14 @@ DEVSHELL = "${SHELL}" | |||
5 | python do_devshell () { | 5 | python do_devshell () { |
6 | if d.getVarFlag("do_devshell", "manualfakeroot", True): | 6 | if d.getVarFlag("do_devshell", "manualfakeroot", True): |
7 | d.prependVar("DEVSHELL", "pseudo ") | 7 | d.prependVar("DEVSHELL", "pseudo ") |
8 | fakeenv = d.getVar("FAKEROOTENV", True).split() | 8 | fakeenv = d.getVar("FAKEROOTENV").split() |
9 | for f in fakeenv: | 9 | for f in fakeenv: |
10 | k = f.split("=") | 10 | k = f.split("=") |
11 | d.setVar(k[0], k[1]) | 11 | d.setVar(k[0], k[1]) |
12 | d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0]) | 12 | d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0]) |
13 | d.delVarFlag("do_devshell", "fakeroot") | 13 | d.delVarFlag("do_devshell", "fakeroot") |
14 | 14 | ||
15 | oe_terminal(d.getVar('DEVSHELL', True), 'OpenEmbedded Developer Shell', d) | 15 | oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d) |
16 | } | 16 | } |
17 | 17 | ||
18 | addtask devshell after do_patch | 18 | addtask devshell after do_patch |
@@ -82,7 +82,7 @@ def devpyshell(d): | |||
82 | more = False | 82 | more = False |
83 | 83 | ||
84 | i = code.InteractiveInterpreter(locals=_context) | 84 | i = code.InteractiveInterpreter(locals=_context) |
85 | print("OE PyShell (PN = %s)\n" % d.getVar("PN", True)) | 85 | print("OE PyShell (PN = %s)\n" % d.getVar("PN")) |
86 | 86 | ||
87 | def prompt(more): | 87 | def prompt(more): |
88 | if more: | 88 | if more: |
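One subtlety in do_devshell() above: each FAKEROOTENV entry is split on "=" and only k[0]/k[1] are used, which silently truncates a value that itself contains "=". A defensive variant caps the split; a minimal sketch with made-up sample values:

    # FAKEROOTENV is a space-separated list of KEY=VALUE assignments.
    fakeenv = "PSEUDO_PREFIX=/usr PSEUDO_OPTS=a=b".split()
    for entry in fakeenv:
        key, value = entry.split("=", 1)   # maxsplit=1 keeps '=' inside the value
        print(key, value)                  # -> PSEUDO_PREFIX /usr, then PSEUDO_OPTS a=b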
diff --git a/meta/classes/distro_features_check.bbclass b/meta/classes/distro_features_check.bbclass index 7e91dbc..e74d3c0 100644 --- a/meta/classes/distro_features_check.bbclass +++ b/meta/classes/distro_features_check.bbclass | |||
@@ -11,15 +11,15 @@ | |||
11 | 11 | ||
12 | python () { | 12 | python () { |
13 | # Assume at least one var is set. | 13 | # Assume at least one var is set. |
14 | distro_features = (d.getVar('DISTRO_FEATURES', True) or "").split() | 14 | distro_features = (d.getVar('DISTRO_FEATURES') or "").split() |
15 | 15 | ||
16 | any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES', True) | 16 | any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES') |
17 | if any_of_distro_features: | 17 | if any_of_distro_features: |
18 | any_of_distro_features = any_of_distro_features.split() | 18 | any_of_distro_features = any_of_distro_features.split() |
19 | if set.isdisjoint(set(any_of_distro_features),set(distro_features)): | 19 | if set.isdisjoint(set(any_of_distro_features),set(distro_features)): |
20 | raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features) | 20 | raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features) |
21 | 21 | ||
22 | required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES', True) | 22 | required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES') |
23 | if required_distro_features: | 23 | if required_distro_features: |
24 | required_distro_features = required_distro_features.split() | 24 | required_distro_features = required_distro_features.split() |
25 | for f in required_distro_features: | 25 | for f in required_distro_features: |
@@ -28,7 +28,7 @@ python () { | |||
28 | else: | 28 | else: |
29 | raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f) | 29 | raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f) |
30 | 30 | ||
31 | conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES', True) | 31 | conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES') |
32 | if conflict_distro_features: | 32 | if conflict_distro_features: |
33 | conflict_distro_features = conflict_distro_features.split() | 33 | conflict_distro_features = conflict_distro_features.split() |
34 | for f in conflict_distro_features: | 34 | for f in conflict_distro_features: |
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass index fbb7402..5b3a3e0 100644 --- a/meta/classes/distrodata.bbclass +++ b/meta/classes/distrodata.bbclass | |||
@@ -25,75 +25,75 @@ addtask distrodata_np | |||
25 | do_distrodata_np[nostamp] = "1" | 25 | do_distrodata_np[nostamp] = "1" |
26 | python do_distrodata_np() { | 26 | python do_distrodata_np() { |
27 | localdata = bb.data.createCopy(d) | 27 | localdata = bb.data.createCopy(d) |
28 | pn = d.getVar("PN", True) | 28 | pn = d.getVar("PN") |
29 | bb.note("Package Name: %s" % pn) | 29 | bb.note("Package Name: %s" % pn) |
30 | 30 | ||
31 | import oe.distro_check as dist_check | 31 | import oe.distro_check as dist_check |
32 | tmpdir = d.getVar('TMPDIR', True) | 32 | tmpdir = d.getVar('TMPDIR') |
33 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 33 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
34 | datetime = localdata.getVar('DATETIME', True) | 34 | datetime = localdata.getVar('DATETIME') |
35 | dist_check.update_distro_data(distro_check_dir, datetime, localdata) | 35 | dist_check.update_distro_data(distro_check_dir, datetime, localdata) |
36 | 36 | ||
37 | if pn.find("-native") != -1: | 37 | if pn.find("-native") != -1: |
38 | pnstripped = pn.split("-native") | 38 | pnstripped = pn.split("-native") |
39 | bb.note("Native Split: %s" % pnstripped) | 39 | bb.note("Native Split: %s" % pnstripped) |
40 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 40 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
41 | bb.data.update_data(localdata) | 41 | bb.data.update_data(localdata) |
42 | 42 | ||
43 | if pn.find("-cross") != -1: | 43 | if pn.find("-cross") != -1: |
44 | pnstripped = pn.split("-cross") | 44 | pnstripped = pn.split("-cross") |
45 | bb.note("cross Split: %s" % pnstripped) | 45 | bb.note("cross Split: %s" % pnstripped) |
46 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 46 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
47 | bb.data.update_data(localdata) | 47 | bb.data.update_data(localdata) |
48 | 48 | ||
49 | if pn.find("-crosssdk") != -1: | 49 | if pn.find("-crosssdk") != -1: |
50 | pnstripped = pn.split("-crosssdk") | 50 | pnstripped = pn.split("-crosssdk") |
51 | bb.note("cross Split: %s" % pnstripped) | 51 | bb.note("cross Split: %s" % pnstripped) |
52 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 52 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
53 | bb.data.update_data(localdata) | 53 | bb.data.update_data(localdata) |
54 | 54 | ||
55 | if pn.startswith("nativesdk-"): | 55 | if pn.startswith("nativesdk-"): |
56 | pnstripped = pn.replace("nativesdk-", "") | 56 | pnstripped = pn.replace("nativesdk-", "") |
57 | bb.note("NativeSDK Split: %s" % pnstripped) | 57 | bb.note("NativeSDK Split: %s" % pnstripped) |
58 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) | 58 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) |
59 | bb.data.update_data(localdata) | 59 | bb.data.update_data(localdata) |
60 | 60 | ||
61 | 61 | ||
62 | if pn.find("-initial") != -1: | 62 | if pn.find("-initial") != -1: |
63 | pnstripped = pn.split("-initial") | 63 | pnstripped = pn.split("-initial") |
64 | bb.note("initial Split: %s" % pnstripped) | 64 | bb.note("initial Split: %s" % pnstripped) |
65 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 65 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
66 | bb.data.update_data(localdata) | 66 | bb.data.update_data(localdata) |
67 | 67 | ||
68 | """generate package information from .bb file""" | 68 | """generate package information from .bb file""" |
69 | pname = localdata.getVar('PN', True) | 69 | pname = localdata.getVar('PN') |
70 | pcurver = localdata.getVar('PV', True) | 70 | pcurver = localdata.getVar('PV') |
71 | pdesc = localdata.getVar('DESCRIPTION', True) | 71 | pdesc = localdata.getVar('DESCRIPTION') |
72 | if pdesc is not None: | 72 | if pdesc is not None: |
73 | pdesc = pdesc.replace(',','') | 73 | pdesc = pdesc.replace(',','') |
74 | pdesc = pdesc.replace('\n','') | 74 | pdesc = pdesc.replace('\n','') |
75 | 75 | ||
76 | pgrp = localdata.getVar('SECTION', True) | 76 | pgrp = localdata.getVar('SECTION') |
77 | plicense = localdata.getVar('LICENSE', True).replace(',','_') | 77 | plicense = localdata.getVar('LICENSE').replace(',','_') |
78 | 78 | ||
79 | rstatus = localdata.getVar('RECIPE_COLOR', True) | 79 | rstatus = localdata.getVar('RECIPE_COLOR') |
80 | if rstatus is not None: | 80 | if rstatus is not None: |
81 | rstatus = rstatus.replace(',','') | 81 | rstatus = rstatus.replace(',','') |
82 | 82 | ||
83 | pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) | 83 | pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION') |
84 | if pcurver == pupver: | 84 | if pcurver == pupver: |
85 | vermatch="1" | 85 | vermatch="1" |
86 | else: | 86 | else: |
87 | vermatch="0" | 87 | vermatch="0" |
88 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) | 88 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON') |
89 | if noupdate_reason is None: | 89 | if noupdate_reason is None: |
90 | noupdate="0" | 90 | noupdate="0" |
91 | else: | 91 | else: |
92 | noupdate="1" | 92 | noupdate="1" |
93 | noupdate_reason = noupdate_reason.replace(',','') | 93 | noupdate_reason = noupdate_reason.replace(',','') |
94 | 94 | ||
95 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) | 95 | maintainer = localdata.getVar('RECIPE_MAINTAINER') |
96 | rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) | 96 | rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE') |
97 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) | 97 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) |
98 | 98 | ||
99 | bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \ | 99 | bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \ |
@@ -109,80 +109,80 @@ addtask distrodata | |||
109 | do_distrodata[nostamp] = "1" | 109 | do_distrodata[nostamp] = "1" |
110 | python do_distrodata() { | 110 | python do_distrodata() { |
111 | import csv | 111 | import csv |
112 | logpath = d.getVar('LOG_DIR', True) | 112 | logpath = d.getVar('LOG_DIR') |
113 | bb.utils.mkdirhier(logpath) | 113 | bb.utils.mkdirhier(logpath) |
114 | logfile = os.path.join(logpath, "distrodata.csv") | 114 | logfile = os.path.join(logpath, "distrodata.csv") |
115 | 115 | ||
116 | import oe.distro_check as dist_check | 116 | import oe.distro_check as dist_check |
117 | localdata = bb.data.createCopy(d) | 117 | localdata = bb.data.createCopy(d) |
118 | tmpdir = d.getVar('TMPDIR', True) | 118 | tmpdir = d.getVar('TMPDIR') |
119 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 119 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
120 | datetime = localdata.getVar('DATETIME', True) | 120 | datetime = localdata.getVar('DATETIME') |
121 | dist_check.update_distro_data(distro_check_dir, datetime, localdata) | 121 | dist_check.update_distro_data(distro_check_dir, datetime, localdata) |
122 | 122 | ||
123 | pn = d.getVar("PN", True) | 123 | pn = d.getVar("PN") |
124 | bb.note("Package Name: %s" % pn) | 124 | bb.note("Package Name: %s" % pn) |
125 | 125 | ||
126 | if pn.find("-native") != -1: | 126 | if pn.find("-native") != -1: |
127 | pnstripped = pn.split("-native") | 127 | pnstripped = pn.split("-native") |
128 | bb.note("Native Split: %s" % pnstripped) | 128 | bb.note("Native Split: %s" % pnstripped) |
129 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 129 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
130 | bb.data.update_data(localdata) | 130 | bb.data.update_data(localdata) |
131 | 131 | ||
132 | if pn.startswith("nativesdk-"): | 132 | if pn.startswith("nativesdk-"): |
133 | pnstripped = pn.replace("nativesdk-", "") | 133 | pnstripped = pn.replace("nativesdk-", "") |
134 | bb.note("NativeSDK Split: %s" % pnstripped) | 134 | bb.note("NativeSDK Split: %s" % pnstripped) |
135 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) | 135 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) |
136 | bb.data.update_data(localdata) | 136 | bb.data.update_data(localdata) |
137 | 137 | ||
138 | if pn.find("-cross") != -1: | 138 | if pn.find("-cross") != -1: |
139 | pnstripped = pn.split("-cross") | 139 | pnstripped = pn.split("-cross") |
140 | bb.note("cross Split: %s" % pnstripped) | 140 | bb.note("cross Split: %s" % pnstripped) |
141 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 141 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
142 | bb.data.update_data(localdata) | 142 | bb.data.update_data(localdata) |
143 | 143 | ||
144 | if pn.find("-crosssdk") != -1: | 144 | if pn.find("-crosssdk") != -1: |
145 | pnstripped = pn.split("-crosssdk") | 145 | pnstripped = pn.split("-crosssdk") |
146 | bb.note("cross Split: %s" % pnstripped) | 146 | bb.note("cross Split: %s" % pnstripped) |
147 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 147 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
148 | bb.data.update_data(localdata) | 148 | bb.data.update_data(localdata) |
149 | 149 | ||
150 | if pn.find("-initial") != -1: | 150 | if pn.find("-initial") != -1: |
151 | pnstripped = pn.split("-initial") | 151 | pnstripped = pn.split("-initial") |
152 | bb.note("initial Split: %s" % pnstripped) | 152 | bb.note("initial Split: %s" % pnstripped) |
153 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 153 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
154 | bb.data.update_data(localdata) | 154 | bb.data.update_data(localdata) |
155 | 155 | ||
156 | """generate package information from .bb file""" | 156 | """generate package information from .bb file""" |
157 | pname = localdata.getVar('PN', True) | 157 | pname = localdata.getVar('PN') |
158 | pcurver = localdata.getVar('PV', True) | 158 | pcurver = localdata.getVar('PV') |
159 | pdesc = localdata.getVar('DESCRIPTION', True) | 159 | pdesc = localdata.getVar('DESCRIPTION') |
160 | if pdesc is not None: | 160 | if pdesc is not None: |
161 | pdesc = pdesc.replace(',','') | 161 | pdesc = pdesc.replace(',','') |
162 | pdesc = pdesc.replace('\n','') | 162 | pdesc = pdesc.replace('\n','') |
163 | 163 | ||
164 | pgrp = localdata.getVar('SECTION', True) | 164 | pgrp = localdata.getVar('SECTION') |
165 | plicense = localdata.getVar('LICENSE', True).replace(',','_') | 165 | plicense = localdata.getVar('LICENSE').replace(',','_') |
166 | 166 | ||
167 | rstatus = localdata.getVar('RECIPE_COLOR', True) | 167 | rstatus = localdata.getVar('RECIPE_COLOR') |
168 | if rstatus is not None: | 168 | if rstatus is not None: |
169 | rstatus = rstatus.replace(',','') | 169 | rstatus = rstatus.replace(',','') |
170 | 170 | ||
171 | pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) | 171 | pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION') |
172 | if pcurver == pupver: | 172 | if pcurver == pupver: |
173 | vermatch="1" | 173 | vermatch="1" |
174 | else: | 174 | else: |
175 | vermatch="0" | 175 | vermatch="0" |
176 | 176 | ||
177 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) | 177 | noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON') |
178 | if noupdate_reason is None: | 178 | if noupdate_reason is None: |
179 | noupdate="0" | 179 | noupdate="0" |
180 | else: | 180 | else: |
181 | noupdate="1" | 181 | noupdate="1" |
182 | noupdate_reason = noupdate_reason.replace(',','') | 182 | noupdate_reason = noupdate_reason.replace(',','') |
183 | 183 | ||
184 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) | 184 | maintainer = localdata.getVar('RECIPE_MAINTAINER') |
185 | rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) | 185 | rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE') |
186 | # do the comparison | 186 | # do the comparison |
187 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) | 187 | result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) |
188 | 188 | ||
@@ -272,60 +272,60 @@ python do_checkpkg() { | |||
272 | from bb.fetch2 import FetchError, NoMethodError, decodeurl | 272 | from bb.fetch2 import FetchError, NoMethodError, decodeurl |
273 | 273 | ||
274 | """first check whether a uri is provided""" | 274 | """first check whether a uri is provided""" |
275 | src_uri = (d.getVar('SRC_URI', True) or '').split() | 275 | src_uri = (d.getVar('SRC_URI') or '').split() |
276 | if src_uri: | 276 | if src_uri: |
277 | uri_type, _, _, _, _, _ = decodeurl(src_uri[0]) | 277 | uri_type, _, _, _, _, _ = decodeurl(src_uri[0]) |
278 | else: | 278 | else: |
279 | uri_type = "none" | 279 | uri_type = "none" |
280 | 280 | ||
281 | """initialize log files.""" | 281 | """initialize log files.""" |
282 | logpath = d.getVar('LOG_DIR', True) | 282 | logpath = d.getVar('LOG_DIR') |
283 | bb.utils.mkdirhier(logpath) | 283 | bb.utils.mkdirhier(logpath) |
284 | logfile = os.path.join(logpath, "checkpkg.csv") | 284 | logfile = os.path.join(logpath, "checkpkg.csv") |
285 | 285 | ||
286 | """generate package information from .bb file""" | 286 | """generate package information from .bb file""" |
287 | pname = d.getVar('PN', True) | 287 | pname = d.getVar('PN') |
288 | 288 | ||
289 | if pname.find("-native") != -1: | 289 | if pname.find("-native") != -1: |
290 | if d.getVar('BBCLASSEXTEND', True): | 290 | if d.getVar('BBCLASSEXTEND'): |
291 | return | 291 | return |
292 | pnstripped = pname.split("-native") | 292 | pnstripped = pname.split("-native") |
293 | bb.note("Native Split: %s" % pnstripped) | 293 | bb.note("Native Split: %s" % pnstripped) |
294 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 294 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
295 | bb.data.update_data(localdata) | 295 | bb.data.update_data(localdata) |
296 | 296 | ||
297 | if pname.startswith("nativesdk-"): | 297 | if pname.startswith("nativesdk-"): |
298 | if d.getVar('BBCLASSEXTEND', True): | 298 | if d.getVar('BBCLASSEXTEND'): |
299 | return | 299 | return |
300 | pnstripped = pname.replace("nativesdk-", "") | 300 | pnstripped = pname.replace("nativesdk-", "") |
301 | bb.note("NativeSDK Split: %s" % pnstripped) | 301 | bb.note("NativeSDK Split: %s" % pnstripped) |
302 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) | 302 | localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) |
303 | bb.data.update_data(localdata) | 303 | bb.data.update_data(localdata) |
304 | 304 | ||
305 | if pname.find("-cross") != -1: | 305 | if pname.find("-cross") != -1: |
306 | pnstripped = pname.split("-cross") | 306 | pnstripped = pname.split("-cross") |
307 | bb.note("cross Split: %s" % pnstripped) | 307 | bb.note("cross Split: %s" % pnstripped) |
308 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 308 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
309 | bb.data.update_data(localdata) | 309 | bb.data.update_data(localdata) |
310 | 310 | ||
311 | if pname.find("-initial") != -1: | 311 | if pname.find("-initial") != -1: |
312 | pnstripped = pname.split("-initial") | 312 | pnstripped = pname.split("-initial") |
313 | bb.note("initial Split: %s" % pnstripped) | 313 | bb.note("initial Split: %s" % pnstripped) |
314 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) | 314 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) |
315 | bb.data.update_data(localdata) | 315 | bb.data.update_data(localdata) |
316 | 316 | ||
317 | pdesc = localdata.getVar('DESCRIPTION', True) | 317 | pdesc = localdata.getVar('DESCRIPTION') |
318 | pgrp = localdata.getVar('SECTION', True) | 318 | pgrp = localdata.getVar('SECTION') |
319 | pversion = localdata.getVar('PV', True) | 319 | pversion = localdata.getVar('PV') |
320 | plicense = localdata.getVar('LICENSE', True) | 320 | plicense = localdata.getVar('LICENSE') |
321 | psection = localdata.getVar('SECTION', True) | 321 | psection = localdata.getVar('SECTION') |
322 | phome = localdata.getVar('HOMEPAGE', True) | 322 | phome = localdata.getVar('HOMEPAGE') |
323 | prelease = localdata.getVar('PR', True) | 323 | prelease = localdata.getVar('PR') |
324 | pdepends = localdata.getVar('DEPENDS', True) | 324 | pdepends = localdata.getVar('DEPENDS') |
325 | pbugtracker = localdata.getVar('BUGTRACKER', True) | 325 | pbugtracker = localdata.getVar('BUGTRACKER') |
326 | ppe = localdata.getVar('PE', True) | 326 | ppe = localdata.getVar('PE') |
327 | psrcuri = localdata.getVar('SRC_URI', True) | 327 | psrcuri = localdata.getVar('SRC_URI') |
328 | maintainer = localdata.getVar('RECIPE_MAINTAINER', True) | 328 | maintainer = localdata.getVar('RECIPE_MAINTAINER') |
329 | 329 | ||
330 | """ Get upstream version version """ | 330 | """ Get upstream version version """ |
331 | pupver = "" | 331 | pupver = "" |
@@ -362,7 +362,7 @@ python do_checkpkg() { | |||
362 | psrcuri = "none" | 362 | psrcuri = "none" |
363 | pdepends = "".join(pdepends.split("\t")) | 363 | pdepends = "".join(pdepends.split("\t")) |
364 | pdesc = "".join(pdesc.split("\t")) | 364 | pdesc = "".join(pdesc.split("\t")) |
365 | no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True) | 365 | no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON') |
366 | lf = bb.utils.lockfile("%s.lock" % logfile) | 366 | lf = bb.utils.lockfile("%s.lock" % logfile) |
367 | with open(logfile, "a") as f: | 367 | with open(logfile, "a") as f: |
368 | writer = csv.writer(f, delimiter='\t') | 368 | writer = csv.writer(f, delimiter='\t') |
@@ -401,12 +401,12 @@ python do_distro_check() { | |||
401 | 401 | ||
402 | localdata = bb.data.createCopy(d) | 402 | localdata = bb.data.createCopy(d) |
403 | bb.data.update_data(localdata) | 403 | bb.data.update_data(localdata) |
404 | tmpdir = d.getVar('TMPDIR', True) | 404 | tmpdir = d.getVar('TMPDIR') |
405 | distro_check_dir = os.path.join(tmpdir, "distro_check") | 405 | distro_check_dir = os.path.join(tmpdir, "distro_check") |
406 | logpath = d.getVar('LOG_DIR', True) | 406 | logpath = d.getVar('LOG_DIR') |
407 | bb.utils.mkdirhier(logpath) | 407 | bb.utils.mkdirhier(logpath) |
408 | result_file = os.path.join(logpath, "distrocheck.csv") | 408 | result_file = os.path.join(logpath, "distrocheck.csv") |
409 | datetime = localdata.getVar('DATETIME', True) | 409 | datetime = localdata.getVar('DATETIME') |
410 | dc.update_distro_data(distro_check_dir, datetime, localdata) | 410 | dc.update_distro_data(distro_check_dir, datetime, localdata) |
411 | 411 | ||
412 | # do the comparison | 412 | # do the comparison |
@@ -449,12 +449,12 @@ do_checklicense[nostamp] = "1" | |||
449 | python do_checklicense() { | 449 | python do_checklicense() { |
450 | import csv | 450 | import csv |
451 | import shutil | 451 | import shutil |
452 | logpath = d.getVar('LOG_DIR', True) | 452 | logpath = d.getVar('LOG_DIR') |
453 | bb.utils.mkdirhier(logpath) | 453 | bb.utils.mkdirhier(logpath) |
454 | pn = d.getVar('PN', True) | 454 | pn = d.getVar('PN') |
455 | logfile = os.path.join(logpath, "missinglicense.csv") | 455 | logfile = os.path.join(logpath, "missinglicense.csv") |
456 | generic_directory = d.getVar('COMMON_LICENSE_DIR', True) | 456 | generic_directory = d.getVar('COMMON_LICENSE_DIR') |
457 | license_types = d.getVar('LICENSE', True) | 457 | license_types = d.getVar('LICENSE') |
458 | for license_type in ((license_types.replace('+', '').replace('|', '&') | 458 | for license_type in ((license_types.replace('+', '').replace('|', '&') |
459 | .replace('(', '').replace(')', '').replace(';', '') | 459 | .replace('(', '').replace(')', '').replace(';', '') |
460 | .replace(',', '').replace(" ", "").split("&"))): | 460 | .replace(',', '').replace(" ", "").split("&"))): |
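
The hunks in this series are mechanical: BitBake's datastore now expands variables by default, so d.getVar(X, True) collapses to d.getVar(X), while d.getVar(X, False) still returns the raw, unexpanded value. A minimal sketch of the equivalence, using a hypothetical MockData stand-in (with a crude one-pass expansion) rather than the real bb.data_smart.DataSmart:

class MockData:
    """Hypothetical stand-in for bb.data_smart.DataSmart."""
    def __init__(self):
        self._vars = {}

    def setVar(self, var, value):
        self._vars[var] = value

    def getVar(self, var, expand=True):
        # expand now defaults to True, so the second argument is redundant
        value = self._vars.get(var)
        if value is not None and expand:
            for k, v in self._vars.items():
                value = value.replace('${%s}' % k, v)
        return value

d = MockData()
d.setVar('PN', 'distrodata')
d.setVar('WORKDIR', '/tmp/work/${PN}')
assert d.getVar('WORKDIR') == d.getVar('WORKDIR', True) == '/tmp/work/distrodata'
assert d.getVar('WORKDIR', False) == '/tmp/work/${PN}'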
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass index aa18e8b..9f398d7 100644 --- a/meta/classes/distutils-base.bbclass +++ b/meta/classes/distutils-base.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" | 1 | DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}" |
2 | RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" | 2 | RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" |
3 | 3 | ||
4 | inherit distutils-common-base pythonnative | 4 | inherit distutils-common-base pythonnative |
diff --git a/meta/classes/distutils3-base.bbclass b/meta/classes/distutils3-base.bbclass index 82ab6a3..7dbf07a 100644 --- a/meta/classes/distutils3-base.bbclass +++ b/meta/classes/distutils3-base.bbclass | |||
@@ -1,4 +1,4 @@ | |||
1 | DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" | 1 | DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}" |
2 | RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" | 2 | RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" |
3 | 3 | ||
4 | inherit distutils-common-base python3native | 4 | inherit distutils-common-base python3native |
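
Both distutils base classes rely on the same inline idiom: indexing a two-element list with a boolean, i.e. [value_if_false, value_if_true][condition]. A standalone sketch of how the DEPENDS expression above evaluates, with 'python3' assumed for ${PYTHON_PN}:

python_pn = 'python3'                       # assumed value of ${PYTHON_PN}
template = ['%s-native %s' % (python_pn, python_pn), '']

# False indexes element 0, True indexes element 1
assert template[('' == '')] == ''           # empty PACKAGES: no extra DEPENDS
assert template[('pkg-a pkg-b' == '')] == 'python3-native python3'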
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass index 31908c3..5ba6c34 100644 --- a/meta/classes/externalsrc.bbclass +++ b/meta/classes/externalsrc.bbclass | |||
@@ -28,34 +28,34 @@ SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch" | |||
28 | EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}" | 28 | EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}" |
29 | 29 | ||
30 | python () { | 30 | python () { |
31 | externalsrc = d.getVar('EXTERNALSRC', True) | 31 | externalsrc = d.getVar('EXTERNALSRC') |
32 | 32 | ||
33 | # If this is the base recipe and EXTERNALSRC is set for it or any of its | 33 | # If this is the base recipe and EXTERNALSRC is set for it or any of its |
34 | # derivatives, then enable BB_DONT_CACHE to force the recipe to always be | 34 | # derivatives, then enable BB_DONT_CACHE to force the recipe to always be |
35 | # re-parsed so that the file-checksums function for do_compile is run every | 35 | # re-parsed so that the file-checksums function for do_compile is run every |
36 | # time. | 36 | # time. |
37 | bpn = d.getVar('BPN', True) | 37 | bpn = d.getVar('BPN') |
38 | if bpn == d.getVar('PN', True): | 38 | if bpn == d.getVar('PN'): |
39 | classextend = (d.getVar('BBCLASSEXTEND', True) or '').split() | 39 | classextend = (d.getVar('BBCLASSEXTEND') or '').split() |
40 | if (externalsrc or | 40 | if (externalsrc or |
41 | ('native' in classextend and | 41 | ('native' in classextend and |
42 | d.getVar('EXTERNALSRC_pn-%s-native' % bpn, True)) or | 42 | d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or |
43 | ('nativesdk' in classextend and | 43 | ('nativesdk' in classextend and |
44 | d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn, True)) or | 44 | d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or |
45 | ('cross' in classextend and | 45 | ('cross' in classextend and |
46 | d.getVar('EXTERNALSRC_pn-%s-cross' % bpn, True))): | 46 | d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))): |
47 | d.setVar('BB_DONT_CACHE', '1') | 47 | d.setVar('BB_DONT_CACHE', '1') |
48 | 48 | ||
49 | if externalsrc: | 49 | if externalsrc: |
50 | d.setVar('S', externalsrc) | 50 | d.setVar('S', externalsrc) |
51 | externalsrcbuild = d.getVar('EXTERNALSRC_BUILD', True) | 51 | externalsrcbuild = d.getVar('EXTERNALSRC_BUILD') |
52 | if externalsrcbuild: | 52 | if externalsrcbuild: |
53 | d.setVar('B', externalsrcbuild) | 53 | d.setVar('B', externalsrcbuild) |
54 | else: | 54 | else: |
55 | d.setVar('B', '${WORKDIR}/${BPN}-${PV}/') | 55 | d.setVar('B', '${WORKDIR}/${BPN}-${PV}/') |
56 | 56 | ||
57 | local_srcuri = [] | 57 | local_srcuri = [] |
58 | fetch = bb.fetch2.Fetch((d.getVar('SRC_URI', True) or '').split(), d) | 58 | fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d) |
59 | for url in fetch.urls: | 59 | for url in fetch.urls: |
60 | url_data = fetch.ud[url] | 60 | url_data = fetch.ud[url] |
61 | parm = url_data.parm | 61 | parm = url_data.parm |
@@ -94,7 +94,7 @@ python () { | |||
94 | # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string | 94 | # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string |
95 | d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack']) | 95 | d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack']) |
96 | 96 | ||
97 | for task in d.getVar("SRCTREECOVEREDTASKS", True).split(): | 97 | for task in d.getVar("SRCTREECOVEREDTASKS").split(): |
98 | if local_srcuri and task in fetch_tasks: | 98 | if local_srcuri and task in fetch_tasks: |
99 | continue | 99 | continue |
100 | bb.build.deltask(task, d) | 100 | bb.build.deltask(task, d) |
@@ -106,13 +106,13 @@ python () { | |||
106 | d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}') | 106 | d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}') |
107 | 107 | ||
108 | # We don't want the workdir to go away | 108 | # We don't want the workdir to go away |
109 | d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN', True)) | 109 | d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN')) |
110 | 110 | ||
111 | # If B=S the same builddir is used even for different architectures. | 111 | # If B=S the same builddir is used even for different architectures. |
112 | # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that | 112 | # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that |
113 | # change of do_configure task hash is correctly detected and stamps are | 113 | # change of do_configure task hash is correctly detected and stamps are |
114 | # invalidated if e.g. MACHINE changes. | 114 | # invalidated if e.g. MACHINE changes. |
115 | if d.getVar('S', True) == d.getVar('B', True): | 115 | if d.getVar('S') == d.getVar('B'): |
116 | configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate' | 116 | configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate' |
117 | d.setVar('CONFIGURESTAMPFILE', configstamp) | 117 | d.setVar('CONFIGURESTAMPFILE', configstamp) |
118 | d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}') | 118 | d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}') |
@@ -120,10 +120,10 @@ python () { | |||
120 | 120 | ||
121 | python externalsrc_configure_prefunc() { | 121 | python externalsrc_configure_prefunc() { |
122 | # Create desired symlinks | 122 | # Create desired symlinks |
123 | symlinks = (d.getVar('EXTERNALSRC_SYMLINKS', True) or '').split() | 123 | symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split() |
124 | for symlink in symlinks: | 124 | for symlink in symlinks: |
125 | symsplit = symlink.split(':', 1) | 125 | symsplit = symlink.split(':', 1) |
126 | lnkfile = os.path.join(d.getVar('S', True), symsplit[0]) | 126 | lnkfile = os.path.join(d.getVar('S'), symsplit[0]) |
127 | target = d.expand(symsplit[1]) | 127 | target = d.expand(symsplit[1]) |
128 | if len(symsplit) > 1: | 128 | if len(symsplit) > 1: |
129 | if os.path.islink(lnkfile): | 129 | if os.path.islink(lnkfile): |
@@ -139,7 +139,7 @@ python externalsrc_configure_prefunc() { | |||
139 | 139 | ||
140 | python externalsrc_compile_prefunc() { | 140 | python externalsrc_compile_prefunc() { |
141 | # Make it obvious that this is happening, since forgetting about it could lead to much confusion | 141 | # Make it obvious that this is happening, since forgetting about it could lead to much confusion |
142 | bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True))) | 142 | bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC'))) |
143 | } | 143 | } |
144 | 144 | ||
145 | def srctree_hash_files(d): | 145 | def srctree_hash_files(d): |
@@ -147,7 +147,7 @@ def srctree_hash_files(d): | |||
147 | import subprocess | 147 | import subprocess |
148 | import tempfile | 148 | import tempfile |
149 | 149 | ||
150 | s_dir = d.getVar('EXTERNALSRC', True) | 150 | s_dir = d.getVar('EXTERNALSRC') |
151 | git_dir = os.path.join(s_dir, '.git') | 151 | git_dir = os.path.join(s_dir, '.git') |
152 | oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1') | 152 | oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1') |
153 | 153 | ||
@@ -165,7 +165,7 @@ def srctree_hash_files(d): | |||
165 | fobj.write(sha1) | 165 | fobj.write(sha1) |
166 | ret = oe_hash_file + ':True' | 166 | ret = oe_hash_file + ':True' |
167 | else: | 167 | else: |
168 | ret = d.getVar('EXTERNALSRC', True) + '/*:True' | 168 | ret = d.getVar('EXTERNALSRC') + '/*:True' |
169 | return ret | 169 | return ret |
170 | 170 | ||
171 | def srctree_configure_hash_files(d): | 171 | def srctree_configure_hash_files(d): |
@@ -173,7 +173,7 @@ def srctree_configure_hash_files(d): | |||
173 | Get the list of files that should trigger do_configure to re-execute, | 173 | Get the list of files that should trigger do_configure to re-execute, |
174 | based on the value of CONFIGURE_FILES | 174 | based on the value of CONFIGURE_FILES |
175 | """ | 175 | """ |
176 | in_files = (d.getVar('CONFIGURE_FILES', True) or '').split() | 176 | in_files = (d.getVar('CONFIGURE_FILES') or '').split() |
177 | out_items = [] | 177 | out_items = [] |
178 | search_files = [] | 178 | search_files = [] |
179 | for entry in in_files: | 179 | for entry in in_files: |
@@ -182,7 +182,7 @@ def srctree_configure_hash_files(d): | |||
182 | else: | 182 | else: |
183 | search_files.append(entry) | 183 | search_files.append(entry) |
184 | if search_files: | 184 | if search_files: |
185 | s_dir = d.getVar('EXTERNALSRC', True) | 185 | s_dir = d.getVar('EXTERNALSRC') |
186 | for root, _, files in os.walk(s_dir): | 186 | for root, _, files in os.walk(s_dir): |
187 | for f in files: | 187 | for f in files: |
188 | if f in search_files: | 188 | if f in search_files: |
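
A condensed sketch of the srctree_hash_files logic above: for a git-backed external tree, hash the working tree with a scratch index and record it in a marker file; otherwise fall back to matching every file. Names follow the diff except the scratch index, which is hypothetical; error handling and cleanup are omitted.

import os
import subprocess

def sketch_srctree_hash(s_dir):
    git_dir = os.path.join(s_dir, '.git')
    oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')
    if os.path.exists(git_dir):
        # stage everything into a scratch index so the user's index is untouched
        env = dict(os.environ, GIT_INDEX_FILE=os.path.join(git_dir, 'oe-scratch-index'))
        subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
        sha1 = subprocess.check_output(['git', 'write-tree'],
                                       cwd=s_dir, env=env).decode('utf-8').strip()
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        return oe_hash_file + ':True'
    # no git metadata: every file under EXTERNALSRC is part of the checksum
    return s_dir + '/*:True'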
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass index 852810e..402fc7d 100644 --- a/meta/classes/extrausers.bbclass +++ b/meta/classes/extrausers.bbclass | |||
@@ -15,7 +15,7 @@ | |||
15 | 15 | ||
16 | inherit useradd_base | 16 | inherit useradd_base |
17 | 17 | ||
18 | PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS', True))]}" | 18 | PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}" |
19 | 19 | ||
20 | # Image level user / group settings | 20 | # Image level user / group settings |
21 | ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;" | 21 | ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;" |
diff --git a/meta/classes/fontcache.bbclass b/meta/classes/fontcache.bbclass index 8ebdfc4..d047a79 100644 --- a/meta/classes/fontcache.bbclass +++ b/meta/classes/fontcache.bbclass | |||
@@ -30,26 +30,26 @@ fi | |||
30 | } | 30 | } |
31 | 31 | ||
32 | python () { | 32 | python () { |
33 | font_pkgs = d.getVar('FONT_PACKAGES', True).split() | 33 | font_pkgs = d.getVar('FONT_PACKAGES').split() |
34 | deps = d.getVar("FONT_EXTRA_RDEPENDS", True) | 34 | deps = d.getVar("FONT_EXTRA_RDEPENDS") |
35 | 35 | ||
36 | for pkg in font_pkgs: | 36 | for pkg in font_pkgs: |
37 | if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps) | 37 | if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps) |
38 | } | 38 | } |
39 | 39 | ||
40 | python add_fontcache_postinsts() { | 40 | python add_fontcache_postinsts() { |
41 | for pkg in d.getVar('FONT_PACKAGES', True).split(): | 41 | for pkg in d.getVar('FONT_PACKAGES').split(): |
42 | bb.note("adding fonts postinst and postrm scripts to %s" % pkg) | 42 | bb.note("adding fonts postinst and postrm scripts to %s" % pkg) |
43 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) | 43 | postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') |
44 | if not postinst: | 44 | if not postinst: |
45 | postinst = '#!/bin/sh\n' | 45 | postinst = '#!/bin/sh\n' |
46 | postinst += d.getVar('fontcache_common', True) | 46 | postinst += d.getVar('fontcache_common') |
47 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 47 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
48 | 48 | ||
49 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) | 49 | postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') |
50 | if not postrm: | 50 | if not postrm: |
51 | postrm = '#!/bin/sh\n' | 51 | postrm = '#!/bin/sh\n' |
52 | postrm += d.getVar('fontcache_common', True) | 52 | postrm += d.getVar('fontcache_common') |
53 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 53 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
54 | } | 54 | } |
55 | 55 | ||
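
fontcache here and the gconf, gsettings, gio and gtk cache classes below all repeat one pattern: fetch the per-package hook, fall back to an empty shell script, append the shared snippet, and store it back. A generic sketch (the helper name add_cache_hooks is hypothetical; note that fontcache and gsettings additionally fall back to the generic pkg_postinst/pkg_postrm, which the others skip):

def add_cache_hooks(d, pkg, snippet_var):
    # snippet_var names the shared shell fragment, e.g. 'fontcache_common'
    for hook in ('pkg_postinst', 'pkg_postrm'):
        script = d.getVar('%s_%s' % (hook, pkg)) or d.getVar(hook)
        if not script:
            script = '#!/bin/sh\n'        # start a fresh script
        script += d.getVar(snippet_var)   # append the cache-update snippet
        d.setVar('%s_%s' % (hook, pkg), script)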
diff --git a/meta/classes/fs-uuid.bbclass b/meta/classes/fs-uuid.bbclass index bd2613c..313c5a3 100644 --- a/meta/classes/fs-uuid.bbclass +++ b/meta/classes/fs-uuid.bbclass | |||
@@ -3,7 +3,7 @@ | |||
3 | # on ext file systems and depends on tune2fs. | 3 | # on ext file systems and depends on tune2fs. |
4 | def get_rootfs_uuid(d): | 4 | def get_rootfs_uuid(d): |
5 | import subprocess | 5 | import subprocess |
6 | rootfs = d.getVar('ROOTFS', True) | 6 | rootfs = d.getVar('ROOTFS') |
7 | output = subprocess.check_output(['tune2fs', '-l', rootfs]) | 7 | output = subprocess.check_output(['tune2fs', '-l', rootfs]) |
8 | for line in output.split('\n'): | 8 | for line in output.split('\n'): |
9 | if line.startswith('Filesystem UUID:'): | 9 | if line.startswith('Filesystem UUID:'): |
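
One caveat in get_rootfs_uuid above: on Python 3, subprocess.check_output() returns bytes, so output.split('\n') needs the output decoded first. A standalone sketch with the decode made explicit (the final return is an assumption, as the diff does not show it):

import subprocess

def get_rootfs_uuid_sketch(rootfs):
    output = subprocess.check_output(['tune2fs', '-l', rootfs]).decode('utf-8')
    for line in output.split('\n'):
        if line.startswith('Filesystem UUID:'):
            return line.split()[-1]       # the UUID is the last field
    return None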
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass index d7afa72..d07bead 100644 --- a/meta/classes/gconf.bbclass +++ b/meta/classes/gconf.bbclass | |||
@@ -42,8 +42,8 @@ done | |||
42 | 42 | ||
43 | python populate_packages_append () { | 43 | python populate_packages_append () { |
44 | import re | 44 | import re |
45 | packages = d.getVar('PACKAGES', True).split() | 45 | packages = d.getVar('PACKAGES').split() |
46 | pkgdest = d.getVar('PKGDEST', True) | 46 | pkgdest = d.getVar('PKGDEST') |
47 | 47 | ||
48 | for pkg in packages: | 48 | for pkg in packages: |
49 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) | 49 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) |
@@ -56,15 +56,15 @@ python populate_packages_append () { | |||
56 | if schemas != []: | 56 | if schemas != []: |
57 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 57 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
58 | d.setVar('SCHEMA_FILES', " ".join(schemas)) | 58 | d.setVar('SCHEMA_FILES', " ".join(schemas)) |
59 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) | 59 | postinst = d.getVar('pkg_postinst_%s' % pkg) |
60 | if not postinst: | 60 | if not postinst: |
61 | postinst = '#!/bin/sh\n' | 61 | postinst = '#!/bin/sh\n' |
62 | postinst += d.getVar('gconf_postinst', True) | 62 | postinst += d.getVar('gconf_postinst') |
63 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 63 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
64 | prerm = d.getVar('pkg_prerm_%s' % pkg, True) | 64 | prerm = d.getVar('pkg_prerm_%s' % pkg) |
65 | if not prerm: | 65 | if not prerm: |
66 | prerm = '#!/bin/sh\n' | 66 | prerm = '#!/bin/sh\n' |
67 | prerm += d.getVar('gconf_prerm', True) | 67 | prerm += d.getVar('gconf_prerm') |
68 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 68 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
69 | d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf') | 69 | d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf') |
70 | } | 70 | } |
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass index 03b89b2..0be1424 100644 --- a/meta/classes/gettext.bbclass +++ b/meta/classes/gettext.bbclass | |||
@@ -1,15 +1,15 @@ | |||
1 | def gettext_dependencies(d): | 1 | def gettext_dependencies(d): |
2 | if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'): | 2 | if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'): |
3 | return "" | 3 | return "" |
4 | if d.getVar('USE_NLS', True) == 'no': | 4 | if d.getVar('USE_NLS') == 'no': |
5 | return "gettext-minimal-native" | 5 | return "gettext-minimal-native" |
6 | return d.getVar('DEPENDS_GETTEXT', False) | 6 | return d.getVar('DEPENDS_GETTEXT', False) |
7 | 7 | ||
8 | def gettext_oeconf(d): | 8 | def gettext_oeconf(d): |
9 | if d.getVar('USE_NLS', True) == 'no': | 9 | if d.getVar('USE_NLS') == 'no': |
10 | return '--disable-nls' | 10 | return '--disable-nls' |
11 | # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set | 11 | # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set |
12 | if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'): | 12 | if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'): |
13 | return '--disable-nls' | 13 | return '--disable-nls' |
14 | return "--enable-nls" | 14 | return "--enable-nls" |
15 | 15 | ||
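
The two helpers above reduce to a small decision table. A sketch that mirrors them side by side, with the oe.utils.inherits('cross-canadian') check folded into a boolean parameter and 'DEPENDS_GETTEXT' standing in for that variable's raw value:

def gettext_config_sketch(use_nls, inhibit_default_deps, cross_canadian):
    # returns (extra DEPENDS, configure switch)
    if inhibit_default_deps and not cross_canadian:
        return '', '--disable-nls'
    if use_nls == 'no':
        return 'gettext-minimal-native', '--disable-nls'
    return 'DEPENDS_GETTEXT', '--enable-nls'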
diff --git a/meta/classes/gio-module-cache.bbclass b/meta/classes/gio-module-cache.bbclass index 91461b1..39b7bef 100644 --- a/meta/classes/gio-module-cache.bbclass +++ b/meta/classes/gio-module-cache.bbclass | |||
@@ -17,21 +17,21 @@ fi | |||
17 | } | 17 | } |
18 | 18 | ||
19 | python populate_packages_append () { | 19 | python populate_packages_append () { |
20 | packages = d.getVar('GIO_MODULE_PACKAGES', True).split() | 20 | packages = d.getVar('GIO_MODULE_PACKAGES').split() |
21 | 21 | ||
22 | for pkg in packages: | 22 | for pkg in packages: |
23 | bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg) | 23 | bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg) |
24 | 24 | ||
25 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) | 25 | postinst = d.getVar('pkg_postinst_%s' % pkg) |
26 | if not postinst: | 26 | if not postinst: |
27 | postinst = '#!/bin/sh\n' | 27 | postinst = '#!/bin/sh\n' |
28 | postinst += d.getVar('gio_module_cache_common', True) | 28 | postinst += d.getVar('gio_module_cache_common') |
29 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 29 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
30 | 30 | ||
31 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) | 31 | postrm = d.getVar('pkg_postrm_%s' % pkg) |
32 | if not postrm: | 32 | if not postrm: |
33 | postrm = '#!/bin/sh\n' | 33 | postrm = '#!/bin/sh\n' |
34 | postrm += d.getVar('gio_module_cache_common', True) | 34 | postrm += d.getVar('gio_module_cache_common') |
35 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 35 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
36 | } | 36 | } |
37 | 37 | ||
diff --git a/meta/classes/grub-efi.bbclass b/meta/classes/grub-efi.bbclass index 17417ba..3dc9146 100644 --- a/meta/classes/grub-efi.bbclass +++ b/meta/classes/grub-efi.bbclass | |||
@@ -72,14 +72,14 @@ efi_hddimg_populate() { | |||
72 | python build_efi_cfg() { | 72 | python build_efi_cfg() { |
73 | import sys | 73 | import sys |
74 | 74 | ||
75 | workdir = d.getVar('WORKDIR', True) | 75 | workdir = d.getVar('WORKDIR') |
76 | if not workdir: | 76 | if not workdir: |
77 | bb.error("WORKDIR not defined, unable to package") | 77 | bb.error("WORKDIR not defined, unable to package") |
78 | return | 78 | return |
79 | 79 | ||
80 | gfxserial = d.getVar('GRUB_GFXSERIAL', True) or "" | 80 | gfxserial = d.getVar('GRUB_GFXSERIAL') or "" |
81 | 81 | ||
82 | labels = d.getVar('LABELS', True) | 82 | labels = d.getVar('LABELS') |
83 | if not labels: | 83 | if not labels: |
84 | bb.debug(1, "LABELS not defined, nothing to do") | 84 | bb.debug(1, "LABELS not defined, nothing to do") |
85 | return | 85 | return |
@@ -88,7 +88,7 @@ python build_efi_cfg() { | |||
88 | bb.debug(1, "No labels, nothing to do") | 88 | bb.debug(1, "No labels, nothing to do") |
89 | return | 89 | return |
90 | 90 | ||
91 | cfile = d.getVar('GRUB_CFG', True) | 91 | cfile = d.getVar('GRUB_CFG') |
92 | if not cfile: | 92 | if not cfile: |
93 | bb.fatal('Unable to read GRUB_CFG') | 93 | bb.fatal('Unable to read GRUB_CFG') |
94 | 94 | ||
@@ -99,33 +99,33 @@ python build_efi_cfg() { | |||
99 | 99 | ||
100 | cfgfile.write('# Automatically created by OE\n') | 100 | cfgfile.write('# Automatically created by OE\n') |
101 | 101 | ||
102 | opts = d.getVar('GRUB_OPTS', True) | 102 | opts = d.getVar('GRUB_OPTS') |
103 | if opts: | 103 | if opts: |
104 | for opt in opts.split(';'): | 104 | for opt in opts.split(';'): |
105 | cfgfile.write('%s\n' % opt) | 105 | cfgfile.write('%s\n' % opt) |
106 | 106 | ||
107 | cfgfile.write('default=%s\n' % (labels.split()[0])) | 107 | cfgfile.write('default=%s\n' % (labels.split()[0])) |
108 | 108 | ||
109 | timeout = d.getVar('GRUB_TIMEOUT', True) | 109 | timeout = d.getVar('GRUB_TIMEOUT') |
110 | if timeout: | 110 | if timeout: |
111 | cfgfile.write('timeout=%s\n' % timeout) | 111 | cfgfile.write('timeout=%s\n' % timeout) |
112 | else: | 112 | else: |
113 | cfgfile.write('timeout=50\n') | 113 | cfgfile.write('timeout=50\n') |
114 | 114 | ||
115 | root = d.getVar('GRUB_ROOT', True) | 115 | root = d.getVar('GRUB_ROOT') |
116 | if not root: | 116 | if not root: |
117 | bb.fatal('GRUB_ROOT not defined') | 117 | bb.fatal('GRUB_ROOT not defined') |
118 | 118 | ||
119 | if gfxserial == "1": | 119 | if gfxserial == "1": |
120 | btypes = [ [ " graphics console", "" ], | 120 | btypes = [ [ " graphics console", "" ], |
121 | [ " serial console", d.getVar('GRUB_SERIAL', True) or "" ] ] | 121 | [ " serial console", d.getVar('GRUB_SERIAL') or "" ] ] |
122 | else: | 122 | else: |
123 | btypes = [ [ "", "" ] ] | 123 | btypes = [ [ "", "" ] ] |
124 | 124 | ||
125 | for label in labels.split(): | 125 | for label in labels.split(): |
126 | localdata = d.createCopy() | 126 | localdata = d.createCopy() |
127 | 127 | ||
128 | overrides = localdata.getVar('OVERRIDES', True) | 128 | overrides = localdata.getVar('OVERRIDES') |
129 | if not overrides: | 129 | if not overrides: |
130 | bb.fatal('OVERRIDES not defined') | 130 | bb.fatal('OVERRIDES not defined') |
131 | 131 | ||
@@ -141,8 +141,8 @@ python build_efi_cfg() { | |||
141 | 141 | ||
142 | cfgfile.write(' %s' % replace_rootfs_uuid(d, root)) | 142 | cfgfile.write(' %s' % replace_rootfs_uuid(d, root)) |
143 | 143 | ||
144 | append = localdata.getVar('APPEND', True) | 144 | append = localdata.getVar('APPEND') |
145 | initrd = localdata.getVar('INITRD', True) | 145 | initrd = localdata.getVar('INITRD') |
146 | 146 | ||
147 | if append: | 147 | if append: |
148 | append = replace_rootfs_uuid(d, append) | 148 | append = replace_rootfs_uuid(d, append) |
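
The per-label loop in build_efi_cfg above copies the datastore and prepends the label to OVERRIDES so that APPEND and INITRD can differ per boot entry. A rough sketch of the lookup this buys (pick_for_label is a hypothetical simplification, not the real OVERRIDES machinery):

def pick_for_label(d, label, var):
    # prefer VAR_<label> if a label-specific value exists, else plain VAR
    return d.getVar('%s_%s' % (var, label)) or d.getVar(var)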
diff --git a/meta/classes/gsettings.bbclass b/meta/classes/gsettings.bbclass index dec5abc..e6d1c8a 100644 --- a/meta/classes/gsettings.bbclass +++ b/meta/classes/gsettings.bbclass | |||
@@ -18,20 +18,20 @@ gsettings_postinstrm () { | |||
18 | } | 18 | } |
19 | 19 | ||
20 | python populate_packages_append () { | 20 | python populate_packages_append () { |
21 | pkg = d.getVar('PN', True) | 21 | pkg = d.getVar('PN') |
22 | bb.note("adding gsettings postinst scripts to %s" % pkg) | 22 | bb.note("adding gsettings postinst scripts to %s" % pkg) |
23 | 23 | ||
24 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) | 24 | postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') |
25 | if not postinst: | 25 | if not postinst: |
26 | postinst = '#!/bin/sh\n' | 26 | postinst = '#!/bin/sh\n' |
27 | postinst += d.getVar('gsettings_postinstrm', True) | 27 | postinst += d.getVar('gsettings_postinstrm') |
28 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 28 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
29 | 29 | ||
30 | bb.note("adding gsettings postrm scripts to %s" % pkg) | 30 | bb.note("adding gsettings postrm scripts to %s" % pkg) |
31 | 31 | ||
32 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) | 32 | postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') |
33 | if not postrm: | 33 | if not postrm: |
34 | postrm = '#!/bin/sh\n' | 34 | postrm = '#!/bin/sh\n' |
35 | postrm += d.getVar('gsettings_postinstrm', True) | 35 | postrm += d.getVar('gsettings_postinstrm') |
36 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 36 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
37 | } | 37 | } |
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass index 0f1052b..c5d8d7c 100644 --- a/meta/classes/gtk-icon-cache.bbclass +++ b/meta/classes/gtk-icon-cache.bbclass | |||
@@ -35,11 +35,11 @@ fi | |||
35 | } | 35 | } |
36 | 36 | ||
37 | python populate_packages_append () { | 37 | python populate_packages_append () { |
38 | packages = d.getVar('PACKAGES', True).split() | 38 | packages = d.getVar('PACKAGES').split() |
39 | pkgdest = d.getVar('PKGDEST', True) | 39 | pkgdest = d.getVar('PKGDEST') |
40 | 40 | ||
41 | for pkg in packages: | 41 | for pkg in packages: |
42 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True)) | 42 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir')) |
43 | if not os.path.exists(icon_dir): | 43 | if not os.path.exists(icon_dir): |
44 | continue | 44 | continue |
45 | 45 | ||
@@ -49,16 +49,16 @@ python populate_packages_append () { | |||
49 | 49 | ||
50 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) | 50 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) |
51 | 51 | ||
52 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) | 52 | postinst = d.getVar('pkg_postinst_%s' % pkg) |
53 | if not postinst: | 53 | if not postinst: |
54 | postinst = '#!/bin/sh\n' | 54 | postinst = '#!/bin/sh\n' |
55 | postinst += d.getVar('gtk_icon_cache_postinst', True) | 55 | postinst += d.getVar('gtk_icon_cache_postinst') |
56 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 56 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
57 | 57 | ||
58 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) | 58 | postrm = d.getVar('pkg_postrm_%s' % pkg) |
59 | if not postrm: | 59 | if not postrm: |
60 | postrm = '#!/bin/sh\n' | 60 | postrm = '#!/bin/sh\n' |
61 | postrm += d.getVar('gtk_icon_cache_postrm', True) | 61 | postrm += d.getVar('gtk_icon_cache_postrm') |
62 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 62 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
63 | } | 63 | } |
64 | 64 | ||
diff --git a/meta/classes/gtk-immodules-cache.bbclass b/meta/classes/gtk-immodules-cache.bbclass index ebbc9de..baea959 100644 --- a/meta/classes/gtk-immodules-cache.bbclass +++ b/meta/classes/gtk-immodules-cache.bbclass | |||
@@ -61,21 +61,21 @@ fi | |||
61 | } | 61 | } |
62 | 62 | ||
63 | python populate_packages_append () { | 63 | python populate_packages_append () { |
64 | gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES', True).split() | 64 | gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split() |
65 | 65 | ||
66 | for pkg in gtkimmodules_pkgs: | 66 | for pkg in gtkimmodules_pkgs: |
67 | bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg) | 67 | bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg) |
68 | 68 | ||
69 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) | 69 | postinst = d.getVar('pkg_postinst_%s' % pkg) |
70 | if not postinst: | 70 | if not postinst: |
71 | postinst = '#!/bin/sh\n' | 71 | postinst = '#!/bin/sh\n' |
72 | postinst += d.getVar('gtk_immodule_cache_postinst', True) | 72 | postinst += d.getVar('gtk_immodule_cache_postinst') |
73 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 73 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
74 | 74 | ||
75 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) | 75 | postrm = d.getVar('pkg_postrm_%s' % pkg) |
76 | if not postrm: | 76 | if not postrm: |
77 | postrm = '#!/bin/sh\n' | 77 | postrm = '#!/bin/sh\n' |
78 | postrm += d.getVar('gtk_immodule_cache_postrm', True) | 78 | postrm += d.getVar('gtk_immodule_cache_postrm') |
79 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 79 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
80 | } | 80 | } |
81 | 81 | ||
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass index a837894..8a351cf 100644 --- a/meta/classes/icecc.bbclass +++ b/meta/classes/icecc.bbclass | |||
@@ -100,7 +100,7 @@ def use_icecc(bb,d): | |||
100 | if icecc_is_allarch(bb, d): | 100 | if icecc_is_allarch(bb, d): |
101 | return "no" | 101 | return "no" |
102 | 102 | ||
103 | pn = d.getVar('PN', True) | 103 | pn = d.getVar('PN') |
104 | 104 | ||
105 | system_class_blacklist = [] | 105 | system_class_blacklist = [] |
106 | user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split() | 106 | user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split() |
@@ -139,7 +139,7 @@ def use_icecc(bb,d): | |||
139 | return "yes" | 139 | return "yes" |
140 | 140 | ||
141 | def icecc_is_allarch(bb, d): | 141 | def icecc_is_allarch(bb, d): |
142 | return d.getVar("PACKAGE_ARCH", True) == "all" or bb.data.inherits_class('allarch', d) | 142 | return d.getVar("PACKAGE_ARCH") == "all" or bb.data.inherits_class('allarch', d) |
143 | 143 | ||
144 | def icecc_is_kernel(bb, d): | 144 | def icecc_is_kernel(bb, d): |
145 | return \ | 145 | return \ |
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass index da1edf7..3b79de5 100644 --- a/meta/classes/image-buildinfo.bbclass +++ b/meta/classes/image-buildinfo.bbclass | |||
@@ -18,7 +18,7 @@ def image_buildinfo_outputvars(vars, listvars, d): | |||
18 | listvars = listvars.split() | 18 | listvars = listvars.split() |
19 | ret = "" | 19 | ret = "" |
20 | for var in vars: | 20 | for var in vars: |
21 | value = d.getVar(var, True) or "" | 21 | value = d.getVar(var) or "" |
22 | if (d.getVarFlag(var, 'type', True) == "list"): | 22 | if (d.getVarFlag(var, 'type', True) == "list"): |
23 | value = oe.utils.squashspaces(value) | 23 | value = oe.utils.squashspaces(value) |
24 | ret += "%s = %s\n" % (var, value) | 24 | ret += "%s = %s\n" % (var, value) |
@@ -42,7 +42,7 @@ def get_layer_git_status(path): | |||
42 | 42 | ||
43 | # Returns layer revisions along with their respective status | 43 | # Returns layer revisions along with their respective status |
44 | def get_layer_revs(d): | 44 | def get_layer_revs(d): |
45 | layers = (d.getVar("BBLAYERS", True) or "").split() | 45 | layers = (d.getVar("BBLAYERS") or "").split() |
46 | medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \ | 46 | medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \ |
47 | base_get_metadata_git_branch(i, None).strip(), \ | 47 | base_get_metadata_git_branch(i, None).strip(), \ |
48 | base_get_metadata_git_revision(i, None), \ | 48 | base_get_metadata_git_revision(i, None), \ |
@@ -52,11 +52,11 @@ def get_layer_revs(d): | |||
52 | 52 | ||
53 | def buildinfo_target(d): | 53 | def buildinfo_target(d): |
54 | # Get context | 54 | # Get context |
55 | if d.getVar('BB_WORKERCONTEXT', True) != '1': | 55 | if d.getVar('BB_WORKERCONTEXT') != '1': |
56 | return "" | 56 | return "" |
57 | # Single and list variables to be read | 57 | # Single and list variables to be read |
58 | vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "") | 58 | vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "") |
59 | listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "") | 59 | listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "") |
60 | return image_buildinfo_outputvars(vars, listvars, d) | 60 | return image_buildinfo_outputvars(vars, listvars, d) |
61 | 61 | ||
62 | # Write build information to target filesystem | 62 | # Write build information to target filesystem |
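
A condensed sketch of image_buildinfo_outputvars above, with the getVarFlag type check simplified to a membership test and oe.utils.squashspaces approximated by a split/join:

def outputvars_sketch(d, vars, listvars):
    listvars = listvars.split()
    ret = ''
    for var in vars.split():
        value = d.getVar(var) or ''
        if var in listvars:
            value = ' '.join(value.split())   # squash runs of whitespace
        ret += '%s = %s\n' % (var, value)
    return ret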
diff --git a/meta/classes/image-live.bbclass b/meta/classes/image-live.bbclass index 4a634dc..a3d1b4e 100644 --- a/meta/classes/image-live.bbclass +++ b/meta/classes/image-live.bbclass | |||
@@ -51,8 +51,8 @@ IMAGE_TYPEDEP_hddimg = "ext4" | |||
51 | IMAGE_TYPES_MASKED += "live hddimg iso" | 51 | IMAGE_TYPES_MASKED += "live hddimg iso" |
52 | 52 | ||
53 | python() { | 53 | python() { |
54 | image_b = d.getVar('IMAGE_BASENAME', True) | 54 | image_b = d.getVar('IMAGE_BASENAME') |
55 | initrd_i = d.getVar('INITRD_IMAGE_LIVE', True) | 55 | initrd_i = d.getVar('INITRD_IMAGE_LIVE') |
56 | if image_b == initrd_i: | 56 | if image_b == initrd_i: |
57 | bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i) | 57 | bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i) |
58 | bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.') | 58 | bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.') |
@@ -264,9 +264,9 @@ build_hddimg() { | |||
264 | 264 | ||
265 | python do_bootimg() { | 265 | python do_bootimg() { |
266 | set_live_vm_vars(d, 'LIVE') | 266 | set_live_vm_vars(d, 'LIVE') |
267 | if d.getVar("PCBIOS", True) == "1": | 267 | if d.getVar("PCBIOS") == "1": |
268 | bb.build.exec_func('build_syslinux_cfg', d) | 268 | bb.build.exec_func('build_syslinux_cfg', d) |
269 | if d.getVar("EFI", True) == "1": | 269 | if d.getVar("EFI") == "1": |
270 | bb.build.exec_func('build_efi_cfg', d) | 270 | bb.build.exec_func('build_efi_cfg', d) |
271 | bb.build.exec_func('build_hddimg', d) | 271 | bb.build.exec_func('build_hddimg', d) |
272 | bb.build.exec_func('build_iso', d) | 272 | bb.build.exec_func('build_iso', d) |
diff --git a/meta/classes/image-vm.bbclass b/meta/classes/image-vm.bbclass index 2f35d6b..35c9244 100644 --- a/meta/classes/image-vm.bbclass +++ b/meta/classes/image-vm.bbclass | |||
@@ -112,9 +112,9 @@ build_boot_dd() { | |||
112 | python do_bootdirectdisk() { | 112 | python do_bootdirectdisk() { |
113 | validate_disk_signature(d) | 113 | validate_disk_signature(d) |
114 | set_live_vm_vars(d, 'VM') | 114 | set_live_vm_vars(d, 'VM') |
115 | if d.getVar("PCBIOS", True) == "1": | 115 | if d.getVar("PCBIOS") == "1": |
116 | bb.build.exec_func('build_syslinux_cfg', d) | 116 | bb.build.exec_func('build_syslinux_cfg', d) |
117 | if d.getVar("EFI", True) == "1": | 117 | if d.getVar("EFI") == "1": |
118 | bb.build.exec_func('build_efi_cfg', d) | 118 | bb.build.exec_func('build_efi_cfg', d) |
119 | bb.build.exec_func('build_boot_dd', d) | 119 | bb.build.exec_func('build_boot_dd', d) |
120 | } | 120 | } |
@@ -132,7 +132,7 @@ def generate_disk_signature(): | |||
132 | def validate_disk_signature(d): | 132 | def validate_disk_signature(d): |
133 | import re | 133 | import re |
134 | 134 | ||
135 | disk_signature = d.getVar("DISK_SIGNATURE", True) | 135 | disk_signature = d.getVar("DISK_SIGNATURE") |
136 | 136 | ||
137 | if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature): | 137 | if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature): |
138 | bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature) | 138 | bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature) |
@@ -158,11 +158,11 @@ create_qcow2_image () { | |||
158 | } | 158 | } |
159 | 159 | ||
160 | python do_vmimg() { | 160 | python do_vmimg() { |
161 | if 'vmdk' in d.getVar('IMAGE_FSTYPES', True): | 161 | if 'vmdk' in d.getVar('IMAGE_FSTYPES'): |
162 | bb.build.exec_func('create_vmdk_image', d) | 162 | bb.build.exec_func('create_vmdk_image', d) |
163 | if 'vdi' in d.getVar('IMAGE_FSTYPES', True): | 163 | if 'vdi' in d.getVar('IMAGE_FSTYPES'): |
164 | bb.build.exec_func('create_vdi_image', d) | 164 | bb.build.exec_func('create_vdi_image', d) |
165 | if 'qcow2' in d.getVar('IMAGE_FSTYPES', True): | 165 | if 'qcow2' in d.getVar('IMAGE_FSTYPES'): |
166 | bb.build.exec_func('create_qcow2_image', d) | 166 | bb.build.exec_func('create_qcow2_image', d) |
167 | } | 167 | } |
168 | 168 | ||
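
One subtlety in do_vmimg above: 'vmdk' in d.getVar('IMAGE_FSTYPES') is a substring test on the joined string, not a word match. A sketch of a word-exact alternative (the helper name is hypothetical):

def wants_fstype(d, fstype):
    # split first so a type merely containing the name cannot match
    return fstype in (d.getVar('IMAGE_FSTYPES') or '').split()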
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass index e63f6a3..28bff9e 100644 --- a/meta/classes/image.bbclass +++ b/meta/classes/image.bbclass | |||
@@ -2,7 +2,7 @@ inherit rootfs_${IMAGE_PKGTYPE} | |||
2 | 2 | ||
2 | # Only Linux SDKs support populate_sdk_ext; fall back to populate_sdk | 2 | # Only Linux SDKs support populate_sdk_ext; fall back to populate_sdk |
4 | # in the non-Linux SDK_OS case, such as mingw32 | 4 | # in the non-Linux SDK_OS case, such as mingw32 |
5 | SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS", True)]}" | 5 | SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}" |
6 | inherit ${SDKEXTCLASS} | 6 | inherit ${SDKEXTCLASS} |
7 | 7 | ||
8 | TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}" | 8 | TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}" |
@@ -133,7 +133,7 @@ def build_live(d): | |||
133 | if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg | 133 | if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg |
134 | d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d)) | 134 | d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d)) |
135 | d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d)) | 135 | d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d)) |
136 | if d.getVar('NOISO', True) == "0" or d.getVar('NOHDD', True) == "0": | 136 | if d.getVar('NOISO') == "0" or d.getVar('NOHDD') == "0": |
137 | return "image-live" | 137 | return "image-live" |
138 | return "" | 138 | return "" |
139 | return "image-live" | 139 | return "image-live" |
@@ -145,7 +145,7 @@ IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow | |||
145 | inherit ${IMAGE_TYPE_vm} | 145 | inherit ${IMAGE_TYPE_vm} |
146 | 146 | ||
147 | def build_uboot(d): | 147 | def build_uboot(d): |
148 | if 'u-boot' in (d.getVar('IMAGE_FSTYPES', True) or ''): | 148 | if 'u-boot' in (d.getVar('IMAGE_FSTYPES') or ''): |
149 | return "image_types_uboot" | 149 | return "image_types_uboot" |
150 | else: | 150 | else: |
151 | return "" | 151 | return "" |
@@ -158,7 +158,7 @@ python () { | |||
158 | d.appendVarFlag('do_rootfs', 'depends', deps) | 158 | d.appendVarFlag('do_rootfs', 'depends', deps) |
159 | 159 | ||
160 | deps = "" | 160 | deps = "" |
161 | for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split(): | 161 | for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split(): |
162 | deps += " %s:do_populate_sysroot" % dep | 162 | deps += " %s:do_populate_sysroot" % dep |
163 | d.appendVarFlag('do_build', 'depends', deps) | 163 | d.appendVarFlag('do_build', 'depends', deps) |
164 | 164 | ||
@@ -167,22 +167,22 @@ python () { | |||
167 | features = set(oe.data.typed_value('IMAGE_FEATURES', d)) | 167 | features = set(oe.data.typed_value('IMAGE_FEATURES', d)) |
168 | remain_features = features.copy() | 168 | remain_features = features.copy() |
169 | for feature in features: | 169 | for feature in features: |
170 | replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature, True) or "").split()) | 170 | replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature) or "").split()) |
171 | remain_features -= replaces | 171 | remain_features -= replaces |
172 | 172 | ||
173 | #Check for conflicting image features | 173 | #Check for conflicting image features |
174 | for feature in remain_features: | 174 | for feature in remain_features: |
175 | conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature, True) or "").split()) | 175 | conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature) or "").split()) |
176 | temp = conflicts & remain_features | 176 | temp = conflicts & remain_features |
177 | if temp: | 177 | if temp: |
178 | bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN', True), feature, ' '.join(list(temp)))) | 178 | bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN'), feature, ' '.join(list(temp)))) |
179 | 179 | ||
180 | d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features)))) | 180 | d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features)))) |
181 | 181 | ||
182 | check_image_features(d) | 182 | check_image_features(d) |
183 | initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or "" | 183 | initramfs_image = d.getVar('INITRAMFS_IMAGE') or "" |
184 | if initramfs_image != "": | 184 | if initramfs_image != "": |
185 | d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN', True)) | 185 | d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN')) |
186 | d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image) | 186 | d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image) |
187 | } | 187 | } |
188 | 188 | ||
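The feature fixup in the hunk above is worth seeing in isolation: replacement features are subtracted first, and only the survivors are checked for conflicts. A self-contained sketch over plain sets; the sample REPLACES entry mirrors the ssh-server one that image.bbclass itself defines:

    def resolve_features(features, replaces_map, conflicts_map):
        # Mirrors the anonymous python () fragment above: drop replaced
        # features first, then fail if two surviving features conflict.
        remain = set(features)
        for feature in features:
            remain -= set(replaces_map.get(feature, '').split())
        for feature in remain:
            clash = set(conflicts_map.get(feature, '').split()) & remain
            if clash:
                raise ValueError("conflicting IMAGE_FEATURES %s %s"
                                 % (feature, ' '.join(sorted(clash))))
        return ' '.join(sorted(remain))

    features = {'ssh-server-openssh', 'ssh-server-dropbear', 'debug-tweaks'}
    replaces = {'ssh-server-openssh': 'ssh-server-dropbear'}
    print(resolve_features(features, replaces, {}))
    # -> debug-tweaks ssh-server-openssh
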
@@ -194,7 +194,7 @@ IMAGE_POSTPROCESS_COMMAND ?= ""
 # some default locales
 IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
 
-LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"
+LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
 
 # Prefer image, but use the fallback files for lookups if the image ones
 # aren't yet available.
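
Unpacked from its ${@...} inline-Python shell, the LINGUAS_INSTALL expression is just a map over a space-separated list:

    image_linguas = "de-de fr-fr en-gb"   # stand-in for d.getVar('IMAGE_LINGUAS')
    linguas_install = " ".join("locale-base-%s" % s for s in image_linguas.split())
    assert linguas_install == "locale-base-de-de locale-base-fr-fr locale-base-en-gb"
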
@@ -229,20 +229,20 @@ fakeroot python do_rootfs () {
     progress_reporter.next_stage()
 
     # Handle package exclusions
-    excl_pkgs = d.getVar("PACKAGE_EXCLUDE", True).split()
-    inst_pkgs = d.getVar("PACKAGE_INSTALL", True).split()
-    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY", True).split()
+    excl_pkgs = d.getVar("PACKAGE_EXCLUDE").split()
+    inst_pkgs = d.getVar("PACKAGE_INSTALL").split()
+    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY").split()
 
     d.setVar('PACKAGE_INSTALL_ORIG', ' '.join(inst_pkgs))
     d.setVar('PACKAGE_INSTALL_ATTEMPTONLY', ' '.join(inst_attempt_pkgs))
 
     for pkg in excl_pkgs:
         if pkg in inst_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_pkgs.remove(pkg)
 
         if pkg in inst_attempt_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_attempt_pkgs.remove(pkg)
 
     d.setVar("PACKAGE_INSTALL", ' '.join(inst_pkgs))
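
The exclusion pass above is a plain filter with a warning per dropped package. Lifted out of the datastore, with warn standing in for bb.warn:

    def apply_exclusions(excl_pkgs, inst_pkgs, inst_attempt_pkgs, warn=print):
        # Same shape as the do_rootfs loop: anything in PACKAGE_EXCLUDE is
        # stripped from both install lists, loudly.
        for pkg in excl_pkgs:
            if pkg in inst_pkgs:
                warn("Package %s, set to be excluded, is in PACKAGE_INSTALL" % pkg)
                inst_pkgs.remove(pkg)
            if pkg in inst_attempt_pkgs:
                warn("Package %s, set to be excluded, is in PACKAGE_INSTALL_ATTEMPTONLY" % pkg)
                inst_attempt_pkgs.remove(pkg)
        return inst_pkgs, inst_attempt_pkgs

    inst, attempt = apply_exclusions(['foo'], ['foo', 'bar'], ['foo', 'baz'])
    assert inst == ['bar'] and attempt == ['baz']
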
@@ -252,7 +252,7 @@ fakeroot python do_rootfs () {
     # We have to delay the runtime_mapping_rename until just before rootfs runs
     # otherwise, the multilib renaming could step in and squash any fixups that
     # may have occurred.
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
     runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
     runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
@@ -275,7 +275,7 @@ addtask rootfs before do_build
 fakeroot python do_image () {
     from oe.utils import execute_pre_post_process
 
-    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND", True)
+    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND")
 
     execute_pre_post_process(d, pre_process_cmds)
 }
@@ -286,7 +286,7 @@ addtask do_image after do_rootfs before do_build
 fakeroot python do_image_complete () {
     from oe.utils import execute_pre_post_process
 
-    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND", True)
+    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND")
 
     execute_pre_post_process(d, post_process_cmds)
 }
@@ -309,7 +309,7 @@ addtask do_image_complete after do_image before do_build
 fakeroot python do_image_qa () {
     from oe.utils import ImageQAFailed
 
-    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS', True) or '').split()
+    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS') or '').split()
     qamsg = ""
 
     for cmd in qa_cmds:
@@ -324,7 +324,7 @@ fakeroot python do_image_qa () {
             qamsg = qamsg + '\n'
 
     if qamsg:
-        imgname = d.getVar('IMAGE_NAME', True)
+        imgname = d.getVar('IMAGE_NAME')
         bb.fatal("QA errors found whilst validating image: %s\n%s" % (imgname, qamsg))
 }
 addtask do_image_qa after do_image_complete before do_build
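
Note the control flow in do_image_qa: failures accumulate into qamsg and bb.fatal fires once at the end, so a single run reports every broken check. The loop body falls between these two hunks and is not shown; a guess at the overall shape, with a hypothetical check function:

    def run_image_qa(qa_cmds):
        # Accumulate-then-fail, as in do_image_qa; the real task catches
        # oe.utils.ImageQAFailed rather than bare Exception.
        qamsg = ""
        for cmd in qa_cmds:
            try:
                cmd()
            except Exception as e:
                qamsg += " %s\n" % e
        if qamsg:
            raise SystemExit("QA errors found whilst validating image:\n%s" % qamsg)

    def check_example():   # hypothetical QA check
        raise Exception("hypothetical QA failure")

    try:
        run_image_qa([check_example])
    except SystemExit as e:
        print(e)
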
@@ -334,17 +334,17 @@ addtask do_image_qa after do_image_complete before do_build
 # to tmp/sysroots/<machine>/imgdata/<image>.env
 #
 python do_rootfs_wicenv () {
-    wicvars = d.getVar('WICVARS', True)
+    wicvars = d.getVar('WICVARS')
     if not wicvars:
         return
 
-    stdir = d.getVar('STAGING_DIR_TARGET', True)
+    stdir = d.getVar('STAGING_DIR_TARGET')
     outdir = os.path.join(stdir, 'imgdata')
     bb.utils.mkdirhier(outdir)
-    basename = d.getVar('IMAGE_BASENAME', True)
+    basename = d.getVar('IMAGE_BASENAME')
     with open(os.path.join(outdir, basename) + '.env', 'w') as envf:
         for var in wicvars.split():
-            value = d.getVar(var, True)
+            value = d.getVar(var)
             if value:
                 envf.write('%s="%s"\n' % (var, value.strip()))
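
The wicenv writer boils down to serialising expanded variables as KEY="value" lines for wic to read back later. A filesystem-free sketch of the same format, with lookup standing in for d.getVar:

    import io

    def write_wic_env(wicvars, lookup, envf):
        # Unset variables are skipped, matching the "if value:" guard above.
        for var in wicvars.split():
            value = lookup(var)
            if value:
                envf.write('%s="%s"\n' % (var, value.strip()))

    buf = io.StringIO()
    write_wic_env('IMAGE_BASENAME MACHINE MISSING',
                  {'IMAGE_BASENAME': 'core-image-minimal', 'MACHINE': 'qemux86 '}.get,
                  buf)
    assert buf.getvalue() == 'IMAGE_BASENAME="core-image-minimal"\nMACHINE="qemux86"\n'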