author     Joshua Lock <joshua.g.lock@intel.com>                2016-12-14 21:13:04 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-12-16 10:23:23 +0000
commit     c4e2c59088765d1f1de7ec57cde91980f887c2ff (patch)
tree       a2fda8ac5916fb59a711e9220c2177008cca9347 /meta/classes/archiver.bbclass
parent     d5e67725ac11e3296cad104470931ffa16824b90 (diff)
download   poky-c4e2c59088765d1f1de7ec57cde91980f887c2ff.tar.gz
meta: remove True option to getVar calls
getVar() now expands by default, so remove the True
option from getVar() calls with a regex search and replace.
Search made with the following regex: getVar ?\(( ?[^,()]*), True\)
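For illustration only, a minimal sketch of how such a mechanical replace could be scripted in Python (a hypothetical helper, not necessarily the exact tool used for this commit); it applies the regex above to the files named on the command line:

    # strip_getvar_true.py (hypothetical name): drop the ", True" argument
    # from simple getVar calls, using the same regex as the commit message.
    import re
    import sys

    pattern = re.compile(r"getVar ?\(( ?[^,()]*), True\)")

    for path in sys.argv[1:]:
        with open(path) as f:
            text = f.read()
        # Rewrite getVar(X, True) -> getVar(X); other calls are left untouched.
        new_text = pattern.sub(r"getVar(\1)", text)
        if new_text != text:
            with open(path, "w") as f:
                f.write(new_text)

A pattern this simple intentionally skips nested or multi-line calls, which would need manual review.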
(From OE-Core rev: 7c552996597faaee2fbee185b250c0ee30ea3b5f)
Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
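In practical terms, with expansion now the default the first two calls below are equivalent, and callers that want the raw, unexpanded value have to ask for it explicitly. A minimal sketch, assuming the post-change getVar(var, expand=True) signature, run inside a BitBake python task or anonymous python block where the datastore d is available:

    pn_old = d.getVar('PN', True)            # old idiom: expansion requested explicitly
    pn_new = d.getVar('PN')                  # new idiom: expansion is the default
    raw = d.getVar('SRC_URI', expand=False)  # opt out of expansion when the literal value is needed

Note that getVarFlag calls, such as the ARCHIVER_MODE lookups left unchanged in the hunks below, were not converted by this commit.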
Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r--  meta/classes/archiver.bbclass  74
1 file changed, 37 insertions, 37 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 3543ca9c58..52959776c3 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}"
 
 
 python () {
-    pn = d.getVar('PN', True)
-    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
     if pn in assume_provided:
-        for p in d.getVar("PROVIDES", True).split():
+        for p in d.getVar("PROVIDES").split():
             if p != pn:
                 pn = p
                 break
@@ -68,7 +68,7 @@ python () {
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
     # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
+    if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
         return
@@ -106,7 +106,7 @@ python () {
     # Output the srpm package
     ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
     if ar_srpm == "1":
-        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+        if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
@@ -130,9 +130,9 @@ python do_ar_original() {
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     bb.note('Archiving the original source...')
-    urls = d.getVar("SRC_URI", True).split()
+    urls = d.getVar("SRC_URI").split()
     # destsuffix (git fetcher) and subdir (everything else) are allowed to be
     # absolute paths (for example, destsuffix=${S}/foobar).
     # That messes with unpacking inside our tmpdir below, because the fetchers
@@ -157,7 +157,7 @@ python do_ar_original() {
         if os.path.isfile(local):
             shutil.copy(local, ar_outdir)
         elif os.path.isdir(local):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
             fetch.unpack(tmpdir, (url,))
             # To handle recipes with more than one source, we add the "name"
             # URL parameter as suffix. We treat it as an error when
@@ -195,24 +195,24 @@ python do_ar_patched() {
         return
 
     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
     bb.note('Archiving the patched source...')
     d.setVar('WORKDIR', ar_workdir)
-    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
 }
 
 python do_ar_configured() {
     import shutil
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
         bb.note('Archiving the configured source...')
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         # "gcc-source-${PV}" recipes don't have "do_configure"
         # task, so we need to run "do_preconfigure" instead
         if pn.startswith("gcc-source-"):
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             bb.build.exec_func('do_preconfigure', d)
 
         # The libtool-native's do_configure will remove the
@@ -221,7 +221,7 @@ python do_ar_configured() {
         # instead of.
         elif pn != 'libtool-native':
             # Change the WORKDIR to make do_configure run in another dir.
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             if bb.data.inherits_class('kernel-yocto', d):
                 bb.build.exec_func('do_kernel_configme', d)
             if bb.data.inherits_class('cmake', d):
@@ -235,12 +235,12 @@ python do_ar_configured() {
             for func in (postfuncs or '').split():
                 if func != "do_qa_configure":
                     bb.build.exec_func(func, d)
-        srcdir = d.getVar('S', True)
-        builddir = d.getVar('B', True)
+        srcdir = d.getVar('S')
+        builddir = d.getVar('B')
         if srcdir != builddir:
             if os.path.exists(builddir):
                 oe.path.copytree(builddir, os.path.join(srcdir, \
-                    'build.%s.ar_configured' % d.getVar('PF', True)))
+                    'build.%s.ar_configured' % d.getVar('PF')))
         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
 
@@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     import tarfile
 
     # Make sure we are only creating a single tarball for gcc sources
-    if (d.getVar('SRC_URI', True) == ""):
+    if (d.getVar('SRC_URI') == ""):
         return
 
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
+        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF', True)
+        filename = '%s.tar.gz' % d.getVar('PF')
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
@@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     dirname = os.path.dirname(src)
     basename = os.path.basename(src)
     os.chdir(dirname)
-    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
     diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
     subprocess.call(diff_cmd, shell=True)
     bb.utils.remove(src_patched, recurse=True)
@@ -297,9 +297,9 @@ python do_unpack_and_patch() {
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
-    pn = d.getVar('PN', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
+    pn = d.getVar('PN')
 
     # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
     if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
@@ -309,18 +309,18 @@ python do_unpack_and_patch() {
         # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
         # possibly requiring of the following tasks (such as some recipes's
         # do_patch required 'B' existed).
-        bb.utils.mkdirhier(d.getVar('B', True))
+        bb.utils.mkdirhier(d.getVar('B'))
 
     bb.build.exec_func('do_unpack', d)
 
     # Save the original source for creating the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
-        src = d.getVar('S', True).rstrip('/')
+        src = d.getVar('S').rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
     # Make sure gcc and kernel sources are patched only once
-    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
+    if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
         bb.build.exec_func('do_patch', d)
 
     # Create the patches
@@ -339,14 +339,14 @@ python do_ar_recipe () {
 
     require_re = re.compile( r"require\s+(.+)" )
     include_re = re.compile( r"include\s+(.+)" )
-    bbfile = d.getVar('FILE', True)
-    outdir = os.path.join(d.getVar('WORKDIR', True), \
-            '%s-recipe' % d.getVar('PF', True))
+    bbfile = d.getVar('FILE')
+    outdir = os.path.join(d.getVar('WORKDIR'), \
+            '%s-recipe' % d.getVar('PF'))
     bb.utils.mkdirhier(outdir)
     shutil.copy(bbfile, outdir)
 
-    pn = d.getVar('PN', True)
-    bbappend_files = d.getVar('BBINCLUDED', True).split()
+    pn = d.getVar('PN')
+    bbappend_files = d.getVar('BBINCLUDED').split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
     bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
@@ -356,7 +356,7 @@ python do_ar_recipe () {
             shutil.copy(file, outdir)
 
     dirname = os.path.dirname(bbfile)
-    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
     f = open(bbfile, 'r')
     for line in f.readlines():
         incfile = None
@@ -370,7 +370,7 @@ python do_ar_recipe () {
             if incfile:
                 shutil.copy(incfile, outdir)
 
-    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
     bb.utils.remove(outdir, recurse=True)
 }
 
@@ -379,8 +379,8 @@ python do_dumpdata () {
     dump environment data to ${PF}-showdata.dump
     """
 
-    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
-        '%s-showdata.dump' % d.getVar('PF', True))
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
+        '%s-showdata.dump' % d.getVar('PF'))
     bb.note('Dumping metadata into %s' % dumpfile)
     with open(dumpfile, "w") as f:
         # emit variables and shell functions