Diffstat (limited to 'meta/classes/archiver.bbclass'):

 meta/classes/archiver.bbclass | 112
 1 file changed, 70 insertions(+), 42 deletions(-)
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 858507b343..a95c899a0f 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -1,11 +1,15 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
 #
 # This bbclass is used for creating archive for:
 #  1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
 #  2) patched source: ARCHIVER_MODE[src] = "patched" (default)
 #  3) configured source: ARCHIVER_MODE[src] = "configured"
-#  4) source mirror: ARCHIVE_MODE[src] = "mirror"
+#  4) source mirror: ARCHIVER_MODE[src] = "mirror"
 #  5) The patches between do_unpack and do_patch:
 #     ARCHIVER_MODE[diff] = "1"
 #     And you can set the one that you'd like to exclude from the diff:
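For context, the modes documented in the header above are normally selected from local.conf; a minimal sketch (values illustrative, names exactly as documented in the comment block):

    INHERIT += "archiver"
    ARCHIVER_MODE[src] = "mirror"
    ARCHIVER_MODE[diff] = "1"
    ARCHIVER_MODE[diff-exclude] = ".pc autom4te.cache patches"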
@@ -51,55 +55,66 @@ ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
 ARCHIVER_MODE[dumpdata] ?= "0"
 ARCHIVER_MODE[recipe] ?= "0"
 ARCHIVER_MODE[mirror] ?= "split"
+ARCHIVER_MODE[compression] ?= "xz"
 
 DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
 ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
-ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_ARCH = "${TARGET_SYS}"
+ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
-ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
 
 # When producing a combined mirror directory, allow duplicates for the case
 # where multiple recipes use the same SRC_URI.
 ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
-SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"
+SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"
 
 do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
-do_deploy_archives[dirs] = "${WORKDIR}"
 
 # This is a convenience for the shell script to use it
 
-
-python () {
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
+def include_package(d, pn):
 
     included, reason = copyleft_should_include(d)
     if not included:
         bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
-        return
+        return False
+
     else:
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
-
     # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
     # so avoid archiving source here.
     if pn.startswith('glibc-locale'):
-        return
+        return False
 
     # We just archive gcc-source for all the gcc related recipes
     if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
+        return False
+
+    return True
+
+python () {
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    if not include_package(d, pn):
         return
 
+    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
+    if pn.startswith('gcc-source'):
+        d.setVar('ARCHIVER_ARCH', "allarch")
+
     def hasTask(task):
         return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
 
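The hunk above adds the ARCHIVER_MODE[compression] knob (default "xz") and routes per-recipe output through the new ARCHIVER_ARCH variable so gcc-source can be forced to "allarch" under multiconfig. A sketch of overriding the compressor from local.conf, using the two fallback values accepted by create_tarball() further down:

    ARCHIVER_MODE[compression] = "gz"    # or "bz2"; anything else hits bb.fatal()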
@@ -118,7 +133,7 @@ python () {
         d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
     elif ar_src == "configured":
         # We can't use "addtask do_ar_configured after do_configure" since it
-        # will cause the deptask of do_populate_sysroot to run not matter what
+        # will cause the deptask of do_populate_sysroot to run no matter what
         # archives we need, so we add the depends here.
 
         # There is a corner case with "gcc-source-${PV}" recipes, they don't have
@@ -151,6 +166,7 @@ python () {
         d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
         d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
         d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
+        d.appendVar('PSEUDO_INCLUDE_PATHS', ',${ARCHIVER_TOPDIR}')
         if ar_dumpdata == "1":
             d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
         if ar_recipe == "1":
@@ -163,7 +179,7 @@ python () {
             d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
 }
 
-# Take all the sources for a recipe and puts them in WORKDIR/archiver-work/.
+# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
 # Files in SRC_URI are copied directly, anything that's a directory
 # (e.g. git repositories) is "unpacked" and then put into a tarball.
 python do_ar_original() {
@@ -281,7 +297,10 @@ python do_ar_configured() {
         # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the
         # do_configure, we archive the already configured ${S} to
         # instead of.
-    elif pn != 'libtool-native':
+    # The kernel class functions require it to be on work-shared, we
+    # don't unpack, patch, configure again, just archive the already
+    # configured ${S}
+    elif not (pn == 'libtool-native' or is_work_shared(d)):
         def runTask(task):
             prefuncs = d.getVarFlag(task, 'prefuncs') or ''
             for func in prefuncs.split():
@@ -321,7 +340,7 @@ python do_ar_mirror() {
     dl_dir = d.getVar('DL_DIR')
     mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
     mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
-    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')
+    have_mirror_tarballs = oe.types.boolean(d.getVar('BB_GENERATE_MIRROR_TARBALLS'))
 
     if mirror_mode == 'combined':
         destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
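The oe.types.boolean() conversion above matters because d.getVar() returns a string, and every non-empty string is truthy in Python, so a BB_GENERATE_MIRROR_TARBALLS of "0" previously looked enabled. A minimal sketch of the difference (assumes it runs inside a BitBake python task, where oe.types is importable):

    bool("0")              # True  -- the old implicit string test
    oe.types.boolean("0")  # False -- explicit conversion, as used above
    oe.types.boolean("1")  # True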
@@ -383,19 +402,11 @@ python do_ar_mirror() {
         subprocess.check_call(cmd, shell=True)
 }
 
-def exclude_useless_paths(tarinfo):
-    if tarinfo.isdir():
-        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
-            return None
-    elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
-        return None
-    return tarinfo
-
 def create_tarball(d, srcdir, suffix, ar_outdir):
     """
     create the tarball from srcdir
     """
-    import tarfile
+    import subprocess
 
     # Make sure we are only creating a single tarball for gcc sources
     if (d.getVar('SRC_URI') == ""):
@@ -406,17 +417,30 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     # that we archive the actual directory and not just the link.
     srcdir = os.path.realpath(srcdir)
 
+    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
+    if compression_method == "xz":
+        compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
+    # To keep compatibility with ARCHIVER_MODE[compression]
+    elif compression_method == "gz":
+        compression_cmd = "gzip"
+    elif compression_method == "bz2":
+        compression_cmd = "bzip2"
+    else:
+        bb.fatal("Unsupported compression_method: %s" % compression_method)
+
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
+        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF')
+        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
-    tar = tarfile.open(tarname, 'w:gz')
-    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
-    tar.close()
+    dirname = os.path.dirname(srcdir)
+    basename = os.path.basename(srcdir)
+    exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
+    tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
+    subprocess.check_call(tar_cmd, cwd=dirname, shell=True)
 
 # creating .diff.gz between source.orig and source
 def create_diff_gz(d, src_orig, src, ar_outdir):
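The tarfile-based code this replaces could only write .tar.gz; piping through an external compressor lets ARCHIVER_MODE[compression] pick the format, and the old exclude_useless_paths() filter becomes plain tar --exclude options. For a hypothetical recipe with PF "foo-1.0-r0" and the default xz mode, tar_cmd expands to roughly the following, run with cwd set to the parent of srcdir (XZ_DEFAULTS flags and output path illustrative):

    tar --exclude=temp --exclude=patches --exclude='.pc' -cf - foo-1.0-r0 \
        | xz --memlimit=50% --threads=4 > ${ARCHIVER_OUTDIR}/foo-1.0-r0.tar.xz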
@@ -449,8 +473,9 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     os.chdir(cwd)
 
 def is_work_shared(d):
-    pn = d.getVar('PN')
-    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+    sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
+    sourcedir = os.path.realpath(d.getVar('S'))
+    return sourcedir.startswith(sharedworkdir)
 
 # Run do_unpack and do_patch
 python do_unpack_and_patch() {
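The rewritten is_work_shared() replaces the recipe-name test (kernel class or a gcc-source prefix) with a location test: any recipe whose ${S} resolves to somewhere under ${TMPDIR}/work-shared is treated as shared. A runnable sketch of the path check, with hypothetical paths standing in for the variables:

    import os
    sharedworkdir = os.path.join("/build/tmp", "work-shared")       # ${TMPDIR}/work-shared
    sourcedir = os.path.realpath("/build/tmp/work-shared/gcc-12")   # ${S}
    print(sourcedir.startswith(sharedworkdir))                      # True -> source is shared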
@@ -463,7 +488,7 @@ python do_unpack_and_patch() {
     ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
     pn = d.getVar('PN')
 
-    # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
+    # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
     if not is_work_shared(d):
         # Change the WORKDIR to make do_unpack do_patch run in another dir.
         d.setVar('WORKDIR', ar_workdir)
@@ -483,6 +508,9 @@ python do_unpack_and_patch() {
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
+    if bb.data.inherits_class('dos2unix', d):
+        bb.build.exec_func('do_convert_crlf_to_lf', d)
+
     # Make sure gcc and kernel sources are patched only once
     if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
         bb.build.exec_func('do_patch', d)
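The dos2unix hook above re-runs the class's do_convert_crlf_to_lf on the archiver's private copy of the source, so archived sources match what the recipe actually builds. Recipes opt in simply by inheriting the class, e.g. (recipe name hypothetical):

    # in foo_1.0.bb
    inherit dos2unix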
@@ -505,7 +533,7 @@ python do_unpack_and_patch() {
 # of the output file ensures that we create it each time the recipe
 # gets rebuilt, at least as long as a PR server is used. We also rely
 # on that mechanism to catch changes in the file content, because the
-# file content is not part of of the task signature either.
+# file content is not part of the task signature either.
 do_ar_recipe[vardepsexclude] += "BBINCLUDED"
 python do_ar_recipe () {
     """
@@ -571,7 +599,7 @@ python do_dumpdata () {
 
 SSTATETASKS += "do_deploy_archives"
 do_deploy_archives () {
-    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
+    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
 }
 python do_deploy_archives_setscene () {
     sstate_setscene(d)