Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r-- meta/classes/archiver.bbclass | 108
1 file changed, 67 insertions(+), 41 deletions(-)
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 858507b343..2d0bbfbd42 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -1,11 +1,15 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
 #
 # This bbclass is used for creating archive for:
 #  1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
 #  2) patched source: ARCHIVER_MODE[src] = "patched" (default)
 #  3) configured source: ARCHIVER_MODE[src] = "configured"
-#  4) source mirror: ARCHIVE_MODE[src] = "mirror"
+#  4) source mirror: ARCHIVER_MODE[src] = "mirror"
 #  5) The patches between do_unpack and do_patch:
 #     ARCHIVER_MODE[diff] = "1"
 #     And you can set the one that you'd like to exclude from the diff:
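
The mode list above maps directly to configuration. As an illustrative sketch (not part of the patch), enabling the archiver with the knobs documented in this header could look like the following local.conf fragment; the values are examples only:

    # Hypothetical local.conf fragment -- values are illustrative
    INHERIT += "archiver"
    ARCHIVER_MODE[src] = "patched"
    ARCHIVER_MODE[diff] = "1"
    ARCHIVER_MODE[diff-exclude] = ".pc autom4te.cache patches"
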
@@ -51,55 +55,66 @@ ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
 ARCHIVER_MODE[dumpdata] ?= "0"
 ARCHIVER_MODE[recipe] ?= "0"
 ARCHIVER_MODE[mirror] ?= "split"
+ARCHIVER_MODE[compression] ?= "xz"
 
 DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
 ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
-ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_ARCH = "${TARGET_SYS}"
+ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
-ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
 ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
 
 # When producing a combined mirror directory, allow duplicates for the case
 # where multiple recipes use the same SRC_URI.
 ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
-SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"
+SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"
 
 do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
-do_deploy_archives[dirs] = "${WORKDIR}"
 
 # This is a convenience for the shell script to use it
 
-
-python () {
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
+def include_package(d, pn):
 
     included, reason = copyleft_should_include(d)
     if not included:
         bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
-        return
+        return False
+
     else:
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
-
     # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
     # so avoid archiving source here.
     if pn.startswith('glibc-locale'):
-        return
+        return False
 
     # We just archive gcc-source for all the gcc related recipes
     if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
+        return False
+
+    return True
+
+python () {
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    if not include_package(d, pn):
         return
 
+    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
+    if pn.startswith('gcc-source'):
+        d.setVar('ARCHIVER_ARCH', "allarch")
+
     def hasTask(task):
         return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
 
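
The hunk above factors the inclusion policy out of the anonymous python function into a boolean include_package() helper, and pins ARCHIVER_ARCH to "allarch" for gcc-source so multiconfig builds do not break each other's stamps via TARGET_SYS. A standalone sketch of the resulting control flow, using a hypothetical FakeData stand-in for the BitBake datastore (the copyleft_should_include() check is omitted):

    # Plain-Python sketch; FakeData and the sample values are hypothetical.
    class FakeData:
        def __init__(self, vars):
            self.vars = vars
        def getVar(self, name):
            return self.vars.get(name, '')

    def include_package(d, pn):
        # glibc-locale: fetch/unpack/patch tasks are deleted, nothing to archive
        if pn.startswith('glibc-locale'):
            return False
        # gcc and libgcc are covered by the single gcc-source archive
        if d.getVar('BPN') in ['gcc', 'libgcc'] and not pn.startswith('gcc-source'):
            return False
        return True

    assert include_package(FakeData({'BPN': 'libgcc'}), 'libgcc') is False
    assert include_package(FakeData({'BPN': 'busybox'}), 'busybox') is True
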
@@ -118,7 +133,7 @@ python () {
         d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
     elif ar_src == "configured":
         # We can't use "addtask do_ar_configured after do_configure" since it
-        # will cause the deptask of do_populate_sysroot to run not matter what
+        # will cause the deptask of do_populate_sysroot to run no matter what
         # archives we need, so we add the depends here.
 
         # There is a corner case with "gcc-source-${PV}" recipes, they don't have
@@ -163,7 +178,7 @@ python () {
             d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
 }
 
-# Take all the sources for a recipe and puts them in WORKDIR/archiver-work/.
+# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
 # Files in SRC_URI are copied directly, anything that's a directory
 # (e.g. git repositories) is "unpacked" and then put into a tarball.
 python do_ar_original() {
@@ -281,7 +296,10 @@ python do_ar_configured() {
         # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the
         # do_configure, we archive the already configured ${S} to
         # instead of.
-        elif pn != 'libtool-native':
+        # The kernel class functions require it to be on work-shared, we
+        # don't unpack, patch, configure again, just archive the already
+        # configured ${S}
+        elif not (pn == 'libtool-native' or is_work_shared(d)):
             def runTask(task):
                 prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                 for func in prefuncs.split():
@@ -383,19 +401,11 @@ python do_ar_mirror() {
     subprocess.check_call(cmd, shell=True)
 }
 
-def exclude_useless_paths(tarinfo):
-    if tarinfo.isdir():
-        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
-            return None
-    elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
-        return None
-    return tarinfo
-
 def create_tarball(d, srcdir, suffix, ar_outdir):
     """
     create the tarball from srcdir
     """
-    import tarfile
+    import subprocess
 
     # Make sure we are only creating a single tarball for gcc sources
     if (d.getVar('SRC_URI') == ""):
@@ -406,17 +416,30 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     # that we archive the actual directory and not just the link.
     srcdir = os.path.realpath(srcdir)
 
+    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
+    if compression_method == "xz":
+        compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
+    # To keep compatibility with ARCHIVER_MODE[compression]
+    elif compression_method == "gz":
+        compression_cmd = "gzip"
+    elif compression_method == "bz2":
+        compression_cmd = "bzip2"
+    else:
+        bb.fatal("Unsupported compression_method: %s" % compression_method)
+
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
+        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF')
+        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
-    tar = tarfile.open(tarname, 'w:gz')
-    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
-    tar.close()
+    dirname = os.path.dirname(srcdir)
+    basename = os.path.basename(srcdir)
+    exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
+    tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
+    subprocess.check_call(tar_cmd, cwd=dirname, shell=True)
 
 # creating .diff.gz between source.orig and source
 def create_diff_gz(d, src_orig, src, ar_outdir):
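
The rewritten create_tarball() drops Python's tarfile module in favour of piping tar through an external compressor, which lets it honour XZ_DEFAULTS (e.g. parallel compression) and replaces the removed exclude_useless_paths() filter with tar's own --exclude options. A minimal standalone sketch of the same pipeline pattern, assuming tar and xz are available on PATH (the function name and example paths are hypothetical):

    import os
    import subprocess

    def make_tarball(srcdir, tarname, compression_cmd="xz"):
        dirname = os.path.dirname(srcdir)
        basename = os.path.basename(srcdir)
        # skip BitBake working state, as the class does
        exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
        # tar streams the archive to stdout; the compressor writes the file
        cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
        subprocess.check_call(cmd, cwd=dirname, shell=True)

    # e.g. make_tarball("/tmp/work/foo-1.0", "/tmp/deploy/foo-1.0.tar.xz")
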
@@ -449,8 +472,8 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     os.chdir(cwd)
 
 def is_work_shared(d):
-    pn = d.getVar('PN')
-    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+    sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
+    return d.getVar('S').startswith(sharedworkdir)
 
 # Run do_unpack and do_patch
 python do_unpack_and_patch() {
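
is_work_shared() now keys off the filesystem layout rather than recipe identity: any recipe whose ${S} sits under ${TMPDIR}/work-shared is treated as shared, which covers the kernel and gcc-source cases the old check hard-coded. A plain-Python illustration of the check (paths are hypothetical):

    import os

    def is_work_shared(tmpdir, s):
        sharedworkdir = os.path.join(tmpdir, 'work-shared')
        return s.startswith(sharedworkdir)

    assert is_work_shared('/build/tmp', '/build/tmp/work-shared/gcc-12/gcc-12.2.0')
    assert not is_work_shared('/build/tmp', '/build/tmp/work/core2-64/busybox/1.35')
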
@@ -463,7 +486,7 @@ python do_unpack_and_patch() {
     ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
     pn = d.getVar('PN')
 
-    # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
+    # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
     if not is_work_shared(d):
         # Change the WORKDIR to make do_unpack do_patch run in another dir.
         d.setVar('WORKDIR', ar_workdir)
@@ -483,6 +506,9 @@ python do_unpack_and_patch() {
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
+    if bb.data.inherits_class('dos2unix', d):
+        bb.build.exec_func('do_convert_crlf_to_lf', d)
+
     # Make sure gcc and kernel sources are patched only once
     if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
         bb.build.exec_func('do_patch', d)
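
The added hunk runs the dos2unix class's do_convert_crlf_to_lf task before patching, so diffs are taken against normalized line endings. As a rough standalone sketch of what such a CRLF-to-LF pass does (an illustration only, not the dos2unix.bbclass implementation):

    import os

    def convert_crlf_to_lf(topdir):
        # rewrite CRLF line endings to LF in every regular file under topdir
        for root, _, files in os.walk(topdir):
            for name in files:
                fn = os.path.join(root, name)
                with open(fn, 'rb') as f:
                    data = f.read()
                if b'\r\n' in data:
                    with open(fn, 'wb') as f:
                        f.write(data.replace(b'\r\n', b'\n'))
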
@@ -505,7 +531,7 @@ python do_unpack_and_patch() {
 # of the output file ensures that we create it each time the recipe
 # gets rebuilt, at least as long as a PR server is used. We also rely
 # on that mechanism to catch changes in the file content, because the
-# file content is not part of of the task signature either.
+# file content is not part of the task signature either.
 do_ar_recipe[vardepsexclude] += "BBINCLUDED"
 python do_ar_recipe () {
     """
@@ -571,7 +597,7 @@ python do_dumpdata () {
 
 SSTATETASKS += "do_deploy_archives"
 do_deploy_archives () {
-    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
+    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
 }
 python do_deploy_archives_setscene () {
     sstate_setscene(d)