summaryrefslogtreecommitdiffstats
path: root/meta/classes
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes')
-rw-r--r--meta/classes/allarch.bbclass2
-rw-r--r--meta/classes/base.bbclass1
-rw-r--r--meta/classes/cargo_common.bbclass1
-rw-r--r--meta/classes/crate-fetch.bbclass28
-rw-r--r--meta/classes/create-spdx.bbclass16
-rw-r--r--meta/classes/cve-check.bbclass1
-rw-r--r--meta/classes/debian.bbclass4
-rw-r--r--meta/classes/distutils-common-base.bbclass3
-rw-r--r--meta/classes/distutils3-base.bbclass3
-rw-r--r--meta/classes/distutils3.bbclass4
-rw-r--r--meta/classes/go.bbclass4
-rw-r--r--meta/classes/gtk-doc.bbclass2
-rw-r--r--meta/classes/image.bbclass3
-rw-r--r--meta/classes/insane.bbclass52
-rw-r--r--meta/classes/kernel-artifact-names.bbclass5
-rw-r--r--meta/classes/kernel-devicetree.bbclass24
-rw-r--r--meta/classes/kernel-fitimage.bbclass20
-rw-r--r--meta/classes/kernel.bbclass81
-rw-r--r--meta/classes/manpages.bbclass7
-rw-r--r--meta/classes/meson.bbclass30
-rw-r--r--meta/classes/multilib.bbclass4
-rw-r--r--meta/classes/native.bbclass31
-rw-r--r--meta/classes/overlayfs-etc.bbclass76
-rw-r--r--meta/classes/overlayfs.bbclass53
-rw-r--r--meta/classes/package.bbclass19
-rw-r--r--meta/classes/package_deb.bbclass1
-rw-r--r--meta/classes/package_ipk.bbclass1
-rw-r--r--meta/classes/package_rpm.bbclass1
-rw-r--r--meta/classes/populate_sdk_base.bbclass11
-rw-r--r--meta/classes/python3native.bbclass2
-rw-r--r--meta/classes/qemuboot.bbclass9
-rw-r--r--meta/classes/rootfs-postcommands.bbclass4
-rw-r--r--meta/classes/rootfs_rpm.bbclass2
-rw-r--r--meta/classes/sanity.bbclass60
-rw-r--r--meta/classes/setuptools3-base.bbclass31
-rw-r--r--meta/classes/setuptools3.bbclass66
-rw-r--r--meta/classes/sstate.bbclass12
-rw-r--r--meta/classes/testimage.bbclass5
-rw-r--r--meta/classes/testsdk.bbclass2
-rw-r--r--meta/classes/uboot-sign.bbclass16
-rw-r--r--meta/classes/utility-tasks.bbclass1
-rw-r--r--meta/classes/waf.bbclass3
42 files changed, 476 insertions, 225 deletions
diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass
index 5bd5c44..a766a65 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes/allarch.bbclass
@@ -61,3 +61,5 @@ python () {
61 bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE")) 61 bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
62} 62}
63 63
64def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
65 return 'false'
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index b709777..5f4956a 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -150,6 +150,7 @@ do_fetch[dirs] = "${DL_DIR}"
150do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" 150do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
151do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}" 151do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
152do_fetch[vardeps] += "SRCREV" 152do_fetch[vardeps] += "SRCREV"
153do_fetch[network] = "1"
153python base_do_fetch() { 154python base_do_fetch() {
154 155
155 src_uri = (d.getVar('SRC_URI') or "").split() 156 src_uri = (d.getVar('SRC_URI') or "").split()
diff --git a/meta/classes/cargo_common.bbclass b/meta/classes/cargo_common.bbclass
index 23d82aa..90fad75 100644
--- a/meta/classes/cargo_common.bbclass
+++ b/meta/classes/cargo_common.bbclass
@@ -9,7 +9,6 @@
9## 9##
10 10
11# add crate fetch support 11# add crate fetch support
12inherit crate-fetch
13inherit rust-common 12inherit rust-common
14 13
15# Where we download our registry and dependencies to 14# Where we download our registry and dependencies to
diff --git a/meta/classes/crate-fetch.bbclass b/meta/classes/crate-fetch.bbclass
deleted file mode 100644
index a7fa22b..0000000
--- a/meta/classes/crate-fetch.bbclass
+++ /dev/null
@@ -1,28 +0,0 @@
1#
2# crate-fetch class
3#
4# Registers 'crate' method for Bitbake fetch2.
5#
6# Adds support for following format in recipe SRC_URI:
7# crate://<packagename>/<version>
8#
9
10def import_crate(d):
11 import crate
12 if not getattr(crate, 'imported', False):
13 bb.fetch2.methods.append(crate.Crate())
14 crate.imported = True
15
16python crate_import_handler() {
17 import_crate(d)
18}
19
20addhandler crate_import_handler
21crate_import_handler[eventmask] = "bb.event.RecipePreFinalise"
22
23def crate_get_srcrev(d):
24 import_crate(d)
25 return bb.fetch2.get_srcrev(d)
26
27# Override SRCPV to make sure it imports the fetcher first
28SRCPV = "${@crate_get_srcrev(d)}"
diff --git a/meta/classes/create-spdx.bbclass b/meta/classes/create-spdx.bbclass
index 0a4db80..eb95350 100644
--- a/meta/classes/create-spdx.bbclass
+++ b/meta/classes/create-spdx.bbclass
@@ -254,6 +254,7 @@ def add_package_sources_from_debug(d, package_doc, spdx_package, package, packag
254 Path(d.getVar('PKGD')), 254 Path(d.getVar('PKGD')),
255 Path(d.getVar('STAGING_DIR_TARGET')), 255 Path(d.getVar('STAGING_DIR_TARGET')),
256 Path(d.getVar('STAGING_DIR_NATIVE')), 256 Path(d.getVar('STAGING_DIR_NATIVE')),
257 Path(d.getVar('STAGING_KERNEL_DIR')),
257 ] 258 ]
258 259
259 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d) 260 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
@@ -275,7 +276,10 @@ def add_package_sources_from_debug(d, package_doc, spdx_package, package, packag
275 for debugsrc in file_data["debugsrc"]: 276 for debugsrc in file_data["debugsrc"]:
276 ref_id = "NOASSERTION" 277 ref_id = "NOASSERTION"
277 for search in debug_search_paths: 278 for search in debug_search_paths:
278 debugsrc_path = search / debugsrc.lstrip("/") 279 if debugsrc.startswith("/usr/src/kernel"):
280 debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
281 else:
282 debugsrc_path = search / debugsrc.lstrip("/")
279 if not debugsrc_path.exists(): 283 if not debugsrc_path.exists():
280 continue 284 continue
281 285
@@ -870,8 +874,9 @@ python image_combine_spdx() {
870 with image_spdx_path.open("wb") as f: 874 with image_spdx_path.open("wb") as f:
871 doc.to_json(f, sort_keys=True) 875 doc.to_json(f, sort_keys=True)
872 876
873 image_spdx_link = imgdeploydir / (image_link_name + ".spdx.json") 877 if image_link_name:
874 image_spdx_link.symlink_to(os.path.relpath(image_spdx_path, image_spdx_link.parent)) 878 image_spdx_link = imgdeploydir / (image_link_name + ".spdx.json")
879 image_spdx_link.symlink_to(os.path.relpath(image_spdx_path, image_spdx_link.parent))
875 880
876 num_threads = int(d.getVar("BB_NUMBER_THREADS")) 881 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
877 882
@@ -942,8 +947,9 @@ python image_combine_spdx() {
942 tar.addfile(info, fileobj=index_str) 947 tar.addfile(info, fileobj=index_str)
943 948
944 def make_image_link(target_path, suffix): 949 def make_image_link(target_path, suffix):
945 link = imgdeploydir / (image_link_name + suffix) 950 if image_link_name:
946 link.symlink_to(os.path.relpath(target_path, link.parent)) 951 link = imgdeploydir / (image_link_name + suffix)
952 link.symlink_to(os.path.relpath(target_path, link.parent))
947 953
948 make_image_link(spdx_tar_path, ".spdx.tar.zst") 954 make_image_link(spdx_tar_path, ".spdx.tar.zst")
949 955
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 70d1988..6c04ff9 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -111,6 +111,7 @@ python do_cve_check () {
111} 111}
112 112
113addtask cve_check before do_build after do_fetch 113addtask cve_check before do_build after do_fetch
114do_cve_check[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
114do_cve_check[depends] = "cve-update-db-native:do_fetch" 115do_cve_check[depends] = "cve-update-db-native:do_fetch"
115do_cve_check[nostamp] = "1" 116do_cve_check[nostamp] = "1"
116 117
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index 39b1a1a..8367be9 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -14,6 +14,10 @@ AUTO_LIBNAME_PKGS = "${PACKAGES}"
14inherit package 14inherit package
15 15
16DEBIANRDEP = "do_packagedata" 16DEBIANRDEP = "do_packagedata"
17do_package_write_ipk[deptask] = "${DEBIANRDEP}"
18do_package_write_deb[deptask] = "${DEBIANRDEP}"
19do_package_write_tar[deptask] = "${DEBIANRDEP}"
20do_package_write_rpm[deptask] = "${DEBIANRDEP}"
17do_package_write_ipk[rdeptask] = "${DEBIANRDEP}" 21do_package_write_ipk[rdeptask] = "${DEBIANRDEP}"
18do_package_write_deb[rdeptask] = "${DEBIANRDEP}" 22do_package_write_deb[rdeptask] = "${DEBIANRDEP}"
19do_package_write_tar[rdeptask] = "${DEBIANRDEP}" 23do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
diff --git a/meta/classes/distutils-common-base.bbclass b/meta/classes/distutils-common-base.bbclass
index bc994f7..59c750a 100644
--- a/meta/classes/distutils-common-base.bbclass
+++ b/meta/classes/distutils-common-base.bbclass
@@ -23,3 +23,6 @@ FILES:${PN}-dev += "\
23 ${libdir}/pkgconfig \ 23 ${libdir}/pkgconfig \
24 ${PYTHON_SITEPACKAGES_DIR}/*.la \ 24 ${PYTHON_SITEPACKAGES_DIR}/*.la \
25" 25"
26python __anonymous() {
27 bb.warn("distutils-common-base.bbclass is deprecated, please use setuptools3-base.bbclass instead")
28}
diff --git a/meta/classes/distutils3-base.bbclass b/meta/classes/distutils3-base.bbclass
index d41873e..850c535 100644
--- a/meta/classes/distutils3-base.bbclass
+++ b/meta/classes/distutils3-base.bbclass
@@ -4,3 +4,6 @@ RDEPENDS:${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-tar
4 4
5inherit distutils-common-base python3native python3targetconfig 5inherit distutils-common-base python3native python3targetconfig
6 6
7python __anonymous() {
8 bb.warn("distutils3-base.bbclass is deprecated, please use setuptools3-base.bbclass instead")
9
diff --git a/meta/classes/distutils3.bbclass b/meta/classes/distutils3.bbclass
index be645d3..a6d8e87 100644
--- a/meta/classes/distutils3.bbclass
+++ b/meta/classes/distutils3.bbclass
@@ -14,6 +14,10 @@ DISTUTILS_PYTHON:class-native = "nativepython3"
14 14
15DISTUTILS_SETUP_PATH ?= "${S}" 15DISTUTILS_SETUP_PATH ?= "${S}"
16 16
17python __anonymous() {
18 bb.warn("distutils3.bbclass is deprecated, please use setuptools3.bbclass instead")
19}
20
17distutils3_do_configure() { 21distutils3_do_configure() {
18 : 22 :
19} 23}
diff --git a/meta/classes/go.bbclass b/meta/classes/go.bbclass
index aa54b4a..9c4c92b 100644
--- a/meta/classes/go.bbclass
+++ b/meta/classes/go.bbclass
@@ -2,6 +2,8 @@ inherit goarch
2 2
3GO_PARALLEL_BUILD ?= "${@oe.utils.parallel_make_argument(d, '-p %d')}" 3GO_PARALLEL_BUILD ?= "${@oe.utils.parallel_make_argument(d, '-p %d')}"
4 4
5export GODEBUG = "gocachehash=1"
6
5GOROOT:class-native = "${STAGING_LIBDIR_NATIVE}/go" 7GOROOT:class-native = "${STAGING_LIBDIR_NATIVE}/go"
6GOROOT:class-nativesdk = "${STAGING_DIR_TARGET}${libdir}/go" 8GOROOT:class-nativesdk = "${STAGING_DIR_TARGET}${libdir}/go"
7GOROOT = "${STAGING_LIBDIR}/go" 9GOROOT = "${STAGING_LIBDIR}/go"
@@ -65,7 +67,7 @@ GO_INSTALL_FILTEROUT ?= "${GO_IMPORT}/vendor/"
65 67
66B = "${WORKDIR}/build" 68B = "${WORKDIR}/build"
67export GOPATH = "${B}" 69export GOPATH = "${B}"
68export GOTMPDIR ?= "${WORKDIR}/go-tmp" 70export GOTMPDIR ?= "${WORKDIR}/build-tmp"
69GOTMPDIR[vardepvalue] = "" 71GOTMPDIR[vardepvalue] = ""
70 72
71python go_do_unpack() { 73python go_do_unpack() {
diff --git a/meta/classes/gtk-doc.bbclass b/meta/classes/gtk-doc.bbclass
index 7149bc0..07b46ac 100644
--- a/meta/classes/gtk-doc.bbclass
+++ b/meta/classes/gtk-doc.bbclass
@@ -63,7 +63,7 @@ export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
63GIR_EXTRA_LIBS_PATH=\`find ${B} -name *.so -printf "%h\n"|sort|uniq| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH 63GIR_EXTRA_LIBS_PATH=\`find ${B} -name *.so -printf "%h\n"|sort|uniq| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
64GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH 64GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
65 65
66# meson sets this wrongly (only to libs in build-dir), qemu-wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly 66# meson sets this wrongly (only to libs in build-dir), qemu_wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly
67unset LD_LIBRARY_PATH 67unset LD_LIBRARY_PATH
68 68
69if [ -d ".libs" ]; then 69if [ -d ".libs" ]; then
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 8a46b48..2b0ce4a 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -15,6 +15,7 @@ IMGCLASSES += "${@bb.utils.contains('IMAGE_FSTYPES', 'container', 'image-contain
15IMGCLASSES += "image_types_wic" 15IMGCLASSES += "image_types_wic"
16IMGCLASSES += "rootfs-postcommands" 16IMGCLASSES += "rootfs-postcommands"
17IMGCLASSES += "image-postinst-intercepts" 17IMGCLASSES += "image-postinst-intercepts"
18IMGCLASSES += "overlayfs-etc"
18inherit ${IMGCLASSES} 19inherit ${IMGCLASSES}
19 20
20TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}" 21TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}"
@@ -33,7 +34,7 @@ INHIBIT_DEFAULT_DEPS = "1"
33# IMAGE_FEATURES may contain any available package group 34# IMAGE_FEATURES may contain any available package group
34IMAGE_FEATURES ?= "" 35IMAGE_FEATURES ?= ""
35IMAGE_FEATURES[type] = "list" 36IMAGE_FEATURES[type] = "list"
36IMAGE_FEATURES[validitems] += "debug-tweaks read-only-rootfs read-only-rootfs-delayed-postinsts stateless-rootfs empty-root-password allow-empty-password allow-root-login post-install-logging" 37IMAGE_FEATURES[validitems] += "debug-tweaks read-only-rootfs read-only-rootfs-delayed-postinsts stateless-rootfs empty-root-password allow-empty-password allow-root-login post-install-logging overlayfs-etc"
37 38
38# Generate companion debugfs? 39# Generate companion debugfs?
39IMAGE_GEN_DEBUGFS ?= "0" 40IMAGE_GEN_DEBUGFS ?= "0"
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 27b1a00..11532ec 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -968,17 +968,6 @@ def package_qa_check_host_user(path, name, d, elf, messages):
968 return False 968 return False
969 return True 969 return True
970 970
971QARECIPETEST[src-uri-bad] = "package_qa_check_src_uri"
972def package_qa_check_src_uri(pn, d, messages):
973 import re
974
975 if "${PN}" in d.getVar("SRC_URI", False):
976 oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
977
978 for url in d.getVar("SRC_URI").split():
979 if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url):
980 oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
981
982QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" 971QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check"
983def package_qa_check_unhandled_features_check(pn, d, messages): 972def package_qa_check_unhandled_features_check(pn, d, messages):
984 if not bb.data.inherits_class('features_check', d): 973 if not bb.data.inherits_class('features_check', d):
@@ -1167,6 +1156,30 @@ python do_qa_patch() {
1167 bb.warn(msg) 1156 bb.warn(msg)
1168 msg = "Patch log indicates that patches do not apply cleanly." 1157 msg = "Patch log indicates that patches do not apply cleanly."
1169 oe.qa.handle_error("patch-fuzz", msg, d) 1158 oe.qa.handle_error("patch-fuzz", msg, d)
1159
1160 # Check if the patch contains a correctly formatted and spelled Upstream-Status
1161 import re
1162 from oe import patch
1163
1164 for url in patch.src_patches(d):
1165 (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)
1166
1167 # skip patches not in oe-core
1168 if '/meta/' not in fullpath:
1169 continue
1170
1171 content = open(fullpath, encoding='utf-8', errors='ignore').read()
1172 kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
1173 strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Accepted|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
1174 match_kinda = kinda_status_re.search(content)
1175 match_strict = strict_status_re.search(content)
1176 guidelines = "https://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines#Patch_Header_Recommendations:_Upstream-Status"
1177
1178 if not match_strict:
1179 if match_kinda:
1180 bb.error("Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0)))
1181 else:
1182 bb.error("Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines))
1170} 1183}
1171 1184
1172python do_qa_configure() { 1185python do_qa_configure() {
@@ -1261,11 +1274,28 @@ Rerun configure task after fixing this."""
1261 oe.qa.exit_if_errors(d) 1274 oe.qa.exit_if_errors(d)
1262} 1275}
1263 1276
1277def unpack_check_src_uri(pn, d):
1278 import re
1279
1280 skip = (d.getVar('INSANE_SKIP') or "").split()
1281 if 'src-uri-bad' in skip:
1282 bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
1283 return
1284
1285 if "${PN}" in d.getVar("SRC_URI", False):
1286 oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
1287
1288 for url in d.getVar("SRC_URI").split():
1289 if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url):
1290 oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
1291
1264python do_qa_unpack() { 1292python do_qa_unpack() {
1265 src_uri = d.getVar('SRC_URI') 1293 src_uri = d.getVar('SRC_URI')
1266 s_dir = d.getVar('S') 1294 s_dir = d.getVar('S')
1267 if src_uri and not os.path.exists(s_dir): 1295 if src_uri and not os.path.exists(s_dir):
1268 bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) 1296 bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
1297
1298 unpack_check_src_uri(d.getVar('PN'), d)
1269} 1299}
1270 1300
1271# The Staging Func, to check all staging 1301# The Staging Func, to check all staging
diff --git a/meta/classes/kernel-artifact-names.bbclass b/meta/classes/kernel-artifact-names.bbclass
index a65cddd..e77107c 100644
--- a/meta/classes/kernel-artifact-names.bbclass
+++ b/meta/classes/kernel-artifact-names.bbclass
@@ -8,15 +8,20 @@ inherit image-artifact-names
8 8
9KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}" 9KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
10KERNEL_ARTIFACT_LINK_NAME ?= "${MACHINE}" 10KERNEL_ARTIFACT_LINK_NAME ?= "${MACHINE}"
11KERNEL_ARTIFACT_BIN_EXT ?= ".bin"
11 12
12KERNEL_IMAGE_NAME ?= "${KERNEL_ARTIFACT_NAME}" 13KERNEL_IMAGE_NAME ?= "${KERNEL_ARTIFACT_NAME}"
13KERNEL_IMAGE_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}" 14KERNEL_IMAGE_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
15KERNEL_IMAGE_BIN_EXT ?= "${KERNEL_ARTIFACT_BIN_EXT}"
16KERNEL_IMAGETYPE_SYMLINK ?= "1"
14 17
15KERNEL_DTB_NAME ?= "${KERNEL_ARTIFACT_NAME}" 18KERNEL_DTB_NAME ?= "${KERNEL_ARTIFACT_NAME}"
16KERNEL_DTB_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}" 19KERNEL_DTB_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
20KERNEL_DTB_BIN_EXT ?= "${KERNEL_ARTIFACT_BIN_EXT}"
17 21
18KERNEL_FIT_NAME ?= "${KERNEL_ARTIFACT_NAME}" 22KERNEL_FIT_NAME ?= "${KERNEL_ARTIFACT_NAME}"
19KERNEL_FIT_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}" 23KERNEL_FIT_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
24KERNEL_FIT_BIN_EXT ?= "${KERNEL_ARTIFACT_BIN_EXT}"
20 25
21MODULE_TARBALL_NAME ?= "${KERNEL_ARTIFACT_NAME}" 26MODULE_TARBALL_NAME ?= "${KERNEL_ARTIFACT_NAME}"
22MODULE_TARBALL_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}" 27MODULE_TARBALL_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
diff --git a/meta/classes/kernel-devicetree.bbclass b/meta/classes/kernel-devicetree.bbclass
index a50ea4f..b4338da 100644
--- a/meta/classes/kernel-devicetree.bbclass
+++ b/meta/classes/kernel-devicetree.bbclass
@@ -83,21 +83,29 @@ do_deploy:append() {
83 dtb_base_name=`basename $dtb .$dtb_ext` 83 dtb_base_name=`basename $dtb .$dtb_ext`
84 install -d $deployDir 84 install -d $deployDir
85 install -m 0644 ${D}/${KERNEL_IMAGEDEST}/$dtb_base_name.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext 85 install -m 0644 ${D}/${KERNEL_IMAGEDEST}/$dtb_base_name.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext
86 ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name.$dtb_ext 86 if [ "${KERNEL_IMAGETYPE_SYMLINK}" = "1" ] ; then
87 ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext 87 ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name.$dtb_ext
88 fi
89 if [ -n "${KERNEL_DTB_LINK_NAME}" ] ; then
90 ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext
91 fi
88 for type in ${KERNEL_IMAGETYPE_FOR_MAKE}; do 92 for type in ${KERNEL_IMAGETYPE_FOR_MAKE}; do
89 if [ "$type" = "zImage" ] && [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then 93 if [ "$type" = "zImage" ] && [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then
90 cat ${D}/${KERNEL_IMAGEDEST}/$type \ 94 cat ${D}/${KERNEL_IMAGEDEST}/$type \
91 $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \ 95 $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \
92 > $deployDir/$type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext.bin 96 > $deployDir/$type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
93 ln -sf $type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext.bin \ 97 if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
94 $deployDir/$type-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext.bin 98 ln -sf $type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT} \
99 $deployDir/$type-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
100 fi
95 if [ -e "${KERNEL_OUTPUT_DIR}/${type}.initramfs" ]; then 101 if [ -e "${KERNEL_OUTPUT_DIR}/${type}.initramfs" ]; then
96 cat ${KERNEL_OUTPUT_DIR}/${type}.initramfs \ 102 cat ${KERNEL_OUTPUT_DIR}/${type}.initramfs \
97 $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \ 103 $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \
98 > $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext.bin 104 > $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
99 ln -sf ${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext.bin \ 105 if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
100 $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext.bin 106 ln -sf ${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT} \
107 $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
108 fi
101 fi 109 fi
102 fi 110 fi
103 done 111 done
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes/kernel-fitimage.bbclass
index 8718ce7..b0c971b 100644
--- a/meta/classes/kernel-fitimage.bbclass
+++ b/meta/classes/kernel-fitimage.bbclass
@@ -722,22 +722,30 @@ kernel_do_deploy:append() {
722 if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then 722 if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then
723 echo "Copying fit-image.its source file..." 723 echo "Copying fit-image.its source file..."
724 install -m 0644 ${B}/fit-image.its "$deployDir/fitImage-its-${KERNEL_FIT_NAME}.its" 724 install -m 0644 ${B}/fit-image.its "$deployDir/fitImage-its-${KERNEL_FIT_NAME}.its"
725 ln -snf fitImage-its-${KERNEL_FIT_NAME}.its "$deployDir/fitImage-its-${KERNEL_FIT_LINK_NAME}" 725 if [ -n "${KERNEL_FIT_LINK_NAME}" ] ; then
726 ln -snf fitImage-its-${KERNEL_FIT_NAME}.its "$deployDir/fitImage-its-${KERNEL_FIT_LINK_NAME}"
727 fi
726 728
727 echo "Copying linux.bin file..." 729 echo "Copying linux.bin file..."
728 install -m 0644 ${B}/linux.bin $deployDir/fitImage-linux.bin-${KERNEL_FIT_NAME}.bin 730 install -m 0644 ${B}/linux.bin $deployDir/fitImage-linux.bin-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT}
729 ln -snf fitImage-linux.bin-${KERNEL_FIT_NAME}.bin "$deployDir/fitImage-linux.bin-${KERNEL_FIT_LINK_NAME}" 731 if [ -n "${KERNEL_FIT_LINK_NAME}" ] ; then
732 ln -snf fitImage-linux.bin-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT} "$deployDir/fitImage-linux.bin-${KERNEL_FIT_LINK_NAME}"
733 fi
730 fi 734 fi
731 735
732 if [ -n "${INITRAMFS_IMAGE}" ]; then 736 if [ -n "${INITRAMFS_IMAGE}" ]; then
733 echo "Copying fit-image-${INITRAMFS_IMAGE}.its source file..." 737 echo "Copying fit-image-${INITRAMFS_IMAGE}.its source file..."
734 install -m 0644 ${B}/fit-image-${INITRAMFS_IMAGE}.its "$deployDir/fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.its" 738 install -m 0644 ${B}/fit-image-${INITRAMFS_IMAGE}.its "$deployDir/fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.its"
735 ln -snf fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.its "$deployDir/fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}" 739 if [ -n "${KERNEL_FIT_LINK_NAME}" ] ; then
740 ln -snf fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.its "$deployDir/fitImage-its-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}"
741 fi
736 742
737 if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then 743 if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then
738 echo "Copying fitImage-${INITRAMFS_IMAGE} file..." 744 echo "Copying fitImage-${INITRAMFS_IMAGE} file..."
739 install -m 0644 ${B}/arch/${ARCH}/boot/fitImage-${INITRAMFS_IMAGE} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.bin" 745 install -m 0644 ${B}/arch/${ARCH}/boot/fitImage-${INITRAMFS_IMAGE} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT}"
740 ln -snf fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.bin "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}" 746 if [ -n "${KERNEL_FIT_LINK_NAME}" ] ; then
747 ln -snf fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}"
748 fi
741 fi 749 fi
742 fi 750 fi
743 fi 751 fi
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index 2d219cb..473e28b 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -77,7 +77,7 @@ python __anonymous () {
77 # KERNEL_IMAGETYPES may contain a mixture of image types supported directly 77 # KERNEL_IMAGETYPES may contain a mixture of image types supported directly
78 # by the kernel build system and types which are created by post-processing 78 # by the kernel build system and types which are created by post-processing
79 # the output of the kernel build system (e.g. compressing vmlinux -> 79 # the output of the kernel build system (e.g. compressing vmlinux ->
80 # vmlinux.gz in kernel_do_compile()). 80 # vmlinux.gz in kernel_do_transform_kernel()).
81 # KERNEL_IMAGETYPE_FOR_MAKE should contain only image types supported 81 # KERNEL_IMAGETYPE_FOR_MAKE should contain only image types supported
82 # directly by the kernel build system. 82 # directly by the kernel build system.
83 if not d.getVar('KERNEL_IMAGETYPE_FOR_MAKE'): 83 if not d.getVar('KERNEL_IMAGETYPE_FOR_MAKE'):
@@ -134,6 +134,8 @@ set -e
134 # standalone for use by wic and other tools. 134 # standalone for use by wic and other tools.
135 if image: 135 if image:
136 d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete') 136 d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
137 if image and bb.utils.to_boolean(d.getVar('INITRAMFS_IMAGE_BUNDLE')):
138 bb.build.addtask('do_transform_bundled_initramfs', 'do_deploy', 'do_bundle_initramfs', d)
137 139
138 # NOTE: setting INITRAMFS_TASK is for backward compatibility 140 # NOTE: setting INITRAMFS_TASK is for backward compatibility
139 # The preferred method is to set INITRAMFS_IMAGE, because 141 # The preferred method is to set INITRAMFS_IMAGE, because
@@ -316,6 +318,14 @@ do_bundle_initramfs () {
316} 318}
317do_bundle_initramfs[dirs] = "${B}" 319do_bundle_initramfs[dirs] = "${B}"
318 320
321kernel_do_transform_bundled_initramfs() {
322 # vmlinux.gz is not built by kernel
323 if (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux\.gz"); then
324 gzip -9cn < ${KERNEL_OUTPUT_DIR}/vmlinux.initramfs > ${KERNEL_OUTPUT_DIR}/vmlinux.gz.initramfs
325 fi
326}
327do_transform_bundled_initramfs[dirs] = "${B}"
328
319python do_devshell:prepend () { 329python do_devshell:prepend () {
320 os.environ["LDFLAGS"] = '' 330 os.environ["LDFLAGS"] = ''
321} 331}
@@ -364,12 +374,17 @@ kernel_do_compile() {
364 for typeformake in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do 374 for typeformake in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do
365 oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd 375 oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
366 done 376 done
377}
378
379kernel_do_transform_kernel() {
367 # vmlinux.gz is not built by kernel 380 # vmlinux.gz is not built by kernel
368 if (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux\.gz"); then 381 if (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux\.gz"); then
369 mkdir -p "${KERNEL_OUTPUT_DIR}" 382 mkdir -p "${KERNEL_OUTPUT_DIR}"
370 gzip -9cn < ${B}/vmlinux > "${KERNEL_OUTPUT_DIR}/vmlinux.gz" 383 gzip -9cn < ${B}/vmlinux > "${KERNEL_OUTPUT_DIR}/vmlinux.gz"
371 fi 384 fi
372} 385}
386do_transform_kernel[dirs] = "${B}"
387addtask transform_kernel after do_compile before do_install
373 388
374do_compile_kernelmodules() { 389do_compile_kernelmodules() {
375 unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE 390 unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
@@ -621,11 +636,11 @@ inherit cml1
621 636
622KCONFIG_CONFIG_COMMAND:append = " LD='${KERNEL_LD}' HOSTLDFLAGS='${BUILD_LDFLAGS}'" 637KCONFIG_CONFIG_COMMAND:append = " LD='${KERNEL_LD}' HOSTLDFLAGS='${BUILD_LDFLAGS}'"
623 638
624EXPORT_FUNCTIONS do_compile do_install do_configure 639EXPORT_FUNCTIONS do_compile do_transform_kernel do_transform_bundled_initramfs do_install do_configure
625 640
626# kernel-base becomes kernel-${KERNEL_VERSION} 641# kernel-base becomes kernel-${KERNEL_VERSION}
627# kernel-image becomes kernel-image-${KERNEL_VERSION} 642# kernel-image becomes kernel-image-${KERNEL_VERSION}
628PACKAGES = "${KERNEL_PACKAGE_NAME} ${KERNEL_PACKAGE_NAME}-base ${KERNEL_PACKAGE_NAME}-vmlinux ${KERNEL_PACKAGE_NAME}-image ${KERNEL_PACKAGE_NAME}-dev ${KERNEL_PACKAGE_NAME}-modules" 643PACKAGES = "${KERNEL_PACKAGE_NAME} ${KERNEL_PACKAGE_NAME}-base ${KERNEL_PACKAGE_NAME}-vmlinux ${KERNEL_PACKAGE_NAME}-image ${KERNEL_PACKAGE_NAME}-dev ${KERNEL_PACKAGE_NAME}-modules ${KERNEL_PACKAGE_NAME}-dbg"
629FILES:${PN} = "" 644FILES:${PN} = ""
630FILES:${KERNEL_PACKAGE_NAME}-base = "${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.order ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin.modinfo" 645FILES:${KERNEL_PACKAGE_NAME}-base = "${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.order ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin.modinfo"
631FILES:${KERNEL_PACKAGE_NAME}-image = "" 646FILES:${KERNEL_PACKAGE_NAME}-image = ""
@@ -685,30 +700,19 @@ do_kernel_link_images() {
685} 700}
686addtask kernel_link_images after do_compile before do_strip 701addtask kernel_link_images after do_compile before do_strip
687 702
688do_strip() { 703python do_strip() {
689 if [ -n "${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}" ]; then 704 import shutil
690 if ! (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux"); then
691 bbwarn "image type(s) will not be stripped (not supported): ${KERNEL_IMAGETYPES}"
692 return
693 fi
694
695 cd ${B}
696 headers=`"$CROSS_COMPILE"readelf -S ${KERNEL_OUTPUT_DIR}/vmlinux | \
697 grep "^ \{1,\}\[[0-9 ]\{1,\}\] [^ ]" | \
698 sed "s/^ \{1,\}\[[0-9 ]\{1,\}\] //" | \
699 gawk '{print $1}'`
700
701 for str in ${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}; do {
702 if ! (echo "$headers" | grep -q "^$str$"); then
703 bbwarn "Section not found: $str";
704 fi
705 705
706 "$CROSS_COMPILE"strip -s -R $str ${KERNEL_OUTPUT_DIR}/vmlinux 706 strip = d.getVar('STRIP')
707 }; done 707 extra_sections = d.getVar('KERNEL_IMAGE_STRIP_EXTRA_SECTIONS')
708 kernel_image = d.getVar('B') + "/" + d.getVar('KERNEL_OUTPUT_DIR') + "/vmlinux"
708 709
709 bbnote "KERNEL_IMAGE_STRIP_EXTRA_SECTIONS is set, stripping sections:" \ 710 if (extra_sections and kernel_image.find('boot/vmlinux') != -1):
710 "${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}" 711 kernel_image_stripped = kernel_image + ".stripped"
711 fi; 712 shutil.copy2(kernel_image, kernel_image_stripped)
713 oe.package.runstrip((kernel_image_stripped, 8, strip, extra_sections))
714 bb.debug(1, "KERNEL_IMAGE_STRIP_EXTRA_SECTIONS is set, stripping sections: " + \
715 extra_sections)
712} 716}
713do_strip[dirs] = "${B}" 717do_strip[dirs] = "${B}"
714 718
@@ -753,9 +757,18 @@ kernel_do_deploy() {
753 757
754 for imageType in ${KERNEL_IMAGETYPES} ; do 758 for imageType in ${KERNEL_IMAGETYPES} ; do
755 baseName=$imageType-${KERNEL_IMAGE_NAME} 759 baseName=$imageType-${KERNEL_IMAGE_NAME}
756 install -m 0644 ${KERNEL_OUTPUT_DIR}/$imageType $deployDir/$baseName.bin 760
757 ln -sf $baseName.bin $deployDir/$imageType-${KERNEL_IMAGE_LINK_NAME}.bin 761 if [ -s ${KERNEL_OUTPUT_DIR}/$imageType.stripped ] ; then
758 ln -sf $baseName.bin $deployDir/$imageType 762 install -m 0644 ${KERNEL_OUTPUT_DIR}/$imageType.stripped $deployDir/$baseName${KERNEL_IMAGE_BIN_EXT}
763 else
764 install -m 0644 ${KERNEL_OUTPUT_DIR}/$imageType $deployDir/$baseName${KERNEL_IMAGE_BIN_EXT}
765 fi
766 if [ -n "${KERNEL_IMAGE_LINK_NAME}" ] ; then
767 ln -sf $baseName${KERNEL_IMAGE_BIN_EXT} $deployDir/$imageType-${KERNEL_IMAGE_LINK_NAME}${KERNEL_IMAGE_BIN_EXT}
768 fi
769 if [ "${KERNEL_IMAGETYPE_SYMLINK}" = "1" ] ; then
770 ln -sf $baseName${KERNEL_IMAGE_BIN_EXT} $deployDir/$imageType
771 fi
759 done 772 done
760 773
761 if [ ${MODULE_TARBALL_DEPLOY} = "1" ] && (grep -q -i -e '^CONFIG_MODULES=y$' .config); then 774 if [ ${MODULE_TARBALL_DEPLOY} = "1" ] && (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
@@ -768,17 +781,21 @@ kernel_do_deploy() {
768 TAR_ARGS="$TAR_ARGS --owner=0 --group=0" 781 TAR_ARGS="$TAR_ARGS --owner=0 --group=0"
769 tar $TAR_ARGS -cv -C ${D}${root_prefix} lib | gzip -9n > $deployDir/modules-${MODULE_TARBALL_NAME}.tgz 782 tar $TAR_ARGS -cv -C ${D}${root_prefix} lib | gzip -9n > $deployDir/modules-${MODULE_TARBALL_NAME}.tgz
770 783
771 ln -sf modules-${MODULE_TARBALL_NAME}.tgz $deployDir/modules-${MODULE_TARBALL_LINK_NAME}.tgz 784 if [ -n "${MODULE_TARBALL_LINK_NAME}" ] ; then
785 ln -sf modules-${MODULE_TARBALL_NAME}.tgz $deployDir/modules-${MODULE_TARBALL_LINK_NAME}.tgz
786 fi
772 fi 787 fi
773 788
774 if [ ! -z "${INITRAMFS_IMAGE}" -a x"${INITRAMFS_IMAGE_BUNDLE}" = x1 ]; then 789 if [ ! -z "${INITRAMFS_IMAGE}" -a x"${INITRAMFS_IMAGE_BUNDLE}" = x1 ]; then
775 for imageType in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do 790 for imageType in ${KERNEL_IMAGETYPES} ; do
776 if [ "$imageType" = "fitImage" ] ; then 791 if [ "$imageType" = "fitImage" ] ; then
777 continue 792 continue
778 fi 793 fi
779 initramfsBaseName=$imageType-${INITRAMFS_NAME} 794 initramfsBaseName=$imageType-${INITRAMFS_NAME}
780 install -m 0644 ${KERNEL_OUTPUT_DIR}/$imageType.initramfs $deployDir/$initramfsBaseName.bin 795 install -m 0644 ${KERNEL_OUTPUT_DIR}/$imageType.initramfs $deployDir/$initramfsBaseName${KERNEL_IMAGE_BIN_EXT}
781 ln -sf $initramfsBaseName.bin $deployDir/$imageType-${INITRAMFS_LINK_NAME}.bin 796 if [ -n "${INITRAMFS_LINK_NAME}" ] ; then
797 ln -sf $initramfsBaseName${KERNEL_IMAGE_BIN_EXT} $deployDir/$imageType-${INITRAMFS_LINK_NAME}${KERNEL_IMAGE_BIN_EXT}
798 fi
782 done 799 done
783 fi 800 fi
784} 801}
diff --git a/meta/classes/manpages.bbclass b/meta/classes/manpages.bbclass
index 64b7d8c..5e09c77 100644
--- a/meta/classes/manpages.bbclass
+++ b/meta/classes/manpages.bbclass
@@ -12,13 +12,14 @@ MAN_PKG ?= "${PN}-doc"
12# only add man-db to RDEPENDS when manual files are built and installed 12# only add man-db to RDEPENDS when manual files are built and installed
13RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}" 13RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}"
14 14
15pkg_postinst:append:${MAN_PKG} () { 15pkg_postinst:${MAN_PKG}:append () {
16 # only update manual page index caches when manual files are built and installed 16 # only update manual page index caches when manual files are built and installed
17 if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then 17 if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
18 if test -n "$D"; then 18 if test -n "$D"; then
19 if ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'true','false', d)}; then 19 if ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'true', 'false', d)}; then
20 sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf | ${@qemu_run_binary(d, '$D', '${bindir}/mandb')} -C - -u -q $D${mandir} 20 sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf | ${@qemu_run_binary(d, '$D', '${bindir}/mandb')} -C - -u -q $D${mandir}
21 chown -R root:root $D${mandir} 21 chown -R root:root $D${mandir}
22
22 mkdir -p $D${localstatedir}/cache/man 23 mkdir -p $D${localstatedir}/cache/man
23 cd $D${mandir} 24 cd $D${mandir}
24 find . -name index.db | while read index; do 25 find . -name index.db | while read index; do
@@ -36,7 +37,7 @@ pkg_postinst:append:${MAN_PKG} () {
36 fi 37 fi
37} 38}
38 39
39pkg_postrm:append:${MAN_PKG} () { 40pkg_postrm:${MAN_PKG}:append () {
40 # only update manual page index caches when manual files are built and installed 41 # only update manual page index caches when manual files are built and installed
41 if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then 42 if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
42 mandb -q 43 mandb -q
diff --git a/meta/classes/meson.bbclass b/meta/classes/meson.bbclass
index a7981e4..0bfe945 100644
--- a/meta/classes/meson.bbclass
+++ b/meta/classes/meson.bbclass
@@ -1,7 +1,12 @@
1inherit python3native meson-routines 1inherit python3native meson-routines qemu
2 2
3DEPENDS:append = " meson-native ninja-native" 3DEPENDS:append = " meson-native ninja-native"
4 4
5EXEWRAPPER_ENABLED:class-native = "False"
6EXEWRAPPER_ENABLED:class-nativesdk = "False"
7EXEWRAPPER_ENABLED ?= "${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d)}"
8DEPENDS:append = "${@' qemu-native' if d.getVar('EXEWRAPPER_ENABLED') == 'True' else ''}"
9
5# As Meson enforces out-of-tree builds we can just use cleandirs 10# As Meson enforces out-of-tree builds we can just use cleandirs
6B = "${WORKDIR}/build" 11B = "${WORKDIR}/build"
7do_configure[cleandirs] = "${B}" 12do_configure[cleandirs] = "${B}"
@@ -36,6 +41,9 @@ MESON_CROSS_FILE = ""
36MESON_CROSS_FILE:class-target = "--cross-file ${WORKDIR}/meson.cross" 41MESON_CROSS_FILE:class-target = "--cross-file ${WORKDIR}/meson.cross"
37MESON_CROSS_FILE:class-nativesdk = "--cross-file ${WORKDIR}/meson.cross" 42MESON_CROSS_FILE:class-nativesdk = "--cross-file ${WORKDIR}/meson.cross"
38 43
44# Needed to set up qemu wrapper below
45export STAGING_DIR_HOST
46
39def rust_tool(d, target_var): 47def rust_tool(d, target_var):
40 rustc = d.getVar('RUSTC') 48 rustc = d.getVar('RUSTC')
41 if not rustc: 49 if not rustc:
@@ -62,6 +70,7 @@ cups-config = 'cups-config'
62g-ir-scanner = '${STAGING_BINDIR}/g-ir-scanner-wrapper' 70g-ir-scanner = '${STAGING_BINDIR}/g-ir-scanner-wrapper'
63g-ir-compiler = '${STAGING_BINDIR}/g-ir-compiler-wrapper' 71g-ir-compiler = '${STAGING_BINDIR}/g-ir-compiler-wrapper'
64${@rust_tool(d, "HOST_SYS")} 72${@rust_tool(d, "HOST_SYS")}
73${@"exe_wrapper = '${WORKDIR}/meson-qemuwrapper'" if d.getVar('EXEWRAPPER_ENABLED') == 'True' else ""}
65 74
66[built-in options] 75[built-in options]
67c_args = ${@meson_array('CFLAGS', d)} 76c_args = ${@meson_array('CFLAGS', d)}
@@ -71,7 +80,6 @@ cpp_link_args = ${@meson_array('LDFLAGS', d)}
71 80
72[properties] 81[properties]
73needs_exe_wrapper = true 82needs_exe_wrapper = true
74gtkdoc_exe_wrapper = '${B}/gtkdoc-qemuwrapper'
75 83
76[host_machine] 84[host_machine]
77system = '${@meson_operating_system('HOST_OS', d)}' 85system = '${@meson_operating_system('HOST_OS', d)}'
@@ -106,6 +114,24 @@ cpp_link_args = ${@meson_array('BUILD_LDFLAGS', d)}
106EOF 114EOF
107} 115}
108 116
117do_write_config:append:class-target() {
118 # Write out a qemu wrapper that will be used as exe_wrapper so that meson
119 # can run target helper binaries through that.
120 qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
121 cat > ${WORKDIR}/meson-qemuwrapper << EOF
122#!/bin/sh
123# Use a modules directory which doesn't exist so we don't load random things
124# which may then get deleted (or their dependencies) and potentially segfault
125export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
126
127# meson sets this wrongly (only to libs in build-dir), qemu_wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly
128unset LD_LIBRARY_PATH
129
130$qemu_binary "\$@"
131EOF
132 chmod +x ${WORKDIR}/meson-qemuwrapper
133}
134
109# Tell externalsrc that changes to this file require a reconfigure 135# Tell externalsrc that changes to this file require a reconfigure
110CONFIGURE_FILES = "meson.build" 136CONFIGURE_FILES = "meson.build"
111 137
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index 73ad2ab..4a3e582 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -92,6 +92,10 @@ multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
92 92
93python __anonymous () { 93python __anonymous () {
94 if bb.data.inherits_class('image', d): 94 if bb.data.inherits_class('image', d):
95 # set rpm preferred file color for 32-bit multilib image
96 if d.getVar("SITEINFO_BITS") == "32":
97 d.setVar("RPM_PREFER_ELF_ARCH", "1")
98
95 variant = d.getVar("BBEXTENDVARIANT") 99 variant = d.getVar("BBEXTENDVARIANT")
96 import oe.classextend 100 import oe.classextend
97 101
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 76a599b..fc7422c 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -195,3 +195,34 @@ USE_NLS = "no"
195 195
196RECIPERDEPTASK = "do_populate_sysroot" 196RECIPERDEPTASK = "do_populate_sysroot"
197do_populate_sysroot[rdeptask] = "${RECIPERDEPTASK}" 197do_populate_sysroot[rdeptask] = "${RECIPERDEPTASK}"
198
199#
200# Native task outputs are directly run on the target (host) system after being
201# built. Even if the output of this recipe doesn't change, a change in one of
202# its dependencies may cause a change in the output it generates (e.g. rpm
203# output depends on the output of its dependent zstd library).
204#
205# This can cause poor interactions with hash equivalence, since this recipe's
206# output-changing dependency is "hidden" and downstream tasks only see that this
207# recipe has the same outhash and therefore is equivalent. This can result in
208# different output in different cases.
209#
210# To resolve this, unhide the output-changing dependency by adding its unihash
211# to this task's outhash calculation. Unfortunately, we don't specifically
212# know which dependencies are output-changing, so we have to add all of them.
213#
214python native_add_do_populate_sysroot_deps () {
215 current_task = "do_" + d.getVar("BB_CURRENTTASK")
216 if current_task != "do_populate_sysroot":
217 return
218
219 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
220 pn = d.getVar("PN")
221 deps = {
222 dep[0]:dep[6] for dep in taskdepdata.values() if
223 dep[1] == current_task and dep[0] != pn
224 }
225
226 d.setVar("HASHEQUIV_EXTRA_SIGDATA", "\n".join("%s: %s" % (k, deps[k]) for k in sorted(deps.keys())))
227}
228SSTATECREATEFUNCS += "native_add_do_populate_sysroot_deps"
diff --git a/meta/classes/overlayfs-etc.bbclass b/meta/classes/overlayfs-etc.bbclass
new file mode 100644
index 0000000..4ced07b
--- /dev/null
+++ b/meta/classes/overlayfs-etc.bbclass
@@ -0,0 +1,76 @@
1# Class for setting up /etc in overlayfs
2#
3# In order to have /etc directory in overlayfs a special handling at early boot stage is required
4# The idea is to supply a custom init script that mounts /etc before launching actual init program,
5# because the latter already requires /etc to be mounted
6#
7# The configuration must be machine specific. You should at least set these three variables:
8# OVERLAYFS_ETC_MOUNT_POINT ?= "/data"
9# OVERLAYFS_ETC_FSTYPE ?= "ext4"
10# OVERLAYFS_ETC_DEVICE ?= "/dev/mmcblk0p2"
11#
12# To control more mount options you should consider setting mount options:
13# OVERLAYFS_ETC_MOUNT_OPTIONS ?= "defaults"
14#
15# The class provides two options for /sbin/init generation
16# 1. Default option is to rename original /sbin/init to /sbin/init.orig and place generated init under
17# original name, i.e. /sbin/init. It has an advantage that you won't need to change any kernel
18# parameters in order to make it work, but it poses a restriction that package-management can't
19 be used, because updating init manager would remove generated script
20 2. If you would like to keep original init as is, you can set
21# OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "0"
22 Then generated init will be named /sbin/preinit and you would need to extend your kernel parameters
23# manually in your bootloader configuration.
24#
25# Regardless which mode you choose, update and migration strategy of configuration files under /etc
26# overlay is out of scope of this class
27
28ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "overlayfs-etc", "create_overlayfs_etc_preinit;", "", d)}'
29IMAGE_FEATURES_CONFLICTS_overlayfs-etc = "package-management"
30
31OVERLAYFS_ETC_MOUNT_POINT ??= ""
32OVERLAYFS_ETC_FSTYPE ??= ""
33OVERLAYFS_ETC_DEVICE ??= ""
34OVERLAYFS_ETC_USE_ORIG_INIT_NAME ??= "1"
35OVERLAYFS_ETC_MOUNT_OPTIONS ??= "defaults"
36OVERLAYFS_ETC_INIT_TEMPLATE ??= "${COREBASE}/meta/files/overlayfs-etc-preinit.sh.in"
37
38python create_overlayfs_etc_preinit() {
39 overlayEtcMountPoint = d.getVar("OVERLAYFS_ETC_MOUNT_POINT")
40 overlayEtcFsType = d.getVar("OVERLAYFS_ETC_FSTYPE")
41 overlayEtcDevice = d.getVar("OVERLAYFS_ETC_DEVICE")
42
43 if not overlayEtcMountPoint:
44 bb.fatal("OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
45 if not overlayEtcDevice:
46 bb.fatal("OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
47 if not overlayEtcFsType:
48 bb.fatal("OVERLAYFS_ETC_FSTYPE should contain a valid file system type on {0}".format(overlayEtcDevice))
49
50 with open(d.getVar("OVERLAYFS_ETC_INIT_TEMPLATE"), "r") as f:
51 PreinitTemplate = f.read()
52
53 useOrigInit = oe.types.boolean(d.getVar('OVERLAYFS_ETC_USE_ORIG_INIT_NAME'))
54 preinitPath = oe.path.join(d.getVar("IMAGE_ROOTFS"), d.getVar("base_sbindir"), "preinit")
55 initBaseName = oe.path.join(d.getVar("base_sbindir"), "init")
56 origInitNameSuffix = ".orig"
57
58 args = {
59 'OVERLAYFS_ETC_MOUNT_POINT': overlayEtcMountPoint,
60 'OVERLAYFS_ETC_MOUNT_OPTIONS': d.getVar('OVERLAYFS_ETC_MOUNT_OPTIONS'),
61 'OVERLAYFS_ETC_FSTYPE': overlayEtcFsType,
62 'OVERLAYFS_ETC_DEVICE': overlayEtcDevice,
63 'SBIN_INIT_NAME': initBaseName + origInitNameSuffix if useOrigInit else initBaseName
64 }
65
66 if useOrigInit:
67 # rename original /sbin/init
68 origInit = oe.path.join(d.getVar("IMAGE_ROOTFS"), initBaseName)
69 bb.debug(1, "rootfs path %s, init path %s, test %s" % (d.getVar('IMAGE_ROOTFS'), origInit, d.getVar("IMAGE_ROOTFS")))
70 bb.utils.rename(origInit, origInit + origInitNameSuffix)
71 preinitPath = origInit
72
73 with open(preinitPath, 'w') as f:
74 f.write(PreinitTemplate.format(**args))
75 os.chmod(preinitPath, 0o755)
76}
diff --git a/meta/classes/overlayfs.bbclass b/meta/classes/overlayfs.bbclass
index 3c0f4dc..4a860f7 100644
--- a/meta/classes/overlayfs.bbclass
+++ b/meta/classes/overlayfs.bbclass
@@ -31,56 +31,25 @@
31# OVERLAYFS_WRITABLE_PATHS[mnt-overlay] = "/usr/share/another-application" 31# OVERLAYFS_WRITABLE_PATHS[mnt-overlay] = "/usr/share/another-application"
32# 32#
33# Note: the class does not support /etc directory itself, because systemd depends on it 33# Note: the class does not support /etc directory itself, because systemd depends on it
34# For /etc directory use overlayfs-etc class
34 35
35REQUIRED_DISTRO_FEATURES += "systemd overlayfs" 36REQUIRED_DISTRO_FEATURES += "systemd overlayfs"
36 37
37inherit systemd features_check 38inherit systemd features_check
38 39
40OVERLAYFS_CREATE_DIRS_TEMPLATE ??= "${COREBASE}/meta/files/overlayfs-create-dirs.service.in"
41OVERLAYFS_MOUNT_UNIT_TEMPLATE ??= "${COREBASE}/meta/files/overlayfs-unit.mount.in"
42OVERLAYFS_ALL_OVERLAYS_TEMPLATE ??= "${COREBASE}/meta/files/overlayfs-all-overlays.service.in"
43
39python do_create_overlayfs_units() { 44python do_create_overlayfs_units() {
40 from oe.overlayfs import mountUnitName 45 from oe.overlayfs import mountUnitName
41 46
42 CreateDirsUnitTemplate = """[Unit] 47 with open(d.getVar("OVERLAYFS_CREATE_DIRS_TEMPLATE"), "r") as f:
43Description=Overlayfs directories setup 48 CreateDirsUnitTemplate = f.read()
44Requires={DATA_MOUNT_UNIT} 49 with open(d.getVar("OVERLAYFS_MOUNT_UNIT_TEMPLATE"), "r") as f:
45After={DATA_MOUNT_UNIT} 50 MountUnitTemplate = f.read()
46DefaultDependencies=no 51 with open(d.getVar("OVERLAYFS_ALL_OVERLAYS_TEMPLATE"), "r") as f:
47 52 AllOverlaysTemplate = f.read()
48[Service]
49Type=oneshot
50ExecStart=mkdir -p {DATA_MOUNT_POINT}/workdir{LOWERDIR} && mkdir -p {DATA_MOUNT_POINT}/upper{LOWERDIR}
51RemainAfterExit=true
52StandardOutput=journal
53
54[Install]
55WantedBy=multi-user.target
56"""
57 MountUnitTemplate = """[Unit]
58Description=Overlayfs mount unit
59Requires={CREATE_DIRS_SERVICE}
60After={CREATE_DIRS_SERVICE}
61
62[Mount]
63What=overlay
64Where={LOWERDIR}
65Type=overlay
66Options=lowerdir={LOWERDIR},upperdir={DATA_MOUNT_POINT}/upper{LOWERDIR},workdir={DATA_MOUNT_POINT}/workdir{LOWERDIR}
67
68[Install]
69WantedBy=multi-user.target
70"""
71 AllOverlaysTemplate = """[Unit]
72Description=Groups all overlays required by {PN} in one unit
73After={ALL_OVERLAYFS_UNITS}
74Requires={ALL_OVERLAYFS_UNITS}
75
76[Service]
77Type=oneshot
78ExecStart=/bin/true
79RemainAfterExit=true
80
81[Install]
82WantedBy=local-fs.target
83"""
84 53
85 def prepareUnits(data, lower): 54 def prepareUnits(data, lower):
86 from oe.overlayfs import helperUnitName 55 from oe.overlayfs import helperUnitName
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 84eafbd..4927fb9 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -390,10 +390,6 @@ def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir,
390 dvar = d.getVar('PKGD') 390 dvar = d.getVar('PKGD')
391 objcopy = d.getVar("OBJCOPY") 391 objcopy = d.getVar("OBJCOPY")
392 392
393 # We ignore kernel modules, we don't generate debug info files.
394 if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
395 return (file, sources)
396
397 newmode = None 393 newmode = None
398 if not os.access(file, os.W_OK) or os.access(file, os.R_OK): 394 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
399 origmode = os.stat(file)[stat.ST_MODE] 395 origmode = os.stat(file)[stat.ST_MODE]
@@ -1122,7 +1118,6 @@ python split_and_strip_files () {
1122 # 1118 #
1123 elffiles = {} 1119 elffiles = {}
1124 symlinks = {} 1120 symlinks = {}
1125 kernmods = []
1126 staticlibs = [] 1121 staticlibs = []
1127 inodes = {} 1122 inodes = {}
1128 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) 1123 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
@@ -1145,9 +1140,6 @@ python split_and_strip_files () {
1145 if file in skipfiles: 1140 if file in skipfiles:
1146 continue 1141 continue
1147 1142
1148 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
1149 kernmods.append(file)
1150 continue
1151 if oe.package.is_static_lib(file): 1143 if oe.package.is_static_lib(file):
1152 staticlibs.append(file) 1144 staticlibs.append(file)
1153 continue 1145 continue
@@ -1164,8 +1156,11 @@ python split_and_strip_files () {
1164 if not s: 1156 if not s:
1165 continue 1157 continue
1166 # Check its an executable 1158 # Check its an executable
1167 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \ 1159 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
1168 or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)): 1160 or (s[stat.ST_MODE] & stat.S_IXOTH) \
1161 or ((file.startswith(libdir) or file.startswith(baselibdir)) \
1162 and (".so" in f or ".node" in f)) \
1163 or (f.startswith('vmlinux') or ".ko" in f):
1169 1164
1170 if cpath.islink(file): 1165 if cpath.islink(file):
1171 checkelflinks[file] = ltarget 1166 checkelflinks[file] = ltarget
@@ -1312,8 +1307,6 @@ python split_and_strip_files () {
1312 elf_file = int(elffiles[file]) 1307 elf_file = int(elffiles[file])
1313 #bb.note("Strip %s" % file) 1308 #bb.note("Strip %s" % file)
1314 sfiles.append((file, elf_file, strip)) 1309 sfiles.append((file, elf_file, strip))
1315 for f in kernmods:
1316 sfiles.append((f, 16, strip))
1317 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): 1310 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1318 for f in staticlibs: 1311 for f in staticlibs:
1319 sfiles.append((f, 16, strip)) 1312 sfiles.append((f, 16, strip))
@@ -1878,7 +1871,7 @@ python package_do_shlibs() {
1878 sonames.add(prov) 1871 sonames.add(prov)
1879 if libdir_re.match(os.path.dirname(file)): 1872 if libdir_re.match(os.path.dirname(file)):
1880 needs_ldconfig = True 1873 needs_ldconfig = True
1881 if snap_symlinks and (os.path.basename(file) != this_soname): 1874 if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
1882 renames.append((file, os.path.join(os.path.dirname(file), this_soname))) 1875 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1883 return (needs_ldconfig, needed, sonames, renames) 1876 return (needs_ldconfig, needed, sonames, renames)
1884 1877
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 1ae6393..2e75e22 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -316,6 +316,7 @@ do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
316do_package_write_deb[cleandirs] = "${PKGWRITEDIRDEB}" 316do_package_write_deb[cleandirs] = "${PKGWRITEDIRDEB}"
317do_package_write_deb[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" 317do_package_write_deb[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
318addtask package_write_deb after do_packagedata do_package do_deploy_source_date_epoch before do_build 318addtask package_write_deb after do_packagedata do_package do_deploy_source_date_epoch before do_build
319do_build[rdeptask] += "do_package_write_deb"
319 320
320PACKAGEINDEXDEPS += "dpkg-native:do_populate_sysroot" 321PACKAGEINDEXDEPS += "dpkg-native:do_populate_sysroot"
321PACKAGEINDEXDEPS += "apt-native:do_populate_sysroot" 322PACKAGEINDEXDEPS += "apt-native:do_populate_sysroot"
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 902b7f9..f67cb0e 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -275,6 +275,7 @@ do_package_write_ipk[dirs] = "${PKGWRITEDIRIPK}"
275do_package_write_ipk[cleandirs] = "${PKGWRITEDIRIPK}" 275do_package_write_ipk[cleandirs] = "${PKGWRITEDIRIPK}"
276do_package_write_ipk[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" 276do_package_write_ipk[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
277addtask package_write_ipk after do_packagedata do_package do_deploy_source_date_epoch before do_build 277addtask package_write_ipk after do_packagedata do_package do_deploy_source_date_epoch before do_build
278do_build[rdeptask] += "do_package_write_ipk"
278 279
279PACKAGEINDEXDEPS += "opkg-utils-native:do_populate_sysroot" 280PACKAGEINDEXDEPS += "opkg-utils-native:do_populate_sysroot"
280PACKAGEINDEXDEPS += "opkg-native:do_populate_sysroot" 281PACKAGEINDEXDEPS += "opkg-native:do_populate_sysroot"
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index b075442..e9ff1f7 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -749,6 +749,7 @@ do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}"
749do_package_write_rpm[cleandirs] = "${PKGWRITEDIRRPM}" 749do_package_write_rpm[cleandirs] = "${PKGWRITEDIRRPM}"
750do_package_write_rpm[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" 750do_package_write_rpm[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
751addtask package_write_rpm after do_packagedata do_package do_deploy_source_date_epoch before do_build 751addtask package_write_rpm after do_packagedata do_package do_deploy_source_date_epoch before do_build
752do_build[rdeptask] += "do_package_write_rpm"
752 753
753PACKAGEINDEXDEPS += "rpm-native:do_populate_sysroot" 754PACKAGEINDEXDEPS += "rpm-native:do_populate_sysroot"
754PACKAGEINDEXDEPS += "createrepo-c-native:do_populate_sysroot" 755PACKAGEINDEXDEPS += "createrepo-c-native:do_populate_sysroot"
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index fafdd96..16f929b 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -92,6 +92,8 @@ SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
92SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest" 92SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
93SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest" 93SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
94 94
95SDK_PRUNE_SYSROOT_DIRS ?= "/dev"
96
95python write_target_sdk_manifest () { 97python write_target_sdk_manifest () {
96 from oe.sdk import sdk_list_installed_packages 98 from oe.sdk import sdk_list_installed_packages
97 from oe.utils import format_pkg_list 99 from oe.utils import format_pkg_list
@@ -103,6 +105,12 @@ python write_target_sdk_manifest () {
103 output.write(format_pkg_list(pkgs, 'ver')) 105 output.write(format_pkg_list(pkgs, 'ver'))
104} 106}
105 107
108sdk_prune_dirs () {
109 for d in ${SDK_PRUNE_SYSROOT_DIRS}; do
110 rm -rf ${SDK_OUTPUT}${SDKTARGETSYSROOT}$d
111 done
112}
113
106python write_sdk_test_data() { 114python write_sdk_test_data() {
107 from oe.data import export2json 115 from oe.data import export2json
108 testdata = "%s/%s.testdata.json" % (d.getVar('SDKDEPLOYDIR'), d.getVar('TOOLCHAIN_OUTPUTNAME')) 116 testdata = "%s/%s.testdata.json" % (d.getVar('SDKDEPLOYDIR'), d.getVar('TOOLCHAIN_OUTPUTNAME'))
@@ -122,8 +130,9 @@ python write_host_sdk_manifest () {
122} 130}
123 131
124POPULATE_SDK_POST_TARGET_COMMAND:append = " write_sdk_test_data ; " 132POPULATE_SDK_POST_TARGET_COMMAND:append = " write_sdk_test_data ; "
125POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " write_target_sdk_manifest ; " 133POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " write_target_sdk_manifest; sdk_prune_dirs; "
126POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " write_host_sdk_manifest; " 134POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " write_host_sdk_manifest; "
135
127SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}" 136SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}"
128SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} " 137SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} "
129 138
diff --git a/meta/classes/python3native.bbclass b/meta/classes/python3native.bbclass
index 13fbaa5..3783c0c 100644
--- a/meta/classes/python3native.bbclass
+++ b/meta/classes/python3native.bbclass
@@ -4,7 +4,7 @@ PYTHON="${STAGING_BINDIR_NATIVE}/python3-native/python3"
4EXTRANATIVEPATH += "python3-native" 4EXTRANATIVEPATH += "python3-native"
5DEPENDS:append = " python3-native " 5DEPENDS:append = " python3-native "
6 6
7# python-config and other scripts are using distutils modules 7# python-config and other scripts are using sysconfig modules
8# which we patch to access these variables 8# which we patch to access these variables
9export STAGING_INCDIR 9export STAGING_INCDIR
10export STAGING_LIBDIR 10export STAGING_LIBDIR
diff --git a/meta/classes/qemuboot.bbclass b/meta/classes/qemuboot.bbclass
index 8cdb544..cc1cbce 100644
--- a/meta/classes/qemuboot.bbclass
+++ b/meta/classes/qemuboot.bbclass
@@ -93,7 +93,7 @@ QB_RNG ?= "-object rng-random,filename=/dev/urandom,id=rng0 -device virtio-rng-p
93QB_OPT_APPEND ?= "" 93QB_OPT_APPEND ?= ""
94QB_NETWORK_DEVICE ?= "-device virtio-net-pci,netdev=net0,mac=@MAC@" 94QB_NETWORK_DEVICE ?= "-device virtio-net-pci,netdev=net0,mac=@MAC@"
95QB_CMDLINE_IP_SLIRP ?= "ip=dhcp" 95QB_CMDLINE_IP_SLIRP ?= "ip=dhcp"
96QB_CMDLINE_IP_TAP ?= "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0" 96QB_CMDLINE_IP_TAP ?= "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8"
97QB_ROOTFS_EXTRA_OPT ?= "" 97QB_ROOTFS_EXTRA_OPT ?= ""
98QB_GRAPHICS ?= "" 98QB_GRAPHICS ?= ""
99 99
@@ -118,7 +118,10 @@ python do_write_qemuboot_conf() {
118 import configparser 118 import configparser
119 119
120 qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_NAME')) 120 qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_NAME'))
121 qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_LINK_NAME')) 121 if d.getVar('IMAGE_LINK_NAME'):
122 qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_LINK_NAME'))
123 else:
124 qemuboot_link = ""
122 finalpath = d.getVar("DEPLOY_DIR_IMAGE") 125 finalpath = d.getVar("DEPLOY_DIR_IMAGE")
123 topdir = d.getVar('TOPDIR') 126 topdir = d.getVar('TOPDIR')
124 cf = configparser.ConfigParser() 127 cf = configparser.ConfigParser()
@@ -153,7 +156,7 @@ python do_write_qemuboot_conf() {
153 with open(qemuboot, 'w') as f: 156 with open(qemuboot, 'w') as f:
154 cf.write(f) 157 cf.write(f)
155 158
156 if qemuboot_link != qemuboot: 159 if qemuboot_link and qemuboot_link != qemuboot:
157 if os.path.lexists(qemuboot_link): 160 if os.path.lexists(qemuboot_link):
158 os.remove(qemuboot_link) 161 os.remove(qemuboot_link)
159 os.symlink(os.path.basename(qemuboot), qemuboot_link) 162 os.symlink(os.path.basename(qemuboot), qemuboot_link)
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index a3f96ef..74035c3 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -21,7 +21,7 @@ ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "read-only
21# otherwise kernel or initramfs end up mounting the rootfs read/write 21# otherwise kernel or initramfs end up mounting the rootfs read/write
22# (the default) if supported by the underlying storage. 22# (the default) if supported by the underlying storage.
23# 23#
24# We do this with _append because the default value might get set later with ?= 24# We do this with :append because the default value might get set later with ?=
25# and we don't want to disable such a default that by setting a value here. 25# and we don't want to disable such a default that by setting a value here.
26APPEND:append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}' 26APPEND:append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}'
27 27
@@ -52,7 +52,7 @@ inherit image-artifact-names
52# the numeric IDs of dynamically created entries remain stable. 52# the numeric IDs of dynamically created entries remain stable.
53# 53#
54# We want this to run as late as possible, in particular after 54# We want this to run as late as possible, in particular after
55# systemd_sysusers_create and set_user_group. Using _append is not 55# systemd_sysusers_create and set_user_group. Using :append is not
56# enough for that, set_user_group is added that way and would end 56# enough for that, set_user_group is added that way and would end
57# up running after us. 57# up running after us.
58SORT_PASSWD_POSTPROCESS_COMMAND ??= " sort_passwd; " 58SORT_PASSWD_POSTPROCESS_COMMAND ??= " sort_passwd; "
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 3d8d784..bec4d63 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -4,7 +4,7 @@
4 4
5ROOTFS_PKGMANAGE = "rpm dnf" 5ROOTFS_PKGMANAGE = "rpm dnf"
6 6
7# dnf is using our custom distutils, and so will fail without these 7# dnf is using our custom sysconfig module, and so will fail without these
8export STAGING_INCDIR 8export STAGING_INCDIR
9export STAGING_LIBDIR 9export STAGING_LIBDIR
10 10
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index 9fbc9c1..f288b4c 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -185,37 +185,6 @@ def raise_sanity_error(msg, d, network_error=False):
185 185
186 %s""" % msg) 186 %s""" % msg)
187 187
188# Check flags associated with a tuning.
189def check_toolchain_tune_args(data, tune, multilib, errs):
190 found_errors = False
191 if check_toolchain_args_present(data, tune, multilib, errs, 'CCARGS'):
192 found_errors = True
193 if check_toolchain_args_present(data, tune, multilib, errs, 'ASARGS'):
194 found_errors = True
195 if check_toolchain_args_present(data, tune, multilib, errs, 'LDARGS'):
196 found_errors = True
197
198 return found_errors
199
200def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
201 args_set = (data.getVar("TUNE_%s" % which) or "").split()
202 args_wanted = (data.getVar("TUNEABI_REQUIRED_%s:tune-%s" % (which, tune)) or "").split()
203 args_missing = []
204
205 # If no args are listed/required, we are done.
206 if not args_wanted:
207 return
208 for arg in args_wanted:
209 if arg not in args_set:
210 args_missing.append(arg)
211
212 found_errors = False
213 if args_missing:
214 found_errors = True
215 tune_errors.append("TUNEABI for %s requires '%s' in TUNE_%s (%s)." %
216 (tune, ' '.join(args_missing), which, ' '.join(args_set)))
217 return found_errors
218
219# Check a single tune for validity. 188# Check a single tune for validity.
220def check_toolchain_tune(data, tune, multilib): 189def check_toolchain_tune(data, tune, multilib):
221 tune_errors = [] 190 tune_errors = []
@@ -247,17 +216,6 @@ def check_toolchain_tune(data, tune, multilib):
247 bb.debug(2, " %s: %s" % (feature, valid_tunes[feature])) 216 bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
248 else: 217 else:
249 tune_errors.append("Feature '%s' is not defined." % feature) 218 tune_errors.append("Feature '%s' is not defined." % feature)
250 whitelist = localdata.getVar("TUNEABI_WHITELIST")
251 if whitelist:
252 tuneabi = localdata.getVar("TUNEABI:tune-%s" % tune)
253 if not tuneabi:
254 tuneabi = tune
255 if True not in [x in whitelist.split() for x in tuneabi.split()]:
256 tune_errors.append("Tuning '%s' (%s) cannot be used with any supported tuning/ABI." %
257 (tune, tuneabi))
258 else:
259 if not check_toolchain_tune_args(localdata, tuneabi, multilib, tune_errors):
260 bb.debug(2, "Sanity check: Compiler args OK for %s." % tune)
261 if tune_errors: 219 if tune_errors:
262 return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors) 220 return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)
263 221
@@ -462,13 +420,12 @@ def check_sanity_validmachine(sanity_data):
462# Patch before 2.7 can't handle all the features in git-style diffs. Some 420# Patch before 2.7 can't handle all the features in git-style diffs. Some
463# patches may incorrectly apply, and others won't apply at all. 421# patches may incorrectly apply, and others won't apply at all.
464def check_patch_version(sanity_data): 422def check_patch_version(sanity_data):
465 from distutils.version import LooseVersion
466 import re, subprocess 423 import re, subprocess
467 424
468 try: 425 try:
469 result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8') 426 result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
470 version = re.search(r"[0-9.]+", result.splitlines()[0]).group() 427 version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
471 if LooseVersion(version) < LooseVersion("2.7"): 428 if bb.utils.vercmp_string_op(version, "2.7", "<"):
472 return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n" 429 return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
473 else: 430 else:
474 return None 431 return None
@@ -478,7 +435,6 @@ def check_patch_version(sanity_data):
478# Unpatched versions of make 3.82 are known to be broken. See GNU Savannah Bug 30612. 435# Unpatched versions of make 3.82 are known to be broken. See GNU Savannah Bug 30612.
479# Use a modified reproducer from http://savannah.gnu.org/bugs/?30612 to validate. 436# Use a modified reproducer from http://savannah.gnu.org/bugs/?30612 to validate.
480def check_make_version(sanity_data): 437def check_make_version(sanity_data):
481 from distutils.version import LooseVersion
482 import subprocess 438 import subprocess
483 439
484 try: 440 try:
@@ -486,7 +442,7 @@ def check_make_version(sanity_data):
486 except subprocess.CalledProcessError as e: 442 except subprocess.CalledProcessError as e:
487 return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output) 443 return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
488 version = result.split()[2] 444 version = result.split()[2]
489 if LooseVersion(version) == LooseVersion("3.82"): 445 if bb.utils.vercmp_string_op(version, "3.82", "=="):
490 # Construct a test file 446 # Construct a test file
491 f = open("makefile_test", "w") 447 f = open("makefile_test", "w")
492 f.write("makefile_test.a: makefile_test_a.c makefile_test_b.c makefile_test.a( makefile_test_a.c makefile_test_b.c)\n") 448 f.write("makefile_test.a: makefile_test_a.c makefile_test_b.c makefile_test.a( makefile_test_a.c makefile_test_b.c)\n")
@@ -539,12 +495,11 @@ def check_wsl(d):
539# built buildtools-extended-tarball) 495# built buildtools-extended-tarball)
540# 496#
541def check_gcc_version(sanity_data): 497def check_gcc_version(sanity_data):
542 from distutils.version import LooseVersion
543 import subprocess 498 import subprocess
544 499
545 build_cc, version = oe.utils.get_host_compiler_version(sanity_data) 500 build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
546 if build_cc.strip() == "gcc": 501 if build_cc.strip() == "gcc":
547 if LooseVersion(version) < LooseVersion("7.5"): 502 if bb.utils.vercmp_string_op(version, "7.5", "<"):
548 return "Your version of gcc is older than 7.5 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n" 503 return "Your version of gcc is older than 7.5 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
549 return None 504 return None
550 505
@@ -552,14 +507,13 @@ def check_gcc_version(sanity_data):
552# but earlier versions do not; this needs to work properly for sstate 507# but earlier versions do not; this needs to work properly for sstate
553# Version 1.28 is needed so opkg-build works correctly when reproducibile builds are enabled 508# Version 1.28 is needed so opkg-build works correctly when reproducibile builds are enabled
554def check_tar_version(sanity_data): 509def check_tar_version(sanity_data):
555 from distutils.version import LooseVersion
556 import subprocess 510 import subprocess
557 try: 511 try:
558 result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8') 512 result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
559 except subprocess.CalledProcessError as e: 513 except subprocess.CalledProcessError as e:
560 return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output) 514 return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
561 version = result.split()[3] 515 version = result.split()[3]
562 if LooseVersion(version) < LooseVersion("1.28"): 516 if bb.utils.vercmp_string_op(version, "1.28", "<"):
563 return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" 517 return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
564 return None 518 return None
565 519
@@ -567,14 +521,13 @@ def check_tar_version(sanity_data):
567# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162 521# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
568# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped 522# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
569def check_git_version(sanity_data): 523def check_git_version(sanity_data):
570 from distutils.version import LooseVersion
571 import subprocess 524 import subprocess
572 try: 525 try:
573 result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8') 526 result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
574 except subprocess.CalledProcessError as e: 527 except subprocess.CalledProcessError as e:
575 return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output) 528 return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
576 version = result.split()[2] 529 version = result.split()[2]
577 if LooseVersion(version) < LooseVersion("1.8.3.1"): 530 if bb.utils.vercmp_string_op(version, "1.8.3.1", "<"):
578 return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n" 531 return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
579 return None 532 return None
580 533
@@ -796,9 +749,8 @@ def check_sanity_everybuild(status, d):
796 status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n') 749 status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n')
797 750
798 # Check the bitbake version meets minimum requirements 751 # Check the bitbake version meets minimum requirements
799 from distutils.version import LooseVersion
800 minversion = d.getVar('BB_MIN_VERSION') 752 minversion = d.getVar('BB_MIN_VERSION')
801 if (LooseVersion(bb.__version__) < LooseVersion(minversion)): 753 if bb.utils.vercmp_string_op(bb.__version__, minversion, "<"):
802 status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__)) 754 status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))
803 755
804 sanity_check_locale(d) 756 sanity_check_locale(d)
diff --git a/meta/classes/setuptools3-base.bbclass b/meta/classes/setuptools3-base.bbclass
new file mode 100644
index 0000000..5098ae9
--- /dev/null
+++ b/meta/classes/setuptools3-base.bbclass
@@ -0,0 +1,31 @@
1DEPENDS:append:class-target = " ${PYTHON_PN}-native ${PYTHON_PN}"
2DEPENDS:append:class-nativesdk = " ${PYTHON_PN}-native ${PYTHON_PN}"
3RDEPENDS:${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"
4
5export STAGING_INCDIR
6export STAGING_LIBDIR
7
8# LDSHARED is the ld *command* used to create shared library
9export LDSHARED = "${CCLD} -shared"
10# LDXXSHARED is the ld *command* used to create shared library of C++
11# objects
12export LDCXXSHARED = "${CXX} -shared"
13# CCSHARED are the C *flags* used to create objects to go into a shared
14# library (module)
15export CCSHARED = "-fPIC -DPIC"
16# LINKFORSHARED are the flags passed to the $(CC) command that links
17# the python executable
18export LINKFORSHARED = "${SECURITY_CFLAGS} -Xlinker -export-dynamic"
19
20FILES:${PN} += "${libdir}/* ${libdir}/${PYTHON_DIR}/*"
21
22FILES:${PN}-staticdev += "\
23 ${PYTHON_SITEPACKAGES_DIR}/*.a \
24"
25FILES:${PN}-dev += "\
26 ${datadir}/pkgconfig \
27 ${libdir}/pkgconfig \
28 ${PYTHON_SITEPACKAGES_DIR}/*.la \
29"
30inherit python3native python3targetconfig
31
diff --git a/meta/classes/setuptools3.bbclass b/meta/classes/setuptools3.bbclass
index 8ca66ee..fd8499d 100644
--- a/meta/classes/setuptools3.bbclass
+++ b/meta/classes/setuptools3.bbclass
@@ -1,4 +1,68 @@
1inherit distutils3 1inherit setuptools3-base
2 2
3B = "${WORKDIR}/build"
4
5SETUPTOOLS_BUILD_ARGS ?= ""
6SETUPTOOLS_INSTALL_ARGS ?= "--root=${D} \
7 --prefix=${prefix} \
8 --install-lib=${PYTHON_SITEPACKAGES_DIR} \
9 --install-data=${datadir}"
10
11SETUPTOOLS_PYTHON = "python3"
12SETUPTOOLS_PYTHON:class-native = "nativepython3"
13
14SETUPTOOLS_SETUP_PATH ?= "${S}"
15
16setuptools3_do_configure() {
17 :
18}
19
20setuptools3_do_compile() {
21 cd ${SETUPTOOLS_SETUP_PATH}
22 NO_FETCH_BUILD=1 \
23 STAGING_INCDIR=${STAGING_INCDIR} \
24 STAGING_LIBDIR=${STAGING_LIBDIR} \
25 ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
26 build --build-base=${B} ${SETUPTOOLS_BUILD_ARGS} || \
27 bbfatal_log "'${PYTHON_PN} setup.py build ${SETUPTOOLS_BUILD_ARGS}' execution failed."
28}
29setuptools3_do_compile[vardepsexclude] = "MACHINE"
30
31setuptools3_do_install() {
32 cd ${SETUPTOOLS_SETUP_PATH}
33 install -d ${D}${PYTHON_SITEPACKAGES_DIR}
34 STAGING_INCDIR=${STAGING_INCDIR} \
35 STAGING_LIBDIR=${STAGING_LIBDIR} \
36 PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR} \
37 ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
38 build --build-base=${B} install --skip-build ${SETUPTOOLS_INSTALL_ARGS} || \
39 bbfatal_log "'${PYTHON_PN} setup.py install ${SETUPTOOLS_INSTALL_ARGS}' execution failed."
40
41 # support filenames with *spaces*
42 find ${D} -name "*.py" -exec grep -q ${D} {} \; \
43 -exec sed -i -e s:${D}::g {} \;
44
45 for i in ${D}${bindir}/* ${D}${sbindir}/*; do
46 if [ -f "$i" ]; then
47 sed -i -e s:${PYTHON}:${USRBINPATH}/env\ ${SETUPTOOLS_PYTHON}:g $i
48 sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
49 fi
50 done
51
52 rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/easy-install.pth
53
54 #
55 # FIXME: Bandaid against wrong datadir computation
56 #
57 if [ -e ${D}${datadir}/share ]; then
58 mv -f ${D}${datadir}/share/* ${D}${datadir}/
59 rmdir ${D}${datadir}/share
60 fi
61}
62setuptools3_do_install[vardepsexclude] = "MACHINE"
63
64EXPORT_FUNCTIONS do_configure do_compile do_install
65
66export LDSHARED="${CCLD} -shared"
3DEPENDS += "python3-setuptools-native" 67DEPENDS += "python3-setuptools-native"
4 68
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 0326d27..b45da4f 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -158,6 +158,8 @@ python () {
158 for task in unique_tasks: 158 for task in unique_tasks:
159 d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ") 159 d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ")
160 d.appendVarFlag(task, 'postfuncs', " sstate_task_postfunc") 160 d.appendVarFlag(task, 'postfuncs', " sstate_task_postfunc")
161 d.setVarFlag(task, 'network', '1')
162 d.setVarFlag(task + "_setscene", 'network', '1')
161} 163}
162 164
163def sstate_init(task, d): 165def sstate_init(task, d):
@@ -793,7 +795,9 @@ def sstate_setscene(d):
793 shared_state = sstate_state_fromvars(d) 795 shared_state = sstate_state_fromvars(d)
794 accelerate = sstate_installpkg(shared_state, d) 796 accelerate = sstate_installpkg(shared_state, d)
795 if not accelerate: 797 if not accelerate:
796 bb.fatal("No suitable staging package found") 798 msg = "No sstate archive obtainable, will run full task instead."
799 bb.warn(msg)
800 raise bb.BBHandledException(msg)
797 801
798python sstate_task_prefunc () { 802python sstate_task_prefunc () {
799 shared_state = sstate_state_fromvars(d) 803 shared_state = sstate_state_fromvars(d)
@@ -899,7 +903,7 @@ sstate_unpack_package () {
899 ZSTD="pzstd -p ${ZSTD_THREADS}" 903 ZSTD="pzstd -p ${ZSTD_THREADS}"
900 fi 904 fi
901 905
902 tar -I "$ZSTD" -xvf ${SSTATE_PKG} 906 tar -I "$ZSTD" -xvpf ${SSTATE_PKG}
903 # update .siginfo atime on local/NFS mirror if it is a symbolic link 907 # update .siginfo atime on local/NFS mirror if it is a symbolic link
904 [ ! -h ${SSTATE_PKG}.siginfo ] || touch -a ${SSTATE_PKG}.siginfo 2>/dev/null || true 908 [ ! -h ${SSTATE_PKG}.siginfo ] || touch -a ${SSTATE_PKG}.siginfo 2>/dev/null || true
905 # update each symbolic link instead of any referenced file 909 # update each symbolic link instead of any referenced file
@@ -992,9 +996,9 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
992 found.add(tid) 996 found.add(tid)
993 missed.remove(tid) 997 missed.remove(tid)
994 except bb.fetch2.FetchError as e: 998 except bb.fetch2.FetchError as e:
995 bb.debug(2, "SState: Unsuccessful fetch test for %s (%s)" % (srcuri, e)) 999 bb.debug(2, "SState: Unsuccessful fetch test for %s (%s)" % (srcuri, repr(e)))
996 except Exception as e: 1000 except Exception as e:
997 bb.error("SState: cannot test %s: %s" % (srcuri, e)) 1001 bb.error("SState: cannot test %s: %s" % (srcuri, repr(e)))
998 1002
999 if progress: 1003 if progress:
1000 bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d) 1004 bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d)
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass
index 9980b3f..8982489 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes/testimage.bbclass
@@ -36,6 +36,7 @@ TESTIMAGE_AUTO ??= "0"
36# TEST_OVERALL_TIMEOUT can be used to set the maximum time in seconds the tests will be allowed to run (defaults to no limit). 36# TEST_OVERALL_TIMEOUT can be used to set the maximum time in seconds the tests will be allowed to run (defaults to no limit).
37# TEST_QEMUPARAMS can be used to pass extra parameters to qemu, e.g. "-m 1024" for setting the amount of ram to 1 GB. 37# TEST_QEMUPARAMS can be used to pass extra parameters to qemu, e.g. "-m 1024" for setting the amount of ram to 1 GB.
38# TEST_RUNQEMUPARAMS can be used to pass extra parameters to runqemu, e.g. "gl" to enable OpenGL acceleration. 38# TEST_RUNQEMUPARAMS can be used to pass extra parameters to runqemu, e.g. "gl" to enable OpenGL acceleration.
39# QEMU_USE_KVM can be set to "" to disable the use of kvm (by default it is enabled if target_arch == build_arch or both of them are x86 archs)
39 40
40# TESTIMAGE_BOOT_PATTERNS can be used to override certain patterns used to communicate with the target when booting, 41# TESTIMAGE_BOOT_PATTERNS can be used to override certain patterns used to communicate with the target when booting,
41# if a pattern is not specifically present on this variable a default will be used when booting the target. 42# if a pattern is not specifically present on this variable a default will be used when booting the target.
@@ -60,7 +61,7 @@ BASICTESTSUITE = "\
60 ping date df ssh scp python perl gi ptest parselogs \ 61 ping date df ssh scp python perl gi ptest parselogs \
61 logrotate connman systemd oe_syslog pam stap ldd xorg \ 62 logrotate connman systemd oe_syslog pam stap ldd xorg \
62 kernelmodule gcc buildcpio buildlzip buildgalculator \ 63 kernelmodule gcc buildcpio buildlzip buildgalculator \
63 dnf rpm opkg apt weston" 64 dnf rpm opkg apt weston go rust"
64 65
65DEFAULT_TEST_SUITES = "${BASICTESTSUITE}" 66DEFAULT_TEST_SUITES = "${BASICTESTSUITE}"
66 67
@@ -75,6 +76,7 @@ DEFAULT_TEST_SUITES:remove:qemumips64 = "${MIPSREMOVE}"
75 76
76TEST_SUITES ?= "${DEFAULT_TEST_SUITES}" 77TEST_SUITES ?= "${DEFAULT_TEST_SUITES}"
77 78
79QEMU_USE_KVM ?= "1"
78TEST_QEMUBOOT_TIMEOUT ?= "1000" 80TEST_QEMUBOOT_TIMEOUT ?= "1000"
79TEST_OVERALL_TIMEOUT ?= "" 81TEST_OVERALL_TIMEOUT ?= ""
80TEST_TARGET ?= "qemu" 82TEST_TARGET ?= "qemu"
@@ -137,6 +139,7 @@ python do_testimage() {
137 139
138addtask testimage 140addtask testimage
139do_testimage[nostamp] = "1" 141do_testimage[nostamp] = "1"
142do_testimage[network] = "1"
140do_testimage[depends] += "${TESTIMAGEDEPENDS}" 143do_testimage[depends] += "${TESTIMAGEDEPENDS}"
141do_testimage[lockfiles] += "${TESTIMAGELOCK}" 144do_testimage[lockfiles] += "${TESTIMAGELOCK}"
142 145
diff --git a/meta/classes/testsdk.bbclass b/meta/classes/testsdk.bbclass
index 758a23a..8b2e74f 100644
--- a/meta/classes/testsdk.bbclass
+++ b/meta/classes/testsdk.bbclass
@@ -36,12 +36,14 @@ python do_testsdk() {
36} 36}
37addtask testsdk 37addtask testsdk
38do_testsdk[nostamp] = "1" 38do_testsdk[nostamp] = "1"
39do_testsdk[network] = "1"
39 40
40python do_testsdkext() { 41python do_testsdkext() {
41 import_and_run('TESTSDKEXT_CLASS_NAME', d) 42 import_and_run('TESTSDKEXT_CLASS_NAME', d)
42} 43}
43addtask testsdkext 44addtask testsdkext
44do_testsdkext[nostamp] = "1" 45do_testsdkext[nostamp] = "1"
46do_testsdkext[network] = "1"
45 47
46python () { 48python () {
47 if oe.types.boolean(d.getVar("TESTIMAGE_AUTO") or "False"): 49 if oe.types.boolean(d.getVar("TESTIMAGE_AUTO") or "False"):
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
index bae8cad..8d136e9 100644
--- a/meta/classes/uboot-sign.bbclass
+++ b/meta/classes/uboot-sign.bbclass
@@ -131,6 +131,20 @@ concat_dtb_helper() {
131 elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then 131 elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then
132 cd ${DEPLOYDIR} 132 cd ${DEPLOYDIR}
133 cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE} 133 cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
134
135 if [ -n "${UBOOT_CONFIG}" ]
136 then
137 for config in ${UBOOT_MACHINE}; do
138 i=$(expr $i + 1);
139 for type in ${UBOOT_CONFIG}; do
140 j=$(expr $j + 1);
141 if [ $j -eq $i ]
142 then
143 cp ${UBOOT_IMAGE} ${B}/${CONFIG_B_PATH}/u-boot-$type.${UBOOT_SUFFIX}
144 fi
145 done
146 done
147 fi
134 else 148 else
135 bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available." 149 bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
136 fi 150 fi
@@ -205,7 +219,7 @@ install_helper() {
205 fi 219 fi
206} 220}
207 221
208# Install SPL dtb and u-boot nodtb to datadir, 222# Install SPL dtb and u-boot nodtb to datadir,
209install_spl_helper() { 223install_spl_helper() {
210 if [ -f "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then 224 if [ -f "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then
211 install -Dm 0644 ${SPL_DIR}/${SPL_DTB_BINARY} ${D}${datadir}/${SPL_DTB_IMAGE} 225 install -Dm 0644 ${SPL_DIR}/${SPL_DTB_BINARY} ${D}${datadir}/${SPL_DTB_IMAGE}
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 34d6b8f..0466325 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -38,6 +38,7 @@ python do_clean() {
38 38
39addtask checkuri 39addtask checkuri
40do_checkuri[nostamp] = "1" 40do_checkuri[nostamp] = "1"
41do_checkuri[network] = "1"
41python do_checkuri() { 42python do_checkuri() {
42 src_uri = (d.getVar('SRC_URI') or "").split() 43 src_uri = (d.getVar('SRC_URI') or "").split()
43 if len(src_uri) == 0: 44 if len(src_uri) == 0:
diff --git a/meta/classes/waf.bbclass b/meta/classes/waf.bbclass
index df0ab8d..bc594d3 100644
--- a/meta/classes/waf.bbclass
+++ b/meta/classes/waf.bbclass
@@ -43,14 +43,13 @@ BB_HASHBASE_WHITELIST += "WAFLOCK"
43 43
44python waf_preconfigure() { 44python waf_preconfigure() {
45 import subprocess 45 import subprocess
46 from distutils.version import StrictVersion
47 subsrcdir = d.getVar('S') 46 subsrcdir = d.getVar('S')
48 python = d.getVar('WAF_PYTHON') 47 python = d.getVar('WAF_PYTHON')
49 wafbin = os.path.join(subsrcdir, 'waf') 48 wafbin = os.path.join(subsrcdir, 'waf')
50 try: 49 try:
51 result = subprocess.check_output([python, wafbin, '--version'], cwd=subsrcdir, stderr=subprocess.STDOUT) 50 result = subprocess.check_output([python, wafbin, '--version'], cwd=subsrcdir, stderr=subprocess.STDOUT)
52 version = result.decode('utf-8').split()[1] 51 version = result.decode('utf-8').split()[1]
53 if StrictVersion(version) >= StrictVersion("1.8.7"): 52 if bb.utils.vercmp_string_op(version, "1.8.7", ">="):
54 d.setVar("WAF_EXTRA_CONF", "--bindir=${bindir} --libdir=${libdir}") 53 d.setVar("WAF_EXTRA_CONF", "--bindir=${bindir} --libdir=${libdir}")
55 except subprocess.CalledProcessError as e: 54 except subprocess.CalledProcessError as e:
56 bb.warn("Unable to execute waf --version, exit code %d. Assuming waf version without bindir/libdir support." % e.returncode) 55 bb.warn("Unable to execute waf --version, exit code %d. Assuming waf version without bindir/libdir support." % e.returncode)