Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/buildhistory.bbclass         | 30
-rw-r--r--  meta/classes/cve-check.bbclass            |  2
-rw-r--r--  meta/classes/populate_sdk_base.bbclass    | 11
-rw-r--r--  meta/classes/rootfs-postcommands.bbclass  |  2
-rw-r--r--  meta/classes/sanity.bbclass               |  2
-rw-r--r--  meta/classes/sstate.bbclass               | 16
-rw-r--r--  meta/classes/toaster.bbclass              |  8
-rw-r--r--  meta/classes/uboot-sign.bbclass           | 16
-rw-r--r--  meta/classes/uninative.bbclass            |  2
9 files changed, 57 insertions(+), 32 deletions(-)
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 49af61c9c5..b2cf9aa28a 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -960,23 +960,19 @@ def write_latest_srcrev(d, pkghistdir):
                         value = value.replace('"', '').strip()
                         old_tag_srcrevs[key] = value
         with open(srcrevfile, 'w') as f:
-            orig_srcrev = d.getVar('SRCREV', False) or 'INVALID'
-            if orig_srcrev != 'INVALID':
-                f.write('# SRCREV = "%s"\n' % orig_srcrev)
-            if len(srcrevs) > 1:
-                for name, srcrev in sorted(srcrevs.items()):
-                    orig_srcrev = d.getVar('SRCREV_%s' % name, False)
-                    if orig_srcrev:
-                        f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev))
-                    f.write('SRCREV_%s = "%s"\n' % (name, srcrev))
-            else:
-                f.write('SRCREV = "%s"\n' % next(iter(srcrevs.values())))
-            if len(tag_srcrevs) > 0:
-                for name, srcrev in sorted(tag_srcrevs.items()):
-                    f.write('# tag_%s = "%s"\n' % (name, srcrev))
-                    if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
-                        pkg = d.getVar('PN')
-                        bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
+            for name, srcrev in sorted(srcrevs.items()):
+                suffix = "_" + name
+                if name == "default":
+                    suffix = ""
+                orig_srcrev = d.getVar('SRCREV%s' % suffix, False)
+                if orig_srcrev:
+                    f.write('# SRCREV%s = "%s"\n' % (suffix, orig_srcrev))
+                f.write('SRCREV%s = "%s"\n' % (suffix, srcrev))
+            for name, srcrev in sorted(tag_srcrevs.items()):
+                f.write('# tag_%s = "%s"\n' % (name, srcrev))
+                if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
+                    pkg = d.getVar('PN')
+                    bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
 
     else:
         if os.path.exists(srcrevfile):
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 4fa1a64f85..a95e810605 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -110,6 +110,7 @@ python do_cve_check () {
 }
 
 addtask cve_check before do_build after do_fetch
+do_cve_check[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
 do_cve_check[depends] = "cve-update-db-native:do_fetch"
 do_cve_check[nostamp] = "1"
 
@@ -142,6 +143,7 @@ python cve_check_write_rootfs_manifest () {
     manifest_name = d.getVar("CVE_CHECK_MANIFEST")
     cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
 
+    bb.utils.mkdirhier(os.path.dirname(manifest_name))
     shutil.copyfile(cve_tmp_file, manifest_name)
 
     if manifest_name and os.path.exists(manifest_name):
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index 2d33611ddd..76757a3a9d 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -90,6 +90,8 @@ SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
 SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
 SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
 
+SDK_PRUNE_SYSROOT_DIRS ?= "/dev"
+
 python write_target_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
@@ -101,6 +103,12 @@ python write_target_sdk_manifest () {
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
+sdk_prune_dirs () {
+    for d in ${SDK_PRUNE_SYSROOT_DIRS}; do
+        rm -rf ${SDK_OUTPUT}${SDKTARGETSYSROOT}$d
+    done
+}
+
 python write_sdk_test_data() {
     from oe.data import export2json
     testdata = "%s/%s.testdata.json" % (d.getVar('SDKDEPLOYDIR'), d.getVar('TOOLCHAIN_OUTPUTNAME'))
@@ -120,8 +128,9 @@ python write_host_sdk_manifest () {
 }
 
 POPULATE_SDK_POST_TARGET_COMMAND_append = " write_sdk_test_data ; "
-POPULATE_SDK_POST_TARGET_COMMAND_append_task-populate-sdk = " write_target_sdk_manifest ; "
+POPULATE_SDK_POST_TARGET_COMMAND_append_task-populate-sdk = " write_target_sdk_manifest; sdk_prune_dirs; "
 POPULATE_SDK_POST_HOST_COMMAND_append_task-populate-sdk = " write_host_sdk_manifest; "
+
 SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}"
 SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} "
 
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index e66ed5938b..87b5751e24 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -60,7 +60,7 @@ python () {
 }
 
 systemd_create_users () {
-    for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd.conf ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd-remote.conf; do
+    for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/*.conf; do
         [ -e $conffile ] || continue
         grep -v "^#" $conffile | sed -e '/^$/d' | while read type name id comment; do
             if [ "$type" = "u" ]; then
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index a2ac4eeb80..c8a42dc8bf 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -395,7 +395,7 @@ def check_connectivity(d):
                 msg += "    Please ensure your host's network is configured correctly.\n"
                 msg += "    If your ISP or network is blocking the above URL,\n"
                 msg += "    try with another domain name, for example by setting:\n"
-                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.yoctoproject.org/\""
+                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\""
                 msg += "    You could also set BB_NO_NETWORK = \"1\" to disable network\n"
                 msg += "    access if all required sources are on local disk.\n"
                 retval = msg
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index da29225983..de6e7fa960 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -827,14 +827,18 @@ sstate_create_package () {
     fi
     chmod 0664 $TFILE
     # Skip if it was already created by some other process
-    if [ ! -e ${SSTATE_PKG} ]; then
+    if [ -h ${SSTATE_PKG} ] && [ ! -e ${SSTATE_PKG} ]; then
+        # There is a symbolic link, but it links to nothing.
+        # Forcefully replace it with the new file.
+        ln -f $TFILE ${SSTATE_PKG} || true
+    elif [ ! -e ${SSTATE_PKG} ]; then
         # Move into place using ln to attempt an atomic op.
         # Abort if it already exists
-        ln $TFILE ${SSTATE_PKG} && rm $TFILE
+        ln $TFILE ${SSTATE_PKG} || true
     else
-        rm $TFILE
+        touch ${SSTATE_PKG} 2>/dev/null || true
     fi
-    touch ${SSTATE_PKG} 2>/dev/null || true
+    rm $TFILE
 }
 
 python sstate_sign_package () {
@@ -864,7 +868,7 @@ python sstate_report_unihash() {
 sstate_unpack_package () {
     tar -xvzf ${SSTATE_PKG}
     # update .siginfo atime on local/NFS mirror if it is a symbolic link
-    [ ! -h ${SSTATE_PKG}.siginfo ] || touch -a ${SSTATE_PKG}.siginfo 2>/dev/null || true
+    [ ! -h ${SSTATE_PKG}.siginfo ] || [ ! -e ${SSTATE_PKG}.siginfo ] || touch -a ${SSTATE_PKG}.siginfo 2>/dev/null || true
     # update each symbolic link instead of any referenced file
     touch --no-dereference ${SSTATE_PKG} 2>/dev/null || true
     [ ! -e ${SSTATE_PKG}.sig ] || touch --no-dereference ${SSTATE_PKG}.sig 2>/dev/null || true
@@ -946,7 +950,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
 
         localdata2 = bb.data.createCopy(localdata)
         srcuri = "file://" + sstatefile
-        localdata.setVar('SRC_URI', srcuri)
+        localdata2.setVar('SRC_URI', srcuri)
         bb.debug(2, "SState: Attempting to fetch %s" % srcuri)
 
         try:
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 9518ddf7a4..f365c09142 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -101,12 +101,12 @@ def _toaster_load_pkgdatafile(dirpath, filepath):
         for line in fin:
             try:
                 kn, kv = line.strip().split(": ", 1)
-                m = re.match(r"^PKG_([^A-Z:]*)", kn)
+                m = re.match(r"^PKG:([^A-Z:]*)", kn)
                 if m:
                     pkgdata['OPKGN'] = m.group(1)
-                kn = "_".join([x for x in kn.split("_") if x.isupper()])
-                pkgdata[kn] = kv.strip()
-                if kn == 'FILES_INFO':
+                kn = kn.split(":")[0]
+                pkgdata[kn] = kv
+                if kn.startswith('FILES_INFO'):
                     pkgdata[kn] = json.loads(kv)
 
             except ValueError:
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
index ba48f24b10..fca9de2934 100644
--- a/meta/classes/uboot-sign.bbclass
+++ b/meta/classes/uboot-sign.bbclass
@@ -131,6 +131,20 @@ concat_dtb_helper() {
     elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then
         cd ${DEPLOYDIR}
         cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
+
+        if [ -n "${UBOOT_CONFIG}" ]
+        then
+            for config in ${UBOOT_MACHINE}; do
+                i=$(expr $i + 1);
+                for type in ${UBOOT_CONFIG}; do
+                    j=$(expr $j + 1);
+                    if [ $j -eq $i ]
+                    then
+                        cp ${UBOOT_IMAGE} ${B}/${CONFIG_B_PATH}/u-boot-$type.${UBOOT_SUFFIX}
+                    fi
+                done
+            done
+        fi
     else
         bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
     fi
@@ -205,7 +219,7 @@ install_helper() {
     fi
 }
 
-# Install SPL dtb and u-boot nodtb to datadir, 
+# Install SPL dtb and u-boot nodtb to datadir,
 install_spl_helper() {
     if [ -f "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then
         install -Dm 0644 ${SPL_DIR}/${SPL_DTB_BINARY} ${D}${datadir}/${SPL_DTB_IMAGE}
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
index 3c7ccd66f4..4412d7c567 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes/uninative.bbclass
@@ -2,7 +2,7 @@ UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/
 UNINATIVE_STAGING_DIR ?= "${STAGING_DIR}"
 
 UNINATIVE_URL ?= "unset"
-UNINATIVE_TARBALL ?= "${BUILD_ARCH}-nativesdk-libc.tar.xz"
+UNINATIVE_TARBALL ?= "${BUILD_ARCH}-nativesdk-libc-${UNINATIVE_VERSION}.tar.xz"
 # Example checksums
 #UNINATIVE_CHECKSUM[aarch64] = "dead"
 #UNINATIVE_CHECKSUM[i686] = "dead"