diff options
Diffstat (limited to 'meta/recipes-core/meta')
20 files changed, 532 insertions, 283 deletions
diff --git a/meta/recipes-core/meta/build-sysroots.bb b/meta/recipes-core/meta/build-sysroots.bb index ad22a75eb2..db05c111ab 100644 --- a/meta/recipes-core/meta/build-sysroots.bb +++ b/meta/recipes-core/meta/build-sysroots.bb | |||
@@ -1,5 +1,6 @@ | |||
1 | INHIBIT_DEFAULT_DEPS = "1" | ||
2 | LICENSE = "MIT" | 1 | LICENSE = "MIT" |
2 | SUMMARY = "Build old style sysroot based on everything in the components directory that matches the current MACHINE" | ||
3 | INHIBIT_DEFAULT_DEPS = "1" | ||
3 | 4 | ||
4 | STANDALONE_SYSROOT = "${STAGING_DIR}/${MACHINE}" | 5 | STANDALONE_SYSROOT = "${STAGING_DIR}/${MACHINE}" |
5 | STANDALONE_SYSROOT_NATIVE = "${STAGING_DIR}/${BUILD_ARCH}" | 6 | STANDALONE_SYSROOT_NATIVE = "${STAGING_DIR}/${BUILD_ARCH}" |
@@ -16,6 +17,18 @@ deltask configure | |||
16 | deltask compile | 17 | deltask compile |
17 | deltask install | 18 | deltask install |
18 | deltask populate_sysroot | 19 | deltask populate_sysroot |
20 | deltask create_spdx | ||
21 | deltask collect_spdx_deps | ||
22 | deltask create_runtime_spdx | ||
23 | deltask recipe_qa | ||
24 | |||
25 | do_build_warn () { | ||
26 | bbwarn "Native or target sysroot population needs to be explicitly selected; please use | ||
27 | bitbake -c build_native_sysroot build-sysroots | ||
28 | bitbake -c build_target_sysroot build-sysroots | ||
29 | or both." | ||
30 | } | ||
31 | addtask do_build_warn before do_build | ||
19 | 32 | ||
20 | python do_build_native_sysroot () { | 33 | python do_build_native_sysroot () { |
21 | targetsysroot = d.getVar("STANDALONE_SYSROOT") | 34 | targetsysroot = d.getVar("STANDALONE_SYSROOT") |
@@ -26,7 +39,7 @@ python do_build_native_sysroot () { | |||
26 | } | 39 | } |
27 | do_build_native_sysroot[cleandirs] = "${STANDALONE_SYSROOT_NATIVE}" | 40 | do_build_native_sysroot[cleandirs] = "${STANDALONE_SYSROOT_NATIVE}" |
28 | do_build_native_sysroot[nostamp] = "1" | 41 | do_build_native_sysroot[nostamp] = "1" |
29 | addtask do_build_native_sysroot before do_build | 42 | addtask do_build_native_sysroot |
30 | 43 | ||
31 | python do_build_target_sysroot () { | 44 | python do_build_target_sysroot () { |
32 | targetsysroot = d.getVar("STANDALONE_SYSROOT") | 45 | targetsysroot = d.getVar("STANDALONE_SYSROOT") |
@@ -37,6 +50,6 @@ python do_build_target_sysroot () { | |||
37 | } | 50 | } |
38 | do_build_target_sysroot[cleandirs] = "${STANDALONE_SYSROOT}" | 51 | do_build_target_sysroot[cleandirs] = "${STANDALONE_SYSROOT}" |
39 | do_build_target_sysroot[nostamp] = "1" | 52 | do_build_target_sysroot[nostamp] = "1" |
40 | addtask do_build_target_sysroot before do_build | 53 | addtask do_build_target_sysroot |
41 | 54 | ||
42 | do_clean[cleandirs] += "${STANDALONE_SYSROOT} ${STANDALONE_SYSROOT_NATIVE}" | 55 | do_clean[cleandirs] += "${STANDALONE_SYSROOT} ${STANDALONE_SYSROOT_NATIVE}" |
diff --git a/meta/recipes-core/meta/buildtools-docs-tarball.bb b/meta/recipes-core/meta/buildtools-docs-tarball.bb new file mode 100644 index 0000000000..72648e3b1c --- /dev/null +++ b/meta/recipes-core/meta/buildtools-docs-tarball.bb | |||
@@ -0,0 +1,18 @@ | |||
1 | require recipes-core/meta/buildtools-tarball.bb | ||
2 | |||
3 | DESCRIPTION = "SDK type target for building a standalone tarball containing the tools needed to build the project docs." | ||
4 | SUMMARY = "SDK type target for building a standalone tarball containing the tools needed to build the project docs." | ||
5 | LICENSE = "MIT" | ||
6 | |||
7 | # Add nativesdk equivalent of build-essentials | ||
8 | TOOLCHAIN_HOST_TASK += "\ | ||
9 | nativesdk-python3-sphinx \ | ||
10 | nativesdk-python3-sphinx-rtd-theme \ | ||
11 | nativesdk-python3-pyyaml \ | ||
12 | " | ||
13 | |||
14 | TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-docs-nativesdk-standalone-${DISTRO_VERSION}" | ||
15 | |||
16 | SDK_TITLE = "Docs Build tools tarball" | ||
17 | |||
18 | TESTSDK_CASES = "buildtools-docs-cases" | ||
diff --git a/meta/recipes-core/meta/buildtools-extended-tarball.bb b/meta/recipes-core/meta/buildtools-extended-tarball.bb index 83e3fddccc..633f8e6b99 100644 --- a/meta/recipes-core/meta/buildtools-extended-tarball.bb +++ b/meta/recipes-core/meta/buildtools-extended-tarball.bb | |||
@@ -28,21 +28,13 @@ TOOLCHAIN_HOST_TASK += "\ | |||
28 | nativesdk-libtool \ | 28 | nativesdk-libtool \ |
29 | nativesdk-pkgconfig \ | 29 | nativesdk-pkgconfig \ |
30 | nativesdk-glibc-utils \ | 30 | nativesdk-glibc-utils \ |
31 | nativesdk-glibc-gconv-ibm850 \ | 31 | nativesdk-glibc-gconvs \ |
32 | nativesdk-glibc-gconv-iso8859-1 \ | ||
33 | nativesdk-glibc-gconv-utf-16 \ | ||
34 | nativesdk-glibc-gconv-cp1250 \ | ||
35 | nativesdk-glibc-gconv-cp1251 \ | ||
36 | nativesdk-glibc-gconv-cp1252 \ | ||
37 | nativesdk-glibc-gconv-euc-jp \ | ||
38 | nativesdk-glibc-gconv-libjis \ | ||
39 | nativesdk-libxcrypt-dev \ | 32 | nativesdk-libxcrypt-dev \ |
40 | nativesdk-parted \ | 33 | nativesdk-parted \ |
41 | nativesdk-dosfstools \ | 34 | nativesdk-dosfstools \ |
42 | nativesdk-gptfdisk \ | 35 | nativesdk-gptfdisk \ |
43 | " | 36 | " |
44 | # gconv-cp1250, cp1251 and euc-jp needed for iconv to work in vim builds | 37 | # gconvs needed for iconv to work in vim builds |
45 | # also copied list from uninative | ||
46 | 38 | ||
47 | TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-extended-nativesdk-standalone-${DISTRO_VERSION}" | 39 | TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-extended-nativesdk-standalone-${DISTRO_VERSION}" |
48 | 40 | ||
diff --git a/meta/recipes-core/meta/buildtools-make-tarball.bb b/meta/recipes-core/meta/buildtools-make-tarball.bb new file mode 100644 index 0000000000..3a9659076f --- /dev/null +++ b/meta/recipes-core/meta/buildtools-make-tarball.bb | |||
@@ -0,0 +1,15 @@ | |||
1 | require recipes-core/meta/buildtools-tarball.bb | ||
2 | |||
3 | DESCRIPTION = "SDK type target for building a standalone make binary. The tarball can be used to run bitbake builds \ | ||
4 | on systems where make is broken (e.g. the 4.2.1 version on CentOS 8 based distros)." | ||
5 | SUMMARY = "Standalone tarball for running builds on systems with inadequate make" | ||
6 | LICENSE = "MIT" | ||
7 | |||
8 | # Add nativesdk equivalent of build-essentials | ||
9 | TOOLCHAIN_HOST_TASK = "\ | ||
10 | nativesdk-sdk-provides-dummy \ | ||
11 | nativesdk-make \ | ||
12 | " | ||
13 | TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-make-nativesdk-standalone-${DISTRO_VERSION}" | ||
14 | |||
15 | SDK_TITLE = "Make build tool" | ||
diff --git a/meta/recipes-core/meta/buildtools-tarball.bb b/meta/recipes-core/meta/buildtools-tarball.bb index 9da81d5523..92fbda335d 100644 --- a/meta/recipes-core/meta/buildtools-tarball.bb +++ b/meta/recipes-core/meta/buildtools-tarball.bb | |||
@@ -7,13 +7,15 @@ TOOLCHAIN_TARGET_TASK ?= "" | |||
7 | 7 | ||
8 | TOOLCHAIN_HOST_TASK ?= "\ | 8 | TOOLCHAIN_HOST_TASK ?= "\ |
9 | nativesdk-sdk-provides-dummy \ | 9 | nativesdk-sdk-provides-dummy \ |
10 | nativesdk-python3-core \ | 10 | nativesdk-python3 \ |
11 | nativesdk-python3-modules \ | ||
12 | nativesdk-python3-misc \ | ||
13 | nativesdk-python3-git \ | 11 | nativesdk-python3-git \ |
14 | nativesdk-python3-jinja2 \ | 12 | nativesdk-python3-jinja2 \ |
15 | nativesdk-python3-testtools \ | 13 | nativesdk-python3-testtools \ |
14 | nativesdk-python3-pip \ | ||
15 | nativesdk-python3-setuptools \ | ||
16 | nativesdk-python3-subunit \ | 16 | nativesdk-python3-subunit \ |
17 | nativesdk-python3-pyyaml \ | ||
18 | nativesdk-python3-websockets \ | ||
17 | nativesdk-ncurses-terminfo-base \ | 19 | nativesdk-ncurses-terminfo-base \ |
18 | nativesdk-chrpath \ | 20 | nativesdk-chrpath \ |
19 | nativesdk-tar \ | 21 | nativesdk-tar \ |
@@ -29,6 +31,9 @@ TOOLCHAIN_HOST_TASK ?= "\ | |||
29 | nativesdk-rpcsvc-proto \ | 31 | nativesdk-rpcsvc-proto \ |
30 | nativesdk-patch \ | 32 | nativesdk-patch \ |
31 | nativesdk-mtools \ | 33 | nativesdk-mtools \ |
34 | nativesdk-zstd \ | ||
35 | nativesdk-lz4 \ | ||
36 | nativesdk-libacl \ | ||
32 | " | 37 | " |
33 | 38 | ||
34 | MULTIMACH_TARGET_SYS = "${SDK_ARCH}-nativesdk${SDK_VENDOR}-${SDK_OS}" | 39 | MULTIMACH_TARGET_SYS = "${SDK_ARCH}-nativesdk${SDK_VENDOR}-${SDK_OS}" |
@@ -47,7 +52,6 @@ RDEPENDS = "${TOOLCHAIN_HOST_TASK}" | |||
47 | 52 | ||
48 | EXCLUDE_FROM_WORLD = "1" | 53 | EXCLUDE_FROM_WORLD = "1" |
49 | 54 | ||
50 | inherit meta | ||
51 | inherit populate_sdk | 55 | inherit populate_sdk |
52 | inherit toolchain-scripts-base | 56 | inherit toolchain-scripts-base |
53 | inherit nopackages | 57 | inherit nopackages |
@@ -59,7 +63,7 @@ do_populate_sdk[stamp-extra-info] = "${PACKAGE_ARCH}" | |||
59 | 63 | ||
60 | REAL_MULTIMACH_TARGET_SYS = "none" | 64 | REAL_MULTIMACH_TARGET_SYS = "none" |
61 | 65 | ||
62 | create_sdk_files_append () { | 66 | create_sdk_files:append () { |
63 | rm -f ${SDK_OUTPUT}/${SDKPATH}/site-config-* | 67 | rm -f ${SDK_OUTPUT}/${SDKPATH}/site-config-* |
64 | rm -f ${SDK_OUTPUT}/${SDKPATH}/environment-setup-* | 68 | rm -f ${SDK_OUTPUT}/${SDKPATH}/environment-setup-* |
65 | rm -f ${SDK_OUTPUT}/${SDKPATH}/version-* | 69 | rm -f ${SDK_OUTPUT}/${SDKPATH}/version-* |
@@ -67,10 +71,17 @@ create_sdk_files_append () { | |||
67 | # Generate new (mini) sdk-environment-setup file | 71 | # Generate new (mini) sdk-environment-setup file |
68 | script=${1:-${SDK_OUTPUT}/${SDKPATH}/environment-setup-${SDK_SYS}} | 72 | script=${1:-${SDK_OUTPUT}/${SDKPATH}/environment-setup-${SDK_SYS}} |
69 | touch $script | 73 | touch $script |
70 | echo 'export PATH=${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH' >> $script | 74 | echo 'export PATH="${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH"' >> $script |
71 | echo 'export OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script | 75 | echo 'export OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script |
72 | echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | 76 | if [ -e "${SDK_OUTPUT}${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt" ]; then |
73 | echo 'export SSL_CERT_FILE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | 77 | echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script |
78 | echo 'export SSL_CERT_FILE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | ||
79 | echo 'export REQUESTS_CA_BUNDLE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | ||
80 | echo 'export CURL_CA_BUNDLE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | ||
81 | fi | ||
82 | echo 'HOST_PKG_PATH=$(command -p pkg-config --variable=pc_path pkg-config 2>/dev/null)' >>$script | ||
83 | echo 'export PKG_CONFIG_LIBDIR=${SDKPATHNATIVE}/${libdir}/pkgconfig:${SDKPATHNATIVE}/${datadir}/pkgconfig:${HOST_PKG_PATH:-/usr/lib/pkgconfig:/usr/share/pkgconfig}' >>$script | ||
84 | echo 'unset HOST_PKG_PATH' | ||
74 | 85 | ||
75 | toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${SDK_SYS} | 86 | toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${SDK_SYS} |
76 | 87 | ||
@@ -87,8 +98,8 @@ EOF | |||
87 | 98 | ||
88 | if [ "${SDKMACHINE}" = "i686" ]; then | 99 | if [ "${SDKMACHINE}" = "i686" ]; then |
89 | echo 'export NO32LIBS="0"' >>$script | 100 | echo 'export NO32LIBS="0"' >>$script |
90 | echo 'echo "$BB_ENV_EXTRAWHITE" | grep -q "NO32LIBS"' >>$script | 101 | echo 'echo "$BB_ENV_PASSTHROUGH_ADDITIONS" | grep -q "NO32LIBS"' >>$script |
91 | echo '[ $? != 0 ] && export BB_ENV_EXTRAWHITE="NO32LIBS $BB_ENV_EXTRAWHITE"' >>$script | 102 | echo '[ $? != 0 ] && export BB_ENV_PASSTHROUGH_ADDITIONS="NO32LIBS $BB_ENV_PASSTHROUGH_ADDITIONS"' >>$script |
92 | fi | 103 | fi |
93 | } | 104 | } |
94 | 105 | ||
@@ -97,3 +108,20 @@ TOOLCHAIN_NEED_CONFIGSITE_CACHE = "" | |||
97 | 108 | ||
98 | # The recipe doesn't need any default deps | 109 | # The recipe doesn't need any default deps |
99 | INHIBIT_DEFAULT_DEPS = "1" | 110 | INHIBIT_DEFAULT_DEPS = "1" |
111 | |||
112 | # Directory in testsdk that contains testcases | ||
113 | TESTSDK_CASES = "buildtools-cases" | ||
114 | |||
115 | python do_testsdk() { | ||
116 | import oeqa.sdk.testsdk | ||
117 | testsdk = oeqa.sdk.testsdk.TestSDK() | ||
118 | |||
119 | cases_path = os.path.join(os.path.abspath(os.path.dirname(oeqa.sdk.testsdk.__file__)), d.getVar("TESTSDK_CASES")) | ||
120 | testsdk.context_executor_class.default_cases = cases_path | ||
121 | |||
122 | testsdk.run(d) | ||
123 | } | ||
124 | addtask testsdk | ||
125 | do_testsdk[nostamp] = "1" | ||
126 | do_testsdk[network] = "1" | ||
127 | do_testsdk[depends] += "xz-native:do_populate_sysroot" | ||
diff --git a/meta/recipes-core/meta/cve-update-db-native.bb b/meta/recipes-core/meta/cve-update-db-native.bb deleted file mode 100644 index cf62e1e32c..0000000000 --- a/meta/recipes-core/meta/cve-update-db-native.bb +++ /dev/null | |||
@@ -1,218 +0,0 @@ | |||
1 | SUMMARY = "Updates the NVD CVE database" | ||
2 | LICENSE = "MIT" | ||
3 | |||
4 | INHIBIT_DEFAULT_DEPS = "1" | ||
5 | |||
6 | inherit native | ||
7 | |||
8 | deltask do_unpack | ||
9 | deltask do_patch | ||
10 | deltask do_configure | ||
11 | deltask do_compile | ||
12 | deltask do_install | ||
13 | deltask do_populate_sysroot | ||
14 | |||
15 | python () { | ||
16 | if not bb.data.inherits_class("cve-check", d): | ||
17 | raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.") | ||
18 | } | ||
19 | |||
20 | python do_fetch() { | ||
21 | """ | ||
22 | Update NVD database with json data feed | ||
23 | """ | ||
24 | import bb.utils | ||
25 | import bb.progress | ||
26 | import sqlite3, urllib, urllib.parse, gzip | ||
27 | from datetime import date | ||
28 | |||
29 | bb.utils.export_proxies(d) | ||
30 | |||
31 | BASE_URL = "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-" | ||
32 | YEAR_START = 2002 | ||
33 | |||
34 | db_file = d.getVar("CVE_CHECK_DB_FILE") | ||
35 | db_dir = os.path.dirname(db_file) | ||
36 | |||
37 | if os.path.exists("{0}-journal".format(db_file)): | ||
38 | # If a journal is present the last update might have been interrupted. In that case, | ||
39 | # just wipe any leftovers and force the DB to be recreated. | ||
40 | os.remove("{0}-journal".format(db_file)) | ||
41 | |||
42 | if os.path.exists(db_file): | ||
43 | os.remove(db_file) | ||
44 | |||
45 | # Don't refresh the database more than once an hour | ||
46 | try: | ||
47 | import time | ||
48 | if time.time() - os.path.getmtime(db_file) < (60*60): | ||
49 | bb.debug(2, "Recently updated, skipping") | ||
50 | return | ||
51 | except OSError: | ||
52 | pass | ||
53 | |||
54 | bb.utils.mkdirhier(db_dir) | ||
55 | |||
56 | # Connect to database | ||
57 | conn = sqlite3.connect(db_file) | ||
58 | c = conn.cursor() | ||
59 | |||
60 | initialize_db(c) | ||
61 | |||
62 | with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f: | ||
63 | total_years = date.today().year + 1 - YEAR_START | ||
64 | for i, year in enumerate(range(YEAR_START, date.today().year + 1)): | ||
65 | bb.debug(2, "Updating %d" % year) | ||
66 | ph.update((float(i + 1) / total_years) * 100) | ||
67 | year_url = BASE_URL + str(year) | ||
68 | meta_url = year_url + ".meta" | ||
69 | json_url = year_url + ".json.gz" | ||
70 | |||
71 | # Retrieve meta last modified date | ||
72 | try: | ||
73 | response = urllib.request.urlopen(meta_url) | ||
74 | except urllib.error.URLError as e: | ||
75 | cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n') | ||
76 | bb.warn("Failed to fetch CVE data (%s)" % e.reason) | ||
77 | return | ||
78 | |||
79 | if response: | ||
80 | for l in response.read().decode("utf-8").splitlines(): | ||
81 | key, value = l.split(":", 1) | ||
82 | if key == "lastModifiedDate": | ||
83 | last_modified = value | ||
84 | break | ||
85 | else: | ||
86 | bb.warn("Cannot parse CVE metadata, update failed") | ||
87 | return | ||
88 | |||
89 | # Compare with current db last modified date | ||
90 | c.execute("select DATE from META where YEAR = ?", (year,)) | ||
91 | meta = c.fetchone() | ||
92 | if not meta or meta[0] != last_modified: | ||
93 | bb.debug(2, "Updating entries") | ||
94 | # Clear products table entries corresponding to current year | ||
95 | c.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,)) | ||
96 | |||
97 | # Update db with current year json file | ||
98 | try: | ||
99 | response = urllib.request.urlopen(json_url) | ||
100 | if response: | ||
101 | update_db(c, gzip.decompress(response.read()).decode('utf-8')) | ||
102 | c.execute("insert or replace into META values (?, ?)", [year, last_modified]) | ||
103 | except urllib.error.URLError as e: | ||
104 | cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n') | ||
105 | bb.warn("Cannot parse CVE data (%s), update failed" % e.reason) | ||
106 | return | ||
107 | else: | ||
108 | bb.debug(2, "Already up to date (last modified %s)" % last_modified) | ||
109 | # Update success, set the date to cve_check file. | ||
110 | if year == date.today().year: | ||
111 | cve_f.write('CVE database update : %s\n\n' % date.today()) | ||
112 | |||
113 | conn.commit() | ||
114 | conn.close() | ||
115 | } | ||
116 | |||
117 | do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}" | ||
118 | do_fetch[file-checksums] = "" | ||
119 | do_fetch[vardeps] = "" | ||
120 | |||
121 | def initialize_db(c): | ||
122 | c.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)") | ||
123 | |||
124 | c.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \ | ||
125 | SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)") | ||
126 | |||
127 | c.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \ | ||
128 | VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \ | ||
129 | VERSION_END TEXT, OPERATOR_END TEXT)") | ||
130 | c.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);") | ||
131 | |||
132 | def parse_node_and_insert(c, node, cveId): | ||
133 | # Parse children node if needed | ||
134 | for child in node.get('children', ()): | ||
135 | parse_node_and_insert(c, child, cveId) | ||
136 | |||
137 | def cpe_generator(): | ||
138 | for cpe in node.get('cpe_match', ()): | ||
139 | if not cpe['vulnerable']: | ||
140 | return | ||
141 | cpe23 = cpe['cpe23Uri'].split(':') | ||
142 | vendor = cpe23[3] | ||
143 | product = cpe23[4] | ||
144 | version = cpe23[5] | ||
145 | |||
146 | if version != '*' and version != '-': | ||
147 | # Version is defined, this is a '=' match | ||
148 | yield [cveId, vendor, product, version, '=', '', ''] | ||
149 | elif version == '-': | ||
150 | # no version information is available | ||
151 | yield [cveId, vendor, product, version, '', '', ''] | ||
152 | else: | ||
153 | # Parse start version, end version and operators | ||
154 | op_start = '' | ||
155 | op_end = '' | ||
156 | v_start = '' | ||
157 | v_end = '' | ||
158 | |||
159 | if 'versionStartIncluding' in cpe: | ||
160 | op_start = '>=' | ||
161 | v_start = cpe['versionStartIncluding'] | ||
162 | |||
163 | if 'versionStartExcluding' in cpe: | ||
164 | op_start = '>' | ||
165 | v_start = cpe['versionStartExcluding'] | ||
166 | |||
167 | if 'versionEndIncluding' in cpe: | ||
168 | op_end = '<=' | ||
169 | v_end = cpe['versionEndIncluding'] | ||
170 | |||
171 | if 'versionEndExcluding' in cpe: | ||
172 | op_end = '<' | ||
173 | v_end = cpe['versionEndExcluding'] | ||
174 | |||
175 | if op_start or op_end or v_start or v_end: | ||
176 | yield [cveId, vendor, product, v_start, op_start, v_end, op_end] | ||
177 | else: | ||
178 | # This is no version information, expressed differently. | ||
179 | # Save processing by representing as -. | ||
180 | yield [cveId, vendor, product, '-', '', '', ''] | ||
181 | |||
182 | c.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()) | ||
183 | |||
184 | def update_db(c, jsondata): | ||
185 | import json | ||
186 | root = json.loads(jsondata) | ||
187 | |||
188 | for elt in root['CVE_Items']: | ||
189 | if not elt['impact']: | ||
190 | continue | ||
191 | |||
192 | accessVector = None | ||
193 | cveId = elt['cve']['CVE_data_meta']['ID'] | ||
194 | cveDesc = elt['cve']['description']['description_data'][0]['value'] | ||
195 | date = elt['lastModifiedDate'] | ||
196 | try: | ||
197 | accessVector = elt['impact']['baseMetricV2']['cvssV2']['accessVector'] | ||
198 | cvssv2 = elt['impact']['baseMetricV2']['cvssV2']['baseScore'] | ||
199 | except KeyError: | ||
200 | cvssv2 = 0.0 | ||
201 | try: | ||
202 | accessVector = accessVector or elt['impact']['baseMetricV3']['cvssV3']['attackVector'] | ||
203 | cvssv3 = elt['impact']['baseMetricV3']['cvssV3']['baseScore'] | ||
204 | except KeyError: | ||
205 | accessVector = accessVector or "UNKNOWN" | ||
206 | cvssv3 = 0.0 | ||
207 | |||
208 | c.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)", | ||
209 | [cveId, cveDesc, cvssv2, cvssv3, date, accessVector]) | ||
210 | |||
211 | configurations = elt['configurations']['nodes'] | ||
212 | for config in configurations: | ||
213 | parse_node_and_insert(c, config, cveId) | ||
214 | |||
215 | |||
216 | do_fetch[nostamp] = "1" | ||
217 | |||
218 | EXCLUDE_FROM_WORLD = "1" | ||
diff --git a/meta/recipes-core/meta/cve-update-nvd2-native.bb b/meta/recipes-core/meta/cve-update-nvd2-native.bb new file mode 100644 index 0000000000..1901641965 --- /dev/null +++ b/meta/recipes-core/meta/cve-update-nvd2-native.bb | |||
@@ -0,0 +1,377 @@ | |||
1 | SUMMARY = "Updates the NVD CVE database" | ||
2 | LICENSE = "MIT" | ||
3 | |||
4 | # Important note: | ||
5 | # This product uses the NVD API but is not endorsed or certified by the NVD. | ||
6 | |||
7 | INHIBIT_DEFAULT_DEPS = "1" | ||
8 | |||
9 | inherit native | ||
10 | |||
11 | deltask do_unpack | ||
12 | deltask do_patch | ||
13 | deltask do_configure | ||
14 | deltask do_compile | ||
15 | deltask do_install | ||
16 | deltask do_populate_sysroot | ||
17 | |||
18 | NVDCVE_URL ?= "https://services.nvd.nist.gov/rest/json/cves/2.0" | ||
19 | |||
20 | # If you have a NVD API key (https://nvd.nist.gov/developers/request-an-api-key) | ||
21 | # then setting this to get higher rate limits. | ||
22 | NVDCVE_API_KEY ?= "" | ||
23 | |||
24 | # CVE database update interval, in seconds. By default: once a day (24*60*60). | ||
25 | # Use 0 to force the update | ||
26 | # Use a negative value to skip the update | ||
27 | CVE_DB_UPDATE_INTERVAL ?= "86400" | ||
28 | |||
29 | # CVE database incremental update age threshold, in seconds. If the database is | ||
30 | # older than this threshold, do a full re-download, else, do an incremental | ||
31 | # update. By default: the maximum allowed value from NVD: 120 days (120*24*60*60) | ||
32 | # Use 0 to force a full download. | ||
33 | CVE_DB_INCR_UPDATE_AGE_THRES ?= "10368000" | ||
34 | |||
35 | # Number of attempts for each http query to nvd server before giving up | ||
36 | CVE_DB_UPDATE_ATTEMPTS ?= "5" | ||
37 | |||
38 | CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_2.db" | ||
39 | |||
40 | python () { | ||
41 | if not bb.data.inherits_class("cve-check", d): | ||
42 | raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.") | ||
43 | } | ||
44 | |||
45 | python do_fetch() { | ||
46 | """ | ||
47 | Update NVD database with API 2.0 | ||
48 | """ | ||
49 | import bb.utils | ||
50 | import bb.progress | ||
51 | import shutil | ||
52 | |||
53 | bb.utils.export_proxies(d) | ||
54 | |||
55 | db_file = d.getVar("CVE_CHECK_DB_FILE") | ||
56 | db_dir = os.path.dirname(db_file) | ||
57 | db_tmp_file = d.getVar("CVE_DB_TEMP_FILE") | ||
58 | |||
59 | cleanup_db_download(db_file, db_tmp_file) | ||
60 | # By default let's update the whole database (since time 0) | ||
61 | database_time = 0 | ||
62 | |||
63 | # The NVD database changes once a day, so no need to update more frequently | ||
64 | # Allow the user to force-update | ||
65 | try: | ||
66 | import time | ||
67 | update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL")) | ||
68 | if update_interval < 0: | ||
69 | bb.note("CVE database update skipped") | ||
70 | return | ||
71 | if time.time() - os.path.getmtime(db_file) < update_interval: | ||
72 | bb.note("CVE database recently updated, skipping") | ||
73 | return | ||
74 | database_time = os.path.getmtime(db_file) | ||
75 | |||
76 | except OSError: | ||
77 | pass | ||
78 | |||
79 | bb.utils.mkdirhier(db_dir) | ||
80 | if os.path.exists(db_file): | ||
81 | shutil.copy2(db_file, db_tmp_file) | ||
82 | |||
83 | if update_db_file(db_tmp_file, d, database_time) == True: | ||
84 | # Update downloaded correctly, can swap files | ||
85 | shutil.move(db_tmp_file, db_file) | ||
86 | else: | ||
87 | # Update failed, do not modify the database | ||
88 | bb.warn("CVE database update failed") | ||
89 | os.remove(db_tmp_file) | ||
90 | } | ||
91 | |||
92 | do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}" | ||
93 | do_fetch[file-checksums] = "" | ||
94 | do_fetch[vardeps] = "" | ||
95 | |||
96 | def cleanup_db_download(db_file, db_tmp_file): | ||
97 | """ | ||
98 | Cleanup the download space from possible failed downloads | ||
99 | """ | ||
100 | |||
101 | # Clean up the updates done on the main file | ||
102 | # Remove it only if a journal file exists - it means a complete re-download | ||
103 | if os.path.exists("{0}-journal".format(db_file)): | ||
104 | # If a journal is present the last update might have been interrupted. In that case, | ||
105 | # just wipe any leftovers and force the DB to be recreated. | ||
106 | os.remove("{0}-journal".format(db_file)) | ||
107 | |||
108 | if os.path.exists(db_file): | ||
109 | os.remove(db_file) | ||
110 | |||
111 | # Clean-up the temporary file downloads, we can remove both journal | ||
112 | # and the temporary database | ||
113 | if os.path.exists("{0}-journal".format(db_tmp_file)): | ||
114 | # If a journal is present the last update might have been interrupted. In that case, | ||
115 | # just wipe any leftovers and force the DB to be recreated. | ||
116 | os.remove("{0}-journal".format(db_tmp_file)) | ||
117 | |||
118 | if os.path.exists(db_tmp_file): | ||
119 | os.remove(db_tmp_file) | ||
120 | |||
121 | def nvd_request_wait(attempt, min_wait): | ||
122 | return min ( ( (2 * attempt) + min_wait ) , 30) | ||
123 | |||
124 | def nvd_request_next(url, attempts, api_key, args, min_wait): | ||
125 | """ | ||
126 | Request next part of the NVD database | ||
127 | NVD API documentation: https://nvd.nist.gov/developers/vulnerabilities | ||
128 | """ | ||
129 | |||
130 | import urllib.request | ||
131 | import urllib.parse | ||
132 | import gzip | ||
133 | import http | ||
134 | import time | ||
135 | |||
136 | request = urllib.request.Request(url + "?" + urllib.parse.urlencode(args)) | ||
137 | if api_key: | ||
138 | request.add_header("apiKey", api_key) | ||
139 | bb.note("Requesting %s" % request.full_url) | ||
140 | |||
141 | for attempt in range(attempts): | ||
142 | try: | ||
143 | r = urllib.request.urlopen(request) | ||
144 | |||
145 | if (r.headers['content-encoding'] == 'gzip'): | ||
146 | buf = r.read() | ||
147 | raw_data = gzip.decompress(buf) | ||
148 | else: | ||
149 | raw_data = r.read().decode("utf-8") | ||
150 | |||
151 | r.close() | ||
152 | |||
153 | except Exception as e: | ||
154 | wait_time = nvd_request_wait(attempt, min_wait) | ||
155 | bb.note("CVE database: received error (%s)" % (e)) | ||
156 | bb.note("CVE database: retrying download after %d seconds. attempted (%d/%d)" % (wait_time, attempt+1, attempts)) | ||
157 | time.sleep(wait_time) | ||
158 | pass | ||
159 | else: | ||
160 | return raw_data | ||
161 | else: | ||
162 | # We failed at all attempts | ||
163 | return None | ||
164 | |||
165 | def update_db_file(db_tmp_file, d, database_time): | ||
166 | """ | ||
167 | Update the given database file | ||
168 | """ | ||
169 | import bb.utils, bb.progress | ||
170 | import datetime | ||
171 | import sqlite3 | ||
172 | import json | ||
173 | |||
174 | # Connect to database | ||
175 | conn = sqlite3.connect(db_tmp_file) | ||
176 | initialize_db(conn) | ||
177 | |||
178 | req_args = {'startIndex' : 0} | ||
179 | |||
180 | incr_update_threshold = int(d.getVar("CVE_DB_INCR_UPDATE_AGE_THRES")) | ||
181 | if database_time != 0: | ||
182 | database_date = datetime.datetime.fromtimestamp(database_time, tz=datetime.timezone.utc) | ||
183 | today_date = datetime.datetime.now(tz=datetime.timezone.utc) | ||
184 | delta = today_date - database_date | ||
185 | if incr_update_threshold == 0: | ||
186 | bb.note("CVE database: forced full update") | ||
187 | elif delta < datetime.timedelta(seconds=incr_update_threshold): | ||
188 | bb.note("CVE database: performing partial update") | ||
189 | # The maximum range for time is 120 days | ||
190 | if delta > datetime.timedelta(days=120): | ||
191 | bb.error("CVE database: Trying to do an incremental update on a larger than supported range") | ||
192 | req_args['lastModStartDate'] = database_date.isoformat() | ||
193 | req_args['lastModEndDate'] = today_date.isoformat() | ||
194 | else: | ||
195 | bb.note("CVE database: file too old, forcing a full update") | ||
196 | else: | ||
197 | bb.note("CVE database: no preexisting database, do a full download") | ||
198 | |||
199 | with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f: | ||
200 | |||
201 | bb.note("Updating entries") | ||
202 | index = 0 | ||
203 | url = d.getVar("NVDCVE_URL") | ||
204 | api_key = d.getVar("NVDCVE_API_KEY") or None | ||
205 | attempts = int(d.getVar("CVE_DB_UPDATE_ATTEMPTS")) | ||
206 | |||
207 | # Recommended by NVD | ||
208 | wait_time = 6 | ||
209 | if api_key: | ||
210 | wait_time = 2 | ||
211 | |||
212 | while True: | ||
213 | req_args['startIndex'] = index | ||
214 | raw_data = nvd_request_next(url, attempts, api_key, req_args, wait_time) | ||
215 | if raw_data is None: | ||
216 | # We haven't managed to download data | ||
217 | return False | ||
218 | |||
219 | data = json.loads(raw_data) | ||
220 | |||
221 | index = data["startIndex"] | ||
222 | total = data["totalResults"] | ||
223 | per_page = data["resultsPerPage"] | ||
224 | bb.note("Got %d entries" % per_page) | ||
225 | for cve in data["vulnerabilities"]: | ||
226 | update_db(conn, cve) | ||
227 | |||
228 | index += per_page | ||
229 | ph.update((float(index) / (total+1)) * 100) | ||
230 | if index >= total: | ||
231 | break | ||
232 | |||
233 | # Recommended by NVD | ||
234 | time.sleep(wait_time) | ||
235 | |||
236 | # Update success, set the date to cve_check file. | ||
237 | cve_f.write('CVE database update : %s\n\n' % datetime.date.today()) | ||
238 | |||
239 | conn.commit() | ||
240 | conn.close() | ||
241 | return True | ||
242 | |||
243 | def initialize_db(conn): | ||
244 | with conn: | ||
245 | c = conn.cursor() | ||
246 | |||
247 | c.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)") | ||
248 | |||
249 | c.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \ | ||
250 | SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT, VECTORSTRING TEXT)") | ||
251 | |||
252 | c.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \ | ||
253 | VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \ | ||
254 | VERSION_END TEXT, OPERATOR_END TEXT)") | ||
255 | c.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);") | ||
256 | |||
257 | c.close() | ||
258 | |||
def parse_node_and_insert(conn, node, cveId):
    """
    Insert PRODUCTS rows for one NVD configuration node.

    Each vulnerable cpeMatch entry of the node becomes one row:
    an exact version match ('='), a bare '-' when no version data is
    present, or a start/end range with the matching operators.

    :param conn: open sqlite3 connection with a PRODUCTS table
    :param node: one "nodes" entry from an NVD 2.0 configuration
    :param cveId: CVE identifier the rows belong to
    """

    def cpe_generator():
        for cpe in node.get('cpeMatch', ()):
            # NOTE: an early "return" here aborts the remaining entries
            # of this node, not just the current one.  This preserves
            # the parser's historical behaviour for non-vulnerable or
            # malformed entries.
            if not cpe['vulnerable']:
                return
            cpe23 = cpe.get('criteria')
            if not cpe23:
                return
            cpe23 = cpe23.split(':')
            # Fields cpe23[3..6] are read below, so at least 7 components
            # are required (a well-formed CPE 2.3 name has 13).  The old
            # "< 6" guard allowed an IndexError on cpe23[6].
            if len(cpe23) < 7:
                return
            vendor = cpe23[3]
            product = cpe23[4]
            version = cpe23[5]

            # cpe23[6] is the CPE "update" field; fold it into the
            # version unless it is the wildcard '*' or the N/A '-'.
            if cpe23[6] == '*' or cpe23[6] == '-':
                version_suffix = ""
            else:
                version_suffix = "_" + cpe23[6]

            if version != '*' and version != '-':
                # Version is defined, this is a '=' match
                yield [cveId, vendor, product, version + version_suffix, '=', '', '']
            elif version == '-':
                # no version information is available
                yield [cveId, vendor, product, version, '', '', '']
            else:
                # Parse start version, end version and operators
                op_start = ''
                op_end = ''
                v_start = ''
                v_end = ''

                if 'versionStartIncluding' in cpe:
                    op_start = '>='
                    v_start = cpe['versionStartIncluding']

                if 'versionStartExcluding' in cpe:
                    op_start = '>'
                    v_start = cpe['versionStartExcluding']

                if 'versionEndIncluding' in cpe:
                    op_end = '<='
                    v_end = cpe['versionEndIncluding']

                if 'versionEndExcluding' in cpe:
                    op_end = '<'
                    v_end = cpe['versionEndExcluding']

                if op_start or op_end or v_start or v_end:
                    yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
                else:
                    # This is no version information, expressed differently.
                    # Save processing by representing as -.
                    yield [cveId, vendor, product, '-', '', '', '']

    conn.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()).close()
317 | |||
def update_db(conn, elt):
    """
    Update a single entry in the on-disk database
    """

    # Access vector / vector string start unknown; filled from the best
    # available CVSS metric below (V2 first, then V30, then V31).
    accessVector = None
    vectorString = None
    cveId = elt['cve']['id']
    # Rejected CVEs are purged from both tables rather than stored.
    if elt['cve']['vulnStatus'] == "Rejected":
        c = conn.cursor()
        c.execute("delete from PRODUCTS where ID = ?;", [cveId])
        c.execute("delete from NVD where ID = ?;", [cveId])
        c.close()
        return
    # Keep the English description; the loop takes the last 'en' entry
    # if several are present.
    cveDesc = ""
    for desc in elt['cve']['descriptions']:
        if desc['lang'] == 'en':
            cveDesc = desc['value']
    date = elt['cve']['lastModified']
    # CVSS V2: any missing key in the chain leaves cvssv2 at 0.0.
    try:
        accessVector = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['accessVector']
        vectorString = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['vectorString']
        cvssv2 = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['baseScore']
    except KeyError:
        cvssv2 = 0.0
    cvssv3 = None
    # CVSS V3.0: only fills fields still unset ("or" keeps V2 values).
    try:
        accessVector = accessVector or elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['attackVector']
        vectorString = vectorString or elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['vectorString']
        cvssv3 = elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['baseScore']
    except KeyError:
        pass
    # CVSS V3.1: again only fills fields still unset.
    # NOTE(review): a metrics list that is present but empty would raise
    # IndexError, which these handlers do not catch — presumably the NVD
    # feed never emits an empty list; confirm against live data.
    try:
        accessVector = accessVector or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['attackVector']
        vectorString = vectorString or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['vectorString']
        cvssv3 = cvssv3 or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['baseScore']
    except KeyError:
        pass
    # Fallbacks when no metric supplied the value.
    accessVector = accessVector or "UNKNOWN"
    vectorString = vectorString or "UNKNOWN"
    cvssv3 = cvssv3 or 0.0

    # "insert or replace" so a re-downloaded CVE overwrites its old row.
    conn.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?, ?)",
                 [cveId, cveDesc, cvssv2, cvssv3, date, accessVector, vectorString]).close()

    try:
        # Remove any pre-existing CVE configuration. Even for partial database
        # update, those will be repopulated. This ensures that old
        # configuration is not kept for an updated CVE.
        conn.execute("delete from PRODUCTS where ID = ?", [cveId]).close()
        for config in elt['cve']['configurations']:
            # This is suboptimal as it doesn't handle AND/OR and negate, but is better than nothing
            for node in config["nodes"]:
                parse_node_and_insert(conn, node, cveId)
    except KeyError:
        # 'configurations' (or 'nodes') absent — nothing to record.
        bb.note("CVE %s has no configurations" % cveId)
374 | |||
375 | do_fetch[nostamp] = "1" | ||
376 | |||
377 | EXCLUDE_FROM_WORLD = "1" | ||
diff --git a/meta/recipes-core/meta/dummy-sdk-package.inc b/meta/recipes-core/meta/dummy-sdk-package.inc index bedde2965c..bd26e39ad3 100644 --- a/meta/recipes-core/meta/dummy-sdk-package.inc +++ b/meta/recipes-core/meta/dummy-sdk-package.inc | |||
@@ -13,7 +13,7 @@ python() { | |||
13 | d.setVar('PACKAGE_ARCH', '${DUMMYARCH}') | 13 | d.setVar('PACKAGE_ARCH', '${DUMMYARCH}') |
14 | } | 14 | } |
15 | 15 | ||
16 | ALLOW_EMPTY_${PN} = "1" | 16 | ALLOW_EMPTY:${PN} = "1" |
17 | 17 | ||
18 | PR[vardeps] += "DUMMYPROVIDES" | 18 | PR[vardeps] += "DUMMYPROVIDES" |
19 | PR[vardeps] += "DUMMYPROVIDES_PACKAGES" | 19 | PR[vardeps] += "DUMMYPROVIDES_PACKAGES" |
@@ -22,10 +22,10 @@ DUMMYPROVIDES_PACKAGES ??= "" | |||
22 | DUMMYPROVIDES_PACKAGES_MULTILIB = "${@' '.join([multilib_pkg_extend(d, pkg) for pkg in d.getVar('DUMMYPROVIDES_PACKAGES').split()])}" | 22 | DUMMYPROVIDES_PACKAGES_MULTILIB = "${@' '.join([multilib_pkg_extend(d, pkg) for pkg in d.getVar('DUMMYPROVIDES_PACKAGES').split()])}" |
23 | DUMMYPROVIDES += "${DUMMYPROVIDES_PACKAGES_MULTILIB}" | 23 | DUMMYPROVIDES += "${DUMMYPROVIDES_PACKAGES_MULTILIB}" |
24 | 24 | ||
25 | python populate_packages_prepend() { | 25 | python populate_packages:prepend() { |
26 | p = d.getVar("PN") | 26 | p = d.getVar("PN") |
27 | d.appendVar("RPROVIDES_%s" % p, "${DUMMYPROVIDES}") | 27 | d.appendVar("RPROVIDES:%s" % p, "${DUMMYPROVIDES}") |
28 | d.appendVar("RCONFLICTS_%s" % p, "${DUMMYPROVIDES}") | 28 | d.appendVar("RCONFLICTS:%s" % p, "${DUMMYPROVIDES}") |
29 | d.appendVar("RREPLACES_%s" % p, "${DUMMYPROVIDES_PACKAGES_MULTILIB}") | 29 | d.appendVar("RREPLACES:%s" % p, "${DUMMYPROVIDES_PACKAGES_MULTILIB}") |
30 | } | 30 | } |
31 | 31 | ||
diff --git a/meta/recipes-core/meta/meta-environment-extsdk.bb b/meta/recipes-core/meta/meta-environment-extsdk.bb index 2076b56f25..706312b0d6 100644 --- a/meta/recipes-core/meta/meta-environment-extsdk.bb +++ b/meta/recipes-core/meta/meta-environment-extsdk.bb | |||
@@ -4,7 +4,7 @@ require meta-environment.bb | |||
4 | 4 | ||
5 | PN = "meta-environment-extsdk-${MACHINE}" | 5 | PN = "meta-environment-extsdk-${MACHINE}" |
6 | 6 | ||
7 | create_sdk_files_append() { | 7 | create_sdk_files:append() { |
8 | local sysroot=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${MACHINE} | 8 | local sysroot=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${MACHINE} |
9 | local sdkpathnative=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${BUILD_ARCH} | 9 | local sdkpathnative=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${BUILD_ARCH} |
10 | 10 | ||
diff --git a/meta/recipes-core/meta/meta-environment.bb b/meta/recipes-core/meta/meta-environment.bb index da1230bead..65436bc3e6 100644 --- a/meta/recipes-core/meta/meta-environment.bb +++ b/meta/recipes-core/meta/meta-environment.bb | |||
@@ -1,6 +1,5 @@ | |||
1 | SUMMARY = "Package of environment files for SDK" | 1 | SUMMARY = "Package of environment files for SDK" |
2 | LICENSE = "MIT" | 2 | LICENSE = "MIT" |
3 | PR = "r8" | ||
4 | 3 | ||
5 | EXCLUDE_FROM_WORLD = "1" | 4 | EXCLUDE_FROM_WORLD = "1" |
6 | 5 | ||
@@ -9,7 +8,7 @@ MODIFYTOS = "0" | |||
9 | REAL_MULTIMACH_TARGET_SYS = "${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS}" | 8 | REAL_MULTIMACH_TARGET_SYS = "${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS}" |
10 | 9 | ||
11 | inherit toolchain-scripts | 10 | inherit toolchain-scripts |
12 | TOOLCHAIN_NEED_CONFIGSITE_CACHE_append = " zlib" | 11 | TOOLCHAIN_NEED_CONFIGSITE_CACHE:append = " zlib" |
13 | # Need to expand here before cross-canadian changes HOST_ARCH -> SDK_ARCH | 12 | # Need to expand here before cross-canadian changes HOST_ARCH -> SDK_ARCH |
14 | TOOLCHAIN_CONFIGSITE_NOCACHE := "${TOOLCHAIN_CONFIGSITE_NOCACHE}" | 13 | TOOLCHAIN_CONFIGSITE_NOCACHE := "${TOOLCHAIN_CONFIGSITE_NOCACHE}" |
15 | 14 | ||
@@ -47,6 +46,11 @@ python do_generate_content() { | |||
47 | } | 46 | } |
48 | addtask generate_content before do_install after do_compile | 47 | addtask generate_content before do_install after do_compile |
49 | 48 | ||
49 | python () { | ||
50 | sitefiles, searched = siteinfo_get_files(d, sysrootcache=False) | ||
51 | d.appendVarFlag("do_generate_content", "file-checksums", " " + " ".join(searched)) | ||
52 | } | ||
53 | |||
50 | create_sdk_files() { | 54 | create_sdk_files() { |
51 | # Setup site file for external use | 55 | # Setup site file for external use |
52 | toolchain_create_sdk_siteconfig ${SDK_OUTPUT}/${SDKPATH}/site-config-${REAL_MULTIMACH_TARGET_SYS} | 56 | toolchain_create_sdk_siteconfig ${SDK_OUTPUT}/${SDKPATH}/site-config-${REAL_MULTIMACH_TARGET_SYS} |
@@ -66,7 +70,7 @@ do_install() { | |||
66 | 70 | ||
67 | PN = "meta-environment-${MACHINE}" | 71 | PN = "meta-environment-${MACHINE}" |
68 | PACKAGES = "${PN}" | 72 | PACKAGES = "${PN}" |
69 | FILES_${PN}= " \ | 73 | FILES:${PN}= " \ |
70 | ${SDKPATH}/* \ | 74 | ${SDKPATH}/* \ |
71 | " | 75 | " |
72 | 76 | ||
diff --git a/meta/recipes-core/meta/meta-go-toolchain.bb b/meta/recipes-core/meta/meta-go-toolchain.bb index dde385c1b1..c24518efe3 100644 --- a/meta/recipes-core/meta/meta-go-toolchain.bb +++ b/meta/recipes-core/meta/meta-go-toolchain.bb | |||
@@ -3,10 +3,10 @@ LICENSE = "MIT" | |||
3 | 3 | ||
4 | inherit populate_sdk | 4 | inherit populate_sdk |
5 | 5 | ||
6 | TOOLCHAIN_HOST_TASK_append = " \ | 6 | TOOLCHAIN_HOST_TASK:append = " \ |
7 | packagegroup-go-cross-canadian-${MACHINE} \ | 7 | packagegroup-go-cross-canadian-${MACHINE} \ |
8 | " | 8 | " |
9 | 9 | ||
10 | TOOLCHAIN_TARGET_TASK_append = " \ | 10 | TOOLCHAIN_TARGET_TASK:append = " \ |
11 | ${@multilib_pkg_extend(d, 'packagegroup-go-sdk-target')} \ | 11 | ${@multilib_pkg_extend(d, 'packagegroup-go-sdk-target')} \ |
12 | " | 12 | " |
diff --git a/meta/recipes-core/meta/meta-ide-support.bb b/meta/recipes-core/meta/meta-ide-support.bb index 768f6f4bb6..d85aa120c0 100644 --- a/meta/recipes-core/meta/meta-ide-support.bb +++ b/meta/recipes-core/meta/meta-ide-support.bb | |||
@@ -2,14 +2,39 @@ SUMMARY = "Integrated Development Environment support" | |||
2 | DESCRIPTION = "Meta package for ensuring the build directory contains all appropriate toolchain packages for using an IDE" | 2 | DESCRIPTION = "Meta package for ensuring the build directory contains all appropriate toolchain packages for using an IDE" |
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | 4 | ||
5 | DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native cmake-native" | 5 | DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native cmake-native autoconf-native automake-native meson-native intltool-native pkgconfig-native" |
6 | PR = "r3" | ||
7 | RM_WORK_EXCLUDE += "${PN}" | 6 | RM_WORK_EXCLUDE += "${PN}" |
8 | 7 | ||
9 | inherit meta toolchain-scripts nopackages | 8 | inherit toolchain-scripts nopackages deploy testsdk |
9 | |||
10 | TESTSDK_CLASS_NAME = "oeqa.sdk.testmetaidesupport.TestSDK" | ||
10 | 11 | ||
11 | do_populate_ide_support () { | 12 | do_populate_ide_support () { |
12 | toolchain_create_tree_env_script | 13 | toolchain_create_tree_env_script |
13 | } | 14 | } |
14 | 15 | ||
15 | addtask populate_ide_support before do_build after do_install | 16 | python () { |
17 | sitefiles, searched = siteinfo_get_files(d, sysrootcache=False) | ||
18 | d.setVar("CONFIG_SITE", " ".join(sitefiles)) | ||
19 | d.appendVarFlag("do_populate_ide_support", "file-checksums", " " + " ".join(searched)) | ||
20 | } | ||
21 | |||
22 | addtask populate_ide_support before do_deploy after do_install | ||
23 | |||
24 | python do_write_test_data() { | ||
25 | from oe.data import export2json | ||
26 | |||
27 | out_dir = d.getVar('B') | ||
28 | testdata_name = os.path.join(out_dir, "%s.testdata.json" % d.getVar('PN')) | ||
29 | |||
30 | export2json(d, testdata_name) | ||
31 | } | ||
32 | addtask write_test_data before do_deploy after do_install | ||
33 | |||
34 | do_deploy () { | ||
35 | install ${B}/* ${DEPLOYDIR} | ||
36 | } | ||
37 | |||
38 | addtask deploy before do_build | ||
39 | |||
40 | do_build[deptask] += "do_prepare_recipe_sysroot" | ||
diff --git a/meta/recipes-core/meta/meta-toolchain.bb b/meta/recipes-core/meta/meta-toolchain.bb index b02b0665e6..260e03934e 100644 --- a/meta/recipes-core/meta/meta-toolchain.bb +++ b/meta/recipes-core/meta/meta-toolchain.bb | |||
@@ -1,6 +1,5 @@ | |||
1 | SUMMARY = "Meta package for building an installable toolchain" | 1 | SUMMARY = "Meta package for building an installable toolchain" |
2 | LICENSE = "MIT" | 2 | LICENSE = "MIT" |
3 | 3 | ||
4 | PR = "r7" | ||
5 | 4 | ||
6 | inherit populate_sdk | 5 | inherit populate_sdk |
diff --git a/meta/recipes-core/meta/meta-world-pkgdata.bb b/meta/recipes-core/meta/meta-world-pkgdata.bb index b299861375..0438bf6138 100644 --- a/meta/recipes-core/meta/meta-world-pkgdata.bb +++ b/meta/recipes-core/meta/meta-world-pkgdata.bb | |||
@@ -33,6 +33,8 @@ deltask do_patch | |||
33 | deltask do_configure | 33 | deltask do_configure |
34 | deltask do_compile | 34 | deltask do_compile |
35 | deltask do_install | 35 | deltask do_install |
36 | deltask do_create_spdx | ||
37 | deltask do_create_spdx_runtime | ||
36 | 38 | ||
37 | do_prepare_recipe_sysroot[deptask] = "" | 39 | do_prepare_recipe_sysroot[deptask] = "" |
38 | 40 | ||
diff --git a/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb index 4909401c5a..bb4e746237 100644 --- a/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb +++ b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb | |||
@@ -36,7 +36,6 @@ DUMMYPROVIDES = "\ | |||
36 | /usr/bin/perl \ | 36 | /usr/bin/perl \ |
37 | " | 37 | " |
38 | 38 | ||
39 | PR = "r2" | ||
40 | 39 | ||
41 | require dummy-sdk-package.inc | 40 | require dummy-sdk-package.inc |
42 | 41 | ||
diff --git a/meta/recipes-core/meta/signing-keys.bb b/meta/recipes-core/meta/signing-keys.bb index 5bab94aa36..107a39d658 100644 --- a/meta/recipes-core/meta/signing-keys.bb +++ b/meta/recipes-core/meta/signing-keys.bb | |||
@@ -14,9 +14,11 @@ SYSROOT_DIRS += "${sysconfdir}/pki" | |||
14 | 14 | ||
15 | PACKAGES =+ "${PN}-ipk ${PN}-rpm ${PN}-packagefeed" | 15 | PACKAGES =+ "${PN}-ipk ${PN}-rpm ${PN}-packagefeed" |
16 | 16 | ||
17 | FILES_${PN}-rpm = "${sysconfdir}/pki/rpm-gpg" | 17 | FILES:${PN}-rpm = "${sysconfdir}/pki/rpm-gpg" |
18 | FILES_${PN}-ipk = "${sysconfdir}/pki/ipk-gpg" | 18 | FILES:${PN}-ipk = "${sysconfdir}/pki/ipk-gpg" |
19 | FILES_${PN}-packagefeed = "${sysconfdir}/pki/packagefeed-gpg" | 19 | FILES:${PN}-packagefeed = "${sysconfdir}/pki/packagefeed-gpg" |
20 | |||
21 | RDEPENDS:${PN}-dev = "" | ||
20 | 22 | ||
21 | python do_get_public_keys () { | 23 | python do_get_public_keys () { |
22 | from oe.gpg_sign import get_signer | 24 | from oe.gpg_sign import get_signer |
diff --git a/meta/recipes-core/meta/target-sdk-provides-dummy.bb b/meta/recipes-core/meta/target-sdk-provides-dummy.bb index e3beeb796c..849407cca5 100644 --- a/meta/recipes-core/meta/target-sdk-provides-dummy.bb +++ b/meta/recipes-core/meta/target-sdk-provides-dummy.bb | |||
@@ -58,4 +58,4 @@ DUMMYPROVIDES = "\ | |||
58 | 58 | ||
59 | require dummy-sdk-package.inc | 59 | require dummy-sdk-package.inc |
60 | 60 | ||
61 | SSTATE_DUPWHITELIST += "${PKGDATA_DIR}/${PN} ${PKGDATA_DIR}/runtime/${PN}" | 61 | SSTATE_ALLOW_OVERLAP_FILES += "${PKGDATA_DIR}/${PN} ${PKGDATA_DIR}/runtime/${PN}" |
diff --git a/meta/recipes-core/meta/testexport-tarball.bb b/meta/recipes-core/meta/testexport-tarball.bb index daedd78cb4..abdd009252 100644 --- a/meta/recipes-core/meta/testexport-tarball.bb +++ b/meta/recipes-core/meta/testexport-tarball.bb | |||
@@ -4,7 +4,7 @@ DESCRIPTION = "SDK type target for standalone tarball containing packages define | |||
4 | SUMMARY = "Standalone tarball for test systems with missing software" | 4 | SUMMARY = "Standalone tarball for test systems with missing software" |
5 | LICENSE = "MIT" | 5 | LICENSE = "MIT" |
6 | 6 | ||
7 | TEST_EXPORT_SDK_PACKAGES ??= "" | 7 | require conf/testexport.conf |
8 | 8 | ||
9 | TOOLCHAIN_TARGET_TASK ?= "" | 9 | TOOLCHAIN_TARGET_TASK ?= "" |
10 | 10 | ||
@@ -26,7 +26,6 @@ RDEPENDS = "${TOOLCHAIN_HOST_TASK}" | |||
26 | 26 | ||
27 | EXCLUDE_FROM_WORLD = "1" | 27 | EXCLUDE_FROM_WORLD = "1" |
28 | 28 | ||
29 | inherit meta | ||
30 | inherit populate_sdk | 29 | inherit populate_sdk |
31 | inherit toolchain-scripts-base | 30 | inherit toolchain-scripts-base |
32 | inherit nopackages | 31 | inherit nopackages |
@@ -38,7 +37,7 @@ do_populate_sdk[stamp-extra-info] = "${PACKAGE_ARCH}" | |||
38 | 37 | ||
39 | REAL_MULTIMACH_TARGET_SYS = "none" | 38 | REAL_MULTIMACH_TARGET_SYS = "none" |
40 | 39 | ||
41 | create_sdk_files_append () { | 40 | create_sdk_files:append () { |
42 | rm -f ${SDK_OUTPUT}/${SDKPATH}/site-config-* | 41 | rm -f ${SDK_OUTPUT}/${SDKPATH}/site-config-* |
43 | rm -f ${SDK_OUTPUT}/${SDKPATH}/environment-setup-* | 42 | rm -f ${SDK_OUTPUT}/${SDKPATH}/environment-setup-* |
44 | rm -f ${SDK_OUTPUT}/${SDKPATH}/version-* | 43 | rm -f ${SDK_OUTPUT}/${SDKPATH}/version-* |
@@ -58,8 +57,8 @@ create_sdk_files_append () { | |||
58 | 57 | ||
59 | if [ "${SDKMACHINE}" = "i686" ]; then | 58 | if [ "${SDKMACHINE}" = "i686" ]; then |
60 | echo 'export NO32LIBS="0"' >>$script | 59 | echo 'export NO32LIBS="0"' >>$script |
61 | echo 'echo "$BB_ENV_EXTRAWHITE" | grep -q "NO32LIBS"' >>$script | 60 | echo 'echo "$BB_ENV_PASSTHROUGH_ADDITIONS" | grep -q "NO32LIBS"' >>$script |
62 | echo '[ $? != 0 ] && export BB_ENV_EXTRAWHITE="NO32LIBS $BB_ENV_EXTRAWHITE"' >>$script | 61 | echo '[ $? != 0 ] && export BB_ENV_PASSTHROUGH_ADDITIONS="NO32LIBS $BB_ENV_PASSTHROUGH_ADDITIONS"' >>$script |
63 | fi | 62 | fi |
64 | } | 63 | } |
65 | 64 | ||
diff --git a/meta/recipes-core/meta/uninative-tarball.bb b/meta/recipes-core/meta/uninative-tarball.bb index c4a6c96b4d..7eebcaf11a 100644 --- a/meta/recipes-core/meta/uninative-tarball.bb +++ b/meta/recipes-core/meta/uninative-tarball.bb | |||
@@ -3,22 +3,16 @@ LICENSE = "MIT" | |||
3 | 3 | ||
4 | TOOLCHAIN_TARGET_TASK = "" | 4 | TOOLCHAIN_TARGET_TASK = "" |
5 | 5 | ||
6 | # ibm850 - mcopy from mtools | ||
7 | # iso8859-1 - guile | ||
8 | # utf-16, cp1252 - binutils-windres | ||
9 | TOOLCHAIN_HOST_TASK = "\ | 6 | TOOLCHAIN_HOST_TASK = "\ |
10 | nativesdk-glibc \ | 7 | nativesdk-glibc \ |
11 | nativesdk-glibc-gconv-ibm850 \ | 8 | nativesdk-glibc-dbg \ |
12 | nativesdk-glibc-gconv-iso8859-1 \ | 9 | nativesdk-glibc-gconvs \ |
13 | nativesdk-glibc-gconv-utf-16 \ | ||
14 | nativesdk-glibc-gconv-cp1252 \ | ||
15 | nativesdk-glibc-gconv-euc-jp \ | ||
16 | nativesdk-glibc-gconv-libjis \ | ||
17 | nativesdk-patchelf \ | 10 | nativesdk-patchelf \ |
18 | nativesdk-libxcrypt \ | 11 | nativesdk-libxcrypt \ |
19 | nativesdk-libxcrypt-compat \ | 12 | nativesdk-libxcrypt-compat \ |
20 | nativesdk-libnss-nis \ | 13 | nativesdk-libnss-nis \ |
21 | nativesdk-sdk-provides-dummy \ | 14 | nativesdk-sdk-provides-dummy \ |
15 | nativesdk-libgcc \ | ||
22 | " | 16 | " |
23 | 17 | ||
24 | INHIBIT_DEFAULT_DEPS = "1" | 18 | INHIBIT_DEFAULT_DEPS = "1" |
@@ -35,7 +29,6 @@ RDEPENDS = "${TOOLCHAIN_HOST_TASK}" | |||
35 | 29 | ||
36 | EXCLUDE_FROM_WORLD = "1" | 30 | EXCLUDE_FROM_WORLD = "1" |
37 | 31 | ||
38 | inherit meta | ||
39 | inherit populate_sdk | 32 | inherit populate_sdk |
40 | inherit nopackages | 33 | inherit nopackages |
41 | 34 | ||
diff --git a/meta/recipes-core/meta/wic-tools.bb b/meta/recipes-core/meta/wic-tools.bb index bc6cc0d183..76494e7fca 100644 --- a/meta/recipes-core/meta/wic-tools.bb +++ b/meta/recipes-core/meta/wic-tools.bb | |||
@@ -4,14 +4,15 @@ LICENSE = "MIT" | |||
4 | 4 | ||
5 | DEPENDS = "\ | 5 | DEPENDS = "\ |
6 | parted-native gptfdisk-native dosfstools-native \ | 6 | parted-native gptfdisk-native dosfstools-native \ |
7 | mtools-native bmap-tools-native grub-native cdrtools-native \ | 7 | mtools-native bmaptool-native grub-native cdrtools-native \ |
8 | btrfs-tools-native squashfs-tools-native pseudo-native \ | 8 | btrfs-tools-native squashfs-tools-native pseudo-native \ |
9 | e2fsprogs-native util-linux-native tar-native\ | 9 | e2fsprogs-native util-linux-native tar-native erofs-utils-native \ |
10 | virtual/${TARGET_PREFIX}binutils \ | ||
10 | " | 11 | " |
11 | DEPENDS_append_x86 = " syslinux-native syslinux grub-efi systemd-boot" | 12 | DEPENDS:append:x86 = " syslinux-native syslinux grub-efi systemd-boot" |
12 | DEPENDS_append_x86-64 = " syslinux-native syslinux grub-efi systemd-boot" | 13 | DEPENDS:append:x86-64 = " syslinux-native syslinux grub-efi systemd-boot" |
13 | DEPENDS_append_x86-x32 = " syslinux-native syslinux grub-efi" | 14 | DEPENDS:append:x86-x32 = " syslinux-native syslinux grub-efi" |
14 | DEPENDS_append_aarch64 = " grub-efi systemd-boot" | 15 | DEPENDS:append:aarch64 = " grub-efi systemd-boot" |
15 | 16 | ||
16 | INHIBIT_DEFAULT_DEPS = "1" | 17 | INHIBIT_DEFAULT_DEPS = "1" |
17 | 18 | ||