Diffstat (limited to 'meta/lib/oe')
-rw-r--r--  meta/lib/oe/__init__.py        |   2
-rw-r--r--  meta/lib/oe/bootfiles.py       |   2
-rw-r--r--  meta/lib/oe/go.py              |   6
-rw-r--r--  meta/lib/oe/license.py         |  15
-rw-r--r--  meta/lib/oe/license_finder.py  | 179
-rw-r--r--  meta/lib/oe/package.py         |   3
-rw-r--r--  meta/lib/oe/packagedata.py     |   3
-rw-r--r--  meta/lib/oe/path.py            |   3
-rw-r--r--  meta/lib/oe/reproducible.py    |   5
-rw-r--r--  meta/lib/oe/rust.py            |   2
-rw-r--r--  meta/lib/oe/spdx30_tasks.py    |  45
-rw-r--r--  meta/lib/oe/spdx_common.py     |  41
-rw-r--r--  meta/lib/oe/sstatesig.py       |   2
-rw-r--r--  meta/lib/oe/tune.py            |  81
-rw-r--r--  meta/lib/oe/utils.py           |  48
15 files changed, 374 insertions, 63 deletions
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index dd094a874a..73de774266 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -12,4 +12,4 @@ __path__ = extend_path(__path__, __name__)
 BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \
              "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \
              "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \
-             "cve_check"]
+             "cve_check", "tune"]
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
index 155fe742db..7ee148c4e2 100644
--- a/meta/lib/oe/bootfiles.py
+++ b/meta/lib/oe/bootfiles.py
@@ -10,7 +10,7 @@
 # Returns a list of tuples with (original filepath relative to
 # deploy_dir, desired filepath renaming)
 #
-# Heavily inspired of bootimg-partition.py
+# Heavily inspired of bootimg_partition.py
 #
 def get_boot_files(deploy_dir, boot_files):
     import re
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
index dfd957d157..4559dc63b2 100644
--- a/meta/lib/oe/go.py
+++ b/meta/lib/oe/go.py
@@ -7,6 +7,10 @@
 import re
 
 def map_arch(a):
+    """
+    Map our architecture names to Go's GOARCH names.
+    See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list.
+    """
     if re.match('i.86', a):
         return '386'
     elif a == 'x86_64':
@@ -31,4 +35,4 @@ def map_arch(a):
         return 'riscv64'
     elif a == 'loongarch64':
         return 'loong64'
-    return ''
+    raise KeyError(f"Cannot map architecture {a}")
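Note on the go.py change above: map_arch() now raises KeyError for an unmapped architecture instead of silently returning an empty string, so callers must handle the error. A minimal usage sketch (not part of the commit; assumes a Python environment where the oe modules are importable):

import oe.go

print(oe.go.map_arch("i686"))        # matches 'i.86' above and returns '386'
try:
    oe.go.map_arch("not-a-real-arch")
except KeyError as e:
    # unmapped architectures are now an explicit error rather than ''
    print("unmapped architecture:", e)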
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 6f882c3812..6e55fa1e7f 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -462,3 +462,18 @@ def skip_incompatible_package_licenses(d, pkgs):
             skipped_pkgs[pkg] = incompatible_lic
 
     return skipped_pkgs
+
+def tidy_licenses(value):
+    """
+    Flat, split and sort licenses.
+    """
+    from oe.license import flattened_licenses
+
+    def _choose(a, b):
+        str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
+        return ["(%s | %s)" % (str_a, str_b)]
+
+    if not isinstance(value, str):
+        value = " & ".join(value)
+
+    return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
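The new tidy_licenses() helper above takes either a license expression string or an iterable of license names. A hedged usage sketch (assumes oe.license is importable; the outputs shown are what the implementation above is expected to produce):

import oe.license

# Duplicates are removed and the result is sorted case-insensitively.
print(oe.license.tidy_licenses("MIT & GPL-2.0-only & MIT"))
# expected: ['GPL-2.0-only', 'MIT']

# A list is joined with " & " first, so this is equivalent to the string form.
print(oe.license.tidy_licenses(["Zlib", "MIT"]))
# expected: ['MIT', 'Zlib']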
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py
new file mode 100644
index 0000000000..16f5d7c94c
--- /dev/null
+++ b/meta/lib/oe/license_finder.py
@@ -0,0 +1,179 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import fnmatch
+import hashlib
+import logging
+import os
+import re
+
+import bb
+import bb.utils
+
+logger = logging.getLogger("BitBake.OE.LicenseFinder")
+
+def _load_hash_csv(d):
+    """
+    Load a mapping of (checksum: license name) from all files/license-hashes.csv
+    files that can be found in the available layers.
+    """
+    import csv
+    md5sums = {}
+
+    # Read license md5sums from csv file
+    for path in d.getVar('BBPATH').split(':'):
+        csv_path = os.path.join(path, 'files', 'license-hashes.csv')
+        if os.path.isfile(csv_path):
+            with open(csv_path, newline='') as csv_file:
+                reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license'])
+                for row in reader:
+                    md5sums[row['md5sum']] = row['license']
+
+    return md5sums
+
+
+def _crunch_known_licenses(d):
+    """
+    Calculate the MD5 checksums for the original and "crunched" versions of all
+    known licenses.
+    """
+    md5sums = {}
+
+    lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split()
+    for lic_dir in lic_dirs:
+        for fn in os.listdir(lic_dir):
+            path = os.path.join(lic_dir, fn)
+            # Hash the exact contents
+            md5value = bb.utils.md5_file(path)
+            md5sums[md5value] = fn
+            # Also hash a "crunched" version
+            md5value = _crunch_license(path)
+            md5sums[md5value] = fn
+
+    return md5sums
+
+
+def _crunch_license(licfile):
+    '''
+    Remove non-material text from a license file and then calculate its
+    md5sum. This works well for licenses that contain a copyright statement,
+    but is also a useful way to handle people's insistence upon reformatting
+    the license text slightly (with no material difference to the text of the
+    license).
+    '''
+
+    import oe.utils
+
+    # Note: these are carefully constructed!
+    license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
+    license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
+    copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
+    disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
+    email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
+    header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
+    tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
+    url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
+
+    lictext = []
+    with open(licfile, 'r', errors='surrogateescape') as f:
+        for line in f:
+            # Drop opening statements
+            if copyright_re.match(line):
+                continue
+            elif disclaimer_re.match(line):
+                continue
+            elif email_re.match(line):
+                continue
+            elif header_re.match(line):
+                continue
+            elif tag_re.match(line):
+                continue
+            elif url_re.match(line):
+                continue
+            elif license_title_re.match(line):
+                continue
+            elif license_statement_re.match(line):
+                continue
+            # Strip comment symbols
+            line = line.replace('*', '') \
+                       .replace('#', '')
+            # Unify spelling
+            line = line.replace('sub-license', 'sublicense')
+            # Squash spaces
+            line = oe.utils.squashspaces(line.strip())
+            # Replace smart quotes, double quotes and backticks with single quotes
+            line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
+            # Unify brackets
+            line = line.replace("{", "[").replace("}", "]")
+            if line:
+                lictext.append(line)
+
+    m = hashlib.md5()
+    try:
+        m.update(' '.join(lictext).encode('utf-8'))
+        md5val = m.hexdigest()
+    except UnicodeEncodeError:
+        md5val = None
+    return md5val
+
+
+def find_license_files(srctree, first_only=False):
+    """
+    Search srctree for files that look like they could be licenses.
+    If first_only is True, only return the first file found.
+    """
+    licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
+    skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh")
+    licfiles = []
+    for root, dirs, files in os.walk(srctree):
+        # Sort files so that LICENSE is before LICENSE.subcomponent, which is
+        # meaningful if first_only is set.
+        for fn in sorted(files):
+            if fn.endswith(skip_extensions):
+                continue
+            for spec in licspecs:
+                if fnmatch.fnmatch(fn, spec):
+                    fullpath = os.path.join(root, fn)
+                    if not fullpath in licfiles:
+                        licfiles.append(fullpath)
+                        if first_only:
+                            return licfiles
+
+    return licfiles
+
+
+def match_licenses(licfiles, srctree, d, extra_hashes={}):
+    md5sums = {}
+    md5sums.update(_load_hash_csv(d))
+    md5sums.update(_crunch_known_licenses(d))
+    md5sums.update(extra_hashes)
+
+    licenses = []
+    for licfile in sorted(licfiles):
+        resolved_licfile = d.expand(licfile)
+        md5value = bb.utils.md5_file(resolved_licfile)
+        license = md5sums.get(md5value, None)
+        if not license:
+            crunched_md5 = _crunch_license(resolved_licfile)
+            license = md5sums.get(crunched_md5, None)
+        if not license:
+            license = 'Unknown'
+            logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \
+                        "and replace `Unknown` with the license:\n" \
+                        "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value))
+
+        licenses.append((license, os.path.relpath(licfile, srctree), md5value))
+
+    return licenses
+
+
+def find_licenses(srctree, d, first_only=False, extra_hashes={}):
+    licfiles = find_license_files(srctree, first_only)
+    licenses = match_licenses(licfiles, srctree, d, extra_hashes)
+
+    # FIXME should we grab at least one source file with a license header and add that too?
+
+    return licenses
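As a rough usage sketch for the new module (not part of the commit; the path below is a placeholder): find_license_files() only walks the source tree, so it can be exercised without a datastore, while match_licenses()/find_licenses() additionally need d to locate COMMON_LICENSE_DIR, LICENSE_PATH and any license-hashes.csv files.

import oe.license_finder

# List candidate license files under an extracted source tree.
for path in oe.license_finder.find_license_files("/path/to/extracted/source"):
    print(path)

# Inside a task, the full lookup would be:
#   licenses = oe.license_finder.find_licenses(srctree, d)
# which returns a list of (license_name, path_relative_to_srctree, md5sum) tuples.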
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index 60392cbced..ce69151e5d 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -16,6 +16,7 @@ import mmap
 import subprocess
 import shutil
 
+import bb.parse
 import oe.cachedpath
 
 def runstrip(file, elftype, strip, extra_strip_sections=''):
@@ -1049,6 +1050,7 @@ def copydebugsources(debugsrcdir, sources, d):
         if os.path.exists(p) and not os.listdir(p):
             os.rmdir(p)
 
+@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
 def save_debugsources_info(debugsrcdir, sources_raw, d):
     import json
     import bb.compress.zstd
@@ -1081,6 +1083,7 @@ def save_debugsources_info(debugsrcdir, sources_raw, d):
     with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
         json.dump(sources_dict, f, sort_keys=True)
 
+@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
 def read_debugsources_info(d):
     import json
     import bb.compress.zstd
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index 2d1d6ddeb7..b6a10a930a 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -7,6 +7,7 @@
 import codecs
 import os
 import json
+import bb.parse
 import bb.compress.zstd
 import oe.path
 
@@ -64,6 +65,7 @@ def read_subpkgdata_dict(pkg, d):
         ret[newvar] = subd[var]
     return ret
 
+@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
 def read_subpkgdata_extended(pkg, d):
     import json
     import bb.compress.zstd
@@ -182,6 +184,7 @@ def runtime_mapping_rename(varname, pkg, d):
 
     #bb.note("%s after: %s" % (varname, d.getVar(varname)))
 
+@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
 def emit_pkgdata(pkgfiles, d):
     def process_postinst_on_target(pkg, mlprefix):
         pkgval = d.getVar('PKG:%s' % pkg)
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index 5d21cdcbdf..a1efe97d88 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -10,6 +10,8 @@ import shutil
 import subprocess
 import os.path
 
+import bb.parse
+
 def join(*paths):
     """Like os.path.join but doesn't treat absolute RHS specially"""
     return os.path.normpath("/".join(paths))
@@ -77,6 +79,7 @@ def replace_absolute_symlinks(basedir, d):
         os.remove(path)
         os.symlink(base, path)
 
+@bb.parse.vardepsexclude("TOPDIR")
 def format_display(path, metadata):
     """ Prepare a path for display to the user. """
     rel = relative(metadata.getVar("TOPDIR"), path)
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index cdb38d5aa4..0270024a83 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -75,10 +75,11 @@ def get_source_date_epoch_from_known_files(d, sourcedir):
     return source_date_epoch
 
 def find_git_folder(d, sourcedir):
-    # First guess: UNPACKDIR/git
+    # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX
     # This is the default git fetcher unpack path
     unpackdir = d.getVar('UNPACKDIR')
-    gitpath = os.path.join(unpackdir, "git/.git")
+    default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX')
+    gitpath = os.path.join(unpackdir, default_destsuffix, ".git")
     if os.path.isdir(gitpath):
         return gitpath
 
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py
index 185553eeeb..1dc9cf150d 100644
--- a/meta/lib/oe/rust.py
+++ b/meta/lib/oe/rust.py
@@ -8,6 +8,4 @@
 def arch_to_rust_arch(arch):
     if arch == "ppc64le":
         return "powerpc64le"
-    if arch in ('riscv32', 'riscv64'):
-        return arch + 'gc'
     return arch
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py
index 61d7ba45e3..c352dab152 100644
--- a/meta/lib/oe/spdx30_tasks.py
+++ b/meta/lib/oe/spdx30_tasks.py
@@ -156,6 +156,11 @@ def add_package_files(
         bb.note(f"Skip {topdir}")
         return spdx_files
 
+    check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1"
+    if check_compiled_sources:
+        compiled_sources, types = oe.spdx_common.get_compiled_sources(d)
+        bb.debug(1, f"Total compiled files: {len(compiled_sources)}")
+
     for subdir, dirs, files in os.walk(topdir, onerror=walk_error):
         dirs[:] = [d for d in dirs if d not in ignore_dirs]
         if subdir == str(topdir):
@@ -171,6 +176,11 @@ def add_package_files(
             filename = str(filepath.relative_to(topdir))
             file_purposes = get_purposes(filepath)
 
+            # Check if file is compiled
+            if check_compiled_sources:
+                if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types):
+                    continue
+
             spdx_file = objset.new_file(
                 get_spdxid(file_counter),
                 filename,
@@ -542,7 +552,7 @@ def create_spdx(d):
             )
             build_objset.new_relationship(
                 source_files,
-                oe.spdx30.RelationshipType.hasConcludedLicense,
+                oe.spdx30.RelationshipType.hasDeclaredLicense,
                 [oe.sbom30.get_element_link_id(recipe_spdx_license)],
             )
 
@@ -714,24 +724,23 @@ def create_spdx(d):
                     impact_statement=description,
                 )
 
-                if detail in (
-                    "ignored",
-                    "cpe-incorrect",
-                    "disputed",
-                    "upstream-wontfix",
-                ):
-                    # VEX doesn't have justifications for this
-                    pass
-                elif detail in (
-                    "not-applicable-config",
-                    "not-applicable-platform",
-                ):
-                    for v in spdx_vex:
-                        v.security_justificationType = (
-                            oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
-                        )
-                else:
-                    bb.fatal(f"Unknown detail '{detail}' for ignored {cve}")
+                vex_just_type = d.getVarFlag(
+                    "CVE_CHECK_VEX_JUSTIFICATION", detail
+                )
+                if vex_just_type:
+                    if (
+                        vex_just_type
+                        not in oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS
+                    ):
+                        bb.fatal(
+                            f"Unknown vex justification '{vex_just_type}', detail '{detail}', for ignored {cve}"
+                        )
+
+                    for v in spdx_vex:
+                        v.security_justificationType = oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS[
+                            vex_just_type
+                        ]
+
             elif status == "Unknown":
                 bb.note(f"Skipping {cve} with status 'Unknown'")
             else:
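The reworked VEX handling above reads the justification from the CVE_CHECK_VEX_JUSTIFICATION varflag keyed by the ignore detail and validates it against the SPDX 3.0 named individuals. A small hedged sketch to inspect the accepted values (assumes oe.spdx30 is importable and that NAMED_INDIVIDUALS is the mapping indexed in the code above):

import oe.spdx30

# The varflag value set for a given detail must be one of these names,
# otherwise create_spdx() aborts with bb.fatal().
for name in oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS:
    print(name)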
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py
index 4caefc7673..c2dec65563 100644
--- a/meta/lib/oe/spdx_common.py
+++ b/meta/lib/oe/spdx_common.py
@@ -242,3 +242,44 @@ def fetch_data_to_uri(fd, name):
         uri = uri + "@" + fd.revision
 
     return uri
+
+def is_compiled_source (filename, compiled_sources, types):
+    """
+    Check if the file is a compiled file
+    """
+    import os
+    # If we don't have compiled source, we assume all are compiled.
+    if not compiled_sources:
+        return True
+
+    # We return always true if the file type is not in the list of compiled files.
+    # Some files in the source directory are not compiled, for example, Makefiles,
+    # but also python .py file. We need to include them in the SPDX.
+    basename = os.path.basename(filename)
+    ext = basename.partition(".")[2]
+    if ext not in types:
+        return True
+    # Check that the file is in the list
+    return filename in compiled_sources
+
+def get_compiled_sources(d):
+    """
+    Get list of compiled sources from debug information and normalize the paths
+    """
+    import itertools
+    source_info = oe.package.read_debugsources_info(d)
+    if not source_info:
+        bb.debug(1, "Do not have debugsources.list. Skipping")
+        return [], []
+
+    # Sources are not split now in SPDX, so we aggregate them
+    sources = set(itertools.chain.from_iterable(source_info.values()))
+    # Check extensions of files
+    types = set()
+    for src in sources:
+        basename = os.path.basename(src)
+        ext = basename.partition(".")[2]
+        if ext not in types and ext:
+            types.add(ext)
+    bb.debug(1, f"Num of sources: {len(sources)} and types: {len(types)} {str(types)}")
+    return sources, types
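To illustrate the filtering semantics of the new helpers above, a sketch using plain Python data in place of get_compiled_sources(d) output (the paths are hypothetical; assumes oe.spdx_common is importable):

import oe.spdx_common

compiled_sources = {"/usr/src/debug/foo/1.0/src/main.c"}
types = {"c", "h"}

# A file listed in the debug sources is kept.
print(oe.spdx_common.is_compiled_source("/usr/src/debug/foo/1.0/src/main.c", compiled_sources, types))    # True
# A .c file that the debug info never referenced is filtered out.
print(oe.spdx_common.is_compiled_source("/usr/src/debug/foo/1.0/src/unused.c", compiled_sources, types))  # False
# Files whose extension is not a compiled-source type (e.g. a Makefile) are always kept.
print(oe.spdx_common.is_compiled_source("Makefile", compiled_sources, types))                             # True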
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 826549948e..ef687f5d41 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
+import bb.parse
 import bb.siggen
 import bb.runqueue
 import oe
@@ -493,6 +494,7 @@ def sstate_get_manifest_filename(task, d):
         d2.setVar("SSTATE_MANMACH", extrainf)
     return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
 
+@bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS")
 def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
     d2 = d
     variant = ''
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py
new file mode 100644
index 0000000000..7fda19430d
--- /dev/null
+++ b/meta/lib/oe/tune.py
@@ -0,0 +1,81 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+# riscv_isa_to_tune(isa)
+#
+# Automatically translate a RISC-V ISA string to TUNE_FEATURES
+#
+# Abbreviations, such as rv32g -> rv32imaffd_zicsr_zifencei are supported.
+#
+# Profiles, such as rva22u64, are NOT supported, you must use ISA strings.
+#
+def riscv_isa_to_tune(isa):
+    _isa = isa.lower()
+
+    feature = []
+    iter = 0
+
+    # rv or riscv
+    if _isa[iter:].startswith('rv'):
+        feature.append('rv')
+        iter = iter + 2
+    elif _isa[iter:].startswith('riscv'):
+        feature.append('rv')
+        iter = iter + 5
+    else:
+        # Not a risc-v ISA!
+        return _isa
+
+    while (_isa[iter:]):
+        # Skip _ and whitespace
+        if _isa[iter] == '_' or _isa[iter].isspace():
+            iter = iter + 1
+            continue
+
+        # Length, just capture numbers here
+        if _isa[iter].isdigit():
+            iter_end = iter
+            while iter_end < len(_isa) and _isa[iter_end].isdigit():
+                iter_end = iter_end + 1
+
+            feature.append(_isa[iter:iter_end])
+            iter = iter_end
+            continue
+
+        # Typically i, e or g is next, followed by extensions.
+        # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X
+
+        # If the extension starts with 'Z', 'S' or 'X' use the name until the next _, whitespace or end
+        if _isa[iter] in ['z', 's', 'x']:
+            ext_type = _isa[iter]
+            iter_end = iter + 1
+
+            # Multicharacter extension, these are supposed to have a _ before the next multicharacter extension
+            # See 37.4 and 37.5:
+            # 37.4: Underscores "_" may be used to separate ISA extensions...
+            # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore...
+            # Some extensions permit only alphabetic characters, while others allow alphanumeric chartacters
+            while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace():
+                iter_end = iter_end + 1
+
+            feature.append(_isa[iter:iter_end])
+            iter = iter_end
+            continue
+
+        # 'g' is special, it's an abbreviation for imafd_zicsr_zifencei
+        # When expanding the abbreviation, any additional letters must appear before the _z* extensions
+        if _isa[iter] == 'g':
+            _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei'
+            iter = 0
+            continue
+
+        feature.append(_isa[iter])
+        iter = iter + 1
+        continue
+
+    # Eliminate duplicates, but preserve the order
+    feature = list(dict.fromkeys(feature))
+    return ' '.join(feature)
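A usage sketch for the new riscv_isa_to_tune() helper (not part of the commit; expected outputs are derived from the parser above, assuming oe.tune is importable):

import oe.tune

# Explicit ISA string: prefix, XLEN and each extension become separate TUNE_FEATURES words.
print(oe.tune.riscv_isa_to_tune("rv64imafdc_zicsr_zifencei"))
# -> rv 64 i m a f d c zicsr zifencei

# 'g' is expanded to imafd_zicsr_zifencei before parsing, so rv64gc is equivalent.
print(oe.tune.riscv_isa_to_tune("rv64gc"))
# -> rv 64 i m a f d c zicsr zifencei

# Non-RISC-V input is returned unchanged (lower-cased).
print(oe.tune.riscv_isa_to_tune("x86_64"))
# -> x86_64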
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index d272dd2b8d..779c5e593f 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -9,6 +9,8 @@ import multiprocessing
 import traceback
 import errno
 
+import bb.parse
+
 def read_file(filename):
     try:
         f = open( filename, "r" )
@@ -265,6 +267,7 @@ def execute_pre_post_process(d, cmds):
         bb.note("Executing %s ..." % cmd)
         bb.build.exec_func(cmd, d)
 
+@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
 def get_bb_number_threads(d):
     return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
 
@@ -412,62 +415,31 @@ def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
     return output_str
 
 
-# Helper function to get the host compiler version
-# Do not assume the compiler is gcc
-def get_host_compiler_version(d, taskcontextonly=False):
+# Helper function to get the host gcc version
+def get_host_gcc_version(d, taskcontextonly=False):
     import re, subprocess
 
     if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
         return
 
-    compiler = d.getVar("BUILD_CC")
-    # Get rid of ccache since it is not present when parsing.
-    if compiler.startswith('ccache '):
-        compiler = compiler[7:]
     try:
         env = os.environ.copy()
         # datastore PATH does not contain session PATH as set by environment-setup-...
         # this breaks the install-buildtools use-case
         # env["PATH"] = d.getVar("PATH")
-        output = subprocess.check_output("%s --version" % compiler, \
-                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
-    except subprocess.CalledProcessError as e:
-        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
-
-    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
-    if not match:
-        bb.fatal("Can't get compiler version from %s --version output" % compiler)
-
-    version = match.group(1)
-    return compiler, version
-
-
-def host_gcc_version(d, taskcontextonly=False):
-    import re, subprocess
-
-    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
-        return
-
-    compiler = d.getVar("BUILD_CC")
-    # Get rid of ccache since it is not present when parsing.
-    if compiler.startswith('ccache '):
-        compiler = compiler[7:]
-    try:
-        env = os.environ.copy()
-        env["PATH"] = d.getVar("PATH")
-        output = subprocess.check_output("%s --version" % compiler, \
+        output = subprocess.check_output("gcc --version", \
                     shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
     except subprocess.CalledProcessError as e:
-        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
+        bb.fatal("Error running gcc --version: %s" % (e.output.decode("utf-8")))
 
     match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
     if not match:
-        bb.fatal("Can't get compiler version from %s --version output" % compiler)
+        bb.fatal("Can't get compiler version from gcc --version output")
 
     version = match.group(1)
-    return "-%s" % version if version in ("4.8", "4.9") else ""
-
+    return version
 
+@bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES")
 def get_multilib_datastore(variant, d):
     localdata = bb.data.createCopy(d)
     if variant: